Example #1
    def _batch_norm(self, name, x, add_ops=True):
        """Applies batch normalization with learned scale and shift."""
        with tf.variable_scope(name):
            # Channel count; fall back to an unspecified shape if it is
            # not statically known.
            n_out = x.get_shape()[-1]
            try:
                n_out = int(n_out)
                shape = [n_out]
            except (TypeError, ValueError):
                shape = None
            # Shift (beta) initialized to 0, scale (gamma) initialized to 1.
            beta = self._weight_variable(shape,
                                         init_method="constant",
                                         init_param={"val": 0.0},
                                         name="beta",
                                         dtype=self.dtype)
            gamma = self._weight_variable(shape,
                                          init_method="constant",
                                          init_param={"val": 1.0},
                                          name="gamma",
                                          dtype=self.dtype)
            # Normalize over batch, height, and width (NHWC layout).
            normed, ops = batch_norm(x,
                                     self.is_training,
                                     gamma=gamma,
                                     beta=beta,
                                     axes=[0, 1, 2],
                                     eps=1e-3,
                                     name="bn_out")
            # Collect the moving-average update ops so the caller can run
            # them alongside the training step.
            if add_ops and ops is not None:
                self._bn_update_ops.extend(ops)
            return normed
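
In Example #1 the moving-average update ops returned by batch_norm are accumulated in self._bn_update_ops instead of being run inline. A minimal sketch of how such collected ops are typically consumed in TF1-style code (the toy loss and the bn_update_ops name here are assumptions for illustration, not part of the original snippet):

    import tensorflow as tf

    # Toy graph; `bn_update_ops` stands in for the list accumulated in
    # `self._bn_update_ops` by `_batch_norm` above (assumed name).
    x = tf.get_variable("x", shape=[], initializer=tf.zeros_initializer())
    loss = tf.square(x - 1.0)
    bn_update_ops = []

    minimize_op = tf.train.GradientDescentOptimizer(0.1).minimize(loss)

    # Run the batch-norm moving-average updates together with the
    # optimizer step so the statistics stay in sync with the weights.
    with tf.control_dependencies(bn_update_ops):
        train_op = tf.group(minimize_op)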
Example #2
    def _batch_norm(self, name, x):
        """Batch normalization.

        Only active when enabled in the config; otherwise the input
        passes through unchanged.
        """
        if self.config.batch_norm:
            with tf.variable_scope(name):
                # Per-channel shift (beta) and scale (gamma) parameters.
                n_out = x.get_shape()[-1]
                beta = nn.weight_variable([n_out],
                                          init_method="constant",
                                          init_param={"val": 0.0},
                                          name="beta")
                gamma = nn.weight_variable([n_out],
                                           init_method="constant",
                                           init_param={"val": 1.0},
                                           name="gamma")
                # Normalize over batch, height, and width (NHWC layout).
                return nn.batch_norm(x,
                                     self.is_training,
                                     gamma=gamma,
                                     beta=beta,
                                     axes=[0, 1, 2],
                                     eps=1e-3,
                                     scope="bn",
                                     name="bn_out")
        else:
            return x
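
Both examples pass self.is_training into the normalization call so that a single graph can switch between batch statistics (training) and moving averages (inference). A minimal sketch of the usual TF1 pattern behind that flag (the placeholder and the fetch names are assumptions for illustration):

    import tensorflow as tf

    # A boolean placeholder toggles batch norm between the two modes.
    is_training = tf.placeholder(tf.bool, shape=[], name="is_training")

    # At session time the same graph serves training and inference:
    #   sess.run(train_op, feed_dict={is_training: True})   # batch stats
    #   sess.run(outputs, feed_dict={is_training: False})   # moving averages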