Example #1
    def initialize(self) -> None:
        # Batchnorm after Dense layer
        if len(self.input_shape) == 2:
            n, _ = self.input_shape
            self.mean = self.mean.reshape(1, n)
            self.variance = self.variance.reshape(1, n)
            self.scale = self.scale.reshape(1, n)
            self.offset = self.offset.reshape(1, n)

        # Batchnorm after Conv2D layer
        elif len(self.input_shape) == 4:
            if self.channels_first:
                # NCHW format
                _, c, _, _ = self.input_shape
                self.mean = self.mean.reshape(1, c, 1, 1)
                self.variance = self.variance.reshape(1, c, 1, 1)
                self.scale = self.scale.reshape(1, c, 1, 1)
                self.offset = self.offset.reshape(1, c, 1, 1)
            else:
                # NHWC format
                _, _, _, c = self.input_shape
                self.mean = self.mean.reshape(1, 1, 1, c)
                self.variance = self.variance.reshape(1, 1, 1, c)
                self.scale = self.scale.reshape(1, 1, 1, c)
                self.offset = self.offset.reshape(1, 1, 1, c)

        denomtemp = 1.0 / np.sqrt(self.variance + self.variance_epsilon)

        self.denom = tfe.define_public_variable(denomtemp)
        self.mean = tfe.define_public_variable(self.mean)
        self.variance = tfe.define_public_variable(self.variance)
        self.scale = tfe.define_public_variable(self.scale)
        self.offset = tfe.define_public_variable(self.offset)
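
Because mean, variance, scale, and offset are reshaped for broadcasting and denom = 1 / sqrt(variance + epsilon) is precomputed once, the inference pass reduces to elementwise multiplies and adds. A minimal NumPy sketch of that computation (the forward method is not part of this example, and the names below are illustrative):

import numpy as np

def batchnorm_inference(x, mean, denom, scale, offset):
    # y = scale * (x - mean) / sqrt(variance + eps) + offset,
    # with the reciprocal square root folded into the cached `denom`.
    return scale * (x - mean) * denom + offset

x = np.random.randn(8, 4, 5, 5)              # NCHW batch with 4 channels
mean = np.zeros((1, 4, 1, 1))
variance = np.ones((1, 4, 1, 1))
scale = np.ones((1, 4, 1, 1))
offset = np.zeros((1, 4, 1, 1))
denom = 1.0 / np.sqrt(variance + 1e-8)
y = batchnorm_inference(x, mean, denom, scale, offset)
print(y.shape)                               # (8, 4, 5, 5)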

Example #2
    def build(self, input_shape):
        c = input_shape[self.axis]
        if len(input_shape) == 2:
            param_shape = [1, 1]
        elif len(input_shape) == 4:
            param_shape = [1, 1, 1, 1]

        param_shape[self.axis] = int(c)

        if self.scale:
            gamma = self.gamma_initializer(param_shape)
            self.gamma = self.add_weight(gamma, make_private=False)
        else:
            self.gamma = None

        if self.center:
            beta = self.beta_initializer(param_shape)
            self.beta = self.add_weight(beta, make_private=False)
        else:
            self.beta = None

        moving_mean = self.moving_mean_initializer(param_shape)
        self.moving_mean = self.add_weight(moving_mean, make_private=False)

        moving_variance_init = self.moving_variance_initializer(param_shape)
        self.moving_variance = self.add_weight(moving_variance_init,
                                               make_private=False)

        denomtemp = 1.0 / tf.sqrt(moving_variance_init + self.epsilon)

        # We have two different public variables for moving_variance and
        # denomtemp to avoid calling tfe.sqrt every time denom is used.
        self.denom = tfe.define_public_variable(denomtemp)

        self.built = True
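
The param_shape logic above yields a shape of ones with the channel count placed at self.axis, so gamma, beta, and the moving statistics broadcast over the batch and spatial dimensions. A quick illustration with made-up shapes (not taken from the source):

# NHWC conv output with 16 channels, normalized over the last axis.
input_shape = (32, 28, 28, 16)
axis = -1
param_shape = [1, 1, 1, 1] if len(input_shape) == 4 else [1, 1]
param_shape[axis] = int(input_shape[axis])
print(param_shape)   # [1, 1, 1, 16]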
Example #3
  def add_weight(self, variable, make_private=True):
    if make_private:
      variable = tfe.define_private_variable(variable)
      self.weights.append(variable)
    else:
      variable = tfe.define_public_variable(variable)
      self.weights.append(variable)

    return variable
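
Roughly, in tf-encrypted a private variable is secret-shared between the compute parties while a public variable is held in plaintext, which is presumably why the batchnorm statistics in the earlier examples are registered with make_private=False. A runnable stand-in for the call pattern (TinyLayer is hypothetical and only tags values instead of wrapping them in tfe variables):

import numpy as np

class TinyLayer:
    # Hypothetical stand-in for the layer base class: it records whether each
    # weight would become a private or a public tfe variable.
    def __init__(self):
        self.weights = []

    def add_weight(self, variable, make_private=True):
        tagged = ("private" if make_private else "public", np.asarray(variable))
        self.weights.append(tagged)
        return tagged

layer = TinyLayer()
layer.add_weight(np.random.randn(3, 3, 16, 32))                 # trainable kernel -> private
layer.add_weight(np.zeros((1, 1, 1, 32)), make_private=False)   # moving mean -> public
print([kind for kind, _ in layer.weights])                      # ['private', 'public']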

Example #4
    def rearrange_kernel(self, kernel):
        """Rearrange the kernel to match normal convolution kernels.

        Arguments:
          kernel: kernel to be rearranged
        """
        mask = self.get_mask(self.input_dim)

        if isinstance(kernel, tf.Tensor):
            mask = tf.constant(mask.tolist(),
                               dtype=tf.float32,
                               shape=(self.kernel_size[0], self.kernel_size[1],
                                      self.input_dim * self.depth_multiplier,
                                      self.input_dim))

            if self.depth_multiplier > 1:
                # rearrange kernel
                kernel = tf.transpose(kernel, [0, 1, 3, 2])
                kernel = tf.reshape(
                    kernel,
                    shape=self.kernel_size +
                    (self.input_dim * self.depth_multiplier, 1))

            kernel = tf.multiply(kernel, mask)

        elif isinstance(kernel, np.ndarray):
            if self.depth_multiplier > 1:
                # rearrange kernel
                kernel = np.transpose(kernel, [0, 1, 3, 2])
                kernel = np.reshape(
                    kernel,
                    newshape=self.kernel_size +
                    (self.input_dim * self.depth_multiplier, 1))

            kernel = np.multiply(kernel, mask)

        elif isinstance(kernel, PondPrivateTensor):
            mask = tfe.define_public_variable(mask)
            if self.depth_multiplier > 1:
                # rearrange kernel
                kernel = tfe.transpose(kernel, [0, 1, 3, 2])
                kernel = tfe.reshape(
                    kernel,
                    shape=self.kernel_size +
                    (self.input_dim * self.depth_multiplier, 1))

            kernel = tfe.mul(kernel, mask)

        return kernel
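
The method dispatches on the kernel's type (tf.Tensor, np.ndarray, or PondPrivateTensor) but applies the same idea each time: broadcast the depthwise weights across output channels and zero the cross-channel entries with a 0/1 mask so that a standard convolution reproduces the depthwise one. get_mask is not shown, so the sketch below uses a hypothetical mask layout in the usual TF depthwise format (kh, kw, channels, depth_multiplier) rather than the exact axis order of the example:

import numpy as np

kh, kw, c, m = 3, 3, 2, 2                        # kernel size, channels, depth multiplier
depthwise = np.random.randn(kh, kw, c, m)        # one (kh, kw) filter per (channel, multiplier)

# Dense-conv target: output channel j = ci * m + mi may only see input channel ci.
dense_ref = np.zeros((kh, kw, c, c * m))
for ci in range(c):
    for mi in range(m):
        dense_ref[:, :, ci, ci * m + mi] = depthwise[:, :, ci, mi]

# Same result via broadcast-and-mask: tile the depthwise weights across the
# output axis, then keep only the block belonging to each input channel.
mask = np.zeros((kh, kw, c, c * m))
for ci in range(c):
    mask[:, :, ci, ci * m:(ci + 1) * m] = 1.0
dense = depthwise.reshape(kh, kw, 1, c * m) * mask

print(np.allclose(dense, dense_ref))             # True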