Example #1
import numpy as np

import megengine.functional as F
import megengine.module as M
from megengine import Parameter


class GroupNorm(M.Module):
    def __init__(self, num_groups, num_channels, eps=1e-5, affine=True):
        super().__init__()
        self.num_groups = num_groups
        self.num_channels = num_channels
        self.eps = eps
        self.affine = affine
        if self.affine:
            self.weight = Parameter(np.ones(num_channels, dtype=np.float32))
            self.bias = Parameter(np.zeros(num_channels, dtype=np.float32))
        else:
            self.weight = None
            self.bias = None
        self.reset_parameters()

    def reset_parameters(self):
        if self.affine:
            M.init.ones_(self.weight)
            M.init.zeros_(self.bias)

    def forward(self, x):
        # Collapse each group of channels so statistics are computed per group.
        output = x.reshape(x.shape[0], self.num_groups, -1)
        mean = F.mean(output, axis=2, keepdims=True)
        mean2 = F.mean(output ** 2, axis=2, keepdims=True)
        # Var(x) = E[x^2] - (E[x])^2.
        var = mean2 - mean * mean

        output = (output - mean) / F.sqrt(var + self.eps)
        output = output.reshape(x.shape)
        if self.affine:
            output = self.weight.reshape(1, -1, 1, 1) * output + \
                self.bias.reshape(1, -1, 1, 1)

        return output
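
A minimal smoke test for the module above (the batch size, channel count, and group count here are illustrative assumptions, not part of the original snippet):

import numpy as np

import megengine as mge

gn = GroupNorm(num_groups=4, num_channels=8)
x = mge.tensor(np.random.randn(2, 8, 4, 4).astype("float32"))
y = gn(x)
assert y.numpy().shape == (2, 8, 4, 4)  # normalization keeps the input shape
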
Example #2
import numpy as np

import megengine.functional as F
from megengine import Parameter
from megengine.module import Module
from megengine.module.init import ones_, zeros_


class GroupNorm(Module):
    """
    Simple implementation of GroupNorm. Only supports 4-d tensors for now.
    Reference: https://arxiv.org/pdf/1803.08494.pdf.
    """
    def __init__(self,
                 num_groups,
                 num_channels,
                 eps=1e-5,
                 affine=True,
                 **kwargs):
        super().__init__(**kwargs)
        assert num_channels % num_groups == 0, \
            "num_channels must be divisible by num_groups"
        self.num_groups = num_groups
        self.num_channels = num_channels
        self.eps = eps
        self.affine = affine
        if self.affine:
            self.weight = Parameter(np.ones(num_channels, dtype=np.float32))
            self.bias = Parameter(np.zeros(num_channels, dtype=np.float32))
        else:
            self.weight = None
            self.bias = None
        self.reset_parameters()

    def reset_parameters(self):
        if self.affine:
            ones_(self.weight)
            zeros_(self.bias)

    def forward(self, x):
        N, C, H, W = x.shape
        assert C == self.num_channels

        x = x.reshape(N, self.num_groups, -1)
        mean = x.mean(axis=2, keepdims=True)
        var = (x * x).mean(axis=2, keepdims=True) - mean * mean

        x = (x - mean) / F.sqrt(var + self.eps)
        x = x.reshape(N, C, H, W)
        if self.affine:
            x = self.weight.reshape(1, -1, 1, 1) * x + self.bias.reshape(
                1, -1, 1, 1)

        return x

    def _module_info_string(self) -> str:
        s = ("groups={num_groups}, channels={num_channels}, "
             "eps={eps}, affine={affine}")
        return s.format(**self.__dict__)
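
A hedged sketch of what the normalization should produce; affine is disabled here so the raw statistics are visible, and the shapes are illustrative assumptions:

import numpy as np

import megengine as mge

gn = GroupNorm(num_groups=2, num_channels=4, affine=False)
x = mge.tensor(np.random.randn(3, 4, 5, 5).astype("float32"))
y = gn(x).numpy().reshape(3, 2, -1)
# Each group comes out with (approximately) zero mean and unit variance.
assert np.allclose(y.mean(axis=2), 0.0, atol=1e-5)
assert np.allclose(y.var(axis=2), 1.0, atol=1e-3)
print(gn._module_info_string())  # e.g. "groups=2, channels=4, eps=1e-05, affine=False"
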
Example #3
import numpy as np

import megengine.functional as F
import megengine.module as M
from megengine import Parameter


class LayerNorm(M.Module):
    """
    Simple implementation of LayerNorm. Only supports 4-d tensors for now.
    Reference: https://arxiv.org/pdf/1803.08494.pdf.
    Note that LayerNorm is equivalent to GroupNorm with num_groups=1.
    """
    def __init__(self, num_channels, eps=1e-05, affine=True):
        super().__init__()
        self.num_channels = num_channels
        self.eps = eps
        self.affine = affine
        if self.affine:
            self.weight = Parameter(np.ones(num_channels, dtype="float32"))
            self.bias = Parameter(np.zeros(num_channels, dtype="float32"))
        else:
            self.weight = None
            self.bias = None
        self.reset_parameters()

    def reset_parameters(self):
        if self.affine:
            M.init.ones_(self.weight)
            M.init.zeros_(self.bias)

    def forward(self, x):
        N, C, H, W = x.shape
        assert C == self.num_channels
        x = x.reshape(N, -1)
        # NOTE: keepdims=True preserves the reduced axis in the next two lines.
        mean = x.mean(axis=1, keepdims=True)
        var = (x ** 2).mean(axis=1, keepdims=True) - mean * mean

        x = (x - mean) / F.sqrt(var + self.eps)
        x = x.reshape(N, C, H, W)
        if self.affine:
            x = self.weight.reshape(1, -1, 1, 1) * x + self.bias.reshape(
                1, -1, 1, 1)

        return x

    def _module_info_string(self) -> str:
        s = "channels={num_channels}, eps={eps}, affine={affine}"
        return s.format(**self.__dict__)
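
A quick numerical check of the docstring's equivalence claim, assuming one of the GroupNorm classes from the earlier examples is in scope (the shapes are illustrative):

import numpy as np

import megengine as mge

x = mge.tensor(np.random.randn(2, 6, 3, 3).astype("float32"))
ln = LayerNorm(num_channels=6)
gn = GroupNorm(num_groups=1, num_channels=6)
# Both start from weight=1 and bias=0, so the outputs should coincide.
assert np.allclose(ln(x).numpy(), gn(x).numpy(), atol=1e-5)
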
Example #4
import numpy as np

import megengine.functional as F
from megengine import Parameter
from megengine.module import Module
from megengine.module.init import ones_, zeros_


class InstanceNorm(Module):
    """
    Simple implementation of InstanceNorm. Only supports 4-d tensors for now.
    Reference: https://arxiv.org/abs/1607.08022.
    Note that InstanceNorm is equivalent to GroupNorm with
    num_groups=num_channels.
    """
    def __init__(self, num_channels, eps=1e-05, affine=True):
        super().__init__()
        self.num_channels = num_channels
        self.eps = eps
        self.affine = affine
        if self.affine:
            self.weight = Parameter(np.ones(num_channels, dtype="float32"))
            self.bias = Parameter(np.zeros(num_channels, dtype="float32"))
        else:
            self.weight = None
            self.bias = None
        self.reset_parameters()

    def reset_parameters(self):
        if self.affine:
            ones_(self.weight)
            zeros_(self.bias)

    def forward(self, x):
        N, C, H, W = x.shape
        assert C == self.num_channels
        x = x.reshape(N, C, -1)
        mean = x.mean(axis=2, keepdims=True)
        var = (x**2).mean(axis=2, keepdims=True) - mean * mean

        x = (x - mean) / F.sqrt(var + self.eps)
        x = x.reshape(N, C, H, W)
        if self.affine:
            x = self.weight.reshape(1, -1, 1, 1) * x + self.bias.reshape(
                1, -1, 1, 1)

        return x

    def _module_info_string(self) -> str:
        s = "channels={num_channels}, eps={eps}, affine={affine}"
        return s.format(**self.__dict__)
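
And the corresponding check for this example's equivalence note, again assuming an earlier GroupNorm is in scope:

import numpy as np

import megengine as mge

x = mge.tensor(np.random.randn(2, 6, 3, 3).astype("float32"))
inorm = InstanceNorm(num_channels=6)
gn = GroupNorm(num_groups=6, num_channels=6)
assert np.allclose(inorm(x).numpy(), gn(x).numpy(), atol=1e-5)
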
Example #5
import numpy as np

import megengine.functional as F
import megengine.module as M
from megengine import Parameter


class FrozenBatchNorm2d(M.Module):
    """
    BatchNorm2d in which the weight, bias, running_mean and running_var
    are immutable, i.e. fixed during training.
    """
    def __init__(self, num_features, eps=1e-5):
        super().__init__()
        self.num_features = num_features
        self.eps = eps

        self.weight = Parameter(np.ones(num_features, dtype=np.float32))
        self.bias = Parameter(np.zeros(num_features, dtype=np.float32))

        self.running_mean = Parameter(
            np.zeros((1, num_features, 1, 1), dtype=np.float32))
        self.running_var = Parameter(
            np.ones((1, num_features, 1, 1), dtype=np.float32))

    def forward(self, x):
        # Fold the frozen statistics and affine parameters into one
        # per-channel scale and bias, then apply them as constants.
        scale = self.weight.reshape(
            1, -1, 1, 1) * (1.0 / F.sqrt(self.running_var + self.eps))
        bias = self.bias.reshape(1, -1, 1, 1) - self.running_mean * scale
        # detach() blocks gradient flow, keeping the statistics frozen.
        return x * scale.detach() + bias.detach()
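
A minimal sketch of the frozen behavior: with the default statistics (mean 0, variance 1) and identity affine parameters, the layer is close to an identity map up to the eps term. The shapes below are illustrative assumptions:

import numpy as np

import megengine as mge

fbn = FrozenBatchNorm2d(num_features=8)
x = mge.tensor(np.random.randn(2, 8, 4, 4).astype("float32"))
y = fbn(x)
# scale = 1 / sqrt(1 + eps) ~= 1 and bias ~= 0, so y ~= x.
assert np.allclose(y.numpy(), x.numpy(), atol=1e-4)
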