Example #1
0
    def test_uniform(self):
        """Check that uniform init keeps values inside the requested range."""
        # Scalar shape, default [0, 1) range.
        sample = init.uniform(5, "float32")
        assert ((sample > 0) & (sample < 1)).all()
        # Tuple shape with explicit low/high bounds.
        sample = init.uniform((2, 3), low=-1, high=1)
        assert ((sample > -1) & (sample < 1)).all()

        # In-place variants on a layer's weight tensor.
        linear = nn.Linear(2, 2)
        init.uniform_(linear.weight)
        assert (linear.weight > 0).all()
        linear.weight.uniform_()
        assert (linear.weight > 0).all()
Example #2
0
def linear(x, n):
    """Fully-connected layer: project `x` onto `n` output features (x @ W + b)."""
    in_features = x.shape[-1]
    # Weight is created as [n, in] then transposed to [in, n] via reindex.
    weight = jt.make_var([n, in_features],
                         init=lambda *args: init.invariant_uniform(*args))
    weight = weight.reindex([weight.shape[1], weight.shape[0]], ["i1", "i0"])
    # Bias bound follows fan-in after the transpose (weight.shape[0] == in_features).
    bound = 1.0 / math.sqrt(weight.shape[0])
    bias = jt.make_var([n], init=lambda *args: init.uniform(*args, -bound, bound))
    return jt.matmul(x, weight) + bias
Example #3
0
    def __init__(self, in_channels, out_channels, kernel_size, stride=1, \
                 padding=0, output_padding=0, groups=1, bias=True, dilation=1):
        """Configure a 2-D transposed convolution and initialise its parameters.

        Scalar kernel_size/stride/padding/dilation/output_padding values are
        normalised to (h, w) tuples. Only groups == 1 is supported.
        """
        self.in_channels = in_channels
        self.out_channels = out_channels

        self.group = groups
        # NOTE: assert is stripped under `python -O`; kept to match the
        # original contract rather than raising ValueError.
        assert groups == 1, "Group conv not supported yet."

        self.kernel_size = kernel_size if isinstance(
            kernel_size, tuple) else (kernel_size, kernel_size)
        self.stride = stride if isinstance(stride, tuple) else (stride, stride)
        # Fixed: removed a dead scalar `self.dilation = dilation` assignment
        # that was immediately overwritten by the tuple form below.
        self.dilation = dilation if isinstance(dilation, tuple) else (dilation,
                                                                      dilation)
        self.padding = padding if isinstance(padding, tuple) else (padding,
                                                                   padding)
        # Effective padding of the equivalent forward conv used by the
        # transposed operation.
        self.real_padding = (self.dilation[0] * (self.kernel_size[0] - 1) -
                             self.padding[0], self.dilation[1] *
                             (self.kernel_size[1] - 1) - self.padding[1])
        self.output_padding = output_padding if isinstance(
            output_padding, tuple) else (output_padding, output_padding)

        # Weight shaped (in, out, Kh, Kw) as conv_transpose expects.
        self.weight = init.relu_invariant_gauss(
            (in_channels, out_channels) + self.kernel_size,
            dtype="float",
            mode="fan_out")
        if bias:
            # NOTE(review): fixed [-1, 1) bias range; sibling layers use a
            # 1/sqrt(fan_in) bound — confirm which is intended.
            self.bias = init.uniform([out_channels],
                                     dtype="float",
                                     low=-1,
                                     high=1)
        else:
            self.bias = None
Example #4
0
 def __init__(self,
              in_channels,
              out_channels,
              kernel_size,
              stride=1,
              padding=0,
              dilation=1,
              groups=1,
              bias=True):
     """Configure a grouped 2-D convolution and initialise weight/bias."""
     # Normalise scalar hyper-parameters to (h, w) pairs.
     as_pair = lambda v: v if isinstance(v, tuple) else (v, v)
     self.in_channels = in_channels
     self.out_channels = out_channels
     self.kernel_size = as_pair(kernel_size)
     self.stride = as_pair(stride)
     self.padding = as_pair(padding)
     self.dilation = as_pair(dilation)
     self.groups = groups
     assert in_channels % groups == 0, 'in_channels must be divisible by groups'
     assert out_channels % groups == 0, 'out_channels must be divisible by groups'
     Kh, Kw = self.kernel_size
     # Weight laid out [out, in/groups, Kh, Kw]; ReLU-scaled gaussian init.
     self.weight = init.relu_invariant_gauss(
         [out_channels, in_channels // groups, Kh, Kw],
         dtype="float",
         mode="fan_out")
     # NOTE(review): bias drawn from fixed [-1, 1) rather than a fan-in
     # bound — confirm this is intended.
     if bias:
         self.bias = init.uniform([out_channels],
                                  dtype="float",
                                  low=-1,
                                  high=1)
     else:
         self.bias = None
Example #5
0
    def __init__(self, in_channels, out_channels, kernel_size, stride=1, \
                 padding=0, output_padding=0, groups=1, bias=True, dilation=1):
        """Configure a 2-D transposed convolution and initialise its parameters.

        Scalar kernel_size/stride/padding/dilation/output_padding values are
        normalised to (h, w) tuples. Only groups == 1 is supported, and the
        output padding must be smaller than max(stride, dilation).
        """
        self.in_channels = in_channels
        self.out_channels = out_channels

        self.group = groups
        assert groups == 1, "Group conv not supported yet."

        self.kernel_size = kernel_size if isinstance(kernel_size, tuple) else (kernel_size, kernel_size)
        self.stride = stride if isinstance(stride, tuple) else (stride, stride)
        # Fixed: removed a dead scalar `self.dilation = dilation` assignment
        # that was immediately overwritten by the tuple form below.
        self.dilation = dilation if isinstance(dilation, tuple) else (dilation, dilation)
        self.padding = padding if isinstance(padding, tuple) else (padding, padding)
        # Effective padding of the equivalent forward conv used by the
        # transposed operation.
        self.real_padding = (self.dilation[0] * (self.kernel_size[0] - 1) - self.padding[0],
            self.dilation[1] * (self.kernel_size[1] - 1) - self.padding[1])
        self.output_padding = output_padding if isinstance(output_padding, tuple) else (output_padding, output_padding)
        assert self.output_padding[0] < max(self.stride[0], self.dilation[0]) and \
            self.output_padding[1] < max(self.stride[1], self.dilation[1]), \
            "output padding must be smaller than max(stride, dilation)"

        # Weight shaped (in, out, Kh, Kw) as conv_transpose expects.
        self.weight = init.invariant_uniform((in_channels, out_channels) + self.kernel_size, dtype="float")
        if bias:
            # Bias ~ U(-1/sqrt(fan_in), 1/sqrt(fan_in)),
            # fan_in = prod(weight.shape[1:]).
            fan = 1
            for dim in self.weight.shape[1:]:
                fan *= dim
            bound = 1 / math.sqrt(fan)
            self.bias = init.uniform([out_channels], dtype="float", low=-bound, high=bound)
        else:
            self.bias = None
Example #6
0
File: nn.py Project: zzmcdc/jittor
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 stride=1,
                 padding=0,
                 dilation=1,
                 groups=1,
                 bias=True):
        """Configure a 2-D convolution (groups == 1 only) and initialise
        its weight and bias."""
        # Fixed: the original asserted `groups == 1` twice (once bare, once
        # with a message); a single message-bearing assertion suffices.
        assert groups == 1, "Group conv not supported yet."

        self.in_channels = in_channels
        self.out_channels = out_channels
        # Normalise scalar hyper-parameters to (h, w) pairs.
        self.kernel_size = kernel_size if isinstance(
            kernel_size, tuple) else (kernel_size, kernel_size)
        self.stride = stride if isinstance(stride, tuple) else (stride, stride)
        self.padding = padding if isinstance(padding, tuple) else (padding,
                                                                   padding)
        self.dilation = dilation if isinstance(dilation, tuple) else (dilation,
                                                                      dilation)
        Kh, Kw = self.kernel_size
        # ReLU-scaled gaussian init in fan_out mode, weight [out, in, Kh, Kw].
        self.weight = init.relu_invariant_gauss(
            [out_channels, in_channels, Kh, Kw], dtype="float", mode="fan_out")
        if bias:
            # NOTE(review): fixed [-1, 1) bias range; sibling layers use a
            # 1/sqrt(fan_in) bound — confirm which is intended.
            self.bias = init.uniform([out_channels],
                                     dtype="float",
                                     low=-1,
                                     high=1)
        else:
            self.bias = None
Example #7
0
 def __init__(self, in_features, out_features, bias=True):
     """Linear (fully connected) layer: y = x @ W^T + b."""
     self.in_features = in_features
     self.out_features = out_features
     # Weight shaped [out, in]; scaled uniform keeps activation variance stable.
     self.weight = init.invariant_uniform((out_features, in_features),
                                          "float32")
     # Bias bound follows the fan-in convention: 1/sqrt(in_features).
     limit = 1.0 / math.sqrt(in_features)
     if bias:
         self.bias = init.uniform((out_features, ), "float32", -limit, limit)
     else:
         self.bias = None
Example #8
0
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 stride=1,
                 padding=0,
                 dilation=1,
                 groups=1,
                 bias=True):
        """Configure a grouped 3-D convolution and initialise its parameters.

        Scalar kernel_size/stride/padding/dilation values are normalised to
        3-tuples. Both channel counts must be divisible by `groups`.
        """
        self.in_channels = in_channels
        self.out_channels = out_channels
        # Normalise scalar hyper-parameters to (d, h, w) triples.
        self.kernel_size = kernel_size if isinstance(
            kernel_size, tuple) else (kernel_size, kernel_size, kernel_size)
        self.stride = stride if isinstance(stride, tuple) else (stride, stride,
                                                                stride)
        self.padding = padding if isinstance(padding, tuple) else (padding,
                                                                   padding,
                                                                   padding)
        self.dilation = dilation if isinstance(dilation, tuple) else (dilation,
                                                                      dilation,
                                                                      dilation)
        # Fixed: the groups assignment and both divisibility asserts were
        # duplicated verbatim in the original; do each exactly once.
        self.groups = groups
        assert in_channels % groups == 0, 'in_channels must be divisible by groups'
        assert out_channels % groups == 0, 'out_channels must be divisible by groups'
        Kh, Kw, Kd = self.kernel_size

        # Weight laid out [out, in/groups, Kh, Kw, Kd].
        self.weight = init.invariant_uniform(
            [out_channels, in_channels // groups, Kh, Kw, Kd], dtype="float")
        if bias:
            # Bias ~ U(-1/sqrt(fan_in), 1/sqrt(fan_in)),
            # fan_in = prod(weight.shape[1:]).
            fan = 1
            for dim in self.weight.shape[1:]:
                fan *= dim
            bound = 1 / math.sqrt(fan)
            self.bias = init.uniform([out_channels],
                                     dtype="float",
                                     low=-bound,
                                     high=bound)
        else:
            self.bias = None