def conv(x, in_planes, out_planes, kernel_size, padding, stride=1, init_method=None):
    """Plain 2D convolution built from reindex / broadcast / sum primitives.

    Args:
        x: input var of shape (N, C, H, W); C must equal ``in_planes``.
        in_planes: number of input channels.
        out_planes: number of output channels (``Kc``).
        kernel_size: square kernel side length (used for both Kh and Kw).
        padding: symmetric zero padding applied to H and W.
        stride: convolution stride. Defaults to 1.
        init_method: optional weight initializer callable; when None the
            weight uses relu-invariant Gaussian init with mode="fan_out".

    Returns:
        Output var of shape (N, Kc, H', W') where
        H' = (H + 2*padding - kernel_size) // stride + 1 (same for W').
    """
    Kw = kernel_size
    Kh = kernel_size
    _C = in_planes
    Kc = out_planes
    N, C, H, W = x.shape
    assert C == _C
    # Fix: compare against None with ``is`` (identity), not ``==`` — the
    # original ``init_method == None`` is non-idiomatic and can misbehave
    # if ``init_method`` overloads equality.
    if init_method is None:
        w = jt.make_var(
            [Kc, _C, Kh, Kw],
            init=lambda *a: init.relu_invariant_gauss(*a, mode="fan_out"))
    else:
        w = jt.make_var([Kc, _C, Kh, Kw], init=init_method)
    # Gather every (kh, kw)-shifted window of x into a 7-D view; indices
    # that fall outside x implement the zero padding.
    xx = x.reindex(
        [
            N, Kc, C,
            (H + padding * 2 - kernel_size) // stride + 1,
            (W + padding * 2 - kernel_size) // stride + 1,
            Kh, Kw,
        ],
        [
            'i0',  # Nid
            'i2',  # Cid
            f'i3*{stride}-{padding}+i5',  # Hid+Khid
            f'i4*{stride}-{padding}+i6',  # Wid+KWid
        ])
    # Broadcast the weight over the N, H', W' axes (kept axes 0, 3, 4).
    ww = w.broadcast(xx.shape, [0, 3, 4])
    yy = xx * ww
    # Reduce over input channels and the kernel window (C, Kh, Kw).
    y = yy.sum([2, 5, 6])
    return y
def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True):
    """Store normalized conv hyper-parameters and initialize weight/bias.

    Scalar ``kernel_size``/``stride``/``padding``/``dilation`` arguments are
    normalized to (h, w) tuples. Weight layout is
    (out_channels, in_channels // groups, Kh, Kw).
    """
    def _pair(value):
        # Normalize a scalar-or-tuple argument to a (h, w) pair.
        return value if isinstance(value, tuple) else (value, value)

    self.in_channels = in_channels
    self.out_channels = out_channels
    self.kernel_size = _pair(kernel_size)
    self.stride = _pair(stride)
    self.padding = _pair(padding)
    self.dilation = _pair(dilation)
    self.groups = groups
    assert in_channels % groups == 0, 'in_channels must be divisible by groups'
    assert out_channels % groups == 0, 'out_channels must be divisible by groups'
    Kh, Kw = self.kernel_size
    self.weight = init.relu_invariant_gauss(
        [out_channels, in_channels // groups, Kh, Kw],
        dtype="float", mode="fan_out")
    # Bias is drawn uniformly from [-1, 1] when requested, else absent.
    if bias:
        self.bias = init.uniform([out_channels], dtype="float", low=-1, high=1)
    else:
        self.bias = None
def __init__(self, in_channels, out_channels, kernel_size, stride=1, \
        padding=0, output_padding=0, groups=1, bias=True, dilation=1):
    """Store normalized transposed-conv hyper-parameters and init weights.

    Scalar size arguments are normalized to (h, w) tuples. Weight layout is
    (in_channels, out_channels, Kh, Kw). Only groups == 1 is supported.
    """
    self.in_channels = in_channels
    self.out_channels = out_channels
    # NOTE(review): attribute is named ``group`` (singular) in the original;
    # kept as-is for backward compatibility with any external readers.
    self.group = groups
    assert groups == 1, "Group conv not supported yet."
    self.kernel_size = kernel_size if isinstance(
        kernel_size, tuple) else (kernel_size, kernel_size)
    self.stride = stride if isinstance(stride, tuple) else (stride, stride)
    # Fix: the original assigned ``self.dilation = dilation`` (raw scalar)
    # earlier and then immediately overwrote it here with the tuple form —
    # the dead first assignment is removed.
    self.dilation = dilation if isinstance(dilation, tuple) else (dilation, dilation)
    self.padding = padding if isinstance(padding, tuple) else (padding, padding)
    # Padding of the equivalent forward convolution:
    # dilation * (kernel - 1) - padding, per transposed-conv arithmetic.
    self.real_padding = (
        self.dilation[0] * (self.kernel_size[0] - 1) - self.padding[0],
        self.dilation[1] * (self.kernel_size[1] - 1) - self.padding[1])
    self.output_padding = output_padding if isinstance(
        output_padding, tuple) else (output_padding, output_padding)
    self.weight = init.relu_invariant_gauss(
        (in_channels, out_channels) + self.kernel_size,
        dtype="float", mode="fan_out")
    # Bias is drawn uniformly from [-1, 1] when requested, else absent.
    if bias:
        self.bias = init.uniform([out_channels], dtype="float", low=-1, high=1)
    else:
        self.bias = None
def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True):
    """Store normalized conv hyper-parameters and init weights (groups == 1 only).

    Scalar ``kernel_size``/``stride``/``padding``/``dilation`` arguments are
    normalized to (h, w) tuples. Weight layout is
    (out_channels, in_channels, Kh, Kw).
    """
    # Fix: the original asserted ``groups == 1`` twice (once bare, once with
    # a message); consolidated into a single assert that keeps the message.
    assert groups == 1, "Group conv not supported yet."
    self.in_channels = in_channels
    self.out_channels = out_channels
    self.kernel_size = kernel_size if isinstance(
        kernel_size, tuple) else (kernel_size, kernel_size)
    self.stride = stride if isinstance(stride, tuple) else (stride, stride)
    self.padding = padding if isinstance(padding, tuple) else (padding, padding)
    self.dilation = dilation if isinstance(dilation, tuple) else (dilation, dilation)
    Kh, Kw = self.kernel_size
    self.weight = init.relu_invariant_gauss(
        [out_channels, in_channels, Kh, Kw],
        dtype="float", mode="fan_out")
    # Bias is drawn uniformly from [-1, 1] when requested, else absent.
    if bias:
        self.bias = init.uniform([out_channels], dtype="float", low=-1, high=1)
    else:
        self.bias = None