def __init__(self, in_channels, out_channels, kernel_size, stride=1,
             padding=0, dilation=1, groups=1, bias=True,
             weight_init=Kaiming_Normal(), bias_init=Zeros()):
    """Standard 2-D convolution layer.

    Scalar ``kernel_size`` / ``stride`` / ``padding`` / ``dilation``
    values are expanded to ``(h, w)`` pairs before being handed to the
    base class.

    :param in_channels: number of input channels
    :param out_channels: number of output channels
    :param kernel_size: convolution kernel size (int or pair)
    :param stride: convolution stride (int or pair)
    :param padding: zero padding added on each side (int or pair)
    :param dilation: spacing between kernel elements (int or pair)
    :param groups: number of blocked input/output connections
    :param bias: if True, a learnable bias is added
    :param weight_init: weight initializer
    :param bias_init: bias initializer
    """
    super(Conv2d, self).__init__(
        in_channels,
        out_channels,
        _pair(kernel_size),
        _pair(stride),
        _pair(padding),
        _pair(dilation),
        groups,
        bias,
        _pair(0),  # output padding — unused for a forward convolution
        weight_init,
        bias_init,
    )
def __init__(self, in_channels, kernel_size, stride=1, padding=0,
             out_padding=0, dilation=1, bias=True, multiplier=1,
             weight_init=Kaiming_Normal(), bias_init=Zeros()):
    """Depthwise transposed 2-D convolution.

    Each input channel is deconvolved independently (``groups`` is set
    to ``in_channels``), producing ``multiplier`` output channels per
    input channel, i.e. ``in_channels * multiplier`` channels in total.

    :param in_channels: number of input channels
    :param kernel_size: convolution kernel size (int or pair)
    :param stride: convolution stride (int or pair)
    :param padding: input padding (int or pair)
    :param out_padding: extra size added to the output shape (int or pair)
    :param dilation: spacing between kernel elements (int or pair)
    :param bias: if True, a learnable bias is added
    :param multiplier: output channels produced per input channel
    :param weight_init: weight initializer
    :param bias_init: bias initializer
    """
    total_out_channels = in_channels * multiplier
    super(DepthwiseConvTranspose2d, self).__init__(
        in_channels,
        total_out_channels,
        _pair(kernel_size),
        _pair(stride),
        _pair(padding),
        _pair(dilation),
        in_channels,  # groups == in_channels makes the op depthwise
        bias,
        _pair(out_padding),
        weight_init,
        bias_init,
    )
def __init__(self, in_channels, out_channels, kernel_size, stride=1,
             padding=0, dilation=1, groups=1, bias=True):
    """Up/down flip-symmetric 2-D convolution.

    Only half of ``out_channels`` filters are stored by the base class;
    the other half is presumably generated by vertically flipping them —
    TODO confirm against the base class's forward pass. ``out_channels``
    must therefore be a positive even number.

    :param in_channels: number of input channels
    :param out_channels: total number of output channels (positive, even)
    :param kernel_size: convolution kernel size (int or pair)
    :param stride: convolution stride (int or pair)
    :param padding: input padding (int or pair)
    :param dilation: spacing between kernel elements (int or pair)
    :param groups: number of blocked input/output connections
    :param bias: if True, a learnable bias is added
    :raises ValueError: if ``out_channels`` is not a positive even number
    """
    kernel_size = _pair(kernel_size)
    stride = _pair(stride)
    padding = _pair(padding)
    dilation = _pair(dilation)
    # Was an `assert`, which `python -O` strips; raise explicitly instead.
    if out_channels <= 0 or out_channels % 2 != 0:
        raise ValueError(
            "out_channels must be a positive even number, got "
            f"{out_channels}")
    # `out_channels // 2`: exact integer halving (the original
    # `int(out_channels / 2)` went through float division).
    super(FlipConv2dUD, self).__init__(
        in_channels, out_channels // 2, kernel_size, stride, padding,
        dilation, False, _pair(0), groups, bias)
def _conv_forward(self, input, weight):
    """Apply the 2-D convolution, honouring non-zero padding modes.

    When ``padding_mode`` is ``'zeros'`` the padding is delegated to
    ``F.conv2d`` itself.  For any other mode the input is explicitly
    padded first (using that mode) and the convolution then runs with
    zero padding.
    """
    if self.padding_mode == 'zeros':
        return F.conv2d(input, weight, self.bias, self.stride,
                        self.padding, self.dilation, self.groups)
    padded = F.pad(input, self._reversed_padding_repeated_twice,
                   mode=self.padding_mode)
    return F.conv2d(padded, weight, self.bias, self.stride, _pair(0),
                    self.dilation, self.groups)
def forward(self, input):
    """Average-pool ``input`` while recording an approximate FLOP count.

    Side effects: normalizes ``self.kernel_size`` / ``self.padding`` /
    ``self.stride`` to ``(h, w)`` tuples and stores the estimated cost
    in ``self.__flops__`` before delegating the actual pooling to the
    parent class.
    """
    batch_size, input_planes, input_height, input_width = input.size()
    # Normalize scalar hyper-parameters to (h, w) pairs (idempotent for
    # values that are already pairs).
    self.kernel_size = _pair(self.kernel_size)
    self.padding = _pair(self.padding)
    self.stride = _pair(self.stride)
    kernel_h, kernel_w = self.kernel_size
    pad_h, pad_w = self.padding
    stride_h, stride_w = self.stride
    # H_out = floor((H_in + 2*pad_h - kernel_h) / stride_h + 1)
    # W_out = floor((W_in + 2*pad_w - kernel_w) / stride_w + 1)
    output_height = math.floor(
        (input_height + 2 * pad_h - kernel_h) / stride_h + 1)
    output_width = math.floor(
        (input_width + 2 * pad_w - kernel_w) / stride_w + 1)
    # One kernel-sized op per output location, per plane, per batch item.
    self.__flops__ = (batch_size * input_planes * output_width
                      * output_height * kernel_h * kernel_w)
    return super(AvgPool2d, self).forward(input)
def __init__(self, in_channels, out_channels, kernel_size, stride=1,
             padding=0, dilation=1, groups=1, bias=True,
             padding_mode='zeros', p=0.5):
    """2-D convolution with DropConnect regularization.

    Configures the underlying convolution and keeps an ``nn.Dropout``
    module (and the raw probability ``p``) for use at forward time.

    :param in_channels: number of input channels
    :param out_channels: number of output channels
    :param kernel_size: convolution kernel size (int or pair)
    :param stride: convolution stride (int or pair)
    :param padding: input padding (int or pair)
    :param dilation: spacing between kernel elements (int or pair)
    :param groups: number of blocked input/output connections
    :param bias: if True, a learnable bias is added
    :param padding_mode: padding mode passed through to the base class
    :param p: drop probability for the DropConnect mask
    """
    super(DropConnectConv2D, self).__init__(
        in_channels,
        out_channels,
        _pair(kernel_size),
        _pair(stride),
        _pair(padding),
        _pair(dilation),
        False,      # NOTE(review): positionally matches `transposed` — confirm
        _pair(0),   # NOTE(review): positionally matches `output_padding`
        groups,
        bias,
        padding_mode,
    )
    self.dropout = nn.Dropout(p)
    self.p = p
def __init__(self, kernel_size, stride=1, in_channels=None,
             out_channels=None, padding=0, dilation=1, groups=1,
             bias=True, reuse_features=True,
             weight_initializer=default_initializer,
             bias_initializer=default_initializer):
    """2-D convolution built on a feature-reusing linear base class.

    Stores the usual convolution hyper-parameters (normalized to
    ``(h, w)`` pairs) on the instance, then initializes the base class
    with the kernel shape as its weight allocation.

    :param kernel_size: convolution kernel size (int or pair)
    :param stride: convolution stride (int or pair)
    :param in_channels: number of input channels
    :param out_channels: number of output channels
    :param padding: input padding (int or pair)
    :param dilation: spacing between kernel elements (int or pair)
    :param groups: number of blocked input/output connections
    :param bias: if True, a learnable bias is added
    :param reuse_features: forwarded to the base class
    :param weight_initializer: weight initializer
    :param bias_initializer: bias initializer
    """
    self.kernel_size = _pair(kernel_size)
    self.stride = _pair(stride)
    self.padding = _pair(padding)
    self.dilation = _pair(dilation)
    self.output_padding = _pair(0)
    self.groups = groups
    self.bias = bias
    super(Conv2d, self).__init__(
        in_features=in_channels,
        out_features=out_channels,
        weight_initializer=weight_initializer,
        bias_initializer=bias_initializer,
        weight_allocation=self.kernel_size,
        reuse_features=reuse_features,
        bias=bias,
    )
def __init__(
    self,
    in_channels: int,
    out_channels: int,
    *args,
    hidden_kernel: Union[int, Tuple[int, int]],
    **kwargs,  # XXX star-args are passed directly to Conv2d for the `input`
):
    """Convolutional GRU-style cell: input and hidden-state convolutions.

    Builds three convolutions: input to the stacked reset/update gates
    and candidate state (``x_hrz``), hidden state to the stacked
    reset/update gates (``h_rz``), and hidden state to the candidate
    (``h_h``).  The hidden-to-hidden convolutions use stride 1 with
    `same` padding so spatial size is preserved.

    :param in_channels: number of input channels
    :param out_channels: number of hidden-state channels
    :param hidden_kernel: kernel size for the hidden-state convolutions;
        every dimension must be odd so a symmetric `same` padding exists
    :raises ValueError: if any ``hidden_kernel`` dimension is even
    """
    # Make sure the kernel sizes are odd, and derive the padding.
    # XXX the only place where the `2d-ness` is hardcoded here!
    n_kernel = _pair(hidden_kernel)
    # Was an `assert`, which `python -O` strips; raise explicitly instead.
    if not all(k % 2 == 1 for k in n_kernel):
        raise ValueError(
            f"every `hidden_kernel` dimension must be odd, got {n_kernel}")
    n_pad = [k // 2 for k in n_kernel]  # `same` padding for stride 1

    super().__init__()

    # input to reset and update gates, and the candidate state
    self.x_hrz = Conv2d(
        in_channels, 3 * out_channels, *args, **kwargs,
    )

    # hidden state to reset and update gates
    self.h_rz = Conv2d(
        out_channels, 2 * out_channels, n_kernel,
        stride=1, bias=False, padding=n_pad,
    )

    # hidden state to the candidate
    self.h_h = Conv2d(
        out_channels, out_channels, n_kernel,
        stride=1, bias=False, padding=n_pad,
    )