Example #1
    def init(self,
             in_shape,
             out_channels,
             kernel_size,
             stride=1,
             global_args=None,
             bias=True,
             padding=0,
             out_padding=0,
             activation="ReLU",
             **kargs):
        self.prev = in_shape
        self.in_channels = in_shape[0]
        self.out_channels = out_channels
        self.kernel_size = kernel_size
        self.stride = stride
        self.padding = padding
        self.out_padding = out_padding
        self.activation = activation
        self.use_softplus = h.default(global_args, 'use_softplus', False)

        # Weight layout (in_channels, out_channels, kH, kW) matches
        # torch.nn.ConvTranspose2d.
        weights_shape = (self.in_channels, self.out_channels, kernel_size,
                         kernel_size)
        self.weight = torch.nn.Parameter(torch.Tensor(*weights_shape))
        if bias:
            # The bias is sized by the output channels, which for this
            # layout is weights_shape[1], not weights_shape[0].
            self.bias = torch.nn.Parameter(torch.Tensor(weights_shape[1]))
        else:
            self.bias = None

        outshape = getShapeConvTranspose(
            in_shape, (out_channels, kernel_size, kernel_size), stride,
            padding, out_padding)
        return outshape
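The getShapeConvTranspose helper is only called here, not defined. A minimal sketch of what it might compute, assuming the standard transposed-convolution output-size formula and the (channels, height, width) shape tuples implied by the call above:

def getShapeConvTranspose(in_shape, w_shape, stride=1, padding=0,
                          out_padding=0):
    # in_shape is (channels, height, width); w_shape is (out_channels, kH, kW).
    _, h, w = in_shape
    out_channels, kernel_h, kernel_w = w_shape
    # Standard transposed-convolution output size:
    #   out = (in - 1) * stride - 2 * padding + kernel + out_padding
    out_h = (h - 1) * stride - 2 * padding + kernel_h + out_padding
    out_w = (w - 1) * stride - 2 * padding + kernel_w + out_padding
    return (out_channels, out_h, out_w)

For example, getShapeConvTranspose((3, 32, 32), (16, 4, 4), stride=2, padding=1) yields (16, 64, 64), the usual upsampling-by-two configuration.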
Example #2
    def init(self, prev, out_channels, kernel_size, stride=1,
             global_args=None, bias=True, padding=0, **kargs):
        self.prev = prev
        self.in_channels = prev[0]
        self.out_channels = out_channels
        self.kernel_size = kernel_size
        self.stride = stride
        self.padding = padding
        self.use_softplus = h.default(global_args, 'use_softplus', False)

        # Weight layout (out_channels, in_channels, kH, kW) matches
        # torch.nn.Conv2d, so weights_shape[0] is the bias size.
        weights_shape = (self.out_channels, self.in_channels, kernel_size,
                         kernel_size)
        self.weight = torch.nn.Parameter(torch.Tensor(*weights_shape))
        if bias:
            self.bias = torch.nn.Parameter(torch.Tensor(weights_shape[0]))
        else:
            self.bias = h.zeros(weights_shape[0])

        outshape = getShapeConv(prev, (out_channels, kernel_size, kernel_size),
                                stride, padding)
        return outshape
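Likewise, getShapeConv is called but not defined. A minimal sketch under the same shape-tuple assumption, using the standard convolution output-size formula:

def getShapeConv(in_shape, w_shape, stride=1, padding=0):
    # in_shape is (channels, height, width); w_shape is (out_channels, kH, kW).
    _, h, w = in_shape
    out_channels, kernel_h, kernel_w = w_shape
    # Standard convolution output size:
    #   out = floor((in + 2 * padding - kernel) / stride) + 1
    out_h = (h + 2 * padding - kernel_h) // stride + 1
    out_w = (w + 2 * padding - kernel_w) // stride + 1
    return (out_channels, out_h, out_w)

For example, getShapeConv((3, 32, 32), (16, 3, 3), stride=1, padding=1) yields (16, 32, 32), the shape-preserving 3x3 case.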
Example #3
    def init(self, prev, global_args=None, **kargs):
        # A pass-through layer: it only reads the configuration flag and
        # leaves the input shape unchanged.
        self.use_softplus = h.default(global_args, 'use_softplus', False)
        return prev
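All three examples read the use_softplus flag through h.default, which is also not shown. A plausible sketch, assuming global_args behaves like a dict (the lookup style is a guess; the real h.default may differ):

def default(global_args, key, fallback):
    # Hypothetical helper: look the key up in global_args when one is
    # provided, otherwise return the fallback value.
    if global_args is None:
        return fallback
    return global_args.get(key, fallback)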