Esempio n. 1
0
    def __init__(self, ndim, in_channels, out_channels, ksize, stride=1,
                 pad=0, nobias=False, initialW=None, initial_bias=None,
                 cover_all=False, dilate=1, groups=1):
        """Set up an N-dimensional convolution link.

        The weight parameter gets shape
        ``(out_channels, in_channels) + ksize``; an optional bias of length
        ``out_channels`` is registered unless ``nobias`` is set.
        """
        super(ConvolutionND, self).__init__()

        # Expand scalar hyperparameters to one entry per spatial dimension.
        kernel = conv_nd.as_tuple(ksize, ndim)
        self.stride = stride
        self.pad = pad
        self.cover_all = cover_all
        self.dilate = conv_nd.as_tuple(dilate, ndim)
        self.groups = int(groups)

        with self.init_scope():
            self.W = variable.Parameter(
                initializers._get_initializer(initialW),
                (out_channels, in_channels) + kernel)

            if nobias:
                self.b = None
            else:
                # A missing bias initializer defaults to zeros.
                bias_init = initializers._get_initializer(
                    0 if initial_bias is None else initial_bias)
                self.b = variable.Parameter(bias_init, out_channels)
Esempio n. 2
0
 def __init__(self, ndim, stride=1, pad=0, outsize=None):
     """Record per-axis stride/pad tuples and an optional output size."""
     self.ndim = ndim
     self.stride = conv_nd.as_tuple(stride, ndim)
     self.pad = conv_nd.as_tuple(pad, ndim)
     # If given, the output size must name one extent per spatial axis.
     assert outsize is None or len(outsize) == ndim
     self.outs = outsize
Esempio n. 3
0
    def __init__(self, ndim, ksize, stride=None, pad=0, cover_all=True,
                 return_indices=False):
        """Set up an N-dimensional pooling function.

        ``stride`` defaults to the window size (non-overlapping pooling).
        """
        if ndim <= 0:
            raise ValueError(
                'pooling operation requires at least one spatial dimension.')

        self.ndim = ndim
        self.ksize = conv_nd.as_tuple(ksize, ndim)
        # Omitted stride means the pooling windows tile without overlap.
        self.stride = conv_nd.as_tuple(
            ksize if stride is None else stride, ndim)
        self.pad = conv_nd.as_tuple(pad, ndim)
        self.cover_all = cover_all
        self.return_indices = return_indices

        # Bookkeeping for replaying the cuDNN path in backward.
        self._used_cudnn = False
        self._cudnn_inputs = None
        self._cudnn_outputs = None
Esempio n. 4
0
    def __init__(self, ndim, in_channels, out_channels, ksize=None, stride=1,
                 pad=0, nobias=False, initialW=None, initial_bias=None,
                 cover_all=False, dilate=1, groups=1):
        """Set up an N-d convolution link with lazy input-channel inference.

        When ``ksize`` is omitted the call was the two-argument form
        ``(ndim, out_channels, ksize)``; the weight is then created on the
        first forward pass once the input channel count is known.
        """
        super(ConvolutionND, self).__init__()

        if ksize is None:
            # Shift the two-argument form into place.
            ksize = out_channels
            out_channels = in_channels
            in_channels = None

        self.out_channels = out_channels
        self.ksize = conv_nd.as_tuple(ksize, ndim)
        self.stride = stride
        self.pad = pad
        self.cover_all = cover_all
        self.dilate = conv_nd.as_tuple(dilate, ndim)
        self.groups = int(groups)

        with self.init_scope():
            self.W = variable.Parameter(
                initializers._get_initializer(initialW))
            if in_channels is not None:
                # Channel count is known up front: build the weight now.
                self._initialize_params(in_channels)

            if nobias:
                self.b = None
            else:
                bias_init = initializers._get_initializer(
                    0 if initial_bias is None else initial_bias)
                self.b = variable.Parameter(bias_init, out_channels)
Esempio n. 5
0
 def __init__(self, ndim, stride=1, pad=0, outsize=None):
     """Store stride/pad as per-dimension tuples plus an optional outsize."""
     self.ndim = ndim
     self.stride = conv_nd.as_tuple(stride, ndim)
     self.pad = conv_nd.as_tuple(pad, ndim)
     if outsize is not None:
         # One output extent is required per spatial dimension.
         assert len(outsize) == ndim
     self.outs = outsize
Esempio n. 6
0
    def __init__(self, ndim, in_channels, out_channels, ksize=None, stride=1,
                 pad=0, nobias=False, outsize=None, initialW=None,
                 initial_bias=None, dilate=1, groups=1):
        """Set up an N-d deconvolution link with lazy input-channel inference.

        When ``ksize`` is omitted the call was the two-argument form
        ``(ndim, out_channels, ksize)``; the weight is then created on the
        first forward pass once the input channel count is known.
        """
        super(DeconvolutionND, self).__init__()

        if ksize is None:
            # Shift the two-argument form into place.
            ksize = out_channels
            out_channels = in_channels
            in_channels = None

        self.out_channels = out_channels
        self.ksize = conv_nd.as_tuple(ksize, ndim)
        self.stride = stride
        self.pad = pad
        self.outsize = outsize
        self.dilate = conv_nd.as_tuple(dilate, ndim)
        self.groups = int(groups)

        with self.init_scope():
            self.W = variable.Parameter(
                initializers._get_initializer(initialW))
            if in_channels is not None:
                # Channel count is known up front: build the weight now.
                self._initialize_params(in_channels)

            if nobias:
                self.b = None
            else:
                bias_init = initializers._get_initializer(
                    0 if initial_bias is None else initial_bias)
                self.b = variable.Parameter(bias_init, out_channels)
Esempio n. 7
0
 def __init__(self, ndim, stride=1, pad=0, cover_all=False,
              dilate=1, groups=1):
     """Store N-d convolution hyperparameters, expanding scalars per axis."""
     self.ndim = ndim
     self.cover_all = cover_all
     self.groups = groups
     # Scalars become one entry per spatial dimension.
     self.stride = conv_nd.as_tuple(stride, ndim)
     self.pad = conv_nd.as_tuple(pad, ndim)
     self.dilate = conv_nd.as_tuple(dilate, ndim)
Esempio n. 8
0
 def __init__(self, ndim, stride=1, pad=0, cover_all=False,
              dilate=1, groups=1):
     """Keep convolution settings; scalar args are broadcast over axes."""
     self.ndim = ndim
     self.stride = conv_nd.as_tuple(stride, ndim)
     self.pad = conv_nd.as_tuple(pad, ndim)
     self.dilate = conv_nd.as_tuple(dilate, ndim)
     self.cover_all = cover_all
     self.groups = groups
Esempio n. 9
0
 def __init__(self, ndim, stride=1, pad=0, outsize=None,
              dilate=1, groups=1):
     """Store N-d deconvolution settings; scalars expand to per-axis tuples."""
     self.ndim = ndim
     self.stride = conv_nd.as_tuple(stride, ndim)
     self.pad = conv_nd.as_tuple(pad, ndim)
     # An explicit output size must name one extent per spatial axis.
     assert outsize is None or len(outsize) == ndim
     self.outs = outsize
     self.dilate = conv_nd.as_tuple(dilate, ndim)
     self.groups = groups
Esempio n. 10
0
 def __init__(self, ndim, stride=1, pad=0, outsize=None,
              dilate=1, groups=1):
     """Record deconvolution hyperparameters as per-dimension tuples."""
     self.ndim = ndim
     self.stride = conv_nd.as_tuple(stride, ndim)
     self.pad = conv_nd.as_tuple(pad, ndim)
     if outsize is not None:
         # One output extent per spatial dimension is required.
         assert len(outsize) == ndim
     self.outs = outsize
     self.dilate = conv_nd.as_tuple(dilate, ndim)
     self.groups = groups
Esempio n. 11
0
    def __init__(self, ndim, ksize, stride=None, pad=0, cover_all=True):
        """Set up N-d pooling; a missing stride equals the window size."""
        self.ndim = ndim
        self.ksize = conv_nd.as_tuple(ksize, ndim)
        # Omitted stride means the windows tile without overlap.
        self.stride = conv_nd.as_tuple(
            ksize if stride is None else stride, ndim)
        self.pad = conv_nd.as_tuple(pad, ndim)
        self.cover_all = cover_all
        # Tracks whether forward took the cuDNN path (needed in backward).
        self._used_cudnn = False
Esempio n. 12
0
    def __init__(self, ndim, ksize, stride=None, pad=0, cover_all=True):
        """Set up an N-d pooling function.

        When ``stride`` is omitted it defaults to ``ksize`` so the pooling
        windows do not overlap.
        """
        stride = ksize if stride is None else stride

        self.ndim = ndim
        self.ksize = conv_nd.as_tuple(ksize, ndim)
        self.stride = conv_nd.as_tuple(stride, ndim)
        self.pad = conv_nd.as_tuple(pad, ndim)
        self.cover_all = cover_all
        # Set in forward when cuDNN is used; backward inspects it.
        self._used_cudnn = False
Esempio n. 13
0
    def __init__(self, ndim, ksize, stride=None, pad=0, outsize=None,
                 cover_all=True):
        """Set up N-d unpooling; a missing stride equals the window size."""
        utils.experimental('chainer.functions.pooling.UnpoolingND')

        self.ndim = ndim
        self.ksize = conv_nd.as_tuple(ksize, ndim)
        # Omitted stride means the windows tile without overlap.
        self.stride = conv_nd.as_tuple(
            ksize if stride is None else stride, ndim)
        self.pad = conv_nd.as_tuple(pad, ndim)
        self.outs = outsize
        self.cover_all = cover_all
Esempio n. 14
0
    def __init__(self, ndim, ksize, stride=None, pad=0, cover_all=True):
        """Set up an N-d pooling function, rejecting non-positive ``ndim``."""
        if ndim <= 0:
            raise ValueError(
                'pooling operation requires at least one spatial dimension.')

        # A missing stride defaults to the window size (no overlap).
        stride = ksize if stride is None else stride

        self.ndim = ndim
        self.ksize = conv_nd.as_tuple(ksize, ndim)
        self.stride = conv_nd.as_tuple(stride, ndim)
        self.pad = conv_nd.as_tuple(pad, ndim)
        self.cover_all = cover_all
        # Tracks whether forward used cuDNN, for the backward pass.
        self._used_cudnn = False
Esempio n. 15
0
    def __init__(self,
                 in_channels,
                 out_channels,
                 ksize,
                 stride=1,
                 pad=0,
                 initialV=None,
                 nobias=False,
                 cover_all=False):
        """Set up a 1-d convolution link whose raw weight is ``V``.

        The ``V``/``initialV`` naming suggests a weight-normalization style
        parameterization; only the raw weight is registered here.
        """
        super(Convolution1D, self).__init__()
        # Normalize the kernel size to a 1-tuple for the single spatial axis.
        ksize = conv_nd.as_tuple(ksize, 1)
        self.ksize = ksize
        self.nobias = nobias
        self.stride = stride
        self.pad = pad
        self.out_channels = out_channels
        self.in_channels = in_channels
        self.cover_all = cover_all

        self.initialV = initialV

        with self.init_scope():
            V_shape = (out_channels, in_channels) + ksize
            initialV = initializers._get_initializer(initialV)
            self.V = Parameter(initialV, V_shape)

        # NOTE(review): when ``nobias`` is False, ``b`` is never assigned
        # here — presumably it is created lazily elsewhere (e.g. on the
        # first forward pass, weight-norm style). Confirm against the rest
        # of the class before relying on ``self.b`` existing.
        if nobias:
            self.b = None
Esempio n. 16
0
    def __init__(self, ndim, in_channels, out_channels, ksize, stride=1, pad=0,
                 nobias=False, initialW=None, initial_bias=None,
                 cover_all=False, use_gamma=False, Ip=1, factor=None):
        """Set up a spectrally-normalized N-d convolution link.

        Registers a persistent vector ``u`` (state for the spectral-norm
        estimate) and, when ``use_gamma`` is set, a ``gamma`` parameter
        initialized from the largest singular value of the flattened weight.
        """
        super(SNConvolutionND, self).__init__()
        kernel = conv_nd.as_tuple(ksize, ndim)
        self.stride = stride
        self.pad = pad
        self.cover_all = cover_all
        self.use_gamma = use_gamma
        self.Ip = Ip
        self.factor = factor
        # Persistent state vector used by the spectral-norm estimate.
        self.u = np.random.normal(size=(1, out_channels)).astype(dtype="f")
        self.register_persistent('u')

        with self.init_scope():
            self.W = variable.Parameter(
                initializers._get_initializer(initialW),
                (out_channels, in_channels) + kernel)

            if nobias:
                self.b = None
            else:
                bias_init = initializers._get_initializer(
                    0 if initial_bias is None else initial_bias)
                self.b = variable.Parameter(bias_init, out_channels)

            if self.use_gamma:
                # Seed gamma with the top singular value of the 2-d view of W.
                flat_W = self.W.data.reshape(self.W.shape[0], -1)
                _, singular, _ = np.linalg.svd(flat_W)
                self.gamma = variable.Parameter(
                    singular[0], (1,) * len(self.W.shape))
Esempio n. 17
0
    def __init__(self, ndim, in_channels, out_channels, ksize, stride=1,
                 pad=0, nobias=False, outsize=None, initialW=None,
                 initial_bias=None):
        """Set up an N-dimensional deconvolution link.

        The weight has shape ``(in_channels, out_channels) + ksize`` —
        note the transposed channel order relative to a convolution.
        """
        super(DeconvolutionND, self).__init__()

        kernel = conv_nd.as_tuple(ksize, ndim)
        self.stride = stride
        self.pad = pad
        self.outsize = outsize

        with self.init_scope():
            self.W = variable.Parameter(
                initializers._get_initializer(initialW),
                (in_channels, out_channels) + kernel)
            if nobias:
                self.b = None
            else:
                # A missing bias initializer defaults to zeros.
                bias_init = initializers._get_initializer(
                    0 if initial_bias is None else initial_bias)
                self.b = variable.Parameter(bias_init, out_channels)
Esempio n. 18
0
    def __init__(self, ndim, in_channels, out_channels, ksize, stride=1,
                 pad=0, outsize=None, initialW=None, initial_bias=0,
                 use_cudnn=True):
        """Set up an N-d deconvolution link (old ``add_param`` style).

        ``initial_bias=None`` disables the bias parameter entirely.
        """
        kernel = conv_nd.as_tuple(ksize, ndim)
        self.stride = stride
        self.pad = pad
        self.use_cudnn = use_cudnn
        self.outsize = outsize

        super(DeconvolutionND, self).__init__()

        # Deconvolution weights carry (in, out) channel order.
        self.add_param('W', (in_channels, out_channels) + kernel,
                       initializer=initializers._get_initializer(initialW))

        if initial_bias is not None:
            bias_init = initializers._get_initializer(initial_bias)
            self.add_param('b', out_channels, initializer=bias_init)
        else:
            self.b = None
    def __init__(self, ndim, ksize, stride=None, pad=0, outsize=None,
                 cover_all=True):
        """Set up an N-d unpooling function.

        When ``stride`` is omitted it defaults to ``ksize`` so the windows
        do not overlap.
        """
        utils.experimental('chainer.functions.pooling.UnpoolingND')
        stride = ksize if stride is None else stride

        self.ndim = ndim
        self.ksize = conv_nd.as_tuple(ksize, ndim)
        self.stride = conv_nd.as_tuple(stride, ndim)
        self.pad = conv_nd.as_tuple(pad, ndim)
        self.outs = outsize
        self.cover_all = cover_all
Esempio n. 20
0
    def __init__(self, ndim, in_channels, out_channels, ksize, stride=1, pad=0,
                 nobias=False, outsize=None, initialW=None, initial_bias=None,
                 dilate=1, groups=1):
        """Set up an N-d deconvolution link with dilation and group support.

        The weight has shape ``(in_channels, out_channels) + ksize``.
        """
        super(DeconvolutionND, self).__init__()

        kernel = conv_nd.as_tuple(ksize, ndim)
        self.stride = stride
        self.pad = pad
        self.outsize = outsize
        self.dilate = conv_nd.as_tuple(dilate, ndim)
        self.groups = int(groups)

        with self.init_scope():
            self.W = variable.Parameter(
                initializers._get_initializer(initialW),
                (in_channels, out_channels) + kernel)
            if nobias:
                self.b = None
            else:
                # A missing bias initializer defaults to zeros.
                bias_init = initializers._get_initializer(
                    0 if initial_bias is None else initial_bias)
                self.b = variable.Parameter(bias_init, out_channels)
Esempio n. 21
0
    def __init__(self, ndim, in_channels, out_channels, ksize, stride=1, pad=0,
                 initialW=None, initial_bias=None, use_cudnn=True,
                 cover_all=False):
        """Set up an N-d convolution link (old Link constructor style).

        ``initial_bias=None`` disables the bias parameter entirely.
        """
        kernel = conv_nd.as_tuple(ksize, ndim)
        self.stride = stride
        self.pad = pad
        self.use_cudnn = use_cudnn
        self.cover_all = cover_all

        # The base constructor allocates W from its shape; fill it afterwards.
        super(ConvolutionND, self).__init__(
            W=(out_channels, in_channels) + kernel)
        initializers.init_weight(self.W.data, initialW)

        if initial_bias is not None:
            self.add_param('b', out_channels)
            initializers.init_weight(self.b.data, initial_bias)
        else:
            self.b = None
Esempio n. 22
0
    def __init__(self, ndim, in_channels, out_channels, ksize, stride=1, pad=0,
                 outsize=None, initialW=None, initial_bias=0, use_cudnn=True):
        """Set up an N-d deconvolution link (old ``add_param`` style).

        ``initial_bias=None`` disables the bias parameter entirely.
        """
        kernel = conv_nd.as_tuple(ksize, ndim)
        self.stride = stride
        self.pad = pad
        self.use_cudnn = use_cudnn
        self.outsize = outsize

        super(DeconvolutionND, self).__init__()

        # Deconvolution weights carry (in, out) channel order.
        self.add_param('W', (in_channels, out_channels) + kernel,
                       initializer=initializers._get_initializer(initialW))

        if initial_bias is not None:
            bias_init = initializers._get_initializer(initial_bias)
            self.add_param('b', out_channels, initializer=bias_init)
        else:
            self.b = None
Esempio n. 23
0
    def __init__(self, ndim, in_channels, out_channels, ksize, stride=1, pad=0,
                 nobias=False, initialW=None, initial_bias=None,
                 cover_all=False):
        """Set up an N-dimensional convolution link.

        The weight parameter gets shape
        ``(out_channels, in_channels) + ksize``.
        """
        super(ConvolutionND, self).__init__()

        kernel = conv_nd.as_tuple(ksize, ndim)
        self.stride = stride
        self.pad = pad
        self.cover_all = cover_all

        with self.init_scope():
            self.W = variable.Parameter(
                initializers._get_initializer(initialW),
                (out_channels, in_channels) + kernel)

            if nobias:
                self.b = None
            else:
                # A missing bias initializer defaults to zeros.
                bias_init = initializers._get_initializer(
                    0 if initial_bias is None else initial_bias)
                self.b = variable.Parameter(bias_init, out_channels)
 def test_tuple(self):
     """A tuple already of length ``ndim`` is returned unchanged."""
     self.assertEqual(conv_nd.as_tuple((1, 2, 3), 3), (1, 2, 3))
 def test_scalar(self):
     """A scalar is replicated once per dimension."""
     self.assertEqual(conv_nd.as_tuple(1, 3), (1, 1, 1))
Esempio n. 26
0
 def __init__(self, ndim, stride=1, pad=0, use_cudnn=True, cover_all=False):
     """Store pooling settings; scalar stride/pad expand to per-axis tuples."""
     self.ndim = ndim
     self.use_cudnn = use_cudnn
     self.cover_all = cover_all
     self.stride = conv_nd.as_tuple(stride, ndim)
     self.pad = conv_nd.as_tuple(pad, ndim)
 def test_tuple_invalid_length(self):
     """A tuple whose length differs from ``ndim`` triggers an assertion."""
     self.assertRaises(AssertionError, conv_nd.as_tuple, (1,), 3)
Esempio n. 28
0
 def __init__(self, ndim, stride=1, pad=0, use_cudnn=True, cover_all=False):
     """Record pooling hyperparameters as per-dimension tuples."""
     self.ndim = ndim
     self.stride = conv_nd.as_tuple(stride, ndim)
     self.pad = conv_nd.as_tuple(pad, ndim)
     self.use_cudnn = use_cudnn
     self.cover_all = cover_all
 def test_list(self):
     """A list of the right length is converted to the equivalent tuple."""
     self.assertEqual(conv_nd.as_tuple([1, 2, 3], 3), (1, 2, 3))