def __init__(self,
                 groups,
                 size=None,
                 eps=1e-5,
                 initial_gamma=None,
                 initial_beta=None):
        super(GroupNormalization, self).__init__()
        if initial_gamma is None:
            initial_gamma = 1
        if initial_beta is None:
            initial_beta = 0

        highprec_dtype = chainer.get_dtype(None, map_mixed16=numpy.float32)

        with self.init_scope():
            self.groups = groups
            gamma_initializer = \
                initializers._get_initializer(initial_gamma)
            gamma_initializer.dtype = highprec_dtype
            beta_initializer = \
                initializers._get_initializer(initial_beta)
            beta_initializer.dtype = highprec_dtype
            self.gamma = variable.Parameter(gamma_initializer)
            self.beta = variable.Parameter(beta_initializer)
            self.eps = eps

        if size is not None:
            self._initialize_params(size)
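
A minimal usage sketch, assuming the upstream chainer.links.GroupNormalization API that this __init__ mirrors: when size is omitted, gamma and beta stay uninitialized until the first forward pass infers the channel count.

import numpy
import chainer.links as L

gn = L.GroupNormalization(groups=4)                    # size deferred; params allocated lazily
x = numpy.zeros((2, 8, 5, 5), dtype=numpy.float32)     # channel count (8) must be divisible by groups
y = gn(x)                                              # gamma/beta are initialized here with size 8
print(y.shape)                                         # (2, 8, 5, 5)
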
Example #2
    def __init__(self, size, comm, decay=0.9, eps=2e-5, dtype=numpy.float32,
                 use_gamma=True, use_beta=True,
                 initial_gamma=None, initial_beta=None,
                 communication_backend='auto'):
        chainer.utils.experimental(
            'chainermn.links.MultiNodeBatchNormalization')

        super(MultiNodeBatchNormalization, self).__init__()
        self.comm = comm
        self.avg_mean = numpy.zeros(size, dtype=dtype)
        self.register_persistent('avg_mean')
        self.avg_var = numpy.zeros(size, dtype=dtype)
        self.register_persistent('avg_var')
        self.N = 0
        self.register_persistent('N')
        self.decay = decay
        self.eps = eps

        self._communication_backend = \
            get_communication_backend(comm, communication_backend)

        with self.init_scope():
            if use_gamma:
                if initial_gamma is None:
                    initial_gamma = 1
                initial_gamma = initializers._get_initializer(initial_gamma)
                initial_gamma.dtype = dtype
                self.gamma = variable.Parameter(initial_gamma, size)
            if use_beta:
                if initial_beta is None:
                    initial_beta = 0
                initial_beta = initializers._get_initializer(initial_beta)
                initial_beta.dtype = dtype
                self.beta = variable.Parameter(initial_beta, size)
Example #3
    def __init__(self,
                 in_size,
                 out_size=None,
                 nobias=False,
                 initialW=None,
                 initial_bias=None):
        super(Linear, self).__init__()

        if out_size is None:
            in_size, out_size = None, in_size
        self.out_size = out_size

        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer)
            if in_size is not None:
                self._initialize_params(in_size)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = 0
                bias_initializer = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(bias_initializer, out_size)
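
The out_size is None branch above implements the one-argument form, where in_size is inferred at the first call; a small sketch of both construction forms, assuming the upstream chainer.links.Linear:

import numpy
import chainer.links as L

fc1 = L.Linear(784, 100)                          # in_size and out_size given: W allocated now
fc2 = L.Linear(100)                               # only out_size given: in_size inferred lazily

x = numpy.zeros((32, 784), dtype=numpy.float32)
h = fc1(x)                                        # shape (32, 100)
y = fc2(h)                                        # fc2.W is initialized here as (100, 100)
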
Example #4
    def __init__(self, in_channels, out_channels, ksize=None, stride=1, pad=0,
                nobias=False, initialW=None, initial_bias=None, **kwargs):
        super(ComplexConv2D, self).__init__()
        
        if ksize is None:
            out_channels, ksize, in_channels = in_channels, out_channels, None

        self.ksize = ksize
        self.stride = _pair(stride)
        self.pad = _pair(pad)
        self.out_channels = out_channels
        self.in_channels = in_channels

        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer)
            if in_channels is not None:
                self._initialize_params(in_channels)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = 0
                bias_initializer = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(bias_initializer, out_channels)
Example #5
    def __init__(self, n_layers, in_size, out_size, dropout, use_bi_direction,
                 **kwargs):
        argument.check_unexpected_kwargs(
            kwargs,
            use_cudnn='use_cudnn argument is not supported anymore. '
            'Use chainer.using_config')
        argument.assert_kwargs_empty(kwargs)

        weights = []
        direction = 2 if use_bi_direction else 1
        for i in six.moves.range(n_layers):
            for di in six.moves.range(direction):
                weight = link.Link()
                with weight.init_scope():
                    for j in six.moves.range(6):
                        if i == 0 and j < 3:
                            w_in = in_size
                        elif i > 0 and j < 3:
                            w_in = out_size * direction
                        else:
                            w_in = out_size
                        w = variable.Parameter(GlorotNormal(),
                                               (out_size, w_in))
                        b = variable.Parameter(0, (out_size, ))
                        setattr(weight, 'w%d' % j, w)
                        setattr(weight, 'b%d' % j, b)
                weights.append(weight)

        super(NStepGRUBase, self).__init__(*weights)

        self.n_layers = n_layers
        self.dropout = dropout
        self.out_size = out_size
        self.direction = direction
        self.rnn = rnn.n_step_bigru if use_bi_direction else rnn.n_step_gru
Example #6
	def __init__(self, in_channels, out_channels, ksize=None, stride=1, pad=0, nobias=False, initialV=None, **kwargs):
		super(Convolution2D, self).__init__()

		argument.check_unexpected_kwargs(
			kwargs, deterministic="deterministic argument is not "
			"supported anymore. "
			"Use chainer.using_config('cudnn_deterministic', value) "
			"context where value is either `True` or `False`.")
		argument.assert_kwargs_empty(kwargs)

		if ksize is None:
			out_channels, ksize, in_channels = in_channels, out_channels, None

		self.ksize = ksize
		self.stride = _pair(stride)
		self.pad = _pair(pad)
		self.out_channels = out_channels
		self.nobias = nobias

		with self.init_scope():
			V_initializer = initializers._get_initializer(initialV)
			self.V = variable.Parameter(V_initializer)
			if in_channels is not None:
				kh, kw = _pair(self.ksize)
				V_shape = (self.out_channels, in_channels, kh, kw)
				self.V.initialize(V_shape)

			self.b = None if nobias else variable.Parameter(None)
			self.g = variable.Parameter(None)		
Example #7
    def __init__(self,
                 size,
                 decay=0.9,
                 eps=2e-5,
                 dtype=numpy.float32,
                 valid_test=False,
                 use_gamma=True,
                 use_beta=True,
                 initial_gamma=None,
                 initial_beta=None):
        super(InstanceNormalization, self).__init__()
        self.valid_test = valid_test
        self.avg_mean = None
        self.avg_var = None
        self.N = 0
        if valid_test:
            self.register_persistent('avg_mean')
            self.register_persistent('avg_var')
            self.register_persistent('N')
        self.decay = decay
        self.eps = eps

        with self.init_scope():
            if use_gamma:
                if initial_gamma is None:
                    initial_gamma = 1
                initial_gamma = initializers._get_initializer(initial_gamma)
                initial_gamma.dtype = dtype
                self.gamma = variable.Parameter(initial_gamma, size)
            if use_beta:
                if initial_beta is None:
                    initial_beta = 0
                initial_beta = initializers._get_initializer(initial_beta)
                initial_beta.dtype = dtype
                self.beta = variable.Parameter(initial_beta, size)
Example #8
    def __init__(self,
                 ndim,
                 in_channels,
                 out_channels,
                 ksize,
                 stride=1,
                 pad=0,
                 nobias=False,
                 initialW=None,
                 initial_bias=None,
                 cover_all=False):
        super(ConvolutionND, self).__init__()

        ksize = conv_nd.as_tuple(ksize, ndim)
        self.stride = stride
        self.pad = pad
        self.cover_all = cover_all

        with self.init_scope():
            W_shape = (out_channels, in_channels) + ksize
            self.W = variable.Parameter(
                initializers._get_initializer(initialW), W_shape)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = 0
                initial_bias = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(initial_bias, out_channels)
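
A usage sketch for the N-dimensional convolution, assuming the upstream chainer.links.ConvolutionND signature (ndim, in_channels, out_channels, ksize, ...):

import numpy
import chainer.links as L

conv3d = L.ConvolutionND(3, 1, 8, ksize=3, pad=1)          # ndim=3: volumetric convolution
x = numpy.zeros((2, 1, 16, 16, 16), dtype=numpy.float32)   # (batch, channels, d1, d2, d3)
y = conv3d(x)
print(y.shape)                                             # (2, 8, 16, 16, 16) with ksize=3, pad=1, stride=1
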
Example #9
    def __init__(self,
                 size,
                 decay=0.9,
                 eps=2e-5,
                 dtype=None,
                 valid_test=False):
        super(ConditionalInstanceNormalization, self).__init__()
        self.valid_test = valid_test
        self.dtype = chainer.get_dtype(dtype)
        self.decay = decay
        self.eps = eps

        with self.init_scope():
            self.instance_norm = InstanceNormalization(size,
                                                       use_gamma=False,
                                                       use_beta=False,
                                                       decay=self.decay,
                                                       eps=self.eps,
                                                       dtype=self.dtype)
            # class 0
            initial_gamma0 = initializers._get_initializer(1)
            initial_gamma0.dtype = self.dtype
            self.gamma0 = variable.Parameter(initial_gamma0, (1, size, 1, 1))
            initial_beta0 = initializers._get_initializer(0)
            initial_beta0.dtype = self.dtype
            self.beta0 = variable.Parameter(initial_beta0, (1, size, 1, 1))
            # class 1
            initial_gamma1 = initializers._get_initializer(1)
            initial_gamma1.dtype = self.dtype
            self.gamma1 = variable.Parameter(initial_gamma1, (1, size, 1, 1))
            initial_beta1 = initializers._get_initializer(0)
            initial_beta1.dtype = self.dtype
            self.beta1 = variable.Parameter(initial_beta1, (1, size, 1, 1))
Example #10
    def __init__(self,
                 in_size,
                 out_size,
                 ratio=.5,
                 nobias=False,
                 initialW=None,
                 initial_bias=None):
        super(SimplifiedDropconnect, self).__init__()

        self.out_size = out_size
        self.ratio = ratio

        if initialW is None:
            initialW = initializers.HeNormal(1. / numpy.sqrt(2))

        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer)
            if in_size is not None:
                self._initialize_params(in_size)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = initializers.Constant(0)
                bias_initializer = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(bias_initializer, out_size)
Example #11
    def __init__(
            self,
            in_size: tp.Optional[int],
            out_size: tp.Optional[int] = None,
            nobias: bool = False,
            initialW: tp.Optional[types.InitializerSpec] = None,
            initial_bias: tp.Optional[types.InitializerSpec] = None) -> None:
        super(Linear, self).__init__()

        if out_size is None:
            in_size, out_size = None, in_size
        self.in_size = in_size
        self.out_size = out_size

        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(
                W_initializer)  # type: variable.Variable  # NOQA
            if in_size is not None:
                self._initialize_params(in_size)

            if nobias:
                self.b = None  # type: tp.Optional[variable.Variable]
            else:
                if initial_bias is None:
                    initial_bias = 0
                bias_initializer = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(bias_initializer, out_size)
Example #12
    def __init__(self, mid, sz):
        super(newLSTM, self).__init__(  # (1, 3, 100, 100) → (1, mid*4, 100, 100)
            Wx_Inc_1=L.Convolution2D(None, mid, ksize=1, pad=0),
            Wx_Inc_3=L.Convolution2D(None, mid * 2, ksize=3, pad=1),
            Wx_Inc_5=L.Convolution2D(None, mid // 2, ksize=5, pad=2),
            Wx_concat=L.Convolution2D(None, mid * 4, ksize=1, pad=0),
            Wx_input=L.Convolution2D(None, mid * 4, ksize=sz, pad=sz // 2),
            channel_attention=ChannelAttention(mid),
            spatial_attention=SpatialAttention(),
            Wh1_Linear=L.Linear(None, 256),
            Wh2_Linear=L.Linear(None, 256),
            Wh1_conv=L.Convolution2D(None, 1, ksize=7, pad=3),
            Wh_repeat=L.Convolution2D(None, mid * 4, ksize=sz, pad=sz // 2,
                                      nobias=True)  # (1, 3, 100, 100) → (1, mid*4, 100, 100)
        )

        self.mid = mid #64
        self.pc = None
        self.ph = None

        with self.init_scope():
            Wci_initializer = initializers.Zero()
            self.Wci = variable.Parameter(Wci_initializer)
            Wcf_initializer = initializers.Zero()
            self.Wcf = variable.Parameter(Wcf_initializer)
            Wco_initializer = initializers.Zero()
            self.Wco = variable.Parameter(Wco_initializer)
Example #13
    def __init__(self, in_channels, out_channels, ksize, stride=1, pad=0,
                 nobias=False, initialW=None, initial_bias=None, mode=None):

        if mode is None:
            raise NotImplementedError()

        super(UVConvolution2D, self).__init__()

        self.mode = mode

        self.ksize = ksize
        self.stride = _pair(stride)
        self.pad = _pair(pad)
        self.out_channels = out_channels

        with self.init_scope():
            U_initializer = initializers._get_initializer(initialW)
            V_initializer = initializers._get_initializer(initialW)
            D_initializer = initializers._get_initializer(chainer.initializers.One())
            self.U = variable.Parameter(U_initializer)
            self.V = variable.Parameter(V_initializer)
            self.D = variable.Parameter(D_initializer)
            if in_channels is not None:
                self._initialize_params(in_channels)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = 0
                bias_initializer = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(bias_initializer, out_channels)
Example #14
    def __init__(self, in_channels, out_channels, A, K, wscale=1, bias=0,
                 nobias=False, initialW=None, initial_bias=None):
        super(GraphConvolution, self).__init__()

        L = graph.create_laplacian(A)

        self.K = K
        self.out_channels = out_channels

        self.wscale = wscale

        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer)
            if in_channels is not None:
                self._initialize_params(in_channels)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = bias
                bias_initializer = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(bias_initializer, out_channels)

        self.func = graph_convolution.GraphConvolutionFunction(L, K)
Example #15
    def __init__(self, ndim, in_channels, out_channels, ksize, stride=1, pad=0,
                 nobias=False, initialW=None, initial_bias=None,
                 cover_all=False, use_gamma=False, Ip=1, factor=None):
        super(SNConvolutionND, self).__init__()
        ksize = conv_nd.as_tuple(ksize, ndim)
        self.stride = stride
        self.pad = pad
        self.cover_all = cover_all
        self.use_gamma = use_gamma
        self.Ip = Ip
        self.u = np.random.normal(size=(1, out_channels)).astype(dtype="f")
        self.register_persistent('u')
        self.factor = factor
        with self.init_scope():
            W_shape = (out_channels, in_channels) + ksize
            self.W = variable.Parameter(
                initializers._get_initializer(initialW), W_shape)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = 0
                initial_bias = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(initial_bias, out_channels)

            if self.use_gamma:
                W_mat = self.W.data.reshape(self.W.shape[0], -1)
                _, s, _ = np.linalg.svd(W_mat)
                self.gamma = variable.Parameter(s[0], (1,) * len(self.W.shape))
Example #16
    def __init__(self,
                 in_size,
                 out_size,
                 use_gamma=False,
                 nobias=False,
                 initialW=None,
                 initial_bias=None,
                 mode=None):
        if mode is None:
            raise NotImplementedError()
        super(UVLinear, self).__init__()
        self.mode = mode
        self.in_size = in_size
        self.out_size = out_size

        with self.init_scope():
            U_initializer = initializers._get_initializer(initialW)
            V_initializer = initializers._get_initializer(initialW)
            D_initializer = initializers._get_initializer(
                chainer.initializers.One())
            self.U = variable.Parameter(U_initializer)
            self.V = variable.Parameter(V_initializer)
            self.D = variable.Parameter(D_initializer)
            if in_size is not None:
                self._initialize_params(in_size)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = 0
                bias_initializer = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(bias_initializer, out_size)
Example #17
    def __init__(self, size=None, decay=0.9, eps=2e-5, dtype=numpy.float32,
                 use_gamma=True, use_beta=True,
                 initial_gamma=None, initial_beta=None, axis=None):
        super(BatchNormalization, self).__init__()

        if size is None and axis is None:
            raise RuntimeError('size or axis is required')
        self.N = 0
        self.register_persistent('N')
        self.decay = decay
        self.eps = eps
        if isinstance(axis, int):
            axis = (axis,)
        self.axis = axis
        self._dtype = dtype

        with self.init_scope():
            if use_gamma:
                if initial_gamma is None:
                    initial_gamma = 1
                gamma_initializer = \
                    initializers._get_initializer(initial_gamma)
                gamma_initializer.dtype = self._dtype
                self.gamma = variable.Parameter(gamma_initializer)
            if use_beta:
                if initial_beta is None:
                    initial_beta = 0
                beta_initializer = initializers._get_initializer(initial_beta)
                beta_initializer.dtype = self._dtype
                self.beta = variable.Parameter(beta_initializer)

        if size is not None:
            self._initialize_params(size)
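
A sketch of the train/test behaviour this link implements (standard chainer.links.BatchNormalization): batch statistics are used and the running averages updated while chainer.config.train is True, and the accumulated avg_mean/avg_var are used otherwise.

import numpy
import chainer
import chainer.links as L

bn = L.BatchNormalization(16)
x = numpy.random.randn(8, 16).astype(numpy.float32)

y_train = bn(x)                           # batch mean/var; avg_mean and avg_var are updated
with chainer.using_config('train', False):
    y_test = bn(x)                        # uses the accumulated running statistics instead
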
Example #18
    def __init__(self, in_channels, out_channels, ksize=None, stride=1, pad=0,
                 nobias=False, initialW=None, initial_bias=None):
        super(IncompleteDepthwiseConvolution2D, self).__init__()
        if ksize is None:
            out_channels, ksize, in_channels = in_channels, out_channels, None
        self.ksize = ksize
        self.stride = _pair(stride)
        self.pad = _pair(pad)
        self.channel_multiplier = out_channels
        self.nobias = nobias

        if initialW is None:
            initialW = initializers.HeNormal(1. / numpy.sqrt(2))

        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = initializers.Constant(0)
                bias_initializer = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(bias_initializer)

        if in_channels is not None:
            self._initialize_params(in_channels)
Example #19
    def __init__(self,
                 in_channels,
                 out_channels,
                 ksize=None,
                 stride=1,
                 pad=0,
                 dilate=1,
                 nobias=False,
                 initialW=None,
                 initial_bias=None):
        super(DilatedConvolution2D, self).__init__()

        if ksize is None:
            out_channels, ksize, in_channels = in_channels, out_channels, None

        self.ksize = ksize
        self.stride = _pair(stride)
        self.pad = _pair(pad)
        self.dilate = _pair(dilate)
        self.out_channels = out_channels

        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer)
            if in_channels is not None:
                self._initialize_params(in_channels)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = 0
                initial_bias = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(initial_bias, out_channels)
Example #20
    def __init__(self, ndim, in_channels, out_channels, ksize=None, stride=1,
                 pad=0, nobias=False, initialW=None, initial_bias=None,
                 cover_all=False, dilate=1, groups=1):
        super(ConvolutionND, self).__init__()

        if ksize is None:
            out_channels, ksize, in_channels = \
                in_channels, out_channels, None

        self.out_channels = out_channels
        self.ksize = conv_nd.as_tuple(ksize, ndim)
        self.stride = stride
        self.pad = pad
        self.cover_all = cover_all
        self.dilate = conv_nd.as_tuple(dilate, ndim)
        self.groups = int(groups)

        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer)
            if in_channels is not None:
                self._initialize_params(in_channels)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = 0
                initial_bias = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(initial_bias, out_channels)
Example #21
    def __init__(self, in_channels, out_channels, ksize=None, stride=1, pad=0,
                 nobias=False, initialW=None, initial_bias=None, **kwargs):
        super(Convolution2D, self).__init__()

        dilate, groups = argument.parse_kwargs(
            kwargs, ('dilate', 1), ('groups', 1),
            deterministic="deterministic argument is not supported anymore. "
            "Use chainer.using_config('cudnn_deterministic', value) "
            "context where value is either `True` or `False`.")

        if ksize is None:
            out_channels, ksize, in_channels = in_channels, out_channels, None

        self.ksize = ksize
        self.stride = _pair(stride)
        self.pad = _pair(pad)
        self.dilate = _pair(dilate)
        self.out_channels = out_channels
        self.groups = int(groups)

        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer)
            if in_channels is not None:
                self._initialize_params(in_channels)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = 0
                bias_initializer = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(bias_initializer, out_channels)
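
The ksize is None branch above is the argument-shifting idiom used throughout these examples: Convolution2D(out_channels, ksize) is reinterpreted with in_channels omitted and inferred at the first call. A sketch with the upstream chainer.links.Convolution2D:

import numpy
import chainer.links as L

conv_a = L.Convolution2D(3, 16, ksize=3, pad=1)    # explicit in_channels=3
conv_b = L.Convolution2D(16, 3, pad=1)             # two-argument form: out_channels=16, ksize=3

x = numpy.zeros((1, 3, 32, 32), dtype=numpy.float32)
print(conv_a(x).shape, conv_b(x).shape)            # both (1, 16, 32, 32); conv_b infers in_channels here
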
Example #22
    def __init__(self,
                 size,
                 decay=0.9,
                 eps=2e-5,
                 dtype=numpy.float32,
                 use_gamma=True,
                 use_beta=True,
                 initial_gamma=None,
                 initial_beta=None,
                 always_stats=False):
        super(MaskedBatchNormalization, self).__init__()
        self.avg_mean = numpy.zeros(size, dtype=dtype)
        self.register_persistent('avg_mean')
        self.avg_var = numpy.zeros(size, dtype=dtype)
        self.register_persistent('avg_var')
        self.N = 0
        self.register_persistent('N')
        self.decay = decay
        self.eps = eps
        self.always_stats = always_stats
        self.mask = None

        with self.init_scope():
            if use_gamma:
                if initial_gamma is None:
                    initial_gamma = 1
                initial_gamma = initializers._get_initializer(initial_gamma)
                initial_gamma.dtype = dtype
                self.gamma = variable.Parameter(initial_gamma, size)
            if use_beta:
                if initial_beta is None:
                    initial_beta = 0
                initial_beta = initializers._get_initializer(initial_beta)
                initial_beta.dtype = dtype
                self.beta = variable.Parameter(initial_beta, size)
Example #23
    def __init__(self, in_channels, out_channels, ksize, stride=1, pad=0,
                 nobias=False, initialW=None, initial_bias=None):
        super(DeformableConvolution2DSampler, self).__init__()

        self.ksize = ksize
        self.stride = _pair(stride)
        self.pad = _pair(pad)
        self.out_channels = out_channels
        self.initialW = initialW

        if initialW is None:
            initialW = constant.Zero()

        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = initializers.Constant(0)
                bias_initializer = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(bias_initializer)

        if in_channels is not None:
            self._initialize_params(in_channels)
Example #24
    def __init__(self,
                 ndim,
                 in_channels,
                 out_channels,
                 ksize,
                 stride=1,
                 pad=0,
                 nobias=False,
                 outsize=None,
                 initialW=None,
                 initial_bias=None):
        super(DeconvolutionND, self).__init__()

        ksize = conv_nd.as_tuple(ksize, ndim)
        self.stride = stride
        self.pad = pad
        self.outsize = outsize

        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer,
                                        (in_channels, out_channels) + ksize)
            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = 0
                initial_bias = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(initial_bias, out_channels)
Example #25
    def __init__(self,
                 in_channels,
                 out_channels,
                 in_size=None,
                 ksize=None,
                 stride=1,
                 nobias=False,
                 initialW=None,
                 initial_bias=None,
                 **kwargs):
        super(LocalConvolution2D, self).__init__()
        self.ksize = ksize
        self.stride = _pair(stride)
        self.nobias = nobias
        self.out_channels = out_channels
        with self.init_scope():
            W_initializer = initializers._get_initializer(initialW)
            self.W = variable.Parameter(W_initializer)

            if nobias:
                self.b = None
            else:
                if initial_bias is None:
                    initial_bias = 0
                bias_initializer = initializers._get_initializer(initial_bias)
                self.b = variable.Parameter(bias_initializer)

            if in_channels is not None and in_size is not None:
                self._initialize_params(in_channels, _pair(in_size))
Example #26
    def __init__(self,
                 n_layers,
                 in_size,
                 out_size,
                 dropout,
                 *,
                 initialW=None,
                 initial_bias=None,
                 **kwargs):
        if kwargs:
            argument.check_unexpected_kwargs(
                kwargs,
                use_cudnn='use_cudnn argument is not supported anymore. '
                'Use chainer.using_config',
                use_bi_direction='use_bi_direction is not supported anymore',
                activation='activation is not supported anymore')
            argument.assert_kwargs_empty(kwargs)

        weights = []
        if self.use_bi_direction:
            direction = 2
        else:
            direction = 1

        W_initializer = initializers._get_initializer(initialW)
        if initial_bias is None:
            initial_bias = 0
        bias_initializer = initializers._get_initializer(initial_bias)

        for i in six.moves.range(n_layers):
            for di in six.moves.range(direction):
                weight = link.Link()
                with weight.init_scope():
                    for j in six.moves.range(self.n_weights):
                        if i == 0 and j < self.n_weights // 2:
                            w_in = in_size
                        elif i > 0 and j < self.n_weights // 2:
                            w_in = out_size * direction
                        else:
                            w_in = out_size
                        w = variable.Parameter(W_initializer, (out_size, w_in))
                        b = variable.Parameter(bias_initializer, out_size)
                        setattr(weight, 'w%d' % j, w)
                        setattr(weight, 'b%d' % j, b)
                weights.append(weight)

        super(NStepRNNBase, self).__init__(*weights)

        self.ws = [[
            getattr(layer, 'w%d' % i) for i in six.moves.range(self.n_weights)
        ] for layer in self]
        self.bs = [[
            getattr(layer, 'b%d' % i) for i in six.moves.range(self.n_weights)
        ] for layer in self]

        self.n_layers = n_layers
        self.dropout = dropout
        self.out_size = out_size
        self.direction = direction
Example #27
    def __init__(self, n_out):
        super(RPL4, self).__init__()
        with self.init_scope():
            zero_initializer = initializers.Constant(0.0)
            self.W = variable.Parameter(zero_initializer, (1, n_out))
            self.b = variable.Parameter(zero_initializer, (1, n_out))
            logbias_initializer = initializers.Constant(-20.0)
            self.lb = variable.Parameter(logbias_initializer, (1, n_out))
Example #28
    def __init__(self, beta_shape, beta_init=1.0):
        super(Swish, self).__init__()

        with self.init_scope():
            if beta_shape is not None:
                self.beta = variable.Parameter(beta_init, beta_shape)
            else:
                beta_init = initializers.Constant(beta_init)
                self.beta = variable.Parameter(beta_init)
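
The link above learns beta for the Swish activation y = x * sigmoid(beta * x); a small sketch of the formula itself, with a fixed beta for illustration:

import numpy
import chainer.functions as F

x = numpy.linspace(-3, 3, 7).astype(numpy.float32)
beta = numpy.float32(1.0)
y = x * F.sigmoid(beta * x)      # Swish with beta = 1 is the SiLU activation
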
Example #29
    def added(self, link):
        if isinstance(link, (L.Deconvolution1D, L.Deconvolution2D,
                             L.Deconvolution3D, L.DeconvolutionND)):
            if self.axis == 1:
                warn("Please pay attention to the axis when "
                     "weight normalization is applied to Deconvolution.")

        with link.init_scope():
            setattr(link, self.g_name, variable.Parameter())
            setattr(link, self.v_name, variable.Parameter())
Example #30
    def __init__(self, hidden_size, bias_init=0., gain_init=1., epsilon=1e-6):
        super(LayerNormalization, self).__init__()
        self.hidden_size = hidden_size
        self.epsilon = epsilon

        with self.init_scope():
            self.bias = variable.Parameter(bias_init)
            self.gain = variable.Parameter(gain_init)

            if hidden_size is not None:
                self._initialize_params(hidden_size)
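
For reference, a plain-numpy sketch of what layer normalization computes once gain and bias are learned (gain and bias are kept scalar here for simplicity, matching the scalar initial values above):

import numpy

def layer_norm(x, gain, bias, eps=1e-6):
    # normalize each row over the hidden dimension, then apply the affine transform
    mean = x.mean(axis=-1, keepdims=True)
    var = x.var(axis=-1, keepdims=True)
    return gain * (x - mean) / numpy.sqrt(var + eps) + bias

x = numpy.random.randn(4, 8).astype(numpy.float32)
y = layer_norm(x, gain=1.0, bias=0.0)      # each row of y now has zero mean and unit variance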