Example 1
    def forward(self, *xs):
        """Applies broadcasted elementwise product.

        Args:
            xs (tuple of ~chainer.Variable): Input variables. Pass a single
                variable if the link has a learnable weight parameter ``W``;
                otherwise pass two, the input and the multiplier.
        """
        axis = self.axis

        # Case of only one argument where W is a learnt parameter.
        if hasattr(self, 'W'):
            if chainer.is_debug():
                assert len(xs) == 1
            x, = xs
            W = self.W
            z = scale.scale(x, W, axis)
        # Case of two arguments where W is given as an argument.
        else:
            if chainer.is_debug():
                assert len(xs) == 2
            x, y = xs
            z = scale.scale(x, y, axis)

        # Forward propagate bias term if given.
        if hasattr(self, 'bias'):
            return self.bias(z)
        else:
            return z
Example 2
    def __call__(self, *xs):
        """Applies broadcasted elementwise product.

        Args:
            xs (tuple of ~chainer.Variable): Input variables. Pass a single
                variable if the link has a learnable weight parameter ``W``;
                otherwise pass two, the input and the multiplier.
        """
        axis = self.axis

        # Case of only one argument where W is a learnt parameter.
        if hasattr(self, 'W'):
            if chainer.is_debug():
                assert len(xs) == 1
            x, = xs
            W = self.W
            z = scale.scale(x, W, axis)
        # Case of two arguments where W is given as an argument.
        else:
            if chainer.is_debug():
                assert len(xs) == 2
            x, y = xs
            z = scale.scale(x, y, axis)

        # Forward propagate bias term if given.
        if hasattr(self, 'bias'):
            return self.bias(z)
        else:
            return z
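
Examples 1 and 2 show the same Scale link forward pass, apparently from different Chainer releases (newer links define forward, older ones __call__). Below is a minimal usage sketch of both calling conventions; the input shapes and W_shape=(3,) are illustrative assumptions, not taken from the source.

import numpy as np
import chainer.links as L

x = np.random.rand(4, 3, 5).astype(np.float32)

# One-argument form: the link owns a learnable W broadcast along `axis`.
scale_w = L.Scale(axis=1, W_shape=(3,))
z = scale_w(x)                   # multiplies x by the learnt W

# Two-argument form: the multiplier is passed in explicitly.
scale_plain = L.Scale(axis=1)    # no W_shape, so no learnable parameter
y = np.random.rand(3).astype(np.float32)
z2 = scale_plain(x, y)

print(z.shape, z2.shape)         # (4, 3, 5) (4, 3, 5)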
Example 3
    def __call__(self, x):
        """Apply layer normalization to given input.

        Args:
            x (~chainer.Variable): Batch vectors.
                Shape of this value must be `(batch_size, unit_size)`,
                e.g., the output of :func:`~chainer.functions.linear`.

        Returns:
            ~chainer.Variable: Output of the layer normalization.

        """
        if self.gamma.data is None:
            # Parameters are created lazily; infer the unit size from the
            # input shape on the first call.
            self._initialize_params(x.size // x.shape[0])

        normalized = self._normalize(x)
        return bias.bias(scale.scale(normalized, self.gamma), self.beta)
Example 4
    def __call__(self, x):
        """Apply layer normalization to given input.

        Args:
            x (~chainer.Variable): Batch vectors.
                Shape of this value must be `(batch_size, unit_size)`,
                e.g., the output of :func:`~chainer.functions.linear`.

        Returns:
            ~chainer.Variable: Output of the layer normalization.

        """
        if self.has_uninitialized_params:
            # Initialize the lazily created parameters on the device that
            # will run the computation, inferring the unit size from x.
            with cuda.get_device_from_id(self._device_id):
                self._initialize_params(x.size // x.shape[0])

        normalized = self._normalize(x)
        return bias.bias(scale.scale(normalized, self.gamma), self.beta)
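
Examples 3 and 4 are the same LayerNormalization forward pass; they differ only in how the lazily created gamma and beta parameters are detected and on which device they are initialized. A minimal usage sketch follows, with an assumed batch size of 8 and unit size of 10.

import numpy as np
import chainer.links as L

ln = L.LayerNormalization()                   # unit size inferred on first call
x = np.random.rand(8, 10).astype(np.float32)  # (batch_size, unit_size)
y = ln(x)
print(y.shape)                                # (8, 10)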