Example 1
    def __call__(self, x, finetune=False):
        if hasattr(self, 'gamma'):
            gamma = self.gamma
        else:
            with cuda.get_device_from_id(self._device_id):
                gamma = variable.Variable(self.xp.ones(
                    self.avg_mean.shape, dtype=x.dtype))
        if hasattr(self, 'beta'):
            beta = self.beta
        else:
            with cuda.get_device_from_id(self._device_id):
                beta = variable.Variable(self.xp.zeros(
                    self.avg_mean.shape, dtype=x.dtype))

        if chainer.configuration.config.train:
            if finetune:
                self.N += 1
                decay = 1. - 1. / self.N
            else:
                decay = self.decay

            func = MultiNodeBatchNormalizationFunction(
                self.comm, self.eps, self.avg_mean, self.avg_var, decay)
            ret = func(x, gamma, beta)

            self.avg_mean[:] = func.running_mean
            self.avg_var[:] = func.running_var
        else:
            # Use running average statistics or fine-tuned statistics.
            mean = variable.Variable(self.avg_mean)
            var = variable.Variable(self.avg_var)
            ret = batch_normalization.fixed_batch_normalization(
                x, gamma, beta, mean, var, self.eps)
        return ret
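A quick numerical aside (plain NumPy, not part of the example): with the finetune schedule decay = 1 - 1/N used above, the usual running-average update reduces to the plain arithmetic mean of the per-batch statistics seen so far.

import numpy as np

batch_means = [np.array([1.0]), np.array([3.0]), np.array([5.0])]
avg_mean = np.zeros(1)
N = 0
for mean in batch_means:
    N += 1
    decay = 1. - 1. / N
    # Same update rule the link applies: avg = decay * avg + (1 - decay) * batch
    avg_mean = decay * avg_mean + (1. - decay) * mean

print(avg_mean)  # [3.] -- the arithmetic mean of the three batch means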
Example 2
    def __call__(self, x, finetune=False):
        if self.gamma is not None:
            gamma = self.gamma
        else:
            with cuda.get_device_from_id(self._device_id):
                gamma = self.xp.ones(self.avg_mean.shape, dtype=x.dtype)

        if self.beta is not None:
            beta = self.beta
        else:
            with cuda.get_device_from_id(self._device_id):
                beta = self.xp.zeros(self.avg_mean.shape, dtype=x.dtype)

        if configuration.config.train:
            if finetune:
                self.N += 1
                decay = 1. - 1. / self.N
            else:
                decay = self.decay

            func = batch_renormalization.BatchRenormalizationFunction(
                self.eps, self.avg_mean, self.avg_var, decay, self.rmax,
                self.dmax)
            ret = func(x, gamma, beta)

            self.avg_mean[:] = func.running_mean
            self.avg_var[:] = func.running_var
        else:
            # Use running average statistics or fine-tuned statistics.
            mean = self.avg_mean
            var = self.avg_var
            ret = batch_normalization.fixed_batch_normalization(
                x, gamma, beta, mean, var, self.eps)
        return ret
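For context on the rmax and dmax arguments, below is a NumPy sketch of the correction described in the Batch Renormalization paper; it is an illustration only, and the exact clipping and gradient handling inside BatchRenormalizationFunction may differ in detail. The values rmax=3 and dmax=5 are arbitrary example settings.

import numpy as np

def batch_renorm_reference(x, gamma, beta, avg_mean, avg_var,
                           rmax=3.0, dmax=5.0, eps=2e-5):
    mu_b = x.mean(axis=0)                       # batch mean
    sigma_b = np.sqrt(x.var(axis=0) + eps)      # batch std
    sigma = np.sqrt(avg_var + eps)              # running std
    r = np.clip(sigma_b / sigma, 1.0 / rmax, rmax)
    d = np.clip((mu_b - avg_mean) / sigma, -dmax, dmax)
    x_hat = (x - mu_b) / sigma_b * r + d        # renormalized activations
    return gamma * x_hat + beta

x = np.random.randn(16, 4).astype(np.float32)
y = batch_renorm_reference(x, np.ones(4), np.zeros(4),
                           avg_mean=np.zeros(4), avg_var=np.ones(4))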
Example 3
    def __call__(self, x, test=False, finetune=False):
        """Invokes the forward propagation of BatchNormalization.

        BatchNormalization accepts additional arguments, which control three
        different running modes.

        Args:
            x (Variable): An input variable.
            test (bool): If ``True``, BatchNormalization runs in testing mode;
                it normalizes the input using precomputed statistics.
            finetune (bool): If ``True``, BatchNormalization runs in finetuning
                mode; it accumulates the input array to compute population
                statistics for normalization, and normalizes the input using
                batch statistics.

        If ``test`` and ``finetune`` are both ``False``, then
        BatchNormalization runs in training mode; it computes moving averages
        of mean and variance for evaluation during training, and normalizes the
        input using batch statistics.

        """
        use_batch_mean = not test or finetune

        if use_batch_mean:
            ret = batch_normalization.batch_normalization(
                x, self.gamma, self.beta, self.eps)
            func = ret.creator
            if finetune:
                self.N += 1
                decay = 1. / self.N
            else:
                decay = self.decay
            m = x.data.size // self.gamma.data.size
            adjust = m / max(m - 1., 1.)  # unbiased estimation

            self.avg_mean = cuda.to_gpu(self.avg_mean) * decay
            func.mean = cuda.to_gpu(func.mean) * (
                1 - decay)  # reuse buffer as a temporary
            self.avg_mean += func.mean
            del func.mean
            self.avg_var = cuda.to_gpu(self.avg_var) * decay
            func.var = cuda.to_gpu(
                func.var) * (1 - decay) * adjust  # reuse buffer as a temporary
            self.avg_var += func.var
            del func.var

#            self.avg_mean *= decay
#            func.mean *= 1 - decay  # reuse buffer as a temporary
#            self.avg_mean += func.mean
#            del func.mean
#            self.avg_var *= decay
#            func.var *= (1 - decay) * adjust  # reuse buffer as a temporary
#            self.avg_var += func.var
#            del func.var
        else:
            mean = variable.Variable(self.avg_mean, volatile='auto')
            var = variable.Variable(self.avg_var, volatile='auto')
            ret = batch_normalization.fixed_batch_normalization(
                x, self.gamma, self.beta, mean, var, self.eps)
        return ret
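As a sanity check on the adjust factor above, the following standalone NumPy snippet (not taken from the example) confirms that multiplying the biased batch variance by m / (m - 1) yields the unbiased, ddof=1 estimate that is folded into avg_var.

import numpy as np

x = np.random.randn(8, 3)            # m = 8 samples per channel
m = x.size // 3                      # mirrors m = x.data.size // gamma.data.size
adjust = m / max(m - 1., 1.)         # unbiased estimation
biased_var = x.var(axis=0)           # divides by m
assert np.allclose(biased_var * adjust, x.var(axis=0, ddof=1))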
Example 4
    def __call__(self, x, test=False, finetune=False):
        """Invokes the forward propagation of BatchNormalization.

        BatchNormalization accepts additional arguments, which control three
        different running modes.

        Args:
            x (Variable): Input variable.
            test (bool): If ``True``, BatchNormalization runs in testing mode;
                it normalizes the input using pre-computed statistics.
            finetune (bool): If ``finetune`` is ``True`` and ``test`` is
                ``False``, BatchNormalization runs in fine-tuning mode; it
                accumulates the input array to compute population statistics
                for normalization, and normalizes the input using batch
                statistics.

        If ``test`` is ``False``, then BatchNormalization runs in training
        mode; it computes moving averages of mean and variance for evaluation
        during training, and normalizes the input using batch statistics.

        """
        if hasattr(self, 'gamma'):
            gamma = self.gamma
        else:
            gamma_ = self.xp.ones(self.avg_mean.shape, dtype=x.dtype)
            if self.device is None:
                gamma = variable.Variable(gamma_, volatile='auto')
            else:
                gamma = variable.Variable(
                    cuda.to_gpu(gamma_, self.device), volatile='auto')
        if hasattr(self, 'beta'):
            beta = self.beta
        else:
            beta_ = self.xp.zeros(self.avg_mean.shape, dtype=x.dtype)
            if self.device is None:
                beta = variable.Variable(beta_, volatile='auto')
            else:
                beta = variable.Variable(
                    cuda.to_gpu(beta_, self.device), volatile='auto')

        if not test:
            if finetune:
                self.N += 1
                decay = 1. - 1. / self.N
            else:
                decay = self.decay

            func = batch_normalization.BatchNormalizationFunction(
                self.eps, self.avg_mean, self.avg_var, True, decay,
                self.use_cudnn)
            ret = func(x, gamma, beta)

            self.avg_mean[:] = func.running_mean
            self.avg_var[:] = func.running_var
        else:
            # Use running average statistics or fine-tuned statistics.
            mean = variable.Variable(self.avg_mean, volatile='auto')
            var = variable.Variable(self.avg_var, volatile='auto')
            ret = batch_normalization.fixed_batch_normalization(
                x, gamma, beta, mean, var, self.eps, self.use_cudnn)
        return ret
Example 5
    def __call__(self, x, test=False, finetune=False):
        """Invokes the forward propagation of BatchNormalization.

        BatchNormalization accepts additional arguments, which control three
        different running modes.

        Args:
            x (Variable): An input variable.
            test (bool): If ``True``, BatchNormalization runs in testing mode;
                it normalizes the input using pre-computed statistics.
            finetune (bool): If ``True``, BatchNormalization runs in
                fine-tuning mode; it accumulates the input array to compute
                population statistics for normalization, and normalizes the
                input using batch statistics.

        If ``test`` and ``finetune`` are both ``False``, then
        BatchNormalization runs in training mode; it computes moving averages
        of mean and variance for evaluation during training, and normalizes the
        input using batch statistics.

        """
        #        use_batch_mean = not test or finetune --------------------------------
        # -----------------------------------------------------------------------------
        use_batch_mean = False

        if hasattr(self, 'gamma'):
            gamma = self.gamma
        else:
            gamma = variable.Variable(self.xp.ones(self.avg_mean.shape,
                                                   dtype=x.dtype),
                                      volatile='auto')
        if hasattr(self, 'beta'):
            beta = self.beta
        else:
            beta = variable.Variable(self.xp.zeros(self.avg_mean.shape,
                                                   dtype=x.dtype),
                                     volatile='auto')

        if use_batch_mean:
            if finetune:
                self.N += 1
                decay = 1. - 1. / self.N
            else:
                decay = self.decay

            func = function_batch_normalization.BatchNormalizationFunction(
                self.eps, self.avg_mean, self.avg_var, True, decay)
            ret = func(x, gamma, beta)

            self.avg_mean = func.running_mean
            self.avg_var = func.running_var

        else:
            # Use running average statistics or fine-tuned statistics.
            mean = variable.Variable(self.avg_mean)  #, volatile='auto')
            var = variable.Variable(self.avg_var)  #, volatile='auto')
            ret = batch_normalization.fixed_batch_normalization(
                x, gamma, beta, mean, var, self.eps)

        return ret
Example 6
def fixed_batch_renormalization(x, gamma, beta, mean, var, eps=2e-5):
    warnings.warn(
        'fixed_batch_renormalization is deprecated. '
        'Use fixed_batch_normalization instead.', DeprecationWarning)
    with configuration.using_config('train', False):
        return batch_normalization.fixed_batch_normalization(
            x, gamma, beta, mean, var, eps)
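The deprecated wrapper simply defers to fixed_batch_normalization. As a reference for what that call computes with the supplied statistics, here is a small NumPy version of the formula (an illustration, not the library code): y = gamma * (x - mean) / sqrt(var + eps) + beta.

import numpy as np

def fixed_bn_reference(x, gamma, beta, mean, var, eps=2e-5):
    # Normalize with the given (running or fine-tuned) statistics.
    return gamma * (x - mean) / np.sqrt(var + eps) + beta

x = np.random.randn(4, 3).astype(np.float32)
mean, var = x.mean(axis=0), x.var(axis=0)
y = fixed_bn_reference(x, np.ones(3), np.zeros(3), mean, var)
print(y.mean(axis=0), y.var(axis=0))  # roughly 0 and 1 per channel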
Example 7
    def __call__(self, x, finetune=False):
        if hasattr(self, 'gamma'):
            gamma = self.gamma
        else:
            with cuda.get_device_from_id(self._device_id):
                gamma = variable.Variable(
                    self.xp.ones(self.avg_mean.shape, dtype=x.dtype))
        if hasattr(self, 'beta'):
            beta = self.beta
        else:
            with cuda.get_device_from_id(self._device_id):
                beta = variable.Variable(
                    self.xp.zeros(self.avg_mean.shape, dtype=x.dtype))

        if chainer.configuration.config.train:
            if finetune:
                self.N += 1
                decay = 1. - 1. / self.N
            else:
                decay = self.decay

            func = MultiNodeBatchNormalizationFunction(self.comm, self.eps,
                                                       self.avg_mean,
                                                       self.avg_var, decay)
            ret = func(x, gamma, beta)

            self.avg_mean[:] = func.running_mean
            self.avg_var[:] = func.running_var
        else:
            # Use running average statistics or fine-tuned statistics.
            mean = variable.Variable(self.avg_mean)
            var = variable.Variable(self.avg_var)
            ret = batch_normalization.fixed_batch_normalization(
                x, gamma, beta, mean, var, self.eps)
        return ret
Example 8
    def forward(self, x, finetune=False):
        if self.gamma is not None:
            gamma = self.gamma
        else:
            with cuda.get_device_from_id(self._device_id):
                gamma = self.xp.ones(
                    self.avg_mean.shape, dtype=x.dtype)

        if self.beta is not None:
            beta = self.beta
        else:
            with cuda.get_device_from_id(self._device_id):
                beta = self.xp.zeros(
                    self.avg_mean.shape, dtype=x.dtype)

        if configuration.config.train:
            if finetune:
                self.N += 1
                decay = 1. - 1. / self.N
            else:
                decay = self.decay

            ret = batch_renormalization.batch_renormalization(
                x, gamma, beta, self.rmax, self.dmax,
                self.eps, self.avg_mean, self.avg_var, decay,
                update_statistics=True)
        else:
            # Use running average statistics or fine-tuned statistics.
            mean = self.avg_mean
            var = self.avg_var
            ret = batch_normalization.fixed_batch_normalization(
                x, gamma, beta, mean, var, self.eps)
        return ret
Example 9
    def forward(self, x, finetune=False):
        if self.gamma is not None:
            gamma = self.gamma
        else:
            with chainer.using_device(self.device):
                gamma = self.xp.ones(
                    self.avg_mean.shape, dtype=x.dtype)

        if self.beta is not None:
            beta = self.beta
        else:
            with chainer.using_device(self.device):
                beta = self.xp.zeros(
                    self.avg_mean.shape, dtype=x.dtype)

        if configuration.config.train:
            if finetune:
                self.N += 1
                decay = 1. - 1. / self.N
            else:
                decay = self.decay

            ret = batch_renormalization.batch_renormalization(
                x, gamma, beta, self.rmax, self.dmax,
                self.eps, self.avg_mean, self.avg_var, decay,
                update_statistics=True)
        else:
            # Use running average statistics or fine-tuned statistics.
            mean = self.avg_mean
            var = self.avg_var
            ret = batch_normalization.fixed_batch_normalization(
                x, gamma, beta, mean, var, self.eps)
        return ret
Example 10
    def __call__(self, x, test=False, finetune=False):
        """Invokes the forward propagation of BatchNormalization.

        BatchNormalization accepts additional arguments, which control three
        different running modes.

        Args:
            x (Variable): An input variable.
            test (bool): If ``True``, BatchNormalization runs in testing mode;
                it normalizes the input using precomputed statistics.
            finetune (bool): If ``True``, BatchNormalization runs in finetuning
                mode; it accumulates the input array to compute population
                statistics for normalization, and normalizes the input using
                batch statistics.

        If ``test`` and ``finetune`` are both ``False``, then
        BatchNormalization runs in training mode; it computes moving averages
        of mean and variance for evaluation during training, and normalizes the
        input using batch statistics.

        """
        use_batch_mean = not test or finetune

        if use_batch_mean:
            ret = batch_normalization.batch_normalization(
                x, self.gamma, self.beta, self.eps)
            func = ret.creator
            if finetune:
                self.N += 1
                decay = 1. / self.N
            else:
                decay = self.decay
            m = x.data.size // self.gamma.data.size
            adjust = m / max(m - 1., 1.)  # unbiased estimation

            self.avg_mean = cuda.to_gpu(self.avg_mean) * decay
            func.mean = cuda.to_gpu(func.mean) * (1 - decay)  # reuse buffer as a temporary
            self.avg_mean += func.mean
            del func.mean
            self.avg_var = cuda.to_gpu(self.avg_var) * decay
            func.var = cuda.to_gpu(func.var) * (1 - decay) * adjust  # reuse buffer as a temporary
            self.avg_var += func.var
            del func.var

#            self.avg_mean *= decay
#            func.mean *= 1 - decay  # reuse buffer as a temporary
#            self.avg_mean += func.mean
#            del func.mean
#            self.avg_var *= decay
#            func.var *= (1 - decay) * adjust  # reuse buffer as a temporary
#            self.avg_var += func.var
#            del func.var
        else:
            mean = variable.Variable(self.avg_mean, volatile='auto')
            var = variable.Variable(self.avg_var, volatile='auto')
            ret = batch_normalization.fixed_batch_normalization(
                x, self.gamma, self.beta, mean, var, self.eps)
        return ret
Example 11
def fixed_batch_renormalization(x, gamma, beta, mean, var, eps=2e-5):
    warnings.warn(
        'fixed_batch_renormalization is deprecated. '
        'Use fixed_batch_normalization instead.',
        DeprecationWarning)
    with configuration.using_config('train', False):
        return batch_normalization.fixed_batch_normalization(
            x, gamma, beta, mean, var, eps
        )
Example 12
    def __call__(self, x, gamma, beta, **kwargs):
        """Invokes the forward propagation of BatchNormalization.

        In training mode, the BatchNormalization computes moving averages of
        mean and variance for evaluation during training, and normalizes the
        input using batch statistics.
        .. warning::
           ``test`` argument is not supported anymore since v2.
           Instead, use ``chainer.using_config('train', train)``.
           See :func:`chainer.using_config`.
        Args:
            x (Variable): Input variable.
            gamma (Variable): Input variable of gamma.
            finetune (bool): If it is in the training mode and ``finetune`` is
                ``True``, BatchNormalization runs in fine-tuning mode; it
                accumulates the input array to compute population statistics
                for normalization, and normalizes the input using batch
                statistics.
        """
        argument.check_unexpected_kwargs(
            kwargs, test='test argument is not supported anymore. '
                         'Use chainer.using_config')
        finetune, = argument.parse_kwargs(kwargs, ('finetune', False))
        with cuda.get_device_from_id(self._device_id):
            _gamma = variable.Variable(self.xp.ones(
                self.avg_mean.shape, dtype=x.dtype))
        with cuda.get_device_from_id(self._device_id):
            _beta = variable.Variable(self.xp.zeros(
                self.avg_mean.shape, dtype=x.dtype))
        if configuration.config.train:
            if finetune:
                self.N += 1
                decay = 1. - 1. / self.N
            else:
                decay = self.decay
            if self.comm is not None:
                func = MultiNodeBatchNormalizationFunction(
                    self.comm, self.eps, self.avg_mean, self.avg_var, decay)
                ret = func(x, _gamma, _beta)
                self.avg_mean[:] = func.running_mean
                self.avg_var[:] = func.running_var
            else:
                ret = chainer.functions.batch_normalization(
                    x, _gamma, _beta, eps=self.eps,
                    running_mean=self.avg_mean, running_var=self.avg_var,
                    decay=decay)
        else:
            # Use running average statistics or fine-tuned statistics.
            mean = variable.Variable(self.avg_mean)
            var = variable.Variable(self.avg_var)
            ret = batch_normalization.fixed_batch_normalization(
                x, _gamma, _beta, mean, var, self.eps)
        shape = ret.shape
        ndim = len(shape)
        gamma = F.broadcast_to(F.reshape(gamma, list(gamma.shape) + [1] * (ndim - len(gamma.shape))), shape)
        beta = F.broadcast_to(F.reshape(beta, list(beta.shape) + [1] * (ndim - len(beta.shape))), shape)
        return gamma * ret + beta
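The tail of this example re-applies externally supplied gamma and beta after the normalization itself. The NumPy snippet below (shapes are assumed for illustration) shows the same reshape-and-broadcast step in isolation.

import numpy as np

ret = np.random.randn(2, 3, 4, 4)    # normalized output, shape (B, C, H, W)
gamma = np.random.randn(2, 3)        # per-sample, per-channel scale
beta = np.random.randn(2, 3)         # per-sample, per-channel shift

pad = (1,) * (ret.ndim - gamma.ndim)                        # trailing singleton axes
gamma_b = np.broadcast_to(gamma.reshape(gamma.shape + pad), ret.shape)
beta_b = np.broadcast_to(beta.reshape(beta.shape + pad), ret.shape)
out = gamma_b * ret + beta_b
print(out.shape)                     # (2, 3, 4, 4)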
Example 13
    def forward(self, x, finetune=False):
        if self.gamma is not None:
            gamma = self.gamma
        else:
            with chainer.using_device(self.device):
                gamma = self.xp.ones(self.avg_mean.shape, dtype=x.dtype)

        if self.beta is not None:
            beta = self.beta
        else:
            with chainer.using_device(self.device):
                beta = self.xp.zeros(self.avg_mean.shape, dtype=x.dtype)

        if configuration.config.train:
            if finetune:
                self.N += 1
                decay = 1. - 1. / self.N
            else:
                decay = self.decay

            avg_mean = self.avg_mean
            avg_var = self.avg_var
            update_statistics = True

            if chainer.config.in_recomputing:
                # Do not update statistics when extra forward computation is
                # called.
                if finetune:
                    self.N -= 1  # Revert the count
                avg_mean = self._prev_avg_mean
                avg_var = self._prev_avg_var
                update_statistics = False
            elif chainer.config._will_recompute:
                self._prev_avg_mean = avg_mean.copy()
                self._prev_avg_var = avg_var.copy()

            ret = batch_renormalization.batch_renormalization(
                x,
                gamma,
                beta,
                self.rmax,
                self.dmax,
                self.eps,
                avg_mean,
                avg_var,
                decay,
                update_statistics=update_statistics)
        else:
            # Use running average statistics or fine-tuned statistics.
            mean = self.avg_mean
            var = self.avg_var
            ret = batch_normalization.fixed_batch_normalization(
                x, gamma, beta, mean, var, self.eps)
        return ret
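The in_recomputing branch exists so that a replayed forward pass (for instance under chainer.functions.forget) does not advance the running statistics a second time. A minimal plain-Python sketch of that bookkeeping, with hypothetical names:

import numpy as np

decay = 0.9
avg_mean = np.zeros(3)
batch_mean = np.ones(3)

prev_avg_mean = avg_mean.copy()                          # snapshot before forward
avg_mean = decay * avg_mean + (1 - decay) * batch_mean   # normal forward: update

# Replayed forward of the same batch: normalize against the snapshot and skip
# the update, so the running average is not advanced twice.
replay_mean = prev_avg_mean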
Example 14
    def __call__(self, x, finetune=False):
        """Invokes the forward propagation of BatchNormalization.

        In training mode, the BatchNormalization computes moving averages of
        mean and variance for evaluation during training, and normalizes the
        input using batch statistics.

        Args:
            x (Variable): Input variable.
            finetune (bool): If it is in the training mode and ``finetune`` is
                ``True``, BatchNormalization runs in fine-tuning mode; it
                accumulates the input array to compute population statistics
                for normalization, and normalizes the input using batch
                statistics.

        """
        if hasattr(self, 'gamma'):
            gamma = self.gamma
        else:
            with cuda.get_device(self._device_id):
                gamma = variable.Variable(self.xp.ones(self.avg_mean.shape,
                                                       dtype=x.dtype),
                                          volatile='auto')
        if hasattr(self, 'beta'):
            beta = self.beta
        else:
            with cuda.get_device(self._device_id):
                beta = variable.Variable(self.xp.zeros(self.avg_mean.shape,
                                                       dtype=x.dtype),
                                         volatile='auto')

        if configuration.config.train:
            if finetune:
                self.N += 1
                decay = 1. - 1. / self.N
            else:
                decay = self.decay

            func = batch_normalization.BatchNormalizationFunction(
                self.eps, self.avg_mean, self.avg_var, decay, self.use_cudnn)
            ret = func(x, gamma, beta)

            self.avg_mean[:] = func.running_mean
            self.avg_var[:] = func.running_var
        else:
            # Use running average statistics or fine-tuned statistics.
            mean = variable.Variable(self.avg_mean, volatile='auto')
            var = variable.Variable(self.avg_var, volatile='auto')
            ret = batch_normalization.fixed_batch_normalization(
                x, gamma, beta, mean, var, self.eps, self.use_cudnn)
        return ret
Example 15
    def forward(self, x, finetune=False):
        if self.gamma is not None:
            gamma = self.gamma
        else:
            with chainer.using_device(self.device):
                gamma = self.xp.ones(
                    self.avg_mean.shape, dtype=x.dtype)

        if self.beta is not None:
            beta = self.beta
        else:
            with chainer.using_device(self.device):
                beta = self.xp.zeros(
                    self.avg_mean.shape, dtype=x.dtype)

        if configuration.config.train:
            if finetune:
                self.N += 1
                decay = 1. - 1. / self.N
            else:
                decay = self.decay

            avg_mean = self.avg_mean
            avg_var = self.avg_var

            if chainer.config.in_recomputing:
                # Do not update statistics when extra forward computation is
                # called.
                if finetune:
                    self.N -= 1  # Revert the count
                avg_mean = self.xp.zeros_like(self.avg_mean)
                avg_var = self.xp.zeros_like(self.avg_var)

            ret = batch_renormalization.batch_renormalization(
                x, gamma, beta, self.rmax, self.dmax,
                self.eps, avg_mean, avg_var, decay,
                update_statistics=True)
        else:
            # Use running average statistics or fine-tuned statistics.
            mean = self.avg_mean
            var = self.avg_var
            ret = batch_normalization.fixed_batch_normalization(
                x, gamma, beta, mean, var, self.eps)
        return ret
Example 16
    def __call__(self, x, finetune=False):
        if hasattr(self, 'gamma'):
            gamma = self.gamma
        else:
            with cuda.get_device_from_id(self._device_id):
                gamma = variable.Variable(
                    self.xp.ones(self.avg_mean.shape,
                                 dtype=self._highprec_dtype))
        if hasattr(self, 'beta'):
            beta = self.beta
        else:
            with cuda.get_device_from_id(self._device_id):
                beta = variable.Variable(
                    self.xp.zeros(self.avg_mean.shape,
                                  dtype=self._highprec_dtype))

        if chainer.configuration.config.train:
            if finetune:
                self.N += 1
                decay = 1. - 1. / self.N
            else:
                decay = self.decay

            func = batch_normalization.BatchNormalization(
                self.eps,
                self.avg_mean,
                self.avg_var,
                decay,
                impl_selector=(
                    chainermn_batch_normalization.MultiNodeBNImplSelector(
                        self.comm, self._communication_backend)))

            ret = func.apply((x, gamma, beta))[0]

            self.avg_mean[:] = func.running_mean
            self.avg_var[:] = func.running_var
        else:
            # Use running average statistics or fine-tuned statistics.
            mean = variable.Variable(self.avg_mean)
            var = variable.Variable(self.avg_var)
            ret = batch_normalization.fixed_batch_normalization(
                x, gamma, beta, mean, var, self.eps)
        return ret
Example 17
    def __call__(self, x, **kwargs):
        """__call__(self, x, finetune=False)

        Invokes the forward propagation of BatchNormalization.

        In training mode, the BatchNormalization computes moving averages of
        mean and variance for evaluation during training, and normalizes the
        input using batch statistics.

        .. warning::

           ``test`` argument is not supported anymore since v2.
           Instead, use ``chainer.using_config('train', train)``.
           See :func:`chainer.using_config`.

        Args:
            x (Variable): Input variable.
            finetune (bool): If it is in the training mode and ``finetune`` is
                ``True``, BatchNormalization runs in fine-tuning mode; it
                accumulates the input array to compute population statistics
                for normalization, and normalizes the input using batch
                statistics.

        """
        argument.check_unexpected_kwargs(
            kwargs, test='test argument is not supported anymore. '
            'Use chainer.using_config')
        finetune, = argument.parse_kwargs(kwargs, ('finetune', False))

        if hasattr(self, 'gamma'):
            gamma = self.gamma
        else:
            with cuda.get_device_from_id(self._device_id):
                gamma = variable.Variable(self.xp.ones(
                    self.avg_mean.shape, dtype=x.dtype))
        if hasattr(self, 'beta'):
            beta = self.beta
        else:
            with cuda.get_device_from_id(self._device_id):
                beta = variable.Variable(self.xp.zeros(
                    self.avg_mean.shape, dtype=x.dtype))

        if configuration.config.train:
            if finetune:
                self.N += 1
                decay = 1. - 1. / self.N
            else:
                decay = self.decay

            func = batch_normalization.BatchNormalizationFunction(
                self.eps, self.avg_mean, self.avg_var, decay)
            ret = func(x, gamma, beta)

            self.avg_mean[:] = func.running_mean
            self.avg_var[:] = func.running_var
        else:
            # Use running average statistics or fine-tuned statistics.
            mean = variable.Variable(self.avg_mean)
            var = variable.Variable(self.avg_var)
            ret = batch_normalization.fixed_batch_normalization(
                x, gamma, beta, mean, var, self.eps)
        return ret
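Since the test keyword is no longer supported, switching such a link between batch statistics and the stored running averages is done through the global train configuration flag. A usage sketch, assuming Chainer v2 or later and the standard chainer.links.BatchNormalization:

import numpy as np
import chainer
import chainer.links as L

bn = L.BatchNormalization(3)
x = np.random.randn(5, 3).astype(np.float32)

y_train = bn(x)                        # training mode: uses batch statistics
with chainer.using_config('train', False):
    y_test = bn(x)                     # test mode: uses the running averages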
Example 18
    def __call__(self, x, test=False, finetune=False):
        """Invokes the forward propagation of BatchNormalization.

        BatchNormalization accepts additional arguments, which control three
        different running modes.

        Args:
            x (Variable): An input variable.
            test (bool): If ``True``, BatchNormalization runs in testing mode;
                it normalizes the input using pre-computed statistics.
            finetune (bool): If ``True``, BatchNormalization runs in
                fine-tuning mode; it accumulates the input array to compute
                population statistics for normalization, and normalizes the
                input using batch statistics.

        If ``test`` and ``finetune`` are both ``False``, then
        BatchNormalization runs in training mode; it computes moving averages
        of mean and variance for evaluation during training, and normalizes the
        input using batch statistics.

        """
        use_batch_mean = not test or finetune

        if hasattr(self, 'gamma'):
            gamma = self.gamma
        else:
            gamma = variable.Variable(self.xp.ones(
                self.avg_mean.shape, dtype=x.data.dtype), volatile='auto')
        if hasattr(self, 'beta'):
            beta = self.beta
        else:
            beta = variable.Variable(self.xp.zeros(
                self.avg_mean.shape, dtype=x.data.dtype), volatile='auto')

        if use_batch_mean:
            func = batch_normalization.BatchNormalizationFunction(self.eps)
            ret = func(x, gamma, beta)

            if finetune:
                self.N += 1
                decay = 1. - 1. / self.N
            else:
                decay = self.decay

            with cuda.get_device(x.data):
                m = x.data.size // gamma.data.size
                adjust = m / max(m - 1., 1.)  # unbiased estimation
                self.avg_mean *= decay
                func.mean *= 1 - decay  # reuse buffer as a temporary
                self.avg_mean += func.mean
                del func.mean
                self.avg_var *= decay
                func.var *= (1 - decay) * adjust  # reuse buffer as a temporary
                self.avg_var += func.var
                del func.var
        else:
            mean = variable.Variable(self.avg_mean, volatile='auto')
            var = variable.Variable(self.avg_var, volatile='auto')
            ret = batch_normalization.fixed_batch_normalization(
                x, gamma, beta, mean, var, self.eps)
        return ret
Example 19
    def __call__(self, x, **kwargs):
        """__call__(self, x, finetune=False)

        Invokes the forward propagation of BatchNormalization.

        In training mode, the BatchNormalization computes moving averages of
        mean and variance for evaluation during training, and normalizes the
        input using batch statistics.

        .. warning::

           ``test`` argument is not supported anymore since v2.
           Instead, use ``chainer.using_config('train', train)``.
           See :func:`chainer.using_config`.

        Args:
            x (Variable): Input variable.
            finetune (bool): If it is in the training mode and ``finetune`` is
                ``True``, BatchNormalization runs in fine-tuning mode; it
                accumulates the input array to compute population statistics
                for normalization, and normalizes the input using batch
                statistics.

        """
        argument.check_unexpected_kwargs(
            kwargs,
            test='test argument is not supported anymore. '
            'Use chainer.using_config')
        finetune, = argument.parse_kwargs(kwargs, ('finetune', False))

        if hasattr(self, 'gamma'):
            gamma = self.gamma
        else:
            with cuda.get_device_from_id(self._device_id):
                gamma = variable.Variable(
                    self.xp.ones(self.avg_mean.shape, dtype=x.dtype))
        if hasattr(self, 'beta'):
            beta = self.beta
        else:
            with cuda.get_device_from_id(self._device_id):
                beta = variable.Variable(
                    self.xp.zeros(self.avg_mean.shape, dtype=x.dtype))

        if configuration.config.train:
            if finetune:
                self.N += 1
                decay = 1. - 1. / self.N
            else:
                decay = self.decay

            func = batch_normalization.BatchNormalizationFunction(
                self.eps, self.avg_mean, self.avg_var, decay)
            ret = func(x, gamma, beta)

            self.avg_mean[:] = func.running_mean
            self.avg_var[:] = func.running_var
        else:
            # Use running average statistics or fine-tuned statistics.
            mean = variable.Variable(self.avg_mean)
            var = variable.Variable(self.avg_var)
            ret = batch_normalization.fixed_batch_normalization(
                x, gamma, beta, mean, var, self.eps)
        return ret
Example 20
    def __call__(self, x, test=False, finetune=False):
        """Invokes the forward propagation of BatchNormalization.

        BatchNormalization accepts additional arguments, which control three
        different running modes.

        Args:
            x (Variable): An input variable.
            test (bool): If ``True``, BatchNormalization runs in testing mode;
                it normalizes the input using pre-computed statistics.
            finetune (bool): If ``True``, BatchNormalization runs in
                fine-tuning mode; it accumulates the input array to compute
                population statistics for normalization, and normalizes the
                input using batch statistics.

        If ``test`` and ``finetune`` are both ``False``, then
        BatchNormalization runs in training mode; it computes moving averages
        of mean and variance for evaluation during training, and normalizes the
        input using batch statistics.

        """
        #        use_batch_mean = not test or finetune --------------------------------
        # -----------------------------------------------------------------------------
        use_batch_mean = False  #True
        # Set this to False so that the fixed batch normalization branch below
        # is called. Otherwise, mean and std change from run to run (during
        # training), so they cannot be computed at inference time...

        if hasattr(self, 'gamma'):
            gamma = self.gamma
        else:
            gamma = variable.Variable(self.xp.ones(self.avg_mean.shape,
                                                   dtype=x.dtype),
                                      volatile='auto')
        if hasattr(self, 'beta'):
            beta = self.beta
        else:
            beta = variable.Variable(self.xp.zeros(self.avg_mean.shape,
                                                   dtype=x.dtype),
                                     volatile='auto')

# Debug printing routine
#        print("gamma in link")
#        print(gamma.data)
#        print("beta in link")
#        print(beta.data)

        if use_batch_mean:
            #            print("use batch mean")
            if finetune:
                self.N += 1
                decay = 1. - 1. / self.N
            else:
                decay = self.decay

#            print("self.avg_mean")
#            print(self.avg_mean)

            func = function_batch_normalization.BatchNormalizationFunction(
                self.eps, self.avg_mean, self.avg_var, True, decay)
            #            func = function_batch_normalization.BatchNormalizationFunction(
            #                self.eps, self.avg_mean, self.avg_var, False, decay)
            ret = func(x, gamma, beta)

            self.avg_mean = func.running_mean
            self.avg_var = func.running_var

#            print("func.running_mean")
#            print(func.running_mean)

        else:
            #            print("use running average(fixed batch normalization)")
            # Use running average statistics or fine-tuned statistics.
            mean = variable.Variable(self.avg_mean, volatile='auto')
            var = variable.Variable(self.avg_var, volatile='auto')
            ret = batch_normalization.fixed_batch_normalization(
                x, gamma, beta, mean, var, self.eps)

#            print("mean")
#            print(self.avg_mean)
#            print("var")
#            print(self.avg_var)

        return ret
Example 21
    def __call__(self, x, test=False, finetune=False):
        """Invokes the forward propagation of BatchNormalization.

        BatchNormalization accepts additional arguments, which control three
        different running modes.

        Args:
            x (Variable): An input variable.
            test (bool): If ``True``, BatchNormalization runs in testing mode;
                it normalizes the input using pre-computed statistics.
            finetune (bool): If ``True``, BatchNormalization runs in
                fine-tuning mode; it accumulates the input array to compute
                population statistics for normalization, and normalizes the
                input using batch statistics.

        If ``test`` and ``finetune`` are both ``False``, then
        BatchNormalization runs in training mode; it computes moving averages
        of mean and variance for evaluation during training, and normalizes the
        input using batch statistics.

        """
        use_batch_mean = not test or finetune

        if hasattr(self, 'gamma'):
            gamma = self.gamma
        else:
            gamma = variable.Variable(self.xp.ones(self.avg_mean.shape,
                                                   dtype=x.data.dtype),
                                      volatile='auto')
        if hasattr(self, 'beta'):
            beta = self.beta
        else:
            beta = variable.Variable(self.xp.zeros(self.avg_mean.shape,
                                                   dtype=x.data.dtype),
                                     volatile='auto')

        if use_batch_mean:
            func = batch_normalization.BatchNormalizationFunction(self.eps)
            ret = func(x, gamma, beta)

            if finetune:
                self.N += 1
                decay = 1. - 1. / self.N
            else:
                decay = self.decay

            with cuda.get_device(x.data):
                m = x.data.size // gamma.data.size
                adjust = m / max(m - 1., 1.)  # unbiased estimation
                self.avg_mean *= decay
                func.mean *= 1 - decay  # reuse buffer as a temporary
                self.avg_mean += func.mean
                del func.mean
                self.avg_var *= decay
                func.var *= (1 - decay) * adjust  # reuse buffer as a temporary
                self.avg_var += func.var
                del func.var
        else:
            mean = variable.Variable(self.avg_mean, volatile='auto')
            var = variable.Variable(self.avg_var, volatile='auto')
            ret = batch_normalization.fixed_batch_normalization(
                x, gamma, beta, mean, var, self.eps)
        return ret
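Taken together, the examples follow one pattern: during training, normalize with batch statistics and fold them into running averages; at test time, normalize with the stored averages. A compact NumPy walk-through of that pattern, illustrative only:

import numpy as np

eps, decay = 2e-5, 0.9
avg_mean, avg_var = np.zeros(3), np.ones(3)

def train_step(x):
    global avg_mean, avg_var
    mean, var = x.mean(axis=0), x.var(axis=0)
    m = x.shape[0]
    adjust = m / max(m - 1., 1.)                 # unbiased estimation
    avg_mean = decay * avg_mean + (1 - decay) * mean
    avg_var = decay * avg_var + (1 - decay) * adjust * var
    return (x - mean) / np.sqrt(var + eps)       # normalize with batch stats

def test_step(x):
    return (x - avg_mean) / np.sqrt(avg_var + eps)   # normalize with running stats

for _ in range(100):
    train_step(np.random.randn(32, 3) * 2.0 + 1.0)
print(avg_mean, avg_var)   # approach the data's mean (about 1) and variance (about 4)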