Example #1
    def norm_factor_gradient(self, variances):
        """Compute normalization factor and its gradient.

        Compute normalization factor given current variance
        and dimensionality.

        Parameters
        ----------
        variances : array-like, shape=[n]
            Value of variance.

        Returns
        -------
        norm_factor : array-like, shape=[n]
            Normalization factor.
        norm_factor_gradient : array-like, shape=[n]
            Gradient of the normalization factor.
        """
        variances = gs.transpose(gs.to_ndarray(variances, to_ndim=2))
        dim_range = gs.arange(0, self.dim, 1.0)
        alpha = self._compute_alpha(dim_range)

        binomial_coefficient = gs.ones(self.dim)
        binomial_coefficient[1:] = (self.dim - dim_range[1:]) / dim_range[1:]
        binomial_coefficient = gs.cumprod(binomial_coefficient)

        beta = ((-gs.ones(self.dim)) ** dim_range) * binomial_coefficient

        sigma_repeated = gs.repeat(variances, self.dim, -1)
        prod_alpha_sigma = gs.einsum("ij,j->ij", sigma_repeated, alpha)
        term_2 = gs.exp((prod_alpha_sigma) ** 2) * (1 + gs.erf(prod_alpha_sigma))
        term_1 = gs.sqrt(gs.pi / 2.0) * (1.0 / (2 ** (self.dim - 1)))
        term_2 = gs.einsum("ij,j->ij", term_2, beta)
        norm_factor = term_1 * variances * gs.sum(term_2, axis=-1, keepdims=True)
        grad_term_1 = 1 / variances

        grad_term_21 = 1 / gs.sum(term_2, axis=-1, keepdims=True)

        grad_term_211 = (
            gs.exp((prod_alpha_sigma) ** 2)
            * (1 + gs.erf(prod_alpha_sigma))
            * gs.einsum("ij,j->ij", sigma_repeated, alpha**2)
            * 2
        )

        grad_term_212 = gs.repeat(
            gs.expand_dims((2 / gs.sqrt(gs.pi)) * alpha, axis=0),
            variances.shape[0],
            axis=0,
        )

        grad_term_22 = grad_term_211 + grad_term_212
        grad_term_22 = gs.einsum("ij, j->ij", grad_term_22, beta)
        grad_term_22 = gs.sum(grad_term_22, axis=-1, keepdims=True)

        norm_factor_gradient = grad_term_1 + (grad_term_21 * grad_term_22)

        return gs.squeeze(norm_factor), gs.squeeze(norm_factor_gradient)
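In the gradient routine above, the cumulative product of the ratios (self.dim - k) / k reproduces the binomial coefficients C(dim - 1, k) without an explicit combinatorics call. A minimal standalone check of that identity, written in plain NumPy/SciPy rather than the gs backend (illustration only, not library code):

import numpy as np
from scipy.special import comb

dim = 5
dim_range = np.arange(0, dim, 1.0)

# Same construction as above: ratio (dim - k) / k, then a cumulative product.
ratios = np.ones(dim)
ratios[1:] = (dim - dim_range[1:]) / dim_range[1:]
binomial_coefficient = np.cumprod(ratios)

# Entry k equals C(dim - 1, k): [1, 4, 6, 4, 1] for dim = 5.
assert np.allclose(binomial_coefficient, comb(dim - 1, np.arange(dim)))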
Example #2
 def summand(k):
     exp_arg = -((dim - 1 - 2 * k)**2) / 2 / variances
     erf_arg_1 = (dim - 1 - 2 * k) * 1j / gs.sqrt(2 * variances)
     erf_arg_2 = (gs.pi * variances -
                  (dim - 1 - 2 * k) * 1j) / gs.sqrt(2 * variances)
     sign = (-1.0)**k
     comb = gs.comb(dim - 1, k)
     erf_terms = gs.imag(gs.erf(erf_arg_2) + gs.erf(erf_arg_1))
     return sign * comb * gs.exp(exp_arg) * erf_terms
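The summand mixes real exponentials with error functions of complex arguments. Below is a self-contained NumPy/SciPy version of the same expression, under the assumption that gs.erf and gs.comb mirror scipy.special.erf (which accepts complex input) and scipy.special.comb; the dim and variances values are made up for illustration:

import numpy as np
from scipy.special import comb, erf

dim = 4                                 # illustrative value
variances = np.array([0.5, 1.0, 2.0])   # illustrative values

def summand(k):
    exp_arg = -((dim - 1 - 2 * k) ** 2) / 2 / variances
    erf_arg_1 = (dim - 1 - 2 * k) * 1j / np.sqrt(2 * variances)
    erf_arg_2 = (np.pi * variances -
                 (dim - 1 - 2 * k) * 1j) / np.sqrt(2 * variances)
    sign = (-1.0) ** k
    # scipy.special.erf handles complex arguments, so taking the
    # imaginary part of the sum is well defined.
    erf_terms = np.imag(erf(erf_arg_2) + erf(erf_arg_1))
    return sign * comb(dim - 1, k) * np.exp(exp_arg) * erf_terms

print(summand(0))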
Example #3
 def summand(k):
     exp_arg = -((dim - 1 - 2 * k)**2) / 2 / variances
     erf_arg_2 = (gs.pi * variances -
                  (dim - 1 - 2 * k) * 1j) / gs.sqrt(2 * variances)
     sign = (-1.0)**k
     comb_2 = gs.comb(dim - 1, k)
     return sign * comb_2 * gs.exp(exp_arg) * gs.real(gs.erf(erf_arg_2))
Example #4
    def _normalization_factor_odd_dim(self, variances):
        """Compute the normalization factor - odd dimension."""
        dim = self.dim
        half_dim = int((dim + 1) / 2)
        area = 2 * gs.pi**half_dim / math.factorial(half_dim - 1)
        comb = gs.comb(dim - 1, half_dim - 1)

        erf_arg = gs.sqrt(variances / 2) * gs.pi
        first_term = (area / (2**dim - 1) * comb *
                      gs.sqrt(gs.pi / (2 * variances)) * gs.erf(erf_arg))

        def summand(k):
            exp_arg = -((dim - 1 - 2 * k)**2) / 2 / variances
            erf_arg_2 = (gs.pi * variances -
                         (dim - 1 - 2 * k) * 1j) / gs.sqrt(2 * variances)
            sign = (-1.0)**k
            comb_2 = gs.comb(dim - 1, k)
            return sign * comb_2 * gs.exp(exp_arg) * gs.real(gs.erf(erf_arg_2))

        if half_dim > 2:
            sum_term = gs.sum(
                gs.stack([summand(k) for k in range(half_dim - 2)]))
        else:
            sum_term = summand(0)
        coef = area / 2 / erf_arg * gs.pi**0.5 * (-1.0)**(half_dim - 1)

        return first_term + coef / 2**(dim - 2) * sum_term
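As a cross-check on the closed form above, the normalization constant can also be obtained by direct quadrature of its defining integral. The sketch below is not library code; it assumes a spherical normal density proportional to exp(-variance_param * d(x, mu)**2 / 2) on the sphere S^dim, with the variances argument playing the role of variance_param, which is an interpretation on my part rather than something stated in the snippet:

import math
from scipy.integrate import quad

def normalization_integral(dim, variance_param):
    # Surface area of the unit (dim - 1)-sphere, which bounds geodesic spheres in S^dim.
    area = 2 * math.pi ** (dim / 2) / math.gamma(dim / 2)
    integrand = lambda theta: (
        math.exp(-variance_param * theta ** 2 / 2)
        * math.sin(theta) ** (dim - 1))
    value, _ = quad(integrand, 0.0, math.pi)
    return area * value

print(normalization_integral(dim=3, variance_param=1.5))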
Example #5
    def normalization_factor(self, variances):
        """Return normalization factor.

        Parameters
        ----------
        variances : array-like, shape=[n,]
            Array of equally spaced values of the variance.

        Returns
        -------
        norm_func : array-like, shape=[n,]
            Normalization factor for all given variances.
        """
        n_samples = variances.shape[0]

        expand_variances = gs.expand_dims(variances, axis=0)
        expand_variances = gs.repeat(expand_variances, self.dim, axis=0)

        dim_range = gs.arange(self.dim)
        dim_range[0] = 1
        n_fact = dim_range.prod()

        k_fact = gs.concatenate([
            gs.expand_dims(dim_range[:i].prod(), 0)
            for i in range(1, dim_range.shape[0] + 1)
        ], 0)

        nmk_fact = gs.flip(k_fact, 0)

        binomial_coefficient = n_fact / (k_fact * nmk_fact)

        binomial_coefficient = gs.expand_dims(binomial_coefficient, -1)
        binomial_coefficient = gs.repeat(binomial_coefficient,
                                         n_samples,
                                         axis=1)

        range_ = gs.expand_dims(gs.arange(self.dim), -1)
        range_ = gs.repeat(range_, n_samples, axis=1)

        ones_ = gs.expand_dims(gs.ones(self.dim), -1)
        ones_ = gs.repeat(ones_, n_samples, axis=1)

        alternate_neg = (-ones_)**(range_)

        erf_arg = ((
            (self.dim - 1) - 2 * range_) * expand_variances) / gs.sqrt(2)
        exp_arg = erf_arg ** 2
        norm_func_1 = (1 + gs.erf(erf_arg)) * gs.exp(exp_arg)
        norm_func_2 = binomial_coefficient * norm_func_1
        norm_func_3 = alternate_neg * norm_func_2

        norm_func = NORMALIZATION_FACTOR_CST * variances * \
            norm_func_3.sum(0) * (1 / (2 ** (self.dim - 1)))

        return norm_func
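Terms of the form exp((a * sigma)**2) * (1 + erf(a * sigma)) are what comes out of expanding sinh(r)**(dim - 1) inside a Gaussian integral, so this factor appears to target a Riemannian normal distribution on hyperbolic space, with variances acting as the standard deviation sigma; that reading is an assumption. Under it, a direct quadrature of the defining integral gives an independent reference value (illustration only, moderate sigma, not library code):

import math
import numpy as np
from scipy.integrate import quad

def hyperbolic_normalization_integral(dim, sigma):
    # Z(sigma) = Area(S^{dim-1}) * integral over r of
    # exp(-r**2 / (2 * sigma**2)) * sinh(r)**(dim - 1).
    area = 2 * math.pi ** (dim / 2) / math.gamma(dim / 2)
    integrand = lambda r: (
        math.exp(-r ** 2 / (2 * sigma ** 2))
        * math.sinh(r) ** (dim - 1))
    value, _ = quad(integrand, 0.0, np.inf)
    return area * value

print(hyperbolic_normalization_integral(dim=2, sigma=0.8))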
Example #6
    def weighted_gmm_pdf(mixture_coefficients,
                         mesh_data,
                         means,
                         variances,
                         metric):
        """Return the probability density function of a GMM.

        Parameters
        ----------
        mixture_coefficients : array-like, shape=[n_gaussians,]
            Coefficients of the Gaussian mixture model.
        mesh_data : array-like, shape=[n_precision, dim]
            Points at which the GMM probability density is computed.
        means : array-like, shape=[n_gaussians, dim]
            Means of each component of the GMM.
        variances : array-like, shape=[n_gaussians,]
            Variances of each component of the GMM.
        metric : RiemannianMetric
            Metric object whose dist_broadcast method is used to compute
            the distances between the mesh points and the means.

        Returns
        -------
        weighted_pdf : array-like, shape=[n_precision, n_gaussians,]
            Probability density function computed for each point of
            the mesh data, for each component of the GMM.
        """
        distance_to_mean = metric.dist_broadcast(mesh_data, means)

        variances_units = gs.expand_dims(variances, 0)
        variances_units = gs.repeat(
            variances_units, distance_to_mean.shape[0], axis=0)

        distribution_normal = gs.exp(
            -(distance_to_mean ** 2) / (2 * variances_units ** 2))

        zeta_sigma = PI_2_3 * variances
        zeta_sigma = zeta_sigma * gs.exp(variances ** 2 / 2) * gs.erf(
            variances / gs.sqrt(2))

        result_num = gs.expand_dims(mixture_coefficients, 0)
        result_num = gs.repeat(
            result_num, len(distribution_normal), axis=0)
        result_num = result_num * distribution_normal

        result_denum = gs.expand_dims(zeta_sigma, 0)
        result_denum = gs.repeat(
            result_denum, len(distribution_normal), axis=0)

        weighted_pdf = result_num / result_denum

        return weighted_pdf
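weighted_gmm_pdf only relies on metric through its dist_broadcast method, which must return a [n_precision, n_gaussians] distance matrix. Below is a hypothetical stand-in that satisfies this contract with plain Euclidean distances, useful for checking shapes before plugging in a real geomstats metric; all names and values are illustrative, not from the library:

import numpy as np

class EuclideanMetricStub:
    """Minimal stand-in exposing the dist_broadcast contract used above."""

    @staticmethod
    def dist_broadcast(points_a, points_b):
        # Pairwise Euclidean distances, shape [len(points_a), len(points_b)].
        diff = points_a[:, None, :] - points_b[None, :, :]
        return np.linalg.norm(diff, axis=-1)

mesh_data = np.random.rand(200, 2)           # [n_precision, dim]
means = np.random.rand(3, 2)                 # [n_gaussians, dim]
mixture_coefficients = np.full(3, 1.0 / 3)   # [n_gaussians]
variances = np.full(3, 0.3)                  # [n_gaussians]

distances = EuclideanMetricStub.dist_broadcast(mesh_data, means)
print(distances.shape)  # (200, 3), the shape weighted_gmm_pdf expects internally

With a geomstats backend and a real metric in place of the stub, arrays of these shapes map directly onto the parameters documented above.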