Example 1
    def _loss(self, yt: np.ndarray, yp: np.ndarray) -> np.ndarray:
        """
        Calculate the per-observation loss as a function of `yt` and `yp`.

        Overrides BaseLoss._loss.
        """
        # scipy's betainc is the *regularized* incomplete beta function, so
        # multiplying by beta_function(a, b) below recovers the unregularized
        # incomplete beta evaluated between yp and yt.
        c1 = betainc(self.alpha, self.beta, yt) - betainc(
            self.alpha, self.beta, yp)
        c2 = betainc(self.alpha + 1.0, self.beta, yt) - betainc(
            self.alpha + 1.0, self.beta, yp)
        b1 = beta_function(self.alpha, self.beta)
        b2 = beta_function(self.alpha + 1.0, self.beta)
        return (yt * c1 * b1 - c2 * b2) * self.scale
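The snippet relies on the fact that scipy.special.betainc is the regularized incomplete beta, so scaling it by B(a, b) yields the unregularized form. A standalone sanity check of that identity (a sketch, not part of the original class):

import numpy as np
from scipy.integrate import quad
from scipy.special import betainc, beta as beta_function

a, b, x = 2.5, 1.5, 0.3
# Unregularized incomplete beta via direct numerical integration.
direct, _ = quad(lambda t: t ** (a - 1.0) * (1.0 - t) ** (b - 1.0), 0.0, x)
# Same quantity via the regularized incomplete beta scaled by B(a, b).
scaled = betainc(a, b, x) * beta_function(a, b)
assert np.isclose(direct, scaled)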
Example 2
    def beta_callback(
            alpha: float,
            beta: float,
            eps: float = 1e-10) -> Callable[[np.ndarray], np.ndarray]:
        """
        Compute the beta callback function for quasi-deviance.

        Parameters
        ----------
        alpha: float
            Value of alpha in beta loss denominator, mu ** (1 - alpha)

        beta: float
            Value of beta in beta loss denominator, (1 - mu) ** (1 - beta)

        eps: float
            Small value to use in log calculations to avoid numerical error

        Returns
        -------
        callable
            A callable that takes an np.ndarray and returns an np.ndarray after
            calculating the beta quasi-deviance denominator
        """
        scale = 1.0 / beta_function(alpha, beta)

        def vt_callback(yp: np.ndarray) -> np.ndarray:
            # B(alpha, beta) * yp**(1 - alpha) * (1 - yp)**(1 - beta),
            # evaluated in log space; `eps` keeps the logs finite at the
            # boundaries yp == 0 and yp == 1.
            return np.exp(-np.log(scale) + (1.0 - alpha) * np.log(yp + eps) +
                          (1.0 - beta) * np.log(1.0 - yp + eps))

        return vt_callback
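A minimal usage sketch of beta_callback. In the source it sits inside a class, presumably as a static method; here it is assumed to be callable directly, with beta_function bound to scipy.special.beta as in the surrounding examples:

import numpy as np
from scipy.special import beta as beta_function

vt = beta_callback(alpha=2.0, beta=3.0)
mu = np.array([0.1, 0.5, 0.9])
# Quasi-deviance denominator B(alpha, beta) * mu**(1 - alpha) * (1 - mu)**(1 - beta)
print(vt(mu))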
Example 3
    def _sample_b(self, word_index):
        # Row sums of b with column `word_index` excluded.
        b_not_v = self.b_sum_ax1 - self.b[:, word_index]

        # Guard against zero counts before scaling by beta_0.
        b_not_v[b_not_v == 0] += self.delta_0
        b_not_v_beta = b_not_v * self.beta_0

        # Bernoulli posterior odds for b[:, word_index] = 1 versus 0.
        num_a = b_not_v_beta + np.sum(self.n, axis=1)
        num_b = self.beta_0
        num = beta_function(num_a, num_b)
        denom = beta_function(b_not_v_beta, self.beta_0)
        activation = sigmoid(self.pi[:, word_index])
        p_1 = num * activation / denom
        p_0 = 1 - activation
        p = p_1 / (p_1 + p_0)

        # Resample the column and keep the cached row sums consistent.
        # Note that `|=` only ever sets bits: entries already equal to 1
        # are never flipped back to 0 by this update.
        self.b_sum_ax1 -= self.b[:, word_index]
        self.b[:, word_index] |= np.random.binomial(1, p)
        self.b_sum_ax1 += self.b[:, word_index]
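The sigmoid helper is not part of the excerpt; a standard choice (an assumption about the original module) is SciPy's numerically stable logistic function:

from scipy.special import expit as sigmoid  # sigmoid(x) = 1 / (1 + exp(-x))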
Example 4
def betabinom_pmf(k, n, a, b):
    """Probability mass function for a beta binomial distribution

    Parameters
    ----------
    k : int
        number of successes
    n : int
        number of trials
    a : float
        first (positive) shape parameter
    b : float
        second (positive) shape parameter

    Returns
    -------
    float
        the probability mass
    """

    return comb(n, k) * beta_function(k + a, n - k + b) / beta_function(a, b)
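The formula is the standard beta-binomial PMF, C(n, k) * B(k + a, n - k + b) / B(a, b). A quick standalone check against SciPy's reference implementation (the imports are assumptions about what the original module uses):

import numpy as np
from scipy.special import comb, beta as beta_function
from scipy.stats import betabinom

k, n, a, b = 3, 10, 2.0, 5.0
assert np.isclose(betabinom_pmf(k, n, a, b), betabinom.pmf(k, n, a, b))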
Example 5
    def _coeff(self, powers):
        """
        @returns Coefficient of a term in the multinomial formula
        """
        # Number of non-zero powers.
        x = len([p for p in powers if p != 0])
        y = self.beta - x + 1
        # gamma(x) / B(x, y) == gamma(x + y) / gamma(y), i.e. the falling
        # factorial beta * (beta - 1) * ... * (beta - x + 1).
        falling_factorial = gamma(x) / beta_function(x, y)

        # F counts the permutations of items that share the same power.
        count = Counter(powers).values()
        F = prod([factorial(c) for c in count])

        # Multinomial coefficient q! / (p1! * p2! * ...), built up from
        # successive binomial coefficients.
        q = sum(powers)
        choose = 1.
        for p in powers:
            choose *= comb(q, p)
            q -= p

        coeff = falling_factorial * choose / (F * self.beta**self.observed)
        return coeff
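The gamma(x) / beta_function(x, y) expression equals gamma(x + y) / gamma(y), which for y = beta - x + 1 is the falling factorial beta * (beta - 1) * ... * (beta - x + 1). A standalone check of that identity (a sketch using SciPy):

from scipy.special import gamma, beta as beta_function

beta_param, x = 7.0, 3
y = beta_param - x + 1
lhs = gamma(x) / beta_function(x, y)
rhs = beta_param * (beta_param - 1) * (beta_param - 2)  # 7 * 6 * 5 = 210
assert abs(lhs - rhs) < 1e-9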
Example 6
    def __init__(self, alpha: float, beta: float, eps: float = 1e-10):
        """
        Class initializer.

        Extends BaseLoss.__init__.

        Parameters
        ----------
        alpha: float
            Value of alpha in beta loss denominator, mu ** (1 - alpha)

        beta: float
            Value of beta in beta loss denominator, (1 - mu) ** (1 - beta)

        eps: float
            Small value to use in log calculations to avoid numerical error
        """
        super().__init__()
        self.alpha = alpha
        self.beta = beta
        self.eps = eps
        self.scale = 1.0 / beta_function(alpha, beta)
        self._vt_callback = self.beta_callback(alpha, beta, eps)