Example #1
File: bayespy.py Project: kinnala/gammy
    def theta_marginal(self, i):
        """Extract marginal distribution for a specific term

        """
        mus = utils.unflatten(self.theta.get_moments()[0], self.formula.bases)
        covs = utils.extract_diag_blocks(utils.solve_covariance(self.theta),
                                         self.formula.bases)
        return bp.nodes.Gaussian(mu=mus[i],
                                 Lambda=bp.utils.linalg.inv(covs[i]))
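The two helpers above split the model's concatenated parameter vector and joint covariance into per-term pieces. A minimal standalone NumPy sketch of that split, with hypothetical term sizes (the names `sizes`, `mean_flat`, `cov_full` are illustrative and not part of gammy):

import numpy as np

# Hypothetical model with two terms of 2 and 3 basis functions
sizes = [2, 3]
mean_flat = np.arange(5.0)   # concatenated means of all parameters
cov_full = 2.0 * np.eye(5)   # joint covariance of all parameters

# Split the flat mean vector term by term (roughly what `unflatten` does)
mus = np.split(mean_flat, np.cumsum(sizes)[:-1])

# Take the diagonal blocks of the joint covariance
# (roughly what `extract_diag_blocks` does)
covs, start = [], 0
for size in sizes:
    covs.append(cov_full[start:start + size, start:start + size])
    start += size

print([m.shape for m in mus])   # [(2,), (3,)]
print([c.shape for c in covs])  # [(2, 2), (3, 3)]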
Example #2
    def theta_marginal(self, i: int) -> Gaussian:
        """Extract marginal distribution for a specific term

        """
        u = self.theta.get_moments()
        mus = utils.unflatten(u[0], self.formula.terms)
        covs = utils.extract_diag_blocks(utils.solve_covariance(u),
                                         self.formula.terms)
        return Gaussian(mu=mus[i], Lambda=np.linalg.inv(covs[i]))
Example #3
File: bayespy.py Project: kinnala/gammy
    def theta_marginals(self):
        """Nodes for the basis specific marginal distributions

        """
        # TODO: Test that the marginal distributions are correct
        mus = utils.unflatten(self.theta.get_moments()[0], self.formula.bases)
        covs = utils.extract_diag_blocks(utils.solve_covariance(self.theta),
                                         self.formula.bases)
        return [
            bp.nodes.Gaussian(mu=mu, Lambda=bp.utils.linalg.inv(cov))
            for mu, cov in zip(mus, covs)
        ]
Example #4
    def predict_variance(self, input_data) -> Tuple[np.ndarray]:
        """Predict mean and variance

        Parameters
        ----------
        input_data : np.ndarray

        """
        X = self.formula.design_matrix(input_data)
        F = bp.nodes.SumMultiply("i,i", self.theta, X)
        u = F.get_moments()
        return (u[0], np.diag(utils.solve_covariance(u)) + self.inv_mean_tau)
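Here `bp.nodes.SumMultiply("i,i", self.theta, X)` forms the linear predictor F = X θ as a BayesPy node, so `u[0]` is the predictive mean and the diagonal of its covariance plus `1 / E[tau]` gives the predictive variance. A plain-NumPy sketch of the same arithmetic from an assumed parameter mean and covariance (the values below are illustrative, not gammy API):

import numpy as np

m = np.array([1.0, -0.5])                    # assumed E[theta]
S = np.array([[0.2, 0.05], [0.05, 0.1]])     # assumed Cov[theta]
inv_mean_tau = 0.3                           # assumed 1 / E[tau], i.e. noise variance
X = np.array([[1.0, 0.0], [1.0, 1.0], [1.0, 2.0]])  # design matrix rows

mean = X @ m                                 # predictive mean X E[theta]
var = np.diag(X @ S @ X.T) + inv_mean_tau    # diag(X Cov X^T) + noise variance
print(mean, var)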
Example #5
    def theta_marginals(self) -> List[Gaussian]:
        """Marginal distributions of model parameters

        """
        u = self.theta.get_moments()
        mus = utils.unflatten(u[0], self.formula.terms)
        covs = utils.extract_diag_blocks(utils.solve_covariance(u),
                                         self.formula.terms)
        return [
            Gaussian(mu=mu, Lambda=np.linalg.inv(cov))
            for (mu, cov) in zip(mus, covs)
        ]
Example #6
    def predict_variance_theta(self, input_data) -> Tuple[np.ndarray]:
        """Predict observations with variance from model parameters

        Parameters
        ----------
        input_data : np.ndarray

        """
        X = self.formula.design_matrix(input_data)
        Sigma = utils.solve_covariance(self.theta.get_moments())
        return (np.dot(X, self.theta.mu),
                np.diag(np.dot(X, np.dot(Sigma, X.T))))
Example #7
    def predict_variance_marginal(self, input_data,
                                  i: int) -> Tuple[np.ndarray]:
        """Evaluate mean and variance for a given term

        Parameters
        ----------
        input_data : np.ndarray

        """
        X = self.formula.design_matrix(input_data, i)
        Sigma = utils.solve_covariance(self.theta_marginal(i).get_moments())
        mu = np.dot(X, self.mean_theta[i])
        sigma = np.diag(np.dot(X, np.dot(Sigma, X.T)))
        return (mu, sigma)
Example #8
    def predict_variance(self, input_data) -> Tuple[np.ndarray]:
        """Predict mean and variance

        Parameters
        ----------
        input_data : np.ndarray

        """
        X = self.formula.design_matrix(input_data)
        Sigma = utils.solve_covariance(self.theta.get_moments())
        return (
            np.dot(X, self.theta.mu),
            # Based on formula: var(A x) = A var(x) A'
            np.diag(np.dot(X, np.dot(Sigma, X.T))) + self.inv_mean_tau)
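The inline comment refers to the identity var(A x) = A var(x) Aᵀ for a linear transformation of a random vector. A quick standalone Monte Carlo check of that identity (plain NumPy, independent of gammy):

import numpy as np

rng = np.random.default_rng(0)
Sigma = np.array([[1.0, 0.3], [0.3, 0.5]])   # var(x)
A = np.array([[2.0, 0.0], [1.0, -1.0]])      # linear map

x = rng.multivariate_normal(np.zeros(2), Sigma, size=200_000)
empirical = np.cov((x @ A.T).T)              # sample estimate of var(A x)
analytic = A @ Sigma @ A.T                   # A var(x) A'

print(np.round(empirical, 2))
print(np.round(analytic, 2))                 # the two should agree closely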
Example #9
    def predict_variance_marginal(self, input_data,
                                  i: int) -> Tuple[np.ndarray]:
        """Predict marginal distributions means and variances

        Parameters
        ----------
        input_data : np.ndarray

        """
        # Not refactored with predict_marginal for perf reasons
        X = self.formula.design_matrix(input_data, i)
        F = bp.nodes.SumMultiply("i,i", self.theta_marginal(i), X)
        mu = np.dot(X, self.mean_theta[i])
        sigma = np.diag(utils.solve_covariance(F.get_moments()))
        return (mu, sigma)
Example #10
    def predict_variance_theta(self, input_data) -> Tuple[np.ndarray]:
        """Predict observations with variance from model parameters

        Parameters
        ----------
        input_data : np.ndarray

        """
        X = self.formula.design_matrix(input_data)
        F = bp.nodes.SumMultiply("i,i", self.theta, X)
        # Ensuring correct moments
        #
        # F = F._ensure_moments(
        #     F, bp.inference.vmp.nodes.gaussian.GaussianMoments, ndim=0
        # )
        #
        # NOTE: See also bp.plot.plot_gaussian how std can be calculated
        u = F.get_moments()
        return (u[0], np.diag(utils.solve_covariance(u)))
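As the NOTE hints, a standard deviation for plotting can be taken as the square root of the returned variance. A self-contained sketch of a ±2 std band; the `mean` and `var` arrays below are dummy stand-ins for what `predict_variance_theta` would return for 1-D input:

import numpy as np
import matplotlib.pyplot as plt

x = np.linspace(0, 1, 50)
mean = np.sin(2 * np.pi * x)       # stand-in for the returned u[0]
var = np.full_like(x, 0.04)        # stand-in for the returned variance

std = np.sqrt(var)
plt.plot(x, mean, label="mean")
plt.fill_between(x, mean - 2 * std, mean + 2 * std, alpha=0.3, label="±2 std")
plt.legend()
plt.show()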
Example #11
File: plot.py Project: silky/gammy
def gaussian1d_density_plot(model, grid_limits=[0.5, 1.5]):
    """Plot 1-D density for each parameter

    Parameters
    ----------
    grid_limits : list
        Grid of `tau` has endpoints `[grid_limits[0] * mu, grid_limits[1] * mu]`
        where `mu` is the expectation of `tau`.

    """
    N = len(model)
    fig = plt.figure(figsize=(8, max(4 * N // 2, 8)))
    gs = fig.add_gridspec(N + 1, 1)

    # Plot inverse gamma
    ax = fig.add_subplot(gs[0])
    (b, a) = (-model.tau.phi[0], model.tau.phi[1])
    mu = a / b
    grid = np.arange(grid_limits[0] * mu, grid_limits[1] * mu, mu / 300)
    ax.plot(grid, model.tau.pdf(grid))
    ax.set_title(r"$\tau$ = noise inverse variance")
    ax.grid(True)

    # Plot marginal thetas
    for i, theta in enumerate(model.theta_marginals):
        ax = fig.add_subplot(gs[i + 1])
        mus = theta.get_moments()[0]
        mus = np.array([mus]) if mus.shape == () else mus
        cov = utils.solve_covariance(theta)
        stds = pipe(
            np.array([cov]) if cov.shape == () else np.diag(cov), np.sqrt)
        left = (mus - 4 * stds).min()
        right = (mus + 4 * stds).max()
        grid = np.arange(left, right, (right - left) / 300)
        for (mu, std) in zip(mus, stds):
            node = bp.nodes.GaussianARD(mu, 1 / std**2)
            ax.plot(grid, node.pdf(grid))
        ax.set_title(r"$\theta_{0}$".format(i))
        ax.grid(True)

    fig.tight_layout()
    return fig
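The tau panel reads the Gamma node's natural parameters as `(b, a) = (-phi[0], phi[1])` and uses the mean `a / b`. A standalone numerical check of that mean, with hypothetical shape and rate values (no gammy or bayespy required):

import numpy as np

rng = np.random.default_rng(1)
a, b = 3.0, 2.0                    # hypothetical shape and rate
samples = rng.gamma(shape=a, scale=1.0 / b, size=200_000)
print(samples.mean(), a / b)       # both close to 1.5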
Example #12
    def predict_variance_marginals(self,
                                   input_data) -> List[Tuple[np.ndarray]]:
        """Predict variance (theta) for marginal parameter distributions

        Parameters
        ----------
        input_data : np.ndarray

        """
        Xs = [
            self.formula.design_matrix(input_data, i)
            for i in range(len(self.formula))
        ]
        Sigmas = [
            utils.solve_covariance(theta.get_moments())
            for theta in self.theta_marginals
        ]
        mus = [np.dot(X, c) for (X, c) in zip(Xs, self.mean_theta)]
        sigmas = [
            np.diag(np.dot(X, np.dot(Sigma, X.T)))
            for (X, Sigma) in zip(Xs, Sigmas)
        ]
        return list(zip(mus, sigmas))
Example #13
def gaussian1d_density_plot(model: gammy.bayespy.GAM):
    """Plot 1-D density for each parameter

    """
    N = len(model.formula)
    N_rows = 2 + (N + 1) // 2
    fig = plt.figure(figsize=(8, 2 * N_rows))
    gs = fig.add_gridspec(N + 1, 1)

    # Plot inverse gamma
    ax = fig.add_subplot(gs[0])
    (b, a) = (-model.tau.phi[0], model.tau.phi[1])
    mu = a / b
    grid = np.arange(0.5 * mu, 1.5 * mu, mu / 300)
    ax.plot(grid, model.tau.pdf(grid))
    ax.set_title(r"$\tau$ = noise inverse variance")
    ax.grid(True)

    # Plot marginal thetas
    for i, theta in enumerate(model.theta_marginals):
        ax = fig.add_subplot(gs[i + 1])
        mus = theta.get_moments()[0]
        mus = np.array([mus]) if mus.shape == () else mus
        cov = utils.solve_covariance(theta.get_moments())
        stds = pipe(
            np.array([cov]) if cov.shape == () else np.diag(cov), np.sqrt)
        left = (mus - 4 * stds).min()
        right = (mus + 4 * stds).max()
        grid = np.arange(left, right, (right - left) / 300)
        for (mu, std) in zip(mus, stds):
            node = bp.nodes.GaussianARD(mu, 1 / std**2)
            ax.plot(grid, node.pdf(grid))
        ax.set_title(r"$\theta_{0}$".format(i))
        ax.grid(True)

    fig.tight_layout()
    return fig
Example #14
    def predict_variance_marginals(self,
                                   input_data) -> List[Tuple[np.ndarray]]:
        """Predict variance (theta) for marginal parameter distributions

        NOTE: Analogous to self.predict_variance_theta but for marginal
        distributions. Adding observation noise does not make sense as we don't
        know how it is split among the model terms.

        Parameters
        ----------
        input_data : np.ndarray

        """
        Xs = [
            self.formula.design_matrix(input_data, i)
            for i in range(len(self.formula))
        ]
        Fs = [
            bp.nodes.SumMultiply("i,i", theta, X)
            for (X, theta) in zip(Xs, self.theta_marginals)
        ]
        mus = [np.dot(X, c) for (X, c) in zip(Xs, self.mean_theta)]
        sigmas = [np.diag(utils.solve_covariance(F.get_moments())) for F in Fs]
        return list(zip(mus, sigmas))
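Because the full design matrix is the column-wise concatenation of the per-term design matrices, the per-term means returned here add up to the full predictive mean; the per-term variances do not simply add unless the cross-covariances between terms vanish. A standalone NumPy illustration of the mean decomposition with hypothetical blocks:

import numpy as np

X1 = np.array([[1.0], [1.0], [1.0]])    # hypothetical design matrix of term 1
X2 = np.array([[0.0], [1.0], [2.0]])    # hypothetical design matrix of term 2
theta1, theta2 = np.array([0.5]), np.array([2.0])

X = np.hstack([X1, X2])                 # full design matrix
theta = np.concatenate([theta1, theta2])

full_mean = X @ theta
per_term_sum = X1 @ theta1 + X2 @ theta2
print(np.allclose(full_mean, per_term_sum))   # True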
Example #15
    def covariance_theta(self) -> np.ndarray:
        """Covariance estimate of model parameters

        """
        return utils.solve_covariance(self.theta.get_moments())
Example #16
def test_solve_covariance(mu, Sigma):
    node = bp.nodes.Gaussian(mu, np.linalg.inv(Sigma))
    assert_almost_equal(utils.solve_covariance(node.get_moments()),
                        Sigma,
                        decimal=8)
    return
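The test leans on the fact that a BayesPy Gaussian node's moments are u = [⟨x⟩, ⟨x xᵀ⟩], so the covariance can be recovered as u[1] - u[0] u[0]ᵀ, which is what `utils.solve_covariance` is expected to compute. A standalone NumPy check of that moment identity:

import numpy as np

mu = np.array([1.0, -2.0])
Sigma = np.array([[2.0, 0.3], [0.3, 1.0]])

u0 = mu                               # first moment <x>
u1 = Sigma + np.outer(mu, mu)         # second moment <x x^T>

recovered = u1 - np.outer(u0, u0)     # covariance from the moments
print(np.allclose(recovered, Sigma))  # True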