def __init__(self, N, B=1, C=3, pi=10.0, mu_0=None, Sigma_0=None, nu_0=None,
             special_case_self_conns=True):
    """
    SBM weight model in which all blocks share a single covariance matrix.

    :param N:      number of nodes
    :param B:      dimensionality of each weight vector
    :param C:      number of blocks (communities)
    :param pi:     concentration of the block-membership prior
    :param mu_0:   prior mean of the block weight means (default: zeros(B))
    :param Sigma_0: prior scale matrix for the shared covariance (default: eye(B))
    :param nu_0:   degrees of freedom for the covariance prior (default: B+2,
                   the smallest value for which the IW mean is finite)
    :param special_case_self_conns: whether self-connections get their own
                   weight distribution
    """
    # The superclass receives the raw (possibly None) hyperparameters;
    # presumably it applies the same defaults internally — TODO confirm.
    super(SBMGaussianWeightSharedCov, self).__init__(
        N, B=B, C=C, pi=pi, mu_0=mu_0, Sigma_0=Sigma_0, nu_0=nu_0,
        special_case_self_conns=special_case_self_conns)

    # Fill in the local defaults for building the component distributions.
    if mu_0 is None:
        mu_0 = np.zeros(B)
    if Sigma_0 is None:
        Sigma_0 = np.eye(B)
    if nu_0 is None:
        nu_0 = B + 2

    # One covariance model shared by every block pair.
    self._cov_model = GaussianFixedMean(mu=np.zeros(B),
                                        nu_0=nu_0,
                                        lmbda_0=Sigma_0)

    # C x C grid of per-block-pair mean models, all tied to the shared
    # covariance via the _cov_model.sigma reference.
    # NOTE: `xrange` was Python-2-only; `range` behaves identically here
    # and keeps the module importable under Python 3.
    self._gaussians = [[GaussianFixedCov(mu_0=mu_0,
                                         sigma_0=np.eye(B),
                                         sigma=self._cov_model.sigma)
                        for _ in range(C)]
                       for _ in range(C)]
def __init__(self, N, B=1, dim=2, b=0.5, sigma=None, Sigma_0=None, nu_0=None,
             mu_self=0.0, eta=0.01):
    """
    Latent distance model with Gaussian weights.

    :param N:       number of nodes
    :param B:       dimensionality of each weight vector
    :param dim:     dimensionality of the latent node embedding
    :param b:       scalar hyperparameter (presumably a distance length
                    scale — confirm against the likelihood code)
    :param sigma:   initial covariance for the weight model (may be None)
    :param Sigma_0: prior scale matrix for the covariance (default: eye(B))
    :param nu_0:    degrees of freedom of the covariance prior (default: B+2)
    :param mu_self: prior mean for the self-connection weights
    :param eta:     variance scale of the random initial embedding
    """
    super(_LatentDistanceModelGaussianMixin, self).__init__(N, B)
    self.B = B
    self.dim = dim
    self.b = b
    self.eta = eta

    # Random initial locations, scaled so each coordinate has variance eta.
    self.L = np.sqrt(eta) * np.random.randn(N, dim)

    # Default hyperparameters for the weight covariance prior.
    Sigma_0 = np.eye(B) if Sigma_0 is None else Sigma_0
    nu_0 = (B + 2) if nu_0 is None else nu_0

    self.cov = GaussianFixedMean(mu=np.zeros(B),
                                 sigma=sigma,
                                 lmbda_0=Sigma_0,
                                 nu_0=nu_0)

    # Special case self-weights (along the diagonal)
    self._self_gaussian = Gaussian(mu_0=mu_self * np.ones(B),
                                   sigma_0=Sigma_0,
                                   nu_0=nu_0,
                                   kappa_0=1.0)
def __init__(self, N, B=1, dim=2, b=0.5, sigma=None, Sigma_0=None, nu_0=None,
             mu_self=0.0, eta=0.01):
    """
    Latent distance weight distribution: each node gets an embedding in
    R^dim and weights are modeled with a Gaussian whose covariance is
    learned (GaussianFixedMean), plus a separate Gaussian for self-weights.

    :param N:       number of nodes
    :param B:       dimensionality of each weight vector
    :param dim:     dimensionality of the latent node embedding
    :param b:       scalar hyperparameter (presumably a distance length
                    scale — confirm against the likelihood code)
    :param sigma:   initial covariance for the weight model (may be None)
    :param Sigma_0: prior scale matrix for the covariance (default: eye(B))
    :param nu_0:    degrees of freedom of the covariance prior (default: B+2)
    :param mu_self: prior mean for the self-connection weights
    :param eta:     variance scale of the random initial embedding
    """
    super(LatentDistanceGaussianWeightDistribution, self).__init__(N)
    self.B = B
    self.dim = dim
    self.b = b
    self.eta = eta

    # Random initial locations, scaled so each coordinate has variance eta.
    self.L = np.sqrt(eta) * np.random.randn(N, dim)

    # Default hyperparameters for the weight covariance prior.
    Sigma_0 = np.eye(B) if Sigma_0 is None else Sigma_0
    nu_0 = (B + 2) if nu_0 is None else nu_0

    self.cov = GaussianFixedMean(mu=np.zeros(B),
                                 sigma=sigma,
                                 lmbda_0=Sigma_0,
                                 nu_0=nu_0)

    # Special case self-weights (along the diagonal)
    self._self_gaussian = Gaussian(mu_0=mu_self * np.ones(B),
                                   sigma_0=Sigma_0,
                                   nu_0=nu_0,
                                   kappa_0=1.0)
# Check that the PG-Multinomial samples are distributed like the prior thetas = np.array(thetas) theta_mean = thetas.mean(0) theta_std = thetas.std(0) betas = np.array(betas) beta_mean = betas.mean(0) beta_std = betas.std(0) # Now sample from the prior for comparison print("Sampling from prior") from pybasicbayes.distributions import GaussianFixedMean from pgmult.utils import compute_uniform_mean_psi, psi_to_pi mu, sigma0 = compute_uniform_mean_psi(T) psis_prior = np.array( [GaussianFixedMean(mu=mu, lmbda_0=T * sigma0, nu_0=T).rvs(1) for _ in range(N_iter)]) thetas_prior = psi_to_pi(psis_prior[:,0,:]) betas_prior = np.random.dirichlet(alpha_beta*np.ones(V), size=(N_iter,)) # print "Mean psi: ", psi_mean, " +- ", psi_std import pybasicbayes.util.general as general percentilecutoff = 5 def plot_1d_scaled_quantiles(p1,p2,plot_midline=True): # scaled quantiles so that multiple calls line up p1.sort(), p2.sort() # NOTE: destructive! but that's cool xmin,xmax = general.scoreatpercentile(p1,percentilecutoff), \ general.scoreatpercentile(p1,100-percentilecutoff) ymin,ymax = general.scoreatpercentile(p2,percentilecutoff), \ general.scoreatpercentile(p2,100-percentilecutoff)