def construct_hyper_grids(X, n_grid=30):
    """Return log-spaced hyperparameter grids for 'm', 'r', 'nu', and 's'.

    Grid endpoints are data-dependent heuristics based on the number of
    observations and the largest observed value.

    Parameters
    ----------
    X : sequence of numbers
        Observed data (may be empty).
    n_grid : int
        Number of points in each grid.

    Returns
    -------
    dict mapping hyperparameter name -> grid array.
    """
    grids = dict()
    # Guard the empty-data case, mirroring the guarded variants of this
    # function elsewhere in the codebase: max() raises ValueError on an
    # empty sequence, and an upper endpoint of 0 breaks log spacing.
    N = float(len(X)) if len(X) > 0 else 1.
    maxX = max(X) if len(X) > 0 else 1.
    grids['m'] = gu.log_linspace(1e-4, maxX, n_grid)
    grids['r'] = gu.log_linspace(.1, N, n_grid)
    grids['nu'] = gu.log_linspace(.1, N, n_grid)
    grids['s'] = gu.log_linspace(.1, N, n_grid)
    return grids
def construct_hyper_grids(X, n_grid=300):
    """Return log-spaced grids for the 'a' and 'b' hyperparameters.

    Endpoints are data-dependent heuristics; the +1 terms keep both
    endpoints strictly positive in the single-observation (and
    zero-variance) case so log spacing is well defined.

    Parameters
    ----------
    X : sequence of numbers
        Observed data.
    n_grid : int
        Number of points in each grid.
    """
    # Plus 1 for single observation case.
    num_obs = len(X) + 1.
    sum_sq_dev = np.var(X) * len(X) + 1.
    # Data dependent heuristics.
    return {
        'a': gu.log_linspace(1. / (10 * num_obs), 10 * num_obs, n_grid),
        'b': gu.log_linspace(sum_sq_dev / 100., sum_sq_dev, n_grid),
    }
def construct_hyper_grids(X, n_grid=30):
    """Return hyperparameter grids for 'm', 'r', 's', and 'nu'.

    'm' is linearly spaced over the data range; the others are
    log-spaced with data-dependent endpoints.

    Parameters
    ----------
    X : sequence of numbers
        Observed data (may be empty).
    n_grid : int
        Number of points in each grid.

    Returns
    -------
    dict mapping hyperparameter name -> grid array.
    """
    grids = dict()
    # Plus 1 for single observation case.
    N = len(X) + 1.
    # Guard the empty-data case, mirroring the guarded variant of this
    # function elsewhere in the codebase: min()/max() raise ValueError on
    # an empty sequence, and np.var([]) is NaN.
    if len(X) > 0:
        minX, maxX = min(X), max(X)
        ssqdev = np.var(X) * len(X) + 1.
    else:
        minX, maxX = 0, 1
        ssqdev = 1.
    # Data dependent heuristics.
    grids['m'] = np.linspace(minX, maxX + 5, n_grid)
    grids['r'] = gu.log_linspace(1. / N, N, n_grid)
    grids['s'] = gu.log_linspace(ssqdev / 100., ssqdev, n_grid)
    grids['nu'] = gu.log_linspace(1., N, n_grid)  # df >= 1
    return grids
def construct_hyper_grids(X, n_grid=30):
    """Return hyperparameter grids for 'm', 'r', 's', and 'nu'.

    'm' is linearly spaced over the data range; the others are
    log-spaced. Defaults (N=5, range [0, 1], ssqdev=1) cover the
    empty-data case so every grid endpoint stays finite.

    Parameters
    ----------
    X : sequence of numbers
        Observed data (may be empty).
    n_grid : int
        Number of points in each grid.
    """
    empty = len(X) == 0
    N = 5 if empty else len(X)
    minX = 0 if empty else min(X)
    maxX = 1 if empty else max(X)
    ssqdev = 1 if empty else np.var(X) * N + 1
    # Data dependent heuristics.
    return {
        'm': np.linspace(minX, maxX + 5, n_grid),
        'r': log_linspace(1. / N, N, n_grid),
        's': log_linspace(ssqdev / 100., ssqdev, n_grid),
        'nu': log_linspace(1., N, n_grid),  # df >= 1
    }
def construct_hyper_grids(X, n_grid=30):
    """Return grids for 'a' (integer-valued) and 'b' (log-spaced).

    Parameters
    ----------
    X : sequence
        Observed data (may be empty).
    n_grid : int
        Number of points in each grid (the 'a' grid may be shorter after
        deduplication of rounded values).
    """
    grids = dict()
    # Guard the empty-data case, mirroring the guarded variant of this
    # function elsewhere in the codebase: with len(X) == 0 the 'a' grid
    # would contain 0 and the 'b' upper endpoint would be 0, which
    # breaks the log spacing.
    N = len(X) if len(X) > 0 else 2
    # only use integers for a so we can nicely draw from a negative binomial
    # in predictive_draw
    grids['a'] = np.unique(np.round(np.linspace(1, N, n_grid)))
    grids['b'] = gu.log_linspace(.1, float(N), n_grid)
    return grids
def construct_hyper_grids(X, n_grid=30):
    """Return hyperparameter grids 'a', 'b', and 'k' for the von Mises model.

    The concentration grid 'k' is seeded at the kappa estimate computed
    from the circular sufficient statistics (sums of sines and cosines).

    NOTE(review): assumes X is non-empty — an empty X makes N == 0 and
    the 1/N endpoint would raise ZeroDivisionError; confirm callers
    always pass data.
    """
    num_obs = float(len(X))
    sin_sum = np.sum(np.sin(X))
    cos_sum = np.sum(np.cos(X))
    kappa = Vonmises.estimate_kappa(num_obs, sin_sum, cos_sum)
    return {
        'a': gu.log_linspace(1. / num_obs, num_obs, n_grid),
        'b': np.linspace(2 * pi / n_grid, 2 * pi, n_grid),
        'k': np.linspace(kappa, num_obs * kappa, n_grid),
    }
def construct_hyper_grids(X, n_grid=30):
    """Return a log-spaced grid for the 'alpha' hyperparameter.

    Parameters
    ----------
    X : sequence
        Observed data (may be empty).
    n_grid : int
        Number of points in the grid.
    """
    grids = dict()
    # Guard the empty-data case, mirroring the guarded variant of this
    # function elsewhere in the codebase: 1/len(X) would raise
    # ZeroDivisionError when X is empty.
    N = len(X) if len(X) > 0 else 5
    grids['alpha'] = gu.log_linspace(1. / N, N, n_grid)
    return grids
def construct_hyper_grids(X, n_grid=30):
    """Return a log-spaced grid for the 'alpha' hyperparameter.

    Uses N = 5 as a stand-in sample size when X is empty so both grid
    endpoints stay finite and positive.

    Parameters
    ----------
    X : sequence
        Observed data (may be empty).
    n_grid : int
        Number of points in the grid.
    """
    # len(X) is never negative, so `or` fires exactly when X is empty.
    sample_size = len(X) or 5
    return {'alpha': log_linspace(1. / sample_size, sample_size, n_grid)}
def construct_hyper_grids(X, n_grid=30):
    """Return identical log-spaced grids over [1, len(X)] for 'alpha'
    and 'beta'.

    NOTE(review): an empty X makes the upper endpoint 0, which is a
    degenerate log grid — presumably callers always pass data; confirm.

    Parameters
    ----------
    X : sequence
        Observed data.
    n_grid : int
        Number of points in each grid.
    """
    upper = float(len(X))
    return {
        'alpha': gu.log_linspace(1., upper, n_grid),
        'beta': gu.log_linspace(1., upper, n_grid),
    }
def construct_hyper_grids(X, n_grid=30):
    """Return log-spaced grids for 'a' and 'b' with upper endpoint
    len(X) / 2.

    NOTE(review): an empty X makes the upper endpoint 0, degenerate for
    a log grid — presumably callers always pass data; confirm.

    Parameters
    ----------
    X : sequence
        Observed data.
    n_grid : int
        Number of points in each grid.
    """
    half_count = float(len(X)) / 2.
    grids = {}
    grids['a'] = gu.log_linspace(1, half_count, n_grid)
    grids['b'] = gu.log_linspace(.1, half_count, n_grid)
    return grids
def construct_hyper_grids(X, n_grid=30):
    """Return grids for 'a' (integer-valued) and 'b' (log-spaced).

    Falls back to N = 2 when X is empty so both grids stay well formed.

    Parameters
    ----------
    X : sequence
        Observed data (may be empty).
    n_grid : int
        Number of points in each grid (the 'a' grid may be shorter
        after deduplication of rounded values).
    """
    # len(X) is never negative, so `or` fires exactly when X is empty.
    num_obs = len(X) or 2
    integer_grid = np.unique(np.round(np.linspace(1, num_obs, n_grid)))
    return {
        'a': integer_grid,
        'b': log_linspace(.1, float(num_obs), n_grid),
    }
# NOTE(review): the block below is the continuation of a function whose
# `def` line lies above this chunk (it checks a CRP gpm against the
# closed-form CRP probabilities in `gu`); only its tail is visible here.
    # Probe every occupied table plus one fresh (unoccupied) table.
    probe_values = set(P).union({max(P) + 1})
    assert Nk == crp.counts.values()
    # Table predictive probabilities: the analytic fresh-table logps must
    # match the gpm's logpdf for each probed table assignment.
    assert np.allclose(
        gu.logp_crp_fresh(N, Nk, alpha),
        [crp.logpdf(-1, {0: v}, None) for v in probe_values])
    # Data probability: full-partition score must match the analytic CRP
    # probability of the observed counts.
    assert np.allclose(gu.logp_crp(N, Nk, alpha), crp.logpdf_score())
    # Gibbs transition probabilities, checked row by row.
    Z = crp.data.values()
    for i, rowid in enumerate(crp.data):
        assert np.allclose(
            gu.logp_crp_gibbs(Nk, Z, i, alpha, 1),
            crp.gibbs_logps(rowid))

# Parameter grids for the test below: sample sizes 1..128 (powers of two),
# log-spaced concentrations, and a single fixed seed for reproducibility.
N = [2**i for i in xrange(8)]
alpha = gu.log_linspace(.001, 100, 10)
seed = [5]

@pytest.mark.parametrize('N, alpha, seed', itertools.product(N, alpha, seed))
def test_crp_simple(N, alpha, seed):
    # Obtain the partitions.
    A = gu.simulate_crp(N, alpha, rng=gu.gen_rng(seed))
    Nk = list(np.bincount(A))
    # Same seed, so the gpm must reproduce the simulated partition exactly.
    crp = simulate_crp_gpm(N, alpha, rng=gu.gen_rng(seed))
    assert A == crp.data.values()
    assert_crp_equality(alpha, Nk, crp)