def plot_dist(X, clusters, distargs=None):
    # plot the empirical histogram of X along with each cluster's weighted
    # predictive pmf and the sum of the weighted pmfs
    Y = range(distargs['K'])
    X_hist = numpy.array(utils.bincount(X, Y))
    X_hist = X_hist / float(len(X))
    K = len(clusters)
    pdf = numpy.zeros((K, distargs['K']))
    denom = log(float(len(X)))

    a = clusters[0].alpha

    pylab.bar(Y, X_hist, color="black", alpha=1, edgecolor="none")

    # log mixture weights: proportion of data assigned to each cluster
    W = [log(clusters[k].N) - denom for k in range(K)]
    for k in range(K):
        w = W[k]
        N = clusters[k].N
        ww = clusters[k].w
        for n in range(len(Y)):
            y = Y[n]
            pdf[k, n] = numpy.exp(
                w + cc_multinomial.calc_predictive_logp(y, N, ww, a))
        pylab.bar(Y, pdf[k, :], color="white", edgecolor="none", alpha=.5)

    # plot the sum of the weighted cluster pmfs in red
    pylab.bar(Y, numpy.sum(pdf, axis=0), color='none', edgecolor="red",
              linewidth=1)
    # pylab.ylim([0,1.0])
    pylab.title('multinomial')
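# A minimal, self-contained sketch (not part of the module) of the mixture
# computation plot_dist performs above: each cluster's predictive pmf is
# weighted by its empirical log proportion log(N_k / N), and the weighted
# pmfs are summed to form the overall predictive pmf. The names used here
# (component_logp, cluster_sizes) are hypothetical placeholders standing in
# for cc_multinomial.calc_predictive_logp and the cluster bookkeeping.
def _mixture_pmf_sketch(Y, cluster_sizes, component_logp):
    """Y: iterable of category values; cluster_sizes: list of N_k;
    component_logp(k, y): log predictive probability of y under cluster k."""
    import numpy
    from math import log
    N = float(sum(cluster_sizes))
    K = len(cluster_sizes)
    pdf = numpy.zeros((K, len(Y)))
    for k in range(K):
        w = log(cluster_sizes[k]) - log(N)   # log mixture weight for cluster k
        for n, y in enumerate(Y):
            pdf[k, n] = numpy.exp(w + component_logp(k, y))
    return pdf, pdf.sum(axis=0)              # per-cluster pmfs and their sum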
def plot_dist(X, clusters, distargs=None): colors = [ "red", "blue", "green", "yellow", "orange", "purple", "brown", "black" ] x_min = min(X) x_max = max(X) Y = range(int(x_max) + 1) nn = len(Y) K = len(clusters) pdf = numpy.zeros((K, nn)) denom = log(float(len(X))) a = clusters[0].a b = clusters[0].b nbins = min([len(Y), 50]) toplt = numpy.array(utils.bincount(X, Y)) / float(len(X)) pylab.bar(Y, toplt, color="gray", edgecolor="none") W = [log(clusters[k].N) - denom for k in range(K)] for k in range(K): w = W[k] N = clusters[k].N sum_x = clusters[k].sum_x sum_log_fact_x = clusters[k].sum_log_fact_x for n in range(nn): y = Y[n] pdf[k, n] = numpy.exp(w + cc_poisson.calc_predictive_logp( y, N, sum_x, sum_log_fact_x, a, b)) if k >= 8: color = "white" alpha = .3 else: color = colors[k] alpha = .7 pylab.bar(Y, pdf[k, :], color=color, edgecolor='none', alpha=alpha) pylab.bar(Y, numpy.sum(pdf, axis=0), color='none', edgecolor='black', linewidth=3) # print integral for debugging (should never be greater that 1) # print utils.line_quad(Y, numpy.sum(pdf,axis=0)) pylab.xlim([0, x_max + 1]) pylab.title('poisson')
def plot_dist(X, clusters, distargs=None): colors = ["red", "blue", "green", "yellow", "orange", "purple", "brown", "black"] x_min = min(X) x_max = max(X) Y = range(int(x_max)+1) nn = len(Y) K = len(clusters) pdf = numpy.zeros((K,nn)) denom = log(float(len(X))) a = clusters[0].a b = clusters[0].b nbins = min([len(Y), 50]) toplt = numpy.array(utils.bincount(X,Y))/float(len(X)) pylab.bar(Y, toplt, color="gray", edgecolor="none") W = [log(clusters[k].N) - denom for k in range(K)] for k in range(K): w = W[k] N = clusters[k].N sum_x = clusters[k].sum_x sum_log_fact_x = clusters[k].sum_log_fact_x for n in range(nn): y = Y[n] pdf[k, n] = numpy.exp(w + cc_poisson.calc_predictive_logp(y, N, sum_x, sum_log_fact_x, a, b)) if k >= 8: color = "white" alpha=.3 else: color = colors[k] alpha=.7 pylab.bar(Y, pdf[k,:], color=color, edgecolor='none', alpha=alpha) pylab.bar(Y, numpy.sum(pdf,axis=0), color='none', edgecolor='black', linewidth=3) # print integral for debugging (should never be greater that 1) # print utils.line_quad(Y, numpy.sum(pdf,axis=0)) pylab.xlim([0, x_max+1]) pylab.title('poisson')
def __init__(self, dims, alpha=None, Z=None, n_grid=30):
    """
    Constructor
    input arguments:
    -- dims: a list of cc_dim objects
    optional arguments:
    -- alpha: CRP concentration parameter. If None, it is selected from the grid.
    -- Z: starting partition of rows to categories. If None, it is initialized
       from CRP(alpha).
    -- n_grid: number of grid points in the hyperparameter grids
    """
    N = dims[0].N
    self.N = N

    # build the alpha grid and draw alpha if it was not supplied
    self.alpha_grid = utils.log_linspace(1.0 / self.N, self.N, n_grid)
    if alpha is None:
        alpha = random.choice(self.alpha_grid)
    else:
        assert alpha > 0.0

    # generate or validate the row partition
    if Z is None:
        Z, Nk, K = utils.crp_gen(N, alpha)
    else:
        assert len(Z) == dims[0].X.shape[0]
        Nk = utils.bincount(Z)
        K = len(Nk)

    assert sum(Nk) == N
    assert K == len(Nk)

    # reassign each dim's data to the row partition
    self.dims = dict()
    for dim in dims:
        dim.reassign(Z)
        self.dims[dim.index] = dim

    self.alpha = alpha
    self.Z = numpy.array(Z)
    self.K = K
    self.Nk = Nk
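# Hedged sketch of what a CRP partition generator like utils.crp_gen(N, alpha)
# might do, for readers unfamiliar with the construction: rows are assigned
# sequentially, joining an existing category with probability proportional to
# its count and opening a new category with probability proportional to alpha.
# The actual utils implementation may differ in details (RNG, return types).
def _crp_gen_sketch(N, alpha, rng=None):
    import numpy
    rng = numpy.random.RandomState() if rng is None else rng
    Z = [0]
    Nk = [1]
    for i in range(1, N):
        probs = numpy.array(Nk + [alpha], dtype=float)
        probs /= probs.sum()
        k = rng.choice(len(probs), p=probs)
        if k == len(Nk):
            Nk.append(1)   # open a new category
        else:
            Nk[k] += 1
        Z.append(k)
    return Z, Nk, len(Nk)  # assignments, category counts, number of categories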
def __init__(self, X, cctypes, distargs, n_grid=30, Zv=None, Zrcv=None,
             hypers=None, seed=None):
    """
    cc_state constructor

    input arguments:
    -- X: a list of numpy data columns.
    -- cctypes: a list of strings where each entry is the data type for
       each column.
    -- distargs: a list of distargs appropriate for each type in cctypes.
       For details on distargs, see the documentation for each data type.

    optional arguments:
    -- n_grid: number of bins for hyperparameter grids. Default = 30.
    -- Zv: the assignment of columns to views. If not specified, a partition
       is generated randomly.
    -- Zrcv: the assignment of rows to clusters for each view.
    -- hypers: a list of initial hyperparameter values, one entry per column.
    -- seed: seed for the random number generator. Default = system time.

    example:
    >>> import numpy
    >>> n_rows = 100
    >>> X = [numpy.random.normal(size=n_rows), numpy.random.normal(size=n_rows)]
    >>> State = cc_state(X, ['normal', 'normal'], [None, None])
    """
    if seed is not None:
        random.seed(seed)
        numpy.random.seed(seed)

    self.n_rows = len(X[0])
    self.n_cols = len(X)
    self.n_grid = n_grid

    # construct the dims
    self.dims = []
    for col in range(self.n_cols):
        Y = X[col]
        cctype = cctypes[col]
        if _is_uncollapsed[cctype]:
            dim = cc_dim_uc(Y, _cctype_class[cctype], col, n_grid=n_grid,
                            distargs=distargs[col])
        else:
            dim = cc_dim(Y, _cctype_class[cctype], col, n_grid=n_grid,
                         distargs=distargs[col])
        self.dims.append(dim)

    # set the hyperparameters in the dims
    if hypers is not None:
        for d in range(self.n_cols):
            self.dims[d].set_hypers(hypers[d])

    # initialize CRP alpha
    self.alpha_grid = utils.log_linspace(1.0 / self.n_cols, self.n_cols,
                                         self.n_grid)
    self.alpha = random.choice(self.alpha_grid)

    assert len(self.dims) == self.n_cols

    if Zrcv is not None:
        assert Zv is not None
        assert len(Zv) == self.n_cols
        assert len(Zrcv) == max(Zv) + 1
        assert len(Zrcv[0]) == self.n_rows

    # construct the view partition
    if Zv is None:
        Zv, Nv, V = utils.crp_gen(self.n_cols, self.alpha)
    else:
        Nv = utils.bincount(Zv)
        V = len(Nv)

    # construct views
    self.views = []
    for view in range(V):
        indices = [i for i in range(self.n_cols) if Zv[i] == view]
        dims_view = []
        for index in indices:
            dims_view.append(self.dims[index])
        if Zrcv is None:
            self.views.append(cc_view(dims_view, n_grid=n_grid))
        else:
            self.views.append(cc_view(dims_view, Z=numpy.array(Zrcv[view]),
                                      n_grid=n_grid))

    self.Zv = numpy.array(Zv)
    self.Nv = Nv
    self.V = V
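# Hedged sketch of the log-spaced hyperparameter grid that a call like
# utils.log_linspace(1.0 / n_cols, n_cols, n_grid) builds for the CRP alpha
# in both constructors above: n points spaced evenly in log space between
# the two endpoints. The actual utils implementation may differ.
def _log_linspace_sketch(lo, hi, n):
    import numpy
    return numpy.exp(numpy.linspace(numpy.log(lo), numpy.log(hi), n))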