def load_data(self, path):
    """Restore posterior samples and observed data from a pickled results file.

    Rebuilds ``self.samples`` (per-iteration cluster parameters) and
    ``self.data`` from the dictionary the sampler wrote to *path*.
    """
    # NOTE(review): pickle.load is unsafe on untrusted input; assumes *path*
    # was produced by this package's own writer.
    with open(path, 'rb') as file:
        payload = pickle.load(file)

    deltas = payload['deltas']
    zetas = payload['zetas']
    mus = payload['mus']

    self.nSamp = deltas.shape[0]
    self.nDat = deltas.shape[1]
    self.nCol = mus.shape[1]

    self.data = Data_From_Sphere(payload['V'])
    try:
        # The stored outcome 'Y' is optional.
        self.data.fill_outcome(payload['Y'])
    except KeyError:
        pass

    samples = Samples_(self.nSamp, self.nDat, self.nCol)
    samples.delta = deltas
    samples.eta = payload['etas']
    # zetas is stored flat with the iteration index in column 0; regroup it
    # into one (clusters x nCol) array per retained MCMC iteration.
    samples.zeta = [zetas[zetas.T[0] == s, 1:] for s in range(self.nSamp)]
    samples.mu = mus
    samples.Sigma = payload['Sigmas'].reshape(self.nSamp, self.nCol, self.nCol)
    samples.r = payload['rs']
    self.samples = samples
    return
def load_data(self, path):
    """Restore posterior samples and observed data from a pickled results file.

    Rebuilds ``self.samples`` (per-iteration cluster parameters and
    hyperparameters) and ``self.data`` from the dictionary at *path*.
    """
    # NOTE(review): pickle.load is unsafe on untrusted input; assumes *path*
    # was produced by this package's own writer.
    with open(path, 'rb') as file:
        out = pickle.load(file)

    def regroup(flat):
        # Column 0 of the flat array holds the iteration index; split into
        # one (clusters x nCol) block per retained iteration.
        return [flat[flat.T[0] == s, 1:] for s in range(self.nSamp)]

    deltas = out['deltas']
    alphas = out['alphas']
    self.nSamp = deltas.shape[0]
    self.nDat = deltas.shape[1]
    self.nCol = alphas.shape[1]

    self.data = Data_From_Sphere(out['V'])
    try:
        # The stored outcome 'Y' is optional.
        self.data.fill_outcome(out['Y'])
    except KeyError:
        pass

    samples = Samples(self.nSamp, self.nDat, self.nCol)
    samples.delta = deltas
    samples.eta = out['etas']
    samples.alpha = alphas
    samples.beta = out['betas']
    samples.xi = out['xis']
    samples.tau = out['taus']
    samples.zeta = regroup(out['zetas'])
    samples.sigma = regroup(out['sigmas'])
    samples.r = out['rs']
    self.samples = samples
    return
class Result(object):
    """Posterior container for the gamma-gamma mixture model variant.

    Restores sampler output from a pickle and generates posterior
    predictive draws on the gamma, hypercube, or angular scale.
    """

    def generate_posterior_predictive_gammas(self, n_per_sample = 1, m = 10):
        """Simulate gamma-scale observations from the posterior predictive.

        For each retained MCMC iteration, the occupied clusters are
        augmented with *m* candidate clusters drawn from their priors, and
        *n_per_sample* observations are sampled per iteration.
        """
        draws = []
        for s in range(self.nSamp):
            delta = self.samples.delta[s]
            # Occupancy counts, padded so the m candidate slots exist.
            counts = np.bincount(delta, minlength = int(delta.max() + 1 + m))
            # Each empty slot receives weight eta / m.
            weights = counts + (counts == 0) * self.samples.eta[s] / m
            # Candidate shape parameters from their gamma prior.
            fresh_zeta = gamma(
                shape = self.samples.alpha[s],
                scale = 1. / self.samples.beta[s],
                size = (m, self.nCol),
                )
            # Candidate rate parameters; first column held fixed at 1.
            fresh_sigma = np.hstack((
                np.ones((m, 1)),
                gamma(
                    shape = self.samples.xi[s],
                    scale = self.samples.tau[s],
                    size = (m, self.nCol - 1),
                    ),
                ))
            picks = generate_indices(weights / weights.sum(), n_per_sample)
            zeta = np.vstack((self.samples.zeta[s], fresh_zeta))[picks]
            sigma = np.vstack((self.samples.sigma[s], fresh_sigma))[picks]
            draws.append(gamma(shape = zeta, scale = 1 / sigma))
        return np.vstack(draws)

    def generate_posterior_predictive_hypercube(self, n_per_sample = 1, m = 10):
        """Posterior predictive draws projected onto the unit hypercube."""
        gammas = self.generate_posterior_predictive_gammas(n_per_sample, m)
        return euclidean_to_hypercube(gammas)

    def generate_posterior_predictive_angular(self, n_per_sample = 1, m = 10):
        """Posterior predictive draws in angular coordinates."""
        hyp = self.generate_posterior_predictive_hypercube(n_per_sample, m)
        return euclidean_to_angular(hyp)

    def write_posterior_predictive(self, path, n_per_sample = 1):
        """Write angular posterior predictive draws to a CSV at *path*."""
        angular = self.generate_posterior_predictive_angular(n_per_sample)
        frame = pd.DataFrame(
            angular,
            columns = ['theta_{}'.format(i) for i in range(1, self.nCol)],
            )
        frame.to_csv(path, index = False)
        return

    def load_data(self, path):
        """Load pickled sampler output; rebuild self.samples and self.data."""
        # NOTE(review): pickle.load is unsafe on untrusted input; assumes
        # *path* was produced by this package's own writer.
        with open(path, 'rb') as file:
            out = pickle.load(file)

        def regroup(flat):
            # Column 0 is the iteration index; split into one block of
            # cluster rows per retained iteration.
            return [flat[flat.T[0] == s, 1:] for s in range(self.nSamp)]

        deltas = out['deltas']
        alphas = out['alphas']
        self.nSamp = deltas.shape[0]
        self.nDat = deltas.shape[1]
        self.nCol = alphas.shape[1]

        self.data = Data_From_Sphere(out['V'])
        try:
            # The stored outcome 'Y' is optional.
            self.data.fill_outcome(out['Y'])
        except KeyError:
            pass

        samples = Samples(self.nSamp, self.nDat, self.nCol)
        samples.delta = deltas
        samples.eta = out['etas']
        samples.alpha = alphas
        samples.beta = out['betas']
        samples.xi = out['xis']
        samples.tau = out['taus']
        samples.zeta = regroup(out['zetas'])
        samples.sigma = regroup(out['sigmas'])
        samples.r = out['rs']
        self.samples = samples
        return

    def __init__(self, path):
        self.load_data(path)
        return
data = MixedData( raw, eval(p.cats), decluster=eval(p.decluster), quantile=float(p.quantile), ) except: data = MixedData( raw, eval(p.cats), decluster=eval(p.decluster), quantile=float(p.quantile), ) else: if eval(p.sphere): data = Data_From_Sphere(raw) else: try: data = Data_From_Raw( raw, decluster=eval(p.decluster), quantile=float(p.quantile), ) except: data = Data_From_Raw( raw, decluster=eval(p.decluster), quantile=float(p.quantile), ) ## If there's a supplied outcome, initialize it
class Result(object):
    """Posterior container for the log-normal-prior mixture model variant.

    Restores sampler output from a pickle and generates posterior
    predictive draws on the gamma, hypercube, or angular scale.
    """

    def generate_posterior_predictive_gammas(self, n_per_sample = 1, m = 10):
        """Simulate gamma-scale observations from the posterior predictive.

        Candidate cluster shapes are drawn as exp(mu + L z) with L the
        Cholesky factor of Sigma and z standard normal; occupied clusters
        keep their sampled zeta values.
        """
        draws = []
        for s in range(self.nSamp):
            delta = self.samples.delta[s]
            # Occupancy counts, padded so the m candidate slots exist.
            counts = np.bincount(delta, minlength = int(delta.max() + 1 + m))
            # Each empty slot receives weight eta / m.
            weights = counts + (counts == 0) * self.samples.eta[s] / m
            # m fresh cluster shapes from the log-normal prior.
            chol = cholesky(self.samples.Sigma[s])
            fresh_zeta = np.exp(
                self.samples.mu[s].reshape(1, self.nCol)
                + (chol @ normal(size = (self.nCol, m))).T
                )
            picks = generate_indices(weights / weights.sum(), n_per_sample)
            zeta = np.vstack((self.samples.zeta[s], fresh_zeta))[picks]
            draws.append(gamma(shape = zeta))
        return np.vstack(draws)

    def generate_posterior_predictive_hypercube(self, n_per_sample = 1, m = 10):
        """Posterior predictive draws projected onto the unit hypercube."""
        gammas = self.generate_posterior_predictive_gammas(n_per_sample, m)
        return euclidean_to_hypercube(gammas)

    def generate_posterior_predictive_angular(self, n_per_sample = 1, m = 10):
        """Posterior predictive draws in angular coordinates."""
        hyp = self.generate_posterior_predictive_hypercube(n_per_sample, m)
        return euclidean_to_angular(hyp)

    def write_posterior_predictive(self, path, n_per_sample = 1):
        """Write angular posterior predictive draws to a CSV at *path*."""
        angular = self.generate_posterior_predictive_angular(n_per_sample)
        frame = pd.DataFrame(
            angular,
            columns = ['theta_{}'.format(i) for i in range(1, self.nCol)],
            )
        frame.to_csv(path, index = False)
        return

    def load_data(self, path):
        """Load pickled sampler output; rebuild self.samples and self.data."""
        # NOTE(review): pickle.load is unsafe on untrusted input; assumes
        # *path* was produced by this package's own writer.
        with open(path, 'rb') as file:
            out = pickle.load(file)

        deltas = out['deltas']
        zetas = out['zetas']
        mus = out['mus']

        self.nSamp = deltas.shape[0]
        self.nDat = deltas.shape[1]
        self.nCol = mus.shape[1]

        self.data = Data_From_Sphere(out['V'])
        try:
            # The stored outcome 'Y' is optional.
            self.data.fill_outcome(out['Y'])
        except KeyError:
            pass

        samples = Samples_(self.nSamp, self.nDat, self.nCol)
        samples.delta = deltas
        samples.eta = out['etas']
        # zetas is stored flat with the iteration index in column 0;
        # regroup into one array of cluster rows per iteration.
        samples.zeta = [
            zetas[zetas.T[0] == s, 1:] for s in range(self.nSamp)
            ]
        samples.mu = mus
        samples.Sigma = out['Sigmas'].reshape(self.nSamp, self.nCol, self.nCol)
        samples.r = out['rs']
        self.samples = samples
        return

    def __init__(self, path):
        self.load_data(path)
        return