def initial_pool(prior_obj, eps0, N_particles, N_threads=1):
    """ Initial Pool

    Draw the initial ABC particle pool: N_particles samples accepted
    against the starting threshold eps0, either serially or fanned out
    over N_threads worker processes. Returns the parameter draws, their
    importance weights, the distances, and a weighted covariance.
    """
    sampler_args = [[i, prior_obj, eps0, N_particles]
                    for i in xrange(N_particles)]

    if N_threads > 1:
        # Fan the independent draws out over a worker pool.
        pool = InterruptiblePool(processes=N_threads)
        raw = pool.map(initial_pool_sampling, sampler_args)
        pool.close()
        pool.terminate()
        pool.join()
    else:
        raw = [initial_pool_sampling(arg) for arg in sampler_args]

    # Stack and transpose so each row is one quantity across particles.
    stacked = np.array(raw).T
    n_p = prior_obj.n_params
    theta_t = stacked[1:n_p + 1, :]   # parameter draws (row 0 is the index)
    w_t = stacked[n_p + 1, :]         # importance weights
    rhos = stacked[n_p + 2, :]        # distances to the data
    sig_t = covariance(theta_t, w_t)  # weighted covariance of the pool
    return theta_t, w_t, rhos, sig_t
def pmc_abc(N_threads = N_threads): # initial pool theta_t, w_t, rhos, sig_t = initial_pool() t = 0 # iternation number plot_thetas(theta_t , w_t, t) plt.savefig("/home/mj/public_html/scatter_hod_gaussian_t"+str(t)+".png") plt.close() while t < N_iter: eps_t = np.percentile(rhos, 75) print 'New Distance Threshold Eps_t = ', eps_t theta_t_1 = theta_t.copy() w_t_1 = w_t.copy() sig_t_1 = sig_t.copy() """these lines are borrowed from initial sampling to double-check multiprocessing""" #pool = InterruptiblePool(processes = N_threads) #mapfn = pool.map #args_list = [i for i in xrange(N_particles)] #results = mapfn(initial_pool_sampling, args_list) #pool.close() #pool.terminate() #pool.join() pool = InterruptiblePool(processes = N_threads) mapfn = pool.map args_list = [[i, theta_t_1, w_t_1, sig_t_1, eps_t] for i in xrange(N_particles)] #results = [] #for args in args_list: # pool_sample = importance_pool_sampling(args) # results.append( pool_sample ) results = mapfn(importance_pool_sampling, args_list) pool.close() pool.terminate() pool.join() sig_t = np.cov(theta_t) results = np.array(results).T theta_t = results[1:n_params+1,:] w_t = results[n_params+1,:] rhos = results[n_params+2,:] sig_t = np.cov(theta_t) t += 1 plot_thetas(theta_t, w_t , t) plt.savefig("/home/mj/public_html/scatter_hod_gaussian_t"+str(t)+".png") plt.close()
def pmc_abc(prior_dict, N_particles=100, N_iter=30, eps0=20.0, N_threads=1): """ """ prior_obj = Prior(prior_dict) # initial pool theta_t, w_t, rhos, sig_t = initial_pool(prior_obj, eps0, N_particles, N_threads=N_threads) t = 0 # iternation number #plot_thetas(theta_t , w_t, prior_dict, t) while t < N_iter: eps_t = np.percentile(rhos, 75) print 'New Distance Threshold Eps_t = ', eps_t theta_t_1 = theta_t.copy() w_t_1 = w_t.copy() sig_t_1 = sig_t.copy() args_list = [[i, prior_obj, theta_t_1, w_t_1, sig_t_1, eps_t] for i in xrange(N_particles)] if N_threads > 1: pool = InterruptiblePool(processes=N_threads) mapfn = pool.map results = mapfn(importance_pool_sampling, args_list) pool.close() pool.terminate() pool.join() else: results = [] for args in args_list: pool_sample = importance_pool_sampling(args) results.append(pool_sample) results = np.array(results).T theta_t = results[1:prior_obj.n_params + 1, :] w_t = results[prior_obj.n_params + 1, :] rhos = results[prior_obj.n_params + 2, :] sig_t = covariance(theta_t, w_t) t += 1 plot_thetas(theta_t, w_t, prior_dict, t)
def pmc_abc(N_threads=N_threads): # initial pool theta_t, w_t, rhos, sig_t = initial_pool() w_t = w_t / np.sum(w_t) t = 0 # iternation number plot_thetas(theta_t, w_t, t) while t < N_iter: if t < 4: eps_t = np.percentile(np.atleast_2d(rhos), 20, axis=1) else: eps_t = np.percentile(np.atleast_2d(rhos), 50, axis=1) print 'New Distance Threshold Eps_t = ', eps_t, "t=", t theta_t_1 = theta_t.copy() w_t_1 = w_t.copy() sig_t_1 = sig_t.copy() args_list = [[i, theta_t_1, w_t_1, sig_t_1, eps_t] for i in xrange(N_particles)] """serial""" results = [] #for args in args_list: # pool_sample = importance_pool_sampling(args) # results.append( pool_sample ) """parallel""" pool = InterruptiblePool(processes=N_threads) mapfn = pool.map results = mapfn(importance_pool_sampling, args_list) pool.close() pool.terminate() pool.join() results = np.array(results).T theta_t = results[1:n_params + 1, :] w_t = results[n_params + 1, :] w_t = w_t / np.sum(w_t) rhos = results[n_params + 2:, :] #sig_t = knn_sigma(theta_t , k = 10) sig_t = 2. * covariance(theta_t, w_t) t += 1 plot_thetas(theta_t, w_t, t)
def initial_pool():
    """ Draw the initial particle pool in parallel and unpack it.

    Uses the module-level N_threads and N_particles; returns the
    parameter draws, weights, distances, and their plain covariance.
    """
    worker_pool = InterruptiblePool(processes=N_threads)
    particle_ids = [i for i in xrange(N_particles)]
    raw = worker_pool.map(initial_pool_sampling, particle_ids)
    worker_pool.close()
    worker_pool.terminate()
    worker_pool.join()

    # Transpose so each row is one quantity across all particles;
    # row 0 carries the particle index.
    stacked = np.array(raw).T
    theta_t = stacked[1:n_params + 1, :]
    w_t = stacked[n_params + 1, :]
    rhos = stacked[n_params + 2, :]
    sig_t = np.cov(theta_t)
    return theta_t, w_t, rhos, sig_t
def initial_pool():
    """ Build the initial particle pool across N_threads workers.

    Normalizes the importance weights and keeps every distance row
    (rhos may be multi-component). Covariance is the weighted one.
    """
    particle_ids = np.arange(N_particles)

    pool = InterruptiblePool(processes=N_threads)
    raw = pool.map(initial_pool_sampling, particle_ids)
    pool.close()
    pool.terminate()
    pool.join()

    # Row 0 of the stacked output is the particle index.
    stacked = np.array(raw).T
    theta_t = stacked[1:n_params + 1, :]
    w_t = stacked[n_params + 1, :]
    w_t = w_t / np.sum(w_t)            # normalize importance weights
    rhos = stacked[n_params + 2:, :]   # all remaining rows: distances
    sig_t = covariance(theta_t, w_t)
    return theta_t, w_t, rhos, sig_t
def pmc_abc(self): """ """ self.rhos = self.initial_pool() while self.t < self.T: self.eps_t = np.percentile(self.rhos, 75) print 'Epsilon t', self.eps_t self.theta_t_1 = self.theta_t.copy() self.w_t_1 = self.w_t.copy() self.sig_t_1 = self.sig_t.copy() pool = InterruptiblePool(self.Nthreads) mapfn = pool.map args_list = [ i for i in xrange(self.N) ] results = mapfn(unwrap_self_importance_sampling, zip([self]*len(args_list), args_list)) pool.close() pool.terminate() pool.join() pars = np.array(results).T self.theta_t = pars[1:self.n_params+1,:].copy() self.w_t = pars[self.n_params+1,:].copy() self.rhos = pars[self.n_params+2,:].copy() self.sig_t = 2.0 * np.cov(self.theta_t) self.t += 1 self.writeout() self.plotout() return None