def burn_tests():
    graph = burn()
    mcmc = MCMC(graph=graph)
    samples = mcmc.gibbs(10000, 10000)
    mean, f_mean = Tests.sample_dim(samples, 'M')
    print("P(MapoDoufu | Burn=false) = <", mean, ", ", f_mean, ">")
    print()
def decMCMC(self, data, parguess, bounds, steps):
    # Deconvolution with MCMC
    self.deconvolute(data, parguess, bounds, False)
    parguess = self.pars
    Norm = np.max(data.current) / 100
    Y = data.current / Norm
    corr = np.asarray(
        [Norm if n % 3 == 0 else 1 for n in np.arange(len(parguess))])
    _, bounds, corr = self.checkParams(parguess, np.asarray(bounds), corr,
                                       self.shape)
    parguess = [par / cor for par, cor in zip(self.pars, corr)]
    try:
        mcmc = MCMC(self.model, parguess, bounds)
        mcmc.pg.connect(self.pg.emit)
        st = 0.5 * np.ones(len(parguess))
        st[[
            int(np.sum(self.args[:i]) + 2) for i in range(0, len(self.args))
        ]] = 0.1
        st[[
            int(np.sum(self.args[:i + 1]) - 1)
            for i in range(0, len(self.args)) if self.args[i] == 4
        ]] = 0.01
        # mcmc.step = [0.1 if (n-1)%3 == 0 else 0.5 for n in np.arange(len(parguess))]
        mcmc.step = st
        self.pars, self.perr = mcmc(data.X, Y, steps, corr=corr)
        self.pars_u = [
            ufloat(p, abs(e)) for p, e in zip(self.pars, self.perr)
        ]
        # Calculate each peak
        for i, name in enumerate(self.names):
            indx = int(np.sum(self.args[:i]))
            self.peaks[name] = self.Peak(
                self, data.X, *self.pars[indx:indx + self.args[i]],
                shape=self.shape[i])
            self.peaks_u[name] = self.Peak(
                self, data.X, *self.pars_u[indx:indx + self.args[i]],
                shape=self.shape[i], uncert=True)
        self.peaks['cumulative'] = self.model(
            data.X, *self.pars)  # save the fit result
        self.peaks_u['cumulative'] = self.model(
            data.X, *self.pars_u, uncert=True)  # save the fit result
        self.fitGoodness = self.goodness(data.current,
                                         self.peaks['cumulative'])
        self.FWHM()  # calculate the FWHM
        self.Area()  # calculate the areas
        self.printResult(data)  # print the fit report
    except Exception as e:
        print('exception\n', e)
def tanks_tests():
    graph = tanks()
    mcmc = MCMC(graph=graph)
    samples = mcmc.gibbs(50, 100000)
    Tests.mixing_plot(samples, 'num_tanks')
    Tests.plot_distribution(samples, 'num_tanks')
    Tests.mixing_plot(samples, 'A')
    Tests.plot_distribution(samples, 'A')
def faculty_evaluation_tests():
    graph = faculty_evals()
    mcmc = MCMC(graph=graph)
    samples = mcmc.gibbs(500, 10000)
    Tests.mixing_plot(samples, 'mu')
    Tests.mixing_plot(samples, 'sigma2')
    Tests.plotposterior([s['mu'] for s in samples], faculty_mean_prior,
                        'mean', 5.0, 6.5)
    Tests.plotposterior([s['sigma2'] for s in samples], faculty_var_prior,
                        'var', 0.0001, 1.0)
def progress_tests():
    graph = progress()
    mcmc = MCMC(graph=graph)
    samples = mcmc.gibbs(2000, 50000)
    Tests.plot_distribution(samples, 'Encounters')
    encounters = [s['Encounters'] for s in samples]
    median = np.median(encounters)
    print('median', median)
def golfer_network_tests():
    graph = golf()
    mcmc = MCMC(graph=graph)
    samples = mcmc.gibbs(1000, 100000)
    print('woohoo! the golfers finished!')
    with open('golf_samples.pickle', 'wb') as gs:
        pickle.dump(samples, gs)
    with open('golf_graph.pickle', 'wb') as gg:
        pickle.dump(graph, gg)
def __init__(self, NP, means, mins, maxs, sds, outfile, errlev=0.1,
             goodchi2=350.0):
    """Instantiates the class by synthetically generating data."""
    MCMC.__init__(self, TargetAcceptedPoints=1000, NumberOfParams=NP,
                  Mins=mins, Maxs=maxs, SDs=sds,
                  write2file=True, outputfilename=outfile, alpha=0.1,
                  debug=False,
                  EstimateCovariance=True, CovNum=100, goodchi2=goodchi2)
    lcurv.readmap()
    lcurv.mockdata(means, errlev)
def hyper_alarm_inference():
    legs = [10, 25, 50, 100, 1000]
    model = 'lab'
    for n in legs:
        name = model + '-inf_' + str(n)
        graph = hyper_alarm(val_dict=name, inference=True)
        mcmc = MCMC(graph=graph)
        samples = mcmc.gibbs(2000, 100000)
        mean, f_mean = Tests.sample_dim(samples, 'B')
        print(mean)
def pareto_poisson_tests():
    graph = pareto()
    mcmc = MCMC(graph=graph)
    samples = mcmc.gibbs(2000, 100000)
    for node in graph.hidden_nodes:
        # Tests.mixing_plot(samples, node.name)
        Tests.plot_distribution(samples, node.name)
        encounters = [s[node.name] for s in samples]
        median = np.median(encounters)
        print('median', median)
def thomas_tests():
    graph = thomas()
    mcmc = MCMC(graph=graph)
    samples = mcmc.gibbs(10000, 10000)
    mean, f_mean = Tests.sample_dim(samples, 'Cross')
    print("P(Cross | Diesel=true, Diesel10=true) = <", mean, ", ", f_mean, ">")

    graph = thomas()
    graph.node_dict['Diesel'].observed = False
    graph.node_dict['Diesel10'].observed = False
    graph.node_dict['Thomas'].observed = True
    mcmc = MCMC(graph=graph)
    samples = mcmc.gibbs(10000, 10000)
    mean, f_mean = Tests.sample_dim(samples, 'Cross')
    print("P(Cross | Thomas=true) = <", mean, ", ", f_mean, ">")

    graph = thomas()
    graph.node_dict['Diesel'].observed = False
    graph.node_dict['Diesel10'].observed = False
    graph.node_dict['Cross'].observed = True
    mcmc = MCMC(graph=graph)
    samples = mcmc.gibbs(10000, 10000)
    mean, f_mean = Tests.sample_dim(samples, 'Diesel')
    print("P(Diesel | Cross=true) = <", mean, ", ", f_mean, ">")
    print()
def wacky_network_tests():
    graph = wacky()
    # graph.node_dict['G'].observed = True
    mcmc = MCMC(graph=graph)
    samples = mcmc.gibbs(20000, 2000000)
    with open('wacky_G_samples.pickle', 'wb') as ws:
        pickle.dump(samples, ws)
    with open('wacky_G_graph.pickle', 'wb') as wg:
        pickle.dump(graph, wg)
    for node in graph.nodes:
        Tests.mixing_plot(samples, node.name)
        Tests.plot_distribution(samples, node.name)
def hyper_faculty_tests():
    meta_samples = []
    for n in range(5):
        graph = faculty_evals_1hyper(n)
        mcmc = MCMC(graph=graph)
        samples = mcmc.gibbs(10000, 100000)
        print(n)
        Tests.plot_multi(samples, ['mu', 'sigma2'])
        print(graph.nodes)
        for node in graph.hidden_nodes:
            if node.name != 'mu' and node.name != 'sigma2':
                Tests.mixing_plot(samples, node.name)
                Tests.plot_distribution(samples, node.name)
def aufg15c():
    normal = MCMC(2, -3, 2)
    sample = normal.sample(15, 10000)
    bins = plt.hist(sample, bins=50, density=True, label="Sampled")[1]
    x = np.linspace(bins[0], bins[-1], 1000)
    plt.plot(x, normal.pdf(x), label="PDF")
    plt.xlabel("$x$")
    plt.ylabel("Probability density")
    plt.legend()
    # plt.show()
    plt.savefig("A15c.pdf")
    plt.clf()
    return sample
def SMC(n, k, N, n_steps, verbose=False):
    # Start the timer
    start = time.time()

    # Build the puzzle
    puzzle = Puzzle(n, k)
    print('Number of pieces in puzzle %s' % puzzle.d)

    # Initialize the particles: one random permutation of the piece indices per particle
    particles = (np.ones((N, puzzle.d)) * np.arange(puzzle.d)).astype(int)
    particles = np.array(
        [np.random.permutation(particles[i][:]) for i in range(N)])
    t = 0
    av_loss = []

    # Annealing kernel
    kernel = Kernel(t)

    # Compute the initial loss
    av_loss.append(np.mean(puzzle.loss_global(particles)))
    if verbose:
        print('Initial average number of non-matching edges: %s' % av_loss[-1])
        print('')

    # Note: max_iter is not defined in this function; it is assumed to be a
    # module-level constant.
    while t < max_iter:
        if verbose:
            print('Step %s' % (t + 1))

        # Annealed loss
        loss = TargetLoss(puzzle.loss, 2 * np.log(t + np.exp(1)))

        # Re-sample according to the importance weights
        isampler = IS(loss)
        isampler.fit(particles)
        particles = isampler.resample()

        # Forward search
        mcmc = MCMC(loss, kernel, n_steps)
        particles = mcmc.forward(particles)

        # Track performance
        av_loss.append(np.mean(puzzle.loss_global(particles)))
        if verbose:
            print('Current average number of non-matching edges: %s' % av_loss[-1])
            print('------------------------------------------------------------------')

        # Update t
        t += 1

    end = time.time()
    return av_loss, t, end - start
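# A minimal usage sketch for SMC, assuming Puzzle, Kernel, TargetLoss, IS,
# and MCMC are importable from this project and max_iter is set at module
# level. The puzzle size (n=4, k=4), particle count (N=100), and MCMC step
# count (n_steps=10) are illustrative values, not defaults from the source.
if __name__ == '__main__':
    av_loss, n_iter, elapsed = SMC(4, 4, N=100, n_steps=10, verbose=True)
    print('Finished %d annealing steps in %.1f s; final average loss %.3f'
          % (n_iter, elapsed, av_loss[-1]))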
def hyper_alarm_learning_tests():
    legs = [1, 10, 25, 50, 75, 100, 250, 500, 750, 1000]
    prior = 'lab'
    model = '01_50'
    for n in legs:
        graph = hyper_alarm_learn('alarm-gen-' + model + '.json', n=n,
                                  val_dict=prior)
        mcmc = MCMC(graph=graph)
        samples = mcmc.gibbs(1000, 1000)
        mean, f_mean = Tests.sample_dim(samples, 'b_B')
        print(mean, f_mean)
        # mean, f_mean = Tests.sample_dim(samples, 'b_E')
        # print(mean, f_mean)
        name = 'alarm-' + model + '_' + str(n) + '_samples.pickle'
        with open(name, 'wb') as f:
            pickle.dump(samples, f)
def hyper_alarm_generate():
    val_dict = 'orig'
    graph = hyper_alarm(val_dict)
    mcmc = MCMC(graph=graph)
    samples = mcmc.gibbs(10000, 30000)
    mean, f_mean = Tests.sample_dim(samples, 'B')
    print(mean, f_mean)
    mean, f_mean = Tests.sample_dim(samples, 'E')
    print(mean, f_mean)
    saved_samples = []
    for i, sample in enumerate(samples):
        if i < 3001 and i % 3 == 0:
            # print(sample)
            saved_samples.append(sample)
    with open('alarm-gen-' + val_dict + '.json', 'w') as f:
        json.dump(saved_samples, f)
def beta_bernoulli_tests():
    graph = beta_bernoulli()
    mcmc = MCMC(graph=graph)
    samples = mcmc.gibbs(5000, 100000)
    Tests.plotposterior([s['A'] for s in samples], beta_expected_t,
                        'beta-bernoulli', 0, 1)

    graph = beta_bernoulli(b=0)
    mcmc = MCMC(graph=graph)
    samples = mcmc.gibbs(13000, 100000)
    Tests.plotposterior([s['A'] for s in samples], beta_expected_f,
                        'beta-bernoulli', 0, 1)
def home_or_school_tests():
    graph = home_or_school()
    mcmc = MCMC(graph=graph)
    samples = mcmc.gibbs(10000, 10000)
    mean, f_mean = Tests.sample_dim(samples, 'AS')
    print("P(AS | IA=false) = <", mean, ", ", f_mean, ">")

    graph = home_or_school()
    mcmc = MCMC({'IA': 1}, graph=graph)
    samples = mcmc.gibbs(10000, 10000)
    mean, f_mean = Tests.sample_dim(samples, 'AS')
    print("P(AS | IA=true) = <", mean, ", ", f_mean, ">")
    print()
def dirty_roommates_tests():
    graph = dirty_roommates()
    mcmc = MCMC(graph=graph)
    samples = mcmc.gibbs(10000, 10000)
    mean, f_mean = Tests.sample_dim(samples, 'CK')
    print("P(CK | DR=false) = <", mean, ", ", f_mean, ">")

    graph = dirty_roommates()
    graph.node_dict['DR'].observed = False
    graph.node_dict['CK'].observed = True
    mcmc = MCMC({'CK': 1}, graph=graph)
    samples = mcmc.gibbs(10000, 10000)
    mean, f_mean = Tests.sample_dim(samples, 'IT')
    print("P(IT | CK=true) = <", mean, ", ", f_mean, ">")
class MCMCTest(unittest.TestCase):

    def setUp(self):
        freq = {'A': .292, 'C': .213, 'G': .237, 'T': .258}
        k = 22.2
        alpha = .0005
        tau = 1
        draws = 100
        filename = 'infile'
        self.mcmc = MCMC(freq, k, alpha, tau, draws, filename)

    def test_constructor(self):
        self.assertEqual(self.mcmc.tau, 1)

    def test_fun_matrix(self):
        self.assertEqual(len(self.mcmc.matrix), 64)

    def test_topology(self):
        nodes = self.mcmc.tree.get_internal()
        self.mcmc.target = nodes[0]
        self.mcmc.generate_topology(self.mcmc.target)
        # print [node.sequence for node in self.mcmc.old_topo]
        # print [node.sequence for node in self.mcmc.new_topo]

    def test_calc_matrix(self):
        nodes = self.mcmc.tree.nodes()
        # print len(nodes)
        # for node in nodes:
        #     print node.sequence
        self.mcmc.target = nodes[1]
        # print self.mcmc.target.sequence
        # print self.mcmc.target.parent.sequence
        # print self.mcmc.target.sibling.sequence
        # print self.mcmc.target.left.sequence
        # print self.mcmc.target.right.sequence
        self.mcmc.calc_fun_matrix(self.mcmc.target, self.mcmc.new_topo)
        self.assertEqual(len(self.mcmc.matrix), 64)

    def test_select_time(self):
        nodes = self.mcmc.tree.get_internal()
        self.mcmc.target = nodes[0]
        topo = self.mcmc.generate_topology(self.mcmc.target)
        self.mcmc.calc_fun_matrix(self.mcmc.target, self.mcmc.new_topo)

    def test_select_seq(self):
        nodes = self.mcmc.tree.get_internal()
        self.mcmc.target = nodes[0]
        target = self.mcmc.target
        topo = self.mcmc.generate_topology(self.mcmc.target)
        self.mcmc.calc_fun_matrix(self.mcmc.target, self.mcmc.new_topo)
        time = self.mcmc.select_time(target, topo)
        result = self.mcmc.select_sequence(target, topo, time)
        # print target.sequence
        # print result
        # print target.time
        # print time

    def test_accept(self):
        nodes = self.mcmc.tree.get_internal()
        self.mcmc.target = nodes[0]
        target = self.mcmc.target
        topo = self.mcmc.generate_topology(self.mcmc.target)
        self.mcmc.calc_fun_matrix(self.mcmc.target, self.mcmc.new_topo)
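# A minimal runner sketch: executing this module directly discovers and runs
# the MCMCTest cases above, assuming the MCMC class and its 'infile' input
# file are available on the test path.
if __name__ == '__main__':
    unittest.main()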
def alarm_tests():
    mcmc = MCMC()
    samples = mcmc.gibbs(10000, 10000)
    mean, f_mean = Tests.sample_dim(samples, 'B')
    print("P(Burglary | JohnCalls=true, MaryCalls=true) = <", mean, ", ", f_mean, ">")
    mean, f_mean = Tests.sample_dim(samples, 'A')
    print("P(Alarm | JohnCalls=true, MaryCalls=true) = <", mean, ", ", f_mean, ">")
    mean, f_mean = Tests.sample_dim(samples, 'E')
    print("P(Earthquake | JohnCalls=true, MaryCalls=true) = <", mean, ", ", f_mean, ">")

    mcmc = MCMC({'J': 1., 'M': 0.})
    samples = mcmc.gibbs(10000, 10000)
    mean, f_mean = Tests.sample_dim(samples, 'B')
    print("P(Burglary | JohnCalls=true, MaryCalls=false) = <", mean, ", ", f_mean, ">")

    mcmc = MCMC({'J': 1.})
    mcmc.graph.node_dict['M'].observed = False
    samples = mcmc.gibbs(10000, 10000)
    mean, f_mean = Tests.sample_dim(samples, 'B')
    print("P(Burglary | JohnCalls=true) = <", mean, ", ", f_mean, ">")

    mcmc = MCMC({'M': 1.})
    mcmc.graph.node_dict['J'].observed = False
    samples = mcmc.gibbs(10000, 10000)
    mean, f_mean = Tests.sample_dim(samples, 'B')
    print("P(Burglary | MaryCalls=true) = <", mean, ", ", f_mean, ">")
    print()
def normal_normal_tests():
    graph = normal_normal()
    mcmc = MCMC(graph=graph)
    samples = mcmc.gibbs(500, 10000)
    Tests.plotposterior([s['A'] for s in samples], normal_expected,
                        'normal-normal', -3, 3)
np.savetxt("./mcmc/log_theta_sigma2.txt",
           dgp.session.run(dgp.log_theta_sigma2), fmt="%f", delimiter='\t')
np.savetxt("./mcmc/log_theta_lengthscale.txt",
           dgp.session.run(dgp.log_theta_lengthscale), fmt="%f", delimiter='\t')
np.savetxt("./mcmc/log_lambda.txt",
           dgp.session.run([dgp.likelihood.log_var]), fmt="%f", delimiter='\t')

## Run the MCMC sampler and save some quantities of interest for comparison
## with the variational approximation
samples_F1, samples_F2, predictions_F1, predictions_F2 = MCMC(
    data.X, data.Y, test.X)
np.savetxt("./mcmc/predictions_MCMC_F1.txt", predictions_F1, fmt="%f",
           delimiter='\t')
np.savetxt("./mcmc/predictions_MCMC_F2.txt", predictions_F2, fmt="%f",
           delimiter='\t')
np.savetxt("./mcmc/samples_MCMC_F1.txt", samples_F1, fmt="%f", delimiter='\t')
np.savetxt("./mcmc/samples_MCMC_F2.txt", samples_F2, fmt="%f", delimiter='\t')
# Running mean of the samples; x and cumsum are assumed to be defined earlier
# (e.g. cumsum = np.zeros(len(x))).
count = 0
xsum = 0
for i in range(len(x)):
    xsum += x[i]
    count = i + 1
    cumsum[i] = xsum / count
plt.plot(cumsum, label=u'E(X) Convergence')
plt.show()

#########################################################################

import numpy as np
import matplotlib.pyplot as plt
from mcmc import MCMC, DataDistribution

# Four-component Gaussian mixture as the target density
mu1 = [-2.0, 2.0, -5.0, -10.0]
sd1 = [1.0, 0.5, 0.5, 0.5]
w1 = [0.2, 0.4, 0.3, 0.1]
mixtureDens1 = DataDistribution(mu1, sd1, w1)

x0 = np.random.rand(1)
sigma = 5
n = int(10000)

sim = MCMC(x0=x0, sigma=sigma, n=n)
x_out = sim.mcmc(mixtureDens1.gaussian_mixture)
sim.plotHistorgram(mixtureDens1.gaussian_mixture, x_out, xrange=[-15, 5])
sim.plotConvergence(x_out)
def gamma_poisson_tests():
    graph = gamma_poisson()
    mcmc = MCMC(graph=graph)
    samples = mcmc.gibbs(50000, 10000)
    Tests.plotposterior([s['L'] for s in samples], gamma_expected,
                        'gamma-poisson', 0, 12)