def per_iteration(lmparams, i, resids, mod_pars, photo_data, rv_data, *args, **kws):
    # lmfit iteration callback: report progress every 10 iterations.
    if i % 10 == 0:
        ncores, fname = args

        params = utilfuncs.get_lmfit_parameters(mod_pars, lmparams)
        redchisqr = utilfuncs.reduced_chisqr(mod_pars, params, photo_data, rv_data)

        utilfuncs.iterprint(mod_pars, params, 0.0, redchisqr, 0.0, 0.0)
        utilfuncs.report_as_input(mod_pars, params, fname)
def per_iteration(mod_pars, theta, lnl, model):
    # MultiNest likelihood hook: report whenever a new maximum log-likelihood is found.
    global max_lnlike
    if lnl > max_lnlike:
        max_lnlike = lnl
        params = utilfuncs.split_parameters(theta, mod_pars[0])

        # Reduced chi-square of the photometric data; the denominator counts the free
        # parameters (5 per body plus 6 orbital elements per non-central body).
        redchisqr = np.sum(((photo_data[1] - model) / photo_data[2]) ** 2) / \
            (photo_data[1].size - 1 - (mod_pars[0] * 5 + (mod_pars[0] - 1) * 6))

        utilfuncs.iterprint(mod_pars, params, max_lnlike, redchisqr, 0.0, 0.0)
        utilfuncs.report_as_input(mod_pars, params, fname)
def generate(lmod_pars, lparams, lphoto_data, lrv_data, lncores, lfname):
    global mod_pars, params, photo_data, rv_data, ncores, fname
    mod_pars, params, photo_data, rv_data, ncores, fname = \
        lmod_pars, lparams, lphoto_data, lrv_data, lncores, lfname

    # number of dimensions our problem has
    parameters = ["{0}".format(i) for i in range(mod_pars[0] * 5 + (mod_pars[0] - 1) * 6)]
    nparams = len(parameters)

    # make sure the output directories exist
    if not os.path.exists("./output/{0}/multinest".format(fname)):
        os.makedirs(os.path.join("./", "output", "{0}".format(fname), "multinest"))

    if not os.path.exists("./output/{0}/plots".format(fname)):
        os.makedirs(os.path.join("./", "output", "{0}".format(fname), "plots"))

    if not os.path.exists("chains"):
        os.makedirs("chains")

    # we want to see some output while it is running
    progress_plot = pymultinest.ProgressPlotter(n_params=nparams,
                                                outputfiles_basename='output/{0}/multinest/'.format(fname))
    progress_plot.start()
    # progress_print = pymultinest.ProgressPrinter(n_params=nparams,
    #                                              outputfiles_basename='output/{0}/multinest/'.format(fname))
    # progress_print.start()

    # run MultiNest
    pymultinest.run(lnlike, lnprior, nparams,
                    outputfiles_basename=u'./output/{0}/multinest/'.format(fname),
                    resume=True, verbose=True,
                    sampling_efficiency='parameter', n_live_points=1000)

    # run has completed
    progress_plot.stop()
    # progress_print.stop()

    # save parameter names
    json.dump(parameters, open('./output/{0}/multinest/params.json'.format(fname), 'w'))

    # plot the distribution of a posteriori possible models
    plt.figure()
    plt.plot(photo_data[0], photo_data[1], '+', color='red', label='data')

    a = pymultinest.Analyzer(outputfiles_basename="./output/{0}/multinest/".format(fname), n_params=nparams)
    for theta in a.get_equal_weighted_posterior()[::100, :-1]:
        params = utilfuncs.split_parameters(theta, mod_pars[0])
        mod_flux, mod_rv = utilfuncs.model(mod_pars, params, photo_data[0], rv_data[0])

        plt.plot(photo_data[0], mod_flux, '-', color='blue', alpha=0.3, label='model')

    utilfuncs.report_as_input(mod_pars, params, fname)

    plt.savefig('./output/{0}/plots/posterior.pdf'.format(fname))
    plt.close()
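
# --- Hedged sketch (not part of the original module) -----------------------
# pymultinest.run() above expects two callbacks, lnprior and lnlike, written
# in the MultiNest unit-hypercube convention: lnprior(cube, ndim, nparams)
# maps each cube[i] from [0, 1] to a physical value in place, and
# lnlike(cube, ndim, nparams) returns the log-likelihood of the transformed
# point. The bodies below are an assumption about how this module might
# implement them; the flat [0, 10] prior range is purely illustrative, and
# the utilfuncs calls mirror those made elsewhere in this file.
def lnprior(cube, ndim, nparams):
    # Map each unit-cube coordinate onto an assumed flat prior range.
    for i in range(ndim):
        cube[i] = cube[i] * 10.0


def lnlike(cube, ndim, nparams):
    # Rebuild the per-body parameter tuples and evaluate the model.
    theta = np.array([cube[i] for i in range(ndim)])
    params = utilfuncs.split_parameters(theta, mod_pars[0])
    mod_flux, mod_rv = utilfuncs.model(mod_pars, params, photo_data[0], rv_data[0])

    # Gaussian log-likelihood of the photometric data given the model.
    lnl = -0.5 * np.sum(((photo_data[1] - mod_flux) / photo_data[2]) ** 2)

    # Hook into the reporting callback defined above.
    per_iteration(mod_pars, theta, lnl, mod_flux)
    return lnl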
def generate(mod_pars, body_pars, photo_data, rv_data, fit_method, ncores, fname):
    nbodies, epoch, max_h, orbit_error, rv_body, rv_corr = mod_pars
    masses, radii, fluxes, u1, u2, a, e, inc, om, ln, ma = body_pars

    lmparams = lmParameters()
    # lmparams.add('N', value=N, vary=False)
    # lmparams.add('epoch', value=epoch, vary=False)
    # lmparams.add('maxh', value=maxh, vary=False)
    # lmparams.add('orbit_error', value=orbit_error, vary=False)

    for i in range(nbodies):
        # Per-body parameters: mass, radius, flux and quadratic limb darkening.
        lmparams.add('mass_{0}'.format(i), value=masses[i], min=0.0, max=0.1, vary=False)
        lmparams.add('radius_{0}'.format(i), value=radii[i], min=0.0, max=1.0, vary=False)
        lmparams.add('flux_{0}'.format(i), value=fluxes[i], min=0.0, max=1.0, vary=False)
        lmparams.add('u1_{0}'.format(i), value=u1[i], min=0.0, max=1.0, vary=False)
        lmparams.add('u2_{0}'.format(i), value=u2[i], min=0.0, max=1.0, vary=False)

        # if i < N-1:
        #     params['flux_{0}'.format(i)].vary = False
        #     params['u1_{0}'.format(i)].vary = False
        #     params['u2_{0}'.format(i)].vary = False

        if i > 0:
            # Orbital elements exist only for the non-central bodies.
            lmparams.add('a_{0}'.format(i), value=a[i - 1], min=0.0, max=10.0, vary=False)
            lmparams.add('e_{0}'.format(i), value=e[i - 1], min=0.0, max=1.0, vary=False)
            lmparams.add('inc_{0}'.format(i), value=inc[i - 1], min=0.0, max=np.pi, vary=False)
            lmparams.add('om_{0}'.format(i), value=om[i - 1], min=0.0, max=twopi)
            lmparams.add('ln_{0}'.format(i), value=ln[i - 1], min=0.0, max=twopi)
            lmparams.add('ma_{0}'.format(i), value=ma[i - 1], min=0.0, max=twopi)

    print('Generating maximum likelihood values...')
    results = minimize(residual, lmparams,
                       args=(mod_pars, photo_data, rv_data, ncores, fname),
                       iter_cb=per_iteration, method=fit_method)

    # Save the final outputs
    print('Writing report...')
    report_fit(results.params)
    utilfuncs.report_as_input(mod_pars, utilfuncs.get_lmfit_parameters(mod_pars, results.params), fname)

    # Return best fit values
    return utilfuncs.get_lmfit_parameters(mod_pars, results.params)
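
# --- Hedged sketch (not part of the original module) -----------------------
# minimize() above is handed a `residual` callback that is not shown in this
# section. lmfit calls it as residual(params, *args) and expects an array of
# weighted residuals. The body below is an assumed implementation built from
# the same utilfuncs helpers used elsewhere here; the exact weighting and the
# treatment of the RV data are guesses.
def residual(lmparams, mod_pars, photo_data, rv_data, ncores, fname):
    # Convert the lmfit Parameters object back into per-body tuples.
    params = utilfuncs.get_lmfit_parameters(mod_pars, lmparams)
    mod_flux, mod_rv = utilfuncs.model(mod_pars, params, photo_data[0], rv_data[0])

    # Photometric residuals weighted by the reported uncertainties.
    return (photo_data[1] - mod_flux) / photo_data[2]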
def generate(mod_pars, body_pars, photo_data, rv_data, nwalkers, ncores, fname, niterations=500):
    # Flatten body parameters
    theta = np.array(list(itertools.chain.from_iterable(body_pars)))

    # Set up the sampler.
    ndim = len(theta)

    # Nudge exact zeros so the relative perturbation below is non-degenerate.
    theta[theta == 0.0] = 1.0e-10
    pos0 = [theta + theta * 1.0e-3 * np.random.randn(ndim) for i in range(nwalkers)]

    sampler = emcee.EnsembleSampler(nwalkers, ndim, lnprob,
                                    args=(mod_pars, photo_data, rv_data), threads=ncores)

    # Clear and run the production chain.
    print("Running MCMC...")

    # Make sure paths exist
    if not os.path.exists("./output/{0}/reports".format(fname)):
        os.makedirs(os.path.join("./", "output", "{0}".format(fname), "reports"))

    # Setup some values for tracking time and completion
    citer, tlast, tsum = 0.0, time.time(), []

    for pos, lnp, state in sampler.sample(pos0, iterations=niterations, storechain=True):
        # Save out the chain for later analysis
        with open("./output/{0}/reports/mcmc_chain.dat".format(fname), "a+") as f:
            for k in range(pos.shape[0]):
                f.write("{0:4d} {1:s}\n".format(k, " ".join(map(str, pos[k]))))

        # Track per-iteration time and estimate the time remaining.
        citer += 1.0
        tsum.append(time.time() - tlast)
        tleft = np.median(tsum) * (niterations - citer)
        tlast = time.time()

        # Report the best walker of this iteration.
        maxlnprob = np.argmax(lnp)
        bestpos = pos[maxlnprob, :]
        params = utilfuncs.split_parameters(bestpos, mod_pars[0])
        redchisqr = utilfuncs.reduced_chisqr(mod_pars, params, photo_data, rv_data)

        utilfuncs.iterprint(mod_pars, params, lnp[maxlnprob], redchisqr, citer / niterations, tleft)
        utilfuncs.report_as_input(mod_pars, params, fname)

    # Remove 'burn in' region
    print('Burning in; creating sampler chain...')
    burnin = int(0.5 * niterations)
    samples = sampler.chain[:, burnin:, :].reshape((-1, ndim))

    # Compute the quantiles.
    print('Computing quantiles; mapping results...')
    results = list(map(
        lambda v: (v[1], v[2] - v[1], v[1] - v[0]),
        zip(*np.percentile(samples, [16, 50, 84], axis=0))
    ))

    # Produce final model and save the values
    print('Saving final results...')
    utilfuncs.mcmc_report_out(mod_pars, results, fname)
    utilfuncs.plot_out(params, fname, sampler, samples, ndim)
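
# --- Hedged sketch (not part of the original module) -----------------------
# EnsembleSampler above is given an `lnprob` callback that is not shown in
# this section. emcee calls it as lnprob(theta, *args) and expects the log
# posterior, or -inf for excluded regions. The body below is an assumed
# implementation: the bound checks are illustrative, the likelihood uses only
# the photometric data, and the utilfuncs calls mirror those used elsewhere.
def lnprob(theta, mod_pars, photo_data, rv_data):
    params = utilfuncs.split_parameters(theta, mod_pars[0])
    masses, radii, fluxes, u1, u2, a, e, inc, om, ln, ma = params

    # Reject unphysical states so the walkers stay inside the prior volume
    # (the specific bounds checked here are illustrative only).
    if np.any(np.asarray(masses) < 0.0) or np.any(np.asarray(e) < 0.0) or np.any(np.asarray(e) >= 1.0):
        return -np.inf

    mod_flux, mod_rv = utilfuncs.model(mod_pars, params, photo_data[0], rv_data[0])

    # Gaussian log-likelihood of the photometric data given the model.
    return -0.5 * np.sum(((photo_data[1] - mod_flux) / photo_data[2]) ** 2)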