def main():
    # Begin timing the estimation process
    start_time = time.time()

    # Run the MultiNest software
    pmn.run(binary_logit_log_likelihood,
            uniform_prior,
            num_dimensions,
            outputfiles_basename=relative_output_folder,
            n_live_points=num_live_points,
            sampling_efficiency=desired_sampling_efficiency,
            log_zero=-1e200,
            mode_tolerance=-1e180,
            null_log_evidence=-1e180,
            resume=False,
            verbose=True,
            init_MPI=False)

    # Record the ending time of the estimation process
    end_time = time.time()
    tot_minutes = (end_time - start_time) / 60.0

    # Save the parameter names
    with open(relative_output_folder + "parameter_names.json", 'w') as f:
        json.dump(explanatory_vars, f)

    # Save the number of live points, the sampling efficiency,
    # and the total estimation time
    model_run_params = {"n_live_points": num_live_points,
                        "sampling_efficiency": desired_sampling_efficiency,
                        "estimation_minutes": tot_minutes}
    with open(relative_output_folder + "model_run_parameters.json", "w") as f:
        json.dump(model_run_params, f)

    # Print a report on how long the estimation process took
    print("Estimation process took {:.2f} minutes".format(tot_minutes))
def multinest(parameter_names, transform, loglikelihood, output_basename, **problem):
    parameters = parameter_names
    n_params = len(parameters)

    def myprior(cube, ndim, nparams):
        # map the unit hypercube onto the parameter space
        params = transform([cube[i] for i in range(ndim)])
        for i in range(ndim):
            cube[i] = params[i]

    def myloglike(cube, ndim, nparams):
        l = loglikelihood([cube[i] for i in range(ndim)])
        return l

    # run MultiNest
    mn_args = dict(
        outputfiles_basename=output_basename,
        resume=problem.get('resume', False),
        verbose=True,
        n_live_points=problem.get('n_live_points', 400))
    if 'seed' in problem:
        mn_args['seed'] = problem['seed']
    pymultinest.run(myloglike, myprior, n_params, **mn_args)

    import json
    # store the parameter names, always useful
    with open('%sparams.json' % output_basename, 'w') as f:
        json.dump(parameters, f, indent=2)

    # analyse
    a = pymultinest.Analyzer(n_params=n_params, outputfiles_basename=output_basename)
    s = a.get_stats()
    with open('%sstats.json' % a.outputfiles_basename, mode='w') as f:
        json.dump(s, f, indent=2)
    return a
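# A minimal usage sketch for the wrapper above, assuming a two-parameter
# Gaussian toy problem; the names and ranges here are illustrative only,
# not from the original source.
def toy_transform(u):
    # stretch the unit hypercube onto [-10, 10] in each dimension
    return [20.0 * ui - 10.0 for ui in u]

def toy_loglike(params):
    # standard-normal log-likelihood, up to an additive constant
    return -0.5 * sum(p * p for p in params)

# analyzer = multinest(["x", "y"], toy_transform, toy_loglike,
#                      "chains/toy-", n_live_points=100, seed=1)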
def test():
    test.prior_was_called = False
    test.loglike_was_called = False
    test.dumper_was_called = False

    def myprior(cube, ndim, nparams):
        for i in range(ndim):
            cube[i] = cube[i] * 10 * math.pi
        test.prior_was_called = True

    def myloglike(cube, ndim, nparams):
        chi = 1.
        for i in range(ndim):
            chi *= math.cos(cube[i] / 2.)
        test.loglike_was_called = True
        return math.pow(2. + chi, 5)

    def mydumper(nSamples, nlive, nPar, physLive, posterior, paramConstr,
                 maxLogLike, logZ, logZerr, nullcontext):
        print("calling dumper")
        test.dumper_was_called = True

    # number of dimensions our problem has
    parameters = ["x", "y"]
    n_params = len(parameters)

    # run MultiNest
    pymultinest.run(myloglike, myprior, n_params,
                    resume=True, verbose=True, dump_callback=mydumper)

    assert test.prior_was_called
    assert test.loglike_was_called
    assert test.dumper_was_called
def fit_multinest(self, n_live_points=1000, basename="chains/1-",
                  verbose=True, overwrite=True, **kwargs):
    self._mnest_basename = basename

    # creates the directory for the output
    # folder = os.path.abspath(os.path.dirname(self._mnest_basename))
    # if not os.path.exists(self._mnest_basename):
    #     os.makedirs(self._mnest_basename)

    if hasattr(self, "which"):
        self.n_params = 9 + 6 * self.lc.n_planets
    else:
        self.n_params = 5 + 6 * self.lc.n_planets

    pymultinest.run(self.mnest_loglike, self.mnest_prior, self.n_params,
                    n_live_points=n_live_points,
                    outputfiles_basename=self._mnest_basename,
                    verbose=verbose, **kwargs)

    self._make_samples()
def multinest(optimizer, nprocs=1):
    # number of dimensions our problem has
    parameters = ["{0}".format(i) for i in range(len(optimizer.params.get_all(True)))]
    nparams = len(parameters)

    if not os.path.exists('chains'):
        os.mkdir('chains')

    def lnprior(cube, ndim, nparams):
        theta = np.array([cube[i] for i in range(ndim)])

        for i in range(len(optimizer.params.get_all(True))):
            param = optimizer.params.get_all(True)[i]

            if "mass_" in param.name:
                theta[i] = 10 ** (theta[i] * 8 - 9)
            elif "radius_" in param.name:
                theta[i] = 10 ** (theta[i] * 4 - 4)
            elif "flux_" in param.name:
                theta[i] = 10 ** (theta[i] * 4 - 4)
            elif "a_" in param.name:
                theta[i] = 10 ** (theta[i] * 2 - 2)
            elif "e_" in param.name:
                theta[i] = 10 ** (theta[i] * 3 - 3)
            elif "inc_" in param.name:
                theta[i] *= 2.0 * np.pi
            elif "om_" in param.name:
                theta[i] = 2.0 * np.pi * 10 ** (theta[i] * 2 - 2)
            elif "ln_" in param.name:
                theta[i] = 2.0 * np.pi * 10 ** (theta[i] * 8 - 8)
            elif "ma_" in param.name:
                theta[i] = 2.0 * np.pi * 10 ** (theta[i] * 2 - 2)

        for i in range(ndim):
            cube[i] = theta[i]

    def lnlike(cube, ndim, nparams):
        theta = np.array([cube[i] for i in range(ndim)])

        optimizer.params.update(theta)
        mod_flux, mod_rv = optimizer.model(nprocs)

        flnl = -(0.5 * ((mod_flux - optimizer.photo_data[1]) / optimizer.photo_data[2]) ** 2)
        rvlnl = -(0.5 * ((mod_rv - optimizer.rv_data[1]) / optimizer.rv_data[2]) ** 2)
        tlnl = np.sum(flnl) + np.sum(rvlnl)

        nobj = np.append(tlnl, theta)
        optimizer.chain = np.vstack([optimizer.chain, nobj])

        if tlnl > optimizer.maxlnp:
            optimizer.iterout(tlnl, theta, mod_flux)

        return tlnl

    # run MultiNest
    pymultinest.run(lnlike, lnprior, nparams, n_live_points=1000)
def main():
    cube = [0.9, 0.5, 0.1]  # initial values, not used
    ndim = len(cube)
    nparams = len(cube)

    os.chdir('/home/jordi/allst/sample')
    pm.run(F_calc_Likelihood4stmd, F_allpriors, nparams,
           importance_nested_sampling=False,
           resume=False,
           verbose=True,
           n_live_points=32,
           outputfiles_basename="DMco_",
           sampling_efficiency=0.02,
           const_efficiency_mode=True,
           init_MPI=False)
def run_multinest(posterior, save_file):
    """Uses the MultiNest sampler to calculate the evidence instead of emcee.

    posterior is a posterior class; it must have prior and lik methods.
    save_file is the path to save to; the run will resume from the file
    if it is already around."""
    # check if the path exists
    if not os.path.exists(save_file) and mpi.COMM_WORLD.rank == 0:
        os.mkdir(save_file)
    assert hasattr(posterior, 'prior') and hasattr(posterior, 'lik'), \
        'must have prior and lik methods'
    # run sampler
    pymultinest.run(posterior.lik, posterior.prior, posterior.get_dim(),
                    outputfiles_basename=save_file)
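# A minimal sketch of the posterior interface that run_multinest() expects;
# the class name and the flat [-5, 5] prior are illustrative assumptions.
class GaussianPosterior(object):
    def __init__(self, ndim=2):
        self.ndim = ndim

    def get_dim(self):
        return self.ndim

    def prior(self, cube, ndim, nparams):
        # map the unit hypercube onto [-5, 5] in place, as MultiNest expects
        for i in range(ndim):
            cube[i] = 10.0 * cube[i] - 5.0

    def lik(self, cube, ndim, nparams):
        # standard-normal log-likelihood, up to an additive constant
        return -0.5 * sum(cube[i] ** 2 for i in range(ndim))

# run_multinest(GaussianPosterior(), 'chains/gauss/')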
def generate(lmod_pars, lparams, lphoto_data, lrv_data, lncores, lfname):
    global mod_pars, params, photo_data, rv_data, ncores, fname
    mod_pars, params, photo_data, rv_data, ncores, fname = \
        lmod_pars, lparams, lphoto_data, lrv_data, lncores, lfname

    # number of dimensions our problem has
    parameters = ["{0}".format(i)
                  for i in range(mod_pars[0] * 5 + (mod_pars[0] - 1) * 6)]
    nparams = len(parameters)

    # make sure the output directories exist
    if not os.path.exists("./output/{0}/multinest".format(fname)):
        os.makedirs(os.path.join("./", "output", "{0}".format(fname), "multinest"))
    if not os.path.exists("./output/{0}/plots".format(fname)):
        os.makedirs(os.path.join("./", "output", "{0}".format(fname), "plots"))
    if not os.path.exists("chains"):
        os.makedirs("chains")

    # we want to see some output while it is running
    progress_plot = pymultinest.ProgressPlotter(
        n_params=nparams,
        outputfiles_basename='output/{0}/multinest/'.format(fname))
    progress_plot.start()
    # progress_print = pymultinest.ProgressPrinter(
    #     n_params=nparams,
    #     outputfiles_basename='output/{0}/multinest/'.format(fname))
    # progress_print.start()

    # run MultiNest
    pymultinest.run(lnlike, lnprior, nparams,
                    outputfiles_basename=u'./output/{0}/multinest/'.format(fname),
                    resume=True, verbose=True,
                    sampling_efficiency='parameter', n_live_points=1000)

    # run has completed
    progress_plot.stop()
    # progress_print.stop()

    # save parameter names
    json.dump(parameters, open('./output/{0}/multinest/params.json'.format(fname), 'w'))

    # plot the distribution of a posteriori possible models
    plt.figure()
    plt.plot(photo_data[0], photo_data[1], '+', color='red', label='data')

    # the Analyzer basename must match the run's outputfiles_basename
    a = pymultinest.Analyzer(
        outputfiles_basename="./output/{0}/multinest/".format(fname),
        n_params=nparams)
    for theta in a.get_equal_weighted_posterior()[::100, :-1]:
        params = utilfuncs.split_parameters(theta, mod_pars[0])
        mod_flux, mod_rv = utilfuncs.model(mod_pars, params, photo_data[0], rv_data[0])
        plt.plot(photo_data[0], mod_flux, '-', color='blue', alpha=0.3, label='model')

    utilfuncs.report_as_input(params, fname)
    plt.savefig('./output/{0}/plots/posterior.pdf'.format(fname))
    plt.close()
def perform_scan_multinest(self, chains_dir, nlive=100):
    """ Perform a scan with MultiNest """
    self.make_dirs([chains_dir])
    n_params = len(self.floated_params)
    pymultinest_options = {'importance_nested_sampling': False,
                           'resume': False,
                           'verbose': True,
                           'sampling_efficiency': 'model',
                           'init_MPI': False,
                           'evidence_tolerance': 0.5,
                           'const_efficiency_mode': False}

    pymultinest.run(self.ll, self.prior_cube, n_params,
                    outputfiles_basename=chains_dir,
                    n_live_points=nlive,
                    **pymultinest_options)
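# A hedged sketch of the prior_cube callback the scan above relies on: an
# in-place map from the unit hypercube onto each floated parameter's range.
# The param_ranges attribute is a hypothetical name for per-parameter bounds.
def prior_cube(self, cube, ndim, nparams):
    for i, param in enumerate(self.floated_params):
        lo, hi = self.param_ranges[param]  # hypothetical bounds lookup
        cube[i] = lo + (hi - lo) * cube[i]
    return cube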
def main():
    # Set up MPI variables
    world = MPI.COMM_WORLD
    rank = world.rank
    size = world.size
    master = rank == 0

    if master:
        print("Runtime parameters")
        pprint.pprint(rp)
        time.sleep(2)

    if not os.path.exists(rp["outdir"]):
        try:
            os.mkdir(rp["outdir"])
        except OSError:
            pass

    n_params = rp["nc_fit"] + 3

    # progress = pymultinest.ProgressPlotter(n_params=n_params, interval_ms=10000,
    #                                        outputfiles_basename=rp["outputfiles_basename"])
    # progress.start()

    pymultinest.run(loglike, logprior, n_params, resume=False, verbose=True,
                    multimodal=rp["multimodal"], max_modes=rp["max_modes"],
                    write_output=True, n_live_points=rp["n_live_points"],
                    evidence_tolerance=rp["evidence_tolerance"],
                    mode_tolerance=rp["mode_tolerance"], seed=rp["seed"],
                    max_iter=rp["max_iter"],
                    importance_nested_sampling=rp["do_ins"],
                    outputfiles_basename=rp["outputfiles_basename"],
                    init_MPI=False)

    if master:
        # Copy the config.ini file to the output dir
        shutil.copy(param_file, rp["outdir"])

    # progress.stop()
    return 0
def multinest(self, *args, **kwargs):
    import pymultinest

    self.freeParameters = self.modelManager.getFreeParameters()

    def prior(cube, ndim, nparams):
        for i, p in enumerate(self.freeParameters.values()):
            cube[i] = p.prior.multinestCall(cube[i])

    def loglike(cube, ndim, nparams):
        logL = self.minusLogLike(cube) * (-1)
        if numpy.isnan(logL):
            logL = -1e10
        return logL

    if 'verbose' not in kwargs:
        kwargs['verbose'] = True
    if 'resume' not in kwargs:
        kwargs['resume'] = False
    if 'outputfiles_basename' not in kwargs:
        kwargs['outputfiles_basename'] = '_1_'
    kwargs['log_zero'] = -1e9

    pymultinest.run(loglike, prior, len(self.freeParameters), *args, **kwargs)
    print("done")

    # Collect the samples
    analyzer = pymultinest.Analyzer(n_params=len(self.freeParameters),
                                    outputfiles_basename=kwargs['outputfiles_basename'])
    eqw = analyzer.get_equal_weighted_posterior()
    self.samples = eqw[:, :-1]
    self.posteriors = eqw[:, -1]
def run(gp):
    pymultinest.run(myloglike, myprior, gp.ndim,
                    n_params=gp.ndim + 1,
                    # gp.ndim, or separate modes on the rho parameters only: gp.nrho
                    n_clustering_params=gp.nrho,
                    wrapped_params=[gp.pops, gp.nipol, gp.nrho],
                    importance_nested_sampling=False,  # INS disabled
                    multimodal=False,                  # do not separate modes
                    const_efficiency_mode=True,        # use const sampling efficiency
                    n_live_points=gp.nlive,
                    evidence_tolerance=0.0,    # 0 to keep the algorithm working indefinitely
                    sampling_efficiency=0.05,  # MultiNest README recommends 0.05 for >30 params
                    n_iter_before_update=2,    # output after this many iterations
                    null_log_evidence=-1e100,
                    max_modes=gp.nlive,        # preallocation of modes: max = number of live points
                    # mode tolerance in the case where no special value exists: highly negative
                    mode_tolerance=-1.e100,
                    outputfiles_basename=gp.files.outdir,
                    seed=-1,
                    verbose=True,
                    resume=gp.restart,
                    context=0,
                    write_output=True,
                    log_zero=-1e100,  # points with log-likelihood < log_zero will be neglected
                    max_iter=0,       # 0 means no stopping criterion based on number of iterations
                    init_MPI=False,
                    dump_callback=None)
def run():
    import gl_file as gfile
    if gp.getnewdata:
        gfile.bin_data()
    gfile.get_data()

    # number of dimensions
    n_dims = gp.nepol + gp.pops * gp.nepol + gp.pops * gp.nbeta  # rho, (nu, beta)_i
    parameters = stringlist(gp.pops, gp.nepol)

    # show live progress
    # progress = pymultinest.ProgressPlotter(n_params=n_dims)
    # progress.start()
    # threading.Timer(2, show, [gp.files.outdir + '/phys_live.points.pdf']).start()
    # print(str(len(gp.files.outdir)) + ': len of gp.files.outdir')

    pymultinest.run(myloglike, myprior, n_dims,
                    n_params=n_dims,
                    n_clustering_params=gp.nepol,  # separate modes on the rho parameters only
                    wrapped_params=None,           # do not wrap around parameters
                    importance_nested_sampling=True,  # INS enabled
                    multimodal=True,                  # separate modes
                    const_efficiency_mode=True,       # use const sampling efficiency
                    n_live_points=gp.nlive,
                    evidence_tolerance=0.0,  # set to 0 to keep the algorithm working indefinitely
                    sampling_efficiency=0.80,
                    n_iter_before_update=gp.nlive,  # output after this many iterations
                    null_log_evidence=-1,  # separate modes if log-evidence > this value
                    max_modes=gp.nlive,    # preallocation of modes: maximum = number of live points
                    mode_tolerance=-1.,
                    outputfiles_basename=gp.files.outdir,
                    seed=-1,
                    verbose=True,
                    resume=False,
                    context=0,
                    write_output=True,
                    log_zero=-1e6,
                    max_iter=10000000,
                    init_MPI=True,
                    dump_callback=None)
def run(self, clean_up=None, **kwargs):
    if clean_up is None:
        clean_up = self.run_dir is None

    if self.run_dir is None:
        run_dir = tempfile.mkdtemp()
    else:
        run_dir = self.run_dir

    basename = self.prepare_fit_directory(run_dir, self.prefix)

    start_time = time.time()
    logger.info('Starting fit in {0} with prefix {1}'.format(run_dir, self.prefix))
    pymultinest.run(self.likelihood.multinest_evaluate,
                    self.priors.prior_transform, self.n_params,
                    outputfiles_basename='{0}_'.format(basename), **kwargs)
    logger.info("Fit finished - took {0:.2f} s".format(time.time() - start_time))

    fitted_parameter_names = [item for item in self.likelihood.param_names
                              if not self.likelihood.fixed[item]]
    self.result = MultiNestResult.from_multinest_basename(
        basename, fitted_parameter_names)

    if clean_up:
        logger.info("Cleaning up - deleting {0}".format(run_dir))
        shutil.rmtree(run_dir)
    else:
        logger.info("Multinest files can be found in {0}".format(run_dir))

    self.likelihood.parameters[~self.likelihood.fixed_mask()] = (
        self.result.median.values)
    return self.result
def run(self, clean_up=None, **kwargs):
    if clean_up is None:
        clean_up = self.run_dir is None

    if self.run_dir is None:
        run_dir = tempfile.mkdtemp()
    else:
        run_dir = self.run_dir

    basename = self.prepare_fit_directory(run_dir, self.prefix)

    start_time = time.time()
    logger.info('Starting fit in {0} with prefix {1}'.format(run_dir, self.prefix))
    pymultinest.run(self.likelihood, self.priors.prior_transform, self.n_params,
                    outputfiles_basename='{0}_'.format(basename), **kwargs)
    logger.info("Fit finished - took {0:.2f} s".format(time.time() - start_time))

    self.result = MultinestResult.from_multinest_basename(
        basename, self.likelihood.param_names)

    if clean_up:
        logger.info("Cleaning up - deleting {0}".format(run_dir))
        shutil.rmtree(run_dir)

    return self.result
print("\n You are searching for the following parameters: {0}\n".format(parameters))
n_params = len(parameters)
print("\n The total number of parameters is {0}\n".format(n_params))

#####################
# Now, we sample.....
#####################
print("\n Now, we sample... \n")

if args.eccSearch:
    dirextension = 'eccSearch'
else:
    dirextension = 'circSearch'

master_path = os.getcwd()
os.chdir(args.datapath)

pymultinest.run(lnprob, my_prior_mnest, n_params,
                importance_nested_sampling=False,
                resume=False, verbose=True, n_live_points=1000,
                outputfiles_basename=u'chains_{0}/{0}_'.format(dirextension),
                sampling_efficiency='parameter')
def run(catalogfile, vel_err, mag_err, N_gauss, outdir, rotate=True):
    """
    PyMultiNest run to determine cluster membership, using the PM catalog
    and applying vel_err and mag_err cuts. Output is put in the newly-created
    outdir directory (must be a string).

    Parameters:
    catalogfile --> String containing the name of a FITS catalog.
    vel_err --> The maximum allowed velocity error for stars to be included.
    mag_err --> The maximum allowed magnitude error for stars to be included.
    N_gauss --> Number of bivariate Gaussians, where N_gauss <= 4.
    outdir --> The output directory name.

    Keywords:
    rotate = True --> rotate star velocities into RA/DEC format, as opposed to X,Y
    """
    # Load data for the full field, extract velocities (already converted to mas)
    d = loadData(catalogfile, vel_err, mag_err, rotate=rotate)

    star_Vx = d["fit_vx"]
    star_Vy = d["fit_vy"]
    star_Sigx = d["fit_vxe"]
    star_Sigy = d["fit_vye"]
    N_stars = len(d)

    def print_param(pname, val, logp, headerFirst=False):
        rowHead = "{0:6s} "
        colHead = " val_{0} ( logp_{0} )"
        colVal = "{0:6.3f} ({1:9.2e})"

        if headerFirst:
            outhdr = " ".join([colHead.format(k) for k in range(N_gauss)])
            print(rowHead.format("") + outhdr)

        outstr = " ".join([colVal.format(val[k], logp[k]) for k in range(N_gauss)])
        print(rowHead.format(pname) + outstr)
        return

    def priors(cube, ndim, nparams):
        return

    def likelihood(cube, ndim, nparams):
        """
        Define the likelihood function (from Clarkson+12, Hosek+15)
        """
        # start the timer
        t0 = time.time()

        ####################
        # Set up model params
        ####################
        # Number of parameters per Gaussian:
        N_per_gauss = 6

        # Make arrays for the parameters of each Gaussian
        pi = np.arange(N_gauss, dtype=float)
        vx = np.arange(N_gauss, dtype=float)
        vy = np.arange(N_gauss, dtype=float)
        sigA = np.arange(N_gauss, dtype=float)
        sigB = np.arange(N_gauss, dtype=float)
        theta = np.arange(N_gauss, dtype=float)

        # Make arrays for the prior probability of each parameter
        logp_pi = np.arange(N_gauss, dtype=float)
        logp_vx = np.arange(N_gauss, dtype=float)
        logp_vy = np.arange(N_gauss, dtype=float)
        logp_sigA = np.arange(N_gauss, dtype=float)
        logp_sigB = np.arange(N_gauss, dtype=float)
        logp_theta = np.arange(N_gauss, dtype=float)

        # Set the fraction of stars in each Gaussian
        for kk in range(N_gauss):
            pi[kk], logp_pi[kk] = random_pi(cube[kk * N_per_gauss + 0])

        # Make sure that sum(pi) = 1.
        pi /= pi.sum()

        # Sort the field pi values such that they are always ranked from
        # smallest to largest.
        sidx = pi[1:].argsort()
        pi[1:] = pi[1:][sidx]
        logp_pi[1:] = logp_pi[1:][sidx]

        # Re-set the cube values. Note this is AFTER sorting.
        for kk in range(N_gauss):
            cube[kk * N_per_gauss + 0] = pi[kk]

        # Set the other Gaussian parameters. Treat the cluster Gaussian
        # (the first, most compact one) with special prior functions.
        for kk in range(N_gauss):
            if kk == 0:
                rand_vx = random_clust_vx
                rand_vy = random_clust_vy
                rand_sigA = random_clust_sigA
                rand_sigB = random_clust_sigB
                rand_theta = random_clust_theta
            else:
                rand_vx = random_v
                rand_vy = random_v
                rand_sigA = random_sig
                rand_sigB = random_sig
                rand_theta = random_theta

            # Velocity centroid
            vx[kk], logp_vx[kk] = rand_vx(cube[kk * N_per_gauss + 1])
            cube[kk * N_per_gauss + 1] = vx[kk]

            vy[kk], logp_vy[kk] = rand_vy(cube[kk * N_per_gauss + 2])
            cube[kk * N_per_gauss + 2] = vy[kk]

            # Major axis
            sigA[kk], logp_sigA[kk] = rand_sigA(cube[kk * N_per_gauss + 3])
            cube[kk * N_per_gauss + 3] = sigA[kk]

            # Minor axis
            sigB[kk], logp_sigB[kk] = rand_sigB(cube[kk * N_per_gauss + 4])
            cube[kk * N_per_gauss + 4] = sigB[kk]

            # Angle of major axis (in radians)
            theta[kk], logp_theta[kk] = rand_theta(cube[kk * N_per_gauss + 5])
            cube[kk * N_per_gauss + 5] = theta[kk]

            # Only want to consider Gaussians where sigA > sigB
            if sigB[kk] > sigA[kk]:
                return -np.inf

            # Check that all our prior probabilities are valid; otherwise abort
            # before the expensive calculation.
            if ((logp_pi[kk] == -np.inf) or
                    (logp_vx[kk] == -np.inf) or
                    (logp_vy[kk] == -np.inf) or
                    (logp_sigA[kk] == -np.inf) or
                    (logp_sigB[kk] == -np.inf) or
                    (logp_theta[kk] == -np.inf)):
                return -np.inf

        ################################
        # Calculate the likelihood:
        # L = \sum_{i=0}^{N_stars} \sum_{k=0}^{N_gauss}
        #     \pi_k * (2 \pi |\Sigma_{k,i}|)^{-1/2} *
        #     exp[-1/2 * (\mu_i - \mu_k)^T \Sigma_{k,i}^{-1} (\mu_i - \mu_k)]
        ################################
        # Keep track of the probability for each star and each Gaussian
        # component. We will add over components and multiply over stars.
        prob_gauss = np.zeros((N_gauss, N_stars), dtype=float)

        # L_{i,k}: loop through the different Gaussian components.
        for kk in range(N_gauss):
            # N_stars long array
            prob_gauss[kk, :] = prob_ellipse(star_Vx, star_Vy,
                                             star_Sigx, star_Sigy,
                                             pi[kk], vx[kk], vy[kk],
                                             sigA[kk], sigB[kk], theta[kk])

        # For each star, the total likelihood is the sum
        # of each component (before log).
        L_star = prob_gauss.sum(axis=0)  # This array should be N_stars long
        logL_star = np.log10(L_star)

        # Final likelihood
        logL = logL_star.sum()
        logL_tmp = logL

        # Add in log(prior probabilities) as well
        for kk in range(N_gauss):
            logL += logp_pi[kk]
            logL += logp_vx[kk]
            logL += logp_vy[kk]
            logL += logp_sigA[kk]
            logL += logp_sigB[kk]
            logL += logp_theta[kk]

        # Some printing
        print("*** logL = {0:9.2e} w/priors = {1:9.2e}".format(logL_tmp, logL))
        print_param("pi", pi, logp_pi, headerFirst=True)
        print_param("vx", vx, logp_vx)
        print_param("vy", vy, logp_vy)
        print_param("sigA", sigA, logp_sigA)
        print_param("sigB", sigB, logp_sigB)
        print_param("theta", theta, logp_theta)

        t1 = time.time()
        total = t1 - t0
        print("TIME SPENT: " + str(total))

        return logL

    #########################################
    # End likelihoods.
    # Begin running MultiNest.
    #########################################
    # Make a new directory to hold the output
    fileUtil.mkdir(outdir)
    outroot = outdir + "/mnest_"

    num_dims = 2 * 3 * N_gauss
    num_params = num_dims
    ev_tol = 0.3
    samp_eff = 0.8
    n_live_points = 300

    # Create param file
    _run = open(outroot + "params.run", "w")
    _run.write("Catalog: %s\n" % catalogfile)
    _run.write("Vel Err Cut: %.2f\n" % vel_err)
    _run.write("Mag Err Cut: %.2f\n" % mag_err)
    _run.write("Rotate: %s\n" % str(rotate))
    _run.write("Num Gauss: %d\n" % N_gauss)
    _run.write("Num Dimensions: %d\n" % num_dims)
    _run.write("Num Params: %d\n" % num_params)
    _run.write("Evidence Tolerance: %.1f\n" % ev_tol)
    _run.write("Sampling Efficiency: %.1f\n" % samp_eff)
    _run.write("Num Clustering Params: %d\n" % num_dims)
    _run.write("Num Live Points: %d\n" % n_live_points)
    _run.close()

    # Run MultiNest
    pymultinest.run(likelihood, priors, num_dims,
                    n_params=num_params,
                    outputfiles_basename=outroot,
                    verbose=True,
                    resume=False,
                    evidence_tolerance=ev_tol,
                    sampling_efficiency=samp_eff,
                    n_live_points=n_live_points,
                    multimodal=True,
                    n_clustering_params=num_dims,
                    importance_nested_sampling=False)
    return
def nested_run(transformations, prior_function=None,
               sampling_efficiency=0.3, n_live_points=400,
               evidence_tolerance=0.5,
               outputfiles_basename='chains/', verbose=True, **kwargs):
    """
    Run the Bayesian analysis with the specified parameters+transformations.

    If prior_function is None, uniform priors are used on the passed parameters.

    :param transformations: Parameter transformation definitions
    :param prior_function: set only if you want to specify a custom,
        non-separable prior
    :param outputfiles_basename: prefix for output filenames.

    The remainder are MultiNest arguments (see the PyMultiNest and MultiNest
    documentation!). n_live_points = 400 is often enough.

    For quick results, use sampling_efficiency = 0.8, n_live_points = 50,
    evidence_tolerance = 5. The real results must be estimated with
    sampling_efficiency = 0.3 and without const_efficiency_mode;
    otherwise they are not reliable.
    """
    # for convenience
    if outputfiles_basename.endswith('/'):
        if not os.path.exists(outputfiles_basename):
            os.mkdir(outputfiles_basename)

    if prior_function is None:
        prior_function = create_prior_function(transformations)

    oldchatter = Xset.chatter, Xset.logChatter
    Xset.chatter, Xset.logChatter = 0, 0

    def log_likelihood(cube, ndim, nparams):
        try:
            set_parameters(transformations=transformations, values=cube)
            l = -0.5 * Fit.statistic
            return l
        except Exception as e:
            print('Exception in log_likelihood function: ', e)
            import sys
            sys.exit(-127)
            return -1e300

    # run MultiNest
    if Fit.statMethod.lower() not in ['cstat', 'cash']:
        raise RuntimeError('ERROR: not using cash (Poisson likelihood) for '
                           'Poisson data! Set Fit.statMethod to cash before '
                           'analysing (currently: %s)!' % Fit.statMethod)

    n_params = len(transformations)
    pymultinest.run(log_likelihood, prior_function, n_params,
                    sampling_efficiency=sampling_efficiency,
                    n_live_points=n_live_points,
                    outputfiles_basename=outputfiles_basename,
                    verbose=verbose, **kwargs)

    paramnames = [str(t['name']) for t in transformations]
    with open('%sparams.json' % outputfiles_basename, 'w') as f:
        json.dump(paramnames, f, indent=4)

    # store as a chain too, and try to load it for error computations
    analyzer = pymultinest.Analyzer(n_params=len(transformations),
                                    outputfiles_basename=outputfiles_basename)
    posterior = analyzer.get_equal_weighted_posterior()
    chainfilename = '%schain.fits' % outputfiles_basename
    store_chain(chainfilename, transformations, posterior)
    xspec.AllChains.clear()
    Xset.chatter, Xset.logChatter = oldchatter
    xspec.AllChains += chainfilename

    # set current parameters to best fit
    set_best_fit(analyzer, transformations)

    Xset.chatter, Xset.logChatter = oldchatter
    return analyzer
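# A hedged usage sketch for nested_run() above, following the docstring's
# advice: a quick exploratory pass first, then a reliable final run. The
# `transformations` list is assumed to have been built beforehand (each
# entry a dict with at least a 'name' key, as the code above requires),
# and the Xspec data/model setup to have been done already.
def quick_then_final(transformations):
    # coarse settings for a fast sanity check (per the docstring)
    nested_run(transformations, outputfiles_basename='chains-quick/',
               sampling_efficiency=0.8, n_live_points=50,
               evidence_tolerance=5)
    # reliable settings for the real result
    return nested_run(transformations, outputfiles_basename='chains/',
                      sampling_efficiency=0.3, n_live_points=400,
                      evidence_tolerance=0.5)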
def myprior(cube, ndim, nparams):
    for i in range(ndim):
        cube[i] = cube[i] * 10 * math.pi

def myloglike(cube, ndim, nparams):
    chi = 1.
    # print("cube", [cube[i] for i in range(ndim)], cube)
    for i in range(ndim):
        chi *= math.cos(cube[i] / 2.)
    # print("returning", math.pow(2. + chi, 5))
    return math.pow(2. + chi, 5)

# number of dimensions our problem has
parameters = ["x", "y"]
n_params = len(parameters)

# we want to see some output while it is running
progress = pymultinest.ProgressPlotter(n_params=n_params,
                                       outputfiles_basename='chains/2-')
progress.start()
threading.Timer(2, show, ["chains/2-phys_live.points.pdf"]).start()  # delayed opening

# run MultiNest
pymultinest.run(myloglike, myprior, n_params,
                importance_nested_sampling=False,
                resume=True, verbose=True,
                sampling_efficiency='model', n_live_points=1000,
                outputfiles_basename='chains/2-')

# ok, done. Stop our progress watcher
progress.stop()

# lets analyse the results
a = pymultinest.Analyzer(n_params=n_params, outputfiles_basename='chains/2-')
s = a.get_stats()

import json
# store name of parameters, always useful
with open('%sparams.json' % a.outputfiles_basename, 'w') as f:
    json.dump(parameters, f, indent=2)
# store derived stats
with open('%sstats.json' % a.outputfiles_basename, mode='w') as f:
    json.dump(s, f, indent=2)
print()
def dump():
    progress = pymultinest.ProgressPlotter(n_params=n_params,
                                           outputfiles_basename=dir_output + 'chains/2-')
    progress.start()
    threading.Timer(2, show, [dir_output + "chains/2-phys_live.points.pdf"]).start()  # delayed opening

    # run MultiNest
    pymultinest.run(mc.multinest_call, mc.multinest_priors, n_params,
                    importance_nested_sampling=False,
                    resume=True, verbose=True,
                    sampling_efficiency='model', n_live_points=1000,
                    outputfiles_basename=dir_output + 'chains/2-')

    # ok, done. Stop our progress watcher
    progress.stop()

    # lets analyse the results
    a = pymultinest.Analyzer(n_params=n_params,
                             outputfiles_basename=dir_output + 'chains/2-')
    s = a.get_stats()

    # store name of parameters, always useful
    with open('%sparams.json' % a.outputfiles_basename, 'w') as f:
        json.dump(parameters, f, indent=2)
    # store derived stats
    with open('%sstats.json' % a.outputfiles_basename, mode='w') as f:
        json.dump(s, f, indent=2)

    print()
    print("-" * 30, 'ANALYSIS', "-" * 30)
    print("Global Evidence:\n\t%.15e +- %.15e" % (
        s['nested sampling global log-evidence'],
        s['nested sampling global log-evidence error']))

    import matplotlib.pyplot as plt
    plt.clf()

    # run MultiNest
    # pymultinest.run(mc.pymultinest_call, mc.pymultinest_priors, mc.ndim,
    #                 outputfiles_basename=dir_output, resume=False, verbose=True)
    # json.dump(parameters, open(dir_output + 'params.json', 'w'))  # save parameter names

    # Here we will plot all the marginals and whatnot, just to show off.
    # You may configure the format of the output here, or in matplotlibrc.
    # All pymultinest does is fill in the data of the plot.
    # Copy and edit this file, and play with it.
    p = pymultinest.PlotMarginalModes(a)
    plt.figure(figsize=(5 * n_params, 5 * n_params))
    # plt.subplots_adjust(wspace=0, hspace=0)
    for i in range(n_params):
        plt.subplot(n_params, n_params, n_params * i + i + 1)
        p.plot_marginal(i, with_ellipses=True, with_points=False, grid_points=50)
        plt.ylabel("Probability")
        plt.xlabel(parameters[i])

        for j in range(i):
            plt.subplot(n_params, n_params, n_params * j + i + 1)
            # plt.subplots_adjust(left=0, bottom=0, right=0, top=0, wspace=0, hspace=0)
            p.plot_conditional(i, j, with_ellipses=False, with_points=True, grid_points=30)
            plt.xlabel(parameters[i])
            plt.ylabel(parameters[j])

    plt.savefig(dir_output + "chains/marginals_multinest.pdf")  # , bbox_inches='tight')
    show(dir_output + "chains/marginals_multinest.pdf")

    for i in range(n_params):
        outfile = '%s-mode-marginal-%d.pdf' % (a.outputfiles_basename, i)
        p.plot_modes_marginal(i, with_ellipses=True, with_points=False)
        plt.ylabel("Probability")
        plt.xlabel(parameters[i])
        plt.savefig(outfile, format='pdf', bbox_inches='tight')
        plt.close()

        outfile = '%s-mode-marginal-cumulative-%d.pdf' % (a.outputfiles_basename, i)
        p.plot_modes_marginal(i, cumulative=True, with_ellipses=True, with_points=False)
        plt.ylabel("Cumulative probability")
        plt.xlabel(parameters[i])
        plt.savefig(outfile, format='pdf', bbox_inches='tight')
        plt.close()

    print("Take a look at the pdf files in chains/")
def main():
    # MAIN -- TRADES + pyMultiNest

    # initialize logger
    logger = logging.getLogger("Main_log")
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(asctime)s - %(message)s")

    # READ COMMAND LINE ARGUMENTS
    cli = get_args()

    # STARTING TIME
    start = time.time()

    # RENAME
    working_path = cli.full_path
    log_file = os.path.join(working_path, '%s_log.txt' % (os.path.dirname(cli.sub_folder)))

    flog = logging.FileHandler(log_file, 'w')
    flog.setLevel(logging.DEBUG)
    flog.setFormatter(formatter)
    logger.addHandler(flog)
    # log screen
    slog = logging.StreamHandler()
    slog.setLevel(logging.DEBUG)
    slog.setFormatter(formatter)
    logger.addHandler(slog)

    fitting_priors, fitting_priors_type = read_priors(
        os.path.join(working_path, 'fitting_priors.dat'))
    derived_priors, derived_priors_type = read_priors(
        os.path.join(working_path, 'derived_priors.dat'))
    n_der_priors = len(derived_priors)

    # INITIALISE TRADES WITH SUBROUTINE WITHIN TRADES_LIB ->
    # PARAMETER NAMES, MINMAX, INTEGRATION ARGS, READ DATA ...
    pytrades_lib.pytrades.initialize_trades(working_path, cli.sub_folder, 1)

    # RETRIEVE DATA AND VARIABLES FROM THE TRADES_LIB MODULE
    # initial parameter set (needed only to have the proper array/vector)
    fitting_parameters = pytrades_lib.pytrades.fitting_parameters
    # parameter boundaries
    parameters_minmax = pytrades_lib.pytrades.parameters_minmax
    # delta between max and min of boundaries
    delta_parameters = np.abs(parameters_minmax[:, 1] - parameters_minmax[:, 0])

    n_bodies = pytrades_lib.pytrades.n_bodies  # number of total bodies of the system
    n_planets = n_bodies - 1                   # number of planets in the system
    ndata = pytrades_lib.pytrades.ndata        # total number of data available
    npar = pytrades_lib.pytrades.npar          # number of total parameters ~ n_planets x 6
    nfit = pytrades_lib.pytrades.nfit          # number of parameters to fit
    nfree = pytrades_lib.pytrades.nfree        # number of free parameters (i.e. nrvset)
    dof = pytrades_lib.pytrades.dof            # number of degrees of freedom = ndata - nfit
    global inv_dof
    inv_dof = pytrades_lib.pytrades.inv_dof

    str_len = pytrades_lib.pytrades.str_len
    temp_names = pytrades_lib.pytrades.get_parameter_names(nfit, str_len)
    trades_names = anc.convert_fortran_charray2python_strararray(temp_names)
    parameter_names = anc.trades_names_to_emcee(trades_names)

    # RADIAL VELOCITIES SET
    n_rv = pytrades_lib.pytrades.nrv
    n_set_rv = pytrades_lib.pytrades.nrvset

    # TRANSITS SET
    n_t0 = pytrades_lib.pytrades.nt0
    n_t0_sum = pytrades_lib.pytrades.ntts
    n_set_t0 = 0
    for i in range(0, n_bodies - 1):
        if n_t0[i] > 0:
            n_set_t0 += 1

    # compute the global constant for the log-likelihood
    global ln_err_const
    ln_err_const = pytrades_lib.pytrades.ln_err_const

    # INITIALISE SCRIPT FOLDER/LOG FILE
    working_folder, run_log, of_run = init_folder(working_path, cli.sub_folder)

    logger.info('')
    logger.info('==================== ')
    logger.info('pyTRADES-pyMultiNest')
    logger.info('==================== ')
    logger.info('')
    logger.info('WORKING PATH = %s' % (working_path))
    logger.info('dof = ndata(%d) - nfit(%d) = %d' % (ndata, nfit, dof))
    logger.info('Total N_RV = %d for %d set(s)' % (n_rv, n_set_rv))
    logger.info('Total N_T0 = %d for %d out of %d planet(s)' % (n_t0_sum, n_set_t0, n_planets))
    logger.info('%s = %.7f' % ('log constant error = ', ln_err_const))

    # save parameter_names and boundaries to be read by a script
    trades_hdf5 = h5py.File(os.path.join(working_folder, 'system_summary.hdf5'), 'w')
    trades_hdf5.create_dataset('parameter_names', data=parameter_names, dtype='S10')
    trades_hdf5.create_dataset('parameters_minmax', data=parameters_minmax, dtype=np.float64)
    trades_hdf5.create_dataset('ln_err_const',
                               data=np.asarray([ln_err_const], dtype=np.float64),
                               dtype=np.float64)
    trades_hdf5.close()

    # MULTINEST HERE
    output_mnest = 'trades_mnest_'
    os.chdir(working_folder)
    log_zero_value = -0.5 * 1.e8
    seed_value = convert_to_int(cli.seed_value)
    n_pop = convert_to_int(cli.n_pop)
    if n_pop < nfit:
        n_pop = nfit * 10
    n_update = 5  # by argument
    resume_flag = cli.resume_flag
    multi_node_flag = True
    mpi_onoff = False

    logger.info('pyMultiNest parameters:')
    logger.info('folder = %s' % (output_mnest))
    logger.info('seed_value = %d , n_pop = %d , n_update = %d' % (seed_value, n_pop, n_update))
    logger.info('resume_flag = %r , multi_node_flag = %r, mpi_onoff = %r' % (resume_flag, multi_node_flag, mpi_onoff))

    # RESCALE PARAMETERS FUNCTION NEEDED BY LNLIKE
    def trades_rescale(fitting_parameters, ndim, nparams):
        for i in range(0, ndim):
            fitting_parameters[i] = parameters_minmax[i, 0] + \
                fitting_parameters[i] * delta_parameters[i]
        return fitting_parameters

    # LNPRIOR TO BE ADDED TO LOGLHD;
    # it can use all the variables defined before this point!
    def lnprior(fitting_parameters, ndim):
        lnprior_value = 0.
        i_der = 0
        for i in range(0, ndim):
            ln_temp = 0.
            # calculate the log-prior of the fitting parameter
            if fitting_priors_type[i][1].lower() == 'g':
                ln_temp = -0.5 * (((fitting_parameters[i] - fitting_priors[i][0]) / fitting_priors[i][1]) ** 2)
                lnprior_value = lnprior_value + ln_temp

            # calculate the log-prior of the derived parameter
            if 'mA' in parameter_names[i]:
                ln_temp = 0.
                ecc = np.sqrt(fitting_parameters[i - 2] ** 2 + fitting_parameters[i - 1] ** 2)
                if ecc <= 0.:
                    ecc = np.finfo(float).eps
                elif ecc > 1.:
                    ecc = 1. - np.finfo(float).eps

                # ecc prior
                if derived_priors_type[i_der][1].lower() == 'g':
                    ln_temp = -0.5 * (((ecc - derived_priors[i_der][0]) / derived_priors[i_der][1]) ** 2)
                    lnprior_value = lnprior_value + ln_temp

                # phi prior
                if derived_priors_type[i_der + 1][1].lower() == 'g':
                    if ecc <= np.finfo(float).eps:
                        argp = 90.
                    else:
                        argp = ((np.arctan2(fitting_parameters[i - 1], fitting_parameters[i - 2]) * 180. / np.pi) + 360.) % 360.
                    phi = (argp + fitting_parameters[i] + 360.) % 360.
                    ln_temp = -0.5 * (((phi - derived_priors[i_der + 1][0]) / derived_priors[i_der + 1][1]) ** 2)
                    lnprior_value = lnprior_value + ln_temp

                i_der = i_der + 2

        return lnprior_value

    # LNLIKELIHOOD FUNCTION NEEDED BY MULTINEST
    def lnlike(fitting_parameters, ndim, nparams):
        loglhd = 0.
        check = 1
        trades_parameters = np.asarray([fitting_parameters[i] for i in range(0, ndim)],
                                       dtype=np.float64)
        loglhd, check = pytrades_lib.pytrades.fortran_loglikelihood(trades_parameters)
        if check == 0:
            loglhd = -0.5e10
        else:
            lnprior_value = lnprior(fitting_parameters, ndim)
            # ln_err_const (global variable) & lnprior_value
            loglhd = loglhd + ln_err_const + lnprior_value
        return loglhd

    # run MultiNest
    pymultinest.run(LogLikelihood=lnlike, Prior=trades_rescale,
                    n_dims=nfit, n_params=nfit,
                    outputfiles_basename=output_mnest,
                    multimodal=multi_node_flag,
                    log_zero=log_zero_value,
                    seed=seed_value,
                    n_live_points=n_pop,
                    n_iter_before_update=n_update,
                    resume=resume_flag,
                    verbose=True,
                    init_MPI=mpi_onoff)

    elapsed = time.time() - start
    elapsed_d, elapsed_h, elapsed_m, elapsed_s = computation_time(elapsed)

    logger.info('')
    logger.info('pyTRADES: pyMultiNest FINISHED in %2d day %02d hour %02d min %.2f sec - bye bye' % (int(elapsed_d), int(elapsed_h), int(elapsed_m), elapsed_s))
    logger.info('')

    pytrades_lib.pytrades.deallocate_variables()
    return
def myloglike_kg(cube, ndim, n_params):
    try:
        loglike = kg_loglikelihood(cube, my_observable, my_error, a)
        return loglike
    except:
        return -np.inf

parameters = ['Semi-major axis', 'Eccentricity', 'Position Angle',
              'Thickness', 'Contrast']
n_params = len(parameters)

resume = False
eff = 0.3
multi = True
max_iter = 10000
ndim = n_params

pymultinest.run(myloglike_kg, myprior, n_params, wrapped_params=[2],
                verbose=False, resume=False, max_iter=max_iter)

thing = pymultinest.Analyzer(n_params=n_params)
s = thing.get_stats()
ksemis[trial], dksemis[trial] = s['marginals'][0]['median'] / 4., s['marginals'][0]['sigma'] / 4.
keccs[trial], dkeccs[trial] = s['marginals'][1]['median'], s['marginals'][1]['sigma']
kthetas[trial], dkthetas[trial] = s['marginals'][2]['median'], s['marginals'][2]['sigma']
kthicks[trial], dkthicks[trial] = s['marginals'][3]['median'], s['marginals'][3]['sigma']
kcons[trial], dkcons[trial] = s['marginals'][4]['median'], s['marginals'][4]['sigma']

stuff = thing.get_best_fit()
best_params = stuff['parameters']
print('Best params (kg):', best_params)
def nest(kpo, paramlimits=[20., 250., 0., 360., 1.0001, 10], ndim=3,
         resume=False, eff=0.3, multi=True, max_iter=0, bispec=False):
    '''Default implementation of a MultiNest fitting routine for kernel
    phase data. Requires a kernel phase kpo object, parameter limits and
    sensible keyword arguments for the MultiNest parameters.

    This function does very naughty things creating functions inside this
    function because PyMultiNest is very picky about how you pass it data.

    Optional parameter eff tunes the sampling efficiency, and multi toggles
    multimodal nested sampling on and off. Turning off multimodal sampling
    results in a speed boost of ~20-30%.
    '''
    import pymultinest  # importing here so you don't have to unless you use nest!

    # make sure you're using the right number of parameters
    nbands = kpo.kpd.shape[0]

    if np.size(kpo.hdr) == 1:
        bands = str(round(1e6 * kpo.hdr['filter'], 3))
        parameters = ['Separation', 'Position Angle', 'Contrast at ' + bands + ' um']
    else:
        bands = [str(round(1e6 * hd['filter'], 3)) for hd in kpo.hdr]
        parameters = ['Separation', 'Position Angle'] + \
                     ['Contrast at ' + band + ' um' for band in bands]

    n_params = len(parameters)
    ndim = n_params

    def myprior(cube, ndim, n_params, paramlimits=paramlimits):
        cube[0] = (paramlimits[1] - paramlimits[0]) * cube[0] + paramlimits[0]
        cube[1] = (paramlimits[3] - paramlimits[2]) * cube[1] + paramlimits[2]
        for j in range(2, ndim):
            cube[j] = (paramlimits[5] - paramlimits[4]) * cube[j] + paramlimits[4]

    if bispec:
        print('Using a bispectral analysis')

        def myloglike(cube, ndim, n_params):
            loglike = bispec_loglikelihood(cube, kpo)
            return loglike
    else:
        print('Modelling kernel phases with nested sampling')

        def myloglike(cube, ndim, n_params):
            loglike = kp_loglikelihood(cube, kpo)
            return loglike

    tic = time.time()  # start timing

    #---------------------------------
    # now run MultiNest!
    #---------------------------------
    pymultinest.run(myloglike, myprior, n_params, wrapped_params=[1],
                    resume=resume, verbose=True, sampling_efficiency=eff,
                    multimodal=multi, n_iter_before_update=1000,
                    max_iter=max_iter)

    # let's analyse the results
    a = pymultinest.Analyzer(n_params=n_params)
    s = a.get_stats()

    toc = time.time()
    if toc - tic < 60.:
        print('Time elapsed =', toc - tic, 's')
    else:
        print('Time elapsed =', (toc - tic) / 60., 'mins')

    null = -0.5 * np.sum(((kpo.kpd) / kpo.kpe) ** 2)

    print()
    print("-" * 30, 'ANALYSIS', "-" * 30)
    print("Global Evidence:\n\t%.15e +- %.15e" % (
        s['global evidence'] - null, s['global evidence error']))

    params = s['marginals']

    bestsep = params[0]['median']
    seperr = params[0]['sigma']

    if 'Hale' in kpo.hdr['tel']:
        params[1]['median'] += 220.0 + kpo.hdr['orient']
    elif 'HST' in kpo.hdr['tel']:
        params[1]['median'] -= kpo.hdr['orient']
    else:
        params[1]['median'] += 0.0

    params[1]['median'] = np.mod(params[1]['median'], 360.)
    bestth = params[1]['median']
    therr = params[1]['sigma']

    print('Separation: %.3f pm %.2f' % (bestsep, seperr))
    print('Position angle: %.3f pm %.2f' % (bestth, therr))

    if kpo.nsets == 1:
        bestcon = params[2]['median']
        conerr = params[2]['sigma']
        print('Contrast at', bands, 'um: %.3f pm %.3f' % (bestcon, conerr))
    else:
        for j, band in enumerate(bands):
            bestcon = params[j + 2]['median']
            conerr = params[j + 2]['sigma']
            print('Contrast at', band, 'um: %.3f pm %.3f' % (bestcon, conerr))

    return params
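# A hedged usage sketch for nest() above: fit a kernel-phase object with the
# default separation/angle/contrast limits, then pull out the medians from
# the returned marginals. The `kpo` object is assumed to have been built
# elsewhere (e.g. with pysco).
def fit_binary(kpo):
    params = nest(kpo, paramlimits=[20., 250., 0., 360., 1.0001, 10.],
                  eff=0.3, multi=True)
    sep = params[0]['median']
    pa = params[1]['median']
    con = params[2]['median']
    return sep, pa, con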
def sample_multinest(self, n_live_points, chain_name="chains/fit-",
                     quiet=False, **kwargs):
    """
    Sample the posterior with MULTINEST nested sampling (Feroz & Hobson)

    :param: n_live_points: number of MULTINEST live points
    :param: chain_name: where to store the MULTINEST incremental output
    :param: quiet: whether or not to display results
    :param: **kwargs: (pyMULTINEST keywords)
    :return: MCMC samples
    """
    assert has_pymultinest, "You don't have pymultinest installed, so you cannot run the Multinest sampler"

    self._update_free_parameters()

    n_dim = len(self._free_parameters.keys())

    # MULTINEST has its own convergence criteria and therefore there is
    # no way to determine progress
    sampling_procedure = sample_without_progress

    # MULTINEST uses a different call signature for sampling,
    # so we construct callbacks
    loglike, multinest_prior = self._construct_multinest_posterior()

    # We need to check if the MCMC chains will have a place on
    # the disk to write and if not, create one
    mcmc_chains_out_dir = ""
    tmp = chain_name.split('/')
    for s in tmp[:-1]:
        mcmc_chains_out_dir += s + '/'

    if using_mpi:
        # if we are running in parallel and this is not the
        # first engine, then we want to wait and let everything finish
        if rank != 0:
            # let these guys take a break
            time.sleep(1)
        else:
            # create the mcmc chains directory only on the first engine
            if not os.path.exists(mcmc_chains_out_dir):
                os.makedirs(mcmc_chains_out_dir)
    else:
        if not os.path.exists(mcmc_chains_out_dir):
            os.makedirs(mcmc_chains_out_dir)

    print("\nSampling\n")
    print("MULTINEST has its own convergence criteria... you will have to wait blindly for it to finish")
    print("If INS is enabled, one can monitor the likelihood in the terminal for completion information")

    # MULTINEST must be run in parallel via an external method;
    # see the demo in the examples folder!!
    if threeML_config['parallel']['use-parallel']:
        raise RuntimeError("If you want to run multinest in parallel you need to use an ad-hoc method")
    else:
        sampler = pymultinest.run(loglike, multinest_prior, n_dim, n_dim,
                                  outputfiles_basename=chain_name,
                                  n_live_points=n_live_points,
                                  **kwargs)

    # Use the PyMULTINEST analyzer to gather parameter info
    process_fit = False

    if using_mpi:
        # if we are running in parallel and this is not the
        # first engine, then we want to wait and let everything finish
        if rank != 0:
            # let these guys take a break
            time.sleep(5)
            # these engines do not need to read
            process_fit = False
        else:
            # wait for a moment to allow it all to turn off
            time.sleep(5)
            process_fit = True
    else:
        process_fit = True

    if process_fit:
        multinest_analyzer = pymultinest.analyse.Analyzer(
            n_params=n_dim, outputfiles_basename=chain_name)

        # Get the log-likelihood values from the chain
        self._log_like_values = multinest_analyzer.get_equal_weighted_posterior()[:, -1]
        self._sampler = sampler
        self._raw_samples = multinest_analyzer.get_equal_weighted_posterior()[:, :-1]

        # now get the log probability
        self._log_probability_values = self._log_like_values + np.array(
            [self._log_prior(samples) for samples in self._raw_samples])

        self._build_samples_dictionary()

        # now get the marginal likelihood
        self._marginal_likelihood = multinest_analyzer.get_stats()['global evidence'] / np.log(10.)

        self._build_results()

        # Display results
        if not quiet:
            self._results.display()

    return self.samples
def multinest_run(n_live_points=1000, target='ob110022', saveto=multiNest_saveto):
    def priors(cube, ndim, nparams):
        return

    def likelihood(cube, ndim, nparams):
        lnlikePhot = 0.0
        lnlike_nonPhot = 0.0
        parnames = Photpars['parnames']

        # Photometric params
        if MultiDimPriors:
            params, lnlikePhot = random_photpar_multidim(
                cube[0],
                Photpars['MultiDimPrior_Bins'],
                Photpars['MultiDimPrior'][:, 0],
                Photpars['MultiDimPrior'][:, 1],
                Photpars['MultiDimPrior'][:, -nPhotpars:])
            for i in range(nPhotpars):
                cube[i] = params[i]
        else:
            for i in range(nPhotpars):
                param, ln_prob_param = random_photpar(
                    cube[i],
                    Photpars[parnames[i]][:, 0],
                    Photpars[parnames[i]][:, 1],
                    Photpars[parnames[i]][:, 2])
                cube[i] = param
                lnlikePhot += ln_prob_param

        idx = nPhotpars

        # x position at t0:
        thetaS0x, ln_prob_thetaS0x = random_thetaS0x(cube[idx])
        cube[idx] = thetaS0x
        idx += 1
        lnlike_nonPhot += ln_prob_thetaS0x

        # y position at t0:
        thetaS0y, ln_prob_thetaS0y = random_thetaS0y(cube[idx])
        cube[idx] = thetaS0y
        idx += 1
        lnlike_nonPhot += ln_prob_thetaS0y

        # Source proper motion (x dimension)
        muSx, ln_prob_muSx = random_muSx(cube[idx])
        cube[idx] = muSx
        idx += 1
        lnlike_nonPhot += ln_prob_muSx

        # Source proper motion (y dimension)
        muSy, ln_prob_muSy = random_muSy(cube[idx])
        cube[idx] = muSy
        idx += 1
        lnlike_nonPhot += ln_prob_muSy

        # Source-lens relative proper motion (x dimension)
        muRelx, ln_prob_muRelx = random_muRelx(cube[idx])
        cube[idx] = muRelx
        idx += 1
        lnlike_nonPhot += ln_prob_muRelx

        # Source-lens relative proper motion (y dimension)
        muRely, ln_prob_muRely = random_muRely(cube[idx])
        cube[idx] = muRely
        idx += 1
        lnlike_nonPhot += ln_prob_muRely

        t0 = cube[0]
        beta = cube[1]
        tE = cube[2]
        piEN = cube[3]
        piEE = cube[4]

        # Create an astrometric model of the source
        thetaS_model, thetaE_amp, M, shift, thetaS_nolens = \
            MCMC_LensModel.LensModel_Trial1(tobs, t0, tE,
                                            [thetaS0x, thetaS0y],
                                            [muSx, muSy],
                                            [muRelx, muRely],
                                            beta, [piEN, piEE])
        cube[11] = thetaE_amp
        cube[12] = M

        thetaSx_model = thetaS_model[:, 0]
        thetaSy_model = thetaS_model[:, 1]

        lnlike = lnlikePhot + lnlike_nonPhot + \
            MCMC_LensModel.lnLikelihood(thetaSx_model, thetaSx_data, xerr_data) + \
            MCMC_LensModel.lnLikelihood(thetaSy_model, thetaSy_data, yerr_data)

        # print("Log Likelihood: ", lnlike)
        return lnlike

    num_dims = 11
    num_params = 13  # cube will have this many dimensions
    ev_tol = 0.3
    samp_eff = 0.8

    # Create param file
    _run = open(saveto + runcode + '_params.run', 'w')
    _run.write('Num Dimensions: %d\n' % num_dims)
    _run.write('Num Params: %d\n' % num_params)
    _run.write('Evidence Tolerance: %.1f\n' % ev_tol)
    _run.write('Sampling Efficiency: %.1f\n' % samp_eff)
    _run.write('Num Live Points: %d\n' % n_live_points)
    _run.close()

    startdir = os.getcwd()
    os.chdir(saveto)

    pymultinest.run(likelihood, priors, num_dims,
                    n_params=num_params,
                    outputfiles_basename=runcode + '_',
                    verbose=True, resume=False,
                    evidence_tolerance=ev_tol,
                    sampling_efficiency=samp_eff,
                    n_live_points=n_live_points,
                    multimodal=True,
                    n_clustering_params=num_dims,
                    importance_nested_sampling=False)

    os.chdir(startdir)

    # Testing
    # lnlike = likelihood([0.01, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.95], 12, 12)
    # print(lnlike)
    return
import pymultinest as pmn
import pandas as pd
import triangle

from mn_test import *

pmn.run(llhood_w_const, prior_transform_const, 3, verbose=True, n_live_points=150)

data = pd.read_csv('chains/1-post_equal_weights.dat',
                   names=['H0', 'om', 'w', 'lhood'],
                   delim_whitespace=True, header=None)
triangle.corner(data.values[:, :-1], labels=data.columns[:-1], smooth=1)
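# The `triangle` package used above was later renamed `corner`; with the
# modern package the same plot can be made like this (a sketch, assuming
# the `data` frame from the snippet above):
import corner
corner.corner(data.values[:, :-1], labels=list(data.columns[:-1]), smooth=1)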
def nested_run(id=None, otherids=(), prior=None, parameters=None,
               sampling_efficiency=0.3, evidence_tolerance=0.5,
               n_live_points=400, outputfiles_basename='chains/', **kwargs):
    """
    Run the Bayesian analysis with the specified parameters+transformations.

    :param id: See the sherpa documentation of calc_stat.
    :param otherids: See the sherpa documentation of calc_stat.
    :param prior: prior function created with create_prior_function.
    :param parameters: List of parameters to analyse.
    :param outputfiles_basename: prefix for output filenames.

    If prior is None, uniform priors are used on the passed parameters.
    If parameters is also None, all thawed parameters are used.

    The remainder are MultiNest arguments (see the PyMultiNest and MultiNest
    documentation!). n_live_points = 400 is often enough.

    For quick results, use sampling_efficiency = 0.8, n_live_points = 50,
    evidence_tolerance = 5. The real results must be estimated with
    sampling_efficiency = 0.3; otherwise they are not reliable.
    """
    fit = ui._session._get_fit(id=id, otherids=otherids)[1]

    if not isinstance(fit.stat, (Cash, CStat)):
        raise RuntimeError("Fit statistic must be cash or cstat, not %s" %
                           fit.stat.name)

    if parameters is None:
        parameters = fit.model.thawedpars

    def log_likelihood(cube, ndim, nparams):
        try:
            for i, p in enumerate(parameters):
                assert not isnan(cube[i]), \
                    'ERROR: parameter %d (%s) to be set to %f' % (i, p.fullname, cube[i])
                p.val = cube[i]
            l = -0.5 * fit.calc_stat()
            return l
        except Exception as e:
            print('Exception in log_likelihood function: ', e)
            for i, p in enumerate(parameters):
                print('  Parameter %10s: %f --> %f [%f..%f]' %
                      (p.fullname, p.val, cube[i], p.min, p.max))
            import sys
            sys.exit(-127)
            return -1e300

    if prior is None:
        prior = create_prior_function(id=id, otherids=otherids, parameters=parameters)

    n_params = len(parameters)
    pymultinest.run(log_likelihood, prior, n_params,
                    sampling_efficiency=sampling_efficiency,
                    n_live_points=n_live_points,
                    outputfiles_basename=outputfiles_basename,
                    evidence_tolerance=evidence_tolerance, **kwargs)

    import json
    m = ui._session._get_model(id)
    paramnames = [p.fullname for p in parameters]
    with open('%sparams.json' % outputfiles_basename, 'w') as f:
        json.dump(paramnames, f, indent=2)
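# A hedged usage sketch for the sherpa nested_run() above: analyse fit id=1
# with an explicit prior built from the thawed parameters. The data and
# model setup is assumed to have been done already.
def run_sherpa_fit():
    # uniform priors on all thawed parameters (the default behaviour)
    prior = create_prior_function(id=1)
    nested_run(id=1, prior=prior, outputfiles_basename='chains/',
               n_live_points=400, sampling_efficiency=0.3)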
def Explore(self):
    '''This member function invokes MultiNest. The data must be loaded
    and the likelihood set.'''
    # Print an ASCII-art "MNSpecFit" banner with a credit line
    # ("-J. Michael Burgess"); the art is omitted here.

    if not self._dataLoaded:
        # Make sure data have been loaded
        print()
        print(bcolors.FAIL + "YOU HAVE NOT LOADED ANY DATA!!" + bcolors.ENDC)
        print()
        return

    if not self._saveFileSet:
        # Warn that no save file is set
        print()
        print(bcolors.WARNING + "Save file not set!!! Fit params not saved!" + bcolors.ENDC)
        print()

    outfilesDir = ""
    tmp = self.basename.split('/')
    for s in tmp[:-1]:
        outfilesDir += s + '/'
    self.outfilesDir = outfilesDir
    if not os.path.exists(outfilesDir):
        os.makedirs(outfilesDir)

    # we want to see some output while it is running
    if not self.silent:
        print("SILENT")
        progress = pymultinest.ProgressPlotter(n_params=self.n_params)
        progress.start()
        threading.Timer(2, show, [self.basename + "phys_live.points.pdf"]).start()  # delayed opening

    startTime = time.time()
    self._PreFitInfo()

    # run MultiNest
    pymultinest.run(self.likelihood, self.prior, self.n_params,
                    importance_nested_sampling=self.importance_nested_sampling,
                    resume=self.resume,
                    verbose=self.verbose,
                    sampling_efficiency=self.sampling_efficiency,
                    n_live_points=self.n_live_points,
                    outputfiles_basename=self.basename,
                    init_MPI=False,
                    dump_callback=self.callback,
                    write_output=self.write)

    # ok, done. Stop our progress watcher
    if not self.silent:
        progress.stop()

    print()
    print(bcolors.OKGREEN + "Finished sampling in %.2f seconds" % (time.time() - startTime) + bcolors.ENDC)
    print()

    if self._saveFileSet:
        self._WriteFit()
def myloglike_kg(cube, ndim, n_params):
    try:
        loglike = kg_loglikelihood(cube, my_observable, my_error, a)
        return loglike
    except:
        return -np.inf

parameters = ['Separation', 'Position Angle', 'Contrast']
n_params = len(parameters)

resume = False
eff = 0.3
multi = True
max_iter = 0
ndim = n_params

pymultinest.run(myloglike_kg, myprior, n_params, wrapped_params=[1],
                verbose=True, resume=False)

thing = pymultinest.Analyzer(n_params=n_params)
s = thing.get_stats()

this_j = trial
kseps[this_j], dkseps[this_j] = s['marginals'][0]['median'], s['marginals'][0]['sigma']
kthetas[this_j], dkthetas[this_j] = s['marginals'][1]['median'], s['marginals'][1]['sigma']
kcons[this_j], dkcons[this_j] = s['marginals'][2]['median'], s['marginals'][2]['sigma']

stuff = thing.get_best_fit()
best_params = stuff['parameters']

model_vises = np.sqrt(pysco.binary_model(best_params, a, hdr, vis2=True))
model_kervises = np.dot(KerGain, model_vises - 1.)
mpirun -np 4 python run_PyPolyChord.py

on Mac:
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$PWD/lib
export LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libgfortran.so.3
export LD_PRELOAD=/opt/local/lib/openmpi//lib/libmpi.so:$LD_PRELOAD
mpirun -np 4 python run_PyPolyChord.py
'''

# number of dimensions our problem has
parameters = mc.variable_list
n_params = mc.ndim

pymultinest.run(mc.multinest_call, mc.multinest_priors, n_params,
                resume=True, verbose=True, n_live_points=1000,
                outputfiles_basename=dir_output + 'chains/2-')


def dump():
    progress = pymultinest.ProgressPlotter(n_params=n_params,
                                           outputfiles_basename=dir_output + 'chains/2-')
    progress.start()
    threading.Timer(2, show, [dir_output + "chains/2-phys_live.points.pdf"]).start()  # delayed opening

    # run MultiNest
    pymultinest.run(mc.multinest_call, mc.multinest_priors, n_params,
                    importance_nested_sampling=False,
                    resume=True, verbose=True,
                    sampling_efficiency='model', n_live_points=1000,
                    outputfiles_basename=dir_output + 'chains/2-')

    # ok, done. Stop our progress watcher
    progress.stop()

    # lets analyse the results
    a = pymultinest.Analyzer(n_params=n_params,
                             outputfiles_basename=dir_output + 'chains/2-')
    s = a.get_stats()