def __add__(self, other: "DynestySamples") -> "DynestySamples":
    """
    Samples can be added together, which combines their `sample_list` meaning that inferred
    parameters are computed via their joint PDF.

    For dynesty samples, the in-built dynesty function `merge_runs` can be used to combine
    results_internal in their native format and therefore retain visualization support.

    Parameters
    ----------
    other
        Another Samples class.

    Returns
    -------
    A class that combines the samples of the two Samples objects.
    """
    self._check_addition(other=other)

    results_internal = dyfunc.merge_runs(res_list=[self.results_internal, other.results_internal])

    return DynestySamples(
        model=self.model,
        sample_list=self.sample_list + other.sample_list,
        number_live_points=self._number_live_points,
        unconverged_sample_size=self.unconverged_sample_size,
        time=self.time,
        results_internal=results_internal,
    )
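# A minimal usage sketch of the `__add__` method above. The variable names
# `samples_1` and `samples_2` are hypothetical stand-ins for two DynestySamples
# objects built from the same model; only attributes defined in the method
# above (`sample_list`, `results_internal`) are used here.
combined = samples_1 + samples_2

print(len(combined.sample_list))            # combined list of samples from both runs
print(combined.results_internal.logz[-1])   # evidence of the merged dynesty run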
def test_ravel_unravel():
    """
    Test that unravelling a dynamic run into single-live-point "strands" and
    merging them back preserves the evidence estimate.
    """
    rstate = get_rstate()
    g = Gaussian()
    dsampler = dynesty.DynamicNestedSampler(g.loglikelihood,
                                            g.prior_transform,
                                            g.ndim,
                                            bound='single',
                                            sample='unif',
                                            rstate=rstate,
                                            nlive=nlive)
    maxiter = 1800
    dsampler.run_nested(maxiter=maxiter,
                        use_stop=False,
                        nlive_batch=100,
                        print_progress=printing)
    dres = dsampler.results

    # Unravel the run into individual strands, merge them back and compare.
    dres_list = dyfunc.unravel_run(dres)
    dres_merge = dyfunc.merge_runs(dres_list)
    assert np.abs(dres.logz[-1] - dres_merge.logz[-1]) < 0.01
def test_gaussian():
    sig = 5
    rstate = get_rstate()
    g = Gaussian()
    sampler = dynesty.NestedSampler(g.loglikelihood,
                                    g.prior_transform,
                                    g.ndim,
                                    nlive=nlive,
                                    rstate=rstate)
    sampler.run_nested(print_progress=printing)

    # check that jitter/resample work for the non-dynamic (static) sampler
    dyfunc.jitter_run(sampler.results, rstate=rstate)
    dyfunc.resample_run(sampler.results, rstate=rstate)

    # add samples; check continuation behavior
    sampler.run_nested(dlogz=0.1, print_progress=printing)

    # get errors from repeated independent runs
    nerr = 3
    result_list = []
    for i in range(nerr):
        sampler.reset()
        sampler.run_nested(print_progress=False)
        results = sampler.results
        result_list.append(results)
        pos = results.samples
        wts = np.exp(results.logwt - results.logz[-1])
        mean, cov = dyfunc.mean_and_cov(pos, wts)
        logz = results.logz[-1]
        assert (np.abs(logz - g.logz_truth) < sig * results.logzerr[-1])

    # merge the repeated runs and check the combined evidence
    res_comb = dyfunc.merge_runs(result_list)
    assert (np.abs(res_comb.logz[-1] - g.logz_truth) < sig * results.logzerr[-1])

    # check summary
    res = sampler.results
    res.summary()
def run(self, nlive=1000, cores=None, filename=None, **kwargs):
    merge = "no"
    if filename is not None and os.path.isfile(filename):
        doit = input(f"There seems to be a file named {filename}. "
                     f"Would you like to run anyway? [y/n] ").lower()
        if doit in ["no", "n"]:
            # Reuse the previously saved results instead of sampling again.
            with open(filename, "br") as file:
                self.results = pickle.load(file)
            return

    if cores is None or cores > MAX_CORES:
        cores = MAX_CORES

    try:
        with Pool(cores) as pool:
            sampler = NestedSampler(
                self.loglike,
                self.sample,
                self.N,
                npdim=self.ndim,
                nlive=nlive,
                pool=pool,
                queue_size=cores,
                **kwargs,
            )
            sampler.run_nested()
    except KeyboardInterrupt:
        pass

    if filename is not None and os.path.isfile(filename):
        merge = input("Merge new run with previous data? [y/n] ").lower()

    if merge in ["no", "n"]:
        self.results = sampler.results
    else:
        # Combine the new run with the previously saved one.
        with open(filename, "br") as file:
            res = pickle.load(file)
        self.results = merge_runs([sampler.results, res])

    if filename is not None:
        with open(filename, "bw") as file:
            pickle.dump(self.results, file)
def test_gaussian():
    logz_tol = 1
    sampler = dynesty.NestedSampler(loglikelihood_gau,
                                    prior_transform_gau,
                                    ntotdim,
                                    nlive=nlive,
                                    ncdim=ndim_gau)
    sampler.run_nested(print_progress=printing)

    # check that jitter/resample/simulate_run work for the non-dynamic (static) sampler
    dyfunc.jitter_run(sampler.results)
    dyfunc.resample_run(sampler.results)
    dyfunc.simulate_run(sampler.results)

    # add samples; check continuation behavior
    sampler.run_nested(dlogz=0.1, print_progress=printing)

    # get errors from repeated independent runs
    nerr = 2
    result_list = []
    for i in range(nerr):
        sampler.reset()
        sampler.run_nested(print_progress=False)
        results = sampler.results
        result_list.append(results)
        pos = results.samples
        wts = np.exp(results.logwt - results.logz[-1])
        mean, cov = dyfunc.mean_and_cov(pos, wts)
        logz = results.logz[-1]
        assert (np.abs(logz - logz_truth_gau) < logz_tol)

    # merge the repeated runs and check the combined evidence
    res_comb = dyfunc.merge_runs(result_list)
    assert (np.abs(res_comb.logz[-1] - logz_truth_gau) < logz_tol)

    # check summary
    res = sampler.results
    res.summary()
def merge_run(res_list):
    return dyfunc.merge_runs(res_list)
# "Static" nested sampling. sampler = dynesty.NestedSampler(loglike, ptform, ndim) sampler.run_nested() sresults = sampler.results # "Dynamic" nested sampling. dsampler = dynesty.DynamicNestedSampler(loglike, ptform, ndim, nlive=500) dsampler.run_nested() dresults = dsampler.results from dynesty import utils as dyfunc # Combine results from "Static" and "Dynamic" runs. results = dyfunc.merge_runs([sresults, dresults]) from dynesty import plotting as dyplot # Plot a summary of the run. rfig, raxes = dyplot.runplot(results) # Plot traces and 1-D marginalized posteriors. tfig, taxes = dyplot.traceplot(results) # Plot the 2-D marginalized posteriors. cfig, caxes = dyplot.cornerplot(results) # we can post-process results