def test_neff():
    # test of neff functionality
    ndim = 2
    rstate = get_rstate()
    sampler = dynesty.NestedSampler(loglike, prior_transform, ndim,
                                    nlive=nlive, rstate=rstate)
    assert sampler.n_effective == 0
    sampler.run_nested(print_progress=printing)
    assert sampler.n_effective > 10
    sampler = dynesty.DynamicNestedSampler(loglike, prior_transform, ndim,
                                           nlive=nlive, rstate=rstate)
    assert sampler.n_effective == 0
    sampler.run_nested(dlogz_init=1, n_effective=1000,
                       print_progress=printing)
    assert sampler.n_effective > 1000
    sampler = dynesty.DynamicNestedSampler(loglike, prior_transform, ndim,
                                           nlive=nlive, rstate=rstate)
    sampler.run_nested(dlogz_init=1, n_effective=10000,
                       print_progress=printing)
    assert sampler.n_effective > 10000

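The effective sample size asserted above is derived from the importance weights of the run. A minimal sketch of the standard Kish estimate computed directly from the results (this reuses the module-level helpers of the test above, such as `loglike`, `prior_transform`, `nlive`, and `printing`, and is an illustration rather than the library's internal code):

    import numpy as np
    import dynesty
    from scipy.special import logsumexp

    rstate = get_rstate()
    s = dynesty.DynamicNestedSampler(loglike, prior_transform, 2,
                                     nlive=nlive, rstate=rstate)
    s.run_nested(dlogz_init=1, n_effective=1000, print_progress=printing)
    logwt = s.results.logwt
    # Kish effective sample size: (sum w)^2 / sum w^2, computed in log space
    ess = np.exp(2 * logsumexp(logwt) - logsumexp(2 * logwt))
    print(ess, s.n_effective)
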
def __init__(self, likelihood_module, prior_type='uniform',
             prior_means=None, prior_sigmas=None, width_scale=1,
             sigma_scale=1, bound='multi', sample='auto',
             use_mpi=False, use_pool={}):
    """
    :param likelihood_module: likelihood_module like in likelihood.py (should be callable)
    :param prior_type: 'uniform' or 'gaussian', for converting the unit hypercube to param cube
    :param prior_means: if prior_type is 'gaussian', mean for each param
    :param prior_sigmas: if prior_type is 'gaussian', std dev for each param
    :param width_scale: scale the widths of the parameter space by this factor
    :param sigma_scale: if prior_type is 'gaussian', scale the gaussian sigma by this factor
    :param bound: specific to Dynesty, see https://dynesty.readthedocs.io
    :param sample: specific to Dynesty, see https://dynesty.readthedocs.io
    :param use_mpi: use MPI computing if `True`
    :param use_pool: specific to Dynesty, see https://dynesty.readthedocs.io
    """
    super(DynestySampler, self).__init__(likelihood_module, prior_type,
                                         prior_means, prior_sigmas,
                                         width_scale, sigma_scale)

    # create the Dynesty sampler
    if use_mpi:
        from schwimmbad import MPIPool
        import sys

        pool = MPIPool(use_dill=True)

        if not pool.is_master():
            pool.wait()
            sys.exit(0)

        self._sampler = dynesty.DynamicNestedSampler(self.log_likelihood,
                                                     self.prior, self.n_dims,
                                                     bound=bound,
                                                     sample=sample,
                                                     pool=pool,
                                                     use_pool=use_pool)
    else:
        self._sampler = dynesty.DynamicNestedSampler(self.log_likelihood,
                                                     self.prior, self.n_dims,
                                                     bound=bound,
                                                     sample=sample)

    self._has_warned = False

def __init__(self, model, nlive, nprocesses=1,
             loglikelihood_function=None, use_mpi=False, run_kwds=None,
             **kwargs):
    self.model = model
    log_likelihood_call, prior_call = setup_calls(
        model,
        nprocesses=nprocesses,
        loglikelihood_function=loglikelihood_function)
    # Set up the pool
    pool = choose_pool(mpi=use_mpi, processes=nprocesses)
    if pool is not None:
        pool.size = nprocesses

    self.run_kwds = {} if run_kwds is None else run_kwds
    self.nlive = nlive
    self.names = model.sampling_params
    self.ndim = len(model.sampling_params)
    self.checkpoint_file = None

    if self.nlive < 0:
        # A negative number of live points (otherwise an invalid input)
        # is interpreted as a request to determine the number dynamically.
        self._sampler = dynesty.DynamicNestedSampler(log_likelihood_call,
                                                     prior_call, self.ndim,
                                                     pool=pool, **kwargs)
    else:
        self._sampler = dynesty.NestedSampler(log_likelihood_call,
                                              prior_call, self.ndim,
                                              nlive=self.nlive,
                                              pool=pool, **kwargs)

def test_results(dyn):
    # test of various results interfaces functionality
    ndim = 2
    rstate = get_rstate()
    if dyn:
        sampler = dynesty.DynamicNestedSampler(loglike, prior_transform, ndim,
                                               nlive=nlive, rstate=rstate)
    else:
        sampler = dynesty.NestedSampler(loglike, prior_transform, ndim,
                                        nlive=nlive, rstate=rstate)
    sampler.run_nested(print_progress=printing)
    res = sampler.results
    for k in res.keys():
        pass
    for k, v in res.items():
        pass
    for k, v in res.asdict().items():
        pass
    print(res)
    print(str(res))
    print('logl' in res)
    res1 = res.copy()
    # check it's pickleable
    S = pickle.dumps(res)
    res = pickle.loads(S)

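Beyond iterating over its keys, the results object is usually reduced to equal-weight posterior samples. A minimal sketch of that step (reusing the `sampler` produced by the test above; `resample_equal` is provided by `dynesty.utils`):

    import numpy as np
    from dynesty import utils as dyfunc

    res = sampler.results
    # normalized importance weights of the dead points
    weights = np.exp(res['logwt'] - res['logz'][-1])
    posterior_samples = dyfunc.resample_equal(res['samples'], weights)
    print(posterior_samples.shape)
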
def test_periodic():
    # hard test of dynamic sampler with high dlogz_init and small number
    # of live points
    logz_true = np.log(np.sqrt(2 * np.pi) * erf(win / np.sqrt(2)) / (2 * win))
    thresh = 5
    ndim = 2
    rstate = get_rstate()
    sampler = dynesty.DynamicNestedSampler(loglike, prior_transform, ndim,
                                           nlive=nlive, periodic=[0],
                                           rstate=rstate)
    sampler.run_nested(dlogz_init=1, print_progress=printing)
    assert (np.abs(sampler.results.logz[-1] - logz_true) <
            thresh * sampler.results.logzerr[-1])
    sampler = dynesty.NestedSampler(loglike, prior_transform, ndim,
                                    nlive=nlive, periodic=[0], rstate=rstate)
    sampler.run_nested(dlogz=1, print_progress=printing)
    assert (np.abs(sampler.results.logz[-1] - logz_true) <
            thresh * sampler.results.logzerr[-1])

def __init__(self, loglikelihood, prior_transform, ndim,
             sample='auto', bound='multi', n_cpu=None, n_thread=None):

    if n_cpu is None:
        n_cpu = mp.cpu_count()

    if n_thread is not None:
        n_thread = max(n_thread, n_cpu - 1)

    if n_cpu > 1:
        self.open_pool(n_cpu)
        self.use_pool = {'update_bound': False}
    else:
        self.pool = None
        self.use_pool = None

    self.prior_tf = prior_transform
    self.loglike = loglikelihood
    self.ndim = ndim

    dsampler = dynesty.DynamicNestedSampler(self.loglike,
                                            self.prior_tf,
                                            self.ndim,
                                            sample=sample,
                                            bound=bound,
                                            pool=self.pool,
                                            queue_size=n_thread,
                                            use_pool=self.use_pool)
    self.dsampler = dsampler

def test_pickle(dynamic, with_pool):
    # test of pickling functionality
    ndim = 2
    rstate = get_rstate()
    if with_pool:
        kw = dict(pool=Pool(2), queue_size=100)
    else:
        kw = {}
    if dynamic:
        sampler = dynesty.DynamicNestedSampler(loglike, prior_transform, ndim,
                                               nlive=nlive, rstate=rstate,
                                               **kw)
    else:
        sampler = dynesty.NestedSampler(loglike, prior_transform, ndim,
                                        nlive=nlive, rstate=rstate, **kw)
    sampler.run_nested(print_progress=printing, maxiter=100)
    # pickle/unpickle twice, as there were previously issues with
    # incorrectly restoring the pool
    S = pickle.dumps(sampler)
    sampler = pickle.loads(S)
    S = pickle.dumps(sampler)
    sampler = pickle.loads(S)
    sampler.run_nested(print_progress=printing, maxiter=100)
    if with_pool:
        kw['pool'].close()
        kw['pool'].join()

def test_periodic(sampler, dynamic):
    # hard test of dynamic sampler with high dlogz_init and small number
    # of live points
    logz_true = np.log(np.sqrt(2 * np.pi) * erf(win / np.sqrt(2)) / (2 * win))
    thresh = 8
    # The threshold is set higher than usual because of failures at the
    # ~5 sigma level; this needs to be investigated.
    rstate = get_rstate()
    if dynamic:
        dns = dynesty.DynamicNestedSampler(loglike, prior_transform, ndim,
                                           nlive=nlive, periodic=[0],
                                           rstate=rstate, sample=sampler)
        dns.run_nested(dlogz_init=1, print_progress=printing)
    else:
        dns = dynesty.NestedSampler(loglike, prior_transform, ndim,
                                    nlive=nlive, periodic=[0],
                                    rstate=rstate, sample=sampler)
        dns.run_nested(dlogz=1, print_progress=printing)
    assert (np.abs(dns.results.logz[-1] - logz_true) <
            thresh * dns.results.logzerr[-1])

def test_periodic(sampler, dynamic):
    # hard test of dynamic sampler with high dlogz_init and small number
    # of live points
    logz_true = np.log(np.sqrt(2 * np.pi) * erf(win / np.sqrt(2)) / (2 * win))
    thresh = 5
    rstate = get_rstate()
    if dynamic:
        dns = dynesty.DynamicNestedSampler(loglike, prior_transform, ndim,
                                           nlive=nlive, sample=sampler,
                                           reflective=[0], rstate=rstate)
    else:
        dns = dynesty.NestedSampler(loglike, prior_transform, ndim,
                                    nlive=nlive, sample=sampler,
                                    reflective=[0], rstate=rstate)
    dns.run_nested(print_progress=printing)
    assert (np.abs(dns.results.logz[-1] - logz_true) <
            thresh * dns.results.logzerr[-1])

def run_dynamic_nested(self, evidence=False, posterior=False, nlive=250,
                       sample='slice'):
    self.sampler = dynesty.DynamicNestedSampler(
        self.model.lnlikefunc,
        self.model.priors.transform_prior,
        ndim=self.ndim,
        bound='multi',
        sample=sample,
        nlive=nlive)

    if evidence and posterior:
        print('Both, really? Going to default')
        self.sampler.run_nested()
    elif evidence:
        # evidence-focused dynamic run
        self.sampler.run_nested(wt_kwargs={'pfrac': 0.0},
                                stop_kwargs={'pfrac': 0.0})
    elif posterior:
        # posterior-focused dynamic run
        self.sampler.run_nested(wt_kwargs={'pfrac': 1.0})
    else:
        # Default behavior: 80/20 weight split and 100% posterior.
        self.sampler.run_nested()

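The `pfrac` keyword above sets the posterior-versus-evidence split of the dynamic live-point allocation (1.0 is fully posterior-oriented, 0.0 fully evidence-oriented). A standalone sketch of an evidence-focused run on a toy 2-D Gaussian (the `toy_loglike`/`toy_ptform` helpers are illustrative and not part of the original class):

    import numpy as np
    import dynesty

    def toy_loglike(x):
        # standard-normal log-density up to a constant
        return -0.5 * np.sum(x**2)

    def toy_ptform(u):
        # uniform prior on [-10, 10] in each dimension
        return 20.0 * u - 10.0

    dsampler = dynesty.DynamicNestedSampler(toy_loglike, toy_ptform, 2)
    dsampler.run_nested(wt_kwargs={'pfrac': 0.0}, stop_kwargs={'pfrac': 0.0})
    print(dsampler.results.logz[-1])
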
def test_error():
    rstate = get_rstate()
    with pytest.raises(ValueError):
        dynesty.DynamicNestedSampler(loglike, prior_transform, ndim,
                                     nlive=nlive, reflective=[22],
                                     rstate=rstate)

def test_dynamic():
    # check dynamic nested sampling behavior
    logz_tol = 1
    dsampler = dynesty.DynamicNestedSampler(loglikelihood_gau,
                                            prior_transform_gau,
                                            ntotdim,
                                            ncdim=ndim_gau)
    dsampler.run_nested(print_progress=printing)
    check_results_gau(dsampler.results, logz_tol)

def fit(self, log_likelihood, start, num_dim, prior_transform,
        save_dims=None, uid=None):
    import dynesty

    filename = self.get_filename(uid)
    if os.path.exists(filename):
        self.logger.info("Not sampling, returning result from file.")
        return self.load_file(filename)
    self.logger.info("Sampling posterior now")

    if save_dims is None:
        save_dims = num_dim
    self.logger.debug("Fitting framework with %d dimensions" % num_dim)
    self.logger.info("Using dynesty Sampler")
    if self.dynamic:
        sampler = dynesty.DynamicNestedSampler(log_likelihood,
                                               prior_transform, num_dim)
        sampler.run_nested(maxiter=self.max_iter, print_progress=False,
                           nlive_init=self.nlive, nlive_batch=100,
                           maxbatch=10)
    else:
        sampler = dynesty.NestedSampler(log_likelihood, prior_transform,
                                        num_dim, nlive=self.nlive)
        sampler.run_nested(maxiter=self.max_iter, print_progress=False)

    self.logger.debug("Fit finished")

    dresults = sampler.results
    logz = dresults["logz"]
    chain = dresults["samples"]
    weights = np.exp(dresults["logwt"] - dresults["logz"][-1])
    max_weight = weights.max()
    trim = max_weight / 1e5
    mask = weights > trim
    likelihood = dresults["logl"]
    self._save(chain[mask, :], weights[mask], likelihood[mask], filename,
               logz[mask], save_dims)
    return {"chain": chain[mask, :],
            "weights": weights[mask],
            "posterior": likelihood[mask],
            "evidence": logz}

def test_stop_nmc():
    # test the stopping criterion based on n_mc
    ndim = 2
    rstate = get_rstate()
    sampler = dynesty.DynamicNestedSampler(loglike, prior_transform, ndim,
                                           nlive=nlive, rstate=rstate)
    sampler.run_nested(dlogz_init=1,
                       n_effective=None,
                       stop_kwargs=dict(n_mc=25),
                       print_progress=printing)

def Model_2_sampler(prior_xform2, data1, data2, bins_, label):
    x, n1, n2, dn1, dn2 = bin_data(data1, data2, bins_, label)
    print("running the nested sampler... this might take from minutes to hours...")
    dsampler = dynesty.DynamicNestedSampler(logLjoint2_skew, prior_xform2,
                                            ndim=16, logl_args=(n1, n2, x),
                                            nlive=2000, bound='multi',
                                            sample='auto')
    dsampler.run_nested()
    dres2 = dsampler.results
    with open('sampler_results_model2_' + label, 'wb') as dres2_file:
        pickle.dump(dres2, dres2_file)
    print("sampler output saved as pickle file 'sampler_results_model2_" + label + "'")

def _fit_impl_impl(self, objective, parameters):
    ndim = 3
    sampler = dynesty.DynamicNestedSampler(_log_likelihood_wrapper,
                                           _prior_tansform_wrapper,
                                           ndim,
                                           logl_args=(objective, parameters),
                                           ptform_args=(),
                                           **self._options_constructor)
    print(self._options_run_nested)
    result = sampler.run_nested(**self._options_run_nested)
    return result

def test_oldstop():
    # test of old stopping function functionality
    ndim = 2
    rstate = get_rstate()
    import dynesty.utils as dyutil

    stopfn = dyutil.old_stopping_function
    sampler = dynesty.DynamicNestedSampler(loglike, prior_transform, ndim,
                                           nlive=nlive, rstate=rstate)
    sampler.run_nested(dlogz_init=1,
                       n_effective=None,
                       stop_function=stopfn,
                       print_progress=printing)

def test_pool_dynamic():
    # test the pool in dynamic mode
    # for speed, a gaussian likelihood is used here
    rstate = get_rstate()
    with mp.Pool(2) as pool:
        sampler = dynesty.DynamicNestedSampler(loglike_gau,
                                               prior_transform_gau,
                                               ndim,
                                               nlive=nlive,
                                               pool=pool,
                                               queue_size=100,
                                               rstate=rstate)
        sampler.run_nested(dlogz_init=1, print_progress=printing)
        assert (abs(LOGZ_TRUTH_GAU - sampler.results.logz[-1]) <
                5. * sampler.results.logzerr[-1])

def test_maxcall():
    # test of the maxcall limit with both static and dynamic samplers
    ndim = 2
    sampler = dynesty.NestedSampler(loglike, prior_transform, ndim,
                                    nlive=nlive)
    sampler.run_nested(dlogz=1, maxcall=1000)
    sampler = dynesty.DynamicNestedSampler(loglike, prior_transform, ndim,
                                           nlive=nlive)
    sampler.run_nested(dlogz_init=1, maxcall=1000)

def test_dyn():
    # hard test of dynamic sampler with high dlogz_init and small number
    # of live points
    ndim = 2
    bound = 'multi'
    sampler = dynesty.DynamicNestedSampler(loglike_egg,
                                           prior_transform_egg,
                                           ndim,
                                           nlive=nlive,
                                           bound=bound,
                                           sample='unif')
    sampler.run_nested(dlogz_init=1, print_progress=printing)
    logz_truth = 235.856
    assert (abs(logz_truth - sampler.results.logz[-1]) <
            5. * sampler.results.logzerr[-1])

def fit_e_disk2():
    pdfs_weights_file = 'all_pdfs_weights.pkl'
    membership_file = 'membership_probs.fits'

    tmp = load_pdfs_weights_pickle(pdfs_weights_file)
    pdf_dict = tmp[0]
    wgt_dict = tmp[1]
    d1_dict = tmp[2]
    d2_dict = tmp[3]
    grp_dict = tmp[4]

    prob_mem = Table.read(membership_file)

    # Fit only stars with non-zero membership probability.
    p_thresh = 0.1
    s_d2 = np.where(prob_mem['p_d2'] > p_thresh)[0]

    e_solver = Eccentricity_Solver(pdf_dict['e'][s_d2, :],
                                   wgt_dict['d2'][s_d2, :],
                                   prob_mem['name'][s_d2])
    e_solver.priors['alpha'] = make_gen(0, 50)
    e_solver.priors['beta'] = make_gen(0, 30)

    t0 = time.time()
    # n_cpu = 4
    # pool = Pool(n_cpu)
    sampler = dynesty.DynamicNestedSampler(e_solver.LogLikelihood,
                                           e_solver.Prior,
                                           ndim=e_solver.n_dims,
                                           bound='multi',
                                           sample='unif')
    sampler.run_nested(print_progress=True, dlogz_init=0.05,
                       nlive_init=1000, nlive_batch=500,
                       maxiter_init=20000, maxiter_batch=2000, maxbatch=10)
    e_solver.sampler = sampler
    t1 = time.time()
    print('Runtime: ', t1 - t0)

    e_solver.save('dnest_ecc_d2.pkl')

    return

def test_pool2():
    # test pool
    ndim = 2
    pool = mp.Pool(2)
    sampler = dynesty.DynamicNestedSampler(loglike_egg,
                                           prior_transform_egg,
                                           ndim,
                                           nlive=nlive,
                                           bound='multi',
                                           sample='unif',
                                           pool=pool,
                                           queue_size=2)
    sampler.run_nested(dlogz_init=0.1, print_progress=printing)
    logz_truth = 235.856
    assert (abs(logz_truth - sampler.results.logz[-1]) <
            5. * sampler.results.logzerr[-1])

def test_printing():
    # test that progress printing works for both dynamic and static samplers
    ndim = 2
    sampler = dynesty.DynamicNestedSampler(loglike, prior_transform, ndim,
                                           nlive=nlive)
    sampler.run_nested(dlogz_init=1, print_progress=printing)
    sampler = dynesty.NestedSampler(loglike, prior_transform, ndim,
                                    nlive=nlive)
    sampler.run_nested(dlogz=1, print_progress=printing)

def test_dyn():
    # hard test of dynamic sampler with high dlogz_init and small number
    # of live points
    ndim = 2
    THRESHOLD = 5  # in sigmas
    rstate = get_rstate()
    # this is expected to use the unif sampler and multi bound
    sampler = dynesty.DynamicNestedSampler(loglike_egg,
                                           prior_transform_egg,
                                           ndim,
                                           nlive=nlive,
                                           rstate=rstate)
    sampler.run_nested(dlogz_init=1, print_progress=printing)
    assert (abs(LOGZ_TRUTH - sampler.results.logz[-1]) <
            THRESHOLD * sampler.results.logzerr[-1])
    print(sampler.citations)

def Run_Dynamic_Nested_Fitting(loglikelihood, prior_transform, ndim,
                               nlive_init=100, sample='auto',
                               nlive_batch=50, maxbatch=2,
                               pfrac=0.8, n_cpu=None, print_progress=True):
    """
    Run the fitting as a function.

    Parameters
    ----------
    loglikelihood : function
        log-likelihood function
    prior_transform : function
        prior transform function
    ndim : int
        number of dimensions
    """

    print("Run Nested Fitting for the image... # of params: %d" % ndim)
    start = time.time()

    if n_cpu is None:
        n_cpu = mp.cpu_count() - 1

    with mp.Pool(processes=n_cpu) as pool:
        print("Opening pool: # of CPU used: %d" % (n_cpu))
        pool.size = n_cpu

        dlogz = 1e-3 * (nlive_init - 1) + 0.01

        pdsampler = dynesty.DynamicNestedSampler(loglikelihood,
                                                 prior_transform, ndim,
                                                 sample=sample, pool=pool,
                                                 use_pool={'update_bound': False})
        pdsampler.run_nested(nlive_init=nlive_init,
                             nlive_batch=nlive_batch,
                             maxbatch=maxbatch,
                             print_progress=print_progress,
                             dlogz_init=dlogz,
                             wt_kwargs={'pfrac': pfrac})

    end = time.time()
    print("Finish Fitting! Total time elapsed: %.3gs" % (end - start))

    return pdsampler

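A hedged usage sketch of the helper above on a toy 2-D Gaussian (the `toy_loglike` and `toy_ptform` functions are illustrative placeholders defined at module level so they can be pickled for the pool; they are not part of the original module):

    import numpy as np

    def toy_loglike(x):
        # standard-normal log-density up to a constant
        return -0.5 * np.sum(x**2)

    def toy_ptform(u):
        # uniform prior on [-5, 5] in each dimension
        return 10.0 * u - 5.0

    pdsampler = Run_Dynamic_Nested_Fitting(toy_loglike, toy_ptform, ndim=2,
                                           nlive_init=100, maxbatch=2,
                                           n_cpu=2)
    print(pdsampler.results.logz[-1])
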
def __init__(self, container, sample='auto', bound='multi',
             n_cpu=None, n_thread=None, run=True, results=None):
    """ A class for running the sampling and plotting the results. """

    # False if a previous run is read
    self.run = run

    self.container = container
    self.image = container.image
    self.ndim = container.ndim
    self.labels = container.labels

    if run:
        if n_cpu is None:
            n_cpu = min(mp.cpu_count() - 1, 10)

        if n_thread is not None:
            n_thread = max(n_thread, n_cpu - 1)

        if n_cpu > 1:
            self.open_pool(n_cpu)
            self.use_pool = {'update_bound': False}
        else:
            self.pool = None
            self.use_pool = None

        self.prior_tf = container.prior_transform
        self.loglike = container.loglikelihood

        dsampler = dynesty.DynamicNestedSampler(self.loglike,
                                                self.prior_tf,
                                                self.ndim,
                                                sample=sample,
                                                bound=bound,
                                                pool=self.pool,
                                                queue_size=n_thread,
                                                use_pool=self.use_pool)
        self.dsampler = dsampler
    else:
        # use existing results from a previous run
        self._results = results

def test_maxcall():
    # test of maxcall functionality
    ndim = 2
    rstate = get_rstate()
    sampler = dynesty.NestedSampler(loglike, prior_transform, ndim,
                                    nlive=nlive, rstate=rstate)
    sampler.run_nested(dlogz=1, maxcall=1000, print_progress=printing)
    sampler = dynesty.DynamicNestedSampler(loglike, prior_transform, ndim,
                                           nlive=nlive, rstate=rstate)
    sampler.run_nested(dlogz_init=1, maxcall=1000, print_progress=printing)

def test_inf():
    # Test of a logl that returns -inf
    ndim = 2
    rstate = get_rstate()
    sampler = dynesty.NestedSampler(loglike_inf, prior_transform, ndim,
                                    nlive=nlive, rstate=rstate)
    sampler.run_nested(print_progress=printing)
    sampler = dynesty.DynamicNestedSampler(loglike_inf, prior_transform, ndim,
                                           nlive=nlive, rstate=rstate)
    sampler.run_nested(dlogz_init=1, print_progress=printing)

def __init__(self, likelihood, parameters, **kwargs):
    prop_default = {
        "nlive": 1000,
        "bound": "single",
        "which_sampler": "dynamic",
        "run_kwargs": {},
    }

    self.likelihood = likelihood

    for prop, default in prop_default.items():
        setattr(self, prop, kwargs.get(prop, default))

    for prop, default in prop_default.items():
        kwargs[prop] = kwargs.get(prop, default)

    super().__init__(parameters, **kwargs)

    current_point_dict = self.get_current_point()
    self.current_point = np.array(
        [current_point_dict[key] for key in self.key_order])

    self.injection = self.current_point.copy()

    if self.which_sampler == "dynamic":
        print("Running dynamic sampler.")
        self.sampler = dynesty.DynamicNestedSampler(
            loglike,
            ptform,
            len(self.test_inds),
            logl_args=(self.likelihood, ),
            ptform_args=(self.sampling_values, self.key_order,
                         self.test_inds),
            **kwargs)
    elif self.which_sampler == "static":
        print("Running static sampler.")
        self.sampler = dynesty.NestedSampler(
            loglike,
            ptform,
            len(self.test_inds),
            logl_args=(self.likelihood, ),
            ptform_args=(self.sampling_values, self.key_order,
                         self.test_inds),
            **kwargs)
    else:
        raise ValueError("which_sampler must be dynamic or static.")

def test_usepool(func):
    # test all the use_pool options, toggle them one by one
    rstate = get_rstate()
    use_pool = {}
    for k in POOL_KW:
        use_pool[k] = False
    use_pool[func] = True
    with mp.Pool(2) as pool:
        sampler = dynesty.DynamicNestedSampler(loglike_gau,
                                               prior_transform_gau,
                                               ndim,
                                               nlive=nlive,
                                               rstate=rstate,
                                               use_pool=use_pool,
                                               pool=pool,
                                               queue_size=100)
        sampler.run_nested(maxiter=10000, print_progress=printing)

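The `use_pool` dictionary decides which internal operations are dispatched to the pool. A minimal standalone sketch that parallelizes only the likelihood calls (the key names below are the ones documented by dynesty; `loglike_gau`, `prior_transform_gau`, `ndim`, `nlive`, `printing`, and `get_rstate` are reused from the test above):

    use_pool = {'prior_transform': False,
                'loglikelihood': True,
                'propose_point': False,
                'update_bound': False}
    with mp.Pool(2) as pool:
        sampler = dynesty.DynamicNestedSampler(loglike_gau,
                                               prior_transform_gau,
                                               ndim, nlive=nlive,
                                               pool=pool, queue_size=100,
                                               use_pool=use_pool,
                                               rstate=get_rstate())
        sampler.run_nested(maxiter=5000, print_progress=printing)
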