def mcmc_CH(self, walkerRatio, n_run, n_burn, mean_start, sigma_start, threadCount=1, init_pos=None, mpi_monch=False):
    """
    Runs an MCMC on the parameter space within the given parameter bounds using
    CosmoHammerSampler and returns the chain.
    """
    lowerLimit, upperLimit = self.cosmoParam.param_bounds
    params = np.array([mean_start, lowerLimit, upperLimit, sigma_start]).T

    chain = LikelihoodComputationChain(
        min=lowerLimit,
        max=upperLimit)

    temp_dir = tempfile.mkdtemp("Hammer")
    file_prefix = os.path.join(temp_dir, "logs")

    # chain.addCoreModule(CambCoreModule())
    chain.addLikelihoodModule(self.chain)
    chain.setup()

    store = InMemoryStorageUtil()
    if mpi_monch:
        sampler = MpiCosmoHammerSampler(
            params=params,
            likelihoodComputationChain=chain,
            filePrefix=file_prefix,
            walkersRatio=walkerRatio,
            burninIterations=n_burn,
            sampleIterations=n_run,
            threadCount=1,
            initPositionGenerator=init_pos,
            storageUtil=store)
    else:
        sampler = CosmoHammerSampler(
            params=params,
            likelihoodComputationChain=chain,
            filePrefix=file_prefix,
            walkersRatio=walkerRatio,
            burninIterations=n_burn,
            sampleIterations=n_run,
            threadCount=threadCount,
            initPositionGenerator=init_pos,
            storageUtil=store)

    time_start = time.time()
    if sampler.isMaster():
        print('Computing the MCMC...')
        print('Number of walkers = ', len(mean_start) * walkerRatio)
        print('Burn-in iterations: ', n_burn)
        print('Sampling iterations:', n_run)
    sampler.startSampling()
    if sampler.isMaster():
        time_end = time.time()
        print(time_end - time_start, 'time taken for MCMC sampling')
    # if sampler._sampler.pool is not None:
    #     sampler._sampler.pool.close()
    try:
        shutil.rmtree(temp_dir)
    except Exception as ex:
        print(ex)
    return store.samples
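# The init_pos argument above accepts a cosmoHammer position generator. A minimal
# sketch, assuming the SampleBallPositionGenerator shipped with cosmoHammer (the
# import path may vary between versions); the sampler calls its setup() and
# generate() methods internally, so no further configuration is needed here.
from cosmoHammer.util import SampleBallPositionGenerator

init_pos = SampleBallPositionGenerator()
# samples = self.mcmc_CH(walkerRatio, n_run, n_burn, mean_start, sigma_start,
#                        init_pos=init_pos)   # hypothetical call site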
def test_createChainContext(self):
    chain = LikelihoodComputationChain()
    p = np.array([1, 2])
    ctx = chain.createChainContext(p)
    assert ctx is not None
    assert np.all(ctx.getParams() == p)
def test_modules(self):
    chain = LikelihoodComputationChain()
    assert len(chain.getCoreModules()) == 0
    assert len(chain.getLikelihoodModules()) == 0

    coreModule = DummyModule()
    likeModule = DummyModule()
    chain.addCoreModule(coreModule)
    chain.addLikelihoodModule(likeModule)
    assert len(chain.getCoreModules()) == 1
    assert len(chain.getLikelihoodModules()) == 1

    chain.setup()
    assert coreModule.init
    assert likeModule.init

    like, data = chain([0])
    assert coreModule.called
    assert likeModule.compLike
    assert like == DummyModule.like
    assert len(data) == 1
    assert data["data"] == DummyModule.data
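# The test above relies on a DummyModule test double that is not shown in this
# excerpt. The sketch below is an assumption inferred from the assertions
# (init, called, compLike, like, data); the real helper may differ in detail.
class DummyModule(object):
    """Minimal combined core/likelihood module stub for chain tests."""

    like = 1.0   # assumed dummy likelihood value
    data = 1     # assumed dummy data value

    def __init__(self):
        self.init = False
        self.called = False
        self.compLike = False

    def setup(self):
        # invoked once by chain.setup()
        self.init = True

    def __call__(self, ctx):
        # core-module hook, invoked for every position passed to the chain
        self.called = True

    def computeLikelihood(self, ctx):
        # likelihood-module hook: store dummy data and return the dummy likelihood
        ctx.getData()["data"] = DummyModule.data
        self.compLike = True
        return DummyModule.like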
def test_createChainContext_params(self):
    keys = ["a", "b"]
    params = Params((keys[0], 0), (keys[1], 1))

    chain = LikelihoodComputationChain()
    chain.params = params

    p = np.array([1, 2])
    ctx = chain.createChainContext(p)
    assert ctx is not None
    assert np.all(ctx.getParams().keys == keys)
    assert np.all(ctx.getParams()[0] == p[0])
    assert np.all(ctx.getParams()[1] == p[1])
def test_init(self):
    self.sampler = CosmoHammerSampler(
        params=self.params,
        likelihoodComputationChain=LikelihoodComputationChain(),
        filePrefix=self._getTempFilePrefix(),
        walkersRatio=10,
        burninIterations=1,
        sampleIterations=1)

    assert isinstance(self.sampler.storageUtil, SampleFileUtil)
    assert isinstance(self.sampler.stopCriteriaStrategy, IterationStopCriteriaStrategy)
    assert isinstance(self.sampler.initPositionGenerator, SampleBallPositionGenerator)
    assert self.sampler.likelihoodComputationChain.params is not None
def __init__(self, data, nbins, noise=0., div=1.0, like_func='c'):
    """
    :param data: the data to fit against
    :param nbins: number of k-modes in the power spectrum OR number of triangle
        contributions in the bispectrum (sets the size of the covariance matrix)
    :param noise: system noise, e.g. SKA or MWA noise response (if any), default 0.0
    :param like_func: choose between the complex likelihood function ('c') and the
        normal likelihood function ('n')
    :param div: likelihood normalisation factor, default 1.0
    """
    # NOTE: `params` (one [start, min, max, sigma] row per parameter) is read from
    # the enclosing scope; it is not passed to this constructor.
    chain = LikelihoodComputationChain(min=params[:, 1], max=params[:, 2])
    chain.params = params
    if like_func == 'n':
        chain.addLikelihoodModule(LikeModule(data, nbins, noise, div))
    else:
        chain.addLikelihoodModule(ComplexLikeModule(data, nbins, noise))
    self.chain = chain
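# Hypothetical construction of the wrapper above (the class name and file name
# are placeholders, not taken from the original code). `params` must exist in
# the enclosing scope before the constructor runs:
#
#   params = np.array([[0.5, 0.0, 1.0, 0.05],
#                      [1.0, 0.1, 10.0, 0.5]])      # [start, min, max, sigma] rows
#   data = np.loadtxt("powerspectrum.txt")
#   like = PowerSpectrumLikelihood(data, nbins=len(data), noise=0.0, like_func='c')
#   # like.chain is now ready to be handed to a CosmoHammerSampler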
def setup(self):
    """
    Initialise some data vectors used for the comparisons by the other functions.
    """
    self.params = np.array(
        [[70, 40, 100, 3],
         [0.0226, 0.005, 0.1, 0.001],
         [0.122, 0.01, 0.99, 0.01],
         [2.1e-9, 1.48e-9, 5.45e-9, 1e-10],
         [0.96, 0.5, 1.5, 0.02],
         [0.09, 0.01, 0.8, 0.03],
         [1, 0, 2, 0.4]])

    # the real means...
    means = [70.704, 0.02256, 0.1115, 2.18474E-09, 0.9688, 0.08920]

    # ...and non-trivial covariance matrix (lower triangle given, then symmetrised)
    cov = np.array(
        [[6.11E+00, 0, 0, 0, 0, 0],
         [7.19E-04, 3.26E-07, 0, 0, 0, 0],
         [-1.19E-02, -3.37E-07, 3.14E-05, 0, 0, 0],
         [-3.56E-11, 1.43E-14, 1.76E-13, 5.96E-21, 0, 0],
         [2.01E-02, 6.37E-06, -2.13E-05, 3.66E-13, 1.90E-04, 0],
         [1.10E-02, 2.36E-06, -1.92E-05, 8.70E-13, 7.32E-05, 2.23E-04]])
    cov += cov.T - np.diag(cov.diagonal())

    # Invert the covariance matrix
    icov = np.linalg.inv(cov)

    chain = LikelihoodComputationChain()
    pseudoLikelihood = PseudoCmbModule(icov, means)
    chain.addLikelihoodModule(pseudoLikelihood)
    chain.setup()

    posGen = FlatPositionGenerator()
    self.sampler = CosmoHammerSampler(
        params=self.params,
        likelihoodComputationChain=chain,
        filePrefix=self._getTempFilePrefix(),
        walkersRatio=10,
        burninIterations=1,
        sampleIterations=11,
        initPositionGenerator=posGen,
        storageUtil=InMemoryStorageUtil())
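# A minimal follow-up test sketch for the fixture above (the test name and the
# exact assertions are assumptions; the real test may check different values).
def test_sample(self):
    self.sampler.startSampling()
    samples = self.sampler.storageUtil.samples
    assert samples is not None
    # one column per sampled parameter
    assert samples.shape[1] == len(self.params)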
def mcmc_CH(self, walkerRatio, n_run, n_burn, mean_start, sigma_start, lowerLimit, upperLimit,
            X2_chain, threadCount=1, init_pos=None):
    """
    Runs an MCMC on the parameter space within the given parameter bounds using
    CosmoHammerSampler and returns the chain.
    """
    params = np.array([mean_start, lowerLimit, upperLimit, sigma_start]).T

    chain = LikelihoodComputationChain(min=lowerLimit, max=upperLimit)
    # chain.addCoreModule(CambCoreModule())
    chain.addLikelihoodModule(X2_chain)
    chain.setup()

    store = InMemoryStorageUtil()
    sampler = CosmoHammerSampler(
        params=params,
        likelihoodComputationChain=chain,
        filePrefix="testHammer",
        walkersRatio=walkerRatio,
        burninIterations=n_burn,
        sampleIterations=n_run,
        threadCount=threadCount,
        initPositionGenerator=init_pos,
        storageUtil=store)
    sampler.startSampling()
    if sampler._sampler.pool is not None:
        sampler._sampler.pool.close()
    return store.samples
def test_isValid(self):
    chain = LikelihoodComputationChain()
    assert chain.isValid([0])

    chain = LikelihoodComputationChain(min=[0])
    assert chain.isValid([1])
    assert chain.isValid([0])
    assert not chain.isValid([-1])

    chain = LikelihoodComputationChain(min=[0, 1])
    assert chain.isValid([1, 2])
    assert chain.isValid([0, 1])
    assert not chain.isValid([-1, 1])
    assert not chain.isValid([0, 0])
    assert not chain.isValid([-1, 0])

    chain = LikelihoodComputationChain(max=[1])
    assert chain.isValid([1])
    assert chain.isValid([0])
    assert not chain.isValid([2])

    chain = LikelihoodComputationChain(max=[1, 2])
    assert chain.isValid([0, 1])
    assert chain.isValid([1, 2])
    assert not chain.isValid([2, 2])
    assert not chain.isValid([1, 3])
    assert not chain.isValid([2, 3])

    chain = LikelihoodComputationChain(min=[0, 1], max=[1, 2])
    assert chain.isValid([1, 2])
    assert chain.isValid([0, 1])
    assert not chain.isValid([-1, 1])
    assert not chain.isValid([0, 0])
    assert not chain.isValid([-1, 0])
    assert not chain.isValid([2, 2])
    assert not chain.isValid([1, 3])
    assert not chain.isValid([2, 3])

    like, data = chain([-1, 0])
    assert like == -np.inf
    assert len(data) == 0

    like, data = chain([2, 3])
    assert like == -np.inf
    assert len(data) == 0
with h5py.File(init, 'r') as hf:
    params = np.array(hf.get("params"))
    inpos = np.array(hf.get("positions"))
    szs = np.array(hf.get("szs_parms")).astype(int)

mins = params[:, 1]
maxs = params[:, 2]
nparam = len(mins)

#--------------------------------------------------------------------------------
####################################### LIKELIHOOD CHAIN ########################
#-------------------- Setup the Chain --------
# If mins and maxs are included, the code performs bound checking
# and can calculate a null iteration of Generated Quantities in case of rejection.
print('Setting up chain')
chain = LikelihoodComputationChain()
coremodule = CoreModule(szs)
logPosterior = LogPosteriorModule(data, threads=k)
chain.addCoreModule(coremodule)
chain.addLikelihoodModule(logPosterior)
chain.setup()

############################### Particle Swarm Optimizer #############################
pso = MPSO(chain, low=mins, high=maxs, particleCount=partCount, req=req, threads=1, InPos=inpos)

smp = emcee.EnsembleSampler(
import numpy as np

from cosmoHammer import CosmoHammerSampler
from cosmoHammer import LikelihoodComputationChain
from cosmoHammer.util import InMemoryStorageUtil
from cosmoHammer.util import Params
from cosmoHammer.modules import PseudoCmbModule
from cosmoHammer.pso.ParticleSwarmOptimizer import ParticleSwarmOptimizer

# parameter start center, min, max, start width
params = Params(
    ("hubble", [70, 65, 80, 3]),
    ("ombh2", [0.0226, 0.01, 0.03, 0.001]),
    ("omch2", [0.122, 0.09, 0.2, 0.01]),
    ("scalar_amp", [2.1e-9, 1.8e-9, 2.35e-9, 1e-10]),
    ("scalar_spectral_index", [0.96, 0.8, 1.2, 0.02]),
    ("re_optical_depth", [0.09, 0.01, 0.1, 0.03]),
    ("sz_amp", [1, 0, 2, 0.4]))

chain = LikelihoodComputationChain(params[:, 1], params[:, 2])
chain.params = params
chain.addLikelihoodModule(PseudoCmbModule())
chain.setup()

# find the best fit value and update our params knowledge
print("find best fit point")
pso = ParticleSwarmOptimizer(chain, params[:, 1], params[:, 2])
psoTrace = np.array([pso.gbest.position.copy() for _ in pso.sample()])
params[:, 0] = pso.gbest.position

storageUtil = InMemoryStorageUtil()
sampler = CosmoHammerSampler(
    params=params,
    likelihoodComputationChain=chain,
    filePrefix="pseudoCmb_pso",
def mcmc_CH(self, walkerRatio, n_run, n_burn, mean_start, sigma_start, threadCount=1,
            init_pos=None, mpi=False):
    """
    Runs an MCMC on the parameter space within the given parameter bounds using
    CosmoHammerSampler and returns the chain.
    """
    lowerLimit, upperLimit = self.lower_limit, self.upper_limit

    # clip the starting point and start width so the initial sample ball stays inside the bounds
    mean_start = np.maximum(lowerLimit, mean_start)
    mean_start = np.minimum(upperLimit, mean_start)
    low_start = mean_start - sigma_start
    high_start = mean_start + sigma_start
    low_start = np.maximum(lowerLimit, low_start)
    high_start = np.minimum(upperLimit, high_start)
    sigma_start = (high_start - low_start) / 2
    mean_start = (high_start + low_start) / 2

    params = np.array([mean_start, lowerLimit, upperLimit, sigma_start]).T
    chain = LikelihoodComputationChain(min=lowerLimit, max=upperLimit)
    temp_dir = tempfile.mkdtemp("Hammer")
    file_prefix = os.path.join(temp_dir, "logs")
    #file_prefix = "./lenstronomy_debug"
    # chain.addCoreModule(CambCoreModule())
    chain.addLikelihoodModule(self.chain)
    chain.setup()
    store = InMemoryStorageUtil()
    #store = None
    if mpi:
        sampler = MpiCosmoHammerSampler(
            params=params,
            likelihoodComputationChain=chain,
            filePrefix=file_prefix,
            walkersRatio=walkerRatio,
            burninIterations=n_burn,
            sampleIterations=n_run,
            threadCount=1,
            initPositionGenerator=init_pos,
            storageUtil=store)
    else:
        sampler = CosmoHammerSampler(
            params=params,
            likelihoodComputationChain=chain,
            filePrefix=file_prefix,
            walkersRatio=walkerRatio,
            burninIterations=n_burn,
            sampleIterations=n_run,
            threadCount=threadCount,
            initPositionGenerator=init_pos,
            storageUtil=store)

    time_start = time.time()
    if sampler.isMaster():
        print('Computing the MCMC...')
        print('Number of walkers = ', len(mean_start) * walkerRatio)
        print('Burn-in iterations: ', n_burn)
        print('Sampling iterations:', n_run)
    sampler.startSampling()
    if sampler.isMaster():
        time_end = time.time()
        print(time_end - time_start, 'time taken for MCMC sampling')
    # if sampler._sampler.pool is not None:
    #     sampler._sampler.pool.close()
    try:
        shutil.rmtree(temp_dir)
    except Exception as ex:
        print(ex, 'shutil.rmtree did not work')
    #samples = np.loadtxt(file_prefix+".out")
    #prob = np.loadtxt(file_prefix+"prob.out")
    return store.samples, store.prob
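# The mpi=True branch above uses MpiCosmoHammerSampler, which is meant to be
# launched through an MPI runner; a sketch of the intended workflow (the script
# and object names are assumptions, not taken from the original code):
#
#   mpirun -n 8 python run_chain.py
#
# where run_chain.py calls something like
#
#   samples, prob = fitter.mcmc_CH(walkerRatio=10, n_run=200, n_burn=100,
#                                  mean_start=mean_start, sigma_start=sigma_start,
#                                  mpi=True)
#
# Only the master rank prints progress (guarded by sampler.isMaster() above).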
    else:
        hmparams = None
else:
    cosmo = hmparams = None

param_mapping = {}
nparams = len(fit_params.keys())
params = np.zeros((nparams, 4))
for key in fit_params.keys():
    param_mapping[key] = fit_params[key][0]
    params[fit_params[key][0], :] = fit_params[key][1:]

# Set up CosmoHammer
chain = LikelihoodComputationChain(
    min=params[:, 1],
    max=params[:, 2])

tracers = config['tracers']
trc_combs = []
if config['fit_comb'] == 'all':
    logger.info('Fitting auto- and cross-correlations of tracers.')
    i = 0
    for tr_i in tracers:
        for tr_j in tracers[:i + 1]:
            # Generate the appropriate list of tracer combinations to plot
            trc_combs.append([tr_j, tr_i])
        i += 1
elif config['fit_comb'] == 'auto':
    logger.info('Fitting auto-correlations of tracers.')