def map(self, function, sequence):
    """
    Emulates a pool map function using MPI.

    Retrieves the number of MPI processes and splits the sequence of
    walker positions so that each process works on its own block.

    :param function: the function to apply to the items of the sequence
    :param sequence: a sequence of items

    :returns sequence: sequence of results
    """
    (rank, size) = (MPI.COMM_WORLD.Get_rank(), MPI.COMM_WORLD.Get_size())

    # synchronize the sequence across all processes
    sequence = mpiBCast(sequence)

    getLogger().debug("Rank: %s, pid: %s MpiPool: starts processing iteration"
                      % (rank, os.getpid()))

    # split, process and merge the sequence
    mergedList = mergeList(MPI.COMM_WORLD.allgather(
        self.mapFunction(function, splitList(sequence, size)[rank])))

    getLogger().debug("Rank: %s, pid: %s MpiPool: done processing iteration"
                      % (rank, os.getpid()))

    return mergedList
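# A minimal sketch of the broadcast/split/allgather pattern implemented by
# map() above, runnable standalone with e.g. `mpirun -n 4 python demo.py`.
# It does not import MpiPool itself; it only replays the same steps with
# plain mpi4py calls, so the function and variable names are illustrative.
import itertools

from mpi4py import MPI


def demo_map(function, sequence):
    comm = MPI.COMM_WORLD
    rank, size = comm.Get_rank(), comm.Get_size()
    # every rank works on the master's copy of the sequence
    sequence = comm.bcast(sequence)
    # each rank maps the function over its own block ...
    block = len(sequence) / float(size)
    chunk = sequence[int(round(block * rank)):int(round(block * (rank + 1)))]
    partial = [function(item) for item in chunk]
    # ... and the partial results are gathered and flattened on every rank
    return list(itertools.chain(*comm.allgather(partial)))


if __name__ == "__main__":
    print(demo_map(lambda x: x * x, list(range(10))))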
def mpiBCast(value):
    """
    MPI-broadcasts the value and returns the value from the master (rank = 0).
    """
    getLogger().debug("Rank: %s, pid: %s MpiPool: bcast",
                      MPI.COMM_WORLD.Get_rank(), os.getpid())
    return MPI.COMM_WORLD.bcast(value)
def __call__(self, ctx):
    p = ctx.getParams()

    try:
        cosmo_params = {}
        for k, v in self.cosmo_mapping.items():
            cosmo_params[k] = p[v]
        cosmo_params.update(self.cosmo_constants)
        self.CAMBparams.set_cosmology(**cosmo_params)

        init_params = {}
        for k, v in self.init_mapping.items():
            init_params[k] = p[v]
        init_params.update(self.init_constants)
        self.CAMBparams.InitPower.set_params(**init_params)

        results = camb.get_results(self.CAMBparams)
        Tcmb = self.CAMBparams.TCMB * 1e6
        powers = results.get_cmb_power_spectra(lmax=self.lmax)['total']
        powers *= (Tcmb * Tcmb)

        # The convention in CosmoHammer is to have the spectra in units
        # of microK^2 and starting from ell=2
        ctx.add(CL_TT_KEY, powers[2:, 0])
        ctx.add(CL_TE_KEY, powers[2:, 3])
        ctx.add(CL_EE_KEY, powers[2:, 1])
        ctx.add(CL_BB_KEY, powers[2:, 2])
    except camb.baseconfig.CAMBError:
        getLogger().warning("CAMBError caught. Used params [%s]"
                            % ", ".join(str(i) for i in p))
        raise LikelihoodComputationException()
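# A standalone sketch of the camb calls used above, handy for checking the
# unit convention outside the chain; the parameter values here are
# illustrative placeholders, not defaults of the module.
import camb

pars = camb.CAMBparams()
pars.set_cosmology(H0=70.0, ombh2=0.0226, omch2=0.112)
pars.InitPower.set_params(ns=0.96, As=2.1e-9)

results = camb.get_results(pars)
# 'total' has columns TT, EE, BB, TE; scale to microK^2 and start at ell=2
powers = results.get_cmb_power_spectra(lmax=2500)['total']
powers *= (pars.TCMB * 1e6) ** 2
cl_tt, cl_ee, cl_bb, cl_te = (powers[2:, i] for i in (0, 1, 2, 3))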
def setup(self):
    super(WmapExtLikelihoodModule, self).setup()
    path = resource_filename(wmap5Wrapper.__name__, FILE_PATH)
    getLogger().info("Loading sz template from: %s" % path)
    self.sz = np.loadtxt(path)[:, 1]
def setup(self):
    super(WmapExtLikelihoodModule, self).setup()
    path = resource_filename(wmap7Wrapper.__name__, FILE_PATH)
    getLogger().info("Loading sz template from: %s" % path)
    self.sz = np.loadtxt(path)[:, 1]
def mpiMean(value):
    """
    MPI-reduces the value by summing over all processes and returns the
    mean on the master (rank = 0). The other ranks receive an array of zeros.
    """
    value = np.asarray(value)
    total = np.zeros_like(value)
    getLogger().debug("Rank: %s, pid: %s MpiPool: reduce",
                      MPI.COMM_WORLD.Get_rank(), os.getpid())
    MPI.COMM_WORLD.Reduce([value, MPI.DOUBLE], [total, MPI.DOUBLE],
                          op=MPI.SUM, root=0)
    return total / MPI.COMM_WORLD.Get_size()
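# A minimal runnable check of the Reduce-based mean above, launched with
# e.g. `mpirun -n 4 python demo_mean.py`: every rank contributes its own
# rank number, so the master prints the mean of 0..size-1.
import numpy as np
from mpi4py import MPI

comm = MPI.COMM_WORLD
value = np.array([float(comm.Get_rank())])
total = np.zeros_like(value)
comm.Reduce([value, MPI.DOUBLE], [total, MPI.DOUBLE], op=MPI.SUM, root=0)
if comm.Get_rank() == 0:
    print(total / comm.Get_size())  # non-root ranks are left with zeros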
def mergeList(lists):
    """
    Merges a list of lists into one single, flat list.

    :param lists: a list of lists
    :returns list: the merged list
    """
    getLogger().debug("Rank: %s, pid: %s MpiPool: mergeList",
                      MPI.COMM_WORLD.Get_rank(), os.getpid())
    return list(itertools.chain(*lists))
def splitList(sequence, n):
    """
    Splits the sequence into n blocks of approximately equal size
    (len(sequence)/n).

    :param sequence: a sequence of items
    :param n: the number of blocks to create
    :returns sequence: a list of blocks
    """
    getLogger().debug("Rank: %s, pid: %s MpiPool: splitList",
                      MPI.COMM_WORLD.Get_rank(), os.getpid())
    blockLen = len(sequence) / float(n)
    return [sequence[int(round(blockLen * i)):int(round(blockLen * (i + 1)))]
            for i in range(n)]
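# splitList and mergeList are inverses; this pure-Python check needs no MPI
# and shows how the rounding spreads a remainder across the blocks. The
# local _split helper simply restates the slicing arithmetic from above.
import itertools


def _split(seq, n):
    block = len(seq) / float(n)
    return [seq[int(round(block * i)):int(round(block * (i + 1)))]
            for i in range(n)]


items = list(range(10))
blocks = _split(items, 3)
print(blocks)  # [[0, 1, 2], [3, 4, 5, 6], [7, 8, 9]]
assert list(itertools.chain(*blocks)) == items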
def isValid(self, p):
    """
    Checks whether the given parameters are within the configured bounds.
    """
    if self.min is not None:
        for i in range(len(p)):
            if p[i] < self.min[i]:
                getLogger().debug("Params out of bounds i=" + str(i) + " params " + str(p))
                return False

    if self.max is not None:
        for i in range(len(p)):
            if p[i] > self.max[i]:
                getLogger().debug("Params out of bounds i=" + str(i) + " params " + str(p))
                return False

    return True
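# A standalone mirror of the bounds check above, using vectorized numpy
# comparisons that are equivalent to the element-wise loops; the _Bounds
# helper class is hypothetical and only exists for this demonstration.
import numpy as np


class _Bounds:
    def __init__(self, lo, hi):
        self.min, self.max = lo, hi

    def isValid(self, p):
        p = np.asarray(p)
        if self.min is not None and np.any(p < self.min):
            return False
        if self.max is not None and np.any(p > self.max):
            return False
        return True


bounds = _Bounds(np.array([0.0, 0.0]), np.array([1.0, 1.0]))
print(bounds.isValid([0.5, 0.5]), bounds.isValid([1.5, 0.5]))  # True False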
def __call__(self, ctx):
    p1 = ctx.getParams()[0:len(self.mapping)]

    try:
        params = self.constants.copy()
        for k, v in self.mapping.items():
            params[k] = p1[v]

        self._transform(params)

        cl_tt, cl_ee, cl_bb, cl_te = pycamb.camb(self.lmax, **params)

        ctx.add(CL_TT_KEY, cl_tt)
        ctx.add(CL_TE_KEY, cl_te)
        ctx.add(CL_EE_KEY, cl_ee)
        ctx.add(CL_BB_KEY, cl_bb)
    except RuntimeError:
        getLogger().warning("Runtime error caught from the camb shared object. "
                            "Used params [%s]" % ", ".join(str(i) for i in p1))
        raise LikelihoodComputationException()
def _configureLogging(self, filename, logLevel):
    super()._configureLogging(filename, logLevel)

    logger = getLogger()
    logger.setLevel(logLevel)
    ch = logging.StreamHandler()
    ch.setLevel(self._log_level_stream)
    # create formatter and add it to the handler
    formatter = logging.Formatter("%(asctime)s %(levelname)s:%(message)s")
    ch.setFormatter(formatter)
    logger.addHandler(ch)
def map(self, function, sequence):
    """
    Emulates a pool map function using MPI.

    Retrieves the number of MPI processes and splits the sequence of
    walker positions so that each process works on its own block.

    :param function: the function to apply to the items of the sequence
    :param sequence: a sequence of items

    :returns sequence: sequence of results
    """
    sequence = mpiBCast(sequence)

    getLogger().debug("Rank: %s, pid: %s MpiPool: starts processing iteration"
                      % (self.rank, os.getpid()))

    # split, process and merge the sequence
    mergedList = mergeList(MPI.COMM_WORLD.allgather(
        self.mapFunction(function, splitList(sequence, self.size)[self.rank])))

    getLogger().debug("Rank: %s, pid: %s MpiPool: done processing iteration"
                      % (self.rank, os.getpid()))

    return mergedList
def __call__(self, p):
    """
    Computes the log likelihood by calling all the core and likelihood modules.

    :param p: the parameter array for which the likelihood should be evaluated
    :return: the current likelihood and a dict with additional data
    """
    try:
        getLogger().debug("pid: %s, processing: %s" % (os.getpid(), p))
        if not self.isValid(p):
            raise LikelihoodComputationException()

        ctx = self.createChainContext(p)
        self.invokeCoreModules(ctx)
        likelihood = self.computeLikelihoods(ctx)
        getLogger().debug("pid: %s, processed. Returning: %s" % (os.getpid(), likelihood))
        return likelihood, ctx.getData()
    except LikelihoodComputationException:
        getLogger().debug("pid: %s, processed. Returning: %s" % (os.getpid(), -np.inf))
        return -np.inf, []
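# A hedged sketch of how a sampler-side pool consumes __call__: each walker
# position yields a (log-likelihood, data) tuple, with -inf marking rejected
# positions. `chain` is assumed to be an instance of the class above, and
# the evaluate helper is hypothetical.
import numpy as np


def evaluate(chain, positions):
    results = [chain(p) for p in positions]
    lnprobs = np.array([r[0] for r in results])  # -inf where computation failed
    datas = [r[1] for r in results]              # per-walker auxiliary data
    return lnprobs, datas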
def _configureLogging(self, filename, logLevel):
    logger = getLogger()
    logger.setLevel(logLevel)
    fh = logging.FileHandler(filename, "w")
    fh.setLevel(logLevel)
    # create console handler with a higher log level
    ch = logging.StreamHandler()
    ch.setLevel(logging.ERROR)
    # create formatter and add it to the handlers
    formatter = logging.Formatter('%(asctime)s %(levelname)s:%(message)s')
    fh.setFormatter(formatter)
    ch.setFormatter(formatter)
    # remove any existing handlers before adding the new ones
    for handler in logger.handlers[:]:
        logger.removeHandler(handler)
    logger.addHandler(fh)
    logger.addHandler(ch)
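# A runnable standalone version of the handler setup above, useful for
# checking the format string; the logger name "demo" and the file name
# "demo.log" are placeholders.
import logging

logger = logging.getLogger("demo")
logger.setLevel(logging.DEBUG)

fh = logging.FileHandler("demo.log", "w")
fh.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)

formatter = logging.Formatter('%(asctime)s %(levelname)s:%(message)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)

logger.addHandler(fh)
logger.addHandler(ch)

logger.debug("goes to demo.log only")
logger.error("goes to demo.log and to stderr")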
def _configureLogging(self, filename, logLevel):
    if self.isMaster():
        logger = getLogger()
        logger.setLevel(logLevel)
        # create file handler which logs even debug messages
        # fh = ConcurrentRotatingFileHandler(filename, "w", 512*1024, 5, supress_abs_warn=True)
        fh = logging.FileHandler(filename, "w")
        fh.setLevel(logLevel)
        # create console handler with a higher log level
        ch = logging.StreamHandler()
        ch.setLevel(logging.ERROR)
        # create formatter and add it to the handlers
        formatter = logging.Formatter('%(asctime)s %(levelname)s:%(message)s')
        fh.setFormatter(formatter)
        ch.setFormatter(formatter)
        # add the handlers to the logger
        logger.addHandler(fh)
        logger.addHandler(ch)
def setup(self): getLogger().info("Multivariante Gaussian setup")
import logging
import os
import time
import warnings

import numpy as np

# h5py is an optional dependency; the guard below keeps the module
# importable without it, so the check in HDFStorage.__init__ can fire.
try:
    import h5py
except ImportError:
    h5py = None

from cosmoHammer import CosmoHammerSampler as _CosmoHammerSampler
from cosmoHammer import getLogger
from cosmoHammer import util as _util
from cosmoHammer.ChainContext import ChainContext
from cosmoHammer.LikelihoodComputationChain import LikelihoodComputationChain as _Chain
from py21cmfast._utils import ParameterError
from py21cmmc.ensemble import EnsembleSampler

logger = getLogger()


class HDFStorage:
    """A HDF storage utility, based on the HDFBackend from emcee v3.0.0."""

    def __init__(self, filename, name):
        if h5py is None:
            raise ImportError("you must install 'h5py' to use the HDFBackend")
        self.filename = filename
        self.name = name

    @property
    def initialized(self):
        """Whether the file object has been initialized."""
        if not os.path.exists(self.filename):
            return False
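# A minimal usage sketch for HDFStorage as defined above; the file name
# "chain.h5" and run name "mcmc" are placeholders.
if __name__ == "__main__":
    store = HDFStorage("chain.h5", name="mcmc")
    print(store.initialized)  # False until the HDF5 file exists on disk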
def setup(self): getLogger().info("Rosenbrock setup")
def log(self, message, level=logging.INFO):
    """
    Logs a message to the logfile.
    """
    if self.isMaster():
        getLogger().log(level, message)
def setup(self): getLogger().info("Pseudo cmb setup")