def fill_mcmc_parameters(self):
    """
    Initializes the ordered dictionary :attr:`mcmc_parameters` from the
    input parameter file.

    It uses :meth:`read_file`, and calls
    :meth:`from_input_to_mcmc_parameters` to actually fill in
    :attr:`mcmc_parameters`.
    """
    # Define temporary quantities, only to simplify the input in the
    # parameter file
    self.parameters = od()

    # Read everything from the parameter file
    try:
        self.param_file = open(self.param, 'r')
    except IOError:
        io_mp.message(
            "Error in initializing the data class: the parameter file "
            "{0} does not point to a proper file".format(self.param),
            "error")
    self.read_file(self.param_file)

    # Transform the parameters dictionary into the mcmc_parameters
    # dictionary of dictionaries, with the method defined just below
    self.from_input_to_mcmc_parameters(self.parameters)
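
# The following sketch is an addition for illustration only (it is not part
# of MontePython): it shows the kind of structure that
# fill_mcmc_parameters() ends up building. The keys 'initial', 'current'
# and 'scale' appear elsewhere in this code; the other keys and all values
# below are assumptions.
def _sketch_mcmc_parameters_entry():
    from collections import OrderedDict
    mcmc_parameters = OrderedDict()
    mcmc_parameters['omega_b'] = {
        'initial': 2.249,        # hypothetical starting value
        'scale': 0.01,           # factor applied before calling the code
        'role': 'cosmo',         # as opposed to e.g. 'nuisance', 'derived'
        'last_accepted': 2.249,  # value at the last accepted point
        'current': 2.251,        # value at the point being tested
    }
    return mcmc_parameters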
def __cmp__(self, other):
    """
    Redefinition of the 'compare' method for two instances of this class.

    It decides which basic operations to perform when the code asks
    whether two instances are the same (in case you want to launch a new
    chain in an existing folder, with your own parameter file).
    """
    # Comparing cosmological code versions (warning only, will not fail
    # the comparison)
    if self.version != other.version:
        io_mp.message(
            "You are running with a different version of your "
            "cosmological code",
            "warning")

    # Define unordered versions of the dictionaries of parameters
    self.uo_parameters = {}
    other.uo_parameters = {}

    # Check that the same experiments are tested again
    if len(list(set(other.experiments).symmetric_difference(
            set(self.experiments)))) == 0:
        # Check that they have been called with the same .data file,
        # stored in a dictionary when initializing.
        for experiment in self.experiments:
            for elem in self.lkl[experiment].dictionary:
                if self.lkl[experiment].dictionary[elem] != \
                        other.lkl[experiment].dictionary[elem]:
                    print 'in your parameter file: ',
                    print self.lkl[experiment].dictionary
                    print 'in log.param: ',
                    print other.lkl[experiment].dictionary
                    return -1
        # Fill in the unordered versions of the dictionaries
        for key, elem in self.mcmc_parameters.iteritems():
            self.uo_parameters[key] = elem['initial']
        for key, elem in other.mcmc_parameters.iteritems():
            other.uo_parameters[key] = elem['initial']

        # And finally compare them (standard comparison between
        # dictionaries, returning 0 if both have the same keys and
        # values associated to them)
        return cmp(self.uo_parameters, other.uo_parameters)
    else:
        return -1
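
# Illustrative sketch, not part of the original code: the final comparison
# above relies on Python 2's built-in cmp(), which returns 0 when two
# dictionaries have exactly the same keys and values, regardless of
# insertion order. The parameter names and values below are made up.
def _sketch_unordered_comparison():
    uo_self = {'omega_b': 2.249, 'n_s': 0.963}
    uo_other = {'n_s': 0.963, 'omega_b': 2.249}
    # cmp() == 0 is interpreted above as "the two runs are compatible"
    return cmp(uo_self, uo_other) == 0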
def main():
    """
    Main entry point of the code.

    This function recovers the input from the command line arguments,
    through :mod:`parser_mp`, and from the parameter files. It then
    extracts the path of the Monte Python code in use, assuming a standard
    setting (the data folder is in the same directory as the code folder).
    It finally proceeds to initialize a :class:`data` instance and a
    cosmological code instance, and runs the Markov chain.

    .. note::
        A possible parallelization would take place here.
    """
    # Parsing the command line arguments
    command_line = parser_mp.parse()

    # Default configuration
    path = {}

    # On execution, sys.path contains all the standard locations for the
    # libraries, plus, in the first position (index 0), the directory from
    # where the code is executed. By default, then, the data folder is
    # located in the same root directory. Any setting in the configuration
    # file will overwrite this one.
    path['MontePython'] = sys.path[0] + '/'
    path['data'] = path['MontePython'][:-5] + 'data/'

    # Configuration file, defaulting to default.conf in your root
    # directory. This can be changed with the command line option -conf.
    # All changes will be stored in the log.param of your folder, and
    # hence will be reused for a later run in the same directory.
    conf_file = path['MontePython'][:-5] + command_line.config_file
    if os.path.isfile(conf_file):
        for line in open(conf_file):
            exec(line)
        for key, value in path.iteritems():
            if not value.endswith('/'):
                path[key] = value + '/'
    else:
        io_mp.message(
            "You must provide a .conf file (default.conf by default in "
            "your montepython directory) that specifies the correct "
            "locations of your data folder, Class (, Clik), etc...",
            "error")

    sys.stdout.write('Running MontePython version 1.2\n')

    # If the info flag was used, read a potential chain (or set of chains)
    # to be analysed with the default procedure. If the argument is a
    # .info file, the code will extract information from it (plots to
    # compute, chains to analyse, etc...)
    if command_line.files is not None:
        from analyze import analyze  # analysis module, only invoked here
        analyze(command_line)
        exit()

    # If the restart flag was used, load the cosmology directly from the
    # log.param file, and append to the existing chain.
    if command_line.restart is not None:
        if command_line.restart[0] == '/':
            folder = ''
        else:
            folder = './'
        for elem in command_line.restart.split("/")[:-1]:
            folder += ''.join(elem+'/')
        command_line.param = folder+'log.param'
        command_line.folder = folder
        sys.stdout.write('Reading {0} file'.format(command_line.restart))
        Data = data.data(command_line, path)

    # Else, fill in data, starting from the parameter file. If the output
    # folder already exists, the input parameter file is automatically
    # replaced by the existing log.param. This prevents you from running
    # different things in the same folder.
    else:
        Data = data.data(command_line, path)

    # Overwrite arguments from the parameter file with the command line
    if command_line.N is None:
        try:
            command_line.N = Data.N
        except AttributeError:
            io_mp.message(
                "You did not provide a number of steps, neither via the "
                "command line, nor in %s" % command_line.param,
                "error")

    # Creating the file that will contain the chain
    io_mp.create_output_files(command_line, Data)

    # If there is a conflict between the log.param value and the .conf
    # file, exit.
    if Data.path != path:
        io_mp.message(
            "Your log.param file is in contradiction with your .conf "
            "file, please check your paths in these two places.",
            "error")

    # Loading up the cosmological backbone. For the moment, only Class has
    # been wrapped.
    # Importing the python-wrapped Class from the correct folder, defined
    # in the .conf file, or overwritten at this point by the log.param.
    # If the cosmological code is Class, do the following to import all
    # relevant quantities
    if Data.cosmological_module_name == 'Class':
        try:
            for elem in os.listdir(Data.path['cosmo']+"python/build"):
                if elem.find("lib.") != -1:
                    classy_path = path['cosmo']+"python/build/"+elem
        except OSError:
            io_mp.message(
                "You probably did not compile the python wrapper of "
                "Class. Please go to /path/to/class/python/ and do\n"
                "..]$ python setup.py build",
                "error")

        # Inserting the previously found path into the list of folders to
        # search for python modules.
        sys.path.insert(1, classy_path)
        try:
            from classy import Class
        except ImportError:
            io_mp.message(
                "You must have compiled the classy.pyx file. Please go "
                "to /path/to/class/python and run the command\n"
                "python setup.py build",
                "error")

        cosmo = Class()
    else:
        io_mp.message(
            "Unrecognised cosmological module. Be sure to define the "
            "correct behaviour in MontePython.py and data.py, to support "
            "a new one",
            "error")

    # MCMC chain
    mcmc.chain(cosmo, Data, command_line)

    # Closing up the file
    Data.out.close()
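
# Illustrative sketch, not part of the original code: since main() simply
# exec()'s every line of the configuration file, a minimal default.conf is
# expected to consist of plain Python assignments such as the ones below.
# The 'cosmo' key is used elsewhere in this code; any other key and all
# paths here are placeholders.
def _sketch_default_conf():
    path = {}
    # Lines of this kind could appear, one per line, in default.conf:
    path['cosmo'] = '/path/to/class/'
    path['data'] = '/path/to/montepython/data/'
    return path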
def update_cosmo_arguments(self):
    """
    Put in :attr:`cosmo_arguments` the current values of
    :attr:`mcmc_parameters`

    This method is called at every step in the Markov chain, to update the
    dictionary. In the Markov chain, the scale is not remembered, so one
    has to apply it before handing the value to the cosmological code.

    .. note::
        When you want to define new parameters in the Markov chain that do
        not have a one to one correspondence with a cosmological name, you
        can redefine their behaviour here. You will find several such
        examples in the source.
    """
    # For all elements in the cosmological parameters
    for elem in self.get_mcmc_parameters(['cosmo']):
        # Fill in the dictionary with the current value of the parameter
        self.cosmo_arguments[elem] = \
            self.mcmc_parameters[elem]['current'] *\
            self.mcmc_parameters[elem]['scale']

    # For all elements in the cosmological parameters from the mcmc list,
    # translate any that is not directly a Class parameter into one.
    for elem in self.get_mcmc_parameters(['cosmo']):
        # infer h from Omega_Lambda and delete Omega_Lambda
        if elem == 'Omega_Lambda':
            omega_b = self.cosmo_arguments['omega_b']
            omega_cdm = self.cosmo_arguments['omega_cdm']
            Omega_Lambda = self.cosmo_arguments['Omega_Lambda']
            self.cosmo_arguments['h'] = math.sqrt(
                (omega_b+omega_cdm) / (1.-Omega_Lambda))
            del self.cosmo_arguments[elem]
        # infer omega_cdm from Omega_L and delete Omega_L
        if elem == 'Omega_L':
            omega_b = self.cosmo_arguments['omega_b']
            h = self.cosmo_arguments['h']
            Omega_L = self.cosmo_arguments['Omega_L']
            self.cosmo_arguments['omega_cdm'] = (1.-Omega_L)*h*h-omega_b
            del self.cosmo_arguments[elem]
        if elem == 'ln10^{10}A_s':
            self.cosmo_arguments['A_s'] = math.exp(
                self.cosmo_arguments[elem]) / 1.e10
            del self.cosmo_arguments[elem]
        if elem == 'exp_m_2_tau_As':
            tau_reio = self.cosmo_arguments['tau_reio']
            self.cosmo_arguments['A_s'] = self.cosmo_arguments[elem] * \
                math.exp(2.*tau_reio)
            del self.cosmo_arguments[elem]
        if elem == 'f_cdi':
            self.cosmo_arguments['n_cdi'] = self.cosmo_arguments['n_s']
        if elem == 'beta':
            self.cosmo_arguments['alpha'] = 2.*self.cosmo_arguments['beta']
        # We only do this on xe_1, for there is at least one of them.
        if elem.find('xe_1') != -1:
            # To use this option, you must have set the cosmological
            # setting reio_parametrization to reio_bins_tanh, and defined
            # binned_reio_z and binned_reio_num.
            if (self.cosmo_arguments['reio_parametrization'] !=
                    'reio_bins_tanh'):
                io_mp.message(
                    "You set binned_reio_xe to some values without "
                    "setting reio_parametrization to reio_bins_tanh",
                    "error")
            else:
                try:
                    size = self.cosmo_arguments['binned_reio_num']
                except KeyError:
                    io_mp.message(
                        "You need to set binned_reio_num to the value "
                        "corresponding to the one in binned_reio_xe",
                        "error")
                string = ''
                for i in range(1, size+1):
                    string += '%.4g' % self.cosmo_arguments['xe_%d' % i]
                    del self.cosmo_arguments['xe_%d' % i]
                    if i != size:
                        string += ','
                self.cosmo_arguments['binned_reio_xe'] = string
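
# Illustrative sketch, not part of the original code: a stand-alone check
# of the Omega_Lambda -> h translation performed above, with made-up
# numbers.
def _sketch_omega_lambda_to_h():
    import math
    omega_b, omega_cdm, Omega_Lambda = 0.0224, 0.112, 0.73
    # Same formula as in update_cosmo_arguments():
    h = math.sqrt((omega_b + omega_cdm) / (1. - Omega_Lambda))
    # h is about 0.705 here, so that (omega_b+omega_cdm)/h**2 equals
    # 1 - Omega_Lambda, as expected for a flat universe
    return h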
def __init__(self, command_line, path):
    """
    The data class holds the cosmological information, the parameters from
    the MCMC run, and the information coming from the likelihoods. It is a
    wide collection of information, with in particular two main
    dictionaries: cosmo_arguments and mcmc_parameters.

    It defines several useful **methods**. The following ones are called
    just once, at initialization:

    * :func:`fill_mcmc_parameters`
    * :func:`from_input_to_mcmc_parameters`
    * :func:`read_file`
    * :func:`read_version`
    * :func:`group_parameters_in_blocks`

    On the other hand, the two following functions are called at every
    step:

    * :func:`check_for_slow_step`
    * :func:`update_cosmo_arguments`

    Finally, the convenient method :func:`get_mcmc_parameters` will be
    called in many places, to return the proper list of desired
    parameters.

    It has a number of different **attributes**, the most important ones
    being listed here:

    * :attr:`cosmo_arguments`
    * :attr:`mcmc_parameters`
    * :attr:`need_cosmo_update`
    * :attr:`log_flag`
    * :attr:`boundary_loglike`

    .. note::
        The `experiments` attribute is extracted from the parameter file,
        and contains the list of likelihoods to use.

    To create an instance of this class, one must feed the following
    parameters and keyword arguments:

    :Parameters:
        - **command_line** (`dict`) - dictionary containing the input from
          the :mod:`parser_mp`. It stores the input parameter file, the
          jumping methods, the output folder, etc... Most of the
          information extracted from the command line will be transformed
          into :class:`data` attributes, whenever it is felt meaningful to
          do so.
        - **path** (`dict`) - dictionary of important local paths. It is
          used here to find the location of the cosmological module.

    """
    # Initialisation of the random seed
    rd.seed()

    # Store the parameter file
    self.param = command_line.param

    # Recover the jumping method from the command line
    self.jumping = command_line.jumping
    self.jumping_factor = command_line.jumping_factor

    self.path = path

    self.boundary_loglike = -1e30
    """
    Define the boundary loglike, the value used to define a loglike that
    is out of bounds. If a point in the parameter space is assigned this
    value, it will be automatically rejected, hence increasing the
    multiplicity of the last accepted point.
    """

    # Creation of the two main dictionaries:
    self.cosmo_arguments = {}
    """
    Simple dictionary that will serve as a communication interface with
    the cosmological code. It contains all the parameters for the code
    that will not be set to their default values. It is updated from
    :attr:`mcmc_parameters`.

    :rtype: dict
    """
    self.mcmc_parameters = od()
    """
    Ordered dictionary of dictionaries, it contains everything needed by
    the :mod:`mcmc` module for the MCMC procedure. Every parameter name
    will be the key of a dictionary, containing the initial configuration,
    role, status, last accepted point and current point.

    :rtype: ordereddict
    """

    # Read from the parameter file to fill properly the mcmc_parameters
    # dictionary.
    self.fill_mcmc_parameters()

    # Determine which cosmological code is in use
    if path['cosmo'].find('class') != -1:
        self.cosmological_module_name = 'Class'
    else:
        self.cosmological_module_name = None

    # Recover the cosmological code version (and subversion if relevant).
    # To implement a new cosmological code, please add another case to the
    # test below.
    if self.cosmological_module_name == 'Class':
        svn_file = open(path['cosmo']+'/include/svnversion.h', 'r')
        self.subversion = svn_file.readline().split()[-1].\
            replace('"', '')
        svn_file.close()
        for line in open(path['cosmo']+'/include/common.h', 'r'):
            if line.find('_VERSION_') != -1:
                self.version = line.split()[-1].replace('"', '')
                break
    else:  # read in the existing parameter file
        self.read_version(self.param_file)

    # End of initialisation with the parameter file
    self.param_file.close()

    self.log_flag = False
    """
    Stores whether or not the likelihood data files need to be written
    down in the log.param file. Initially False.

    :rtype: bool
    """

    self.need_cosmo_update = True
    """
    `added in version 1.1.1`. It stores the truth value of whether the
    cosmological block of parameters was changed from one step to another.
    See :meth:`group_parameters_in_blocks`

    :rtype: bool
    """

    sys.stdout.write('Testing likelihoods for:\n -> ')
    for i in range(len(self.experiments)):
        sys.stdout.write(self.experiments[i]+', ')
    sys.stdout.write('\n')

    # Logging the parameter file (only if the folder does not already
    # contain a log.param)
    if command_line.folder[-1] != '/':
        command_line.folder += '/'
    if (os.path.exists(command_line.folder) and
            not os.path.exists(command_line.folder+'log.param')):
        if command_line.param is not None:
            io_mp.message(
                "Detecting empty folder, logging the parameter file",
                "warning")
            io_mp.log_parameters(self, command_line)
            self.log_flag = True
    if not os.path.exists(command_line.folder):
        os.mkdir(command_line.folder)
        # Logging of parameters
        io_mp.log_parameters(self, command_line)
        self.log_flag = True

    self.lkl = od()

    # Add the likelihood directory to the path in order to import the
    # module, then, for each likelihood, create an instance of it.
    # Beware, though: if you add new likelihoods, they should go into the
    # folder likelihoods/yourlike/yourlike.py, and contain a
    # yourlike.data, otherwise the following set of commands will not
    # work anymore. If log_flag is True, each likelihood will also log
    # its parameters.
    for elem in self.experiments:

        folder = os.path.abspath(
            path['MontePython'])+"/../likelihoods/%s" % elem
        # add the folder of the likelihood to the path of libraries to...
        if folder not in sys.path:
            sys.path.insert(0, folder)
        # ... import easily the likelihood.py program
        exec "import %s" % elem
        # Initialize the likelihoods. Depending on the values of
        # command_line and log_flag, the routine will call slightly
        # different things. If log_flag is True, the log.param will be
        # appended.
        exec "self.lkl['%s'] = %s.%s('%s/%s.data', self, command_line)" % (
            elem, elem, elem, folder, elem)

    # Storing parameters by blocks of speed
    self.group_parameters_in_blocks()

    # Finally, log the cosmo_arguments used. This comes at the end,
    # because it can be modified inside the init functions of the
    # likelihoods.
    if self.log_flag:
        io_mp.log_cosmo_arguments(self, command_line)
        io_mp.log_default_configuration(self, command_line)
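
# Illustrative sketch, not part of the original code: the two exec
# statements above implement a dynamic "import the module named after the
# experiment, then instantiate the class of the same name" pattern. A rough
# stand-alone equivalent, using the standard importlib module and the math
# module as a harmless stand-in for a likelihood package:
def _sketch_dynamic_import(name='math'):
    import importlib
    module = importlib.import_module(name)
    # For a real likelihood one would then do something like
    #   getattr(module, name)(data_file, self, command_line)
    return module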
def chain(cosmo, data, command_line):
    """
    Run a Markov chain of fixed length.

    Main function of this module, this is the actual Markov chain
    procedure. After having selected a starting point in parameter space
    defining the first **last accepted** point, it will, for a given
    number of steps:

    + choose randomly a new point following the *proposal density*,
    + compute the cosmological *observables* through the cosmological
      module,
    + compute the value of the *likelihoods* of the desired experiments at
      this point,
    + *accept/reject* this point given its likelihood compared to the one
      of the last accepted point.

    Every time the code accepts :code:`data.write_step` points (quantity
    defined in the input parameter file), it will write the result to disk
    (flushing the buffer by closing the output file and reopening it
    again).

    .. note::
        To use the code to set a fiducial file for certain fixed
        parameters, you can use two solutions. The first one is to set all
        input 1-sigma proposal densities to zero (this method still works,
        but is not recommended anymore). The second one consists in using
        the flag "-f 0", to force a step of zero amplitude.

    """
    ## Initialisation
    loglike = 0

    # Recover the covariance matrix according to the input, if the set of
    # varying parameters is non-empty
    if (data.get_mcmc_parameters(['varying']) != []):
        sigma_eig, U, C = get_covariance_matrix(data, command_line)
        if data.jumping_factor == 0:
            io_mp.message(
                "The jumping factor has been set to 0. The above "
                "covariance matrix will not be used.",
                "info")

    # In case of a fiducial run (all parameters fixed), simply run once
    # and print out the likelihood. This should not be used any more (one
    # has to modify the log.param, which is never a good idea). Instead,
    # force the code to use a jumping factor of 0 with the option "-f 0".
    else:
        io_mp.message(
            "You are running with no varying parameters... I will "
            "compute only one point and exit",
            "info")
        data.update_cosmo_arguments()  # this fills in the fixed parameters
        loglike = compute_lkl(cosmo, data)
        io_mp.print_vector([data.out, sys.stdout], 1, loglike, data)
        return 1, loglike

    # In the fast-slow method, one needs the Cholesky decomposition of the
    # covariance matrix, returned as a lower triangular matrix
    Cholesky = None
    Inverse_Cholesky = None
    Rotation = None
    if command_line.jumping == 'fast':
        Cholesky = la.cholesky(C).T
        Inverse_Cholesky = np.linalg.inv(Cholesky)
        Rotation = np.identity(len(sigma_eig))

    # If a restart is wanted, pick the initial values of the arguments
    # from the existing chain
    if command_line.restart is not None:
        read_args_from_chain(data, command_line.restart)

    # If restarting from a best fit file, read the first point (this
    # overwrites the settings of read_args_from_chain)
    if command_line.bf is not None:
        read_args_from_bestfit(data, command_line.bf)

    # Pick a position (from the last accepted point if restarting, from
    # the mean value otherwise), with at most 100 tries.
    for i in range(100):
        if get_new_position(data, sigma_eig, U, i,
                            Cholesky, Inverse_Cholesky, Rotation) is True:
            break
        if i == 99:
            io_mp.message(
                "You should probably check your prior boundaries... "
                "because no valid starting position was found after 100 "
                "tries",
                "error")

    # Compute the starting likelihood
    loglike = compute_lkl(cosmo, data)

    # Choose this step as the last accepted value (accept_step), and
    # modify accordingly the max_loglike
    accept_step(data)
    max_loglike = loglike

    # If the jumping factor is 0, the likelihood associated with this
    # point is displayed, and the code exits.
    if data.jumping_factor == 0:
        io_mp.print_vector([data.out, sys.stdout], 1, loglike, data)
        return 1, loglike

    acc, rej = 0.0, 0.0  # acceptance and rejection counts
    N = 1  # number of times the system stayed in the current position

    # Print on screen the computed parameters
    io_mp.print_parameters(sys.stdout, data)

    k = 1
    # Main loop, that goes on until the expected number of steps
    # (command_line.N) has been taken.
    while k <= command_line.N:

        # Pick a new position ('current' flag in mcmc_parameters), and
        # compute its likelihood. If get_new_position returns True, it
        # means it did not encounter any boundary problem. Otherwise, just
        # increase the multiplicity of the point and start the loop again.
        if get_new_position(
                data, sigma_eig, U, k,
                Cholesky, Inverse_Cholesky, Rotation) is True:
            newloglike = compute_lkl(cosmo, data)
        else:  # reject step
            rej += 1
            N += 1
            k += 1
            continue

        # Harmless trick to avoid exponentiating large numbers. This
        # decides whether or not the system should move.
        if (newloglike != data.boundary_loglike):
            if (newloglike >= loglike):
                alpha = 1.
            else:
                alpha = np.exp(newloglike-loglike)
        else:
            alpha = -1

        if ((alpha == 1.) or (rd.uniform(0, 1) < alpha)):  # accept step

            # Print out the last accepted step (WARNING: this is NOT the
            # one we just computed ('current' flag), but really the
            # previous one), with its proper multiplicity (number of
            # times the system stayed there).
            io_mp.print_vector([data.out, sys.stdout], N, loglike, data)

            # Copy the 'current' point to the 'last_accepted' one
            accept_step(data)
            loglike = newloglike
            if loglike > max_loglike:
                max_loglike = loglike
            acc += 1.0
            N = 1  # Reset the multiplicity

        else:  # reject step
            rej += 1.0
            N += 1  # Increase multiplicity of the last accepted point

        # Regularly (option set in the parameter file), close and reopen
        # the buffer to force writing to the file.
        if acc % data.write_step == 0:
            io_mp.refresh_file(data)

        k += 1  # One iteration done
    # END OF WHILE LOOP

    # If at this point the multiplicity is higher than 1, it means the
    # current point is not yet accepted, but it also means that we did
    # not print out the last accepted one yet. So we do it now.
    if N > 1:
        io_mp.print_vector([data.out, sys.stdout], N-1, loglike, data)

    # Print out some information on the finished chain
    rate = acc / (acc + rej)
    sys.stdout.write('\n# {0} steps done, acceptance rate: {1}\n'.
                     format(command_line.N, rate))

    # For a restart, erase the starting point to keep only the new,
    # longer chain.
    if command_line.restart is not None:
        os.remove(command_line.restart)
        sys.stdout.write(' deleting starting point of the chain {0}\n'.
                         format(command_line.restart))

    return
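
# Illustrative sketch, not part of the original code: the acceptance rule
# used in the loop above, isolated with made-up log-likelihood values.
def _sketch_metropolis_acceptance(loglike=-12.3, newloglike=-12.8):
    import numpy as np
    import numpy.random as rd
    # Accept unconditionally if the new point is at least as good,
    # otherwise accept with probability exp(newloglike - loglike)
    alpha = 1. if newloglike >= loglike else np.exp(newloglike - loglike)
    return (alpha == 1.) or (rd.uniform(0, 1) < alpha)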
def parse():
    """
    Parse the command line arguments, check the basic organization of the
    output folder, and exit the program in case something goes wrong.
    """
    # Create the parser
    parser = create_parser()

    # Recover all command line arguments in the args dictionary
    args = parser.parse_args()

    # First of all, if the analyze module is invoked, there is no point in
    # checking for an existing folder
    if args.files is None:

        # If the user wants to start over from an existing chain, the
        # program will automatically use the same folder, and the
        # log.param in it
        if args.restart is not None:
            args.folder = args.restart.split('/')[0]+'/'
            args.param = args.folder+'log.param'

        # Else, the user should provide an output folder
        else:
            if args.folder is None:
                io_mp.message(
                    "You must provide an output folder, because you do "
                    "not want your main folder to look dirty, do you?",
                    "error")

            # If he did so,
            else:
                # check that the provided name ends with a /,
                if args.folder[-1] != '/':
                    args.folder += '/'
                # and if the folder already exists and contains a
                # log.param, use it instead of any provided parameter file
                if os.path.isdir(args.folder):
                    if os.path.exists(args.folder+'log.param'):
                        # if the log.param exists and a parameter file
                        # was provided, take instead the log.param, and
                        # notify the user.
                        old_param = args.param
                        args.param = args.folder+'log.param'
                        if old_param is not None:
                            io_mp.message(
                                "Appending to an existing folder: using "
                                "the log.param instead of %s" % old_param,
                                "info")
                    else:
                        if args.param is None:
                            io_mp.message(
                                "The requested output folder appears to "
                                "be empty. You must then provide a "
                                "parameter file (command line option -p "
                                "any.param)",
                                "error")
                else:
                    if args.param is None:
                        io_mp.message(
                            "The requested output folder appears to be "
                            "non existent. You must then provide a "
                            "parameter file (command line option -p "
                            "any.param)",
                            "error")
    return args
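
# Illustrative sketch, not part of the original code: typical invocations
# handled by parse(). Only the -p, -f and -conf options and the info flag
# are named elsewhere in this code; the other option names and all folder
# and file names below are assumptions.
#
#     python MontePython.py -conf default.conf -p example.param -o chains/example -N 10000
#     python MontePython.py -p example.param -o chains/example -f 0
#     python MontePython.py -info chains/example/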
def compute_lkl(cosmo, data):
    """
    Compute the likelihood, given the current point in parameter space.

    This function now performs a test before calling the cosmological
    module (**new in version 1.2**). If any cosmological parameter
    changed, the flag :code:`data.need_cosmo_update` will have been set to
    :code:`True` by the routine
    :func:`check_for_slow_step <data.data.check_for_slow_step>`.

    :Returns:
        - **loglike** (`float`) - the log of the likelihood
          (:math:`\\frac{-\\chi^2}{2}`), computed from the sum of the
          likelihoods of the experiments specified in the input parameter
          file.

          This function returns
          :attr:`data.boundary_loglike <data.data.boundary_loglike>`,
          defined in the module :mod:`data`, if *i)* the current point in
          the parameter space has hit a prior edge, or *ii)* the
          cosmological module failed to compute the model. This value is
          chosen to be extremely small (a large negative value), so that
          the step will always be rejected.

    """
    # If the cosmological module has already been called once, and if the
    # cosmological parameters have changed, then clean up, and compute.
    if (cosmo.state is True and data.need_cosmo_update is True):
        cosmo._struct_cleanup(set(["lensing", "nonlinear", "spectra",
                                   "primordial", "transfer", "perturb",
                                   "thermodynamics", "background",
                                   "bessel"]))

    # If the cosmology needs to change, then do a normal call to the
    # cosmological compute function. Note that, even if need_cosmo_update
    # is False, this function must be called if the jumping factor is set
    # to zero. Indeed, this means the code is called for only one point,
    # to set the fiducial model.
    if ((data.need_cosmo_update) or
            (not cosmo.state) or
            (data.jumping_factor == 0)):

        # Prepare the cosmological module with the new set of parameters
        cosmo.set(data.cosmo_arguments)

        # Compute the model, keeping track of the errors.
        # In classy.pyx, we made use of two types of python errors, to
        # handle two different situations.
        # - AttributeError is raised if a parameter was not properly set
        # during the initialisation (for instance, you entered Ommega_cdm
        # instead of Omega_cdm). Then, the code exits, to prevent running
        # with mistyped parameters. This behaviour is also used in case
        # you want to kill the process.
        # - NameError is raised if Class fails to compute the output given
        # the parameter values. This will be considered as a valid point,
        # but with minimum likelihood, so it will be rejected, resulting
        # in the choice of a new point.
try: cosmo.compute(["lensing"]) except NameError: return data.boundary_loglike except (AttributeError, KeyboardInterrupt): io_mp.message("Something went terribly wrong with CLASS", "error") # For each desired likelihood, compute its value against the theoretical # model loglike = 0 flag_wrote_fiducial = 0 for likelihood in data.lkl.itervalues(): if likelihood.need_update is True: value = likelihood.loglkl(cosmo, data) # Storing the result likelihood.backup_value = value # Otherwise, take the existing value else: value = likelihood.backup_value loglike += value # In case the fiducial file was written, store this information if value == 1: flag_wrote_fiducial += 1 # Compute the derived parameters if relevant if data.get_mcmc_parameters(['derived']) != []: try: cosmo.get_current_derived_parameters(data) except NameError: print('Terminating now') exit() for elem in data.get_mcmc_parameters(['derived']): data.mcmc_parameters[elem]['current'] /= \ data.mcmc_parameters[elem]['scale'] # If fiducial files were created, inform the user, and exit if flag_wrote_fiducial > 0: if flag_wrote_fiducial == len(data.lkl): print '--> Fiducial file(s) was(were) created,', print 'please start a new chain' exit() else: print '--> Some previously non-existing fiducial files ', print 'were created, but potentially not all of them' print '--> Please check now manually on the headers ', print 'of the corresponding that all' print '--> parameters are coherent for your tested models' exit() return loglike