def initialise(cosmo1, cosmo2, data, command_line):
    """
    Main call to prepare the information for the MultiNest run.

    Collects the varying/derived parameter lists, validates that every
    varying parameter has a flat, bound prior, creates the ``NS`` output
    sub-folder, fills ``data.NS_arguments`` with both automatic and
    user-supplied PyMultiNest options, reorders parameters so clustering
    parameters come first, and writes the argument/parameter-name files.

    Parameters
    ----------
    cosmo1, cosmo2 :
        Cosmology modules (unused here; kept for signature compatibility
        with the sampler dispatch).
    data :
        Monte Python data object; ``NS_arguments`` and ``NS_param_names``
        are populated as a side effect.
    command_line :
        Parsed command-line options (provides the output folder and the
        ``NS_``-prefixed user arguments).

    Raises
    ------
    io_mp.ConfigurationError
        If any prior is non-flat or unbound, or a requested clustering
        parameter is not a varying parameter.
    """
    # Convenience variables
    varying_param_names = data.get_mcmc_parameters(['varying'])
    derived_param_names = data.get_mcmc_parameters(['derived'])

    # Check that all the priors are flat and that all the parameters are bound
    is_flat, is_bound = sampler.check_flat_bound_priors(
        data.mcmc_parameters, varying_param_names)
    if not is_flat:
        raise io_mp.ConfigurationError(
            'Nested Sampling with MultiNest is only possible with flat '
            'priors. Sorry!')
    if not is_bound:
        raise io_mp.ConfigurationError(
            'Nested Sampling with MultiNest is only possible for bound '
            'parameters. Set reasonable bounds for them in the ".param" '
            'file.')

    # If absent, create the sub-folder NS
    NS_folder = os.path.join(command_line.folder, NS_subfolder)
    if not os.path.exists(NS_folder):
        os.makedirs(NS_folder)

    # Use chain name as a base name for MultiNest files
    chain_name = [a for a in command_line.folder.split(os.path.sep) if a][-1]
    base_name = os.path.join(NS_folder, chain_name)
    # FK: add base folder name to NS_arguments for later reference
    data.NS_arguments['base_dir'] = NS_folder

    # Prepare arguments for PyMultiNest
    # -- Automatic arguments
    data.NS_arguments['n_dims'] = len(varying_param_names)
    data.NS_arguments['n_params'] = (len(varying_param_names) +
                                     len(derived_param_names))
    data.NS_arguments['verbose'] = True
    data.NS_arguments['outputfiles_basename'] = base_name + NS_separator

    # -- User-defined arguments
    # Pre-initialise so the code below never hits a NameError when
    # 'clustering_params' is not among the user arguments.
    clustering_param_names = []
    for arg in NS_user_arguments:
        value = getattr(command_line, NS_prefix + arg)
        # Special case: clustering parameters (-1 is the "unset" sentinel)
        if arg == 'clustering_params':
            clustering_param_names = value if value != -1 else []
            continue
        # Rest of the cases
        if value != -1:
            data.NS_arguments[arg] = value
        # else: don't define them -> use PyMultiNest default value

    # Clustering parameters -- reordering to put them first
    NS_param_names = []
    if clustering_param_names:
        data.NS_arguments['n_clustering_params'] = len(clustering_param_names)
        for param in clustering_param_names:
            if param not in varying_param_names:
                raise io_mp.ConfigurationError(
                    'The requested clustering parameter "%s"' % param +
                    ' was not found in your ".param" file. Pick a valid one.')
            NS_param_names.append(param)
    for param in varying_param_names:
        if param not in NS_param_names:
            NS_param_names.append(param)
    data.NS_param_names = NS_param_names

    # Caveat: multi-modal sampling OFF by default; if requested, INS disabled
    try:
        if data.NS_arguments['multimodal']:
            data.NS_arguments['importance_nested_sampling'] = False
            warnings.warn('Multi-modal sampling has been requested, '
                          'so Importance Nested Sampling has been disabled')
    except KeyError:
        data.NS_arguments['multimodal'] = False

    # MPI: don't initialise it inside MultiNest.
    # Rather, it is either initialised by Monte Python (if MPI used) or ignored
    data.NS_arguments['init_MPI'] = False

    # Write the MultiNest arguments and parameter ordering
    with open(base_name + name_arguments, 'w') as afile:
        for arg in data.NS_arguments:
            if arg != 'n_clustering_params':
                afile.write(' = '.join(
                    [str(arg), str(data.NS_arguments[arg])]))
            else:
                afile.write('clustering_params = ' +
                            ' '.join(clustering_param_names))
            afile.write('\n')
    with open(base_name + name_paramnames, 'w') as pfile:
        pfile.write('\n'.join(NS_param_names + derived_param_names))
def initialise(cosmo, data, command_line):
    """
    Main call to prepare the information for the PolyChord run.

    Validates flat, bound priors for all varying parameters, creates the
    ``PC`` and ``PC/clusters`` output folders, fills ``data.PC_arguments``
    with automatic settings (output locations, fast/slow parameter grading)
    plus user-supplied options, and writes the argument and parameter-name
    files next to the chain.

    Parameters
    ----------
    cosmo :
        Cosmology module (unused here; kept for signature compatibility).
    data :
        Monte Python data object; ``PC_arguments`` and ``PC_param_names``
        are populated as a side effect.
    command_line :
        Parsed command-line options (output folder, ``PC_``-prefixed args).

    Raises
    ------
    io_mp.ConfigurationError
        If any prior is non-flat or unbound.
    """
    # Convenience variables
    varying_param_names = data.get_mcmc_parameters(['varying'])
    derived_param_names = data.get_mcmc_parameters(['derived'])
    # Slow (cosmological) vs fast (nuisance) split for PolyChord grading
    nslow = len(data.get_mcmc_parameters(['varying', 'cosmo']))
    nfast = len(data.get_mcmc_parameters(['varying', 'nuisance']))

    # Check that all the priors are flat and that all the parameters are bound
    is_flat, is_bound = sampler.check_flat_bound_priors(
        data.mcmc_parameters, varying_param_names)
    if not is_flat:
        raise io_mp.ConfigurationError(
            'Nested Sampling with PolyChord is only possible '
            'with flat priors. Sorry!')
    if not is_bound:
        raise io_mp.ConfigurationError(
            'Nested Sampling with PolyChord is only possible '
            'for bound parameters. Set reasonable bounds for them in the '
            '".param" file.')

    # If absent, create the sub-folder PC
    PC_folder = os.path.join(command_line.folder, PC_subfolder)
    if not os.path.exists(PC_folder):
        os.makedirs(PC_folder)

    # If absent, create the sub-folder PC/clusters
    PC_clusters_folder = os.path.join(PC_folder, 'clusters')
    if not os.path.exists(PC_clusters_folder):
        os.makedirs(PC_clusters_folder)

    # Use chain name as a base name for PolyChord files
    chain_name = [a for a in command_line.folder.split(os.path.sep) if a][-1]
    base_name = os.path.join(PC_folder, chain_name)

    # Prepare arguments for PyPolyChord
    # -- Automatic arguments
    data.PC_arguments['file_root'] = chain_name
    data.PC_arguments['base_dir'] = PC_folder
    data.PC_arguments['grade_dims'] = []
    data.PC_arguments['grade_frac'] = []
    # Spend 75% of the time in the slow block and 25% in the fast one
    if nslow:
        data.PC_arguments['grade_dims'].append(nslow)
        data.PC_arguments['grade_frac'].append(0.75)
    if nfast:
        data.PC_arguments['grade_dims'].append(nfast)
        data.PC_arguments['grade_frac'].append(0.25)
    # Guard against an empty grading (no varying parameters at all), which
    # previously raised IndexError on grade_dims[0].
    if data.PC_arguments['grade_dims']:
        data.PC_arguments['num_repeats'] = \
            data.PC_arguments['grade_dims'][0] * 2

    # -- User-defined arguments (-1 is the "unset" sentinel)
    for arg in PC_user_arguments:
        value = getattr(command_line, PC_prefix + arg)
        if value != -1:
            data.PC_arguments[arg] = value
        # else: don't define them -> use PyPolyChord default value

    data.PC_param_names = varying_param_names

    # Write the PolyChord arguments and parameter ordering
    with open(base_name + name_arguments, 'w') as afile:
        for arg in data.PC_arguments:
            afile.write(' = '.join(
                [str(arg), str(data.PC_arguments[arg])]))
            afile.write('\n')
    with open(base_name + name_paramnames, 'w') as pfile:
        pfile.write('\n'.join(data.PC_param_names + derived_param_names))
def run(cosmo, data, command_line):
    """
    Main call to prepare the information for the MultiNest run,
    and to actually run the MultiNest sampler.

    Note the unusual set-up here, with the two following functions, `prior`
    and `loglike` having their docstrings written in the encompassing
    function. This trick was necessary as MultiNest required these two
    functions to be defined with a given number of parameters, so we could
    not add `data`. By defining them inside the run function, this problem
    was by-passed.

    .. function:: prior

        Generate the prior function for MultiNest

        It should transform the input unit cube into the parameter cube.
        This function actually wraps the method
        :func:`map_from_unit_interval()
        <prior.Prior.map_from_unit_interval>` of the class
        :class:`Prior <prior.Prior>`.

        Parameters
        ----------
        cube : array
            Contains the current point in unit parameter space that has been
            selected within the MultiNest part.
        ndim : int
            Number of varying parameters
        nparams : int
            Total number of parameters, including the derived ones (not used,
            so hidden in `*args`)

    .. function:: loglike

        Generate the Likelihood function for MultiNest

        Parameters
        ----------
        cube : array
            Contains the current point in the correct parameter space after
            transformation from :func:`prior`.
        ndim : int
            Number of varying parameters
        nparams : int
            Total number of parameters, including the derived ones (not used,
            so hidden in `*args`)
    """
    # Convenience variables
    varying_param_names = data.get_mcmc_parameters(['varying'])
    derived_param_names = data.get_mcmc_parameters(['derived'])

    # Check that all the priors are flat and that all the parameters are bound
    is_flat, is_bound = sampler.check_flat_bound_priors(
        data.mcmc_parameters, varying_param_names)
    if not is_flat:
        raise io_mp.ConfigurationError(
            'Nested Sampling with MultiNest is only possible with flat '
            'priors. Sorry!')
    if not is_bound:
        raise io_mp.ConfigurationError(
            'Nested Sampling with MultiNest is only possible for bound '
            'parameters. Set reasonable bounds for them in the ".param" '
            'file.')

    # If absent, create the sub-folder NS
    NS_folder = os.path.join(command_line.folder, NS_subfolder)
    if not os.path.exists(NS_folder):
        os.makedirs(NS_folder)

    # Use chain name as a base name for MultiNest files
    chain_name = [a for a in command_line.folder.split(os.path.sep) if a][-1]
    base_name = os.path.join(NS_folder, chain_name)

    # Prepare arguments for PyMultiNest
    # -- Automatic arguments
    data.NS_arguments['n_dims'] = len(varying_param_names)
    data.NS_arguments['n_params'] = (len(varying_param_names) +
                                     len(derived_param_names))
    data.NS_arguments['verbose'] = True
    data.NS_arguments['outputfiles_basename'] = base_name + NS_separator

    # -- User-defined arguments
    # Pre-initialise so the code below never hits a NameError when
    # 'clustering_params' is not among the user arguments.
    clustering_param_names = []
    for arg in NS_user_arguments:
        value = getattr(command_line, NS_prefix + arg)
        # Special case: clustering parameters (-1 is the "unset" sentinel)
        if arg == 'clustering_params':
            clustering_param_names = value if value != -1 else []
            continue
        # Rest of the cases
        if value != -1:
            data.NS_arguments[arg] = value
        # else: don't define them -> use PyMultiNest default value

    # Clustering parameters -- reordering to put them first
    NS_param_names = []
    if clustering_param_names:
        data.NS_arguments['n_clustering_params'] = len(clustering_param_names)
        for param in clustering_param_names:
            if param not in varying_param_names:
                raise io_mp.ConfigurationError(
                    'The requested clustering parameter "%s"' % param +
                    ' was not found in your ".param" file. Pick a valid one.')
            NS_param_names.append(param)
    for param in varying_param_names:
        if param not in NS_param_names:
            NS_param_names.append(param)

    # Caveat: multi-modal sampling OFF by default; if requested, INS disabled
    try:
        if data.NS_arguments['multimodal']:
            data.NS_arguments['importance_nested_sampling'] = False
            warnings.warn('Multi-modal sampling has been requested, '
                          'so Importance Nested Sampling has been disabled')
    except KeyError:
        data.NS_arguments['multimodal'] = False

    # Write the MultiNest arguments and parameter ordering
    with open(base_name + name_arguments, 'w') as afile:
        for arg in data.NS_arguments:
            if arg != 'n_clustering_params':
                afile.write(' = '.join(
                    [str(arg), str(data.NS_arguments[arg])]))
            else:
                afile.write('clustering_params = ' +
                            ' '.join(clustering_param_names))
            afile.write('\n')
    with open(base_name + name_paramnames, 'w') as pfile:
        pfile.write('\n'.join(NS_param_names + derived_param_names))

    # Function giving the prior probability
    def prior(cube, ndim, *args):
        """
        Please see the encompassing function docstring

        Maps each unit-cube coordinate through the corresponding
        parameter's flat prior, in the (clustering-first) NS ordering.
        """
        for i, name in zip(range(ndim), NS_param_names):
            cube[i] = data.mcmc_parameters[name]['prior']\
                .map_from_unit_interval(cube[i])

    # Function giving the likelihood probability
    def loglike(cube, ndim, *args):
        """
        Please see the encompassing function docstring
        """
        # Updates values: cube --> data
        for i, name in zip(range(ndim), NS_param_names):
            data.mcmc_parameters[name]['current'] = cube[i]
        # Propagate the information towards the cosmo arguments
        data.update_cosmo_arguments()
        lkl = sampler.compute_lkl(cosmo, data)
        # Store derived parameters back in the cube for MultiNest output
        for i, name in enumerate(derived_param_names):
            cube[ndim + i] = data.mcmc_parameters[name]['current']
        return lkl

    # Launch MultiNest, and recover the output code
    output = nested_run(loglike, prior, **data.NS_arguments)

    # Assuming this worked, i.e. if output is `None`,
    # state it and suggest the user to analyse the output.
    if output is None:
        warnings.warn('The sampling with MultiNest is done.\n'
                      'You can now analyse the output calling Monte Python '
                      'with the -info flag in the chain_name/NS subfolder, '
                      'or, if you used multimodal sampling, in the '
                      'chain_name/mode_# subfolders.')
def initialise(cosmo, data, command_line):
    """
    Main call to prepare the information for the NeuralNest run.

    Fills ``data.NN_arguments`` with automatic settings (dimensions, output
    directory, network options) and user-supplied options, and on the first
    run writes the argument and parameter-name files.

    Parameters
    ----------
    cosmo :
        Cosmology module (unused here; kept for signature compatibility).
    data :
        Monte Python data object; ``NN_arguments`` and ``NN_param_names``
        are populated as a side effect.
    command_line :
        Parsed command-line options (output folder, ``NN_``-prefixed args).

    Raises
    ------
    io_mp.ConfigurationError
        In 'nested' mode, if any prior is non-flat or unbound.
    """
    # Convenience variables
    varying_param_names = data.get_mcmc_parameters(['varying'])
    derived_param_names = data.get_mcmc_parameters(['derived'])
    # Flat/bound priors are only required for the nested-sampling mode
    if getattr(command_line, NN_prefix + 'sampler', '').lower() == 'nested':
        # Check that all the priors are flat and that all the parameters are bound
        is_flat, is_bound = sampler.check_flat_bound_priors(
            data.mcmc_parameters, varying_param_names)
        if not is_flat:
            raise io_mp.ConfigurationError(
                'Nested Sampling with NeuralNest is only possible with flat ' +
                'priors. Sorry!')
        if not is_bound:
            raise io_mp.ConfigurationError(
                'Nested Sampling with NeuralNest is only possible for bound ' +
                'parameters. Set reasonable bounds for them in the ".param"' +
                'file.')
    # If absent, create the sub-folder NS
    NN_folder = os.path.join(command_line.folder, NN_subfolder)
    if not os.path.exists(NN_folder):
        os.makedirs(NN_folder)
    # Each run gets its own numbered sub-directory: count the existing
    # run directories and use the next integer
    run_num = sum(
        os.path.isdir(os.path.join(NN_folder, i))
        for i in os.listdir(NN_folder)) + 1
    # -- Automatic arguments
    data.NN_arguments['x_dim'] = len(varying_param_names)
    data.NN_arguments['num_derived'] = len(derived_param_names)
    data.NN_arguments['verbose'] = True
    data.NN_arguments['log_dir'] = os.path.join(NN_folder, str(run_num))
    data.NN_arguments['use_gpu'] = False
    # 'nvp' presumably selects a RealNVP normalising flow -- TODO confirm
    # against the NeuralNest (nnest) documentation
    data.NN_arguments['flow'] = 'nvp'
    data.NN_arguments['load_model'] = ''
    data.NN_arguments['batch_size'] = 100
    # With fast/slow splitting enabled, the first parameter block is slow
    if getattr(command_line, NN_prefix + 'fastslow'):
        data.NN_arguments['num_slow'] = data.block_parameters[0]
    else:
        data.NN_arguments['num_slow'] = 0
    # -- User-defined arguments
    # NOTE(review): unlike the other samplers, values are stored
    # unconditionally (no -1 "unset" filter); only 'switch' gets a
    # derived fallback when negative -- confirm the backend treats a
    # negative value for the other options as "use default"
    for arg in NN_user_arguments:
        value = getattr(command_line, NN_prefix + arg)
        data.NN_arguments[arg] = value
        if arg == 'switch':
            if value >= 0:
                data.NN_arguments['switch'] = value
            elif data.NN_arguments['num_slow'] > 0:
                # Default switch scales inversely with the slow-block size
                data.NN_arguments['switch'] = 1.0 / (
                    5 * data.NN_arguments['num_slow'])
    # In MCMC mode, the chain length comes from the standard -N option
    if getattr(command_line, NN_prefix + 'sampler', '').lower() == 'mcmc':
        data.NN_arguments['mcmc_steps'] = getattr(command_line, 'N')
    data.NN_param_names = varying_param_names
    base_name = os.path.join(NN_folder, 'base')
    # Only the first run writes the bookkeeping files (shared by later runs)
    if run_num == 1:
        # Write the NeuralNest arguments and parameter ordering
        with open(base_name + name_arguments, 'w') as afile:
            for arg in data.NN_arguments:
                afile.write(' = '.join([str(arg),
                                        str(data.NN_arguments[arg])]))
                afile.write('\n')
        with open(base_name + name_paramnames, 'w') as pfile:
            pfile.write('\n'.join(data.NN_param_names + derived_param_names))
def initialise(cosmo, data, command_line):
    """
    Main call to prepare the information for the MultiNest run.

    Validates flat, bound priors for all varying parameters, creates the
    ``NS`` output sub-folder, fills ``data.NS_arguments`` with automatic
    and user-supplied PyMultiNest options, reorders parameters so
    clustering parameters come first, and writes the argument and
    parameter-name files.

    Parameters
    ----------
    cosmo :
        Cosmology module (unused here; kept for signature compatibility).
    data :
        Monte Python data object; ``NS_arguments`` and ``NS_param_names``
        are populated as a side effect.
    command_line :
        Parsed command-line options (output folder, ``NS_``-prefixed args).

    Raises
    ------
    io_mp.ConfigurationError
        If any prior is non-flat or unbound, or a requested clustering
        parameter is not a varying parameter.
    """
    # Convenience variables
    varying_param_names = data.get_mcmc_parameters(['varying'])
    derived_param_names = data.get_mcmc_parameters(['derived'])

    # Check that all the priors are flat and that all the parameters are bound
    is_flat, is_bound = sampler.check_flat_bound_priors(
        data.mcmc_parameters, varying_param_names)
    if not is_flat:
        raise io_mp.ConfigurationError(
            'Nested Sampling with MultiNest is only possible with flat '
            'priors. Sorry!')
    if not is_bound:
        raise io_mp.ConfigurationError(
            'Nested Sampling with MultiNest is only possible for bound '
            'parameters. Set reasonable bounds for them in the ".param" '
            'file.')

    # If absent, create the sub-folder NS
    NS_folder = os.path.join(command_line.folder, NS_subfolder)
    if not os.path.exists(NS_folder):
        os.makedirs(NS_folder)

    # Use chain name as a base name for MultiNest files
    chain_name = [a for a in command_line.folder.split(os.path.sep) if a][-1]
    base_name = os.path.join(NS_folder, chain_name)

    # Prepare arguments for PyMultiNest
    # -- Automatic arguments
    data.NS_arguments['n_dims'] = len(varying_param_names)
    data.NS_arguments['n_params'] = (len(varying_param_names) +
                                     len(derived_param_names))
    data.NS_arguments['verbose'] = True
    data.NS_arguments['outputfiles_basename'] = base_name + NS_separator

    # -- User-defined arguments
    # Pre-initialise so the code below never hits a NameError when
    # 'clustering_params' is not among the user arguments.
    clustering_param_names = []
    for arg in NS_user_arguments:
        value = getattr(command_line, NS_prefix + arg)
        # Special case: clustering parameters (-1 is the "unset" sentinel)
        if arg == 'clustering_params':
            clustering_param_names = value if value != -1 else []
            continue
        # Rest of the cases
        if value != -1:
            data.NS_arguments[arg] = value
        # else: don't define them -> use PyMultiNest default value

    # Clustering parameters -- reordering to put them first
    NS_param_names = []
    if clustering_param_names:
        data.NS_arguments['n_clustering_params'] = len(clustering_param_names)
        for param in clustering_param_names:
            if param not in varying_param_names:
                raise io_mp.ConfigurationError(
                    'The requested clustering parameter "%s"' % param +
                    ' was not found in your ".param" file. Pick a valid one.')
            NS_param_names.append(param)
    for param in varying_param_names:
        if param not in NS_param_names:
            NS_param_names.append(param)
    data.NS_param_names = NS_param_names

    # Caveat: multi-modal sampling OFF by default; if requested, INS disabled
    try:
        if data.NS_arguments['multimodal']:
            data.NS_arguments['importance_nested_sampling'] = False
            warnings.warn('Multi-modal sampling has been requested, '
                          'so Importance Nested Sampling has been disabled')
    except KeyError:
        data.NS_arguments['multimodal'] = False

    # MPI: don't initialise it inside MultiNest.
    # Rather, it is either initialised by Monte Python (if MPI used) or ignored
    data.NS_arguments['init_MPI'] = False

    # Write the MultiNest arguments and parameter ordering
    with open(base_name + name_arguments, 'w') as afile:
        for arg in data.NS_arguments:
            if arg != 'n_clustering_params':
                afile.write(' = '.join(
                    [str(arg), str(data.NS_arguments[arg])]))
            else:
                afile.write('clustering_params = ' +
                            ' '.join(clustering_param_names))
            afile.write('\n')
    with open(base_name + name_paramnames, 'w') as pfile:
        pfile.write('\n'.join(NS_param_names + derived_param_names))
def run(cosmo, data, command_line):
    """
    Sample with the CosmoHammer.

    Builds a ``LikelihoodComputationChain`` from the varying parameters,
    registers ``data`` and ``cosmo`` as core modules (in that order) and
    every likelihood as a likelihood module, writes the user options to
    disk, then launches the ``CosmoHammerSampler``.

    Parameters
    ----------
    cosmo :
        Cosmology module, registered as a core module of the chain.
    data :
        Monte Python data object; ``CH_arguments`` is populated as a side
        effect.
    command_line :
        Parsed command-line options (output folder, ``CH_``-prefixed args).

    Raises
    ------
    io_mp.ConfigurationError
        If any prior is non-flat or unbound.
    """
    # Store the parameters inside the format expected by CosmoHammer
    # TODO: about the derived params?
    parameter_names = data.get_mcmc_parameters(["varying"])

    # Ensure that their prior is bound and flat
    is_flat, is_bound = sampler.check_flat_bound_priors(
        data.mcmc_parameters, parameter_names)
    if not is_flat:
        raise io_mp.ConfigurationError(
            'The Cosmo Hammer is only available with flat '
            'priors. Sorry!')
    if not is_bound:
        raise io_mp.ConfigurationError(
            'The Cosmo Hammer is only available for bound '
            'parameters. Set reasonable bounds for them in the ".param" '
            'file.')

    params = []
    for parameter in parameter_names:
        params.append(data.mcmc_parameters[parameter]['initial'])
    params = np.array(params)

    # If absent, create the sub-folder CH
    CH_folder = os.path.join(command_line.folder, CH_subfolder)
    if not os.path.exists(CH_folder):
        os.makedirs(CH_folder)

    # Initialize a chain object (Beware, the concept is quite different than
    # the chain of the module :mod:`mcmc`)
    chain = LikelihoodComputationChain(
        min=params[:, 1],
        max=params[:, 2])

    # Add data and cosmo as two core modules. Note that the order is important
    # here, since data must be called before cosmo.
    chain.addCoreModule(data)
    chain.addCoreModule(cosmo)

    # Add each likelihood class as a LikelihoodModule.
    # Use .values() instead of the Python-2-only .itervalues() so this
    # works on both Python 2 and 3.
    for likelihood in data.lkl.values():
        chain.addLikelihoodModule(likelihood)

    # Define the file prefix
    chain_name = [a for a in command_line.folder.split(os.path.sep) if a][-1]
    file_prefix = os.path.join(
        command_line.folder, os.path.join(CH_subfolder, chain_name))

    # Recover the User options (-1 is the "unset" sentinel)
    data.CH_arguments = {}
    for arg in CH_user_arguments:
        value = getattr(command_line, CH_prefix + arg)
        if value != -1:
            data.CH_arguments[arg] = value
        # else, do not define them, and leave the default Cosmo Hammer ones.

    # Write the CosmoHammer arguments
    with open(file_prefix + name_arguments, 'w') as arg_file:
        for arg in data.CH_arguments:
            arg_file.write(
                ' = '.join([str(arg), str(data.CH_arguments[arg])]) + '\n')

    # Create an extension to the SampleFileUtil from cosmoHammer
    derived_util = DerivedUtil(file_prefix)

    try:
        num_threads = int(os.environ['OMP_NUM_THREADS'])
    except KeyError:
        warnings.warn(
            "The environment variable OMP_NUM_THREADS is not set. "
            "To run the Cosmo Hammer meaningfully, you should better "
            "set it to something! Defaulting to 1 for now.")
        num_threads = 1

    # Create the Sampler object
    sampler_hammer = CosmoHammerSampler(
        params=params,
        likelihoodComputationChain=chain,
        filePrefix=file_prefix,
        storageUtil=derived_util,
        threadCount=num_threads,
        **data.CH_arguments)

    # create console handler and set level to debug (does not seem to appear)
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.DEBUG)
    logging.getLogger().addHandler(console_handler)

    sampler_hammer.startSampling()
def run(cosmo, data, command_line):
    """
    Sample with the CosmoHammer.

    Builds a ``LikelihoodComputationChain`` from the varying parameters,
    registers ``data`` and ``cosmo`` as core modules (in that order) and
    every likelihood as a likelihood module, writes the user options to
    disk, then launches the ``CosmoHammerSampler``.

    Parameters
    ----------
    cosmo :
        Cosmology module, registered as a core module of the chain.
    data :
        Monte Python data object; ``CH_arguments`` is populated as a side
        effect.
    command_line :
        Parsed command-line options (output folder, ``CH_``-prefixed args).

    Raises
    ------
    io_mp.ConfigurationError
        If any prior is non-flat or unbound.
    """
    # Store the parameters inside the format expected by CosmoHammer
    # TODO: about the derived params?
    parameter_names = data.get_mcmc_parameters(["varying"])

    # Ensure that their prior is bound and flat
    is_flat, is_bound = sampler.check_flat_bound_priors(
        data.mcmc_parameters, parameter_names)
    if not is_flat:
        raise io_mp.ConfigurationError(
            'The Cosmo Hammer is only available with flat '
            'priors. Sorry!')
    if not is_bound:
        raise io_mp.ConfigurationError(
            'The Cosmo Hammer is only available for bound '
            'parameters. Set reasonable bounds for them in the ".param" '
            'file.')

    params = []
    for parameter in parameter_names:
        params.append(data.mcmc_parameters[parameter]['initial'])
    params = np.array(params)

    # If absent, create the sub-folder CH
    CH_folder = os.path.join(command_line.folder, CH_subfolder)
    if not os.path.exists(CH_folder):
        os.makedirs(CH_folder)

    # Initialize a chain object (Beware, the concept is quite different than
    # the chain of the module :mod:`mcmc`)
    chain = LikelihoodComputationChain(min=params[:, 1], max=params[:, 2])

    # Add data and cosmo as two core modules. Note that the order is important
    # here, since data must be called before cosmo.
    chain.addCoreModule(data)
    chain.addCoreModule(cosmo)

    # Add each likelihood class as a LikelihoodModule
    for likelihood in dictvalues(data.lkl):
        chain.addLikelihoodModule(likelihood)

    # Define the file prefix
    chain_name = [a for a in command_line.folder.split(os.path.sep) if a][-1]
    file_prefix = os.path.join(command_line.folder, CH_subfolder, chain_name)

    # Recover the User options (-1 is the "unset" sentinel)
    data.CH_arguments = {}
    for arg in CH_user_arguments:
        value = getattr(command_line, CH_prefix + arg)
        if value != -1:
            data.CH_arguments[arg] = value
        # else, do not define them, and leave the default Cosmo Hammer ones.

    # Write the CosmoHammer arguments
    with open(file_prefix + name_arguments, 'w') as arg_file:
        for arg in data.CH_arguments:
            arg_file.write(
                ' = '.join([str(arg), str(data.CH_arguments[arg])]) + '\n')

    # Create an extension to the SampleFileUtil from cosmoHammer
    derived_util = DerivedUtil(file_prefix)

    try:
        num_threads = int(os.environ['OMP_NUM_THREADS'])
    except KeyError:
        warnings.warn(
            "The environment variable OMP_NUM_THREADS is not set. "
            "To run the Cosmo Hammer meaningfully, you should better "
            "set it to something! Defaulting to 1 for now.")
        num_threads = 1

    # Merge the hard-coded defaults with the user options instead of passing
    # both: previously, a user-supplied walkersRatio, burninIterations or
    # sampleIterations raised TypeError ("got multiple values for keyword
    # argument"). User values take precedence over these defaults.
    sampler_settings = {
        'walkersRatio': 50,
        'burninIterations': 10,
        'sampleIterations': 30,
    }
    sampler_settings.update(data.CH_arguments)

    # Create the Sampler object
    sampler_hammer = CosmoHammerSampler(
        params=params,
        likelihoodComputationChain=chain,
        filePrefix=file_prefix,
        storageUtil=derived_util,
        threadCount=num_threads,
        **sampler_settings)

    # create console handler and set level to debug (does not seem to appear)
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.DEBUG)
    logging.getLogger().addHandler(console_handler)

    sampler_hammer.startSampling()