Esempio n. 1
0
    def initialize(self):
        """Resolve the dataset file location, then load it.

        An absolute ``dataset_file`` is used as-is (and ``path`` is derived
        from it); otherwise the file is looked up under ``path``, which may
        come from the external-packages installation or the class default.
        """
        dataset = self.dataset_file
        if os.path.isabs(dataset):
            resolved = dataset
            self.path = os.path.dirname(resolved)
        else:
            # If no path specified and has install options (so it installed its data as an
            # external package), use the external packages path
            if not self.path and self.get_install_options() and self.packages_path:
                self.path = self.get_path(self.packages_path)
            if not self.path:
                self.path = self.get_class_path()
            if not self.path:
                raise LoggedError(self.log,
                                  "No path given for %s. Set the likelihood "
                                  "property 'path' or the common property '%s'.",
                                  self.dataset_file, packages_path_input)

            resolved = os.path.normpath(os.path.join(self.path, dataset))
        if not os.path.exists(resolved):
            raise NotInstalledError(
                self.log, "The data file '%s' could not be found at '%s'. "
                          "Either you have not installed this likelihood, "
                          "or have given the wrong packages installation path.",
                self.dataset_file, self.path)
        self.load_dataset_file(resolved, getattr(self, 'dataset_params', {}))
Esempio n. 2
0
def get_camb(packages_path):
    """Import and return the CAMB module installed under *packages_path*.

    Looks for the module at ``<processed packages_path>/code/CAMB``.

    :param packages_path: root of the external packages installation.
    :raises NotInstalledError: if CAMB cannot be imported from there.
    """
    try:
        return load_module("camb",
                           path=os.path.join(
                               process_packages_path(packages_path), "code",
                               "CAMB"))
    except ModuleNotFoundError as excpt:
        # Chain explicitly so the original import failure stays visible.
        raise NotInstalledError(None) from excpt
Esempio n. 3
0
    def initialize(self):
        """Importing CAMB from the correct path, if given."""
        # Allow global import if no direct path specification
        allow_global = not self.path
        if not self.path and self.packages_path:
            self.path = self.get_path(self.packages_path)
        # check=False: do not re-verify the installation here, just import
        self.camb = self.is_installed(path=self.path,
                                      allow_global=allow_global,
                                      check=False)
        if not self.camb:
            raise NotInstalledError(
                self.log, "Could not find CAMB. Check error message above.")
        super().initialize()
        # Start with all CMB/Cl computations switched off; requirements
        # requested later will turn on whatever is needed.
        self.extra_attrs = {
            "Want_CMB": False,
            "Want_cl_2D_array": False,
            'WantCls': False
        }
        # Derived parameters that may not have been requested, but will be necessary later
        self.derived_extra = []
        # Some default settings
        self.needs_perts = False
        self.limber = False
        self.non_linear_sources = False
        self.non_linear_pk = False
        self._base_params = None
        self._needs_lensing_cross = False
        self._sigmaR_z_indices = {}

        # Primordial power spectrum: with an external P(k) use CAMB's
        # SplinedInitialPower and take no power-law parameters; otherwise
        # resolve the class named in extra_args (default InitialPowerLaw)
        # and extract the parameters its set_params accepts.
        if self.external_primordial_pk:
            self.extra_args['initial_power_model'] \
                = self.camb.initialpower.SplinedInitialPower
            self.initial_power_args, self.power_params = {}, []
        else:
            power_spectrum = self.camb.CAMBparams.make_class_named(
                self.extra_args.get('initial_power_model',
                                    self.camb.initialpower.InitialPowerLaw),
                self.camb.initialpower.InitialPower)
            self.initial_power_args, self.power_params = \
                self._extract_params(power_spectrum.set_params)

        # Non-linear model: same pattern (default Halofit)
        nonlin = self.camb.CAMBparams.make_class_named(
            self.extra_args.get('non_linear_model',
                                self.camb.nonlinear.Halofit),
            self.camb.nonlinear.NonLinearModel)

        self.nonlin_args, self.nonlin_params = self._extract_params(
            nonlin.set_params)

        # Split explicit requirements into those handled by the transfer
        # functions and those this component can support directly
        self.requires = str_to_list(getattr(self, "requires", []))
        self._transfer_requires = [
            p for p in self.requires if p not in self.get_can_support_params()
        ]
        self.requires = [
            p for p in self.requires if p not in self._transfer_requires
        ]
        self.log.info("Initialized!")
Esempio n. 4
0
 def initialize(self):
     """Import the CLASS python wrapper and prepare its default settings."""
     # A global import is only acceptable when no explicit path was given
     allow_global = not self.path
     if self.packages_path and not self.path:
         self.path = self.get_path(self.packages_path)
     self.classy_module = self.is_installed(path=self.path,
                                            allow_global=allow_global)
     if not self.classy_module:
         raise NotInstalledError(
             self.log, "Could not find CLASS. Check error message above.")
     self.classy = self.classy_module.Class()
     super().initialize()
     # CLASS always needs an "output" entry, even if empty
     self.extra_args.setdefault("output", "")
     # Resolve the BBN table location relative to the CLASS install path
     if "sBBN file" in self.extra_args:
         self.extra_args["sBBN file"] = (
             self.extra_args["sBBN file"].format(classy=self.path))
     # Derived parameters that may not have been requested, but needed later
     self.derived_extra = []
     self.log.info("Initialized!")
Esempio n. 5
0
 def initialize(self):
     """Imports the PolyChord sampler and prepares its arguments.

     Resolves the PolyChord installation, translates Cobaya options into a
     ``PolyChordSettings`` object, prepares output folders, the parameter
     blocking/oversampling and the prior map from the unit hypercube.
     """
     # Allow global import if no direct path specification
     allow_global = not self.path
     if not self.path and self.packages_path:
         self.path = self.get_path(self.packages_path)
     self.pc = self.is_installed(path=self.path, allow_global=allow_global)
     if not self.pc:
         raise NotInstalledError(
             self.log,
             "Could not find PolyChord. Check error message above. "
             "To install it, run 'cobaya-install polychord --%s "
             "[packages_path]'", _packages_path_arg)
     # Prepare arguments and settings
     from pypolychord.settings import PolyChordSettings
     self.n_sampled = len(self.model.parameterization.sampled_params())
     self.n_derived = len(self.model.parameterization.derived_params())
     self.n_priors = len(self.model.prior)
     self.n_likes = len(self.model.likelihood)
     self.nDims = self.model.prior.d()
     # PolyChord's derived vector also carries per-prior and per-like values
     self.nDerived = (self.n_derived + self.n_priors + self.n_likes)
     if self.logzero is None:
         self.logzero = np.nan_to_num(-np.inf)
     if self.max_ndead == np.inf:
         self.max_ndead = -1
     # Resolve quantities given in units of the dimension ("d") or of nlive
     self._quants_d_units = ["nlive", "max_ndead"]
     for p in self._quants_d_units:
         if getattr(self, p) is not None:
             setattr(
                 self, p,
                 NumberWithUnits(getattr(self, p),
                                 "d",
                                 scale=self.nDims,
                                 dtype=int).value)
     self._quants_nlive_units = ["nprior"]
     for p in self._quants_nlive_units:
         if getattr(self, p) is not None:
             setattr(
                 self, p,
                 NumberWithUnits(getattr(self, p),
                                 "nlive",
                                 scale=self.nlive,
                                 dtype=int).value)
     # Fill the automatic ones
     if getattr(self, "feedback", None) is None:
         # Map Cobaya's logging level onto PolyChord's integer feedback level
         values = {
             logging.CRITICAL: 0,
             logging.ERROR: 0,
             logging.WARNING: 0,
             logging.INFO: 1,
             logging.DEBUG: 2
         }
         self.feedback = values[self.log.getEffectiveLevel()]
     # Prepare output folders and prefixes
     if self.output:
         self.file_root = self.output.prefix
         self.read_resume = self.output.is_resuming()
     else:
         # No persistent output: random file root, shared across MPI processes
         output_prefix = share_mpi(
             hex(int(random() * 16**6))[2:] if is_main_process() else None)
         self.file_root = output_prefix
         # dummy output -- no resume!
         self.read_resume = False
     self.base_dir = self.get_base_dir(self.output)
     self.raw_clusters_dir = os.path.join(self.base_dir, self._clusters_dir)
     self.output.create_folder(self.base_dir)
     if self.do_clustering:
         self.clusters_folder = self.get_clusters_dir(self.output)
         self.output.create_folder(self.clusters_folder)
     self.mpi_info("Storing raw PolyChord output in '%s'.", self.base_dir)
     # Exploiting the speed hierarchy
     if self.blocking:
         blocks, oversampling_factors = self.model.check_blocking(
             self.blocking)
     else:
         if self.measure_speeds:
             self.model.measure_and_set_speeds(n=self.measure_speeds)
         blocks, oversampling_factors = self.model.get_param_blocking_for_sampler(
             oversample_power=self.oversample_power)
     self.mpi_info("Parameter blocks and their oversampling factors:")
     max_width = len(str(max(oversampling_factors)))
     for f, b in zip(oversampling_factors, blocks):
         self.mpi_info("* %" + "%d" % max_width + "d : %r", f, b)
     # Save blocking in updated info, in case we want to resume
     self._updated_info["blocking"] = list(zip(oversampling_factors,
                                               blocks))
     blocks_flat = list(chain(*blocks))
     # Map from Cobaya's sampled-parameter order to the blocked order
     self.ordering = [
         blocks_flat.index(p)
         for p in self.model.parameterization.sampled_params()
     ]
     self.grade_dims = [len(block) for block in blocks]
     # Steps per block
     # NB: num_repeats is ignored by PolyChord when int "grade_frac" given,
     # so needs to be applied by hand.
     # In num_repeats, `d` is interpreted as dimension of each block
     self.grade_frac = [
         int(o * read_dnumber(self.num_repeats, dim_block))
         for o, dim_block in zip(oversampling_factors, self.grade_dims)
     ]
     # Assign settings
     # (fixed: removed duplicated list entries -- harmless, since the dict
     # comprehension below deduplicates, but confusing)
     pc_args = [
         "nlive", "num_repeats", "nprior", "do_clustering",
         "precision_criterion", "max_ndead", "boost_posterior", "feedback",
         "logzero", "posteriors", "equals", "compression_factor",
         "cluster_posteriors", "write_resume", "read_resume", "write_stats",
         "write_live", "write_dead", "base_dir", "file_root", "grade_frac",
         "grade_dims"
     ]
     # As stated above, num_repeats is ignored, so let's not pass it
     pc_args.remove("num_repeats")
     self.pc_settings = PolyChordSettings(
         self.nDims,
         self.nDerived,
         seed=(self.seed if self.seed is not None else -1),
         **{
             p: getattr(self, p)
             for p in pc_args if getattr(self, p) is not None
         })
     # prior conversion from the hypercube
     bounds = self.model.prior.bounds(
         confidence_for_unbounded=self.confidence_for_unbounded)
     # Check if priors are bounded (nan's to inf)
     inf = np.where(np.isinf(bounds))
     if len(inf[0]):
         params_names = self.model.parameterization.sampled_params()
         params = [params_names[i] for i in sorted(list(set(inf[0])))]
         # (fixed: the message used to be built from adjacent string
         # literals, which Python fuses into the str.join separator,
         # garbling the error text; pass the joined names as a lazy arg)
         raise LoggedError(
             self.log,
             "PolyChord needs bounded priors, but the parameter(s) '%s' "
             "is(are) unbounded.", "', '".join(params))
     locs = bounds[:, 0]
     scales = bounds[:, 1] - bounds[:, 0]
     # This function re-scales the parameters AND puts them in the right order
     self.pc_prior = lambda x: (locs + np.array(x)[self.ordering] * scales
                                ).tolist()
     # We will need the volume of the prior domain, since PolyChord divides by it
     self.logvolume = np.log(np.prod(scales))
     # Prepare callback function
     if self.callback_function is not None:
         self.callback_function_callable = (get_external_function(
             self.callback_function))
     self.last_point_callback = 0
     # Prepare runtime live and dead points collections
     self.live = Collection(self.model,
                            None,
                            name="live",
                            initial_size=self.pc_settings.nlive)
     self.dead = Collection(self.model, self.output, name="dead")
     # Done!
     if is_main_process():
         self.log.debug("Calling PolyChord with arguments:")
         for p, v in inspect.getmembers(self.pc_settings,
                                        lambda a: not (callable(a))):
             if not p.startswith("_"):
                 self.log.debug("  %s: %s", p, v)
     self.mpi_info("Initialized!")
Esempio n. 6
0
 def initialize(self):
     """Locate and import the 'clik' code, load the .clik data file, and
     work out which C_l spectra and l-ranges this likelihood needs."""
     if "2015" in self.get_name():
         for line in _deprecation_msg_2015.split("\n"):
             self.log.warning(line)
     # NOTE(review): code_path appears unused in this method -- confirm
     code_path = common_path
     data_path = get_data_path(self.__class__.get_qualified_class_name())
     # Allow global import if no direct path specification
     allow_global = not self.path
     if self.path:
         self.path_clik = self.path
     elif self.packages_path:
         self.path_clik = self.get_code_path(self.packages_path)
     else:
         raise LoggedError(
             self.log, "No path given to the Planck likelihood. Set the "
                       "likelihood property 'path' or the common property "
                       "'%s'.", _packages_path)
     # Returns the imported clik module (falsy if not installed)
     clik = is_installed_clik(path=self.path_clik, allow_global=allow_global)
     if not clik:
         raise NotInstalledError(
             self.log, "Could not find the 'clik' Planck likelihood code. "
                       "Check error message above.")
     # Loading the likelihood data
     if not os.path.isabs(self.clik_file):
         self.path_data = getattr(self, "path_data", os.path.join(
             self.path or self.packages_path, "data", data_path))
         self.clik_file = os.path.join(self.path_data, self.clik_file)
     # Differences in the wrapper for lensing and non-lensing likes
     self.lensing = clik.try_lensing(self.clik_file)
     try:
         self.clik = clik.clik_lensing(self.clik_file) if self.lensing \
             else clik.clik(self.clik_file)
     except clik.lkl.CError:
         # Is it that the file was not found?
         if not os.path.exists(self.clik_file):
             raise NotInstalledError(
                 self.log, "The .clik file was not found where specified in the "
                           "'clik_file' field of the settings of this likelihood. "
                           "Maybe the 'path' given is not correct? The full path where"
                           " the .clik file was searched for is '%s'", self.clik_file)
         # Else: unknown clik error
         self.log.error("An unexpected error occurred in clik (possibly related to "
                        "multiple simultaneous initialization, or simultaneous "
                        "initialization of incompatible likelihoods (e.g. polarised "
                        "vs non-polarised 'lite' likelihoods. See error info below:")
         raise
     self.l_maxs = self.clik.get_lmax()
     # calculate requirements here so class can also be separately instantiated
     requested_cls = ["tt", "ee", "bb", "te", "tb", "eb"]
     if self.lensing:
         # For lensing likes, an lmax of -1 marks an unused spectrum;
         # "pp" (lensing potential) is prepended to the list
         has_cl = [lmax != -1 for lmax in self.l_maxs]
         requested_cls = ["pp"] + requested_cls
     else:
         has_cl = self.clik.get_has_cl()
     # Keep only the spectra that the .clik file actually uses
     self.requested_cls = [cl for cl, i in zip(requested_cls, has_cl) if int(i)]
     self.l_maxs_cls = [lmax for lmax, i in zip(self.l_maxs, has_cl) if int(i)]
     self.expected_params = list(self.clik.extra_parameter_names)
     # Placeholder for vector passed to clik
     # (one entry per l per spectrum slot, plus one per nuisance parameter;
     # presumably clik expects lmax+1 values per slot -- the `+ length`
     # term accounts for the l=0 entries. TODO confirm)
     length = (len(self.l_maxs) if self.lensing else len(self.clik.get_has_cl()))
     self.vector = np.zeros(np.sum(self.l_maxs) + length + len(self.expected_params))
     self.log.info("Initialized!")