Example #1
 def initialise(self):
     self.tot_theory_fields = len(self.field_names)
     assert self.dataset_file.endswith('.dataset'), (
         "Make sure you are passing a .dataset file!")
     if not self.path:
         path_to_installation = get_path_to_installation()
         if path_to_installation:
             from importlib import import_module
             self.path = getattr(
                 import_module(package + ".likelihoods." + self.name,
                               package=package),
                 "get_path")(path_to_installation)
         else:
             self.log.error(
                 "No path given to the %s likelihood. Set the likelihood"
                 " property 'path' or the common property '%s'.",
                 self.name, _path_install)
             raise HandledException
     self.dataset_file_path = os.path.join(self.path, self.dataset_file)
     self.log.info("Reading data from %s", self.dataset_file_path)
     if not os.path.exists(self.dataset_file_path):
         self.log.error(
             "The likelihood is not installed in the given path: "
             "cannot find the file '%s'.", self.dataset_file_path)
         raise HandledException
     self.loadDataset(self.dataset_file_path, self.dataset_params)
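
The snippet resolves its data location dynamically: it imports the likelihood's own module and asks it where its data lives relative to the shared modules installation. A minimal sketch of that lookup, assuming (as the snippet does) that each likelihood module exposes a get_path(installation_path) helper; the names here are illustrative:

    from importlib import import_module

    def find_data_path(package, name, installation_path):
        # The likelihood module is assumed to expose get_path(installation_path),
        # mapping the shared modules folder to that likelihood's data folder.
        module = import_module(package + ".likelihoods." + name, package=package)
        return module.get_path(installation_path)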
Example #2
 def initialise(self):
     self.l_max = self.l_max or int(50000 * self.acc)
     # If no path specified, use the modules path
     if self.path:
         data_file_path = self.path
     else:
         path_to_installation = get_path_to_installation()
         if not path_to_installation:
             self.log.error(
                 "No path given to the DES data. Set the likelihood property "
                 "'path' or the common property '%s'.", _path_install)
             raise HandledException
         data_file_path = os.path.join(path_to_installation, "data/des_1yr")
     data_file = os.path.join(data_file_path, self.data_file)
     try:
         if data_file.endswith(".fits"):
             if self.dataset_params:
                 self.log.error("'dataset_params' can only be specified "
                                "for .dataset (not .fits) file.")
                 raise HandledException
             self.load_fits_data(data_file)
         else:
             self.load_dataset(data_file, self.dataset_params)
     except IOError:
         self.log.error(
             "The data file '%s' could not be found at '%s'. "
             "Check your paths!", self.data_file, data_file_path)
         raise HandledException
     self.initialize_postload()
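
The path-resolution fallback used (and fixed) above can be isolated into a small helper. A minimal sketch with hypothetical names: prefer an explicit path, fall back to the shared modules installation, and fail loudly if neither is available:

    import os

    def resolve_data_path(explicit_path, installation_path, subdir="data/des_1yr"):
        # An explicit likelihood 'path' wins; otherwise fall back to the modules path.
        if explicit_path:
            return explicit_path
        if installation_path:
            return os.path.join(installation_path, subdir)
        raise ValueError("No path given and no modules installation found.")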
Example #3
 def initialise(self):
     # Importing Planck's clik library (only once!)
     try:
         clik
     except NameError:
         if not self.path:
             path_to_installation = get_path_to_installation()
             if path_to_installation:
                 self.path_clik = os.path.join(path_to_installation, "code",
                                               common_path)
                 self.path_data = os.path.join(path_to_installation, "data",
                                               common_path)
             else:
                 self.log.error(
                     "No path given to the Planck likelihood. Set the "
                     "likelihood property 'path' or the common property "
                     "'%s'.", _path_install)
                 raise HandledException
         else:
             self.path_clik = self.path
             # Assume the .clik data also lives under the given path
             # (self.path_data is used unconditionally below)
             self.path_data = self.path
         self.log.info("Importing clik from %s", self.path_clik)
         # test and import clik
         is_installed_clik(self.path_clik,
                           log_and_fail=True,
                           import_it=False)
         import clik
     # Loading the likelihood data
     clik_file = os.path.join(self.path_data, self.clik_file)
     # for lensing, some routines change. Initializing a flag for easier
     # testing of this condition
     self.lensing = "lensing" in self.name
     try:
         self.clik = (clik.clik_lensing(clik_file)
                      if self.lensing else clik.clik(clik_file))
     except clik.lkl.CError:
         self.log.error(
             "The .clik file was not found where specified in the 'clik_file' field "
             "of the settings of this likelihood. Maybe the 'path' given is not "
             "correct? The full path where the .clik file was searched for is '%s'",
             clik_file)
         raise HandledException
     self.expected_params = list(self.clik.extra_parameter_names)
     # line added to deal with a bug in planck likelihood release:
     # A_planck called A_Planck in plik_lite
     if "plikHM_lite" in self.name:
         i = self.expected_params.index('A_Planck')
         self.expected_params[i] = 'A_planck'
         self.log.info(
             "Corrected nuisance parameter name A_Planck to A_planck")
     # Check that the parameters are the right ones
     assert set(self.input_params.keys()) == set(self.expected_params), (
         "Likelihood parameters do not coincide with the ones clik understands."
     )
     # Placeholder for vector passed to clik
     self.l_maxs = self.clik.get_lmax()
     length = (len(self.l_maxs)
               if self.lensing else len(self.clik.get_has_cl()))
     self.vector = np.zeros(
         np.sum(self.l_maxs) + length + len(self.expected_params))
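
The placeholder vector sized at the end packs, for each spectrum, the multipoles 0..lmax (hence lmax + 1 entries), followed by one entry per nuisance parameter. A minimal sketch of that arithmetic with illustrative, assumed numbers:

    import numpy as np

    l_maxs = [2508, 1996, 1996]  # assumed lmax per spectrum
    n_nuisance = 2               # assumed number of nuisance parameters
    # sum(l_maxs) C_l entries, plus the l = 0 entry of each spectrum, plus nuisances:
    vector = np.zeros(np.sum(l_maxs) + len(l_maxs) + n_nuisance)
    assert vector.size == sum(l + 1 for l in l_maxs) + n_nuisance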
Example #4
 def initialise(self):
     """Importing CLASS from the correct path, if given, and if not, globally."""
     # If path not given, try using general path to modules
     path_to_installation = get_path_to_installation()
     if not self.path and path_to_installation:
         self.path = os.path.join(path_to_installation, "code",
                                  classy_repo_rename)
     if self.path:
         self.log.info("Importing *local* classy from " + self.path)
         classy_build_path = os.path.join(self.path, "python", "build")
         # Check that the build folder exists before listing it
         if not os.path.exists(classy_build_path):
             self.log.error(
                 "Either CLASS is not in the given folder, "
                 "'%s', or you have not compiled it.", self.path)
             raise HandledException
         post = next(d for d in os.listdir(classy_build_path)
                     if d.startswith("lib."))
         classy_build_path = os.path.join(classy_build_path, post)
         # Inserting the previously found path into the list of import folders
         sys.path.insert(0, classy_build_path)
     else:
         self.log.info("Importing *global* CLASS.")
     # Propagate errors up: declare the exception names global *before* the
     # local import below binds them (otherwise this is a SyntaxError)
     global CosmoComputationError, CosmoSevereError
     try:
         from classy import Class, CosmoSevereError, CosmoComputationError
     except ImportError:
         self.log.error(
             "Couldn't find the CLASS python interface. "
             "Make sure that you have compiled it, and that you either\n"
             " (a) specify a path (you didn't) or\n"
             " (b) install the Python interface globally with\n"
             "     '/path/to/class/python/python setup.py install --user'")
         raise HandledException
     self.classy = Class()
     # Generate states, to avoid recomputing
     self.n_states = 3
     self.states = [{
         "params": None,
         "derived": None,
         "derived_extra": None,
         "last": 0
     } for i in range(self.n_states)]
     # Dict of named tuples to collect requirements and computation methods
     self.collectors = {}
     # Additional input parameters to pass to CLASS
     self.extra_args = self.extra_args or {}
     self.extra_args["output"] = self.extra_args.get("output", "")
     if "sBBN file" in self.extra_args:
         self.extra_args["sBBN file"] = (os.path.join(
             self.path, self.extra_args["sBBN file"]))
     # Derived parameters that may not have been requested, but will be necessary later
     self.derived_extra = []
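
The local-import dance above (prepend the platform-specific build directory to sys.path, then import) works for any locally compiled Python extension. A minimal sketch under that assumption, using the CLASS-like "python/build/lib.*" layout the snippet expects:

    import os
    import sys

    def add_local_build_to_path(path):
        # e.g. path/python/build/lib.linux-x86_64-3.10
        build_path = os.path.join(path, "python", "build")
        post = next(d for d in os.listdir(build_path) if d.startswith("lib."))
        sys.path.insert(0, os.path.join(build_path, post))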
Example #5
 def initialise(self):
     # If no path specified, use the modules path
     if getattr(self, "path", None):
         data_file_path = self.path
     else:
         path_to_installation = get_path_to_installation()
         if not path_to_installation:
             self.log.error("No path given to BAO data. Set the likelihood property "
                            "'path' or the common property '%s'.", _path_install)
             raise HandledException
         data_file_path = os.path.join(path_to_installation, "data/sdss_dr12")
     # Rescaling by a fiducial value of the sound horizon
     if self.rs_fid is None:
         self.rs_fid = 1
     # Load "measurements file" and covmat of requested
     try:
         self.data = pd.read_csv(os.path.join(data_file_path, self.measurements_file),
                                 header=None, index_col=None, sep="\s+", comment="#")
     except IOError:
         self.log.error("Couldn't find measurements file '%s' in folder '%s'. "%(
             self.measurements_file, data_file_path) + "Check your paths.")
         raise HandledException
     # Columns: z value [err] [type]
     self.has_type = self.data.iloc[:, -1].dtype == np.dtype("O")
     assert self.has_type  # mandatory for now!
     self.has_err = len(self.data.columns) > 2 and self.data.iloc[:, 2].dtype == np.float64
     assert not self.has_err  # not supported for now!
     self.data.columns = ["z", "value", "observable"]
     prefix = "bao_"
     self.data["observable"] = [(c[len(prefix):] if c.startswith(prefix) else c)
                                for c in self.data["observable"]]
     # Covariance --> read and re-sort as self.data
     try:
         if hasattr(self, "cov_file"):
             self.cov = np.loadtxt(os.path.join(data_file_path, self.cov_file))
         elif hasattr(self, "invcov_file"):
             invcov = np.loadtxt(os.path.join(data_file_path, self.invcov_file))
             self.cov = np.linalg.inv(invcov)
         else:
             raise NotImplementedError("Manual errors not implemented yet.")
             # self.cov = np.diag(ERROR_HERE**2)
     except IOError:
         self.log.error("Couldn't find (inv)cov file '%s' in folder '%s'. "
                        "Check your paths.",
                        getattr(self, "cov_file", getattr(self, "invcov_file", None)),
                        data_file_path)
         raise HandledException
     self.norm = multivariate_normal(mean=self.data["value"].values, cov=self.cov)
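
The frozen multivariate_normal stored at the end is what the likelihood would later evaluate against theory predictions at the data redshifts. A minimal sketch with made-up numbers (the real mean and covariance come from the files loaded above):

    import numpy as np
    from scipy.stats import multivariate_normal

    data_values = np.array([10.0, 20.0])  # assumed measured BAO values
    cov = np.diag([0.5, 0.8])             # assumed covariance
    norm = multivariate_normal(mean=data_values, cov=cov)
    theory = np.array([10.1, 19.7])       # assumed theory predictions
    loglike = norm.logpdf(theory)         # Gaussian log-likelihood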
Example #6
    def initialise(self):
        """Imports the PolyChord sampler and prepares its arguments."""
        if not get_mpi_rank():  # rank = 0 (MPI master) or None (no MPI)
            self.log.info("Initializing")
        # If path not given, try using general path to modules
        path_to_installation = get_path_to_installation()
        if not self.path and path_to_installation:
            self.path = os.path.join(path_to_installation, "code",
                                     pc_repo_name)
        if self.path:
            if not get_mpi_rank():
                self.log.info("Importing *local* PolyChord from " + self.path)
            pc_py_path = os.path.join(self.path, "PyPolyChord")
            pc_build_path = os.path.join(self.path, "build")
            # Check that the build folder exists before listing it
            if not os.path.exists(pc_build_path):
                self.log.error(
                    "Either PolyChord is not in the given folder, "
                    "'%s', or you have not compiled it.", self.path)
                raise HandledException
            post = next(d for d in os.listdir(pc_build_path)
                        if d.startswith("lib."))
            pc_build_path = os.path.join(pc_build_path, post)
            # Inserting the previously found path into the list of import folders
            sys.path.insert(0, pc_build_path)
            sys.path.insert(0, pc_py_path)
        else:
            self.log.info("Importing *global* PolyChord.")
        try:
            import PyPolyChord
            from PyPolyChord.settings import PolyChordSettings
        except ImportError:
            self.log.error(
                "Couldn't find the PolyChord python interface. "
                "Make sure that you have compiled it, and that you either\n"
                " (a) specify a path (you didn't) or\n"
                " (b) install the Python interface globally with\n"
                "     '/path/to/PolyChord/python setup.py install --user'")
            raise HandledException
        self.pc = PyPolyChord
        # Prepare arguments and settings
        self.nDims = self.prior.d()
        self.nDerived = (len(self.parametrization.derived_params()) + 1 +
                         len(self.likelihood._likelihoods))
        self.pc_settings = PolyChordSettings(self.nDims, self.nDerived)
        for p in [
                "nlive", "num_repeats", "nprior", "do_clustering",
                "precision_criterion", "max_ndead", "boost_posterior",
                "feedback", "update_files", "posteriors", "equals",
                "cluster_posteriors", "write_resume", "read_resume",
                "write_stats", "write_live", "write_dead", "base_dir",
                "grade_frac", "grade_dims"
        ]:
            v = getattr(self, p)
            if v is not None:
                setattr(self.pc_settings, p, v)
        # Fill the automatic ones
        if getattr(self, "feedback", None) is None:
            values = {
                logging.CRITICAL: 0,
                logging.ERROR: 0,
                logging.WARNING: 0,
                logging.INFO: 1,
                logging.DEBUG: 2
            }
            self.pc_settings.feedback = values[self.log.getEffectiveLevel()]
        try:
            output_folder = getattr(self.output, "folder")
            output_prefix = getattr(self.output, "prefix") or "pc"
        except AttributeError:
            # dummy output -- no resume!
            from tempfile import gettempdir
            output_folder = gettempdir()
            from random import random
            output_prefix = hex(int(random() * 16**6))[2:]
            self.pc_settings.read_resume = False
        self.pc_settings.base_dir = os.path.join(output_folder,
                                                 self.pc_settings.base_dir)
        self.pc_settings.file_root = output_prefix
        if not get_mpi_rank():
            # Creating output folder, if it does not exist (just one process)
            if not os.path.exists(self.pc_settings.base_dir):
                os.makedirs(self.pc_settings.base_dir)
            # Idem, a clusters folder if needed -- notice that PolyChord's default
            # is "True", here "None", hence the funny condition below
            if self.pc_settings.do_clustering is not False:  # None here means "default"
                try:
                    os.makedirs(
                        os.path.join(self.pc_settings.base_dir, clusters))
                except OSError:  # exists!
                    pass
            self.log.info("Storing raw PolyChord output in '%s'.",
                          self.pc_settings.base_dir)
        # Exploiting the speed hierarchy:
        # sort blocks by parameter order and check contiguity (required by PolyChord!)
        # speeds, blocks = zip(*self.likelihood.speed_blocked_params(as_indices=True))
        # speeds, blocks = np.array(speeds), np.array(blocks)
        # np.argsort behaves oddly when there is only 1 block:
        # if len(blocks) > 1:
        #     sorting_indices = np.argsort(blocks, axis=0)
        # else:
        #     sorting_indices = [0]
        # speeds, blocks = speeds[sorting_indices], blocks[sorting_indices]
        # if np.all([np.all(block == range(block[0], block[-1] + 1)) for block in blocks]):
        self.log.warning("Speed hierarchy exploitation disabled for now!")
        #     self.pc_args["grade_frac"] = list(speeds)
        #     self.pc_args["grade_dims"] = [len(block) for block in blocks]
        #     self.log.info("Exploiting a speed hierarchy with speeds %r and blocks %r",
        #                   speeds, blocks)
        # else:
        #     self.log.warning("Some speed blocks are not contiguous: PolyChord cannot "
        #                      "deal with the speed hierarchy. Not exploiting it.")
        # prior conversion from the hypercube
        bounds = self.prior.bounds(
            confidence_for_unbounded=self.confidence_for_unbounded)
        # Check if priors are bounded (nan's to inf)
        inf = np.where(np.isinf(bounds))
        if len(inf[0]):
            params_names = self.prior.names()
            params = [params_names[i] for i in sorted(list(set(inf[0])))]
            self.log.error(
                "PolyChord needs bounded priors, but the parameter(s) '" +
                "', '".join(params) + "' is(are) unbounded.")
            raise HandledException
        locs = bounds[:, 0]
        scales = bounds[:, 1] - bounds[:, 0]
        self.pc_prior = lambda x: (locs + np.array(x) * scales).tolist()
        # We will need the volume of the prior domain, since PolyChord divides by it
        self.logvolume = np.log(np.prod(scales))
        # Done!
        if not get_mpi_rank():
            self.log.info("Calling PolyChord with arguments:")
            for p, v in inspect.getmembers(self.pc_settings,
                                           lambda a: not (callable(a))):
                if not p.startswith("_"):
                    self.log.info("  %s: %s", p, v)
Example #7
    def initial_proposal_covmat(self):
        """
        Build the initial covariance matrix, using the data provided, in descending order
        of priority:
        1. "covmat" field in the "mcmc" sampler block.
        2. "proposal" field for each parameter.
        3. variance of the reference pdf.
        4. variance of the prior pdf.

        The covariances between parameters when both are present in a covariance matrix
        provided through option 1 are preserved. All other covariances are assumed 0.
        """
        params, params_infos = zip(
            *self.parametrization.sampled_params().items())
        covmat = np.diag([np.nan] * len(params))
        # If given, load and test the covariance matrix
        if isinstance(self.covmat, six.string_types):
            covmat_pre = "MODULES:"
            if self.covmat.startswith(covmat_pre):
                self.covmat = os.path.join(get_path_to_installation(),
                                           self.covmat[len(covmat_pre):])
            try:
                with open(self.covmat, "r") as file_covmat:
                    header = file_covmat.readline()
                loaded_covmat = np.loadtxt(self.covmat)
            except TypeError:
                self.log.error(
                    "The property 'covmat' must be a file name, "
                    "but it's '%s'.", str(self.covmat))
                raise HandledException
            except IOError:
                self.log.error("Can't open covmat file '%s'.", self.covmat)
                raise HandledException
            if header[0] != "#":
                self.log.error(
                    "The first line of the covmat file '%s' "
                    "must be one list of parameter names separated by spaces "
                    "and staring with '#', and the rest must be a square matrix, "
                    "with one row per line.", self.covmat)
                raise HandledException
            loaded_params = header.strip("#").strip().split()
        elif hasattr(self.covmat, "__getitem__"):
            if not self.covmat_params:
                self.log.error(
                    "If a covariance matrix is passed as a numpy array, "
                    "you also need to pass the parameters it corresponds to "
                    "via 'covmat_params: [name1, name2, ...]'.")
                raise HandledException
            loaded_params = self.covmat_params
            loaded_covmat = self.covmat
        if self.covmat is not None:
            if len(loaded_params) != len(set(loaded_params)):
                self.log.error(
                    "There are duplicated parameters in the header of the "
                    "covmat file '%s' ", self.covmat)
                raise HandledException
            if len(loaded_params) != loaded_covmat.shape[0]:
                self.log.error(
                    "The number of parameters in the header of '%s' and the "
                    "dimensions of the matrix do not coincide.", self.covmat)
                raise HandledException
            if not (np.allclose(loaded_covmat.T, loaded_covmat)
                    and np.all(np.linalg.eigvals(loaded_covmat) > 0)):
                self.log.error(
                    "The covmat loaded from '%s' is not a positive-definite, "
                    "symmetric square matrix.", self.covmat)
                raise HandledException
            # Fill with parameters in the loaded covmat
            aliases = [[p] + np.atleast_1d(v.get(_p_alias, [])).tolist()
                       for p, v in zip(params, params_infos)]
            aliases = odict([[a[0], a] for a in aliases])
            indices_used, indices_sampler = zip(*[[
                loaded_params.index(p),
                [params.index(q) for q, a in aliases.items() if p in a]
            ] for p in loaded_params])
            indices_used, indices_sampler = zip(
                *[[i, j] for i, j in zip(indices_used, indices_sampler) if j])
            if any(len(j) - 1 for j in indices_sampler):
                first = next(j for j in indices_sampler if len(j) > 1)
                self.log.error(
                    "The parameters %s have duplicated aliases. Can't assign them an "
                    "element of the covariance matrix unambiguously.",
                    ", ".join([params[i] for i in first]))
                raise HandledException
            indices_sampler = list(chain(*indices_sampler))
            if not indices_used:
                self.log.error(
                    "A proposal covariance matrix has been loaded, but none of its "
                    "parameters are actually sampled here. Maybe a mismatch between"
                    " parameter names in the covariance matrix and the input file?"
                )
                raise HandledException
            covmat[np.ix_(indices_sampler,
                          indices_sampler)] = (loaded_covmat[np.ix_(
                              indices_used, indices_used)])
            self.log.info("Covariance matrix loaded for params %r",
                          [params[i] for i in indices_sampler])
            missing_params = set(params).difference(
                set([params[i] for i in indices_sampler]))
            if missing_params:
                self.log.info("Missing proposal covarince for params %r", [
                    p for p in self.parametrization.sampled_params()
                    if p in missing_params
                ])
            else:
                self.log.info(
                    "All parameters' covariance loaded from given covmat.")
        # Fill gaps with "proposal" property, if present, otherwise ref (or prior)
        where_nan = np.isnan(covmat.diagonal())
        if np.any(where_nan):
            covmat[where_nan, where_nan] = np.array([
                info.get(_p_proposal, np.nan)**2 for info in params_infos
            ])[where_nan]
            # we want to start learning the covmat earlier
            self.log.info(
                "Covariance matrix " +
                ("not present" if np.all(where_nan) else "not complete") + ". "
                "We will start learning the covariance of the proposal earlier: "
                "R-1 = %g (was %g).", self.learn_proposal_Rminus1_max_early,
                self.learn_proposal_Rminus1_max)
            self.learn_proposal_Rminus1_max = self.learn_proposal_Rminus1_max_early
        where_nan = np.isnan(covmat.diagonal())
        if np.any(where_nan):
            covmat[where_nan, where_nan] = (
                self.prior.reference_covmat().diagonal()[where_nan])
        assert not np.any(np.isnan(covmat))
        return covmat
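
The np.ix_ remapping in the middle is the core trick: covariances loaded for a (possibly reordered) subset of parameters are copied into the matching rows and columns of the full proposal covmat, and everything else stays NaN until filled from the 'proposal'/reference/prior variances. A minimal sketch with hypothetical parameters:

    import numpy as np

    params = ["a", "b", "c"]     # sampled parameters
    loaded_params = ["c", "a"]   # order in the covmat file
    loaded_covmat = np.array([[4.0, 0.1],
                              [0.1, 1.0]])
    covmat = np.diag([np.nan] * len(params))
    indices_used = [0, 1]        # columns used from the file
    indices_sampler = [params.index(p) for p in loaded_params]
    covmat[np.ix_(indices_sampler, indices_sampler)] = (
        loaded_covmat[np.ix_(indices_used, indices_used)])
    # covmat now has var(c)=4.0, var(a)=1.0, cov(a,c)=0.1; var(b) is still NaN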
Example #8
 def initialise(self):
     def relative_path(tag):
         return ini.relativeFileName(tag).replace('data/', '').replace('Pantheon/', '')
     # has_absdist = F, intrinsicdisp=0, idispdataset=False
     if not self.path:
         path_to_installation = get_path_to_installation()
         if path_to_installation:
             from importlib import import_module
             self.path = getattr(
                 import_module(package + ".likelihoods." + self.name, package=package),
                 "get_path")(path_to_installation)
         else:
             self.log.error("No path given to the %s likelihood. Set the likelihood"
                            " property 'path' or the common property '%s'.",
                            self.dataset_file, _path_install)
             raise HandledException
     self.dataset_file_path = os.path.join(self.path, self.dataset_file)
     self.log.info("Reading data from %s", self.dataset_file_path)
     if not os.path.exists(self.dataset_file_path):
         self.log.error("The likelihood is not installed in the given path: "
                        "cannot find the file '%s'.", self.dataset_file_path)
         raise HandledException
     ini = IniFile(self.dataset_file_path)
     ini.params.update(self.dataset_params)
     self.twoscriptmfit = ini.bool('twoscriptmfit')
     if self.twoscriptmfit:
         scriptmcut = ini.float('scriptmcut', 10.)
     assert not ini.float('intrinsicdisp', 0) and not ini.float('intrinsicdisp0', 0)
     if hasattr(self, "alpha_beta_names"):
         self.alpha_name = self.alpha_beta_names[0]
         self.beta_name = self.alpha_beta_names[1]
     self.pecz = ini.float('pecz', 0.001)
     cols = None
     self.has_third_var = False
     data_file = os.path.join(self.path, self.dataset_params["data_file"])
     self.log.debug('Reading %s', data_file)
     supernovae = {}
     self.names = []
     ix = 0
     with io.open(data_file, 'r') as f:
         lines = f.readlines()
         for line in lines:
             if '#' in line:
                 cols = line[1:].split()
                 for rename, new in zip(
                         ['mb', 'color', 'x1', '3rdvar', 'd3rdvar',
                          'cov_m_s', 'cov_m_c', 'cov_s_c'],
                         ['mag', 'colour', 'stretch', 'third_var',
                          'dthird_var', 'cov_mag_stretch',
                          'cov_mag_colour', 'cov_stretch_colour']):
                     if rename in cols:
                         cols[cols.index(rename)] = new
                 self.has_third_var = 'third_var' in cols
                 zeros = np.zeros(len(lines) - 1)
                 self.third_var = zeros.copy()
                 self.dthird_var = zeros.copy()
                 self.set = zeros.copy()
                 for col in cols:
                     setattr(self, col, zeros.copy())
             elif line.strip():
                 if cols is None:
                     self.log.error('Data file must have comment header')
                     raise HandledException
                 vals = line.split()
                 for i, (col, val) in enumerate(zip(cols, vals)):
                     if col == 'name':
                         supernovae[val] = ix
                         self.names.append(val)
                     else:
                         getattr(self, col)[ix] = np.float64(val)
                 ix += 1
     self.z_var = self.dz ** 2
     self.mag_var = self.dmb ** 2
     self.stretch_var = self.dx1 ** 2
     self.colour_var = self.dcolor ** 2
     self.thirdvar_var = self.dthird_var ** 2
     self.nsn = ix
     self.log.debug('Number of SN read: %s', self.nsn)
     if self.twoscriptmfit and not self.has_third_var:
         self.log.error('twoscriptmfit was set but thirdvar information not present')
         raise HandledException
     if ini.bool('absdist_file'):
         self.log.error('absdist_file not supported')
         raise HandledException
     covmats = [
         'mag', 'stretch', 'colour', 'mag_stretch', 'mag_colour', 'stretch_colour']
     self.covs = {}
     for name in covmats:
         if ini.bool('has_%s_covmat' % name):
             self.log.debug('Reading covmat for: %s', name)
             self.covs[name] = self._read_covmat(
                 os.path.join(self.path, self.dataset_params['%s_covmat_file'%name]))
     self.alphabeta_covmat = (len(self.covs) > 1 or
                              self.covs.get('mag', None) is None)
     self._last_alpha = np.inf
     self._last_beta = np.inf
     self.marginalize = getattr(self, "marginalize", False)
     assert self.covs
     # jla_prep
     zfacsq = 25.0 / np.log(10.0) ** 2
     self.pre_vars = self.mag_var + zfacsq * self.pecz ** 2 * (
         (1.0 + self.zcmb) / (self.zcmb * (1 + 0.5 * self.zcmb))) ** 2
     if self.twoscriptmfit:
         A1 = np.zeros(self.nsn)
         A2 = np.zeros(self.nsn)
         A1[self.third_var <= scriptmcut] = 1
         A2[self.third_var > scriptmcut] = 1
         has_A1 = np.any(A1)
         has_A2 = np.any(A2)
         if not has_A1:
             # swap
             A1 = A2
             A2 = np.zeros(self.nsn)
             has_A2 = False
         if not has_A2:
             self.twoscriptmfit = False
         self.A1 = A1
         self.A2 = A2
     if self.marginalize:
         self.step_width_alpha = self.marginalize_params['step_width_alpha']
         self.step_width_beta = self.marginalize_params['step_width_beta']
         _marge_steps = self.marginalize_params['marge_steps']
         self.alpha_grid = np.empty((2 * _marge_steps + 1) ** 2)
         self.beta_grid = self.alpha_grid.copy()
         _int_points = 0
         for alpha_i in range(-_marge_steps, _marge_steps + 1):
             for beta_i in range(-_marge_steps, _marge_steps + 1):
                 if alpha_i ** 2 + beta_i ** 2 <= _marge_steps ** 2:
                     self.alpha_grid[_int_points] = (
                         self.marginalize_params['alpha_centre'] +
                         alpha_i * self.step_width_alpha)
                     self.beta_grid[_int_points] = (
                         self.marginalize_params['beta_centre'] +
                         beta_i * self.step_width_beta)
                     _int_points += 1
         self.log.debug('Marginalizing alpha, beta over %s points', _int_points)
         self.marge_grid = np.empty(_int_points)
         self.int_points = _int_points
         self.alpha_grid = self.alpha_grid[:_int_points]
         self.beta_grid = self.beta_grid[:_int_points]
         self.invcovs = np.empty(_int_points, dtype=object)
         if self.precompute_covmats:
             for i, (alpha, beta) in enumerate(zip(self.alpha_grid, self.beta_grid)):
                 self.invcovs[i] = self.inverse_covariance_matrix(alpha, beta)
     elif not self.alphabeta_covmat:
         self.inverse_covariance_matrix()
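
The marginalization grid at the end keeps only the (alpha, beta) offsets inside a disc of radius marge_steps around the centre, rather than the full square. A minimal sketch of that construction with assumed settings:

    marge_steps = 2                        # assumed number of steps per side
    step_alpha, step_beta = 0.1, 0.2       # assumed step widths
    centre_alpha, centre_beta = 0.14, 3.1  # assumed centres
    points = [(centre_alpha + i * step_alpha, centre_beta + j * step_beta)
              for i in range(-marge_steps, marge_steps + 1)
              for j in range(-marge_steps, marge_steps + 1)
              if i ** 2 + j ** 2 <= marge_steps ** 2]  # circular region only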