Example #1
def test_data():
    test_config_path = find_file('configs/main.ini')

    # Read the main config file
    main_config = configparser.ConfigParser()
    main_config.optionxform = lambda option: option
    main_config.read(test_config_path)
    ini_files = main_config['data sets'].get('ini files').split()

    # Initialize the individual components and test each dataset
    for path in ini_files:
        config = configparser.ConfigParser()
        config.optionxform = lambda option: option
        config.read(find_file(path))

        corr_item = correlation_item.CorrelationItem(config)

        data = Data(corr_item)
        hdul = fits.open(find_file(config['data']['filename']))

        assert np.allclose(data.data_vec, hdul[1].data['DA'])

        rp_rt_grid = corr_item.rp_rt_grid
        assert np.allclose(rp_rt_grid[0], hdul[1].data['RP'])
        assert np.allclose(rp_rt_grid[1], hdul[1].data['RT'])
        assert np.allclose(corr_item.z_grid, hdul[1].data['Z'])

        hdul.close()

        assert data.masked_data_vec is not None
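All of these examples resolve paths through find_file. As a rough sketch only (not vega's actual implementation), such a helper could look like the following, assuming a path is returned as-is when it exists and is otherwise resolved relative to the package directory:

from pathlib import Path

def find_file(path):
    # Hypothetical sketch: return the path unchanged if it exists,
    # otherwise try to resolve it relative to the package directory.
    candidate = Path(path)
    if candidate.is_file():
        return candidate
    candidate = Path(__file__).resolve().parent / path
    if candidate.is_file():
        return candidate
    raise RuntimeError('Could not find file: {}'.format(path))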
Example #2
def build_names(params):
    latex_names_path = 'vega/postprocess/latex_names.txt'
    latex_names_path = find_file(latex_names_path)
    latex_full = get_latex(latex_names_path)

    latex_comp_path = 'vega/postprocess/latex_composite.txt'
    latex_comp_path = find_file(latex_comp_path)
    latex_comp = get_latex(latex_comp_path)

    latex_names = {}
    for name in params:
        if name in latex_full:
            latex_names[name] = latex_full[name]
        else:
            tracer = None
            for subset in latex_comp:
                if subset.lower() in name.lower():
                    tracer = subset

            comp_par = None
            for comp in composites:
                if comp in name:
                    comp_par = comp

            if tracer is not None and comp_par is not None:
                comp_name = comp_par + '_' + tracer
                latex_names[comp_name] = composites[comp_par] \
                    + latex_comp[tracer] + r'}'
            elif comp_par is not None:
                print('Warning: No latex name found for tracer: %s. If you'
                      ' want plots to work well, add a latex name to'
                      ' latex_composite.txt' % name[len(comp_par) + 1:])
                latex_names[name] = composites[comp_par] \
                    + name[len(comp_par) + 1:] + r'}'
            else:
                print('Warning! No latex name found for %s. Add the latex'
                      ' representation to latex_names.txt.' % name)
                latex_names[name] = name

    return latex_names
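A minimal usage sketch for build_names, assuming the function is importable and that the module-level composites dict maps composite prefixes (e.g. 'beta', 'bias_eta') to opening LaTeX fragments; the parameter names below are chosen purely for illustration:

# Hypothetical call; the parameter names are illustrative only.
params = ['ap', 'at', 'bias_eta_LYA', 'beta_QSO']
latex_names = build_names(params)
for par, latex in latex_names.items():
    print(par, '->', latex)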
Example #3
def get_default_values():
    values_path = 'vega/postprocess/default_values.txt'
    with open(find_file(values_path)) as f:
        content = f.readlines()

    values = {}
    for line in content:
        line = line.strip()
        # Skip empty lines and comment lines
        if not line or line.startswith('#'):
            continue

        items = line.split()
        values[items[0]] = {}
        values[items[0]]['limits'] = (float(items[1]), float(items[2]))
        values[items[0]]['error'] = float(items[3])

    return values
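The parser above implies a simple layout for default_values.txt: lines starting with '#' are comments, and every other line holds four whitespace-separated fields (parameter name, lower limit, upper limit, error). The concrete values below are illustrative only:

# name   lower   upper   error
ap       0.1     2.0     0.1
at       0.1     2.0     0.1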
Example #4
def test_vega():
    hdul = fits.open(find_file('data/picca_bench_data.fits'))
    names = ['test_' + str(i) for i in range(8)]

    vega_auto = VegaInterface(
        'examples/picca_benchmarks/configs/vega/main.ini')

    vega_auto.fiducial['Omega_de'] = None
    xi_vega_auto = vega_auto.compute_model(run_init=True)

    vega_cross = VegaInterface(
        'examples/picca_benchmarks/configs/vega/main_cross.ini')
    vega_cross.fiducial['Omega_de'] = None
    xi_vega_cross = vega_cross.compute_model(run_init=True)

    for name in names:
        xi_picca_auto = np.array(hdul[1].data['auto_' + name])
        xi_picca_cross = np.array(hdul[2].data['cross_' + name])

        assert np.allclose(xi_vega_auto[name], xi_picca_auto)
        assert np.allclose(xi_vega_cross[name], xi_picca_cross)

    hdul.close()
Example #5
    def parameters(self, parameters):
        if self._params_template is None:
            # Read template
            config = ConfigParser()
            config.optionxform = lambda option: option
            template_path = find_file('vega/templates/parameters.ini')
            config.read(template_path)
            self._params_template = config['parameters']

        def get_par(name):
            if name not in parameters and name not in self._params_template:
                raise ValueError(
                    'Unknown parameter: {}, please pass a '
                    'default value.'.format(name))
            return parameters.get(name, self._params_template[name])

        new_params = {}

        # Scale parameters
        if self.options['scale_params'] == 'ap_at':
            new_params['ap'] = get_par('ap')
            new_params['at'] = get_par('at')
        elif self.options['scale_params'] == 'phi_gamma':
            new_params['phi'] = get_par('phi')
            new_params['gamma'] = get_par('gamma')
            if self.options['smooth_scaling']:
                new_params['phi_smooth'] = get_par('phi_smooth')
                new_params['gamma_smooth'] = get_par('gamma_smooth')
        elif self.options['scale_params'] == 'aiso_epsilon':
            new_params['aiso'] = get_par('aiso')
            new_params['1+epsilon'] = get_par('1+epsilon')
        else:
            raise ValueError('Unknown scale parameters: {}'.format(
                self.options['scale_params']))

        # Peak parameters
        if self.options['bao_broadening']:
            new_params['sigmaNL_per'] = get_par('sigmaNL_per')
            new_params['sigmaNL_par'] = get_par('sigmaNL_par')
        else:
            new_params['sigmaNL_per'] = 0.
            new_params['sigmaNL_par'] = 0.
        new_params['bao_amp'] = get_par('bao_amp')

        # bias beta model
        for name in self.corr_names:
            use_bias_eta = self.fit_info['use_bias_eta'].get(name, True)
            growth_rate = parameters.get('growth_rate', None)
            if growth_rate is None:
                growth_rate = self.get_growth_rate(self.zeff_in)

            if name == 'LYA':
                bias_lya = self.get_lya_bias(self.zeff_in)
                bias_eta_lya = parameters.get('bias_eta_LYA', None)
                beta_lya = float(get_par('beta_LYA'))

                if bias_eta_lya is None:
                    bias_eta_lya = bias_lya * beta_lya / growth_rate

                if use_bias_eta:
                    new_params['growth_rate'] = growth_rate
                    new_params['bias_eta_LYA'] = bias_eta_lya
                else:
                    new_params['bias_LYA'] = bias_lya
                new_params['beta_LYA'] = beta_lya
            elif name == 'QSO':
                bias_qso = self.get_qso_bias(self.zeff_in)
                beta_qso = parameters.get('beta_QSO', None)

                if beta_qso is None:
                    beta_qso = growth_rate / bias_qso

                if use_bias_eta:
                    new_params['growth_rate'] = growth_rate
                    new_params['bias_eta_QSO'] = 1.
                else:
                    new_params['bias_QSO'] = bias_qso
                new_params['beta_QSO'] = beta_qso
            else:
                raise ValueError('Tracer {} not supported yet.'.format(name))

            new_params['alpha_{}'.format(name)] = get_par(
                'alpha_{}'.format(name))

        # Small scale non-linear model
        if self.options['small_scale_nl']:
            new_params['dnl_arinyo_q1'] = get_par('dnl_arinyo_q1')
            new_params['dnl_arinyo_kv'] = get_par('dnl_arinyo_kv')
            new_params['dnl_arinyo_av'] = get_par('dnl_arinyo_av')
            new_params['dnl_arinyo_bv'] = get_par('dnl_arinyo_bv')
            new_params['dnl_arinyo_kp'] = get_par('dnl_arinyo_kp')

        # HCDs
        if self.options['hcd_model'] is not None:
            new_params['bias_hcd'] = get_par('bias_hcd')
            new_params['beta_hcd'] = get_par('beta_hcd')
            new_params['L0_hcd'] = get_par('L0_hcd')

        # Delta_rp
        if 'QSO' in self.corr_names:
            new_params['drp_QSO'] = get_par('drp_QSO')

        # Velocity dispersion parameters
        if self.options['velocity_dispersion'] is not None:
            if self.options['velocity_dispersion'] == 'lorentz':
                new_params['sigma_velo_disp_lorentz_QSO'] = get_par(
                    'sigma_velo_disp_lorentz_QSO')
            else:
                new_params['sigma_velo_disp_gauss_QSO'] = get_par(
                    'sigma_velo_disp_gauss_QSO')

        # QSO radiation effects
        if self.options['radiation_effects']:
            new_params['qso_rad_strength'] = get_par('qso_rad_strength')
            new_params['qso_rad_asymmetry'] = get_par('qso_rad_asymmetry')
            new_params['qso_rad_lifetime'] = get_par('qso_rad_lifetime')
            new_params['qso_rad_decrease'] = get_par('qso_rad_decrease')

        # UV background parameters
        if self.options['uv_background']:
            new_params['bias_gamma'] = get_par('bias_gamma')
            new_params['bias_prim'] = get_par('bias_prim')
            new_params['lambda_uv'] = get_par('lambda_uv')

        # Metals
        if self.options['metals'] is not None:
            for name in self.options['metals']:
                new_params['bias_eta_{}'.format(name)] = get_par(
                    'bias_eta_{}'.format(name))
                new_params['beta_{}'.format(name)] = get_par(
                    'beta_{}'.format(name))
                new_params['alpha_{}'.format(name)] = get_par(
                    'alpha_{}'.format(name))

        # Full-shape smoothing
        if self.options['fullshape_smoothing'] is not None:
            new_params['par_sigma_smooth'] = get_par('par_sigma_smooth')
            new_params['per_sigma_smooth'] = get_par('per_sigma_smooth')
            if self.options['fullshape_smoothing'] == 'exp':
                new_params['par_exp_smooth'] = get_par('par_exp_smooth')
                new_params['per_exp_smooth'] = get_par('per_exp_smooth')

        self._parameters = new_params
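The core of this setter is the fallback from user-supplied parameters to the template defaults. A standalone sketch of that pattern, with hypothetical values:

# Hypothetical template and user input; the values are illustrative only.
template = {'ap': '1.', 'at': '1.', 'bao_amp': '1.'}
user_input = {'ap': '1.05'}

def get_par(name):
    if name not in user_input and name not in template:
        raise ValueError('Unknown parameter: {}, please pass a '
                         'default value.'.format(name))
    return user_input.get(name, template[name])

print(get_par('ap'))       # '1.05' -- taken from the user input
print(get_par('bao_amp'))  # '1.'   -- falls back to the template default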
Example #6
    def _build_corr_config(self, name, corr_info):
        """Build config file for a correlation based on a template

        Parameters
        ----------
        name : string
            Name of the correlation. Must be the same as corresponding template file name
        corr_info : dict
            Correlation information. The paths to the data and metal files are required.

        Returns
        -------
        tuple
            Path to the written config file, path to the correlation data
            file, and the names of the two tracers.
        """
        # Read template
        config = ConfigParser()
        config.optionxform = lambda option: option
        template_path = find_file('vega/templates/{}.ini'.format(name))
        config.read(template_path)

        # get tracer info
        tracer1 = config['data']['tracer1']
        tracer2 = config['data']['tracer2']
        type1 = config['data']['tracer1-type']
        type2 = config['data']['tracer2-type']

        # Write the basic info
        config['data']['filename'] = corr_info.get('corr_path')
        config['cuts']['r-min'] = str(corr_info.get('r-min', 10))
        config['cuts']['r-max'] = str(corr_info.get('r-max', 180))

        if 'binsize' in corr_info:
            config['parameters'] = {}
            config['parameters']['par binsize {}'.format(name)] = str(
                corr_info.get('binsize', 4))
            config['parameters']['per binsize {}'.format(name)] = str(
                corr_info.get('binsize', 4))

        # Write the model options
        # Things that require both tracers to be LYA
        if tracer1 == 'LYA' and tracer2 == 'LYA':
            if self.options['small_scale_nl']:
                config['model']['small scale nl'] = 'dnl_arinyo'

        # Things that require at least one tracer to be continuous
        if type1 == 'continuous' or type2 == 'continuous':
            if self.options['uv_background']:
                config['model']['add uv'] = 'True'

            if self.options['hcd_model'] is not None:
                assert self.options['hcd_model'] in [
                    'mask', 'Rogers2018', 'sinc'
                ]
                config['model']['model-hcd'] = self.options['hcd_model']
                if self.options['hcd_model'] == 'mask':
                    config['model']['fvoigt_model'] = self.options[
                        'fvoigt_model']

            if self.options['metals'] is not None:
                config['metals'] = {}
                config['metals']['filename'] = corr_info.get('metal_path')
                config['metals']['z evol'] = 'bias_vs_z_std'
                if type1 == 'continuous':
                    config['metals']['in tracer1'] = ' '.join(
                        self.options['metals'])
                if type2 == 'continuous':
                    config['metals']['in tracer2'] = ' '.join(
                        self.options['metals'])

        # Things that require at least one discrete tracer
        if type1 == 'discrete' or type2 == 'discrete':
            if self.options['velocity_dispersion'] is not None:
                assert self.options['velocity_dispersion'] in [
                    'lorentz', 'gaussian'
                ]
                config['model']['velocity dispersion'] = self.options[
                    'velocity_dispersion']

                if self.options['metals'] is not None and type1 != type2:
                    config['metals']['velocity dispersion'] = self.options[
                        'velocity_dispersion']

        # Only for the LYA - QSO cross
        if 'LYA' in [tracer1, tracer2] and 'QSO' in [tracer1, tracer2]:
            if self.options['radiation_effects']:
                config['model']['radiation effects'] = 'True'

        # General things
        if self.options['fullshape_smoothing'] is not None:
            assert self.options['fullshape_smoothing'] in ['gauss', 'exp']
            config['model']['fullshape smoothing'] = self.options[
                'fullshape_smoothing']

        if self.name_extension is None:
            corr_path = self.config_path / '{}.ini'.format(name)
        else:
            corr_path = self.config_path / '{}-{}.ini'.format(
                name, self.name_extension)
        with open(corr_path, 'w') as configfile:
            config.write(configfile)

        return corr_path, config['data']['filename'], tracer1, tracer2
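For reference, the keys this method reads from corr_info, gathered in one illustrative dict (the paths are placeholders, not real files):

corr_info = {
    'corr_path': '/path/to/cf_exp.fits',       # required: correlation data file
    'metal_path': '/path/to/metal_dmat.fits',  # required when metals are enabled
    'r-min': 10,     # optional, defaults to 10
    'r-max': 180,    # optional, defaults to 180
    'binsize': 4,    # optional; writes the par/per binsize entries when present
}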
Example #7
    def _init_metals(self, metal_config):
        """Read the metal file and initialize all the metal data.

        Parameters
        ----------
        metal_config : ConfigParser
            metals section from the config file

        Returns
        -------
        dict
            Dictionary containing all tracer objects (metals and the core ones)
        list
            List of all metal correlations that need to be computed
        """
        assert ('in tracer1' in metal_config) or ('in tracer2' in metal_config)

        # Read metal tracers
        metals_in_tracer1 = None
        metals_in_tracer2 = None
        if 'in tracer1' in metal_config:
            metals_in_tracer1 = metal_config.get('in tracer1').split()
        if 'in tracer2' in metal_config:
            metals_in_tracer2 = metal_config.get('in tracer2').split()

        self.metal_mats = {}
        self.metal_rp_grids = {}
        self.metal_rt_grids = {}
        self.metal_z_grids = {}

        # Build tracer Catalog
        tracer_catalog = {}
        tracer_catalog[self._tracer1['name']] = self._tracer1
        tracer_catalog[self._tracer2['name']] = self._tracer2

        if metals_in_tracer1 is not None:
            for metal in metals_in_tracer1:
                tracer_catalog[metal] = {'name': metal, 'type': 'continuous'}

        if metals_in_tracer2 is not None:
            for metal in metals_in_tracer2:
                tracer_catalog[metal] = {'name': metal, 'type': 'continuous'}

        # Read the metal file
        metal_hdul = fits.open(find_file(metal_config.get('filename')))

        metal_correlations = []
        # First look for correlations between tracer1 and metals
        if 'in tracer2' in metal_config:
            for metal in metals_in_tracer2:
                if not self._use_correlation(self._tracer1['name'], metal):
                    continue
                tracers = (self._tracer1['name'], metal)
                name = self._tracer1['name'] + '_' + metal
                if 'RP_' + name not in metal_hdul[2].columns.names:
                    name = metal + '_' + self._tracer1['name']
                self._read_metal_correlation(metal_hdul, tracers, name)
                metal_correlations.append(tracers)

        # Then look for correlations between metals and tracer2
        # If we have an auto-cf the files are saved in the format tracer-metal
        if 'in tracer1' in metal_config:
            for metal in metals_in_tracer1:
                if not self._use_correlation(metal, self._tracer2['name']):
                    continue
                tracers = (metal, self._tracer2['name'])
                name = metal + '_' + self._tracer2['name']
                if 'RP_' + name not in metal_hdul[2].columns.names:
                    name = self._tracer2['name'] + '_' + metal
                self._read_metal_correlation(metal_hdul, tracers, name)
                metal_correlations.append(tracers)

        # Finally look for metal-metal correlations
        # Some files store the pair in the reverse order,
        # so swap the names if the column is not found
        if ('in tracer1' in metal_config) and ('in tracer2' in metal_config):
            for i, metal1 in enumerate(metals_in_tracer1):
                j0 = i if self._tracer1 == self._tracer2 else 0

                for metal2 in metals_in_tracer2[j0:]:
                    if not self._use_correlation(metal1, metal2):
                        continue
                    tracers = (metal1, metal2)
                    name = metal1 + '_' + metal2

                    if 'RP_' + name not in metal_hdul[2].columns.names:
                        name = metal2 + '_' + metal1
                    self._read_metal_correlation(metal_hdul, tracers, name)
                    metal_correlations.append(tracers)

        metal_hdul.close()

        return tracer_catalog, metal_correlations
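A minimal sketch of the metals config section this method expects, built with ConfigParser; the metal names and the path are illustrative assumptions:

from configparser import ConfigParser

config = ConfigParser()
config.optionxform = lambda option: option
config['metals'] = {
    'filename': '/path/to/metal_dmat.fits',  # placeholder path
    'in tracer1': 'SiII(1190) SiII(1193)',   # illustrative metal names
    'in tracer2': 'SiII(1190) SiII(1193)',
}
metal_config = config['metals']
# metal_config can then be passed to _init_metals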
Example #8
    def _read_data(self, data_path, cuts_config):
        """Read the data, mask it and prepare the environment.

        Parameters
        ----------
        data_path : string
            Path to fits data file
        cuts_config : ConfigParser
            cuts section from the config file
        """
        print('Reading data file {}\n'.format(data_path))
        hdul = fits.open(find_file(data_path))

        blinding = 'none'
        if 'BLINDING' in hdul[1].header:
            blinding = hdul[1].header['BLINDING']

        if blinding == 'minimal':
            print('Warning! Running on blinded data {}'.format(data_path))
            print('Scale parameters must be fixed to 1.')
            self._blind = True
            self._data_vec = hdul[1].data['DA_BLIND']
            if 'DM_BLIND' in hdul[1].columns.names:
                self._distortion_mat = csr_matrix(hdul[1].data['DM_BLIND'])
        elif blinding == 'none':
            self._blind = False
            self._data_vec = hdul[1].data['DA']
            if 'DM' in hdul[1].columns.names:
                self._distortion_mat = csr_matrix(hdul[1].data['DM'])
        else:
            self._blind = True
            raise ValueError(
                "Unknown blinding strategy. Only 'minimal' implemented.")

        if 'CO' in hdul[1].columns.names:
            self._cov_mat = hdul[1].data['CO']

        rp_grid = hdul[1].data['RP']
        rt_grid = hdul[1].data['RT']
        z_grid = hdul[1].data['Z']
        if 'NB' in hdul[1].columns.names:
            self.nb = hdul[1].data['NB']
        else:
            self.nb = None

        try:
            dist_rp_grid = hdul[2].data['DMRP']
            dist_rt_grid = hdul[2].data['DMRT']
            dist_z_grid = hdul[2].data['DMZ']
        except (IndexError, KeyError):
            dist_rp_grid = rp_grid.copy()
            dist_rt_grid = rt_grid.copy()
            dist_z_grid = z_grid.copy()
        self.coeff_binning_model = np.sqrt(dist_rp_grid.size / rp_grid.size)

        # Compute the mask and use it on the data
        self.mask, self.bin_size_rp, self.bin_size_rt = self._build_mask(
            rp_grid, rt_grid, cuts_config, hdul[1].header)

        self.data_size = len(self.masked_data_vec)
        self.full_data_size = len(self.data_vec)

        hdul.close()

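        # Note: despite the names, r_square_grid below holds r = sqrt(rp^2 + rt^2)
        # and mu_square_grid holds mu = rp / r, not their squares.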
        self.r_square_grid = np.sqrt(rp_grid**2 + rt_grid**2)
        self.mu_square_grid = np.zeros(self.r_square_grid.size)
        w = self.r_square_grid > 0.
        self.mu_square_grid[w] = rp_grid[w] / self.r_square_grid[w]

        # return the coordinate grids
        rp_rt_grid = np.array([dist_rp_grid, dist_rt_grid])
        return rp_rt_grid, dist_z_grid