Example #1
    def testGetDist(self):
        from getdist.command_line import getdist_command

        os.chdir(self.tempdir)
        res = getdist_command([self.root])
        # Note this can fail if your local analysis defaults change the default ignore_rows
        self.assertTrue('-Ln(mean like)  = 2.31' in res, res)
        fname = 'testchain_pars.ini'
        getdist_command(['--make_param_file', fname])
        ini = IniFile(fname)
        ini.params['no_plots'] = False
        ini.params['plot_2D_num'] = 1
        ini.params['plot1'] = 'x y'
        ini.params['num_3D_plots'] = 1
        ini.params['3D_plot1'] = 'x y x'
        ini.params['triangle_params'] = '*[xy]*'

        ini.saveFile(fname)
        res = getdist_command([fname, self.root])
        self.assertTrue('-Ln(mean like)  = 2.31' in res)

        def check_run():
            for f in ['.py', '_2D.py', '_3D.py', '_tri.py']:
                pyname = self.root + f
                self.assertTrue(os.path.isfile(pyname))
                subprocess.check_output(['python', pyname])
                pdf = self.root + f.replace('py', 'pdf')
                self.assertTrue(os.path.isfile(pdf))
                os.remove(pdf)
                os.remove(pyname)

        check_run()
Example #2
    def testGetDist(self):
        def callGetDist(args):
            if os.getenv('TRAVIS', None):
                return str(
                    subprocess.check_output(['GetDist.py'] + args,
                                            env={'PATH': os.getenv('PATH')}))
            else:
                return str(
                    subprocess.check_output([
                        'python',
                        os.path.join(os.path.dirname(__file__), '..' +
                                     os.sep, 'GetDist.py')
                    ] + args))

        os.chdir(self.tempdir)
        res = callGetDist([self.root])
        # Note this can fail if your local analysis defaults change the default ignore_rows
        self.assertTrue('-Ln(mean like)  = 2.30' in res)
        fname = 'testchain_pars.ini'
        callGetDist(['--make_param_file', fname])
        ini = IniFile(fname)
        ini.params['no_plots'] = False
        ini.params['plot_2D_num'] = 1
        ini.params['plot1'] = 'x y'
        ini.params['num_3D_plots'] = 1
        ini.params['3D_plot1'] = 'x y x'
        ini.params['plot_data_dir'] = ''
        ini.params['triangle_params'] = '*[xy]*'

        ini.saveFile(fname)
        res = callGetDist([fname, self.root])
        self.assertTrue('-Ln(mean like)  = 2.30' in res)
        self.assertFalse(
            os.path.isfile(
                os.path.join(self.tempdir, 'plot_data', 'testchain_2D_x_y')))

        def checkRun():
            for f in ['.py', '_2D.py', '_3D.py', '_tri.py']:
                pyname = self.root + f
                self.assertTrue(os.path.isfile(pyname))
                subprocess.check_output(['python', pyname])
                pdf = self.root + f.replace('py', 'pdf')
                self.assertTrue(os.path.isfile(pdf))
                os.remove(pdf)
                os.remove(pyname)

        checkRun()

        ini.params['plot_data_dir'] = 'plot_data/'
        ini.saveFile(fname)
        callGetDist([fname, self.root])
        self.assertTrue(
            os.path.isfile(
                os.path.join(self.tempdir, 'plot_data', 'testchain_2D_x_y')))
        checkRun()
        shutil.rmtree(os.path.join(self.tempdir, 'plot_data'))
Example #3
    def testGetDist(self):

        def callGetDist(args):
            if os.getenv('TRAVIS', None):
                return str(subprocess.check_output(['GetDist.py'] + args, env={'PATH': os.getenv('PATH')}))
            else:
                return str(subprocess.check_output(
                    ['python', os.path.join(os.path.dirname(__file__), '..' + os.sep, 'GetDist.py')] + args))

        os.chdir(self.tempdir)
        res = callGetDist([self.root])
        # Note this can fail if your local analysis defaults change the default ignore_rows
        self.assertTrue('-Ln(mean like)  = 2.30' in res)
        fname = 'testchain_pars.ini'
        callGetDist(['--make_param_file', fname])
        ini = IniFile(fname)
        ini.params['no_plots'] = False
        ini.params['plot_2D_num'] = 1
        ini.params['plot1'] = 'x y'
        ini.params['num_3D_plots'] = 1
        ini.params['3D_plot1'] = 'x y x'
        ini.params['plot_data_dir'] = ''
        ini.params['triangle_params'] = '*[xy]*'

        ini.saveFile(fname)
        res = callGetDist([fname, self.root])
        self.assertTrue('-Ln(mean like)  = 2.30' in res)
        self.assertFalse(os.path.isfile(os.path.join(self.tempdir, 'plot_data', 'testchain_2D_x_y')))

        def checkRun():
            for f in ['.py', '_2D.py', '_3D.py', '_tri.py']:
                pyname = self.root + f
                self.assertTrue(os.path.isfile(pyname))
                subprocess.check_output(['python', pyname])
                pdf = self.root + f.replace('py', 'pdf')
                self.assertTrue(os.path.isfile(pdf))
                os.remove(pdf)
                os.remove(pyname)

        checkRun()

        ini.params['plot_data_dir'] = 'plot_data/'
        ini.saveFile(fname)
        callGetDist([fname, self.root])
        self.assertTrue(os.path.isfile(os.path.join(self.tempdir, 'plot_data', 'testchain_2D_x_y')))
        checkRun()
        shutil.rmtree(os.path.join(self.tempdir, 'plot_data'))
Example #4
#    if (l >= 2): SN += (2 * l + 1) * fsky * (d[l - 2, 1] / (NoiseCl + d[l - 2, 1])) ** 2
# print 'Number of modes: ', SN

outfile = open(outDir + outRoot + '_Noise.dat', 'w')
for l in range(2, lmax + 1):
    outfile.write("%d " % l + " ".join("%E" % elem for elem in NoiseOut[l]) + "\n")
outfile.close()

dataset['fullsky_exact_fksy'] = fsky
dataset['name'] = outRoot
dataset['dataset_format'] = 'CMBLike2'
dataset['like_approx'] = 'exact'

dataset['cl_lmin'] = lmin
dataset['cl_lmax'] = lmax

dataset['binned'] = False


dataset['cl_hat_includes_noise'] = False

dataset['cl_hat_file'] = outPath + outRoot + '.dat'
dataset['cl_hat_order'] = dataCols
dataset['cl_noise_file '] = outPath + outRoot + '_Noise.dat'
dataset['cl_noise_order'] = 'TT EE BB'


ini.saveFile(outDir + outRoot + '.dataset')

shutil.copy(lensedTotClFileRoot, outDir + outRoot + '.dat')
Example #5
def makeGrid(batchPath,
             settingName=None,
             settings=None,
             readOnly=False,
             interactive=False):
    batchPath = os.path.abspath(batchPath) + os.sep

    # 0: chains, 1: importance sampling, 2: best-fit, 3: best-fit and Hessian
    cosmomcAction = 0

    if not settings:
        if not settingName:
            if not pathIsGrid(batchPath):
                raise Exception(
                    'Need to give name of setting file if batchPath/config does not exist'
                )
            readOnly = True
            sys.path.insert(0, batchPath + 'config')
            sys.modules['batchJob'] = batchjob  # old name
            settings = __import__(
                IniFile(batchPath +
                        'config/config.ini').params['setting_file'].replace(
                            '.py', ''))
        else:
            settings = __import__(settingName, fromlist=['dummy'])

    batch = batchjob.batchJob(batchPath, settings.ini_dir)

    if hasattr(settings, 'skip'): batch.skip = settings.skip
    batch.makeItems(settings, messages=not readOnly)
    if readOnly:
        for jobItem in [b for b in batch.jobItems]:
            if not jobItem.chainExists():
                batch.jobItems.remove(jobItem)
        batch.save()
        print('OK, configured grid with %u existing chains' %
              (len(batch.jobItems)))
        return batch
    else:
        batch.makeDirectories(settings.__file__)
        batch.save()

    # priors and widths for parameters which are varied
    start_at_bestfit = getattr(settings, 'start_at_bestfit', False)
    params = getattr(settings, 'params', default_params)
    param_extra = getattr(settings, 'param_extra_opts',
                          default_param_extra_opts)

    for jobItem in batch.items(wantSubItems=False):

        jobItem.makeChainPath()
        ini = IniFile()

        for param in jobItem.param_set:
            ini.params['param[' + param + ']'] = params[param]
            if param_extra is not None and param in param_extra:
                ini.params.update(param_extra[param])

        if hasattr(settings, 'extra_opts'):
            ini.params.update(settings.extra_opts)

        ini.params['file_root'] = jobItem.chainRoot

        cov_dir_name = getattr(settings, 'cov_dir', 'planck_covmats')
        covdir = os.path.join(batch.basePath, cov_dir_name)
        covmat = os.path.join(covdir, jobItem.name + '.covmat')
        if not os.path.exists(covmat):
            covNameMappings = getattr(settings, 'covNameMappings', None)
            mapped_name_norm = jobItem.makeNormedName(covNameMappings)[0]
            covmat_normed = os.path.join(covdir, mapped_name_norm + '.covmat')
            covmat = covmat_normed
            if not os.path.exists(covmat) and hasattr(jobItem.data_set,
                                                      'covmat'):
                covmat = batch.basePath + jobItem.data_set.covmat
            if not os.path.exists(covmat) and hasattr(settings, 'covmat'):
                covmat = batch.basePath + settings.covmat
        else:
            covNameMappings = None
        if os.path.exists(covmat):
            ini.params['propose_matrix'] = covmat
            if getattr(settings, 'newCovmats', True):
                ini.params['MPI_Max_R_ProposeUpdate'] = 20
        else:
            hasCov = False
            ini.params['MPI_Max_R_ProposeUpdate'] = 20
            covmat_try = []
            if 'covRenamer' in dir(settings):
                covmat_try += settings.covRenamer(jobItem.name)
                covmat_try += settings.covRenamer(mapped_name_norm)
            if hasattr(settings, 'covrenames'):
                for aname in [jobItem.name, mapped_name_norm]:
                    covmat_try += [
                        aname.replace(old, new, 1)
                        for old, new in settings.covrenames if old in aname
                    ]
                    for new1, old1 in settings.covrenames:
                        if old1 in aname:
                            name = aname.replace(old1, new1, 1)
                            covmat_try += [
                                name.replace(old, new, 1)
                                for old, new in settings.covrenames
                                if old in name
                            ]
            if 'covWithoutNameOrder' in dir(settings):
                if covNameMappings:
                    removes = copy.deepcopy(covNameMappings)
                else:
                    removes = dict()
                for name in settings.covWithoutNameOrder:
                    if name in jobItem.data_set.names:
                        removes[name] = ''
                        covmat_try += [jobItem.makeNormedName(removes)[0]]
            covdir2 = os.path.join(
                batch.basePath,
                getattr(settings, 'cov_dir_fallback', cov_dir_name))
            for name in covmat_try:
                covmat = os.path.join(batch.basePath, covdir2,
                                      name + '.covmat')
                if os.path.exists(covmat):
                    ini.params['propose_matrix'] = covmat
                    print('covmat ' + jobItem.name + ' -> ' + name)
                    hasCov = True
                    break
            if not hasCov:
                print('WARNING: no matching specific covmat for ' +
                      jobItem.name)

        ini.params['start_at_bestfit'] = start_at_bestfit
        updateIniParams(ini, jobItem.data_set.params, batch.commonPath)
        for deffile in settings.defaults:
            ini.defaults.append(batch.commonPath + deffile)
        if hasattr(settings, 'override_defaults'):
            ini.defaults = [
                batch.commonPath + deffile
                for deffile in settings.override_defaults
            ] + ini.defaults

        ini.params['action'] = cosmomcAction
        ini.saveFile(jobItem.iniFile())
        if not start_at_bestfit:
            setMinimize(jobItem, ini)
            variant = '_minimize'
            ini.saveFile(jobItem.iniFile(variant))

            # add ini files for importance sampling runs
        for imp in jobItem.importanceJobs():
            if not getattr(jobItem, 'importanceFilter', None): continue
            if batch.hasName(imp.name.replace('_post', '')):
                raise Exception(
                    'importance sampling something you already have?')
            for minimize in (False, True):
                if minimize and not getattr(imp, 'want_minimize', True):
                    continue
                ini = IniFile()
                updateIniParams(ini, imp.importanceSettings, batch.commonPath)
                if cosmomcAction == 0 and not minimize:
                    for deffile in settings.importanceDefaults:
                        ini.defaults.append(batch.commonPath + deffile)
                    ini.params['redo_outroot'] = imp.chainRoot
                    ini.params['action'] = 1
                else:
                    ini.params['file_root'] = imp.chainRoot
                if minimize:
                    setMinimize(jobItem, ini)
                    variant = '_minimize'
                else:
                    variant = ''
                ini.defaults.append(jobItem.iniFile())
                ini.saveFile(imp.iniFile(variant))
                if cosmomcAction != 0: break

    if not interactive: return batch
    print('Done... to run do: python python/runbatch.py ' + batchPath)
    if not start_at_bestfit:
        print('....... for best fits: python python/runbatch.py ' + batchPath +
              ' --minimize')
    print('')
    print('for importance sampled: python python/runbatch.py ' + batchPath +
          ' --importance')
    print('for best-fit for importance sampled: python python/runbatch.py ' +
          batchPath + ' --importance_minimize')
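
A minimal driver sketch for the makeGrid function above (not part of the original example): the grid path 'my_batch/' and the settings module name 'my_grid_settings' are hypothetical placeholders, and it assumes the settings module is importable on sys.path.

# Hypothetical driver; 'my_batch/' and 'my_grid_settings' are placeholder names.
batch = makeGrid('my_batch/', settingName='my_grid_settings')
print('configured %u jobs' % len(batch.jobItems))

# Reopening the same grid later in read-only mode keeps only the jobs whose chains
# already exist, assuming the config directory written by the first call is present.
existing = makeGrid('my_batch/', readOnly=True)
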
Example #6
    elif os.path.exists(batch.commonPath + args.base_ini):
        ini.defaults.append(batch.commonPath + args.base_ini)
    else:
        raise ValueError("base_ini file not found")
    if hasattr(batch, 'getdist_options'):
        ini.params.update(batch.getdist_options)
    tag = ''
    if jobItem.isImportanceJob or args.burn_removed or jobItem.isBurnRemoved():
        ini.params['ignore_rows'] = 0
    elif args.burn_remove is not None:
        ini.params['ignore_rows'] = args.burn_remove

    if jobItem.isImportanceJob:
        ini.params['compare_num'] = 1
        ini.params['compare1'] = jobItem.parent.chainRoot
    if args.no_plots: ini.params['no_plots'] = True
    if args.make_plots: ini.params['make_plots'] = True
    fname = ini_dir + jobItem.name + tag + '.ini'
    ini.params.update(jobItem.dist_settings)
    ini.saveFile(fname)
    if not args.norun and (not args.notexist or not jobItem.getDistExists()) and (
                not args.update_only or jobItem.getDistNeedsUpdate()):
        if jobItem.chainExists():
            print("running: " + fname)
            processes.add(subprocess.Popen([args.command] + args.command_params + [fname]))
            while len(processes) >= args.procs:
                time.sleep(.1)
                processes.difference_update([p for p in processes if p.poll() is not None])
        else:
            if not args.exist: print("Chains do not exist yet: " + jobItem.chainRoot)
Example #7
bf = types.BestFit(root + '.minimum',
                   setParamNameFile=root + '.paramnames',
                   want_fixed=True)

for camb, cosmomc in list(pars.items()):
    par = bf.parWithName(cosmomc)
    if par is not None: ini.params[camb] = par.best_fit

ini.params['scalar_amp(1)'] = float(ini.params['scalar_amp(1)']) / 1e9

nmassive = 1
neffstandard = 3.046 / 3
ini.params['massless_neutrinos'] = float(
    ini.params['massless_neutrinos']) - neffstandard * nmassive
ini.params['massive_neutrinos'] = int(round(neffstandard * nmassive))
ini.params['nu_mass_degeneracies'] = neffstandard * nmassive
ini.params['share_delta_neff'] = False
ini.params['tensor_spectral_index(1)'] = -float(
    ini.params['initial_ratio(1)']) / 8

inPars = IniFile(root + '.inputparams')
if inPars.bool('use_nonlinear_lensing', True):
    ini.params['do_nonlinear'] = 3
# Note: if you want an accurate spectrum on small scales, you may need to increase accuracy

ini.saveFile(sys.argv[2])

print(
    'OK, though note this does not support all parameter extensions from LCDM')
Example #8
def make_forecast_cmb_dataset(fiducial_Cl,
                              output_root,
                              output_dir=None,
                              noise_muK_arcmin_T=None,
                              noise_muK_arcmin_P=None,
                              NoiseVar=None,
                              ENoiseFac=2,
                              fwhm_arcmin=None,
                              lmin=2,
                              lmax=None,
                              fsky=1.0,
                              lens_recon_noise=None,
                              cl_dict_lmin=0):  # pragma: no cover
    """
    Make a simulated .dataset and associated files with 'data' set at the input fiducial
    model. Uses the exact full-sky log-likelihood, scaled by fsky.

    If you want to use numerical N_L CMB noise files, you can just replace the noise
    .dat text file produced by this function.

    :param fiducial_Cl: dictionary of Cls to use, combination of tt, te, ee, bb, pp;
                        note te must be included with tt and ee when using them
    :param output_root: root name for output files, e.g. 'my_sim1'
    :param output_dir: output directory
    :param noise_muK_arcmin_T: temperature noise in muK-arcmin
    :param noise_muK_arcmin_P: polarization noise in muK-arcmin
    :param NoiseVar: alternatively if noise_muK_arcmin_T is None, effective
        isotropic noise variance for the temperature (N_L=NoiseVar with no beam)
    :param ENoiseFac: factor by which the polarization noise variance is higher than
                      NoiseVar (usually 2; for Planck about 4, as only half the
                      detectors are polarized)
    :param fwhm_arcmin: beam fwhm in arcminutes
    :param lmin: l_min
    :param lmax: l_max
    :param fsky: sky fraction
    :param lens_recon_noise: optional array, starting at L=0, for the
       pp lensing reconstruction noise, in [L(L+1)]^2C_L^phi/2pi units
    :param cl_dict_lmin: l_min for the arrays in fiducial_Cl
    :return: IniFile that was saved
    """
    ini = IniFile()
    dataset = ini.params

    cl_keys = fiducial_Cl.keys()
    use_CMB = set(cl_keys).intersection(set(CMB_keys))
    use_lensing = lens_recon_noise

    if use_CMB:
        if NoiseVar is None:
            if noise_muK_arcmin_T is None:
                raise ValueError('Must specify noise')
            NoiseVar = white_noise_from_muK_arcmin(noise_muK_arcmin_T)
            if noise_muK_arcmin_P is not None:
                ENoiseFac = (noise_muK_arcmin_P / noise_muK_arcmin_T)**2
        elif noise_muK_arcmin_T is not None or noise_muK_arcmin_P is not None:
            raise ValueError('Specify either noise_muK_arcmin or NoiseVar')
        fields_use = ''
        if 'tt' in cl_keys or 'te' in cl_keys:
            fields_use = 'T'
        if 'ee' in cl_keys or 'te' in cl_keys:
            fields_use += ' E'
        if 'bb' in cl_keys:
            fields_use += ' B'
        if 'pp' in cl_keys and use_lensing:
            fields_use += ' P'
        if 'tt' in cl_keys and 'ee' in cl_keys and 'te' not in cl_keys:
            raise ValueError(
                'Input power spectra should have te if using tt and ee -'
                'using the exact likelihood requires the full covariance.')
    else:
        fields_use = 'P'

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    dataset['fields_use'] = fields_use

    if use_CMB:
        fwhm = fwhm_arcmin / 60
        xlc = 180 * np.sqrt(8. * np.log(2.)) / np.pi
        sigma2 = (fwhm / xlc)**2
        noise_cols = 'TT           EE          BB'
        if use_lensing:
            noise_cols += '          PP'
    elif use_lensing:
        noise_cols = 'PP'
    else:
        raise ValueError('Must use CMB or lensing C_L')
    noise_file = output_root + '_Noise.dat'
    with open(os.path.join(output_dir, noise_file), 'w') as f:
        f.write('#L %s\n' % noise_cols)

        for ell in range(lmin, lmax + 1):
            noises = []
            if use_CMB:
                # noinspection PyUnboundLocalVariable
                noise_cl = ell * (ell + 1.) / 2 / np.pi * NoiseVar * np.exp(
                    ell * (ell + 1) * sigma2)
                noises += [
                    noise_cl, ENoiseFac * noise_cl, ENoiseFac * noise_cl
                ]
            if use_lensing:
                noises += [lens_recon_noise[ell]]
            f.write("%d " % ell + " ".join("%E" % elem
                                           for elem in noises) + "\n")

    dataset['fullsky_exact_fksy'] = fsky
    dataset['dataset_format'] = 'CMBLike2'
    dataset['like_approx'] = 'exact'

    dataset['cl_lmin'] = lmin
    dataset['cl_lmax'] = lmax

    dataset['binned'] = False

    dataset['cl_hat_includes_noise'] = False

    save_cl_dict(os.path.join(output_dir, output_root + '.dat'),
                 fiducial_Cl,
                 cl_dict_lmin=cl_dict_lmin)
    dataset['cl_hat_file'] = output_root + '.dat'
    dataset['cl_noise_file '] = noise_file

    ini.saveFile(os.path.join(output_dir, output_root + '.dataset'))
    return ini
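
A minimal usage sketch for the dictionary-based make_forecast_cmb_dataset above (not part of the original example). It assumes numpy is available as np and that the surrounding module provides white_noise_from_muK_arcmin and save_cl_dict as called in the function body; the zero-filled spectra and the 'my_sim1'/'forecast_data' names are placeholders for illustration only.

# Hypothetical usage; the spectra below are placeholder arrays, not a real fiducial model.
lmax_sim = 2000
fiducial = {'tt': np.zeros(lmax_sim + 1), 'te': np.zeros(lmax_sim + 1), 'ee': np.zeros(lmax_sim + 1)}
ini = make_forecast_cmb_dataset(fiducial, 'my_sim1', output_dir='forecast_data',
                                noise_muK_arcmin_T=30., noise_muK_arcmin_P=60.,
                                fwhm_arcmin=7., lmax=lmax_sim, fsky=0.7)
print(ini.params['fields_use'])  # 'T E', inferred from the tt/te/ee keys
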
Example #9
def make_forecast_cmb_dataset(input_cl_file,
                              output_root,
                              output_dir=None,
                              noise_muK_arcmin_T=None,
                              noise_muK_arcmin_P=None,
                              NoiseVar=None,
                              ENoiseFac=2,
                              fwhm_arcmin=None,
                              lmin=2,
                              lmax=None,
                              fsky=1,
                              fields_use=None,
                              lens_recon_noise=None,
                              cl_data_cols=''):
    """
    Make a simulated .dataset and associated files with 'data' set at the input fiducial model.

    :param input_cl_file: input fiducial CL
    :param output_root: root name for output files, e.g. 'my_sim1'
    :param output_dir: output directory
    :param noise_muK_arcmin_T: temperature noise in muK-arcmin
    :param noise_muK_arcmin_P: polarization noise in muK-arcmin
    :param NoiseVar: effective isotropic noise variance for the temperature (N_L=NoiseVar with no beam)
    :param ENoiseFac: factor by which the polarization noise variance is higher (usually 2; for Planck
                        about 4, as only half the detectors are polarized)
    :param fwhm_arcmin: beam fwhm in arcminutes
    :param lmin: l_min
    :param lmax: l_max
    :param fsky: sky fraction
    :param fields_use: optional list of fields to restrict to (e.g. 'T E')
    :param lens_recon_noise: optional array, starting at L=0, for the PP lensing reconstruction noise, in [L(L+1)]^2C_L^phi/2pi units
    :param cl_data_cols: if not specified in file header, order of columns in input CL file (e.g. 'TT TE EE BB PP')
    :return:
    """

    use_lensing = lens_recon_noise
    use_CMB = noise_muK_arcmin_T or NoiseVar is not None

    ini = IniFile()
    dataset = ini.params

    if not cl_data_cols:
        cl_data_cols = lastTopComment(input_cl_file)
        if not cl_data_cols:
            raise Exception(
                'input CL file must specify the names of columns (TT TE EE..)')
    else:
        dataset['cl_hat_order'] = cl_data_cols

    if use_CMB:
        if NoiseVar is None:
            if noise_muK_arcmin_T is None:
                raise ValueError('Must specify noise')
            NoiseVar = (noise_muK_arcmin_T * np.pi / 180 / 60.)**2
            if noise_muK_arcmin_P is not None:
                ENoiseFac = (noise_muK_arcmin_P / noise_muK_arcmin_T)**2
        elif noise_muK_arcmin_T is not None or noise_muK_arcmin_P is not None:
            raise ValueError('Specify either noise_muK_arcmin or NoiseVar')
        if not fields_use:
            fields_use = ''
            if 'TT' in cl_data_cols or 'TE' in cl_data_cols: fields_use = 'T'
            if 'EE' in cl_data_cols or 'TE' in cl_data_cols: fields_use += ' E'
            if 'BB' in cl_data_cols: fields_use += ' B'
            if 'PP' in cl_data_cols and use_lensing: fields_use += ' P'
    else:
        fields_use = fields_use or 'P'

    if output_dir is None:
        output_dir = os.path.join(os.path.dirname(__file__), '..', 'data',
                                  output_root)
    if not os.path.exists(output_dir): os.makedirs(output_dir)

    dataset['fields_use'] = fields_use

    if use_CMB:
        fwhm = fwhm_arcmin / 60
        xlc = 180 * np.sqrt(8. * np.log(2.)) / np.pi
        sigma2 = (fwhm / xlc)**2
        noise_cols = 'TT           EE          BB'
        if use_lensing: noise_cols += '          PP'
    elif use_lensing:
        noise_cols = 'PP'
    noise_file = output_root + '_Noise.dat'
    with open(os.path.join(output_dir, noise_file), 'w') as f:
        f.write('#L %s\n' % noise_cols)

        for l in range(lmin, lmax + 1):
            noises = []
            if use_CMB:
                NoiseCl = l * (l + 1.) / 2 / np.pi * NoiseVar * np.exp(
                    l * (l + 1) * sigma2)
                noises += [NoiseCl, ENoiseFac * NoiseCl, ENoiseFac * NoiseCl]
            if use_lensing: noises += [lens_recon_noise[l]]
            f.write("%d " % l + " ".join("%E" % elem
                                         for elem in noises) + "\n")

    dataset['fullsky_exact_fksy'] = fsky
    dataset['dataset_format'] = 'CMBLike2'
    dataset['like_approx'] = 'exact'

    dataset['cl_lmin'] = lmin
    dataset['cl_lmax'] = lmax

    dataset['binned'] = False

    dataset['cl_hat_includes_noise'] = False

    shutil.copy(input_cl_file, os.path.join(output_dir, output_root + '.dat'))
    dataset['cl_hat_file'] = output_root + '.dat'
    dataset['cl_noise_file '] = noise_file

    ini.saveFile(os.path.join(output_dir, output_root + '.dataset'))
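
A minimal usage sketch for the file-based variant above (not part of the original example). The file name 'fiducial_lensedCls.dat' and root 'my_sim2' are placeholders, and the input CL file is assumed to carry a header comment naming its columns (e.g. '#L TT TE EE BB') so that lastTopComment can recover the column order.

# Hypothetical usage; file and root names are placeholders.
make_forecast_cmb_dataset('fiducial_lensedCls.dat', 'my_sim2',
                          output_dir='data/my_sim2',
                          noise_muK_arcmin_T=30., noise_muK_arcmin_P=60.,
                          fwhm_arcmin=7., lmax=2500, fsky=0.7)
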
Example #10
def makeGrid(batchPath, settingName=None, settings=None, readOnly=False, interactive=False):
    batchPath = os.path.abspath(batchPath) + os.sep

    # 0: chains, 1: importance sampling, 2: best-fit, 3: best-fit and Hessian
    cosmomcAction = 0

    if not settings:
        if not settingName:
            if not pathIsGrid(batchPath):
                raise Exception('Need to give name of setting file if batchPath/config does not exist')
            readOnly = True
            sys.path.insert(0, batchPath + 'config')
            sys.modules['batchJob'] = batchjob  # old name
            settings = __import__(IniFile(batchPath + 'config/config.ini').params['setting_file'].replace('.py', ''))
        else:
            settings = __import__(settingName, fromlist=['dummy'])

    batch = batchjob.batchJob(batchPath, settings.ini_dir)

    if hasattr(settings, 'skip'): batch.skip = settings.skip
    batch.makeItems(settings, messages=not readOnly)
    if readOnly:
        for jobItem in [b for b in batch.jobItems]:
            if not jobItem.chainExists():
                batch.jobItems.remove(jobItem)
        batch.save()
        print('OK, configured grid with %u existing chains' % (len(batch.jobItems)))
        return batch
    else:
        batch.makeDirectories(settings.__file__)
        batch.save()

    # priors and widths for parameters which are varied
    start_at_bestfit = getattr(settings, 'start_at_bestfit', False)
    params = getattr(settings, 'params', default_params)
    param_extra = getattr(settings, 'param_extra_opts', default_param_extra_opts)

    for jobItem in batch.items(wantSubItems=False):

        jobItem.makeChainPath()
        ini = IniFile()

        for param in jobItem.param_set:
            ini.params['param[' + param + ']'] = params[param]
            if param_extra is not None and param in param_extra:
                ini.params.update(param_extra[param])

        if hasattr(settings, 'extra_opts'):
            ini.params.update(settings.extra_opts)

        ini.params['file_root'] = jobItem.chainRoot

        cov_dir_name = getattr(settings, 'cov_dir', 'planck_covmats')
        covdir = os.path.join(batch.basePath, cov_dir_name)
        covmat = os.path.join(covdir, jobItem.name + '.covmat')
        if not os.path.exists(covmat):
            covNameMappings = getattr(settings, 'covNameMappings', None)
            mapped_name_norm = jobItem.makeNormedName(covNameMappings)[0]
            covmat_normed = os.path.join(covdir, mapped_name_norm + '.covmat')
            covmat = covmat_normed
            if not os.path.exists(covmat) and hasattr(jobItem.data_set,
                                                      'covmat'): covmat = batch.basePath + jobItem.data_set.covmat
            if not os.path.exists(covmat) and hasattr(settings, 'covmat'): covmat = batch.basePath + settings.covmat
        else:
            covNameMappings = None
        if os.path.exists(covmat):
            ini.params['propose_matrix'] = covmat
            if getattr(settings, 'newCovmats', True): ini.params['MPI_Max_R_ProposeUpdate'] = 20
        else:
            hasCov = False
            ini.params['MPI_Max_R_ProposeUpdate'] = 20
            covmat_try = []
            if 'covRenamer' in dir(settings):
                covmat_try += settings.covRenamer(jobItem.name)
                covmat_try += settings.covRenamer(mapped_name_norm)
            if hasattr(settings, 'covrenames'):
                for aname in [jobItem.name, mapped_name_norm]:
                    covmat_try += [aname.replace(old, new, 1) for old, new in settings.covrenames if old in aname]
                    for new1, old1 in settings.covrenames:
                        if old1 in aname:
                            name = aname.replace(old1, new1, 1)
                            covmat_try += [name.replace(old, new, 1) for old, new in settings.covrenames if old in name]
            if 'covWithoutNameOrder' in dir(settings):
                if covNameMappings:
                    removes = copy.deepcopy(covNameMappings)
                else:
                    removes = dict()
                for name in settings.covWithoutNameOrder:
                    if name in jobItem.data_set.names:
                        removes[name] = ''
                        covmat_try += [jobItem.makeNormedName(removes)[0]]
            covdir2 = os.path.join(batch.basePath, getattr(settings, 'cov_dir_fallback', cov_dir_name))
            for name in covmat_try:
                covmat = os.path.join(batch.basePath, covdir2, name + '.covmat')
                if os.path.exists(covmat):
                    ini.params['propose_matrix'] = covmat
                    print('covmat ' + jobItem.name + ' -> ' + name)
                    hasCov = True
                    break
            if not hasCov: print('WARNING: no matching specific covmat for ' + jobItem.name)

        ini.params['start_at_bestfit'] = start_at_bestfit
        updateIniParams(ini, jobItem.data_set.params, batch.commonPath)
        for deffile in settings.defaults:
            ini.defaults.append(batch.commonPath + deffile)
        if hasattr(settings, 'override_defaults'):
            ini.defaults = [batch.commonPath + deffile for deffile in settings.override_defaults] + ini.defaults

        ini.params['action'] = cosmomcAction
        ini.saveFile(jobItem.iniFile())
        if not start_at_bestfit:
            setMinimize(jobItem, ini)
            variant = '_minimize'
            ini.saveFile(jobItem.iniFile(variant))


            # add ini files for importance sampling runs
        for imp in jobItem.importanceJobs():
            if batch.hasName(imp.name.replace('_post', '')): raise Exception(
                'importance sampling something you already have?')
            for minimize in (False, True):
                if minimize and not getattr(imp, 'want_minimize', True): continue
                ini = IniFile()
                updateIniParams(ini, imp.importanceSettings, batch.commonPath)
                if cosmomcAction == 0 and not minimize:
                    for deffile in settings.importanceDefaults:
                        ini.defaults.append(batch.commonPath + deffile)
                    ini.params['redo_outroot'] = imp.chainRoot
                    ini.params['action'] = 1
                else:
                    ini.params['file_root'] = imp.chainRoot
                if minimize:
                    setMinimize(jobItem, ini)
                    variant = '_minimize'
                else:
                    variant = ''
                ini.defaults.append(jobItem.iniFile())
                ini.saveFile(imp.iniFile(variant))
                if cosmomcAction != 0: break

    if not interactive: return batch
    print('Done... to run do: python python/runbatch.py ' + batchPath)
    if not start_at_bestfit:
        print('....... for best fits: python python/runbatch.py ' + batchPath + ' --minimize')
    print('')
    print('for importance sampled: python python/runbatch.py ' + batchPath + ' --importance')
    print('for best-fit for importance sampled: python python/runbatch.py ' + batchPath + ' --importance_minimize')
Example #11
def make_forecast_cmb_dataset(input_cl_file, output_root, output_dir=None, noise_muK_arcmin_T=None,
                              noise_muK_arcmin_P=None, NoiseVar=None, ENoiseFac=2, fwhm_arcmin=None,
                              lmin=2, lmax=None, fsky=1, fields_use=None,
                              lens_recon_noise=None, cl_data_cols=''):
    """
    Make a simulated .dataset and associated files with 'data' set at the input fiducial model.

    :param input_cl_file: input fiducial CL
    :param output_root: root name for output files, e.g. 'my_sim1'
    :param output_dir: output directory
    :param noise_muK_arcmin_T: temperature noise in muK-arcmin
    :param noise_muK_arcmin_P: polarization noise in muK-arcmin
    :param NoiseVar: effective isotropic noise variance for the temperature (N_L=NoiseVar with no beam)
    :param ENoiseFac: factor by which the polarization noise variance is higher (usually 2; for Planck
                        about 4, as only half the detectors are polarized)
    :param fwhm_arcmin: beam fwhm in arcminutes
    :param lmin: l_min
    :param lmax: l_max
    :param fsky: sky fraction
    :param fields_use: optional list of fields to restrict to (e.g. 'T E')
    :param lens_recon_noise: optional array, starting at L=0, for the PP lensing reconstruction noise, in [L(L+1)]^2C_L^phi/2pi units
    :param cl_data_cols: if not specified in file header, order of columns in input CL file (e.g. 'TT TE EE BB PP')
    :return:
    """

    use_lensing = lens_recon_noise
    use_CMB = noise_muK_arcmin_T or NoiseVar is not None

    ini = IniFile()
    dataset = ini.params

    if not cl_data_cols:
        cl_data_cols = lastTopComment(input_cl_file)
        if not cl_data_cols:
            raise Exception('input CL file must specify the names of columns (TT TE EE..)')
    else:
        dataset['cl_hat_order'] = cl_data_cols

    if use_CMB:
        if NoiseVar is None:
            if noise_muK_arcmin_T is None:
                raise ValueError('Must specify noise')
            NoiseVar = (noise_muK_arcmin_T * np.pi / 180 / 60.) ** 2
            if noise_muK_arcmin_P is not None:
                ENoiseFac = (noise_muK_arcmin_P / noise_muK_arcmin_T) ** 2
        elif noise_muK_arcmin_T is not None or noise_muK_arcmin_P is not None:
            raise ValueError('Specify either noise_muK_arcmin or NoiseVar')
        if not fields_use:
            fields_use = ''
            if 'TT' in cl_data_cols or 'TE' in cl_data_cols: fields_use = 'T'
            if 'EE' in cl_data_cols or 'TE' in cl_data_cols: fields_use += ' E'
            if 'BB' in cl_data_cols: fields_use += ' B'
            if 'PP' in cl_data_cols and use_lensing: fields_use += ' P'
    else:
        fields_use = fields_use or 'P'

    if output_dir is None:
        output_dir = os.path.join(os.path.dirname(__file__), '..', 'data', output_root)
    if not os.path.exists(output_dir): os.makedirs(output_dir)

    dataset['fields_use'] = fields_use

    if use_CMB:
        fwhm = fwhm_arcmin / 60
        xlc = 180 * np.sqrt(8. * np.log(2.)) / np.pi
        sigma2 = (fwhm / xlc) ** 2
        noise_cols = 'TT           EE          BB'
        if use_lensing: noise_cols += '          PP'
    elif use_lensing:
        noise_cols = 'PP'
    noise_file = output_root + '_Noise.dat'
    with open(os.path.join(output_dir, noise_file), 'w') as f:
        f.write('#L %s\n' % noise_cols)

        for l in range(lmin, lmax + 1):
            noises = []
            if use_CMB:
                NoiseCl = l * (l + 1.) / 2 / np.pi * NoiseVar * np.exp(l * (l + 1) * sigma2)
                noises += [NoiseCl, ENoiseFac * NoiseCl, ENoiseFac * NoiseCl]
            if use_lensing: noises += [lens_recon_noise[l]]
            f.write("%d " % l + " ".join("%E" % elem for elem in noises) + "\n")

    dataset['fullsky_exact_fksy'] = fsky
    dataset['dataset_format'] = 'CMBLike2'
    dataset['like_approx'] = 'exact'

    dataset['cl_lmin'] = lmin
    dataset['cl_lmax'] = lmax

    dataset['binned'] = False

    dataset['cl_hat_includes_noise'] = False

    shutil.copy(input_cl_file, os.path.join(output_dir, output_root + '.dat'))
    dataset['cl_hat_file'] = output_root + '.dat'
    dataset['cl_noise_file '] = noise_file

    ini.saveFile(os.path.join(output_dir, output_root + '.dataset'))
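
A further hypothetical sketch for the same function, this time passing fields_use explicitly to restrict the forecast to temperature only rather than inferring the fields from the CL file columns. Names and noise levels are placeholders; output_dir is left at its default (../data/<output_root> relative to the module, as described in the docstring).

# Hypothetical temperature-only forecast; names and noise levels are placeholders.
make_forecast_cmb_dataset('fiducial_lensedCls.dat', 'my_sim_T',
                          noise_muK_arcmin_T=40., fwhm_arcmin=10.,
                          lmax=2000, fsky=0.5, fields_use='T')
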
Example #12
ini.params['temp_cmb'] = 2.7255
ini.params['CMB_outputscale'] = 2.7255e6 ** 2.
ini.defaults.append('params.ini')

bf = types.BestFit(root + '.minimum', setParamNameFile=root + '.paramnames', want_fixed=True)

for camb, cosmomc in list(pars.items()):
    par = bf.parWithName(cosmomc)
    if par is not None: ini.params[camb] = par.best_fit

ini.params['scalar_amp(1)'] = float(ini.params['scalar_amp(1)']) / 1e9

nmassive = 1
neffstandard = 3.046 / 3
ini.params['massless_neutrinos'] = float(ini.params['massless_neutrinos']) - neffstandard * nmassive
ini.params['massive_neutrinos'] = int(round(neffstandard * nmassive))
ini.params['nu_mass_degeneracies'] = neffstandard * nmassive
ini.params['share_delta_neff'] = False
ini.params['tensor_spectral_index(1)'] = -float(ini.params['initial_ratio(1)']) / 8


inPars = IniFile(root + '.inputparams')
if inPars.bool('use_nonlinear_lensing', True):
    ini.params['do_nonlinear'] = 3
# Note: if you want an accurate spectrum on small scales, you may need to increase accuracy


ini.saveFile(sys.argv[2])

print('OK, though note this does not support all parameter extensions from LCDM')