# Example 1
# Importance-sampling run definitions. Each entry is
# [dataset name tags, .ini files to include, filter selecting which base runs
# to importance sample].
post_BAOlensingPantheon = [
    [BAO, lensing, Pantheon],
    [BAOdata, 'lensing.ini', 'Pantheon.ini'],
    importanceFilterNotOmegak(),
]

post_Pantheon = [
    [Pantheon],
    ['Pantheon.ini'],
    importanceFilterNotOmegak(),
]

# NOTE(review): unlike the other post_* entries this is a bare tag list with no
# .ini files or filter — confirm downstream code accepts this short form.
post_CookeBBN = ['Cooke17']

post_Aver15 = [
    ['Aver15'],
    ['Aver15BBN.ini'],
    importanceFilterNnu(),
]

post_BBN = [
    ['Cooke17', 'Aver15'],
    ['Aver15BBN.ini', 'Cooke17BBN.ini'],
    importanceFilterNnu(),
]

# set up groups of parameters and data sets

groups = []

# Baseline high-L likelihood group; every dataset combination also gets the
# low-l and low-E likelihoods.
g = batchjob.jobGroup('main')
g.datasets = copy.deepcopy(planck_highL_sets)
for dataset in g.datasets:
    dataset.add(lowl)
    dataset.add(lowE, lowEdata)

# One entry per model to run: [] is the base model, the rest are single or
# paired parameter extensions.
g.params = [[], ['omegak'], ['mnu'], ['r'], ['nrun', 'r'], ['nnu'],
            ['nrun'], ['Alens'], ['yhe'], ['w'], ['alpha1']]
g.importanceRuns = [post_BAO, post_lensing, post_lensingBAO, post_HST,
                    post_BBN]
groups.append(g)

# Polarization group: same pattern as 'main' but over the polarization
# dataset combinations (the loop body adding extra data continues below
# this chunk).
gpol = batchjob.jobGroup('mainpol')
gpol.datasets = copy.deepcopy(planck_pol_sets)
for d in gpol.datasets:
# Example 2
# Directory to find .ini files
ini_dir = 'batch2/'

# directory to look for existing covariance matrices
cov_dir = 'planck_covmats/'

# Base .ini files that every run / every importance run starts from.
defaults = ['common.ini']
importanceDefaults = ['importance_sampling.ini']

# set up list of groups of parameters and data sets
groups = []

# First group: all parameter variations crossed with all data combinations.
g = batchjob.jobGroup('main')

# One entry per model: [] is the base model, the rest single-parameter
# extensions.
g.params = [[], ['mnu'], ['nnu']]

# Dataset name tags to combine, paired with the .ini files each includes.
_data_combos = [
    (['plikHM', 'TT', 'lowTEB'],
     ['plik_dx11dr2_HM_v18_TT.ini', 'lowTEB.ini']),
    (['plikHM', 'TT', 'lowTEB', 'lensing'],
     ['plik_dx11dr2_HM_v18_TT.ini', 'lowTEB.ini', 'lensing.ini']),
]
g.datasets = [batchjob.dataSet(tags, inis) for tags, inis in _data_combos]

# Importance-run name tags with the specific .ini files to include (in batch1/).
g.importanceRuns = [[['BAO'], ['BAO.ini']]]
# Example 3
# Importance-sampling runs combining external datasets; each entry is
# [dataset name tags, .ini files to include, filter selecting which base runs
# to importance sample].
post_all = [[lensing, BAO, HST, JLA], [lensing, BAOdata, HSTdata, 'JLA_marge.ini'], importanceFilterNotOmegak()]
post_allnonBAO = [[lensing, HST, JLA], [lensing, HSTdata, 'JLA_marge.ini'], importanceFilterBAO()]

# WMAP tau-prior importance run (no filter entry: applies to all base runs).
post_WP = [['WMAPtau'], [WMAPtau]]
# Reionization-prior importance runs; dist_settings presumably imposes a
# lower bound of 6.5 on zrei in the post-processing — TODO confirm against
# zre_importance. Minimization is skipped for these.
post_zre = zre_importance(['zre6p5'], ['zre_prior.ini'], dist_settings={'limits[zrei]': '6.5 N'}, minimize=False)
post_BAOzre = zre_importance([BAO, 'zre6p5'], [BAOdata, 'zre_prior.ini'], dist_settings={'limits[zrei]': '6.5 N'},
                             minimize=False)
post_reion = zre_importance(['reion'], ['reion_tau.ini'], dist_settings={'limits[zrei]': '6.5 N'}, minimize=False)

# post_fix = [[ 'fix'], ['postfix.ini']]

# set up groups of parameters and data sets

groups = []

# Main group with just tau prior
g = batchjob.jobGroup('main')

g.datasets = copy.deepcopy(planck_highL_sets)
for dataset in g.datasets:
    dataset.add(lowTEB)

# One entry per model: [] is the base model, the rest single or paired
# parameter extensions.
g.params = [
    [], ['omegak'], ['mnu'], ['r'], ['nrun', 'r'], ['nnu'],
    ['nrun'], ['Alens'], ['yhe'], ['w'], ['alpha1'],
]
g.importanceRuns = [post_BAO, post_JLA, post_lensing, post_HST,
                    post_all, post_zre]
groups.append(g)

# Polarization group: same pattern as 'main' but over the polarization
# dataset combinations, each with the lowTEB likelihood added.
gpol = batchjob.jobGroup('mainpol')
gpol.datasets = copy.deepcopy(planck_pol_sets)
for d in gpol.datasets:
    d.add(lowTEB)
# Second pass over a fresh copy of the polarization sets (loop body
# continues below this chunk).
for d in copy.deepcopy(planck_pol_sets):
# Example 4
                          dist_settings={'limits[zrei]': '6.5 N'},
                          minimize=False)
# BAO combined with the z_re >= 6.5 reionization prior; no minimization run.
post_BAOzre = zre_importance(
    [BAO, 'zre6p5'],
    [BAOdata, 'zre_prior.ini'],
    dist_settings={'limits[zrei]': '6.5 N'},
    minimize=False,
)
# Importance run with the reionization tau prior; no minimization run.
post_reion = zre_importance(
    ['reion'],
    ['reion_tau.ini'],
    dist_settings={'limits[zrei]': '6.5 N'},
    minimize=False,
)

# post_fix = [[ 'fix'], ['postfix.ini']]

# set up groups of parameters and data sets

groups = []

# Main group with just tau prior
g = batchjob.jobGroup('main')

g.datasets = copy.deepcopy(planck_highL_sets)
for ds in g.datasets:
    ds.add(lowTEB)

# Models to run: [] is the base model, the rest are single or paired
# parameter extensions.
g.params = [[], ['omegak'], ['mnu'], ['r'], ['nrun', 'r'], ['nnu'], ['nrun'],
            ['Alens'], ['yhe'], ['w'], ['alpha1']]
g.importanceRuns = [post_BAO, post_JLA, post_lensing, post_HST, post_all,
                    post_zre]
groups.append(g)

# Polarization group: same pattern as 'main' but over the polarization
# dataset combinations (setup continues below this chunk).
gpol = batchjob.jobGroup('mainpol')
gpol.datasets = copy.deepcopy(planck_pol_sets)
# Example 5
# Importance-sampling run definitions. Each entry is
# [dataset name tags, .ini files to include, filter selecting which base runs
# to importance sample].
post_BAOHSTJLA = [[BAO, JLA, HST], [BAOdata, 'JLA_marge.ini', HSTdata], importanceFilterNotOmegak()]
post_BAOHSTPantheon = [[BAO, Pantheon, HST], [BAOdata, 'Pantheon.ini', HSTdata], importanceFilterNotOmegak()]
post_BAOlensingPantheon = [[BAO, lensing, Pantheon], [BAOdata, 'lensing.ini', 'Pantheon.ini'],
                           importanceFilterNotOmegak()]

post_Pantheon = [[Pantheon], ['Pantheon.ini'], importanceFilterNotOmegak()]

# NOTE(review): unlike the other post_* entries this is a bare tag list with
# no .ini files or filter — confirm downstream code accepts this short form.
post_CookeBBN = ['Cooke17']
post_Aver15 = [['Aver15'], ['Aver15BBN.ini'], importanceFilterNnu()]
post_BBN = [['Cooke17', 'Aver15'], ['Aver15BBN.ini', 'Cooke17BBN.ini'], importanceFilterNnu()]

# set up groups of parameters and data sets

groups = []

# Baseline high-L group; each dataset combination also gets the low-l and
# low-E likelihoods.
g = batchjob.jobGroup('main')
g.datasets = copy.deepcopy(planck_highL_sets)
for ds in g.datasets:
    ds.add(lowl)
    ds.add(lowE, lowEdata)

# Models to run: [] is the base model, the rest single or paired extensions.
g.params = [
    [],
    ['omegak'], ['mnu'], ['r'], ['nrun', 'r'], ['nnu'], ['nrun'],
    ['Alens'], ['yhe'], ['w'], ['alpha1'],
]
g.importanceRuns = [post_BAO, post_lensing, post_lensingBAO, post_HST,
                    post_BBN]
groups.append(g)

# Polarization group: low-E likelihood added to every polarization dataset
# combination; a reduced set of parameter extensions, with BAO importance runs.
gpol = batchjob.jobGroup('mainpol')
gpol.datasets = copy.deepcopy(planck_pol_sets)
for ds in gpol.datasets:
    ds.add(lowE, lowEdata)
gpol.params = [
    [],
    ['mnu'], ['nnu'], ['nrun'], ['Alens'], ['yhe'], ['r'],
]
gpol.importanceRuns = [post_BAO]