Example #1
def GetBackgroundFuncs(samples):
    ixs = samples.randomSingleSamples_indices()[::40]
    DMs = np.zeros((len(ixs), len(redshifts)))
    Hs = np.zeros(DMs.shape)
    rsDV = np.zeros(DMs.shape)
    camb.set_z_outputs(redshifts)
    for i, ix in enumerate(ixs):
        print(i, ix)
        dic = samples.getParamSampleDict(ix)
        pars = get_camb_params(dic)
        results = camb.get_background(pars)
        bao = results.get_background_outputs()
        rsDV[i, :] = 1 / bao[:, 0]
        DMs[i, :] = bao[:, 2] * (1 + reds)  # D_M = (1+z)*D_A; 'reds' is presumably np.asarray(redshifts), defined outside this excerpt
        Hs[i, :] = bao[:, 1]

    Hmeans = np.zeros(len(redshifts))
    Herrs = np.zeros(len(redshifts))
    DMmeans = np.zeros(len(redshifts))
    DMerrs = np.zeros(len(redshifts))
    for i, z in enumerate(redshifts):
        Hmeans[i] = np.mean(Hs[:, i]) / (1 + z)
        Herrs[i] = np.std(Hs[:, i]) / (1 + z)
        DMmeans[i] = np.mean(DMs[:, i])
        DMerrs[i] = np.std(DMs[:, i])

    Hinterp = UnivariateSpline([0] + redshifts, [samples.mean('H0')] + list(Hmeans), s=0)
    DMinterp = UnivariateSpline([0] + redshifts, [0] + list(DMmeans), s=0)
    Herrinterp = UnivariateSpline([0] + redshifts, [samples.std('H0')] + list(Herrs), s=0)
    DMerrinterp = UnivariateSpline([0] + redshifts, [0] + list(DMerrs), s=0)
    return Hinterp, Herrinterp, DMinterp, DMerrinterp, rsDV
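
The splines returned above interpolate the sample-mean and sample-scatter of H(z)/(1+z) and D_M(z), anchored at z = 0 by the chain's H0. A minimal usage sketch, assuming samples and the global redshifts list are set up elsewhere in the script, as in the excerpt:

# Usage sketch (assumes `samples` and the global `redshifts` list exist, as above).
Hinterp, Herrinterp, DMinterp, DMerrinterp, rsDV = GetBackgroundFuncs(samples)
z = 0.5
print('H(z)/(1+z) at z=%g: %g +/- %g' % (z, float(Hinterp(z)), float(Herrinterp(z))))
print('D_M(z) at z=%g: %g +/- %g Mpc' % (z, float(DMinterp(z)), float(DMerrinterp(z))))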
Example #2
def get_theory_for_params(self, paramdic, camb_pars=None, camb_results=None):
    if camb_pars is None:
        from cosmomc_to_camb import get_camb_params
        camb_pars = get_camb_params(paramdic)
    if camb_results is not None:
        results, PKdelta, PKWeyl = camb_results
    else:
        results, PKdelta, PKWeyl = self.get_camb_theory(camb_pars)
    wl_photoz_errors = [paramdic['DES_DzS1'], paramdic['DES_DzS2'],
                        paramdic['DES_DzS3'], paramdic['DES_DzS4']]
    lens_photoz_errors = [paramdic['DES_DzL1'], paramdic['DES_DzL2'], paramdic['DES_DzL3'],
                          paramdic['DES_DzL4'], paramdic['DES_DzL5']]
    bin_bias = [paramdic['DES_b1'], paramdic['DES_b2'], paramdic['DES_b3'],
                paramdic['DES_b4'], paramdic['DES_b5']]
    shear_calibration_parameters = [paramdic['DES_m1'], paramdic['DES_m2'],
                                    paramdic['DES_m3'], paramdic['DES_m4']]
    return self.get_theory(camb_pars, results, PKdelta, PKWeyl, bin_bias=bin_bias,
                           wl_photoz_errors=wl_photoz_errors,
                           lens_photoz_errors=lens_photoz_errors,
                           shear_calibration_parameters=shear_calibration_parameters,
                           intrinsic_alignment_A=paramdic['DES_AIA'],
                           intrinsic_alignment_alpha=paramdic['DES_alphaIA'],
                           intrinsic_alignment_z0=0.62)
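
The repeated paramdic lookups above just gather the DES nuisance parameters by their key names. A hypothetical helper (not part of the original class) showing the same key pattern more compactly:

def des_nuisance_lists(paramdic):
    # Same DES_* key naming convention as used in get_theory_for_params above.
    wl_photoz_errors = [paramdic['DES_DzS%s' % i] for i in range(1, 5)]
    lens_photoz_errors = [paramdic['DES_DzL%s' % i] for i in range(1, 6)]
    bin_bias = [paramdic['DES_b%s' % i] for i in range(1, 6)]
    shear_calibration_parameters = [paramdic['DES_m%s' % i] for i in range(1, 5)]
    return wl_photoz_errors, lens_photoz_errors, bin_bias, shear_calibration_parameters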
Example #3
JLA = SN.SN_likelihood(os.path.join(os.path.dirname(__file__), r'../../data/jla.dataset'), marginalize=False)

common = []
for name in like.names:
    common.append(name in JLA.names or 'SDSS' + name in JLA.names or 'sn' + name in JLA.names)
common = np.array(common, dtype=bool)
print(like.nsn, np.sum(common), like.nsn - np.sum(common))

redshifts = np.logspace(-2, 1, 1000)
samples = g.sampleAnalyser.samplesForRoot('base_plikHM_TTTEEE_lowl_lowE_lensing')
ixs = samples.randomSingleSamples_indices()
dists = np.zeros((len(ixs), len(redshifts)))
sndists = np.zeros((len(ixs), like.nsn))
for i, ix in enumerate(ixs):
    dic = samples.getParamSampleDict(ix)
    camb_pars = get_camb_params(dic)
    results = camb.get_background(camb_pars)
    dists[i, :] = 5 * np.log10((1 + redshifts) ** 2 * results.angular_diameter_distance(redshifts))
    sndists[i, :] = 5 * np.log10((1 + like.zcmb) ** 2 * results.angular_diameter_distance(like.zcmb))

paramdic = g.bestfit('base_plikHM_TTTEEE_lowl_lowE_lensing').getParamDict()
camb_pars = get_camb_params(paramdic)
results = camb.get_background(camb_pars)

invvars = 1.0 / like.pre_vars
wtval = np.sum(invvars)

offset = 5 * np.log10(1e-5)
lumdists = 5 * np.log10((1 + like.zcmb) ** 2 * results.angular_diameter_distance(like.zcmb))

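Since 10 pc = 1e-5 Mpc, subtracting offset (= 5*log10(1e-5)) converts the 5*log10(d_L in Mpc) values above into absolute distance moduli. A short sketch reusing the arrays already built:

# Sketch: distance moduli and the Planck-sample scatter at each SN redshift (like.zcmb).
mu_bestfit = lumdists - offset      # best-fit mu = 5*log10(d_L / 10 pc)
mu_samples = sndists - offset       # one row per posterior sample
mu_scatter = mu_samples.std(axis=0)
print(mu_scatter.min(), mu_scatter.max())
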
Example #4
    samples.jobItem.chainRoot + '_1.txt')) + tuple(redshifts)))
                         + '.fsig_evolve')
if os.path.isfile(cachename):
    with open(cachename, 'rb') as inp:
        ixs, f8s, Hs, DMs, FAPs = pickle.load(inp)
else:
    camb.set_z_outputs(redshifts)
    ixs = samples.randomSingleSamples_indices()[::4]
    DMs = np.zeros((len(ixs), len(redshifts)))
    Hs = np.zeros(DMs.shape)
    FAPs = np.zeros(DMs.shape)
    f8s = np.zeros(DMs.shape)
    for i, ix in enumerate(ixs):
        print(i, ix)
        dic = samples.getParamSampleDict(ix)
        pars = get_camb_params(dic)
        pars.set_matter_power(redshifts, kmax=2)
        results = camb.get_results(pars)
        bao = results.get_background_outputs()
        DMs[i, :] = bao[:, 2] * (1 + reds)  # D_M = (1+z)*D_A; 'reds' is presumably np.asarray(redshifts), defined outside this excerpt
        Hs[i, :] = bao[:, 1]
        FAPs[i, :] = bao[:, 3]
        f8s[i, :] = results.get_fsigma8()[::-1]
        assert (abs(dic['fsigma8z038'] / f8s[i, redshifts.index(0.38)] - 1) < 0.001)

    with open(cachename, 'wb') as output:
        pickle.dump([ixs, f8s, Hs, DMs, FAPs], output, pickle.HIGHEST_PROTOCOL)

fsigmeans = np.zeros(len(redshifts))
fsigerrs = np.zeros(len(redshifts))
FAPmeans = np.zeros(len(redshifts))
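
The excerpt stops while the per-redshift statistics are being set up. A continuation sketch, following the pattern of Example #1 rather than the original source:

# Continuation sketch (assumed): per-redshift means and scatters over the samples.
FAPerrs = np.zeros(len(redshifts))
for i, z in enumerate(redshifts):
    fsigmeans[i] = np.mean(f8s[:, i])
    fsigerrs[i] = np.std(f8s[:, i])
    FAPmeans[i] = np.mean(FAPs[:, i])
    FAPerrs[i] = np.std(FAPs[:, i])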