Example #1
File: fsig8-z.py  Project: mishakb/ISiTGR
datavar = '_lensing'
root = 'base_plikHM_TTTEEE_lowl_lowE' + datavar
samples = g.sampleAnalyser.samplesForRoot(root)

datasets = ['sdss_6DF_bao.dataset', 'sdss_MGS_bao.dataset', 'sdss_DR14_quasar_bao.dataset']
names = ['6DFGS', 'SDSS\nMGS', 'SDSS quasars']

dataredshifts = np.zeros(len(datasets))
datapoints = np.zeros(len(datasets))
dataerrs = np.zeros(len(datasets))
colors = ['g', 'm', 'r', 'darkred']

for i, dat in enumerate(datasets):
    if '.dataset' in dat:
        ini = inifile.IniFile(batchjob.getCodeRootPath() + 'data/' + dat)
        datapoints[i], dataerrs[i] = [float(f) for f in ini.split('bao_measurement')]
        dataredshifts[i] = ini.float('zeff')
        rescale = ini.float('rs_rescale', 1.)
        tp = ini.string('measurement_type')
        if tp == 'rs_over_DV':
            dataerrs[i] = -0.5 / (datapoints[i] + dataerrs[i]) + 0.5 / (datapoints[i] - dataerrs[i])
            datapoints[i] = 1 / datapoints[i]
        elif tp != 'DV_over_rs':
            raise Exception('error')
        datapoints[i] *= rescale
        dataerrs[i] *= rescale
    print(dataredshifts[i], datapoints[i], dataerrs[i])

DR12 = np.loadtxt(batchjob.getCodeRootPath() + 'data/DR12/final_consensus_results_dM_Hz_fsig.dat',
                  usecols=[0, 1])
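
The rs_over_DV branch above inverts the measurement and propagates its error symmetrically across the ±1σ interval. A minimal standalone sketch of that conversion, using a made-up measurement purely for illustration:

import numpy as np

# Hypothetical rs/DV measurement with a symmetric 1-sigma error (illustration only).
x, sigma = 0.336, 0.015

# DV/rs is the reciprocal; its error is taken as half the spread of the
# reciprocal across the +/- 1 sigma interval, matching the loop above.
dv_over_rs = 1.0 / x
dv_over_rs_err = 0.5 / (x - sigma) - 0.5 / (x + sigma)

print(dv_over_rs, dv_over_rs_err)
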
Example #2
datavar = '_lensing'
samples = g.sampleAnalyser.samplesForRoot('base_plikHM_TTTEEE_lowl_lowE' + datavar)
variant = None  # e.g. to add another set of bands in H(z) 'base_nnu_plikHM_TTTEEE_lowl_lowE'

datasets = ['sdss_6DF_bao.dataset', 'sdss_MGS_bao.dataset', 'sdss_DR14_quasar_bao.dataset']
names = ['6DFGS', 'SDSS\nMGS', 'SDSS quasars']

dataredshifts = np.zeros(len(datasets))
datapoints = np.zeros(len(datasets))
dataerrs = np.zeros(len(datasets))
colors = ['g', 'm', 'r', 'darkred']

for i, dat in enumerate(datasets):
    if '.dataset' in dat:
        ini = inifile.IniFile(batchjob.getCodeRootPath() + 'data/' + dat)
        datapoints[i], dataerrs[i] = [float(f) for f in ini.split('bao_measurement')]
        dataredshifts[i] = ini.float('zeff')
        rescale = ini.float('rs_rescale', 1.)
        tp = ini.string('measurement_type')
        if tp == 'rs_over_DV':
            dataerrs[i] = -0.5 / (datapoints[i] + dataerrs[i]) + 0.5 / (datapoints[i] - dataerrs[i])
            datapoints[i] = 1 / datapoints[i]
        elif tp != 'DV_over_rs':
            raise Exception('error')
        datapoints[i] *= rescale
        dataerrs[i] *= rescale
    print(dataredshifts[i], datapoints[i], dataerrs[i])


def GetBackgroundFuncs(samples):
Example #3
    FAPv = np.arange(0.56, 0.78, 0.003)
    f8v = np.arange(0.28, 0.63, 0.003)

    FAP, f8 = np.meshgrid(FAPv, f8v)
    like = (FAP - FAPbar) ** 2 * invcov[0, 0] + 2 * (FAP - FAPbar) * (f8 - f8bar) * invcov[0, 1] + (f8 - f8bar) ** 2 * invcov[1, 1]

    density = Density2D(FAPv, f8v, exp(-like / 2))
    density.contours = exp(-np.array([1.509, 2.4477]) ** 2 / 2)
    return density



FAPbar = 0.6725
f8bar = 0.4412
density = RSDdensity(FAPbar, f8bar, batchjob.getCodeRootPath() + 'data/sdss_DR11CMASS_RSD_bao_invcov_Samushia.txt')
g.add_2d_contours(roots[0], 'FAP057', 'fsigma8z057', filled=True, density=density)


# CS = contourf(FAP, f8, like, origin='lower', levels=[2.279, 5.991], colors='r')


FAPbar = .683
f8bar = 0.422
density = RSDdensity(FAPbar, f8bar, batchjob.getCodeRootPath() + 'data/sdss_DR11CMASS_RSD_bao_invcov_Beutler.txt')
g.add_2d_contours(roots[0], 'FAP057', 'fsigma8z057', filled=False, density=density, ls=':', alpha=0.5)

g.add_2d_contours(roots[0], 'FAP057', 'fsigma8z057', filled=True, plotno=3)


g.add_legend(['BOSS CMASS (Samushia et al.)', 'BOSS CMASS (Beutler et al.)', s.defplanck + '+lensing'], legend_loc='upper left')
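
The contour levels set on the densities above, exp(-[1.509, 2.4477]**2 / 2), are the probability thresholds enclosing 68% and 95% of a 2D Gaussian (Δχ² ≈ 2.28 and 5.99). A minimal sketch of constructing such a density on a grid, assuming Density2D is the class from getdist.densities (toy grid and unit covariance, purely illustrative):

import numpy as np
from getdist.densities import Density2D

# Toy 2D Gaussian likelihood on a regular grid (unit variance, no correlation).
xv = np.linspace(-3, 3, 201)
yv = np.linspace(-3, 3, 201)
X, Y = np.meshgrid(xv, yv)
chi2 = X ** 2 + Y ** 2

density = Density2D(xv, yv, np.exp(-chi2 / 2))
# Density thresholds for the 68% and 95% confidence regions of a 2D Gaussian.
density.contours = np.exp(-np.array([1.509, 2.4477]) ** 2 / 2)
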
Example #4
WP = r'\textit{Planck}+WP'
WPhighL = r'\textit{Planck}+WP+highL'
NoLowL = r'\textit{Planck}$-$lowL'
lensonly = 'lensing'
HST = r'$H_0$'
BAO = 'BAO'

LCDM = r'$\Lambda$CDM'

s = copy.copy(plots.defaultSettings)
s.legend_frame = False
s.figure_legend_frame = False
s.prob_label = r'$P/P_{\rm max}$'
s.norm_prob_label = 'Probability density'
s.prob_y_ticks = True
s.param_names_for_labels = os.path.join(batchjob.getCodeRootPath(),
                                        'clik_units.paramnames')
s.alpha_filled_add = 0.85
s.solid_contour_palefactor = 0.6

s.solid_colors = [('#8CD3F5', '#006FED'), ('#F7BAA6', '#E03424'),
                  ('#D1D1D1', '#A1A1A1'), 'g', 'cadetblue', 'olive',
                  'darkcyan']
s.axis_marker_lw = 0.6
s.lw_contour = 1

s.param_names_for_labels = os.path.normpath(
    os.path.join(os.path.dirname(__file__), '..', 'clik_latex.paramnames'))

use_plot_data = getdist.use_plot_data
rootdir = getdist.default_grid_root or os.path.join(batchjob.getCodeRootPath(),
Example #5
    f8v = np.arange(0.28, 0.63, 0.003)

    FAP, f8 = np.meshgrid(FAPv, f8v)
    like = (FAP - FAPbar)**2 * invcov[0, 0] + 2 * (FAP - FAPbar) * (
        f8 - f8bar) * invcov[0, 1] + (f8 - f8bar)**2 * invcov[1, 1]

    density = Density2D(FAPv, f8v, exp(-like / 2))
    density.contours = exp(-np.array([1.509, 2.4477])**2 / 2)
    return density


FAPbar = 0.6725
f8bar = 0.4412
density = RSDdensity(
    FAPbar, f8bar,
    batchjob.getCodeRootPath() +
    'data/sdss_DR11CMASS_RSD_bao_invcov_Samushia.txt')
g.add_2d_contours(roots[0],
                  'FAP057',
                  'fsigma8z057',
                  filled=True,
                  density=density)

# CS = contourf(FAP, f8, like, origin='lower', levels=[2.279, 5.991], colors='r')

FAPbar = .683
f8bar = 0.422
density = RSDdensity(
    FAPbar, f8bar,
    batchjob.getCodeRootPath() +
    'data/sdss_DR11CMASS_RSD_bao_invcov_Beutler.txt')
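
The like expression in these snippets writes out the χ² of a 2D Gaussian explicitly from the inverse covariance. An equivalent, more compact form of the same quadratic r·C⁻¹·r over the grid (a sketch; the inverse covariance here is a toy stand-in for the matrix loaded from file):

import numpy as np

# Toy inverse covariance, standing in for the file-based invcov used above.
invcov = np.array([[150.0, -60.0],
                   [-60.0, 400.0]])
FAPbar, f8bar = 0.6725, 0.4412

FAPv = np.arange(0.56, 0.78, 0.003)
f8v = np.arange(0.28, 0.63, 0.003)
FAP, f8 = np.meshgrid(FAPv, f8v)

# Stack the residuals and contract with the inverse covariance at every grid point.
r = np.stack([FAP - FAPbar, f8 - f8bar], axis=-1)
chi2 = np.einsum('...i,ij,...j->...', r, invcov, r)
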
Example #6
samples = g.sampleAnalyser.samplesForRoot(root)

datasets = [
    'sdss_6DF_bao.dataset', 'sdss_MGS_bao.dataset',
    'sdss_DR14_quasar_bao.dataset'
]
names = ['6DFGS', 'SDSS\nMGS', 'SDSS quasars']

dataredshifts = np.zeros(len(datasets))
datapoints = np.zeros(len(datasets))
dataerrs = np.zeros(len(datasets))
colors = ['g', 'm', 'r', 'darkred']

for i, dat in enumerate(datasets):
    if '.dataset' in dat:
        ini = inifile.IniFile(batchjob.getCodeRootPath() + 'data/' + dat)
        datapoints[i], dataerrs[i] = [
            float(f) for f in ini.split('bao_measurement')
        ]
        dataredshifts[i] = ini.float('zeff')
        rescale = ini.float('rs_rescale', 1.)
        tp = ini.string('measurement_type')
        if tp == 'rs_over_DV':
            dataerrs[i] = -0.5 / (datapoints[i] + dataerrs[i]) + 0.5 / (
                datapoints[i] - dataerrs[i])
            datapoints[i] = 1 / datapoints[i]
        elif tp != 'DV_over_rs':
            raise Exception('error')
        datapoints[i] *= rescale
        dataerrs[i] *= rescale
    print(dataredshifts[i], datapoints[i], dataerrs[i])
Example #7
import planckStyle as s
from paramgrid import batchjob
import GetDistPlots
import pylab as plt
import numpy as np

g = s.getSubplotPlotter(subplot_size=4)

rd_fid = 147.78

cov = np.loadtxt(batchjob.getCodeRootPath() + 'data/DR12/BAO_consensus_covtot_dM_Hz.txt')

pts = np.loadtxt(batchjob.getCodeRootPath() + 'data/DR12/sdss_DR12Consensus_bao.dat', usecols=[0, 1])

pnames = ['DM038', 'Hubble038', 'DM051', 'Hubble051', 'DM061', 'Hubble061']
redshifts = pts[:, 0]
data = pts[:, 1]
planckmeans = []


def BAOdensity(p1, p2, marge=True):
    err = np.sqrt(cov[p1, p1])
    DAv = np.arange(data[p1] - 4 * err, data[p1] + 4 * err, 4)
    err = np.sqrt(cov[p2, p2])
    Hv = np.arange(data[p2] - 4 * err, data[p2] + 4 * err, 0.3)
    DA, H = np.meshgrid(DAv, Hv)
    v1 = data[p1]
    v2 = data[p2]
    if marge:
        mcov = cov[np.ix_([p1, p2], [p1, p2])]
        invcov = np.linalg.inv(mcov)
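
The marge branch above pulls the 2×2 sub-covariance for the two plotted quantities out of the full consensus covariance with np.ix_ and inverts it. A self-contained sketch of that step on a toy 3-parameter covariance (numbers are illustrative only):

import numpy as np

# Toy 3-parameter covariance matrix.
cov = np.array([[4.0, 1.0, 0.5],
                [1.0, 9.0, 2.0],
                [0.5, 2.0, 16.0]])

p1, p2 = 0, 2
# np.ix_ selects the rows and columns of the two kept parameters,
# i.e. the marginalized 2x2 block of the covariance.
mcov = cov[np.ix_([p1, p2], [p1, p2])]
invcov = np.linalg.inv(mcov)

# chi^2 for an offset (d1, d2) from the measured values.
d = np.array([1.0, -2.0])
chi2 = d @ invcov @ d
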
Example #8
variant = None  # e.g. to add another set of bands in H(z) 'base_nnu_plikHM_TTTEEE_lowl_lowE'

datasets = [
    'sdss_6DF_bao.dataset', 'sdss_MGS_bao.dataset',
    'sdss_DR14_quasar_bao.dataset'
]
names = ['6DFGS', 'SDSS\nMGS', 'SDSS quasars']

dataredshifts = np.zeros(len(datasets))
datapoints = np.zeros(len(datasets))
dataerrs = np.zeros(len(datasets))
colors = ['g', 'm', 'r', 'darkred']

for i, dat in enumerate(datasets):
    if '.dataset' in dat:
        ini = inifile.IniFile(batchjob.getCodeRootPath() + 'data/' + dat)
        datapoints[i], dataerrs[i] = [
            float(f) for f in ini.split('bao_measurement')
        ]
        dataredshifts[i] = ini.float('zeff')
        rescale = ini.float('rs_rescale', 1.)
        tp = ini.string('measurement_type')
        if tp == 'rs_over_DV':
            dataerrs[i] = -0.5 / (datapoints[i] + dataerrs[i]) + 0.5 / (
                datapoints[i] - dataerrs[i])
            datapoints[i] = 1 / datapoints[i]
        elif tp != 'DV_over_rs':
            raise Exception('error')
        datapoints[i] *= rescale
        dataerrs[i] *= rescale
    print(dataredshifts[i], datapoints[i], dataerrs[i])
Example #9
WP = r'\textit{Planck}+WP'
WPhighL = r'\textit{Planck}+WP+highL'
NoLowL = r'\textit{Planck}$-$lowL'
lensonly = 'lensing'
HST = r'$H_0$'
BAO = 'BAO'

LCDM = r'$\Lambda$CDM'

s = copy.copy(plots.defaultSettings)
s.legend_frame = False
s.figure_legend_frame = False
s.prob_label = r'$P/P_{\rm max}$'
s.norm_prob_label = 'Probability density'
s.prob_y_ticks = True
s.param_names_for_labels = os.path.join(batchjob.getCodeRootPath(), 'clik_units.paramnames')
s.alpha_filled_add = 0.85
s.solid_contour_palefactor = 0.6

s.solid_colors = [('#8CD3F5', '#006FED'), ('#F7BAA6', '#E03424'), ('#D1D1D1', '#A1A1A1'), 'g', 'cadetblue', 'olive',
                  'darkcyan']
s.axis_marker_lw = 0.6
s.lw_contour = 1

s.param_names_for_labels = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', 'clik_latex.paramnames'))

use_plot_data = getdist.use_plot_data
rootdir = getdist.default_grid_root or os.path.join(batchjob.getCodeRootPath(), 'main')
output_base_dir = getdist.output_base_dir or batchjob.getCodeRootPath()

H0_gpe = [70.6, 3.3]
Example #10
    d = loadtxt(prob_file)
    ix = 0
    prob = np.zeros((alpha_npoints, alpha_npoints))
    alpha_perp = np.zeros(alpha_npoints)
    alpha_pl = np.zeros(alpha_npoints)
    for i in range(alpha_npoints):
        for j in range(alpha_npoints):
            alpha_perp[i] = d[ix, 0]
            alpha_pl[j] = d[ix, 1]
            prob[j, i] = d[ix, 2]
            ix += 1
    prob = prob / np.max(prob)
    return alpha_perp, alpha_pl, prob


alpha_perp, alpha_pl, prob = BAOdensity(batchjob.getCodeRootPath() + 'data/sdss_DR11CMASS_consensus.dat')

densityG = BAOdensityG()

perp = alpha_perp * DA_fid
para = H_fid / alpha_pl

density = GetDistPlots.Density2D(perp, para, prob)
density.contours = exp(-np.array([1.509, 2.4477]) ** 2 / 2)

root = 'base_plikHM_TT_lowTEB_lensing'

c = 29979.2458


def makeNew(samples):
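
In this example alpha_perp rescales the fiducial transverse distance and alpha_pl rescales the inverse of the fiducial expansion rate, which is what the perp/para lines above compute (ignoring the sound-horizon rescaling, as the snippet does). A minimal sketch with hypothetical fiducial values:

import numpy as np

# Hypothetical fiducial values at the effective redshift (illustration only).
DA_fid = 1408.0  # Mpc
H_fid = 93.0     # km/s/Mpc

alpha_perp = np.array([0.98, 1.00, 1.02])
alpha_pl = np.array([0.97, 1.00, 1.03])

DA = alpha_perp * DA_fid  # transverse dilation rescales the fiducial D_A
H = H_fid / alpha_pl      # radial dilation rescales the inverse of the fiducial H
print(DA, H)
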
Example #11
import planckStyle as s
from paramgrid import batchjob
import GetDistPlots
import pylab as plt
import numpy as np

g = s.getSubplotPlotter(subplot_size=4)

rd_fid = 147.78

FAP = True
if FAP:
    cov = np.loadtxt(batchjob.getCodeRootPath() + 'data/DR12/final_consensus_covtot_dV_FAP_fsig.txt')
    pts = np.loadtxt(batchjob.getCodeRootPath() + 'data/DR12/final_consensus_results_dV_FAP_fsig.dat', usecols=[0, 1])
else:
    cov = np.loadtxt(batchjob.getCodeRootPath() + 'data/DR12/final_consensus_covtot_dM_Hz_fsig.txt')
    pts = np.loadtxt(batchjob.getCodeRootPath() + 'data/DR12/sdss_DR12Consensus_final.dat', usecols=[0, 1])

pnames = ['DM038', 'Hubble038', 'fsigma8z038', 'DM051', 'Hubble051', 'fsigma8z051', 'DM061', 'Hubble061', 'fsigma8z061']
redshifts = pts[:, 0]
data = pts[:, 1]
planckmeans = []


def BAOdensity(p1, p2, marge=True):
    err = np.sqrt(cov[p1, p1])
    DAv = np.arange(data[p1] - 4 * err, data[p1] + 4 * err, err / 100)
    err = np.sqrt(cov[p2, p2])
    Hv = np.arange(data[p2] - 4 * err, data[p2] + 4 * err, err / 100)
    DA, H = np.meshgrid(DAv, Hv)
    if marge:
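
BAOdensity in this example evaluates the likelihood on regular grids spanning roughly ±4σ around each measured value, with a step of err/100. A minimal standalone sketch of that grid construction (toy numbers only):

import numpy as np

value, err = 1512.4, 25.0  # toy measurement and 1-sigma error
grid = np.arange(value - 4 * err, value + 4 * err, err / 100)
print(grid[0], grid[-1], len(grid))  # 800 points across roughly +/- 4 sigma
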
Example #12
datavar = '_lensing'
root = 'base_plikHM_TTTEEE_lowl_lowE' + datavar
samples = g.sampleAnalyser.samplesForRoot(root)

datasets = ['sdss_6DF_bao.dataset', 'sdss_MGS_bao.dataset', 'sdss_DR14_quasar_bao.dataset']
names = ['6DFGS', 'SDSS\nMGS', 'SDSS quasars']

dataredshifts = np.zeros(len(datasets))
datapoints = np.zeros(len(datasets))
dataerrs = np.zeros(len(datasets))
colors = ['g', 'm', 'r', 'darkred']

for i, dat in enumerate(datasets):
    if '.dataset' in dat:
        ini = inifile.IniFile(batchjob.getCodeRootPath() + 'data/' + dat)
        datapoints[i], dataerrs[i] = [float(f) for f in ini.split('bao_measurement')]
        dataredshifts[i] = ini.float('zeff')
        rescale = ini.float('rs_rescale', 1.)
        tp = ini.string('measurement_type')
        if tp == 'rs_over_DV':
            dataerrs[i] = -0.5 / (datapoints[i] + dataerrs[i]) + 0.5 / (datapoints[i] - dataerrs[i])
            datapoints[i] = 1 / datapoints[i]
        elif tp != 'DV_over_rs':
            raise Exception('error')
        datapoints[i] *= rescale
        dataerrs[i] *= rescale
    print(dataredshifts[i], datapoints[i], dataerrs[i])

DR12 = np.loadtxt(batchjob.getCodeRootPath() + 'data/DR12/final_consensus_results_dM_Hz_fsig.dat',
                  usecols=[0, 1])
Example #13
import planckStyle as s
from paramgrid import batchjob
import GetDistPlots
import pylab as plt
import numpy as np

g = s.getSubplotPlotter(subplot_size=4)

rd_fid = 147.78

FAP = True
if FAP:
    cov = np.loadtxt(batchjob.getCodeRootPath() +
                     'data/DR12/final_consensus_covtot_dV_FAP_fsig.txt')
    pts = np.loadtxt(batchjob.getCodeRootPath() +
                     'data/DR12/final_consensus_results_dV_FAP_fsig.dat',
                     usecols=[0, 1])
else:
    cov = np.loadtxt(batchjob.getCodeRootPath() +
                     'data/DR12/final_consensus_covtot_dM_Hz_fsig.txt')
    pts = np.loadtxt(batchjob.getCodeRootPath() +
                     'data/DR12/sdss_DR12Consensus_final.dat',
                     usecols=[0, 1])

pnames = [
    'DM038', 'Hubble038', 'fsigma8z038', 'DM051', 'Hubble051', 'fsigma8z051',
    'DM061', 'Hubble061', 'fsigma8z061'
]
redshifts = pts[:, 0]
data = pts[:, 1]
planckmeans = []