Example #1
import pdb

import numpy as np
from astropy.table import Table

import pyigm
# Import path for FNConstraint is assumed here; adjust to the local pyigm layout if needed
from pyigm.fN.constraints import FNConstraint

pyigm_path = pyigm.__path__[0]  # package directory holding the data/ files used below


def set_fn_data(flg=2, sources=None, extra_fNc=[]):
    """ Load up f(N) data

    Parameters
    ----------
    flg : int, optional
      2 : z~2 constraints
      5 : z~5 constraints
    sources : list, optional
      References for constraints
    extra_fNc : list, optional
      Additional, user-supplied f(N) constraint files (not yet implemented)

    Returns
    -------
    fN_cs : list of FNConstraint objects
    """
    fN_cs = []
    if flg == 2:
        if sources is None:
            sources = ['OPB07', 'OPW12', 'OPW13', 'K05', 'K13R13', 'N12']

        fn_file = pyigm_path + '/data/fN/fn_constraints_z2.5_vanilla.fits'
        k13r13_file = pyigm_path + '/data/fN/fn_constraints_K13R13_vanilla.fits'
        n12_file = pyigm_path + '/data/fN/fn_constraints_N12_vanilla.fits'
        all_fN_cs = FNConstraint.from_fitsfile(
            [fn_file, k13r13_file, n12_file])

        # Add on, e.g. user-supplied
        if len(extra_fNc) > 0:
            raise IOError("NOT READY FOR THIS YET")
            #for src in extra_fNc:
            #	all_fN_cs.append(xifd.fN_data_from_ascii_file(os.path.abspath(src)))

        # Track which requested sources get used, without modifying the caller's list
        chk_sources = sources[:]

        # Include good data sources
        for fN_c in all_fN_cs:
            # In list?
            if fN_c.ref in sources:
                print('Using {:s} as a constraint'.format(fN_c.ref))
                # Append
                fN_cs.append(fN_c)
                # Pop the reference off the check list (one source may supply several constraints)
                try:
                    idx = chk_sources.index(fN_c.ref)
                except ValueError:
                    pass
                else:
                    chk_sources.pop(idx)

        # Check that all the desired sources were used
        if len(chk_sources) > 0:
            pdb.set_trace()
    elif flg == 5:
        if sources is None:
            sources = ['Worseck+14', 'Crighton+15', 'Crighton+16', 'Becker+13']
        chk_sources = sources[:]  # copy, so the caller's list is left untouched

        #all_fN_cs = FNConstraint.from_fitsfile([fn_file,k13r13_file,n12_file])

        # Mean free path to ionizing photons (Worseck+14); MFP and its 1-sigma error in proper Mpc
        fN_MFPa = FNConstraint('MFP',
                               4.56,
                               ref='Worseck+14',
                               flavor='\\lmfp',
                               data=dict(MFP=22.2, SIG_MFP=2.3,
                                         COSM='VANILLA'))
        fN_MFPb = FNConstraint('MFP',
                               4.86,
                               ref='Worseck+14',
                               flavor='\\lmfp',
                               data=dict(MFP=15.1, SIG_MFP=1.8,
                                         COSM='VANILLA'))
        fN_MFPc = FNConstraint('MFP',
                               5.16,
                               ref='Worseck+14',
                               flavor='\\lmfp',
                               data=dict(MFP=10.3, SIG_MFP=1.6,
                                         COSM='VANILLA'))
        fN_MFP = [fN_MFPa, fN_MFPb, fN_MFPc]
        # LLS incidence l(X) for systems with tau_LL >= TAU_LIM (Crighton+16)
        fN_LLSa = FNConstraint('LLS',
                               np.mean([3.75, 4.40]),
                               ref='Crighton+16',
                               flavor='\\tlox',
                               data=dict(LX=0.628,
                                         SIG_LX=0.095,
                                         TAU_LIM=2.,
                                         COSM='VANILLA'))
        fN_LLSb = FNConstraint('LLS',
                               np.mean([4.40, 4.70]),
                               ref='Crighton+16',
                               flavor='\\tlox',
                               data=dict(LX=0.601,
                                         SIG_LX=0.102,
                                         TAU_LIM=2.,
                                         COSM='VANILLA'))
        fN_LLSc = FNConstraint('LLS',
                               np.mean([4.70, 5.40]),
                               ref='Crighton+16',
                               flavor='\\tlox',
                               data=dict(LX=0.928,
                                         SIG_LX=0.151,
                                         TAU_LIM=2.,
                                         COSM='VANILLA'))
        fN_LLS = [fN_LLSa, fN_LLSb, fN_LLSc]
        # DLA incidence (Crighton+15)
        # DLA threshold N_HI = 10^20.3 expressed as a Lyman-limit optical depth,
        # using tau_LL = 1 at N_HI ~ 10^17.19
        tau_lim = 10.**(20.3 - 17.19)
        fN_DLAa = FNConstraint('DLA',
                               np.mean([3.56, 4.45]),
                               ref='Crighton+15',
                               flavor='\\tdlox',
                               data=dict(LX=0.059,
                                         SIG_LX=0.018,
                                         COSM='VANILLA',
                                         TAU_LIM=tau_lim))
        fN_DLAb = FNConstraint('DLA',
                               np.mean([4.45, 5.31]),
                               ref='Crighton+15',
                               flavor='\\tdlox',
                               data=dict(LX=0.095,
                                         SIG_LX=0.022,
                                         COSM='VANILLA',
                                         TAU_LIM=tau_lim))
        fN_DLAc = FNConstraint(
            'fN',
            np.mean([3.6, 5.2]),
            ref='Crighton+15',
            flavor='f(N)',
            data=dict(COSM='VANILLA',
                      NPT=5,
                      FN=np.array([
                          -22.1247392, -22.12588672, -22.51361414, -22.7732822,
                          -23.76709909
                      ]),
                      SIG_FN=np.array([[
                          0.24127323, 0.17599877, 0.17613792, 0.14095363,
                          0.30129492
                      ],
                                       [
                                           0.21437162, 0.15275017, 0.12551036,
                                           0.12963855, 0.17654378
                                       ]]),
                      BINS=np.array([[20.175, 20.425, 20.675, 20.925, 21.05],
                                     [20.675, 20.925, 21.175, 21.425,
                                      22.05]])))

        fN_DLA = [fN_DLAa, fN_DLAb, fN_DLAc]
        # Lya effective optical depth tau_eff (Becker+13, their Table 2)
        b13_tab2 = Table.read(pyigm.__path__[0] +
                              '/data/teff/becker13_tab2.dat',
                              format='ascii')
        fN_teff = []
        for row in b13_tab2:
            if row['z'] < 4.:
                continue
            # tau_eff = -ln(F); the error follows from propagating sigma_F through the log
            teff = -1 * np.log(row['F'])
            sigteff = row['s(F)'] / row['F']
            # Generate
            fN = FNConstraint('teff',
                              row['z'],
                              ref='Becker+13',
                              flavor='\\tlya',
                              data=dict(Z_TEFF=row['z'],
                                        TEFF=teff,
                                        SIG_TEFF=sigteff,
                                        COSM='N/A',
                                        NHI_MNX=[11., 22.]))
            # Append
            fN_teff.append(fN)
        # Collate
        all_fN_cs = fN_MFP + fN_DLA + fN_teff + fN_LLS

        # Include good data sources
        for fN_c in all_fN_cs:
            # In list?
            if fN_c.ref in sources:
                print('Using {:s} as a constraint'.format(fN_c.ref))
                # Append
                fN_cs.append(fN_c)
                # Pop
                try:
                    idx = chk_sources.index(fN_c.ref)
                except ValueError:
                    pass
                else:
                    chk_sources.pop(idx)

        # Check that all the desired sources were used
        if len(chk_sources) > 0:
            pdb.set_trace()

    return fN_cs
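A minimal usage sketch, assuming the pyigm data files are installed alongside the package; it loads the z~5 compilation and prints a one-line summary of each constraint, using only attributes that appear above (fN_dtype, ref):

fN_data = set_fn_data(flg=5)
print('Loaded {:d} constraints'.format(len(fN_data)))
for fN_c in fN_data:
    print(fN_c.fN_dtype, fN_c.ref)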
Example #2
# Import path assumed, as in Example #1
from pyigm.fN.constraints import FNConstraint


def test_init():
    # A bare constraint only needs its data-type tag, stored as fN_dtype
    tst = FNConstraint('fN')
    assert tst.fN_dtype == 'fN'
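A hedged extension of this test, reusing only the constructor pattern and attributes shown in Example #1 (the MFP numbers are copied from the Worseck+14 entry there); the test name test_init_mfp is illustrative:

def test_init_mfp():
    # Build one of the z~5 MFP constraints and check the stored metadata
    mfp = FNConstraint('MFP', 4.56, ref='Worseck+14', flavor='\\lmfp',
                       data=dict(MFP=22.2, SIG_MFP=2.3, COSM='VANILLA'))
    assert mfp.fN_dtype == 'MFP'
    assert mfp.ref == 'Worseck+14'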