Example 1
# Imports assumed by this snippet
import numpy as np
from astropy.table import Table, Column
from dsigma import helpers


def prepare_random_catalog(randoms,
                           z='redshift',
                           w_sys=None,
                           size=250000,
                           z_low=0.18,
                           z_upp=0.52):
    """Pre-compute the DeltaSigma profile for a downsampled random objects."""
    # Randomly select a small sample of random objects
    random_use = np.random.choice(randoms, size=size, replace=False)

    # Assign a uniform distribution of redshift
    random_use[z] = np.random.uniform(low=z_low, high=z_upp, size=size)
    random_use = Table(random_use)

    # Add a placeholder systematic weight column if none is provided
    if w_sys is None:
        random_use.add_column(Column(name='w_sys', data=np.ones(size)))

    random_pre = helpers.dsigma_table(random_use,
                                      'lens',
                                      ra='ra',
                                      dec='dec',
                                      z=z,
                                      field='field',
                                      w_sys='w_sys')

    return random_pre
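A minimal usage sketch (not part of the original example; the file name is hypothetical). It assumes the imports above are in scope and that the random-point catalog already carries 'ra', 'dec', 'field', and 'redshift' columns, since the function overwrites the redshift values in place:

from astropy.table import Table

# Hypothetical random-point catalog with 'ra', 'dec', 'field', and 'redshift' columns
randoms = Table.read('s16a_random_points.fits').as_array()

random_pre = prepare_random_catalog(randoms, size=100000, z_low=0.18, z_upp=0.52)
print(len(random_pre), random_pre.colnames)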
Example 2
        raise ValueError("nstar can only be specified for CFHT.")

# %%

if args.survey.lower() == 'kids':

    table_s = Table()

    for reg in [9, 12, 15, 23, 'S']:
        kv = Table.read(os.path.join(
            'kids', 'KV450_G{}_reweight_3x4x4_v2_good.cat'.format(reg)),
                        hdu=1)

        table = dsigma_table(kv,
                             'source',
                             survey='KiDS',
                             version='KV450',
                             verbose=reg == 9)

        for key in table.colnames:
            table[key].unit = None
        table = Table(table)

        table_s = vstack([table_s, table])

    table_s = table_s[table_s['z'] > 0.25 - 1e-6]
    table_s = table_s[table_s['z'] < 1.2 + 1e-6]
    table_s = add_maximum_lens_redshift(table_s, dz_min=0.105)
    table_s['m'] = kids.multiplicative_shear_bias(table_s['z'],
                                                  version='KV450')
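As a quick sanity check (this block is not part of the original script), the mean multiplicative shear bias can be inspected in a few coarse redshift slices of the table built above:

import numpy as np

for z_lo, z_hi in [(0.25, 0.5), (0.5, 0.8), (0.8, 1.2)]:
    sel = (table_s['z'] >= z_lo) & (table_s['z'] < z_hi)
    print('{:.2f} < z < {:.2f}: <m> = {:+.3f}'.format(
        z_lo, z_hi, np.mean(table_s['m'][sel])))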
Example 3
parser = argparse.ArgumentParser(
    description='Calculate the lensing signal for a LWB-type analysis.')

parser.add_argument('survey', help='the lens survey')
args = parser.parse_args()

cosmology = FlatLambdaCDM(H0=100, Om0=0.3)

table_l = vstack([
    Table.read(os.path.join('boss',
                            'galaxy_DR12v5_CMASSLOWZTOT_South.fits.gz')),
    Table.read(os.path.join('boss',
                            'galaxy_DR12v5_CMASSLOWZTOT_North.fits.gz'))
])
table_l = dsigma_table(table_l, 'lens', z='Z', ra='RA', dec='DEC', w_sys=1)
table_l = table_l[table_l['z'] >= 0.15]

if args.survey.lower() == 'des':

    table_s = []

    fname_list = [
        'mcal-y1a1-combined-riz-unblind-v4-matched.fits',
        'y1a1-gold-mof-badregion_BPZ.fits',
        'mcal-y1a1-combined-griz-blind-v3-matched_BPZbase.fits'
    ]
    columns_list = [[
        'e1', 'e2', 'R11', 'R12', 'R21', 'R22', 'region', 'ra', 'dec',
        'flags_select', 'flags_select_1p', 'flags_select_1m',
        'flags_select_2p', 'flags_select_2m'
Example 4
def prepare_source_random(srcs,
                          rand,
                          calib=None,
                          photoz_cut='medium',
                          dz_min=0.1,
                          cosmology=None,
                          H0=70.0,
                          Om0=0.3,
                          comoving=False,
                          n_jobs=4,
                          r_min=0.15,
                          r_max=11,
                          n_bins=11,
                          output=None,
                          n_random=500000,
                          verbose=True):
    """Prepare the lensing source, calibration, and random catalogs.

    Also precompute the DeltaSigma profiles for randoms if necessary, and define
    the cosmology model and the radial bins used for lensing profiles.
    """
    # Define cosmology
    if cosmology is None:
        cosmology = FlatLambdaCDM(H0=H0, Om0=Om0)

    # Define radial bins
    rp_bins = np.logspace(np.log10(r_min), np.log10(r_max), n_bins)

    # Photo-z quality cuts
    if verbose:
        print("# Use the {:s} photo-z quality cut".format(photoz_cut))

    if photoz_cut == 'basic':
        photoz_mask = (srcs['frankenz_model_llmin'] < 6.)
    elif photoz_cut == 'medium':
        photoz_mask = (srcs['frankenz_model_llmin'] <
                       6.) & (srcs['frankenz_photoz_risk_best'] < 0.25)
    elif photoz_cut == 'strict':
        photoz_mask = (srcs['frankenz_model_llmin'] <
                       6.) & (srcs['frankenz_photoz_risk_best'] < 0.15)
    else:
        raise ValueError(
            "Wrong photo-z quality cut type; choose from [basic/medium/strict].")

    # Prepare the source catalog
    if verbose:
        print("\n# Prepare the lensing source catalog")
    srcs_use = helpers.dsigma_table(srcs[photoz_mask],
                                    'source',
                                    survey='hsc',
                                    version='PDR2',
                                    field='field',
                                    z='frankenz_photoz_best',
                                    z_low='frankenz_photoz_err68_min',
                                    z_upp='frankenz_photoz_err68_max')

    # Add maximum usable redshift for lenses
    srcs_use = add_maximum_lens_redshift(srcs_use,
                                         dz_min=dz_min,
                                         z_err_factor=0,
                                         apply_z_low=True)

    # Prepare the calibration catalog if necessary
    if calib is not None:
        if verbose:
            print("\n# Prepare the lensing calibration catalog")
        # Photo-z quality cuts
        if photoz_cut == 'basic':
            photoz_mask = (calib['frankenz_model_llmin'] < 6.)
        elif photoz_cut == 'medium':
            photoz_mask = (calib['frankenz_model_llmin'] <
                           6.) & (calib['frankenz_photoz_risk_best'] < 0.25)
        elif photoz_cut == 'strict':
            photoz_mask = (calib['frankenz_model_llmin'] <
                           6.) & (calib['frankenz_photoz_risk_best'] < 0.15)
        else:
            raise ValueError(
                "Wrong photo-z quality cut type; choose from [basic/medium/strict].")

        # Prepare the calibration catalog
        calib_use = helpers.dsigma_table(calib[photoz_mask],
                                         'calibration',
                                         z='frankenz_photoz_best',
                                         z_true='z_true',
                                         z_low='frankenz_photoz_err68_min',
                                         w='w_source',
                                         w_sys='somw_cosmos_samplevaraince')

        # Add maximum usable redshift for lenses
        calib_use = add_maximum_lens_redshift(calib_use,
                                              dz_min=dz_min,
                                              z_err_factor=0,
                                              apply_z_low=True)
    else:
        calib_use = None

    # Prepare the random catalogs
    if verbose:
        print("\n# Prepare the random object catalog")
    rand_use = prepare_random_catalog(rand, size=n_random)

    # Pre-compute the DeltaSigma profiles for random objects
    if verbose:
        print("\n# Pre-compute the DeltaSigma profiles for random objects")
    rand_pre = add_precompute_results(rand_use,
                                      srcs_use,
                                      rp_bins,
                                      table_c=calib_use,
                                      cosmology=cosmology,
                                      comoving=comoving,
                                      n_jobs=n_jobs)

    # Remove the ones with no useful lensing information
    rand_pre['n_s_tot'] = np.sum(rand_pre['sum 1'], axis=1)
    rand_pre = rand_pre[rand_pre['n_s_tot'] > 0]

    if output is not None:
        srcs_use.write(output, path='source', format='hdf5')
        if calib_use is not None:
            calib_use.write(output, path='calib', format='hdf5', append=True)
        rand_pre.write(output, path='random', format='hdf5', append=True)
        return
    else:
        return {
            'cosmology': cosmology,
            'rp_bins': rp_bins,
            'source': srcs_use,
            'calib': calib_use,
            'random': rand_pre
        }
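A hedged end-to-end sketch (the file names are hypothetical, and the input tables are assumed to already contain the frankenz photo-z and weak-lensing columns referenced above):

from astropy.table import Table

# Hypothetical HSC-like inputs; column names must match the ones used above.
srcs = Table.read('s16a_weak_lensing_sources.fits')
calib = Table.read('s16a_cosmos_calibration.fits')
rand = Table.read('s16a_random_points.fits').as_array()

results = prepare_source_random(srcs, rand, calib=calib, photoz_cut='medium',
                                n_jobs=4, n_random=100000)
print(results['rp_bins'])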
Example 5
def prepare_lens_catalog(cat,
                         src,
                         rp_bins=None,
                         calib=None,
                         z_min=0.19,
                         z_max=0.52,
                         z='z',
                         ra='ra',
                         dec='dec',
                         comoving=False,
                         n_jobs=4,
                         field=None,
                         w_sys=None,
                         r_max_mpc=2.0,
                         verbose=True,
                         col_used=None,
                         cosmology=None):
    """Prepare the lens catalog:
        1. Select lenses in the right redshift range defined by `z_min` < z <= `z_max`.
        2. Match to the source catalog using the KDTree. Matching radius is defined as `r_max_mpc`.
        3. Prepare the catalog for pre-computation: adding `field` and lense weight if necessary.
    """
    # Generate a KDTree to match
    src_tree = catalog.catalog_to_kdtree(src, 'ra', 'dec')

    # Cosmology parameters
    if cosmology is None:
        cosmology = FlatLambdaCDM(H0=70.0, Om0=0.3)

    # Radial bins
    if rp_bins is None:
        rp_bins = np.logspace(np.log10(0.1), np.log10(20), 11)

    # Redshift cut
    cat_use = cat[(cat[z] > z_min) & (cat[z] <= z_max)]
    if len(cat_use) < 1:
        print("# No useful objects left after the redshift cut!")
        return
    if verbose:
        print("# {:d} / {:d} objects left after the redshift cut".format(
            len(cat_use), len(cat)))

    # Match to the source catalog
    # Maximum matching radius in deg
    r_max_deg = (cosmology.arcsec_per_kpc_proper(cat_use[z]) *
                 (r_max_mpc * u.Mpc).to(u.kpc)).to(u.degree).value

    # Maximum matching radius in the 3-D Cartesian coordinates used by the KDTree
    r_max_3d = np.sqrt(2 - 2 * np.cos(np.deg2rad(r_max_deg)))
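    # (Chord length on the unit sphere for an angle theta: sqrt(2 - 2*cos(theta)) = 2*sin(theta/2).)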

    # Get the KDTree of the lens catalog
    cat_kdtree = catalog.catalog_to_kdtree(cat_use, ra, dec)

    cat_index = list(
        itertools.chain(
            *src_tree.query_ball_tree(cat_kdtree, r=r_max_3d.max())))
    cat_use = cat_use[np.unique(np.asarray(cat_index))]

    if len(cat_use) < 1:
        print("# No useful objects left after the source catalog match!")
        return
    if verbose:
        print("# {:d} / {:d} objects left after the source catalog match!".
              format(len(cat_use), len(cat)))

    # Standardize the coordinate column names
    cat_use.rename_column(ra, 'ra')
    cat_use.rename_column(dec, 'dec')

    # Assign continuous `field` labels if none are provided
    if field is None:
        cat_pre = add_continous_fields(cat_use,
                                       n_samples=10000,
                                       distance_threshold=1.0)
        field = 'field'
    else:
        cat_pre = cat_use

    # Use a constant placeholder systematic weight if none is provided
    if w_sys is None:
        w_sys = 1.0

    # Organize the extra columns that need to be transferred
    if col_used is None:
        col_used = cat_pre.colnames

    for col in ['ra', 'dec', 'z', z, field, 'w_sys']:
        if col in col_used:
            col_used.remove(col)

    col_kwargs = {}
    for col in col_used:
        col_kwargs[col.lower()] = col

    # Get the catalog ready for dsigma
    cat_pre = helpers.dsigma_table(cat_pre,
                                   'lens',
                                   ra='ra',
                                   dec='dec',
                                   z=z,
                                   field=field,
                                   w_sys=w_sys,
                                   **col_kwargs)

    # Pre-computation for the lenses
    cat_pre = add_precompute_results(cat_pre,
                                     src,
                                     rp_bins,
                                     table_c=calib,
                                     cosmology=cosmology,
                                     comoving=comoving,
                                     n_jobs=n_jobs)

    # Remove the ones with no useful lensing information
    cat_pre['n_s_tot'] = np.sum(cat_pre['sum 1'], axis=1)
    cat_pre = cat_pre[cat_pre['n_s_tot'] > 0]

    return cat_pre
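A usage sketch under assumptions (the cluster catalog file and its 'z_best', 'ra_deg', and 'dec_deg' column names are hypothetical); it reuses the dictionary returned by prepare_source_random in the previous example:

from astropy.table import Table

# Hypothetical lens catalog
clusters = Table.read('hsc_cluster_candidates.fits')

lens_pre = prepare_lens_catalog(clusters,
                                results['source'],
                                rp_bins=results['rp_bins'],
                                calib=results['calib'],
                                z='z_best',
                                ra='ra_deg',
                                dec='dec_deg',
                                cosmology=results['cosmology'])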
Example 6
                                               'raw', fname),
                                  columns=columns),
                      names=columns))
        table_s = hstack(table_s)
        table_s = table_s[table_s['flags_select'] == 0]

    if survey == 'HSC':
        table_s = Table.read(
            os.path.join(zebu.base_dir, 'lwb', 'hsc', 'raw',
                         'hsc_s16a_lensing.fits'))
    elif survey == 'KiDS':
        table_s = Table.read(
            os.path.join(zebu.base_dir, 'lwb', 'kids', 'raw',
                         'KiDS_DR4.1_ugriZYJHKs_SOM_gold_WL_cat.fits'))

    table_s = dsigma_table(table_s, 'source', survey=survey)
    table_s = table_s[table_s['z'] < zebu.source_z_bins[survey.lower()][-1]]

    for z_l, color in zip(z_l_list, color_list):

        sigma_crit = critical_surface_density(z_l,
                                              table_s['z'],
                                              cosmology=zebu.cosmo)

        dz_list = np.linspace(0, 0.9, 91) - 0.005
        error = np.zeros(len(dz_list))

        for i, dz in enumerate(dz_list):

            use = table_s['z'] > z_l + dz
            w_ls = table_s['w'] / sigma_crit**2