Example #1
def difference(table_l, table_r=None, table_l_2=None, table_r_2=None,
               survey_1=None, survey_2=None, boost=False):
    """Return the difference of the stacked signals of two survey setups."""

    for survey in [survey_1, survey_2]:
        if survey not in ['gen', 'hsc', 'kids', 'des']:
            raise RuntimeError('Unknown survey!')

    if not boost:
        ds_1 = excess_surface_density(table_l, table_r=table_r,
                                      **zebu.stacking_kwargs(survey_1))
        ds_2 = excess_surface_density(table_l_2, table_r=table_r_2,
                                      **zebu.stacking_kwargs(survey_2))

        return ds_1 - ds_2
    else:
        b_1 = boost_factor(table_l, table_r=table_r)
        b_2 = boost_factor(table_l_2, table_r=table_r_2)
        return b_1 - b_2
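
A hypothetical call of `difference`, comparing an HSC-like and a DES-like configuration; the table variables below are placeholders for precomputed catalogs and are not defined in the snippet:

# Hypothetical usage: the lens and random tables are placeholders for
# precomputed dsigma catalogs, not variables defined in this snippet.
ds_diff = difference(table_l_hsc, table_r=table_r_hsc,
                     table_l_2=table_l_des, table_r_2=table_r_des,
                     survey_1='hsc', survey_2='des')

# With boost=True, the same call returns the difference of the boost
# factors instead of the difference of the excess surface densities.
b_diff = difference(table_l_hsc, table_r=table_r_hsc,
                    table_l_2=table_l_des, table_r_2=table_r_des,
                    survey_1='hsc', survey_2='des', boost=True)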
Example #2
                                     cosmology=FlatLambdaCDM(H0=70, Om0=0.3))

    # Create the jackknife fields.
    table_l_pre = add_continous_fields(table_l_pre, distance_threshold=2)
    centers = jackknife_field_centers(table_l_pre, 100)
    table_l_pre = add_jackknife_fields(table_l_pre, centers)
    table_r_pre = add_jackknife_fields(table_r_pre, centers)

    kwargs = {
        'return_table': True,
        'shear_bias_correction': True,
        'random_subtraction': True,
        'photo_z_dilution_correction': True,
        'table_r': table_r_pre
    }

    # Stack the signal and estimate its covariance via jackknife resampling.
    result = excess_surface_density(table_l_pre, **kwargs)
    kwargs['return_table'] = False
    ds_cov = jackknife_resampling(excess_surface_density, table_l_pre,
                                  **kwargs)
    result['ds_err'] = np.sqrt(np.diag(ds_cov))

    fname_base = '{}_{}{}{}'.format(
        args.survey.lower(), lens_bin,
        '_no_dilution_correction' if args.no_dilution_correction else '',
        ('_nstar_' + args.nstar) if args.nstar is not None else '')

    np.savetxt(os.path.join('results', fname_base + '_cov.csv'), ds_cov)

    result.write(os.path.join('results', fname_base + '.csv'), overwrite=True)
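
A self-contained sketch of the error extraction used above: the jackknife covariance is a square matrix over the radial bins, and the per-bin 1-sigma error is the square root of its diagonal. All numbers below are synthetic.

import numpy as np

# Toy covariance built from mock realizations; values are made up.
rng = np.random.default_rng(0)
samples = rng.normal(size=(100, 5))     # 100 mock realizations, 5 bins
ds_cov = np.cov(samples, rowvar=False)  # 5 x 5 covariance matrix
ds_err = np.sqrt(np.diag(ds_cov))       # per-bin standard deviation

np.savetxt('toy_cov.csv', ds_cov)       # same on-disk format as above
print(ds_err)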
Example #3
# %%


table_l_ref = []
table_r_ref = []
ds_ref = []

for lens_bin in range(len(zebu.lens_z_bins) - 1):

    table_l, table_r = read_precompute(
        'gen', lens_bin, 'all', zspec=True, lens_magnification=False,
        source_magnification=False, fiber_assignment=False)
    table_l_ref.append(table_l)
    table_r_ref.append(table_r)
    ds_ref.append(excess_surface_density(
        table_l, table_r=table_r, **zebu.stacking_kwargs('gen')))

# %%

rp = np.sqrt(zebu.rp_bins[1:] * zebu.rp_bins[:-1])

if args.stage == 0:
    for lens_bin in range(len(zebu.lens_z_bins) - 1):
        plt.plot(rp, rp * ds_ref[lens_bin],
                 label=r'${:.1f} \leq z_l < {:.1f}$'.format(
                     zebu.lens_z_bins[lens_bin],
                     zebu.lens_z_bins[lens_bin + 1]))

    plt.title("no shape noise, no photo-z's, all sources")
    plt.xscale('log')
    plt.xlabel(r'Projected Radius $r_p \, [h^{-1} \, \mathrm{Mpc}]$')
    plt.ylabel(r'$r_p \Delta \Sigma \, [10^6 M_\odot / \mathrm{pc}]$')
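
A self-contained sketch of the plotting convention in this example, using a synthetic power-law profile in place of the `zebu` data: bin centers are the geometric means of the bin edges, and r_p * Delta Sigma is shown against a logarithmic radial axis.

import numpy as np
import matplotlib.pyplot as plt

# Synthetic power-law profile standing in for the zebu data above.
rp_bins = np.logspace(-1, 1.6, 15)
rp = np.sqrt(rp_bins[1:] * rp_bins[:-1])  # geometric mean of the bin edges
ds = 5.0 * rp**-0.8                       # toy Delta Sigma profile

plt.plot(rp, rp * ds, label=r'$0.1 \leq z_l < 0.3$ (toy)')
plt.xscale('log')
plt.xlabel(r'Projected Radius $r_p \, [h^{-1} \, \mathrm{Mpc}]$')
plt.ylabel(r'$r_p \Delta \Sigma \, [10^6 M_\odot / \mathrm{pc}]$')
plt.legend(loc='best')
plt.show()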
Example #4
        if len(table_l_part) == 0:
            continue

        if np.amin(table_l_part['z']) >= np.amax(table_s_part['z_l_max']):
            continue

        add_precompute_results(table_l_part, table_s_part, rp_bins,
                               **precompute_kwargs)

        # Discard lenses that have no sources behind them.
        table_l_part['n_s_tot'] = np.sum(table_l_part['sum 1'], axis=1)
        table_l_part = table_l_part[table_l_part['n_s_tot'] > 0]

        # Create the jackknife fields.
        table_l_part = add_continous_fields(table_l_part, distance_threshold=2)
        centers = jackknife_field_centers(table_l_part, 100)
        table_l_part = add_jackknife_fields(table_l_part, centers)

        # Stack the signal and estimate its covariance via jackknife
        # resampling.
        stacking_kwargs['return_table'] = True
        result = excess_surface_density(table_l_part, **stacking_kwargs)
        stacking_kwargs['return_table'] = False
        ds_cov = jackknife_resampling(excess_surface_density, table_l_part,
                                      **stacking_kwargs)
        result['ds_err'] = np.sqrt(np.diag(ds_cov))

        fname_base = '{}_l{}_s{}'.format(args.survey.lower(), lens_bin,
                                         source_bin)

        np.savetxt(os.path.join('results', fname_base + '_cov.csv'), ds_cov)

        result.write(os.path.join('results', fname_base + '.csv'),
                     overwrite=True)
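
A minimal, runnable illustration of the guard at the top of this example: a lens-source bin combination is skipped when even the lowest lens redshift lies beyond the largest usable lens redshift of any source. The redshift values below are made up.

import numpy as np

# Made-up redshifts illustrating the skip condition above.
z_l = np.array([0.6, 0.7, 0.8])       # lens redshifts in this bin
z_l_max = np.array([0.3, 0.4, 0.5])   # per-source maximum lens redshift

if np.amin(z_l) >= np.amax(z_l_max):
    print('No source lies behind any lens; skipping this combination.')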
Example #5
# Discard lenses and randoms that have no sources behind them.
table_l['n_s_tot'] = np.sum(table_l['sum 1'], axis=1)
table_l = table_l[table_l['n_s_tot'] > 0]

table_r['n_s_tot'] = np.sum(table_r['sum 1'], axis=1)
table_r = table_r[table_r['n_s_tot'] > 0]

# Create the jackknife fields.
add_continous_fields(table_l, distance_threshold=2)
centers = jackknife_field_centers(table_l, 100, weight='n_s_tot')
add_jackknife_fields(table_l, centers)
add_jackknife_fields(table_r, centers)

# Stack the signal.
stacking_kwargs['random_subtraction'] = True

for lens_bin in range(len(z_bins) - 1):
    mask_l = ((z_bins[lens_bin] <= table_l['z']) &
              (table_l['z'] < z_bins[lens_bin + 1]))
    mask_r = ((z_bins[lens_bin] <= table_r['z']) &
              (table_r['z'] < z_bins[lens_bin + 1]))

    stacking_kwargs['table_r'] = table_r[mask_r]
    stacking_kwargs['return_table'] = True
    result = excess_surface_density(table_l[mask_l], **stacking_kwargs)
    stacking_kwargs['return_table'] = False
    result['ds_err'] = np.sqrt(
        np.diag(
            jackknife_resampling(excess_surface_density, table_l[mask_l],
                                 **stacking_kwargs)))

    result.write('{}_{}.csv'.format(args.survey.lower(), lens_bin))
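
A self-contained sketch of the redshift-bin masks built in the loop above, with synthetic redshifts in place of the lens table:

import numpy as np

# Synthetic redshifts standing in for the lens table above.
z_bins = np.array([0.1, 0.3, 0.5, 0.7])
z = np.random.default_rng(1).uniform(0.1, 0.7, size=1000)

for lens_bin in range(len(z_bins) - 1):
    mask = (z_bins[lens_bin] <= z) & (z < z_bins[lens_bin + 1])
    print('bin {}: {} lenses'.format(lens_bin, mask.sum()))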
Example #6
            fname_r = 'l{}_s{}_r.hdf5'.format(lens_bin, source_bin)
            table_l = Table.read(os.path.join('jackknife', fname_l),
                                 path='data')
            table_r = Table.read(os.path.join('jackknife', fname_r),
                                 path='data')
        except FileNotFoundError:
            continue

        kwargs = {
            'table_r': table_r,
            'photo_z_dilution_correction': True,
            'boost_correction': True,
            'random_subtraction': True,
            'return_table': True
        }
        delta_sigma = excess_surface_density(table_l, **kwargs)
        kwargs['return_table'] = False
        delta_sigma['delta sigma_err'] = np.sqrt(
            np.diag(
                jackknife_resampling(excess_surface_density, table_l,
                                     **kwargs)))

        color = 'C{}'.format(source_bin)

        axarr[0].plot(delta_sigma['rp'],
                      delta_sigma['f_bias'],
                      color=color,
                      ls='-',
                      label=r"$f_{\rm bias}$" if source_bin == 3 else "")
        axarr[0].plot(delta_sigma['rp'],
                      delta_sigma['b'],
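
A runnable round trip of the I/O pattern this example relies on: astropy Tables stored under a named path in an HDF5 file (this needs astropy's optional h5py dependency), with missing files skipped via FileNotFoundError. The file name and columns below are made up.

import os
from astropy.table import Table

# Write and read back a small table under a named HDF5 path.
table = Table({'rp': [0.1, 1.0, 10.0], 'f_bias': [1.10, 1.05, 1.02]})
table.write('toy.hdf5', path='data', overwrite=True)

try:
    table_l = Table.read('toy.hdf5', path='data')
except FileNotFoundError:
    table_l = None  # skip this combination, as in the loop above

os.remove('toy.hdf5')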
Example #7
def stack_dsigma_profile(lens,
                         rand,
                         mask=None,
                         n_rand=None,
                         use_dsigma=False,
                         bootstrap=False,
                         n_boot=500,
                         jackknife=True,
                         n_jobs=None,
                         n_jk=45):
    """Get the DeltaSigma profile of a sample of lens."""
    # Check to see the setup for lens and random
    assert np.all(lens.meta['bins'] == rand.meta['bins'])
    assert lens.meta['H0'] == rand.meta['H0']
    assert lens.meta['Om0'] == rand.meta['Om0']
    assert (lens['n_s_tot'] > 0).sum() == len(lens)
    assert (rand['n_s_tot'] > 0).sum() == len(rand)

    # Apply the mask
    lens_use = lens if mask is None else lens[mask]

    # Randomly downsample the random objects if necessary
    if n_rand is not None and n_rand < len(rand):
        rand_use = Table(np.random.choice(rand, size=n_rand, replace=False))
        rand_use.meta = rand.meta
    else:
        rand_use = rand

    # Get the stacked lensing profiles
    if use_dsigma:
        # Configuration for calculating the HSC lensing signal.
        kwargs = {
            'return_table': True,
            'shear_bias_correction': True,
            'shear_responsivity_correction': True,
            'selection_bias_correction': True,
            'boost_correction': False,
            'random_subtraction': True,
            'photo_z_dilution_correction': True,
            'rotation': False,
            'table_r': rand_use
        }

        result = excess_surface_density(lens_use, **kwargs)
    else:
        result = Table()
        result['ds'] = dsigma_no_wsys(lens_use, rand_use)

    if jackknife:
        if n_jk <= 5:
            raise Exception(
                "Number of jackknife fields is too small, should be > 5.")

        if len(lens_use) <= 5:
            print("Number of lenses <= 5; cannot use jackknife resampling.")
            jackknife = False
        else:
            # Reduce the number of jackknife fields for small samples.
            if len(lens_use) <= n_jk - 5:
                n_jk = len(lens_use) - 5

            # Add consistent jackknife fields to both the lens and random
            # catalogs.
            add_continous_fields(lens_use, distance_threshold=2)
            centers = jackknife_field_centers(lens_use, n_jk,
                                              weight='n_s_tot')
            add_jackknife_fields(lens_use, centers)
            add_jackknife_fields(rand_use, centers)

            # Estimate the covariance matrix using jackknife resampling.
            cov_jk = dsigma_jk_resample(lens_use, rand_use, n_jobs=n_jobs)

            result['ds_err_jk'] = np.sqrt(np.diag(cov_jk))
            result.meta['cov_jk'] = cov_jk
            result.meta['s2n_jk'] = np.sqrt(
                np.dot(result['ds'].T.dot(np.linalg.inv(cov_jk)),
                       result['ds']))

    # Estimate the covariance matrix using Bootstrap resampling
    if bootstrap:
        cov_bt = dsigma_bootstrap(lens_use,
                                  rand_use,
                                  n_boot=n_boot,
                                  n_jobs=n_jobs)

        result['ds_err_bt'] = np.sqrt(np.diag(cov_bt))
        result.meta['cov_bt'] = cov_bt
        result.meta['s2n_bt'] = np.sqrt(
            np.dot(result['ds'].T.dot(np.linalg.inv(cov_bt)), result['ds']))

    return result
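
A stand-alone illustration of the signal-to-noise estimate stored in `s2n_jk` and `s2n_bt` above, S/N = sqrt(d^T C^-1 d), using a synthetic data vector and covariance:

import numpy as np

# Synthetic data vector and covariance; values are made up.
rng = np.random.default_rng(2)
ds = np.array([10.0, 5.0, 2.0, 1.0])
samples = ds + rng.normal(scale=0.5, size=(200, 4))
cov = np.cov(samples, rowvar=False)

s2n = np.sqrt(np.dot(ds.T.dot(np.linalg.inv(cov)), ds))
print('S/N = {:.1f}'.format(s2n))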
Example #8
for i in range(3):

    source_bin = 3 - i

    path = os.path.join(
        zebu.base_dir, 'stacks', 'region_1', 'precompute',
        'l{}_s{}_gen_zspec_nosmag.hdf5'.format(lens_bin, source_bin))

    table_l = Table.read(path, path='lens')
    table_r = Table.read(path, path='random')

    kwargs = zebu.stacking_kwargs('gen')

    if source_bin == 3:
        ds_ref = excess_surface_density(table_l, table_r=table_r, **kwargs)

    ds_lm = lens_magnification_bias(table_l,
                                    zebu.alpha_l[lens_bin],
                                    camb_results,
                                    photo_z_dilution_correction=True)

    plt.plot(rp,
             rp * ds_lm,
             label=r'${:.1f} \leq z_s < {:.1f}$'.format(
                 zebu.source_z_bins['gen'][source_bin],
                 zebu.source_z_bins['gen'][source_bin + 1]))

plt.title(r'${:.1f} \leq z_l < {:.1f}$'.format(zebu.lens_z_bins[lens_bin],
                                               zebu.lens_z_bins[lens_bin + 1]))
plt.legend(loc='best')
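
A self-contained sketch of the per-source-bin plotting pattern above, with made-up bin edges and amplitudes in place of the `zebu` and CAMB inputs:

import numpy as np
import matplotlib.pyplot as plt

# Made-up source-bin edges and toy profiles; one curve per source bin,
# colored via the default cycle and labeled with its redshift range.
source_z_bins = [0.5, 0.7, 0.9, 1.2, 1.5]
rp = np.logspace(-1, 1.6, 20)

for source_bin in [3, 2, 1]:
    plt.plot(rp, 0.1 * source_bin * rp**0.2,
             color='C{}'.format(source_bin),
             label=r'${:.1f} \leq z_s < {:.1f}$'.format(
                 source_z_bins[source_bin], source_z_bins[source_bin + 1]))

plt.xscale('log')
plt.legend(loc='best')
plt.show()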