def plot_difference(ax, color, table_l_1, table_r_1, table_l_2, table_r_2,
                    survey, survey_2=None, ds_norm=1.0, label=None, offset=0,
                    lens_bin=0, boost=False):
    """Plot the normalized difference of two lensing signals on `ax`.

    The difference and its jackknife covariance come from the
    project-level `difference` and `jackknife_resampling` helpers; the
    result is drawn (in percent) as error bars against the module-level
    radii `rp`, shifted horizontally by `offset` to avoid overlap.
    """
    # Nothing to draw if any of the four input tables is empty.
    if len(table_l_1) * len(table_r_1) * len(table_l_2) * len(table_r_2) == 0:
        print('Warning: Received empty result to plot.')
        return None

    if survey_2 is None:
        survey_2 = survey

    # Shared keyword arguments for both the point estimate and the
    # jackknife covariance of the difference.
    common = dict(table_r=table_r_1, table_l_2=table_l_2,
                  table_r_2=table_r_2, survey_1=survey, survey_2=survey_2,
                  boost=boost)

    diff = difference(table_l_1, **common) / ds_norm
    cov = jackknife_resampling(difference, table_l_1, **common)

    # Normalize the covariance consistently with the signal: an
    # array-valued ds_norm scales entry (i, j) by ds_norm[i] *
    # ds_norm[j]; a scalar scales everything by ds_norm**2.
    if hasattr(ds_norm, 'shape'):
        cov = cov / np.outer(ds_norm, ds_norm)
    else:
        cov = cov / ds_norm**2

    # A covariance that is (numerically) all zeros gets exactly-zero
    # error bars instead of sqrt of tiny round-off values.
    if np.all(np.isclose(cov, 0)):
        err = np.zeros(len(np.diag(cov)))
    else:
        err = np.sqrt(np.diag(cov))

    plotline, caps, barlinecols = ax.errorbar(
        rp * (1 + offset * 0.05), 100 * diff, yerr=100 * err, label=label,
        fmt='o', ms=2, color=color, zorder=offset + 100)
    plt.setp(barlinecols[0], capstyle='round')

    return None
# NOTE(review): this chunk begins mid-statement — the fragment below
# closes a function call whose opening arguments lie above this view.
    cosmology=FlatLambdaCDM(H0=70, Om0=0.3))

# Create the jackknife fields.  The helpers return the augmented
# tables, which are re-assigned here.
table_l_pre = add_continous_fields(table_l_pre, distance_threshold=2)
centers = jackknife_field_centers(table_l_pre, 100)
table_l_pre = add_jackknife_fields(table_l_pre, centers)
table_r_pre = add_jackknife_fields(table_r_pre, centers)

# Estimator options; 'table_r' supplies the randoms used for random
# subtraction.
kwargs = {
    'return_table': True, 'shear_bias_correction': True,
    'random_subtraction': True, 'photo_z_dilution_correction': True,
    'table_r': table_r_pre}

# Compute the stacked signal once (with the per-bin result table), then
# re-run without the table to estimate the covariance via jackknifing.
result = excess_surface_density(table_l_pre, **kwargs)
kwargs['return_table'] = False
ds_cov = jackknife_resampling(excess_surface_density, table_l_pre, **kwargs)
result['ds_err'] = np.sqrt(np.diag(ds_cov))

# NOTE(review): 'dillution' is misspelled, but it is part of the output
# file name; fixing the spelling would change on-disk file names that
# downstream code may rely on, so it is left as-is.
fname_base = '{}_{}{}{}'.format(
    args.survey.lower(), lens_bin,
    '_no_dillution_correction' if args.no_dilution_correction else '',
    ('_nstar_' + args.nstar) if args.nstar is not None else '')

np.savetxt(os.path.join('results', fname_base + '_cov.csv'), ds_cov)
result.write(os.path.join('results', fname_base + '.csv'), overwrite=True)
# Attach the total number of sources per lens/random and drop objects
# without any associated sources.
table_l['n_s_tot'] = np.sum(table_l['sum 1'], axis=1)
table_l = table_l[table_l['n_s_tot'] > 0]
table_r['n_s_tot'] = np.sum(table_r['sum 1'], axis=1)
table_r = table_r[table_r['n_s_tot'] > 0]

# Create the jackknife fields.  Fix: assign the returned tables — every
# other call site in this file uses the return value, and discarding it
# here would silently lose the added fields if the helpers return new
# tables instead of mutating in place.
table_l = add_continous_fields(table_l, distance_threshold=2)
centers = jackknife_field_centers(table_l, 100, weight='n_s_tot')
table_l = add_jackknife_fields(table_l, centers)
table_r = add_jackknife_fields(table_r, centers)

# Stack the signal.
stacking_kwargs['random_subtraction'] = True

for lens_bin in range(len(z_bins) - 1):
    # Select lenses and randoms falling in this redshift bin.
    mask_l = ((z_bins[lens_bin] <= table_l['z']) &
              (table_l['z'] < z_bins[lens_bin + 1]))
    mask_r = ((z_bins[lens_bin] <= table_r['z']) &
              (table_r['z'] < z_bins[lens_bin + 1]))
    stacking_kwargs['table_r'] = table_r[mask_r]

    # Compute the stacked signal with the result table, then estimate
    # the per-bin errors via jackknife resampling (return_table=False).
    stacking_kwargs['return_table'] = True
    result = excess_surface_density(table_l[mask_l], **stacking_kwargs)
    stacking_kwargs['return_table'] = False
    result['ds_err'] = np.sqrt(np.diag(jackknife_resampling(
        excess_surface_density, table_l[mask_l], **stacking_kwargs)))
    result.write('{}_{}.csv'.format(args.survey.lower(), lens_bin))
# NOTE(review): this chunk starts inside a loop over (lens_bin,
# source_bin) whose header lies above this view; `continue` targets
# that loop.
if len(table_l_part) == 0:
    continue

# Skip the bin when even the lowest-redshift lens exceeds every
# source's z_l_max — presumably no usable lens-source pairs exist in
# that case; confirm against add_precompute_results.
if np.amin(table_l_part['z']) >= np.amax(table_s_part['z_l_max']):
    continue

add_precompute_results(table_l_part, table_s_part, rp_bins,
                       **precompute_kwargs)

# Create the jackknife fields.  Lenses without any associated sources
# are dropped before the fields are constructed.
table_l_part['n_s_tot'] = np.sum(table_l_part['sum 1'], axis=1)
table_l_part = table_l_part[table_l_part['n_s_tot'] > 0]
table_l_part = add_continous_fields(table_l_part, distance_threshold=2)
centers = jackknife_field_centers(table_l_part, 100)
table_l_part = add_jackknife_fields(table_l_part, centers)

# Compute the stacked signal with the result table, then re-run via
# jackknife resampling (return_table=False) to get the covariance.
stacking_kwargs['return_table'] = True
result = excess_surface_density(table_l_part, **stacking_kwargs)
stacking_kwargs['return_table'] = False
ds_cov = jackknife_resampling(excess_surface_density, table_l_part,
                              **stacking_kwargs)
result['ds_err'] = np.sqrt(np.diag(ds_cov))

fname_base = '{}_l{}_s{}'.format(args.survey.lower(), lens_bin, source_bin)

np.savetxt(os.path.join('results', fname_base + '_cov.csv'), ds_cov)
result.write(os.path.join('results', fname_base + '.csv'), overwrite=True)
# NOTE(review): this chunk starts mid-statement — the fragment below
# closes a call (inside a try block) whose opening lies above this
# view; the FileNotFoundError handler skips the current loop iteration.
    path='data')
except FileNotFoundError:
    continue

# Estimator options: randoms, photo-z dilution, boost and random
# subtraction corrections are all enabled.
kwargs = {
    'table_r': table_r, 'photo_z_dilution_correction': True,
    'boost_correction': True, 'random_subtraction': True,
    'return_table': True}

# Compute the signal with the result table, then re-run without it to
# estimate the errors via jackknife resampling.
delta_sigma = excess_surface_density(table_l, **kwargs)
kwargs['return_table'] = False
# NOTE(review): the column name 'delta sigma_err' mixes a space and an
# underscore — it is a runtime key, so it is left untouched here.
delta_sigma['delta sigma_err'] = np.sqrt(np.diag(jackknife_resampling(
    excess_surface_density, table_l, **kwargs)))

# One matplotlib color cycle entry per source bin; only source_bin == 3
# contributes legend labels (so each label appears once).
color = 'C{}'.format(source_bin)
axarr[0].plot(delta_sigma['rp'], delta_sigma['f_bias'], color=color, ls='-',
              label=r"$f_{\rm bias}$" if source_bin == 3 else "")
axarr[0].plot(delta_sigma['rp'], delta_sigma['b'], color=color, ls='--',
              label=r"boost" if source_bin == 3 else "")
# NOTE(review): this chunk ends mid-call — the remaining errorbar
# arguments lie below this view.  The x-values are shifted per
# (source_bin - lens_bin) to avoid overlapping error bars.
axarr[1].errorbar(
    delta_sigma['rp'] * (1 + (source_bin - lens_bin) * 0.03),