if len(glob.glob(os.path.join('precompute', fname_base + '*'))) == 0:
    continue

table_l = []

for fname in glob.glob(os.path.join('precompute', fname_base + '*')):
    print(fname)
    table_l_i = Table.read(fname, path='data')
    if args.zspec and args.zspec_zphot_sys_weights:
        table_l_i['w_sys'] = zspec_systematic_weights(
            lens_bin, source_bin)(table_l_i['z'])
    add_jackknife_fields(table_l_i, centers)
    table_l_i = compress_jackknife_fields(table_l_i)
    table_l.append(table_l_i)

# Compress again to undo the vstack concatenation.
table_l = compress_jackknife_fields(vstack(table_l))
table_l.meta['rp_bins'] = table_l_i.meta['rp_bins']

if args.zspec and args.zspec_zphot_sys_weights:
    fname_base = fname_base + '_w_sys'

table_l.write(os.path.join('jackknife', fname_base + '.hdf5'), path='data',
              overwrite=True, serialize_meta=True)
    comoving=False, table_c=table_c,
    cosmology=FlatLambdaCDM(H0=70, Om0=0.3))

print('Working on randoms in bin {}...'.format(lens_bin + 1))
table_r_pre = precompute_catalog(table_r, table_s, rp_bins, n_jobs=40,
                                 comoving=False, table_c=table_c,
                                 cosmology=FlatLambdaCDM(H0=70, Om0=0.3))

# Create the jackknife fields.
table_l_pre = add_continous_fields(table_l_pre, distance_threshold=2)
centers = jackknife_field_centers(table_l_pre, 100)
table_l_pre = add_jackknife_fields(table_l_pre, centers)
table_r_pre = add_jackknife_fields(table_r_pre, centers)

kwargs = {'return_table': True,
          'shear_bias_correction': True,
          'random_subtraction': True,
          'photo_z_dilution_correction': True,
          'table_r': table_r_pre}

result = excess_surface_density(table_l_pre, **kwargs)
kwargs['return_table'] = False
ds_cov = jackknife_resampling(excess_surface_density, table_l_pre, **kwargs)
result['ds_err'] = np.sqrt(np.diag(ds_cov))
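# Optional diagnostic (a sketch, not part of the original script): the total
# detection signal-to-noise implied by the jackknife covariance, computed
# the same way as 's2n_jk' in stack_dsigma_profile further below.
s2n = np.sqrt(np.dot(result['ds'].T.dot(np.linalg.inv(ds_cov)),
                     result['ds']))
print('S/N in bin {}: {:.1f}'.format(lens_bin + 1, s2n))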
    })

# Pre-compute the signal.
add_precompute_results(table_l, table_s, rp_bins, **precompute_kwargs)
add_precompute_results(table_r, table_s, rp_bins, **precompute_kwargs)

# Add jackknife fields, keeping only objects with at least one source.
table_l['n_s_tot'] = np.sum(table_l['sum 1'], axis=1)
table_l = table_l[table_l['n_s_tot'] > 0]
table_r['n_s_tot'] = np.sum(table_r['sum 1'], axis=1)
table_r = table_r[table_r['n_s_tot'] > 0]
add_continous_fields(table_l, distance_threshold=2)
centers = jackknife_field_centers(table_l, 100, weight='n_s_tot')
add_jackknife_fields(table_l, centers)
add_jackknife_fields(table_r, centers)

# Stack the signal.
stacking_kwargs['random_subtraction'] = True

for lens_bin in range(len(z_bins) - 1):
    mask_l = ((z_bins[lens_bin] <= table_l['z']) &
              (table_l['z'] < z_bins[lens_bin + 1]))
    mask_r = ((z_bins[lens_bin] <= table_r['z']) &
              (table_r['z'] < z_bins[lens_bin + 1]))
    stacking_kwargs['table_r'] = table_r[mask_r]
    stacking_kwargs['return_table'] = True
    result = excess_surface_density(table_l[mask_l], **stacking_kwargs)
    stacking_kwargs['return_table'] = False
if len(table_l_part) == 0:
    continue

if np.amin(table_l_part['z']) >= np.amax(table_s_part['z_l_max']):
    continue

add_precompute_results(table_l_part, table_s_part, rp_bins,
                       **precompute_kwargs)

# Create the jackknife fields, keeping only lenses with at least one source.
table_l_part['n_s_tot'] = np.sum(table_l_part['sum 1'], axis=1)
table_l_part = table_l_part[table_l_part['n_s_tot'] > 0]
table_l_part = add_continous_fields(table_l_part, distance_threshold=2)
centers = jackknife_field_centers(table_l_part, 100)
table_l_part = add_jackknife_fields(table_l_part, centers)

stacking_kwargs['return_table'] = True
result = excess_surface_density(table_l_part, **stacking_kwargs)
stacking_kwargs['return_table'] = False
ds_cov = jackknife_resampling(excess_surface_density, table_l_part,
                              **stacking_kwargs)
result['ds_err'] = np.sqrt(np.diag(ds_cov))

fname_base = '{}_l{}_s{}'.format(args.survey.lower(), lens_bin, source_bin)
np.savetxt(os.path.join('results', fname_base + '_cov.csv'), ds_cov)
result.write(os.path.join('results', fname_base + '.csv'), overwrite=True)
def stack_dsigma_profile(lens, rand, mask=None, n_rand=None, use_dsigma=False,
                         bootstrap=False, n_boot=500, jackknife=True,
                         n_jobs=None, n_jk=45):
    """Get the DeltaSigma profile of a sample of lenses."""
    # Check that the lens and random catalogs have consistent setups.
    assert np.all(lens.meta['bins'] == rand.meta['bins'])
    assert lens.meta['H0'] == rand.meta['H0']
    assert lens.meta['Om0'] == rand.meta['Om0']
    assert (lens['n_s_tot'] > 0).sum() == len(lens)
    assert (rand['n_s_tot'] > 0).sum() == len(rand)

    # Apply the mask.
    lens_use = lens if mask is None else lens[mask]

    # Randomly downsample the random objects if necessary.
    if n_rand is not None and n_rand < len(rand):
        rand_use = Table(np.random.choice(rand, size=n_rand, replace=False))
        rand_use.meta = rand.meta
    else:
        rand_use = rand

    # Get the stacked lensing profiles.
    if use_dsigma:
        # Configuration for the HSC calculation.
        kwargs = {'return_table': True,
                  'shear_bias_correction': True,
                  'shear_responsivity_correction': True,
                  'selection_bias_correction': True,
                  'boost_correction': False,
                  'random_subtraction': True,
                  'photo_z_dilution_correction': True,
                  'rotation': False,
                  'table_r': rand_use}

        result = excess_surface_density(lens_use, **kwargs)
    else:
        result = Table()
        result['ds'] = dsigma_no_wsys(lens_use, rand_use)

    if jackknife:
        if n_jk <= 5:
            raise Exception(
                "Number of jackknife fields is too small; it must be > 5.")
        if len(lens_use) <= 5:
            print("Number of lenses < 5; cannot use jackknife resampling.")
            jackknife = False
        else:
            # Reduce the number of jackknife fields for small samples.
            if len(lens_use) <= n_jk - 5:
                n_jk = len(lens_use) - 5

            # Add consistent jackknife fields to both the lens and random
            # catalogs.
            add_continous_fields(lens_use, distance_threshold=2)
            centers = jackknife_field_centers(lens_use, n_jk,
                                              weight='n_s_tot')
            add_jackknife_fields(lens_use, centers)
            add_jackknife_fields(rand_use, centers)

            # Estimate the covariance matrix using jackknife resampling.
            cov_jk = dsigma_jk_resample(lens_use, rand_use, n_jobs=n_jobs)

            result['ds_err_jk'] = np.sqrt(np.diag(cov_jk))
            result.meta['cov_jk'] = cov_jk
            result.meta['s2n_jk'] = np.sqrt(
                np.dot(result['ds'].T.dot(np.linalg.inv(cov_jk)),
                       result['ds']))

    # Estimate the covariance matrix using bootstrap resampling.
    if bootstrap:
        cov_bt = dsigma_bootstrap(lens_use, rand_use, n_boot=n_boot,
                                  n_jobs=n_jobs)

        result['ds_err_bt'] = np.sqrt(np.diag(cov_bt))
        result.meta['cov_bt'] = cov_bt
        result.meta['s2n_bt'] = np.sqrt(
            np.dot(result['ds'].T.dot(np.linalg.inv(cov_bt)),
                   result['ds']))

    return result
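# Minimal usage sketch (illustrative, not part of the original module). It
# assumes lens and random catalogs that already carry the precompute results
# and an 'n_s_tot' column, as produced by the scripts above; the file names
# and the selection below are hypothetical.
if __name__ == '__main__':
    table_l = Table.read('lens_precompute.hdf5', path='data')  # hypothetical
    table_r = Table.read('rand_precompute.hdf5', path='data')  # hypothetical
    mask_low_z = table_l['z'] < 0.5  # hypothetical lens selection
    profile = stack_dsigma_profile(table_l, table_r, mask=mask_low_z,
                                   jackknife=True, n_jk=45)
    print(profile['ds'], profile['ds_err_jk'])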