def ang_cross_corr_from_coords(ras, decs, refras, refdecs, randras, randdecs,
                               minscale, maxscale, weights=None,
                               refweights=None, randweights=None,
                               nthreads=1, nbins=10):
    """Angular cross-correlation w(theta) between a sample and a reference
    sample, using the Davis-Peebles estimator w = DD/DR * (Nrand/Nref) - 1.

    Parameters
    ----------
    ras, decs : array-like
        Coordinates (degrees) of the primary sample.
    refras, refdecs : array-like
        Coordinates of the reference (control) sample.
    randras, randdecs : array-like
        Coordinates of the random catalog.
    minscale, maxscale : float
        log10 of the minimum/maximum angular scale in degrees.
    weights, refweights, randweights : array-like, optional
        Per-object weights for each catalog, passed through to Corrfunc.
    nthreads : int
        Number of OpenMP threads for Corrfunc.
    nbins : int
        Number of logarithmic angular bins.

    Returns
    -------
    numpy.ndarray
        w(theta) in each of the ``nbins`` angular bins.
    """
    # Logarithmically spaced bin edges in degrees (inputs are log10 scales).
    bins = np.logspace(minscale, maxscale, nbins + 1)

    # Pair counts between the sample and the reference sample
    # (first argument 0 -> cross-correlation).
    DD_counts = DDtheta_mocks(0, nthreads, bins, ras, decs,
                              RA2=refras, DEC2=refdecs,
                              weights1=weights, weights2=refweights)
    # Use the named 'npairs' field of Corrfunc's structured result array
    # instead of the fragile positional index [j][3].
    dd = np.asarray(DD_counts['npairs'], dtype=float)

    # Pair counts between the sample and the random catalog.
    DR_counts = DDtheta_mocks(0, nthreads, bins, ras, decs,
                              RA2=randras, DEC2=randdecs,
                              weights1=weights, weights2=randweights)
    dr = np.asarray(DR_counts['npairs'], dtype=float)

    # Davis-Peebles estimator, normalized by the random/reference ratio.
    wtheta = dd / dr * float(len(randras)) / float(len(refras)) - 1
    return wtheta
def angular_correlation_function(table_1, table_2, table_r, theta_bins):
    """Angular cross-correlation of two catalogs against a shared random
    catalog, via Corrfunc pair counts and the Landy-Szalay-style combination
    performed by ``convert_3d_counts_to_cf``.

    Parameters
    ----------
    table_1, table_2 : table-like
        Catalogs with 'ra' and 'dec' columns (degrees).
    table_r : table-like
        Random catalog with 'ra' and 'dec' columns.
    theta_bins : array-like
        Angular bin edges passed to Corrfunc.

    Returns
    -------
    numpy.ndarray
        The correlation function in each theta bin.
    """
    n_1 = len(table_1)
    n_2 = len(table_2)
    n_r = len(table_r)
    # Use every available core for all four pair counts.
    n = multiprocessing.cpu_count()
    # D1D2: cross pair counts between the two data catalogs.
    d1d2 = DDtheta_mocks(
        False, n, theta_bins, table_1['ra'], table_1['dec'],
        RA2=table_2['ra'], DEC2=table_2['dec'])['npairs']
    # D1R and D2R: each data catalog against the randoms.
    d1r = DDtheta_mocks(
        False, n, theta_bins, table_1['ra'], table_1['dec'],
        RA2=table_r['ra'], DEC2=table_r['dec'])['npairs']
    d2r = DDtheta_mocks(
        False, n, theta_bins, table_2['ra'], table_2['dec'],
        RA2=table_r['ra'], DEC2=table_r['dec'])['npairs']
    # RR: autocorrelation of the randoms.
    # Fix: this call was hard-coded to 4 threads while every other count
    # used cpu_count(); use n consistently.
    rr = DDtheta_mocks(
        True, n, theta_bins, table_r['ra'], table_r['dec'])['npairs']
    return convert_3d_counts_to_cf(n_1, n_2, n_r, n_r, d1d2, d1r, d2r, rr)
def angular_corr_from_coords(ras, decs, randras, randdecs, weights=None,
                             randweights=None, nthreads=1, nbins=10,
                             minscale=-2, maxscale=1):
    """Angular autocorrelation w(theta) of a catalog, using the Landy-Szalay
    combination performed by ``convert_3d_counts_to_cf`` (DR is used for both
    cross terms since there is a single data catalog).

    Parameters
    ----------
    ras, decs : array-like
        Coordinates (degrees) of the data catalog.
    randras, randdecs : array-like
        Coordinates of the random catalog.
    weights, randweights : array-like, optional
        Per-object weights, passed through to Corrfunc.
    nthreads : int
        Number of OpenMP threads for Corrfunc.
    nbins : int
        Number of logarithmic angular bins.
    minscale, maxscale : float, optional
        log10 of the minimum/maximum angular scale in degrees. Defaults
        (-2, 1) reproduce the previously hard-coded 0.01-10 degree range.

    Returns
    -------
    numpy.ndarray
        w(theta) in each of the ``nbins`` angular bins.
    """
    # Logarithmically spaced bin edges in degrees.
    bins = np.logspace(minscale, maxscale, nbins + 1)

    # Autocorrelation of the data catalog (first argument 1 -> auto).
    DD_counts = DDtheta_mocks(1, nthreads, bins, ras, decs, weights1=weights)

    # Cross-correlation between data and randoms.
    DR_counts = DDtheta_mocks(0, nthreads, bins, ras, decs,
                              RA2=randras, DEC2=randdecs,
                              weights1=weights, weights2=randweights)

    # Autocorrelation of the randoms.
    RR_counts = DDtheta_mocks(1, nthreads, bins, randras, randdecs,
                              weights1=randweights)

    wtheta = convert_3d_counts_to_cf(len(ras), len(ras), len(randras),
                                     len(randras), DD_counts, DR_counts,
                                     DR_counts, RR_counts)
    return wtheta
# Build selection masks for targets and randoms. In test mode, restrict
# both catalogs to the rectangular (RA, Dec) patch given by `limits`;
# otherwise keep everything.
if test:
    mask_tar = select_patch([targets['RA'], targets['DEC']], limits=limits)
    mask_ran = select_patch([randoms['RA'], randoms['DEC']], limits=limits)
else:
    mask_tar = np.ones(len(targets['RA']), dtype=bool)
    mask_ran = np.ones(len(randoms['RA']), dtype=bool)

# Pair counts per colour selection. `colours` maps a label to a boolean
# selection mask over the target catalog (defined outside this chunk).
for key, val in colours.items():
    # Combine the spatial mask with this colour's selection mask.
    keep_tar = (mask_tar) & (val)
    RA1, DEC1 = targets['RA'][keep_tar], targets['DEC'][keep_tar]
    RAr, DECr = randoms['RA'][mask_ran], randoms['DEC'][mask_ran]
    start = time.time()
    # DD: autocorrelation (first argument 1) of the colour-selected targets.
    pairs['DD_%s' % (key)] = DDtheta_mocks(1, nthreads, bins, RA1, DEC1)
    end = time.time()
    print('DD run time: %f sec' % (end - start))
    start = time.time()
    # DR: cross pair counts (first argument 0) randoms x targets.
    pairs['DR_%s' % (key)] = DDtheta_mocks(0, nthreads, bins, RAr, DECr,
                                           RA2=RA1, DEC2=DEC1)
    end = time.time()
    print('DR run time: %f sec' % (end - start))

# NOTE(review): this np.save call is truncated in this chunk; its arguments
# (output path and the `pairs` dict, presumably) continue outside this view.
np.save(
def select_patch(cat, limits):
    """Boolean mask selecting objects inside a rectangular (RA, Dec) patch.

    Parameters
    ----------
    cat : sequence of two array-likes
        ``cat[0]`` are RA values, ``cat[1]`` are Dec values (degrees).
    limits : sequence of four floats
        ``[ra_min, ra_max, dec_min, dec_max]`` (exclusive bounds).

    Returns
    -------
    numpy.ndarray of bool
        True for objects strictly inside the patch.
    """
    ra, dec = cat[0], cat[1]
    inside = (ra > limits[0]) & (ra < limits[1])
    inside &= (dec > limits[2]) & (dec < limits[3])
    return inside


# Configuration for the RR pair-count run.
test = False
nbins = 40
nthreads = 32
# Logarithmic angular bins between 0.001 and 10 degrees.
bins = np.logspace(np.log10(0.001), np.log10(10.0), nbins + 1)

if test:
    # Small sky patch for a quick test run.
    limits = [160, 170, 5, 7]
    mask_ran = select_patch([randoms['RA'], randoms['DEC']], limits=limits)
    RAr, DECr = randoms['RA'][mask_ran], randoms['DEC'][mask_ran]
else:
    RAr, DECr = randoms['RA'], randoms['DEC']

start = time.time()
# NOTE(review): RR is computed here as a cross-correlation of the randoms
# with themselves (first argument 0 plus RA2/DEC2). An autocorrelation call
# (first argument 1, no RA2/DEC2) may be cheaper, but confirm Corrfunc's
# pair-count convention before changing — downstream code may depend on
# these exact counts.
RR = DDtheta_mocks(0, nthreads, bins, RAr, DECr, RA2=RAr, DEC2=DECr)
end = time.time()
print('RR run time: %f sec' % (end - start))

np.save(
    '/global/cscratch1/sd/qmxp55/bgstargets_output/dr9/clustering/pair_counts/RR_counts_randoms1_south_nominal',
    RR)
# Split the random-index array into n interleaved chunks (every n-th index),
# keyed by the chunk number as a string.
n = 16
chunks = {'%i' % (c): idx[c::n] for c in range(n)}

# RR pair counts for each random chunk against itself.
ran_pairs = {}
nbins = 40
nthreads = 16
# Logarithmic angular bins between 0.001 and 10 degrees.
bins = np.logspace(np.log10(0.001), np.log10(10.0), nbins + 1)

for keyR, sel in chunks.items():
    RAr, DECr = randoms['RA'][sel], randoms['DEC'][sel]
    start = time.time()
    ran_pairs['RR_%s' % (keyR)] = DDtheta_mocks(0, nthreads, bins,
                                                RAr, DECr,
                                                RA2=RAr, DEC2=DECr)
    end = time.time()
    print('RR_%s run time: %f sec' % (keyR, end - start))

np.save(
    '/global/cscratch1/sd/qmxp55/bgstargets_output/dr9/clustering/pair_counts/RR_south_16.npy',
    ran_pairs)