def test_cluster():
    """Sanity-check bootstrap sampling, FoF clustering, and the three
    angular pair-counting code paths against each other and against the
    analytic expectation for a uniform random sphere sample."""
    numpy.random.seed(1234)
    nsample = 100000
    # uniform points on the sphere, converted to degrees
    dec = numpy.arcsin(numpy.random.uniform(-1, 1, size=nsample)) / numpy.pi * 180
    ra = numpy.random.uniform(0, 2 * numpy.pi, size=nsample) / numpy.pi * 180

    # testing bootstrap: iterating to completion is the whole check
    for area, rand in sphere.bootstrap(4, (ra, dec), 41252.96 / len(dec)):
        pass

    dataset = sphere.points(ra, dec)

    # with a negligible linking length every point is its own FoF group
    groups = cluster.fof(dataset, 0.00001, np=None)
    assert groups.N == len(dataset)

    binning = sphere.AngularBinning(numpy.linspace(0, 1.0, 10))
    binningR = correlate.RBinning(binning.edges)

    r = correlate.paircount(dataset, dataset, binning=binning, usefast=True)
    r1 = correlate.paircount(dataset, dataset, binning=binning, usefast=False)
    r2 = correlate.paircount(dataset, dataset, binning=binningR, usefast=True)

    # all three code paths must agree exactly
    assert_equal(r1.sum1, r2.sum1)
    assert_equal(r1.sum1, r.sum1)

    # analytic expectation: solid-angle fraction of each annulus times N^2
    expected = numpy.diff(
        2 * numpy.pi * (1 - numpy.cos(numpy.radians(binning.angular_edges)))
    ) / (4 * numpy.pi) * len(ra) ** 2
    assert_allclose(r.sum1, expected, rtol=10e-2)
def corr():
    """Compute the angular correlation (dd - 2*dr + rr) / rr for the
    catalogue/random pair configured on ``ns``.

    Returns:
        (angular_centers, w): bin centers in degrees and the estimated
        correlation per angular bin.
    """
    # Context managers ensure the HDF5 files are closed even on error
    # (the original left both handles open — a resource leak).
    with h5py.File(ns.catalogue, 'r') as datafile:
        datamask = datafile['COMPLETENESS'][:] >= 1
        for vetoname in ns.use_tycho_veto:
            datamask &= ~datafile['TYCHO_VETO'][vetoname][:]
        dataRA = datafile['RA'][:][datamask]
        dataDEC = datafile['DEC'][:][datamask]

    with h5py.File(ns.random, 'r') as randfile:
        randmask = randfile['COMPLETENESS'][:] >= 1
        for vetoname in ns.use_tycho_veto:
            randmask &= ~randfile['TYCHO_VETO'][vetoname][:]
        randRA = randfile['RA'][:][randmask]
        randDEC = randfile['DEC'][:][randmask]

    data = sphere.points(dataRA, dataDEC)
    rand = sphere.points(randRA, randDEC)

    abin = sphere.AngularBinning(np.logspace(-3, 0, 16, endpoint=True))
    DD = correlate.paircount(data, data, abin, np=ns.np)
    DR = correlate.paircount(data, rand, abin, np=ns.np)
    RR = correlate.paircount(rand, rand, abin, np=ns.np)

    # normalise pair counts by the data/random number ratio
    r = 1. * len(data) / len(rand)
    dd = 1.0 * DD.sum1
    dr = 1.0 * DR.sum1 * r
    rr = 1.0 * RR.sum1 * (r * r)
    # Landy-Szalay-style estimator
    return abin.angular_centers, (dd - 2 * dr + rr) / rr
def test_field():
    """Smoke test: angular pair counting on a weighted field dataset runs."""
    numpy.random.seed(1234)
    npts = 100000
    # uniform sky positions in degrees
    dec = numpy.arcsin(numpy.random.uniform(-1, 1, size=npts)) / numpy.pi * 180
    ra = numpy.random.uniform(0, 2 * numpy.pi, size=npts) / numpy.pi * 180

    weights = numpy.ones_like(dec) * 0.5
    dataset = sphere.field(ra, dec, value=weights)
    binning = sphere.AngularBinning(numpy.linspace(0, 1.0, 10))
    # the check is simply that paircount completes without error
    correlate.paircount(dataset, dataset, binning=binning)
def reference_2pcf_angular(thetaedges, position1, weight1, position2=None, weight2=None):
    """Reference pair counting via kdcount"""
    # first tree is always built from the (RA, DEC) columns of position1
    tree1 = sphere.points(position1[:, 0], position1[:, 1], weights=weight1)
    if position2 is None:
        # auto-correlation: each pair is counted twice, so halve the sum
        tree2 = tree1
        factor = 1. / 2.
    else:
        tree2 = sphere.points(position2[:, 0], position2[:, 1], weights=weight2)
        factor = 1.
    bins = sphere.AngularBinning(np.asarray(thetaedges))
    counts = correlate.paircount(tree1, tree2, bins)
    return factor * counts.sum1
def ac_yu(self):
    """Angular auto-correlation of the galaxy sample against the randoms
    using a Landy-Szalay-style estimator; returns (bin centers, w)."""
    from kdcount import correlate
    from kdcount import sphere

    bins = sphere.AngularBinning(np.logspace(-4, -2.6, 10))
    gal = sphere.points(self.gal_ra, self.gal_dec)
    ran = sphere.points(self.ran_ra, self.ran_dec)  # weights=wt_array

    gg = correlate.paircount(gal, gal, bins, np=self.ncores)
    gr = correlate.paircount(gal, ran, bins, np=self.ncores)
    rr = correlate.paircount(ran, ran, bins, np=self.ncores)

    # normalisation: ratio of total data weight to total random weight
    ratio = gal.norm / ran.norm
    rr_scaled = ratio ** 2 * rr.sum1
    w = (gg.sum1 - 2 * ratio * gr.sum1 + rr_scaled) / rr_scaled
    return bins.angular_centers, w
def reference_paircount(pos1, w1, edges, pos2=None, w2=None):
    """Reference pair counting via kdcount"""
    # build the first tree; auto-correlate when no second catalogue is given
    tree1 = sphere.points(*pos1, boxsize=None, weights=w1)
    tree2 = tree1 if pos2 is None else sphere.points(*pos2, boxsize=None, weights=w2)

    # run the pair count
    bins = sphere.AngularBinning(edges)
    pc = correlate.paircount(tree1, tree2, bins, np=0, compute_mean_coords=True)

    # NaNs appear in empty bins; map them to zero for downstream comparisons
    return (numpy.nan_to_num(pc.pair_counts),
            numpy.nan_to_num(pc.mean_centers),
            pc.sum1)
def test_cluster():
    """Check bootstrap, FoF clustering, agreement between the Fast/plain/R
    binning paths, and that per-bin mean centers fall inside their edges."""
    numpy.random.seed(1234)
    n = 100000
    # uniform points on the sphere, in degrees
    dec = numpy.arcsin(numpy.random.uniform(-1, 1, size=n)) / numpy.pi * 180
    ra = numpy.random.uniform(0, 2 * numpy.pi, size=n) / numpy.pi * 180

    # testing bootstrap: iteration should simply complete
    for area, rand in sphere.bootstrap(4, (ra, dec), 41252.96 / len(dec)):
        pass

    dataset = sphere.points(ra, dec)

    # with a negligible linking length every point is its own FoF group
    groups = cluster.fof(dataset, 0.00001, np=None)
    assert groups.N == len(dataset)

    edges = numpy.linspace(0, 1.0, 10)
    binning = sphere.FastAngularBinning(edges)
    binning1 = sphere.AngularBinning(edges)
    binningR = correlate.RBinning(binning.edges)

    r = correlate.paircount(dataset, dataset, binning=binning)
    r1 = correlate.paircount(dataset, dataset, binning=binning1,
                             compute_mean_coords=True)
    r2 = correlate.paircount(dataset, dataset, binning=binningR)

    # make sure mean_centers compute angular centers
    for i, val in enumerate(r1.mean_centers):
        assert binning.angular_edges[i] < val < binning.angular_edges[i + 1]

    assert_equal(r1.sum1, r2.sum1)
    assert_equal(r1.sum1, r.sum1)

    # analytic expectation: solid-angle fraction of each annulus times N^2
    expected = numpy.diff(
        2 * numpy.pi * (1 - numpy.cos(numpy.radians(binning.angular_edges)))
    ) / (4 * numpy.pi) * len(ra) ** 2
    assert_allclose(r.sum1, expected, rtol=10e-2)
def corr():
    """Cross-correlate catalogue1 (masked by completeness and Tycho vetoes)
    against catalogue2, using random1 as the random sample.

    Returns:
        (angular_centers, (dd - dr) / dr): bin centers in degrees and the
        simple cross-correlation estimate per angular bin.
    """
    # Context managers ensure the HDF5 files are closed even on error
    # (the original left all three handles open — a resource leak).
    with h5py.File(ns.catalogue1, 'r') as data1file:
        data1mask = data1file['COMPLETENESS'][:] >= 1
        for vetoname in ns.use_tycho_veto:
            data1mask &= ~data1file['TYCHO_VETO'][vetoname][:]
        data1RA = data1file['RA'][:][data1mask]
        data1DEC = data1file['DEC'][:][data1mask]

    with h5py.File(ns.random1, 'r') as rand1file:
        rand1mask = rand1file['COMPLETENESS'][:] >= 1
        for vetoname in ns.use_tycho_veto:
            rand1mask &= ~rand1file['TYCHO_VETO'][vetoname][:]
        rand1RA = rand1file['RA'][:][rand1mask]
        rand1DEC = rand1file['DEC'][:][rand1mask]

    with h5py.File(ns.catalogue2, 'r') as data2file:
        # NOTE(review): completeness/veto masking for catalogue2 is
        # deliberately disabled below; Ellipsis selects all rows. Confirm
        # whether the masking should be re-enabled.
        # data2mask = data2file['COMPLETENESS'][:] >= 1
        # for vetoname in ns.use_tycho_veto:
        #     data2mask &= ~data2file['TYCHO_VETO'][vetoname][:]
        data2mask = Ellipsis
        data2RA = data2file['RA'][:][data2mask]
        data2DEC = data2file['DEC'][:][data2mask]

    data1 = sphere.points(data1RA, data1DEC)
    data2 = sphere.points(data2RA, data2DEC)
    rand1 = sphere.points(rand1RA, rand1DEC)

    abin = sphere.AngularBinning(np.logspace(-3, 0, 16, endpoint=True))
    DD = correlate.paircount(data1, data2, abin, np=ns.np)
    DR = correlate.paircount(rand1, data2, abin, np=ns.np)

    # scale the random pair counts by the data/random number ratio
    r = 1. * len(data1) / len(rand1)
    dd = 1.0 * DD.sum1
    dr = 1.0 * DR.sum1 * r
    # simple (DD - DR) / DR estimator
    return abin.angular_centers, (dd - dr) / dr