Example #1
def test_pairwise():
    # Test the pairwise option.

    ngal = 1000
    s = 10.
    np.random.seed(8675309)
    x1 = np.random.normal(0,s, (ngal,) )
    y1 = np.random.normal(0,s, (ngal,) )
    w1 = np.random.random(ngal)

    x2 = np.random.normal(0,s, (ngal,) )
    y2 = np.random.normal(0,s, (ngal,) )
    w2 = np.random.random(ngal)
    k2 = np.random.normal(0,3, (ngal,) )

    w1 = np.ones_like(w1)
    w2 = np.ones_like(w2)

    cat1 = treecorr.Catalog(x=x1, y=y1, w=w1)
    cat2 = treecorr.Catalog(x=x2, y=y2, w=w2, k=k2)

    min_sep = 5.
    max_sep = 50.
    nbins = 10
    bin_size = np.log(max_sep/min_sep) / nbins
    nk = treecorr.NKCorrelation(min_sep=min_sep, max_sep=max_sep, nbins=nbins)
    nk.process_pairwise(cat1, cat2)
    nk.finalize(cat2.vark)

    true_npairs = np.zeros(nbins, dtype=int)
    true_weight = np.zeros(nbins, dtype=float)
    true_xi = np.zeros(nbins, dtype=float)

    rsq = (x1-x2)**2 + (y1-y2)**2
    r = np.sqrt(rsq)
    logr = np.log(r)

    ww = w1 * w2
    xi = ww * k2

    index = np.floor(np.log(r/min_sep) / bin_size).astype(int)
    mask = (index >= 0) & (index < nbins)
    np.add.at(true_npairs, index[mask], 1)
    np.add.at(true_weight, index[mask], ww[mask])
    np.add.at(true_xi, index[mask], xi[mask])

    true_xi /= true_weight

    np.testing.assert_array_equal(nk.npairs, true_npairs)
    np.testing.assert_allclose(nk.weight, true_weight, rtol=1.e-5, atol=1.e-8)
    np.testing.assert_allclose(nk.xi, true_xi, rtol=1.e-4, atol=1.e-8)

    # If cats have names, then the logger will mention them.
    # Also, test running with optional args.
    cat1.name = "first"
    cat2.name = "second"
    with CaptureLog() as cl:
        nk.logger = cl.logger
        nk.process_pairwise(cat1, cat2, metric='Euclidean', num_threads=2)
    assert "for cats first, second" in cl.output
Example #2
    def calc_pos_shear(self,i,j,verbose,num_threads):

        mask = self.lens_binning==i
        lenscat_i = treecorr.Catalog(w=self.lensweight[mask], ra=self.lens['ra'][mask], dec=self.lens['dec'][mask], ra_units='deg', dec_units='deg')

        mask = self.ran_binning==i
        rancat_i  = treecorr.Catalog(w=np.ones(np.sum(mask)), ra=self.randoms['ra'][mask], dec=self.randoms['dec'][mask], ra_units='deg', dec_units='deg')

        m1,m2,mask = self.get_m(j)
        if self.params['has_sheared']:
            cat_j = treecorr.Catalog(g1=self.shape['e1'][mask]/m1[mask], g2=self.shape['e2'][mask]/m2[mask], w=self.weight[mask], ra=self.shape['ra'][mask], dec=self.shape['dec'][mask], ra_units='deg', dec_units='deg')
        else:
            cat_j = treecorr.Catalog(g1=self.shape['e1'][mask], g2=self.shape['e2'][mask], w=self.weight[mask], ra=self.shape['ra'][mask], dec=self.shape['dec'][mask], ra_units='deg', dec_units='deg')
            biascat_j = treecorr.Catalog(k=np.sqrt(self.shape['m1'][mask]*self.shape['m2'][mask]), w=self.weight[mask], ra=self.shape['ra'][mask], dec=self.shape['dec'][mask], ra_units='deg', dec_units='deg')

        ng = treecorr.NGCorrelation(nbins=self.params['tbins'], min_sep=self.params['tbounds'][0], max_sep=self.params['tbounds'][1], sep_units='arcmin', bin_slop=self.params['slop'], verbose=verbose,num_threads=num_threads)
        rg = treecorr.NGCorrelation(nbins=self.params['tbins'], min_sep=self.params['tbounds'][0], max_sep=self.params['tbounds'][1], sep_units='arcmin', bin_slop=self.params['slop'], verbose=verbose,num_threads=num_threads)
        if self.params['has_sheared']:
            norm = 1.
        else:
            nk = treecorr.NKCorrelation(nbins=self.params['tbins'], min_sep=self.params['tbounds'][0], max_sep=self.params['tbounds'][1], sep_units='arcmin', bin_slop=self.params['slop'], verbose=verbose,num_threads=num_threads)
            nk.process(lenscat_i,biascat_j)
            norm,tmp=nk.calculateXi()
        ng.process(lenscat_i,cat_j)
        rg.process(rancat_i,cat_j)
        gammat,gammat_im,gammaterr=ng.calculateXi(rg)

        theta = np.exp(ng.meanlogr)
        if np.sum(norm) == 0:
            norm = 1.
        gammat /= norm
        gammat_im /= norm
        # The error scales like the signal: sigma = sqrt(var)/norm, not sqrt(var/norm).
        gammaterr = np.sqrt(gammaterr) / norm

        return theta, gammat, gammaterr
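
In this pattern the NK correlation supplies the multiplicative-bias normalization for
the tangential shear: with k = sqrt(m1*m2) on the sources, nk.calculateXi() returns the
pair-weighted mean

    norm(theta) = sum_pairs [w_l * w_s * sqrt(m1*m2)] / sum_pairs [w_l * w_s]

per angular bin, and gammat is divided by it.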
Example #3
def test_single():
    # Use kappa(r) = kappa0 exp(-r^2/2r0^2) (1-r^2/2r0^2) around a single lens

    nsource = 1000000
    kappa0 = 0.05
    r0 = 10.
    L = 5. * r0
    numpy.random.seed(8675309)
    x = (numpy.random.random_sample(nsource) - 0.5) * L
    y = (numpy.random.random_sample(nsource) - 0.5) * L
    r2 = (x**2 + y**2)
    k = kappa0 * numpy.exp(-0.5 * r2 / r0**2) * (1. - 0.5 * r2 / r0**2)

    lens_cat = treecorr.Catalog(x=[0],
                                y=[0],
                                x_units='arcmin',
                                y_units='arcmin')
    source_cat = treecorr.Catalog(x=x,
                                  y=y,
                                  k=k,
                                  x_units='arcmin',
                                  y_units='arcmin')
    nk = treecorr.NKCorrelation(bin_size=0.1,
                                min_sep=1.,
                                max_sep=25.,
                                sep_units='arcmin',
                                verbose=1)
    nk.process(lens_cat, source_cat)

    r = nk.meanr
    true_k = kappa0 * numpy.exp(
        -0.5 * r**2 / r0**2) * (1. - 0.5 * r**2 / r0**2)

    print('nk.xi = ', nk.xi)
    print('true_kappa = ', true_k)
    print('ratio = ', nk.xi / true_k)
    print('diff = ', nk.xi - true_k)
    print('max diff = ', max(abs(nk.xi - true_k)))
    assert max(abs(nk.xi - true_k)) < 4.e-4

    # Check that we get the same result using the corr2 executable:
    if __name__ == '__main__':
        lens_cat.write(os.path.join('data', 'nk_single_lens.dat'))
        source_cat.write(os.path.join('data', 'nk_single_source.dat'))
        import subprocess
        corr2_exe = get_script_name('corr2')
        p = subprocess.Popen([corr2_exe, "nk_single.yaml"])
        p.communicate()
        corr2_output = numpy.genfromtxt(os.path.join('output',
                                                     'nk_single.out'),
                                        names=True)
        print('nk.xi = ', nk.xi)
        print('from corr2 output = ', corr2_output['kappa'])
        print('ratio = ', corr2_output['kappa'] / nk.xi)
        print('diff = ', corr2_output['kappa'] - nk.xi)
        numpy.testing.assert_almost_equal(corr2_output['kappa'] / nk.xi,
                                          1.,
                                          decimal=3)
Example #4
def test_single():
    # Use kappa(r) = kappa0 exp(-r^2/2r0^2) (1-r^2/2r0^2) around a single lens

    nsource = 100000
    kappa0 = 0.05
    r0 = 10.
    L = 5. * r0
    np.random.seed(8675309)
    x = (np.random.random_sample(nsource)-0.5) * L
    y = (np.random.random_sample(nsource)-0.5) * L
    r2 = (x**2 + y**2)
    k = kappa0 * np.exp(-0.5*r2/r0**2) * (1.-0.5*r2/r0**2)

    lens_cat = treecorr.Catalog(x=[0], y=[0], x_units='arcmin', y_units='arcmin')
    source_cat = treecorr.Catalog(x=x, y=y, k=k, x_units='arcmin', y_units='arcmin')
    nk = treecorr.NKCorrelation(bin_size=0.1, min_sep=1., max_sep=25., sep_units='arcmin',
                                verbose=1)
    nk.process(lens_cat, source_cat)

    r = nk.meanr
    true_k = kappa0 * np.exp(-0.5*r**2/r0**2) * (1.-0.5*r**2/r0**2)

    print('nk.xi = ',nk.xi)
    print('true_kappa = ',true_k)
    print('ratio = ',nk.xi / true_k)
    print('diff = ',nk.xi - true_k)
    print('max diff = ',max(abs(nk.xi - true_k)))
    # Note: there is a zero crossing, so need to include atol as well as rtol
    np.testing.assert_allclose(nk.xi, true_k, rtol=1.e-2, atol=1.e-4)

    # Check that we get the same result using the corr2 function
    lens_cat.write(os.path.join('data','nk_single_lens.dat'))
    source_cat.write(os.path.join('data','nk_single_source.dat'))
    config = treecorr.read_config('configs/nk_single.yaml')
    config['verbose'] = 0
    treecorr.corr2(config)
    corr2_output = np.genfromtxt(os.path.join('output','nk_single.out'), names=True,
                                    skip_header=1)
    print('nk.xi = ',nk.xi)
    print('from corr2 output = ',corr2_output['kappa'])
    print('ratio = ',corr2_output['kappa']/nk.xi)
    print('diff = ',corr2_output['kappa']-nk.xi)
    np.testing.assert_allclose(corr2_output['kappa'], nk.xi, rtol=1.e-3)

    # There is special handling for single-row catalogs when using np.genfromtxt rather
    # than pandas.  So mock it up to make sure we test it.
    if sys.version_info < (3,): return  # mock only available on python 3
    from unittest import mock
    with mock.patch.dict(sys.modules, {'pandas':None}):
        with CaptureLog() as cl:
            treecorr.corr2(config, logger=cl.logger)
        assert "Unable to import pandas" in cl.output
    corr2_output = np.genfromtxt(os.path.join('output','nk_single.out'), names=True,
                                    skip_header=1)
    np.testing.assert_allclose(corr2_output['kappa'], nk.xi, rtol=1.e-3)
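
The mock.patch.dict trick above generalizes: setting a module's entry in sys.modules to
None makes any import of it raise ImportError, which is a handy way to force a fallback
code path.  A minimal sketch, independent of TreeCorr:

import sys
from unittest import mock

with mock.patch.dict(sys.modules, {'pandas': None}):
    try:
        import pandas  # raises ImportError while the patch is active
    except ImportError:
        print('pandas hidden; the np.genfromtxt fallback would run here')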
Example #5
def NKCorr(galaxy):

    ra = galaxy[:, 0]
    dec = galaxy[:, 1]
    z = galaxy[:, 2]

    l, b = ecliptic2galactic(ra, dec)
    raa, decc, k = galactic_pixel(l, b)

    galaxy_catalogue = tc.Catalog(ra=raa, dec=decc, k=k, ra_units="deg", dec_units="deg")
    k_ra, k_dec, k_k = denoise(mask_path, alm_path, tmp_path)
    CMB_catalogue = tc.Catalog(ra=k_ra, dec=k_dec, k=k_k, ra_units="deg", dec_units="deg")

    nk = tc.NKCorrelation(nbins=NBINS, min_sep=MIN_SEP, max_sep=MAX_SEP, bin_slop=0.01,
                          verbose=0, sep_units='degrees')
    nk.process(galaxy_catalogue, CMB_catalogue)

    xi = nk.xi
    r = np.exp(nk.meanlogr)

    plt.plot(r, xi, color='blue')
    plt.plot(r, -xi, color='blue', ls=':')
    plt.loglog()
    plt.savefig("/home/yhwu/pic/xi_cross.png", dpi=1000)  # dpi, not png: savefig has no 'png' kwarg
Example #6
def test_direct_spherical():
    # Repeat in spherical coords

    ngal = 100
    s = 10.
    np.random.seed(8675309)
    x1 = np.random.normal(0,s, (ngal,) )
    y1 = np.random.normal(0,s, (ngal,) ) + 200  # Put everything at large y, so small angle on sky
    z1 = np.random.normal(0,s, (ngal,) )
    w1 = np.random.random(ngal)

    x2 = np.random.normal(0,s, (ngal,) )
    y2 = np.random.normal(0,s, (ngal,) ) + 200
    z2 = np.random.normal(0,s, (ngal,) )
    w2 = np.random.random(ngal)
    k2 = np.random.normal(0,3, (ngal,) )

    ra1, dec1 = coord.CelestialCoord.xyz_to_radec(x1,y1,z1)
    ra2, dec2 = coord.CelestialCoord.xyz_to_radec(x2,y2,z2)

    cat1 = treecorr.Catalog(ra=ra1, dec=dec1, ra_units='rad', dec_units='rad', w=w1)
    cat2 = treecorr.Catalog(ra=ra2, dec=dec2, ra_units='rad', dec_units='rad', w=w2, k=k2)

    min_sep = 1.
    max_sep = 10.
    nbins = 50
    bin_size = np.log(max_sep/min_sep) / nbins
    nk = treecorr.NKCorrelation(min_sep=min_sep, max_sep=max_sep, nbins=nbins,
                                sep_units='deg', bin_slop=0.)
    nk.process(cat1, cat2)

    r1 = np.sqrt(x1**2 + y1**2 + z1**2)
    r2 = np.sqrt(x2**2 + y2**2 + z2**2)
    x1 /= r1;  y1 /= r1;  z1 /= r1
    x2 /= r2;  y2 /= r2;  z2 /= r2

    true_npairs = np.zeros(nbins, dtype=int)
    true_weight = np.zeros(nbins, dtype=float)
    true_xi = np.zeros(nbins, dtype=float)

    for i in range(ngal):
        for j in range(ngal):
            rsq = (x1[i]-x2[j])**2 + (y1[i]-y2[j])**2 + (z1[i]-z2[j])**2
            r = np.sqrt(rsq)
            r *= coord.radians / coord.degrees
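            # (r here is the chord length on the unit sphere, read as an angle in
            # radians and converted to degrees to match sep_units='deg'; at these
            # small separations the chord and the arc agree closely.)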
            logr = np.log(r)

            index = np.floor(np.log(r/min_sep) / bin_size).astype(int)
            if index < 0 or index >= nbins:
                continue

            # (Unlike the NG/KG versions of this test, no rotation to the frame
            # where the connecting line is horizontal is needed here: kappa is
            # a scalar, so it is rotation-invariant.)

            ww = w1[i] * w2[j]
            xi = ww * k2[j]

            true_npairs[index] += 1
            true_weight[index] += ww
            true_xi[index] += xi

    true_xi /= true_weight

    print('true_npairs = ',true_npairs)
    print('diff = ',nk.npairs - true_npairs)
    np.testing.assert_array_equal(nk.npairs, true_npairs)

    print('true_weight = ',true_weight)
    print('diff = ',nk.weight - true_weight)
    np.testing.assert_allclose(nk.weight, true_weight, rtol=1.e-5, atol=1.e-8)

    print('true_xi = ',true_xi)
    print('nk.xi = ',nk.xi)
    np.testing.assert_allclose(nk.xi, true_xi, rtol=1.e-4, atol=1.e-8)

    try:
        import fitsio
    except ImportError:
        print('Skipping FITS tests, since fitsio is not installed')
        return

    # Check that running via the corr2 script works correctly.
    config = treecorr.config.read_config('configs/nk_direct_spherical.yaml')
    cat1.write(config['file_name'])
    cat2.write(config['file_name2'])
    treecorr.corr2(config)
    data = fitsio.read(config['nk_file_name'])
    np.testing.assert_allclose(data['R_nom'], nk.rnom)
    np.testing.assert_allclose(data['npairs'], nk.npairs)
    np.testing.assert_allclose(data['weight'], nk.weight)
    np.testing.assert_allclose(data['kappa'], nk.xi, rtol=1.e-3)

    # Repeat with binslop not precisely 0, since the code flow is different for bin_slop == 0.
    # And don't do any top-level recursion so we actually test not going to the leaves.
    nk = treecorr.NKCorrelation(min_sep=min_sep, max_sep=max_sep, nbins=nbins,
                                sep_units='deg', bin_slop=1.e-16, max_top=0)
    nk.process(cat1, cat2)
    np.testing.assert_array_equal(nk.npairs, true_npairs)
    np.testing.assert_allclose(nk.weight, true_weight, rtol=1.e-5, atol=1.e-8)
    np.testing.assert_allclose(nk.xi, true_xi, rtol=1.e-3, atol=1.e-6)
Example #7
def test_nk():
    # Use kappa(r) = kappa0 exp(-r^2/2r0^2) (1-r^2/2r0^2) around many lenses.

    nlens = 1000
    nsource = 100000
    kappa0 = 0.05
    r0 = 10.
    L = 50. * r0
    numpy.random.seed(8675309)
    xl = (numpy.random.random_sample(nlens) - 0.5) * L
    yl = (numpy.random.random_sample(nlens) - 0.5) * L
    xs = (numpy.random.random_sample(nsource) - 0.5) * L
    ys = (numpy.random.random_sample(nsource) - 0.5) * L
    k = numpy.zeros((nsource, ))
    for x, y in zip(xl, yl):
        dx = xs - x
        dy = ys - y
        r2 = dx**2 + dy**2
        k += kappa0 * numpy.exp(-0.5 * r2 / r0**2) * (1. - 0.5 * r2 / r0**2)

    lens_cat = treecorr.Catalog(x=xl, y=yl, x_units='arcmin', y_units='arcmin')
    source_cat = treecorr.Catalog(x=xs,
                                  y=ys,
                                  k=k,
                                  x_units='arcmin',
                                  y_units='arcmin')
    nk = treecorr.NKCorrelation(bin_size=0.1,
                                min_sep=1.,
                                max_sep=25.,
                                sep_units='arcmin',
                                verbose=1)
    nk.process(lens_cat, source_cat)

    # log(<R>) != <logR>, but it should be close:
    print('meanlogr - log(meanr) = ', nk.meanlogr - numpy.log(nk.meanr))
    numpy.testing.assert_almost_equal(nk.meanlogr,
                                      numpy.log(nk.meanr),
                                      decimal=3)

    r = nk.meanr
    true_k = kappa0 * numpy.exp(
        -0.5 * r**2 / r0**2) * (1. - 0.5 * r**2 / r0**2)

    print('nk.xi = ', nk.xi)
    print('true_kappa = ', true_k)
    print('ratio = ', nk.xi / true_k)
    print('diff = ', nk.xi - true_k)
    print('max diff = ', max(abs(nk.xi - true_k)))
    assert max(abs(nk.xi - true_k)) < 5.e-3

    nrand = nlens * 13
    xr = (numpy.random.random_sample(nrand) - 0.5) * L
    yr = (numpy.random.random_sample(nrand) - 0.5) * L
    rand_cat = treecorr.Catalog(x=xr, y=yr, x_units='arcmin', y_units='arcmin')
    rk = treecorr.NKCorrelation(bin_size=0.1,
                                min_sep=1.,
                                max_sep=25.,
                                sep_units='arcmin',
                                verbose=1)
    rk.process(rand_cat, source_cat)
    print('rk.xi = ', rk.xi)
    xi, varxi = nk.calculateXi(rk)
    print('compensated xi = ', xi)
    print('true_kappa = ', true_k)
    print('ratio = ', xi / true_k)
    print('diff = ', xi - true_k)
    print('max diff = ', max(abs(xi - true_k)))
    # It turns out this doesn't come out much better.  I think the imprecision is mostly
    # just due to the smallish number of lenses, not to edge effects.
    assert max(abs(xi - true_k)) < 5.e-3

    # Check that we get the same result using the corr2 executable:
    if __name__ == '__main__':
        lens_cat.write(os.path.join('data', 'nk_lens.dat'))
        source_cat.write(os.path.join('data', 'nk_source.dat'))
        rand_cat.write(os.path.join('data', 'nk_rand.dat'))
        import subprocess
        corr2_exe = get_script_name('corr2')
        p = subprocess.Popen([corr2_exe, "nk.yaml"])
        p.communicate()
        corr2_output = numpy.genfromtxt(os.path.join('output', 'nk.out'),
                                        names=True)
        print('nk.xi = ', nk.xi)
        print('xi = ', xi)
        print('from corr2 output = ', corr2_output['kappa'])
        print('ratio = ', corr2_output['kappa'] / xi)
        print('diff = ', corr2_output['kappa'] - xi)
        numpy.testing.assert_almost_equal(corr2_output['kappa'] / xi,
                                          1.,
                                          decimal=3)

    # Check the fits write option
    out_file_name1 = os.path.join('output', 'nk_out1.fits')
    nk.write(out_file_name1)
    data = fitsio.read(out_file_name1)
    numpy.testing.assert_almost_equal(data['R_nom'], numpy.exp(nk.logr))
    numpy.testing.assert_almost_equal(data['meanR'], nk.meanr)
    numpy.testing.assert_almost_equal(data['meanlogR'], nk.meanlogr)
    numpy.testing.assert_almost_equal(data['kappa'], nk.xi)
    numpy.testing.assert_almost_equal(data['sigma'], numpy.sqrt(nk.varxi))
    numpy.testing.assert_almost_equal(data['weight'], nk.weight)
    numpy.testing.assert_almost_equal(data['npairs'], nk.npairs)

    out_file_name2 = os.path.join('output', 'nk_out2.fits')
    nk.write(out_file_name2, rk)
    data = fitsio.read(out_file_name2)
    numpy.testing.assert_almost_equal(data['R_nom'], numpy.exp(nk.logr))
    numpy.testing.assert_almost_equal(data['meanR'], nk.meanr)
    numpy.testing.assert_almost_equal(data['meanlogR'], nk.meanlogr)
    numpy.testing.assert_almost_equal(data['kappa'], xi)
    numpy.testing.assert_almost_equal(data['sigma'], numpy.sqrt(varxi))
    numpy.testing.assert_almost_equal(data['weight'], nk.weight)
    numpy.testing.assert_almost_equal(data['npairs'], nk.npairs)

    # Check the read function
    nk2 = treecorr.NKCorrelation(bin_size=0.1,
                                 min_sep=1.,
                                 max_sep=25.,
                                 sep_units='arcmin')
    nk2.read(out_file_name1)
    numpy.testing.assert_almost_equal(nk2.logr, nk.logr)
    numpy.testing.assert_almost_equal(nk2.meanr, nk.meanr)
    numpy.testing.assert_almost_equal(nk2.meanlogr, nk.meanlogr)
    numpy.testing.assert_almost_equal(nk2.xi, nk.xi)
    numpy.testing.assert_almost_equal(nk2.varxi, nk.varxi)
    numpy.testing.assert_almost_equal(nk2.weight, nk.weight)
    numpy.testing.assert_almost_equal(nk2.npairs, nk.npairs)
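
The compensated statistic returned by nk.calculateXi(rk) above is the difference of the
lens and random measurements, with their variances adding.  A minimal sketch of that
arithmetic (illustrative only, not the library's actual implementation):

def compensated_xi(nk, rk):
    # xi_comp = xi_NK - xi_RK; the two measurements are independent,
    # so the variances add.
    return nk.xi - rk.xi, nk.varxi + rk.varxi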
Example #8
                'w_col': 'w',
                'ra_units': 'deg',
                'dec_units': 'deg'}


    nside = 2048
    logging.info(f"Calculating for NSIDE = {nside}")
    rcscat = treecorr.Catalog("rcslens.fits", rcsconfig)
    szcat = treecorr.Catalog(f"szmaps_masked{maskno}_{nside}.fits", szconfig)
    m1cat = treecorr.Catalog("rcslens.fits", m1config)

    kg = treecorr.KGCorrelation(corrconfig,logger=logging.getLogger())
    kg.process(szcat, rcscat)   # Calculate the cross-correlation
    kg.write(f"crosscorr{maskno}_{nside}.result")

    nk = treecorr.NKCorrelation(corrconfig,logger=logging.getLogger())
    nk.process(szcat, m1cat)
    nk.write(f"calib{maskno}_{nside}.result")

    ny = treecorr.NKCorrelation(corrconfig,logger=logging.getLogger())
    ny.process(rcscat, szcat)
    ny.write(f"ycorr{maskno}_{nside}.result")
    
    ng = treecorr.NGCorrelation(corrconfig,logger=logging.getLogger())
    ng.process(rcscat, rcscat)
    ng.write(f"shearcorr{maskno}_{nside}.result")

    gg = treecorr.GGCorrelation(corrconfig,logger=logging.getLogger())
    gg.process(rcscat, rcscat)
    gg.write(f"shear_auto_corr{maskno}_{nside}.result")
Example #9
def test_sample_pairs():

    nobj = 10000
    rng = np.random.RandomState(8675309)
    x1 = rng.random_sample(nobj)   # All from 0..1
    y1 = rng.random_sample(nobj)
    z1 = rng.random_sample(nobj)
    w1 = rng.random_sample(nobj)
    use = rng.randint(30, size=nobj).astype(float)
    w1[use == 0] = 0
    g11 = rng.random_sample(nobj)
    g21 = rng.random_sample(nobj)
    k1 = rng.random_sample(nobj)

    x2 = rng.random_sample(nobj)   # All from 0..1
    y2 = rng.random_sample(nobj)
    z2 = rng.random_sample(nobj)
    w2 = rng.random_sample(nobj)
    use = rng.randint(30, size=nobj).astype(float)
    w2[use == 0] = 0
    g12 = rng.random_sample(nobj)
    g22 = rng.random_sample(nobj)
    k2 = rng.random_sample(nobj)

    # Start with flat coords

    cat1 = treecorr.Catalog(x=x1, y=y1, w=w1, g1=g11, g2=g21, k=k1, keep_zero_weight=True)
    cat2 = treecorr.Catalog(x=x2, y=y2, w=w2, g1=g12, g2=g22, k=k2, keep_zero_weight=True)

    # Note: extend range low enough that some bins have < 100 pairs.
    nn = treecorr.NNCorrelation(min_sep=0.001, max_sep=0.01, bin_size=0.1, max_top=0)
    nn.process(cat1, cat2)
    print('rnom = ',nn.rnom)
    print('npairs = ',nn.npairs.astype(int))

    # Start with a bin near the bottom with < 100 pairs
    # This only exercises case 1 in the sampleFrom function.
    b = 1
    i1, i2, sep = nn.sample_pairs(100, cat1, cat2,
                                  min_sep=nn.left_edges[b], max_sep=nn.right_edges[b])

    print('i1 = ',i1)
    print('i2 = ',i2)
    print('sep = ',sep)
    assert nn.npairs[b] <= 100  # i.e. make sure these next tests are what we want to do.
    assert len(i1) == nn.npairs[b]
    assert len(i2) == nn.npairs[b]
    assert len(sep) == nn.npairs[b]
    actual_sep = ((x1[i1]-x2[i2])**2 + (y1[i1]-y2[i2])**2)**0.5
    np.testing.assert_allclose(sep, actual_sep, rtol=0.1)  # half bin size with slop.
    np.testing.assert_array_less(sep, nn.right_edges[b])
    np.testing.assert_array_less(nn.left_edges[b], sep)

    # Next one that still isn't too many pairs, but more than 100
    # This exercises cases 1,2 in the sampleFrom function.
    b = 10
    i1, i2, sep = nn.sample_pairs(100, cat1, cat2,
                                  min_sep=nn.left_edges[b], max_sep=nn.right_edges[b])

    print('i1 = ',i1)
    print('i2 = ',i2)
    print('sep = ',sep)
    assert nn.npairs[b] > 100
    assert len(i1) == 100
    assert len(i2) == 100
    assert len(sep) == 100
    actual_sep = ((x1[i1]-x2[i2])**2 + (y1[i1]-y2[i2])**2)**0.5
    np.testing.assert_allclose(sep, actual_sep, rtol=0.1)
    np.testing.assert_array_less(sep, nn.right_edges[b])
    np.testing.assert_array_less(nn.left_edges[b], sep)

    # To exercise case 3, we need to go to larger separations, so the recursion
    # more often stops before getting to the leaves.
    # Also switch to 3d coordinates.

    cat1 = treecorr.Catalog(x=x1, y=y1, z=z1, w=w1, g1=g11, g2=g21, k=k1, keep_zero_weight=True)
    cat2 = treecorr.Catalog(x=x2, y=y2, z=z2, w=w2, g1=g12, g2=g22, k=k2, keep_zero_weight=True)

    gg = treecorr.GGCorrelation(min_sep=0.4, nbins=10, bin_size=0.1, max_top=0)
    gg.process(cat1, cat2)
    print('rnom = ',gg.rnom)
    print('npairs = ',gg.npairs.astype(int))
    for b in [0,5]:
        i1, i2, sep = gg.sample_pairs(100, cat1, cat2,
                                      min_sep=gg.left_edges[b], max_sep=gg.right_edges[b])

        print('len(npairs) = ',len(gg.npairs))
        print('npairs = ',gg.npairs)
        print('i1 = ',i1)
        print('i2 = ',i2)
        print('sep = ',sep)
        assert len(i1) == 100
        assert len(i2) == 100
        assert len(sep) == 100
        actual_sep = ((x1[i1]-x2[i2])**2 + (y1[i1]-y2[i2])**2 + (z1[i1]-z2[i2])**2)**0.5
        np.testing.assert_allclose(sep, actual_sep, rtol=0.2)
        np.testing.assert_array_less(sep, gg.right_edges[b])
        np.testing.assert_array_less(gg.left_edges[b], sep)

    # Check a different metric.
    # Also ability to generate the field automatically.
    cat1.clear_cache()  # Clears the previously made cat1.field
    cat2.clear_cache()  # and cat2.field

    b = 3
    with CaptureLog() as cl:
        nk = treecorr.NKCorrelation(min_sep=0.4, max_sep=1.0, bin_size=0.1, max_top=0,
                                    logger=cl.logger)
        i1, i2, sep = nk.sample_pairs(100, cat1, cat2, metric='Arc',
                                      min_sep=nk.left_edges[b], max_sep=nk.right_edges[b])
    print(cl.output)
    nk.process(cat1, cat2, metric='Arc')
    print('len(npairs) = ',len(nk.npairs))
    print('npairs = ',nk.npairs)
    assert "Sampled %d pairs out of a total of %d"%(100, nk.npairs[b]) in cl.output
    print('i1 = ',i1)
    print('i2 = ',i2)
    print('sep = ',sep)
    assert len(i1) == 100
    assert len(i2) == 100
    assert len(sep) == 100
    r1 = (x1**2 + y1**2 + z1**2)**0.5
    r2 = (x2**2 + y2**2 + z2**2)**0.5
    xx1 = x1/r1
    yy1 = y1/r1
    zz1 = z1/r1
    xx2 = x2/r2
    yy2 = y2/r2
    zz2 = z2/r2
    chord_sep = ((xx1[i1]-xx2[i2])**2 + (yy1[i1]-yy2[i2])**2 + (zz1[i1]-zz2[i2])**2)**0.5
    arc_sep = np.arcsin(chord_sep/2.)*2.
    print('arc_sep = ',arc_sep)
    np.testing.assert_allclose(sep, arc_sep, rtol=0.1)
    np.testing.assert_array_less(sep, nk.right_edges[b])
    np.testing.assert_array_less(nk.left_edges[b], sep)

    # Finally, check spherical coords with non-default units.
    ra1, dec1 = coord.CelestialCoord.xyz_to_radec(x1,y1,z1)
    ra2, dec2 = coord.CelestialCoord.xyz_to_radec(x2,y2,z2)
    cat1 = treecorr.Catalog(ra=ra1, dec=dec1, ra_units='rad', dec_units='rad')
    cat2 = treecorr.Catalog(ra=ra2, dec=dec2, ra_units='rad', dec_units='rad')

    nn = treecorr.NNCorrelation(min_sep=1., max_sep=60., nbins=50, sep_units='deg', metric='Arc')
    nn.process(cat1, cat2)
    print('rnom = ',nn.rnom)
    print('npairs = ',nn.npairs.astype(int))

    b = 5
    n = 50
    i1, i2, sep = nn.sample_pairs(n, cat1, cat2,
                                  min_sep=nn.left_edges[b], max_sep=nn.right_edges[b])

    print('i1 = ',i1)
    print('i2 = ',i2)
    print('sep = ',sep)
    assert nn.npairs[b] > n
    assert len(i1) == n
    assert len(i2) == n
    assert len(sep) == n

    c1 = [coord.CelestialCoord(r*coord.radians, d*coord.radians) for (r,d) in zip(ra1,dec1)]
    c2 = [coord.CelestialCoord(r*coord.radians, d*coord.radians) for (r,d) in zip(ra2,dec2)]
    actual_sep = np.array([c1[i1[k]].distanceTo(c2[i2[k]]) / coord.degrees for k in range(n)])
    print('actual_sep = ',actual_sep)
    np.testing.assert_allclose(sep, actual_sep, rtol=0.1)
    np.testing.assert_array_less(sep, nn.right_edges[b])
    np.testing.assert_array_less(nn.left_edges[b], sep)
Example #10
def test_nk():
    # Use kappa(r) = kappa0 exp(-r^2/2r0^2) (1-r^2/2r0^2) around many lenses.

    nlens = 1000
    nsource = 100000
    kappa0 = 0.05
    r0 = 10.
    L = 100. * r0
    rng = np.random.RandomState(8675309)
    xl = (rng.random_sample(nlens) - 0.5) * L
    yl = (rng.random_sample(nlens) - 0.5) * L
    xs = (rng.random_sample(nsource) - 0.5) * L
    ys = (rng.random_sample(nsource) - 0.5) * L
    k = np.zeros((nsource, ))
    for x, y in zip(xl, yl):
        dx = xs - x
        dy = ys - y
        r2 = dx**2 + dy**2
        k += kappa0 * np.exp(-0.5 * r2 / r0**2) * (1. - 0.5 * r2 / r0**2)

    lens_cat = treecorr.Catalog(x=xl, y=yl, x_units='arcmin', y_units='arcmin')
    source_cat = treecorr.Catalog(x=xs,
                                  y=ys,
                                  k=k,
                                  x_units='arcmin',
                                  y_units='arcmin')
    nk = treecorr.NKCorrelation(bin_size=0.1,
                                min_sep=1.,
                                max_sep=20.,
                                sep_units='arcmin',
                                verbose=1)
    nk.process(lens_cat, source_cat)

    # log(<R>) != <logR>, but it should be close:
    print('meanlogr - log(meanr) = ', nk.meanlogr - np.log(nk.meanr))
    np.testing.assert_allclose(nk.meanlogr, np.log(nk.meanr), atol=1.e-3)

    r = nk.meanr
    true_k = kappa0 * np.exp(-0.5 * r**2 / r0**2) * (1. - 0.5 * r**2 / r0**2)

    print('nk.xi = ', nk.xi)
    print('true_kappa = ', true_k)
    print('ratio = ', nk.xi / true_k)
    print('diff = ', nk.xi - true_k)
    print('max diff = ', max(abs(nk.xi - true_k)))
    np.testing.assert_allclose(nk.xi, true_k, rtol=0.1, atol=2.e-3)

    nrand = nlens * 13
    xr = (rng.random_sample(nrand) - 0.5) * L
    yr = (rng.random_sample(nrand) - 0.5) * L
    rand_cat = treecorr.Catalog(x=xr, y=yr, x_units='arcmin', y_units='arcmin')
    rk = treecorr.NKCorrelation(bin_size=0.1,
                                min_sep=1.,
                                max_sep=20.,
                                sep_units='arcmin',
                                verbose=1)
    rk.process(rand_cat, source_cat)
    print('rk.xi = ', rk.xi)
    xi, varxi = nk.calculateXi(rk)
    print('compensated xi = ', xi)
    print('true_kappa = ', true_k)
    print('ratio = ', xi / true_k)
    print('diff = ', xi - true_k)
    print('max diff = ', max(abs(xi - true_k)))
    # It turns out this doesn't come out much better.  I think the imprecision is mostly
    # just due to the smallish number of lenses, not to edge effects.
    np.testing.assert_allclose(xi, true_k, rtol=0.05, atol=1.e-3)

    try:
        import fitsio
    except ImportError:
        print('Skipping FITS tests, since fitsio is not installed')
        return

    # Check that we get the same result using the corr2 function
    lens_cat.write(os.path.join('data', 'nk_lens.fits'))
    source_cat.write(os.path.join('data', 'nk_source.fits'))
    rand_cat.write(os.path.join('data', 'nk_rand.fits'))
    config = treecorr.read_config('configs/nk.yaml')
    config['verbose'] = 0
    config['precision'] = 8
    treecorr.corr2(config)
    corr2_output = np.genfromtxt(os.path.join('output', 'nk.out'),
                                 names=True,
                                 skip_header=1)
    print('nk.xi = ', nk.xi)
    print('xi = ', xi)
    print('from corr2 output = ', corr2_output['kappa'])
    print('ratio = ', corr2_output['kappa'] / xi)
    print('diff = ', corr2_output['kappa'] - xi)
    np.testing.assert_allclose(corr2_output['kappa'], xi, rtol=1.e-3)

    # In the corr2 context, you can turn off the compensated bit, even if there are randoms
    # (e.g. maybe you only want randoms for some nn calculation, but not nk.)
    config['nk_statistic'] = 'simple'
    treecorr.corr2(config)
    corr2_output = np.genfromtxt(os.path.join('output', 'nk.out'),
                                 names=True,
                                 skip_header=1)
    xi_simple, _ = nk.calculateXi()
    np.testing.assert_equal(xi_simple, nk.xi)
    np.testing.assert_allclose(corr2_output['kappa'], xi_simple, rtol=1.e-3)

    # Check the fits write option
    out_file_name1 = os.path.join('output', 'nk_out1.fits')
    nk.write(out_file_name1)
    data = fitsio.read(out_file_name1)
    np.testing.assert_almost_equal(data['r_nom'], np.exp(nk.logr))
    np.testing.assert_almost_equal(data['meanr'], nk.meanr)
    np.testing.assert_almost_equal(data['meanlogr'], nk.meanlogr)
    np.testing.assert_almost_equal(data['kappa'], nk.xi)
    np.testing.assert_almost_equal(data['sigma'], np.sqrt(nk.varxi))
    np.testing.assert_almost_equal(data['weight'], nk.weight)
    np.testing.assert_almost_equal(data['npairs'], nk.npairs)

    out_file_name2 = os.path.join('output', 'nk_out2.fits')
    nk.write(out_file_name2, rk)
    data = fitsio.read(out_file_name2)
    np.testing.assert_almost_equal(data['r_nom'], np.exp(nk.logr))
    np.testing.assert_almost_equal(data['meanr'], nk.meanr)
    np.testing.assert_almost_equal(data['meanlogr'], nk.meanlogr)
    np.testing.assert_almost_equal(data['kappa'], xi)
    np.testing.assert_almost_equal(data['sigma'], np.sqrt(varxi))
    np.testing.assert_almost_equal(data['weight'], nk.weight)
    np.testing.assert_almost_equal(data['npairs'], nk.npairs)

    # Check the read function
    nk2 = treecorr.NKCorrelation(bin_size=0.1,
                                 min_sep=1.,
                                 max_sep=20.,
                                 sep_units='arcmin')
    nk2.read(out_file_name2)
    np.testing.assert_almost_equal(nk2.logr, nk.logr)
    np.testing.assert_almost_equal(nk2.meanr, nk.meanr)
    np.testing.assert_almost_equal(nk2.meanlogr, nk.meanlogr)
    np.testing.assert_almost_equal(nk2.xi, nk.xi)
    np.testing.assert_almost_equal(nk2.varxi, nk.varxi)
    np.testing.assert_almost_equal(nk2.weight, nk.weight)
    np.testing.assert_almost_equal(nk2.npairs, nk.npairs)
    assert nk2.coords == nk.coords
    assert nk2.metric == nk.metric
    assert nk2.sep_units == nk.sep_units
    assert nk2.bin_type == nk.bin_type
Example #11
def test_twod():
    try:
        from scipy.spatial.distance import pdist, squareform
    except ImportError:
        print('Skipping test_twod, since it uses scipy, and scipy is not installed.')
        return

    # N random points in 2 dimensions
    rng = np.random.RandomState(8675309)
    N = 200
    x = rng.uniform(-20, 20, N)
    y = rng.uniform(-20, 20, N)
    
    # Give the points a multivariate Gaussian random field for kappa and gamma
    L1 = [[0.33, 0.09], [-0.01, 0.26]]  # Some arbitrary correlation matrix
    invL1 = np.linalg.inv(L1)
    dists = pdist(np.array([x,y]).T, metric='mahalanobis', VI=invL1)
    K = np.exp(-0.5 * dists**2)
    K = squareform(K)
    np.fill_diagonal(K, 1.)

    A = 2.3
    kappa = rng.multivariate_normal(np.zeros(N), K*(A**2))

    # Add some noise
    sigma = A/10.
    kappa += rng.normal(scale=sigma, size=N)
    kappa_err = np.ones_like(kappa) * sigma

    # Make gamma too
    gamma1 = rng.multivariate_normal(np.zeros(N), K*(A**2))
    gamma1 += rng.normal(scale=sigma, size=N)
    gamma2 = rng.multivariate_normal(np.zeros(N), K*(A**2))
    gamma2 += rng.normal(scale=sigma, size=N)
    gamma = gamma1 + 1j * gamma2
    gamma_err = kappa_err

    # Calculate the 2D correlation using brute force
    max_sep = 21.
    nbins = 21
    xi_brut = corr2d(x, y, kappa, kappa, w=None, rmax=max_sep, bins=nbins)

    cat1 = treecorr.Catalog(x=x, y=y, k=kappa, g1=gamma1, g2=gamma2)
    kk = treecorr.KKCorrelation(min_sep=0., max_sep=max_sep, nbins=nbins, bin_type='TwoD',
                                brute=True)

    # First the simplest case to get right: cross correlation of the catalog with itself.
    kk.process(cat1, cat1)

    print('max abs diff = ',np.max(np.abs(kk.xi - xi_brut)))
    print('max rel diff = ',np.max(np.abs(kk.xi - xi_brut)/np.abs(kk.xi)))
    np.testing.assert_allclose(kk.xi, xi_brut, atol=1.e-7)

    # Auto-correlation should do the same thing.
    kk.process(cat1)
    print('max abs diff = ',np.max(np.abs(kk.xi - xi_brut)))
    print('max rel diff = ',np.max(np.abs(kk.xi - xi_brut)/np.abs(kk.xi)))
    np.testing.assert_allclose(kk.xi, xi_brut, atol=1.e-7)

    # Repeat with weights.
    xi_brut = corr2d(x, y, kappa, kappa, w=1./kappa_err**2, rmax=max_sep, bins=nbins)
    cat2 = treecorr.Catalog(x=x, y=y, k=kappa, g1=gamma1, g2=gamma2, w=1./kappa_err**2)
    # NB. Testing that min_sep=0 is the default.
    kk = treecorr.KKCorrelation(max_sep=max_sep, nbins=nbins, bin_type='TwoD', brute=True)
    kk.process(cat2, cat2)
    print('max abs diff = ',np.max(np.abs(kk.xi - xi_brut)))
    print('max rel diff = ',np.max(np.abs(kk.xi - xi_brut)/np.abs(kk.xi)))
    np.testing.assert_allclose(kk.xi, xi_brut, atol=1.e-7)

    kk.process(cat2)
    np.testing.assert_allclose(kk.xi, xi_brut, atol=1.e-7)

    # Check GG
    xi_brut = corr2d(x, y, gamma, np.conj(gamma), rmax=max_sep, bins=nbins)
    # Equivalent bin_size = 2.  Check omitting nbins
    gg = treecorr.GGCorrelation(max_sep=max_sep, bin_size=2., bin_type='TwoD', brute=True)
    gg.process(cat1)
    print('max abs diff = ',np.max(np.abs(gg.xip - xi_brut)))
    print('max rel diff = ',np.max(np.abs(gg.xip - xi_brut)/np.abs(gg.xip)))
    np.testing.assert_allclose(gg.xip, xi_brut, atol=2.e-7)

    xi_brut = corr2d(x, y, gamma, np.conj(gamma), w=1./kappa_err**2, rmax=max_sep, bins=nbins)
    # Check omitting max_sep
    gg = treecorr.GGCorrelation(bin_size=2, nbins=nbins, bin_type='TwoD', brute=True)
    gg.process(cat2)
    print('max abs diff = ',np.max(np.abs(gg.xip - xi_brut)))
    print('max rel diff = ',np.max(np.abs(gg.xip - xi_brut)/np.abs(gg.xip)))
    np.testing.assert_allclose(gg.xip, xi_brut, atol=2.e-7)

    # Check NK
    xi_brut = corr2d(x, y, np.ones_like(kappa), kappa, rmax=max_sep, bins=nbins)
    # Check slightly larger bin_size gets rounded down
    nk = treecorr.NKCorrelation(max_sep=max_sep, bin_size=2.05, bin_type='TwoD', brute=True)
    nk.process(cat1, cat1)
    print('max abs diff = ',np.max(np.abs(nk.xi - xi_brut)))
    print('max rel diff = ',np.max(np.abs(nk.xi - xi_brut)/np.abs(nk.xi)))
    np.testing.assert_allclose(nk.xi, xi_brut, atol=1.e-7)

    xi_brut = corr2d(x, y, np.ones_like(kappa), kappa, w=1./kappa_err**2, rmax=max_sep, bins=nbins)
    # Check very small, but non-zero min_sep
    nk = treecorr.NKCorrelation(min_sep=1.e-6, max_sep=max_sep, nbins=nbins,
                                bin_type='TwoD', brute=True)
    nk.process(cat2, cat2)
    print('max abs diff = ',np.max(np.abs(nk.xi - xi_brut)))
    print('max rel diff = ',np.max(np.abs(nk.xi - xi_brut)/np.abs(nk.xi)))
    np.testing.assert_allclose(nk.xi, xi_brut, atol=1.e-7)

    # Check NN
    xi_brut, counts = corr2d(x, y, np.ones_like(kappa), np.ones_like(kappa),
                             rmax=max_sep, bins=nbins, return_counts=True)
    nn = treecorr.NNCorrelation(max_sep=max_sep, nbins=nbins, bin_type='TwoD', brute=True)
    nn.process(cat1)
    print('max abs diff = ',np.max(np.abs(nn.npairs - counts)))
    print('max rel diff = ',np.max(np.abs(nn.npairs - counts)/np.abs(nn.npairs)))
    np.testing.assert_allclose(nn.npairs, counts, atol=1.e-7)

    nn.process(cat1, cat1)
    print('max abs diff = ',np.max(np.abs(nn.npairs - counts)))
    print('max rel diff = ',np.max(np.abs(nn.npairs - counts)/np.abs(nn.npairs)))
    np.testing.assert_allclose(nn.npairs, counts, atol=1.e-7)

    xi_brut, counts = corr2d(x, y, np.ones_like(kappa), np.ones_like(kappa),
                             w=1./kappa_err**2, rmax=max_sep, bins=nbins, return_counts=True)
    nn = treecorr.NNCorrelation(max_sep=max_sep, nbins=nbins, bin_type='TwoD', brute=True)
    nn.process(cat2)
    print('max abs diff = ',np.max(np.abs(nn.weight - counts)))
    print('max rel diff = ',np.max(np.abs(nn.weight - counts)/np.abs(nn.weight)))
    np.testing.assert_allclose(nn.weight, counts, atol=1.e-7)

    nn.process(cat2, cat2)
    print('max abs diff = ',np.max(np.abs(nn.weight - counts)))
    print('max rel diff = ',np.max(np.abs(nn.weight - counts)/np.abs(nn.weight)))
    np.testing.assert_allclose(nn.weight, counts, atol=1.e-7)

    # The other two, NG and KG, can't really be checked with the brute force
    # calculator we have here, so we're counting on the above being a sufficient
    # test of all aspects of the twod binning.  I think that it is sufficient, but I
    # admit I would prefer if we had a real test of these other two pairs, along
    # with xi- for GG.

    # Check invalid constructors
    assert_raises(TypeError, treecorr.NNCorrelation, max_sep=max_sep, nbins=nbins, bin_size=2,
                  bin_type='TwoD')
    assert_raises(TypeError, treecorr.NNCorrelation, nbins=nbins, bin_type='TwoD')
    assert_raises(TypeError, treecorr.NNCorrelation, bin_size=2, bin_type='TwoD')
    assert_raises(TypeError, treecorr.NNCorrelation, max_sep=max_sep, bin_type='TwoD')
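
The corr2d helper used throughout this example comes from TreeCorr's test suite and is
not shown here.  A minimal brute-force sketch of such a 2D correlator (assuming unit
weights when w is None, and square bins covering [-rmax, rmax] on each axis):

import numpy as np

def corr2d(x, y, v1, v2, w=None, rmax=1., bins=10, return_counts=False):
    # Accumulate all ordered pairs (i, j), binned by 2D separation (dx, dy).
    n = len(x)
    if w is None:
        w = np.ones(n)
    i1, i2 = np.meshgrid(np.arange(n), np.arange(n))
    i1 = i1.ravel()
    i2 = i2.ravel()
    dx = x[i2] - x[i1]
    dy = y[i2] - y[i1]
    ww = w[i1] * w[i2]
    vv = np.real(v1[i1] * v2[i2])
    rng = [[-rmax, rmax], [-rmax, rmax]]
    counts, _, _ = np.histogram2d(dx, dy, bins=bins, range=rng, weights=ww)
    sums, _, _ = np.histogram2d(dx, dy, bins=bins, range=rng, weights=ww * vv)
    xi = sums / np.where(counts > 0, counts, 1.)
    return (xi, counts) if return_counts else xi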
Example #12
File: run.py  Project: johannesulf/Zebu
for color, source_bin in zip(color_list, source_bin_list):

    print(source_bin)

    table_s = zebu.read_mock_data('source', source_bin, survey=survey)

    table_s['m'] = np.where(
        (zebu.lens_z_bins[lens_bin] - 0.05 < table_s['z_true']) &
        (table_s['z_true'] < zebu.lens_z_bins[lens_bin + 1] + 0.05), 0, 1)

    cat_s = treecorr.Catalog(ra=table_s['ra'], dec=table_s['dec'],
                             ra_units='deg', dec_units='deg',
                             k=table_s['m'])

    nk = treecorr.NKCorrelation(
        max_sep=np.amax(zebu.theta_bins), min_sep=np.amin(zebu.theta_bins),
        nbins=len(zebu.theta_bins) - 1, sep_units='arcmin', metric='Arc',
        bin_slop=0)
    nk.process(cat_l, cat_s)

    b = np.mean(table_s['m']) / nk.xi

    ax2.plot(theta, b, color=color,
             label=r'${:.2f} \leq z_s < {:.2f}$'.format(
                 zebu.source_z_bins[survey.lower()][source_bin],
                 zebu.source_z_bins[survey.lower()][source_bin + 1]))

ax2.axhline(1.0, color='black', ls='--')
ax2.set_title(r'$\gamma_t$')
ax2.legend(loc='best')
ax2.set_xscale('log')
ax2.set_xlabel(r'Angle $\theta \, [\mathrm{arcmin}]$')
Example #13
def run_dessv(source_file, lens_file, use_patches):
    if use_patches:
        # First determine patch centers using 1/10 of the total source catalog.
        # Only need positions for this.
        # This isn't strictly necessary.  It's trying to showcase how to do this when the
        # whole catalog doesn't fit in memory.  If it all fits, then fine to use the full
        # source catalog to run KMeans.
        print('Read 1/10 of source catalog for kmeans patches')
        npatch = 128
        small_cat = treecorr.Catalog(source_file,
                                     ra_col='RA',
                                     dec_col='DEC',
                                     file_type='FITS',
                                     ra_units='deg',
                                     dec_units='deg',
                                     every_nth=10,
                                     npatch=npatch,
                                     verbose=2)

        # Write the patch centers
        patch_file = os.path.join('output', 'test_dessv_patches.fits')
        small_cat.write_patch_centers(patch_file)
        print('wrote patch centers file ', patch_file)
        #print('centers = ',small_cat.patch_centers)

        patch_kwargs = dict(patch_centers=patch_file, save_patch_dir='output')
    else:
        patch_kwargs = {}

    # Now load the full catalog using these patch centers.
    # Note: they need to use the same patch_file!
    print('make source catalog')
    sources = treecorr.Catalog(source_file,
                               ra_col='RA',
                               dec_col='DEC',
                               file_type='FITS',
                               ra_units='deg',
                               dec_units='deg',
                               g1_col='E_1',
                               g2_col='E_2',
                               w_col='W',
                               k_col='SENS',
                               **patch_kwargs)

    print('make lens catalog')
    lenses = treecorr.Catalog(lens_file,
                              ra_col='RA',
                              dec_col='DEC',
                              file_type='FITS',
                              ra_units='deg',
                              dec_units='deg',
                              **patch_kwargs)

    # Configuration of correlation functions.
    bin_config = dict(bin_size=0.2,
                      min_sep=10.,
                      max_sep=200.,
                      bin_slop=0.1,
                      sep_units='arcmin',
                      verbose=1,
                      output_dots=False)
    if use_patches:
        bin_config['var_method'] = 'jackknife'

    # Run the various 2pt correlations.  I'll skip NN here, to avoid dealing with randoms,
    # but that could be included as well.
    gg = treecorr.GGCorrelation(bin_config)
    ng = treecorr.NGCorrelation(bin_config)

    print('Process gg')
    gg.process(sources)
    print('Process ng')
    ng.process(lenses, sources)

    print('gg.xip = ', gg.xip)
    print('gg.xim = ', gg.xim)
    print('ng.xi = ', ng.xi)
    nbins = len(ng.xi)

    method = 'jackknife' if use_patches else 'shot'
    cov = treecorr.estimate_multi_cov([ng, gg], method)
    print('cov = ', cov)
    print('sigma = ', np.sqrt(cov.diagonal()))
    print('S/N = ',
          np.concatenate([gg.xip, gg.xim, ng.xi]) / np.sqrt(cov.diagonal()))

    assert len(gg.xip) == nbins
    assert len(gg.xim) == nbins
    assert cov.shape == (3 * nbins, 3 * nbins)

    # Apply sensitivities.
    print('Process kk')
    kk = treecorr.KKCorrelation(bin_config)
    print('Process nk')
    nk = treecorr.NKCorrelation(bin_config)

    kk.process(sources)
    nk.process(lenses, sources)

    ng.xi /= nk.xi
    gg.xip /= kk.xi
    gg.xim /= kk.xi

    # This makes the assumption that the power spectrum of the sensitivity is effectively uniform
    # across the survey.  So don't bother propagating covariance of sens.
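    # (Dividing the data vector elementwise by s rescales its covariance as
    # C -> C / outer(s, s), which is what the line below does.)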
    cov /= np.outer(np.concatenate([nk.xi, kk.xi, kk.xi]),
                    np.concatenate([nk.xi, kk.xi, kk.xi]))

    print('gg.xip => ', gg.xip)
    print('gg.xim => ', gg.xim)
    print('ng.xi => ', ng.xi)
    print('cov => ', cov)
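
A hypothetical invocation of the function above (the file names are placeholders, not
from the source):

run_dessv('dessv_sources.fits', 'dessv_lenses.fits', use_patches=True)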
Example #14
def test_twod():
    try:
        from scipy.spatial.distance import pdist, squareform
    except ImportError:
        print(
            'Skipping test_twod, since it uses scipy, and scipy is not installed.'
        )
        return

    # N random points in 2 dimensions
    np.random.seed(42)
    N = 200
    x = np.random.uniform(-20, 20, N)
    y = np.random.uniform(-20, 20, N)

    # Give the points a multivariate Gaussian random field for kappa and gamma
    L1 = [[0.33, 0.09], [-0.01, 0.26]]  # Some arbitrary correlation matrix
    invL1 = np.linalg.inv(L1)
    dists = pdist(np.array([x, y]).T, metric='mahalanobis', VI=invL1)
    K = np.exp(-0.5 * dists**2)
    K = squareform(K)
    np.fill_diagonal(K, 1.)

    A = 2.3
    kappa = np.random.multivariate_normal(np.zeros(N), K * (A**2))

    # Add some noise
    sigma = A / 10.
    kappa += np.random.normal(scale=sigma, size=N)
    kappa_err = np.ones_like(kappa) * sigma

    # Make gamma too
    gamma1 = np.random.multivariate_normal(np.zeros(N), K * (A**2))
    gamma1 += np.random.normal(scale=sigma, size=N)
    gamma2 = np.random.multivariate_normal(np.zeros(N), K * (A**2))
    gamma2 += np.random.normal(scale=sigma, size=N)
    gamma = gamma1 + 1j * gamma2
    gamma_err = kappa_err

    # Calculate the 2D correlation using brute force
    max_sep = 21.
    nbins = 21
    xi_brut = corr2d(x, y, kappa, kappa, w=None, rmax=max_sep, bins=nbins)

    cat1 = treecorr.Catalog(x=x, y=y, k=kappa, g1=gamma1, g2=gamma2)
    kk = treecorr.KKCorrelation(min_sep=0.,
                                max_sep=max_sep,
                                nbins=nbins,
                                bin_type='TwoD',
                                bin_slop=0)

    # First the simplest case to get right: cross correlation of the catalog with itself.
    kk.process(cat1, cat1)

    print('max abs diff = ', np.max(np.abs(kk.xi - xi_brut)))
    print('max rel diff = ', np.max(np.abs(kk.xi - xi_brut) / np.abs(kk.xi)))
    np.testing.assert_allclose(kk.xi, xi_brut, atol=1.e-7)

    # Auto-correlation should do the same thing.
    kk.process(cat1)
    print('max abs diff = ', np.max(np.abs(kk.xi - xi_brut)))
    print('max rel diff = ', np.max(np.abs(kk.xi - xi_brut) / np.abs(kk.xi)))
    np.testing.assert_allclose(kk.xi, xi_brut, atol=1.e-7)

    # Repeat with weights.
    xi_brut = corr2d(x,
                     y,
                     kappa,
                     kappa,
                     w=1. / kappa_err**2,
                     rmax=max_sep,
                     bins=nbins)
    cat2 = treecorr.Catalog(x=x,
                            y=y,
                            k=kappa,
                            g1=gamma1,
                            g2=gamma2,
                            w=1. / kappa_err**2)
    # NB. Testing that min_sep=0 is the default.
    kk = treecorr.KKCorrelation(max_sep=max_sep,
                                nbins=nbins,
                                bin_type='TwoD',
                                bin_slop=0)
    kk.process(cat2, cat2)
    print('max abs diff = ', np.max(np.abs(kk.xi - xi_brut)))
    print('max rel diff = ', np.max(np.abs(kk.xi - xi_brut) / np.abs(kk.xi)))
    np.testing.assert_allclose(kk.xi, xi_brut, atol=1.e-7)

    kk.process(cat2)
    np.testing.assert_allclose(kk.xi, xi_brut, atol=1.e-7)

    # Check GG
    xi_brut = corr2d(x, y, gamma, np.conj(gamma), rmax=max_sep, bins=nbins)
    # Equivalent bin_size = 2.  Check omitting nbins
    gg = treecorr.GGCorrelation(max_sep=max_sep,
                                bin_size=2.,
                                bin_type='TwoD',
                                bin_slop=0)
    gg.process(cat1)
    print('max abs diff = ', np.max(np.abs(gg.xip - xi_brut)))
    print('max rel diff = ', np.max(np.abs(gg.xip - xi_brut) / np.abs(gg.xip)))
    np.testing.assert_allclose(gg.xip, xi_brut, atol=2.e-7)

    xi_brut = corr2d(x,
                     y,
                     gamma,
                     np.conj(gamma),
                     w=1. / kappa_err**2,
                     rmax=max_sep,
                     bins=nbins)
    # Check omitting max_sep
    gg = treecorr.GGCorrelation(bin_size=2,
                                nbins=nbins,
                                bin_type='TwoD',
                                bin_slop=0)
    gg.process(cat2)
    print('max abs diff = ', np.max(np.abs(gg.xip - xi_brut)))
    print('max rel diff = ', np.max(np.abs(gg.xip - xi_brut) / np.abs(gg.xip)))
    np.testing.assert_allclose(gg.xip, xi_brut, atol=2.e-7)

    # Check NK
    xi_brut = corr2d(x,
                     y,
                     np.ones_like(kappa),
                     kappa,
                     rmax=max_sep,
                     bins=nbins)
    # Check slightly smaller max_sep gets rounded up.
    nk = treecorr.NKCorrelation(max_sep=max_sep - 0.5,
                                bin_size=2,
                                bin_type='TwoD',
                                bin_slop=0)
    nk.process(cat1, cat1)
    print('max abs diff = ', np.max(np.abs(nk.xi - xi_brut)))
    print('max rel diff = ', np.max(np.abs(nk.xi - xi_brut) / np.abs(nk.xi)))
    np.testing.assert_allclose(nk.xi, xi_brut, atol=1.e-7)

    xi_brut = corr2d(x,
                     y,
                     np.ones_like(kappa),
                     kappa,
                     w=1. / kappa_err**2,
                     rmax=max_sep,
                     bins=nbins)
    # Check very small, but non-zero min_sep
    nk = treecorr.NKCorrelation(min_sep=1.e-6,
                                max_sep=max_sep,
                                nbins=nbins,
                                bin_type='TwoD',
                                bin_slop=0)
    nk.process(cat2, cat2)
    print('max abs diff = ', np.max(np.abs(nk.xi - xi_brut)))
    print('max rel diff = ', np.max(np.abs(nk.xi - xi_brut) / np.abs(nk.xi)))
    np.testing.assert_allclose(nk.xi, xi_brut, atol=1.e-7)

    # Check NN
    xi_brut, counts = corr2d(x,
                             y,
                             np.ones_like(kappa),
                             np.ones_like(kappa),
                             rmax=max_sep,
                             bins=nbins,
                             return_counts=True)
    nn = treecorr.NNCorrelation(max_sep=max_sep,
                                nbins=nbins,
                                bin_type='TwoD',
                                bin_slop=0)
    nn.process(cat1)
    print('max abs diff = ', np.max(np.abs(nn.npairs - counts)))
    print('max rel diff = ',
          np.max(np.abs(nn.npairs - counts) / np.abs(nn.npairs)))
    np.testing.assert_allclose(nn.npairs, counts, atol=1.e-7)

    nn.process(cat1, cat1)
    print('max abs diff = ', np.max(np.abs(nn.npairs - counts)))
    print('max rel diff = ',
          np.max(np.abs(nn.npairs - counts) / np.abs(nn.npairs)))
    np.testing.assert_allclose(nn.npairs, counts, atol=1.e-7)

    xi_brut, counts = corr2d(x,
                             y,
                             np.ones_like(kappa),
                             np.ones_like(kappa),
                             w=1. / kappa_err**2,
                             rmax=max_sep,
                             bins=nbins,
                             return_counts=True)
    nn = treecorr.NNCorrelation(max_sep=max_sep,
                                nbins=nbins,
                                bin_type='TwoD',
                                bin_slop=0)
    nn.process(cat2)
    print('max abs diff = ', np.max(np.abs(nn.weight - counts)))
    print('max rel diff = ',
          np.max(np.abs(nn.weight - counts) / np.abs(nn.weight)))
    np.testing.assert_allclose(nn.weight, counts, atol=1.e-7)

    nn.process(cat2, cat2)
    print('max abs diff = ', np.max(np.abs(nn.weight - counts)))
    print('max rel diff = ',
          np.max(np.abs(nn.weight - counts) / np.abs(nn.weight)))
    np.testing.assert_allclose(nn.weight, counts, atol=1.e-7)
Example #15
def test_direct_spherical():
    # Repeat in spherical coords

    ngal = 100
    s = 10.
    rng = np.random.RandomState(8675309)
    x1 = rng.normal(0, s, (ngal, ))
    y1 = rng.normal(
        0, s,
        (ngal, )) + 200  # Put everything at large y, so small angle on sky
    z1 = rng.normal(0, s, (ngal, ))
    w1 = rng.random_sample(ngal)

    x2 = rng.normal(0, s, (ngal, ))
    y2 = rng.normal(0, s, (ngal, )) + 200
    z2 = rng.normal(0, s, (ngal, ))
    w2 = rng.random_sample(ngal)
    k2 = rng.normal(0, 3, (ngal, ))

    ra1, dec1 = coord.CelestialCoord.xyz_to_radec(x1, y1, z1)
    ra2, dec2 = coord.CelestialCoord.xyz_to_radec(x2, y2, z2)

    cat1 = treecorr.Catalog(ra=ra1,
                            dec=dec1,
                            ra_units='rad',
                            dec_units='rad',
                            w=w1)
    cat2 = treecorr.Catalog(ra=ra2,
                            dec=dec2,
                            ra_units='rad',
                            dec_units='rad',
                            w=w2,
                            k=k2)

    min_sep = 1.
    max_sep = 10.
    nbins = 50
    bin_size = np.log(max_sep / min_sep) / nbins
    nk = treecorr.NKCorrelation(min_sep=min_sep,
                                max_sep=max_sep,
                                nbins=nbins,
                                sep_units='deg',
                                brute=True)
    nk.process(cat1, cat2)

    r1 = np.sqrt(x1**2 + y1**2 + z1**2)
    r2 = np.sqrt(x2**2 + y2**2 + z2**2)
    x1 /= r1
    y1 /= r1
    z1 /= r1
    x2 /= r2
    y2 /= r2
    z2 /= r2

    true_npairs = np.zeros(nbins, dtype=int)
    true_weight = np.zeros(nbins, dtype=float)
    true_xi = np.zeros(nbins, dtype=float)

    for i in range(ngal):
        for j in range(ngal):
            rsq = (x1[i] - x2[j])**2 + (y1[i] - y2[j])**2 + (z1[i] - z2[j])**2
            r = np.sqrt(rsq)
            r *= coord.radians / coord.degrees

            index = np.floor(np.log(r / min_sep) / bin_size).astype(int)
            if index < 0 or index >= nbins:
                continue

            ww = w1[i] * w2[j]
            xi = ww * k2[j]

            true_npairs[index] += 1
            true_weight[index] += ww
            true_xi[index] += xi

    true_xi /= true_weight

    print('true_npairs = ', true_npairs)
    print('diff = ', nk.npairs - true_npairs)
    np.testing.assert_array_equal(nk.npairs, true_npairs)

    print('true_weight = ', true_weight)
    print('diff = ', nk.weight - true_weight)
    np.testing.assert_allclose(nk.weight, true_weight, rtol=1.e-5, atol=1.e-8)

    print('true_xi = ', true_xi)
    print('nk.xi = ', nk.xi)
    np.testing.assert_allclose(nk.xi, true_xi, rtol=1.e-4, atol=1.e-8)

    try:
        import fitsio
    except ImportError:
        print('Skipping FITS tests, since fitsio is not installed')
        return

    # Check that running via the corr2 script works correctly.
    config = treecorr.config.read_config('configs/nk_direct_spherical.yaml')
    cat1.write(config['file_name'])
    cat2.write(config['file_name2'])
    treecorr.corr2(config)
    data = fitsio.read(config['nk_file_name'])
    np.testing.assert_allclose(data['r_nom'], nk.rnom)
    np.testing.assert_allclose(data['npairs'], nk.npairs)
    np.testing.assert_allclose(data['weight'], nk.weight)
    np.testing.assert_allclose(data['kappa'], nk.xi, rtol=1.e-3)

    # Repeat with bin_slop = 0, since the code flow is different from brute=True.
    # And don't do any top-level recursion so we actually test not going to the leaves.
    nk = treecorr.NKCorrelation(min_sep=min_sep,
                                max_sep=max_sep,
                                nbins=nbins,
                                sep_units='deg',
                                bin_slop=0,
                                max_top=0)
    nk.process(cat1, cat2)
    np.testing.assert_array_equal(nk.npairs, true_npairs)
    np.testing.assert_allclose(nk.weight, true_weight, rtol=1.e-5, atol=1.e-8)
    np.testing.assert_allclose(nk.xi, true_xi, rtol=1.e-3, atol=1.e-6)
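One convention worth calling out in the loop above: for ra/dec catalogs with
the default Euclidean metric, the separation treecorr bins on is (as I
understand its convention) the straight-line chord between points on the unit
sphere, which the test then rescales from radians to degrees. A quick numeric
sketch of how that chord relates to the great-circle angle:

import numpy as np

theta = np.deg2rad(5.0)        # true opening angle between two unit vectors
chord = 2 * np.sin(theta / 2)  # straight-line chord length between them
# The two agree to O(theta**3), which is why the small-angle setup above
# can use the chord directly as the separation.
print(theta, chord, theta - chord)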
Example #16
def test_direct():
    # If the catalogs are small enough, we can do a direct calculation to see if it comes out right.
    # This should exactly match the treecorr result if brute force.

    ngal = 200
    s = 10.
    rng = np.random.RandomState(8675309)
    x1 = rng.normal(0, s, (ngal, ))
    y1 = rng.normal(0, s, (ngal, ))
    w1 = rng.random_sample(ngal)

    x2 = rng.normal(0, s, (ngal, ))
    y2 = rng.normal(0, s, (ngal, ))
    w2 = rng.random_sample(ngal)
    k2 = rng.normal(0, 3, (ngal, ))

    cat1 = treecorr.Catalog(x=x1, y=y1, w=w1)
    cat2 = treecorr.Catalog(x=x2, y=y2, w=w2, k=k2)

    min_sep = 1.
    max_sep = 50.
    nbins = 50
    bin_size = np.log(max_sep / min_sep) / nbins
    nk = treecorr.NKCorrelation(min_sep=min_sep,
                                max_sep=max_sep,
                                nbins=nbins,
                                brute=True)
    nk.process(cat1, cat2)

    true_npairs = np.zeros(nbins, dtype=int)
    true_weight = np.zeros(nbins, dtype=float)
    true_xi = np.zeros(nbins, dtype=float)
    for i in range(ngal):
        # It's hard to do all the pairs at once with numpy operations (although maybe possible).
        # But we can at least do all the pairs for each entry in cat1 at once with arrays.
        rsq = (x1[i] - x2)**2 + (y1[i] - y2)**2
        r = np.sqrt(rsq)

        ww = w1[i] * w2
        xi = ww * k2

        index = np.floor(np.log(r / min_sep) / bin_size).astype(int)
        mask = (index >= 0) & (index < nbins)
        np.add.at(true_npairs, index[mask], 1)
        np.add.at(true_weight, index[mask], ww[mask])
        np.add.at(true_xi, index[mask], xi[mask])

    true_xi /= true_weight

    print('true_npairs = ', true_npairs)
    print('diff = ', nk.npairs - true_npairs)
    np.testing.assert_array_equal(nk.npairs, true_npairs)

    print('true_weight = ', true_weight)
    print('diff = ', nk.weight - true_weight)
    np.testing.assert_allclose(nk.weight, true_weight, rtol=1.e-5, atol=1.e-8)

    print('true_xi = ', true_xi)
    print('nk.xi = ', nk.xi)
    np.testing.assert_allclose(nk.xi, true_xi, rtol=1.e-4, atol=1.e-8)

    try:
        import fitsio
    except ImportError:
        print('Skipping FITS tests, since fitsio is not installed')
        return

    # Check that running via the corr2 script works correctly.
    config = treecorr.config.read_config('configs/nk_direct.yaml')
    cat1.write(config['file_name'])
    cat2.write(config['file_name2'])
    treecorr.corr2(config)
    data = fitsio.read(config['nk_file_name'])
    np.testing.assert_allclose(data['r_nom'], nk.rnom)
    np.testing.assert_allclose(data['npairs'], nk.npairs)
    np.testing.assert_allclose(data['weight'], nk.weight)
    np.testing.assert_allclose(data['kappa'], nk.xi, rtol=1.e-3)

    # Invalid with only one file_name
    del config['file_name2']
    with assert_raises(TypeError):
        treecorr.corr2(config)
    config['file_name2'] = 'data/nk_direct_cat2.fits'
    # Invalid to request compensated if no rand_file
    config['nk_statistic'] = 'compensated'
    with assert_raises(TypeError):
        treecorr.corr2(config)

    # Repeat with bin_slop = 0, since the code flow is different from brute=True
    # And don't do any top-level recursion so we actually test not going to the leaves.
    nk = treecorr.NKCorrelation(min_sep=min_sep,
                                max_sep=max_sep,
                                nbins=nbins,
                                bin_slop=0,
                                max_top=0)
    nk.process(cat1, cat2)
    np.testing.assert_array_equal(nk.npairs, true_npairs)
    np.testing.assert_allclose(nk.weight, true_weight, rtol=1.e-5, atol=1.e-8)
    np.testing.assert_allclose(nk.xi, true_xi, rtol=1.e-4, atol=1.e-8)

    # Check a few basic operations with a NKCorrelation object.
    do_pickle(nk)

    nk2 = nk.copy()
    nk2 += nk
    np.testing.assert_allclose(nk2.npairs, 2 * nk.npairs)
    np.testing.assert_allclose(nk2.weight, 2 * nk.weight)
    np.testing.assert_allclose(nk2.meanr, 2 * nk.meanr)
    np.testing.assert_allclose(nk2.meanlogr, 2 * nk.meanlogr)
    np.testing.assert_allclose(nk2.xi, 2 * nk.xi)

    nk2.clear()
    nk2 += nk
    np.testing.assert_allclose(nk2.npairs, nk.npairs)
    np.testing.assert_allclose(nk2.weight, nk.weight)
    np.testing.assert_allclose(nk2.meanr, nk.meanr)
    np.testing.assert_allclose(nk2.meanlogr, nk.meanlogr)
    np.testing.assert_allclose(nk2.xi, nk.xi)

    ascii_name = 'output/nk_ascii.txt'
    nk.write(ascii_name, precision=16)
    nk3 = treecorr.NKCorrelation(min_sep=min_sep, max_sep=max_sep, nbins=nbins)
    nk3.read(ascii_name)
    np.testing.assert_allclose(nk3.npairs, nk.npairs)
    np.testing.assert_allclose(nk3.weight, nk.weight)
    np.testing.assert_allclose(nk3.meanr, nk.meanr)
    np.testing.assert_allclose(nk3.meanlogr, nk.meanlogr)
    np.testing.assert_allclose(nk3.xi, nk.xi)

    with assert_raises(TypeError):
        nk2 += config
    nk4 = treecorr.NKCorrelation(min_sep=min_sep / 2,
                                 max_sep=max_sep,
                                 nbins=nbins)
    with assert_raises(ValueError):
        nk2 += nk4
    nk5 = treecorr.NKCorrelation(min_sep=min_sep,
                                 max_sep=max_sep * 2,
                                 nbins=nbins)
    with assert_raises(ValueError):
        nk2 += nk5
    nk6 = treecorr.NKCorrelation(min_sep=min_sep,
                                 max_sep=max_sep,
                                 nbins=nbins * 2)
    with assert_raises(ValueError):
        nk2 += nk6

    fits_name = 'output/nk_fits.fits'
    nk.write(fits_name)
    nk4 = treecorr.NKCorrelation(min_sep=min_sep, max_sep=max_sep, nbins=nbins)
    nk4.read(fits_name)
    np.testing.assert_allclose(nk4.npairs, nk.npairs)
    np.testing.assert_allclose(nk4.weight, nk.weight)
    np.testing.assert_allclose(nk4.meanr, nk.meanr)
    np.testing.assert_allclose(nk4.meanlogr, nk.meanlogr)
    np.testing.assert_allclose(nk4.xi, nk.xi)
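The direct loops in this test (and in test_direct_spherical above) assign each
pair to a log-spaced bin via index = floor(log(r / min_sep) / bin_size). A
small worked check of that arithmetic (the value of r is illustrative):

import numpy as np

min_sep, max_sep, nbins = 1., 50., 50
bin_size = np.log(max_sep / min_sep) / nbins
r = 7.0
index = int(np.floor(np.log(r / min_sep) / bin_size))
lo = min_sep * np.exp(index * bin_size)        # left edge of the bin
hi = min_sep * np.exp((index + 1) * bin_size)  # right edge of the bin
assert lo <= r < hi
print(index, lo, hi)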
Example #17
def Calculate(cat_galaxy, cat_rand, result, num_of_runs, i, planck_ra,
              planck_dec, z_bins):
    start_time = time.time()
    print('run', i, '/', num_of_runs, '  z_bins', z_bins)
    print('loading simulation and mask...')
    #download
    #wget.download('http://pla.esac.esa.int/pla/aio/product-action?SIMULATED_MAP.FILE_ID=dx12_v3_smica_nosz_cmb_mc_{}_raw.fits'.format(str(i).zfill(5)),'data/')
    #wget.download('http://pla.esac.esa.int/pla/aio/product-action?SIMULATED_MAP.FILE_ID=dx12_v3_smica_nosz_noise_hm1_mc_{}_raw.fits'.format(str(i).zfill(5)),'data/')

    #plancksim_n=healpy.read_map('data/dx12_v3_smica_nosz_cmb_mc_{}_raw.fits'.format(str(i).zfill(5)))
    #plancknoise=healpy.read_map('data/dx12_v3_smica_nosz_noise_hm1_mc_{}_raw.fits'.format(str(i).zfill(5)))

    plancksim_n_file = fits.open(
        'http://pla.esac.esa.int/pla/aio/product-action?SIMULATED_MAP.FILE_ID=dx12_v3_smica_nosz_cmb_mc_{}_raw.fits'
        .format(str(i).zfill(5)),
        cache=True,
        ignore_missing_end=True,
        show_progress=False)
    plancknoise_file = fits.open(
        'http://pla.esac.esa.int/pla/aio/product-action?SIMULATED_MAP.FILE_ID=dx12_v3_smica_nosz_noise_hm1_mc_{}_raw.fits'
        .format(str(i).zfill(5)),
        cache=True,
        ignore_missing_end=True,
        show_progress=False)

    plancksim_n = healpy.read_map(plancksim_n_file, verbose=False)
    plancknoise = healpy.read_map(plancknoise_file, verbose=False)

    plancksim_n_file.close()
    plancknoise_file.close()

    plancksim = plancksim_n + plancknoise

    #print plancksim_n.shape, plancknoise.shape,plancksim.shape

    plancksim_n = None
    plancknoise = None
    plancksim = healpy.ud_grade(plancksim,
                                2048,
                                order_in='RING',
                                order_out='NEST')

    plancksim = plancksim[planckImask == 1]

    cat_sim = treecorr.Catalog(ra=planck_ra,
                               dec=planck_dec,
                               ra_units='deg',
                               dec_units='deg',
                               k=plancksim)

    print('Calculating correlation...')
    print('run', i, '/', num_of_runs, '  z_bins', z_bins)

    nk = treecorr.NKCorrelation(min_sep=0.01,
                                max_sep=10,
                                nbins=35,
                                sep_units='deg')
    rk = treecorr.NKCorrelation(min_sep=0.01,
                                max_sep=10,
                                nbins=35,
                                sep_units='deg')
    nk.process(cat_galaxy, cat_sim)
    rk.process(cat_rand, cat_sim)

    xi, varxi = nk.calculateXi(rk)

    print('Done')

    result[i] = xi
    r = np.exp(nk.meanlogr)

    np.save('datalog/sim_SuperCosmos_{}.npy'.format(str(z_bins)), result)
    np.save('datalog/sim_r_SuperCosmos_{}.npy'.format(str(z_bins)), r)

    #print 'xi:',xi
    #print 'r:',r

    #print xi.size
    #print r.size

    nk.clear()
    rk.clear()
    cat_sim.clear_cache()

    #os.remove('data/dx12_v3_smica_nosz_cmb_mc_{}_raw.fits'.format(str(i).zfill(5)))
    #os.remove('data/dx12_v3_smica_nosz_noise_hm1_mc_{}_raw.fits'.format(str(i).zfill(5)))
    astropy.utils.data.clear_download_cache()

    end_time = time.time()
    print('time used:', end_time - start_time, 's')

    return result
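Schematically, the compensated estimate computed by nk.calculateXi(rk) above
subtracts the mean kappa measured around random points from the mean kappa
measured around the real lenses, bin by bin (a sketch, assuming nk and rk were
processed as in the function):

xi_comp = nk.xi - rk.xi   # what nk.calculateXi(rk)[0] returns
xi_uncomp = nk.xi         # dropping the rk argument gives this instead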
Example #18
def test_varxi():
    # Test that varxi is correct (or close) based on actual variance of many runs.

    kappa0 = 0.05
    r0 = 10.
    L = 10 * r0
    rng = np.random.RandomState(8675309)

    # Note: to get a good estimate of var(xi), you need a lot of runs.  The number of
    # runs matters much more than the number of galaxies for getting this to pass.
    # In addition, I found that the variance was significantly underestimated when there
    # were lots of lenses.  I guess because there were multiple lenses that paired with the
    # same sources in a given bin, which increased the variance of the mean <g>.
    # So there might be some adjustment that would help improve the estimate of varxi,
    # but at least this unit test shows that it's fairly accurate for *some* scenario.
    if __name__ == '__main__':
        nsource = 1000
        nrand = 10
        nruns = 50000
        tol_factor = 1
    else:
        nsource = 100
        nrand = 2
        nruns = 5000
        tol_factor = 5

    lens = treecorr.Catalog(x=[0], y=[0])
    all_nks = []
    all_rks = []
    for run in range(nruns):
        x2 = (rng.random_sample(nsource) - 0.5) * L
        y2 = (rng.random_sample(nsource) - 0.5) * L
        x3 = (rng.random_sample(nrand) - 0.5) * L
        y3 = (rng.random_sample(nrand) - 0.5) * L

        r2 = (x2**2 + y2**2) / r0**2
        k = kappa0 * np.exp(-r2 / 2.) * (1. - r2 / 2.)
        k += rng.normal(0, 0.1, size=nsource)
        # Varied weights are hard, but at least check that non-unit weights work correctly.
        w = np.ones_like(x2) * 5

        source = treecorr.Catalog(x=x2, y=y2, w=w, k=k)
        rand = treecorr.Catalog(x=x3, y=y3)
        nk = treecorr.NKCorrelation(bin_size=0.1, min_sep=6., max_sep=15.)
        rk = treecorr.NKCorrelation(bin_size=0.1, min_sep=6., max_sep=15.)
        nk.process(lens, source)
        rk.process(rand, source)
        all_nks.append(nk)
        all_rks.append(rk)

    print('Uncompensated:')

    all_xis = [nk.calculateXi() for nk in all_nks]
    mean_wt = np.mean([nk.weight for nk in all_nks], axis=0)
    mean_xi = np.mean([xi[0] for xi in all_xis], axis=0)
    var_xi = np.var([xi[0] for xi in all_xis], axis=0)
    mean_varxi = np.mean([xi[1] for xi in all_xis], axis=0)

    print('mean_xi = ', mean_xi)
    print('mean_wt = ', mean_wt)
    print('mean_varxi = ', mean_varxi)
    print('var_xi = ', var_xi)
    print('ratio = ', var_xi / mean_varxi)
    print('max relerr for xi = ', np.max(np.abs(
        (var_xi - mean_varxi) / var_xi)))
    print('diff = ', var_xi - mean_varxi)
    np.testing.assert_allclose(mean_varxi, var_xi, rtol=0.02 * tol_factor)

    print('Compensated:')

    all_xis = [nk.calculateXi(rk) for (nk, rk) in zip(all_nks, all_rks)]
    mean_wt = np.mean([nk.weight for nk in all_nks], axis=0)
    mean_xi = np.mean([xi[0] for xi in all_xis], axis=0)
    var_xi = np.var([xi[0] for xi in all_xis], axis=0)
    mean_varxi = np.mean([xi[1] for xi in all_xis], axis=0)

    print('mean_xi = ', mean_xi)
    print('mean_wt = ', mean_wt)
    print('mean_varxi = ', mean_varxi)
    print('var_xi = ', var_xi)
    print('ratio = ', var_xi / mean_varxi)
    print('max relerr for xi = ', np.max(np.abs(
        (var_xi - mean_varxi) / var_xi)))
    print('diff = ', var_xi - mean_varxi)
    # Unlike for NG, the agreement is slightly worse for the compensated case.
    # Not sure if this is telling me something important, or just the way it turned out.
    np.testing.assert_allclose(mean_varxi, var_xi, rtol=0.03 * tol_factor)
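The validation pattern above generalizes: to sanity-check any analytic varxi,
accumulate xi from many independent realizations and compare the empirical
variance across runs to the mean of the reported estimates. A compressed
sketch of that check (names illustrative):

import numpy as np

def check_varxi(all_xi, all_varxi, rtol=0.1):
    # all_xi, all_varxi: lists of per-run (nbins,) arrays, e.g. collected
    # from nk.calculateXi() over many noise realizations.
    var_xi = np.var(all_xi, axis=0)          # empirical variance across runs
    mean_varxi = np.mean(all_varxi, axis=0)  # mean analytic estimate
    np.testing.assert_allclose(mean_varxi, var_xi, rtol=rtol)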
Example #19
def written_as_a_function_to_save_memory(z_bins, catalogname, single_fre):

    if catalogname == '1':
        print('loading SuperCosmos catalog and mask...')

        #load supercosmos catalog
        #catalog=np.loadtxt('wiseScosPhotoz160708.csv',skiprows=1,delimiter=",",usecols=(7,8,11,16,18),max_rows=3000)   #RA, DEC,Ebv(extinction),z,mask from "all sky survey"
        #catalog=np.loadtxt('wiseScosPhotoz160708.csv',skiprows=1,delimiter=",",usecols=(7,8,16))   #ra,dec,z (in degrees)
        #print(catalog)

        #catalog=catalog.transpose()

        catalog = np.load('datalog/wisecatalog.npy')

        scosmask = healpy.read_map('WISExSCOSmask.fits')

        #num=catalog[0].size
        num = 30000000
        #coord=SkyCoord(catalog[0],catalog[1],frame='galactic',unit='deg').icrs

        #catalog[0],catalog[1]=coord.ra.deg,coord.dec.deg
        coord = SkyCoord(catalog[0], catalog[1], frame='icrs',
                         unit='deg').galactic
        l, b = coord.l.deg, coord.b.deg

        catalog = catalog[:, scosmask[healpy.ang2pix(
            256, l, b, nest=False, lonlat=True)] == 1]

        catalog = catalog[:, catalog[2].argsort()]

        if z_bins == 3:
            catalog = catalog[:, catalog[0].size // 4 * z_bins:]
        else:
            catalog = catalog[:, catalog[0].size // 4 * z_bins:
                              catalog[0].size // 4 * (z_bins + 1)]

        if z_bins == 0:
            catalog = catalog[:, catalog[2] >= 0.01]

        #print('bin',z_bins,'size',catalog[2].shape,'z range',catalog[2,0],'~',catalog[2,-1])

        #cat_galaxy=treecorr.Catalog(ra=catalog[0],dec=catalog[1],ra_units='deg',dec_units='deg',k=np.ones(catalog[0].size))
        cat_galaxy = treecorr.Catalog(ra=catalog[0],
                                      dec=catalog[1],
                                      ra_units='deg',
                                      dec_units='deg')

        print('Done!\n')

        print('generating random galaxy catalog')
        #plt.scatter(catalog[0],catalog[1],s=0.01)
        #plt.xlabel('RA(deg)')
        #plt.ylabel('DEC(deg)')
        #plt.show()
        ra_min = np.min(cat_galaxy.ra)
        ra_max = np.max(cat_galaxy.ra)
        dec_min = np.min(cat_galaxy.dec)
        dec_max = np.max(cat_galaxy.dec)
        print('ra range = %f .. %f' % (ra_min, ra_max))
        print('dec range = %f .. %f' % (dec_min, dec_max))

        rand_ra = np.random.uniform(ra_min, ra_max, num)
        rand_sindec = np.random.uniform(np.sin(dec_min), np.sin(dec_max), num)
        rand_dec = np.arcsin(rand_sindec)

        coord = SkyCoord(rand_ra, rand_dec, frame='icrs', unit='rad').galactic
        l, b = coord.l.deg, coord.b.deg

        rand_ra = rand_ra[scosmask[healpy.ang2pix(
            256, l, b, nest=False, lonlat=True)] == 1]
        rand_dec = rand_dec[scosmask[healpy.ang2pix(
            256, l, b, nest=False, lonlat=True)] == 1]

        #plt.scatter(np.rad2deg(rand_ra),np.rad2deg(rand_dec),s=0.01)
        #plt.xlabel('RA(deg)')
        #plt.ylabel('DEC(deg)')
        #plt.show()
        print('Done!\n')

    if catalogname == '2':
        print('loading MCXC catalog and mask...')

        num = 1000000

        MCXCfile = pyfits.open('MCXC.fits')
        MCXCdata = MCXCfile[1].data
        MCXCfile.close()

        MCXCmaskfile = pyfits.open('HFI_Mask_GalPlane-apo0_2048_R2.00.fits')
        MCXCmask = MCXCmaskfile[1].data['GAL080']
        MCXCmaskfile.close()

        coord = SkyCoord(MCXCdata['RA'],
                         MCXCdata['DEC'],
                         frame='icrs',
                         unit='deg').galactic
        l, b = coord.l.deg, coord.b.deg

        MCXCdata = MCXCdata[MCXCmask[healpy.ang2pix(
            2048, l, b, nest=True, lonlat=True)] == 1]

        #ra260-280 dec60-70 suspicious area
        MCXCmask2 = np.ones(MCXCdata.size)
        for i in range(MCXCdata.size):
            if (260 < MCXCdata[i]['RA'] < 280
                    and 60 < MCXCdata[i]['DEC'] < 70):
                MCXCmask2[i] = 0
        MCXCdata = MCXCdata[MCXCmask2 == 1]

        cat_galaxy = treecorr.Catalog(ra=MCXCdata['RA'],
                                      dec=MCXCdata['DEC'],
                                      ra_units='deg',
                                      dec_units='deg')
        #cat_galaxy=treecorr.Catalog(ra=MCXCdata['RA'],dec=MCXCdata['DEC'],ra_units='deg',dec_units='deg',k=np.ones(MCXCdata['RA'].size))
        print('Done!\n')

        #plt.scatter(MCXCdata['RA'],MCXCdata['DEC'],s=0.5)
        #plt.xlabel('RA(deg)')
        #plt.ylabel('DEC(deg)')
        #plt.show()
        print('generating random galaxy catalog')

        ra_min = np.min(cat_galaxy.ra)
        ra_max = np.max(cat_galaxy.ra)
        dec_min = np.min(cat_galaxy.dec)
        dec_max = np.max(cat_galaxy.dec)
        print('ra range = %f .. %f' % (ra_min, ra_max))
        print('dec range = %f .. %f' % (dec_min, dec_max))

        rand_ra = np.random.uniform(ra_min, ra_max, num)
        rand_sindec = np.random.uniform(np.sin(dec_min), np.sin(dec_max), num)
        rand_dec = np.arcsin(rand_sindec)

        #plt.hist(rand_ra)
        #plt.show()
        #plt.hist(rand_sindec)
        #plt.show()

        #scosmask=healpy.read_map('WISExSCOSmask.fits')
        planckmask = pyfits.open('HFI_Mask_GalPlane-apo0_2048_R2.00.fits')
        planckImask = planckmask[1].data['GAL080']
        planckmask.close()

        coord = SkyCoord(rand_ra, rand_dec, frame='icrs', unit='rad').galactic
        l, b = coord.l.deg, coord.b.deg

        rand_ra = rand_ra[planckImask[healpy.ang2pix(
            2048, l, b, nest=True, lonlat=True)] == 1]
        rand_dec = rand_dec[planckImask[healpy.ang2pix(
            2048, l, b, nest=True, lonlat=True)] == 1]

        #ra260-280 dec60-70 suspicious area
        MCXCrandmask2 = np.ones(rand_ra.size)
        for i in range(rand_ra.size):
            if (260 < np.rad2deg(rand_ra[i]) < 280
                    and 60 < np.rad2deg(rand_dec[i]) < 70):
                MCXCrandmask2[i] = 0
        rand_ra = rand_ra[MCXCrandmask2 == 1]
        rand_dec = rand_dec[MCXCrandmask2 == 1]

        #plt.scatter(np.rad2deg(rand_ra),np.rad2deg(rand_dec),s=0.5)
        #plt.scatter(MCXCdata['RA'],MCXCdata['DEC'],s=0.5)
        #plt.xlabel('RA(deg)')
        #plt.ylabel('DEC(deg)')
        #plt.show()
        #input('stop here')
        print('Done!\n')

    if catalogname != '3':
        cat_rand = treecorr.Catalog(ra=rand_ra,
                                    dec=rand_dec,
                                    ra_units='radians',
                                    dec_units='radians')

    #load planck data
    print('loading Planck catalog and mask...')
    if single_fre == '1':
        '''
        planckdata=fits.open('http://pla.esac.esa.int/pla/aio/product-action?MAP.MAP_ID=COM_CompMap_ISW_0064_R2.00.fits')
        planckImap=planckdata[1].data['I']
        planckImask=planckdata[1].data['I_MASK']
        planckdata.close()
        planckpix=np.arange(0,planckImap.size)

        planckImap=planckImap[planckImask==1]
        planckImap=planckImap*2.725

        planckpix=planckpix[planckImask==1]

        planck_ra,planck_dec=healpy.pix2ang(nside=64,ipix=planckpix,nest=True,lonlat=True)




        coord=SkyCoord(planck_ra,planck_dec,frame='galactic',unit='deg').icrs
        planck_ra,planck_dec=coord.ra.deg,coord.dec.deg


        print 'ISW mean:',np.mean(planckImap)
        '''

        planckdata = fits.open(
            'http://pla.esac.esa.int/pla/aio/product-action?MAP.MAP_ID=COM_CompMap_ISW_0064_R2.00.fits'
        )
        planckImap = planckdata[1].data['I']
        planckImask = planckdata[1].data['I_MASK']
        planckdata.close()

        Imax = np.max(planckImap[planckImask == 1])
        Imin = np.min(planckImap[planckImask == 1])

        planck2048 = healpy.pixelfunc.ud_grade(planckImap,
                                               2048,
                                               order_in='NEST',
                                               order_out='NEST')
        planck2048mask = healpy.pixelfunc.ud_grade(planckImask,
                                                   2048,
                                                   order_in='NEST',
                                                   order_out='NEST')
        planckpix = np.arange(0, planck2048.size)

        planckpix = planckpix[planck2048mask == 1]

        planck_ra, planck_dec = healpy.pix2ang(nside=2048,
                                               ipix=planckpix,
                                               nest=True,
                                               lonlat=True)

        planckImap = healpy.pixelfunc.get_interp_val(planckImap,
                                                     planck_ra,
                                                     planck_dec,
                                                     nest=True,
                                                     lonlat=True)

        coord = SkyCoord(planck_ra, planck_dec, frame='galactic',
                         unit='deg').icrs
        planck_ra, planck_dec = coord.ra.deg, coord.dec.deg

        planckset = np.array([planckImap, planck_ra, planck_dec])
        planckset = planckset[:,
                              (planckset[0] >= Imin) & (planckset[0] <= Imax)]
        planckImap, planck_ra, planck_dec = planckset[0], planckset[1], planckset[2]

        coord = None
        planckImask = None
        planckpix = None
        planck2048 = None
        planck2048mask = None

        planckImap = planckImap * 2.725

        #print planckImap[5000:5200]
        #np.save('datalog/test.npy',planckImap)
        #input('stop here')
        '''
        plt.scatter(planck_ra,planck_dec,s=0.01,c=planckImap,cmap='rainbow',edgecolors='none')
        plt.xlabel('RA(deg)')
        plt.ylabel('DEC(deg)')
        plt.title('Planck map after mask')
        plt.colorbar()
        plt.show()
        input('stop here')
        '''

    if single_fre == '2':
        planckdata = fits.open(
            'http://pla.esac.esa.int/pla/aio/product-action?MAP.MAP_ID=COM_CompMap_CIB-GNILC-F545_2048_R2.00.fits'
        )
        planckImap = planckdata[1].data['I']

        planckdata.close()
        planckpix = np.arange(0, planckImap.size)

        planckpix = planckpix[planckImap != 0]

        planckImap = planckImap[planckImap != 0]

        planck_ra, planck_dec = healpy.pix2ang(nside=2048,
                                               ipix=planckpix,
                                               nest=False,
                                               lonlat=True)

        coord = SkyCoord(planck_ra, planck_dec, frame='galactic',
                         unit='deg').icrs
        planck_ra, planck_dec = coord.ra.deg, coord.dec.deg

        print('CIB mean:', np.mean(planckImap))
        '''
        plt.scatter(planck_ra,planck_dec,s=0.01,c=planckImap,cmap='rainbow',edgecolors='none')
        plt.xlabel('RA(deg)')
        plt.ylabel('DEC(deg)')
        plt.title('CIB scatter')
        plt.colorbar()
        plt.show()
        input('stop here')
        '''

    if single_fre == '3':
        planckdata = fits.open(
            'http://pla.esac.esa.int/pla/aio/product-action?MAP.MAP_ID=HFI_CompMap_Foregrounds-commander-143_R3.00.fits'
        )
        planckmask = fits.open('HFI_Mask_GalPlane-apo0_2048_R2.00.fits')
        planckImask = planckmask[1].data['GAL080']

        planckImap = healpy.fitsfunc.read_map(planckdata, field=0, nest=True)

        planckdata.close()
        planckmask.close()

        planckpix = np.arange(0, planckImap.size)

        planckImap = planckImap[planckImask == 1]
        planckpix = planckpix[planckImask == 1]

        planck_ra, planck_dec = healpy.pix2ang(nside=2048,
                                               ipix=planckpix,
                                               nest=True,
                                               lonlat=True)

        coord = SkyCoord(planck_ra, planck_dec, frame='galactic',
                         unit='deg').icrs
        planck_ra, planck_dec = coord.ra.deg, coord.dec.deg

        print('subtracted mean:', np.mean(planckImap))
        '''
        plt.scatter(planck_ra,planck_dec,s=0.005,c=planckImap,cmap='rainbow',edgecolors='none')
        plt.xlabel('RA(deg)')
        plt.ylabel('DEC(deg)')
        plt.title('CMB subtracted scatter')
        plt.colorbar()
        plt.show()
        input('stop here')
        '''

    cat_planck = treecorr.Catalog(ra=planck_ra,
                                  dec=planck_dec,
                                  ra_units='deg',
                                  dec_units='deg',
                                  k=planckImap)

    print('Done!\n')

    #print planckpix.size,planckImap.size,plancknoise.size

    print('calculating cross-correlation...')
    #'''cross correlation
    nk = treecorr.NKCorrelation(min_sep=0.01,
                                max_sep=10,
                                nbins=35,
                                sep_units='deg')
    rk = treecorr.NKCorrelation(min_sep=0.01,
                                max_sep=10,
                                nbins=35,
                                sep_units='deg')
    #nk = treecorr.KKCorrelation(min_sep=0.01, max_sep=3.0, nbins=50, sep_units='radians')
    nk.process(cat_galaxy, cat_planck)
    rk.process(cat_rand, cat_planck)

    xi, varxi = nk.calculateXi(rk)
    sig = np.sqrt(varxi)
    r = np.exp(nk.meanlogr)
    #'''
    '''auto correlation
    nn = treecorr.NNCorrelation(min_sep=0.01, max_sep=10, nbins=35, sep_units='deg')
    rr = treecorr.NNCorrelation(min_sep=0.01, max_sep=10, nbins=35, sep_units='deg')
    nr = treecorr.NNCorrelation(min_sep=0.01, max_sep=10, nbins=35, sep_units='deg')
    nn.process(cat_galaxy)
    rr.process(cat_rand)
    nr.process(cat_galaxy, cat_rand)
    xi, varxi = nn.calculateXi(rr, nr)
    sig = np.sqrt(varxi)
    r = np.exp(nn.meanlogr)
    '''
    #print r
    #print xi

    print('Done!\n')

    if single_fre == '2':
        xi = xi * 69 * 1e-6
        varxi = varxi * 69 * 1e-6

    #print('Plotting')

    #not sure what r is
    '''auto correlation plot
    plt.plot(r, xi, color='blue')
    plt.plot(r,np.zeros(xi.size),color='red')

    plt.errorbar(r, xi, yerr=sig, lw=1, ls='',ecolor='g')

    #plt.xlabel(r'$\theta$ (rad)')
    plt.xlabel(r'$\theta$ (degrees)')
    plt.xscale('log')
    if catalogname=='1':
        plt.title('SuperCosmos x SuperCosmos')
    if catalogname=='2':
        plt.title('MCXC x MCXC')
    plt.show()
    '''

    if single_fre == '1':
        data = np.load('datalog/SuperCosmos.npy')
        cmbdata = data[0]
        cmbr = data[1]
        plt.plot(cmbr, cmbdata, color='green', label='CMB X galaxy')

        plt.plot(r, xi, color='blue', label='ISW X galaxy')
        plt.plot([0, 1, 10], [0, 0, 0], color='red', ls=':')

        plt.xscale('log')
        #plt.yscale('log', nonposy='clip')
        plt.xlabel(r'$\theta$ (degrees)')
        plt.ylabel(r'$w(\theta)$(K)')
        plt.legend()

    if single_fre == '2':
        plt.plot(r, xi, color='blue', label='CIB X galaxy')
        plt.plot([0, 1, 10], [0, 0, 0], color='red', ls=':')

        plt.xscale('log')
        plt.xlabel(r'$\theta$ (degrees)')
        plt.ylabel(r'$w(\theta)$(K)')
        plt.legend()
    if single_fre == '3':
        plt.plot(r, xi, color='blue', label='Subtracted X galaxy')
        plt.plot([0, 1, 10], [0, 0, 0], color='red', ls=':')

        plt.xscale('log')
        plt.xlabel(r'$\theta$ (degrees)')
        plt.ylabel(r'$w(\theta)$(K)')
        plt.legend()

    if catalogname == '1':
        if single_fre == '1':
            plt.title('SuperCosmos x ISW z {}'.format(str(z_bins)))
        if single_fre == '2':
            plt.title('SuperCosmos x CIB z {}'.format(str(z_bins)))
        if single_fre == '3':
            plt.title('SuperCosmos x Subtracted z {}'.format(str(z_bins)))

    if catalogname == '2':
        if single_fre == '1':
            plt.title('MCXC x ISW')
        if single_fre == '2':
            plt.title('MCXC x CIB')

    plt.show()

    #'''save data
    if single_fre == '1':
        if catalogname == '1':
            np.save('datalog/ISW_z_{}.npy'.format(str(z_bins)),
                    np.array([xi, r, sig]))
            print('datalog saved!')

    if single_fre == '2':
        if catalogname == '1':
            np.save('datalog/CIB_z_{}.npy'.format(str(z_bins)),
                    np.array([xi, r, sig]))
            print('datalog saved!')

    if single_fre == '3':
        if catalogname == '1':
            np.save('datalog/Subtracted_z_{}.npy'.format(str(z_bins)),
                    np.array([xi, r, sig]))
            print('datalog saved!')

    #'''

    print('4 bins datalog saved!')

    nk.clear()
    rk.clear()
    cat_galaxy.clear_cache()
    cat_rand.clear_cache()
    cat_planck.clear_cache()

    return None
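The same masking idiom appears several times in this function: convert ICRS
ra/dec to galactic l/b, look up the HEALPix pixel, and keep objects whose mask
pixel equals 1. A small helper capturing that pattern (a sketch; nside and
nest must match the mask file in use):

import healpy
from astropy.coordinates import SkyCoord

def in_mask(ra_deg, dec_deg, mask, nside, nest=False):
    # True where the mask pixel under (ra, dec) is 1.
    gal = SkyCoord(ra_deg, dec_deg, frame='icrs', unit='deg').galactic
    pix = healpy.ang2pix(nside, gal.l.deg, gal.b.deg, nest=nest, lonlat=True)
    return mask[pix] == 1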
Example #20
def written_as_a_function_to_save_memory(z_bins, randoms, result):
    print('loading SuperCosmos catalog and mask...')

    catalog = np.load('datalog/wisecatalog_z.npy')  # ra, dec, z

    #print(catalog.shape)

    scosmask = healpy.read_map('WISExSCOSmask.fits')

    num = 30000000
    #coord=SkyCoord(catalog[0],catalog[1],frame='galactic',unit='deg').icrs

    #catalog[0],catalog[1]=coord.ra.deg,coord.dec.deg
    coord = SkyCoord(catalog[0], catalog[1], frame='icrs', unit='deg').galactic
    l, b = coord.l.deg, coord.b.deg

    catalog = catalog[:, scosmask[healpy.ang2pix(
        256, l, b, nest=False, lonlat=True)] == 1]
    '''
    print(catalog.shape)
    print('z_min:',np.min(catalog[2]),'z_max:',np.max(catalog[2]))
    plt.hist(catalog[2],100)
    plt.xlabel('z')
    plt.ylabel('# of galaxies')
    plt.title('SuperCosmos z histogram(masked)')
    plt.show()

    plt.scatter(catalog[0],catalog[1],c=catalog[2],s=0.005,cmap='rainbow',edgecolors='none')
    plt.xlabel('RA(deg)')
    plt.ylabel('DEC(deg)')
    plt.title('SuperCosmos redshift scatter plot(masked)')
    plt.colorbar()
    plt.show()

    raw_input=()
    '''
    catalog = catalog[:, catalog[2].argsort()]

    if z_bins == 3:
        catalog = catalog[:, catalog[0].size // 4 * z_bins:]
    else:
        catalog = catalog[:, catalog[0].size // 4 * z_bins:
                          catalog[0].size // 4 * (z_bins + 1)]

    if z_bins == 0:
        catalog = catalog[:, catalog[2] >= 0.01]

    #print('bin',z_bins,'size',catalog[2].shape,'z range',catalog[2,0],'~',catalog[2,-1])
    #catalog=catalog[:,catalog[0].size]

    #cat_galaxy=treecorr.Catalog(ra=catalog[0],dec=catalog[1],ra_units='deg',dec_units='deg',k=np.ones(catalog[0].size))
    cat_galaxy = treecorr.Catalog(ra=catalog[0],
                                  dec=catalog[1],
                                  ra_units='deg',
                                  dec_units='deg')

    print('Done!\n')

    print('generating random galaxy catalog')
    #plt.scatter(catalog[0],catalog[1],s=0.01)
    #plt.xlabel('RA(deg)')
    #plt.ylabel('DEC(deg)')
    #plt.show()
    ra_min = np.min(cat_galaxy.ra)
    ra_max = np.max(cat_galaxy.ra)
    dec_min = np.min(cat_galaxy.dec)
    dec_max = np.max(cat_galaxy.dec)
    print('ra range = %f .. %f' % (ra_min, ra_max))
    print('dec range = %f .. %f' % (dec_min, dec_max))

    rand_ra = np.random.uniform(ra_min, ra_max, num)
    rand_sindec = np.random.uniform(np.sin(dec_min), np.sin(dec_max), num)
    rand_dec = np.arcsin(rand_sindec)

    coord = SkyCoord(rand_ra, rand_dec, frame='icrs', unit='rad').galactic
    l, b = coord.l.deg, coord.b.deg

    rand_ra = rand_ra[scosmask[healpy.ang2pix(
        256, l, b, nest=False, lonlat=True)] == 1]
    rand_dec = rand_dec[scosmask[healpy.ang2pix(
        256, l, b, nest=False, lonlat=True)] == 1]

    #plt.scatter(np.rad2deg(rand_ra),np.rad2deg(rand_dec),s=0.01)
    #plt.xlabel('RA(deg)')
    #plt.ylabel('DEC(deg)')
    #plt.show()
    print('Done!\n')

    cat_rand = treecorr.Catalog(ra=rand_ra,
                                dec=rand_dec,
                                ra_units='radians',
                                dec_units='radians')

    #load planck data
    print('loading Planck catalog and mask...')

    planckdata = fits.open('COM_CMB_IQU-smica-nosz_2048_R3.00_full.fits')

    planckmask = fits.open('HFI_Mask_GalPlane-apo0_2048_R2.00.fits')

    planckImap = planckdata[1].data['I_STOKES']

    planckImask = planckmask[1].data['GAL080']

    planckdata.close()
    planckmask.close()

    planckpix = np.arange(0, planckImap.size)

    planckImap = planckImap[planckImask == 1]
    planckpix = planckpix[planckImask == 1]

    planck_ra, planck_dec = healpy.pix2ang(nside=2048,
                                           ipix=planckpix,
                                           nest=True,
                                           lonlat=True)

    coord = SkyCoord(planck_ra, planck_dec, frame='galactic', unit='deg').icrs
    planck_ra, planck_dec = coord.ra.deg, coord.dec.deg

    cat_planck = treecorr.Catalog(ra=planck_ra,
                                  dec=planck_dec,
                                  ra_units='deg',
                                  dec_units='deg',
                                  k=planckImap)

    print('Done!\n')

    print('calculating cross-correlation...')
    nk = treecorr.NKCorrelation(min_sep=0.01,
                                max_sep=10,
                                nbins=35,
                                sep_units='deg')
    rk = treecorr.NKCorrelation(min_sep=0.01,
                                max_sep=10,
                                nbins=35,
                                sep_units='deg')
    nk.process(cat_galaxy, cat_planck)
    rk.process(cat_rand, cat_planck)

    xi, varxi = nk.calculateXi(rk)
    sig = np.sqrt(varxi)
    r = np.exp(nk.meanlogr)

    #print xi

    print('Done!\n')

    #print('Plotting')

    #plt.plot(r, xi, color='blue')
    #plt.errorbar(r[xi>0], xi[xi>0], yerr=sig[xi>0], lw=1, ls='',ecolor='g')
    #leg = plt.errorbar(-r, xi, yerr=sig, color='blue')

    #plt.xscale('log')
    #plt.xlabel(r'$\theta$ (degrees)')
    #plt.ylabel(r'$w(\theta)$')
    #plt.ticklabel_format(style='sci', axis='y', scilimits=(0,0))
    #plt.legend([leg], [r'$w(\theta)$'], loc='lower left')
    #plt.title('SuperCosmos x Planck at {} z bin'.format(str(z_bins)))

    #plt.show()

    #np.save('datalog/SuperCosmos_z_{}.npy'.format(str(z_bins)),np.array([xi,r,sig]))
    result[z_bins, randoms] = np.array([xi, r, sig])
    print(randoms, 'runs')
    print('{} bins datalog saved!'.format(str(z_bins)))

    nk.clear()
    rk.clear()
    cat_galaxy.clear_cache()
    cat_rand.clear_cache()
    cat_planck.clear_cache()

    catalog = None
    scosmask = None
    coord = None
    l = None
    b = None
    ra_min, ra_max, dec_min, dec_max = None, None, None, None
    rand_ra, rand_sindec, rand_dec = None, None, None
    planck_ra, planck_dec = None, None
    planckImap, planckpix = None, None
    xi, r, sig, varxi = None, None, None, None

    return result
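Note on the container indexed above: result[z_bins, randoms] stores a
(xi, r, sig) triple per (z bin, run), so it must be pre-allocated with a
compatible shape. A sketch of such an allocation (nbins=35 matches the
correlations above; 4 z bins as in the slicing; the number of runs is an
assumption):

import numpy as np

n_zbins, n_runs, nbins = 4, 100, 35
result = np.zeros((n_zbins, n_runs, 3, nbins))  # axes: z bin, run, (xi, r, sig), theta bin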
Example #21
input('stop')

cat_planck = treecorr.Catalog(ra=planck_ra,
                              dec=planck_dec,
                              ra_units='deg',
                              dec_units='deg',
                              k=planckImap)

print('Done!\n')

#print planckpix.size,planckImap.size,plancknoise.size

print('calculating cross-correlation...')
#'''cross correlation
nk = treecorr.NKCorrelation(min_sep=0.01,
                            max_sep=10,
                            nbins=35,
                            sep_units='deg')
rk = treecorr.NKCorrelation(min_sep=0.01,
                            max_sep=10,
                            nbins=35,
                            sep_units='deg')
#nk = treecorr.KKCorrelation(min_sep=0.01, max_sep=3.0, nbins=50, sep_units='radians')
nk.process(cat_galaxy, cat_planck)
rk.process(cat_rand, cat_planck)

xi, varxi = nk.calculateXi(rk)
sig = np.sqrt(varxi)
r = np.exp(nk.meanlogr)
#'''
'''auto correlation
nn = treecorr.NNCorrelation(min_sep=0.01, max_sep=10, nbins=35, sep_units='deg')
Example #22
def corr2(config, logger=None):
    """Run the full two-point correlation function code based on the parameters in the
    given config dict.

    The function print_corr2_params() will output information about the valid parameters
    that are expected to be in the config dict.

    Optionally a logger parameter may be given, in which case it is used for logging.
    If not given, the logging will be based on the verbose and log_file parameters.

    :param config:  The configuration dict which defines what to do.
    :param logger:  If desired, a logger object for logging. (default: None, in which case
                    one will be built according to the config dict's verbose level.)
    """
    # Setup logger based on config verbose value
    if logger is None:
        logger = treecorr.config.setup_logger(
                treecorr.config.get(config,'verbose',int,1),
                config.get('log_file',None))

    # Check that config doesn't have any extra parameters.
    # (Such values are probably typos.)
    # Also convert the given parameters to the correct type, etc.
    config = treecorr.config.check_config(config, corr2_valid_params, corr2_aliases, logger)

    import pprint
    logger.debug('Using configuration dict:\n%s',pprint.pformat(config))

    if ( 'output_dots' not in config 
          and config.get('log_file',None) is None 
          and config['verbose'] >= 2 ):
        config['output_dots'] = True

    # Set the number of threads
    num_threads = config.get('num_threads',None)
    logger.debug('From config dict, num_threads = %s',num_threads)
    treecorr.set_omp_threads(num_threads, logger)

    # Read in the input files.  Each of these is a list.
    cat1 = treecorr.read_catalogs(config, 'file_name', 'file_list', 0, logger)
    if len(cat1) == 0:
        raise AttributeError("Either file_name or file_list is required")
    cat2 = treecorr.read_catalogs(config, 'file_name2', 'file_list2', 1, logger)
    rand1 = treecorr.read_catalogs(config, 'rand_file_name', 'rand_file_list', 0, logger)
    rand2 = treecorr.read_catalogs(config, 'rand_file_name2', 'rand_file_list2', 1, logger)
    if len(cat2) == 0 and len(rand2) > 0:
        raise AttributeError("rand_file_name2 is invalid without file_name2")
    logger.info("Done reading input catalogs")

    # Do GG correlation function if necessary
    if 'gg_file_name' in config or 'm2_file_name' in config:
        logger.warning("Performing GG calculations...")
        gg = treecorr.GGCorrelation(config,logger)
        gg.process(cat1,cat2)
        logger.info("Done GG calculations.")
        if 'gg_file_name' in config:
            gg.write(config['gg_file_name'])
            logger.warning("Wrote GG correlation to %s",config['gg_file_name'])
        if 'm2_file_name' in config:
            gg.writeMapSq(config['m2_file_name'], m2_uform=config['m2_uform'])
            logger.warning("Wrote Mapsq values to %s",config['m2_file_name'])

    # Do NG correlation function if necessary
    if 'ng_file_name' in config or 'nm_file_name' in config or 'norm_file_name' in config:
        if len(cat2) == 0:
            raise AttributeError("file_name2 is required for ng correlation")
        logger.warning("Performing NG calculations...")
        ng = treecorr.NGCorrelation(config,logger)
        ng.process(cat1,cat2)
        logger.info("Done NG calculation.")

        # The default ng_statistic is compensated _iff_ rand files are given.
        rg = None
        if len(rand1) == 0:
            if config.get('ng_statistic',None) == 'compensated':
                raise AttributeError("rand_files is required for ng_statistic = compensated")
        elif config.get('ng_statistic','compensated') == 'compensated':
            rg = treecorr.NGCorrelation(config,logger)
            rg.process(rand1,cat2)
            logger.info("Done RG calculation.")

        if 'ng_file_name' in config:
            ng.write(config['ng_file_name'], rg)
            logger.warning("Wrote NG correlation to %s",config['ng_file_name'])
        if 'nm_file_name' in config:
            ng.writeNMap(config['nm_file_name'], rg, m2_uform=config['m2_uform'])
            logger.warning("Wrote NMap values to %s",config['nm_file_name'])

        if 'norm_file_name' in config:
            gg = treecorr.GGCorrelation(config,logger)
            gg.process(cat2)
            logger.info("Done GG calculation for norm")
            dd = treecorr.NNCorrelation(config,logger)
            dd.process(cat1)
            logger.info("Done DD calculation for norm")
            rr = treecorr.NNCorrelation(config,logger)
            rr.process(rand1)
            logger.info("Done RR calculation for norm")
            dr = None
            if config['nn_statistic'] == 'compensated':
                dr = treecorr.NNCorrelation(config,logger)
                dr.process(cat1,rand1)
                logger.info("Done DR calculation for norm")
            ng.writeNorm(config['norm_file_name'],gg,dd,rr,dr,rg,m2_uform=config['m2_uform'])
            logger.warning("Wrote Norm values to %s",config['norm_file_name'])

    # Do NN correlation function if necessary
    if 'nn_file_name' in config:
        if len(rand1) == 0:
            raise AttributeError("rand_file_name is required for NN correlation")
        if len(cat2) > 0 and len(rand2) == 0:
            raise AttributeError("rand_file_name2 is required for NN cross-correlation")
        logger.warning("Performing DD calculations...")
        dd = treecorr.NNCorrelation(config,logger)
        dd.process(cat1,cat2)
        logger.info("Done DD calculations.")

        dr = None
        rd = None
        if len(cat2) == 0:
            logger.warning("Performing RR calculations...")
            rr = treecorr.NNCorrelation(config,logger)
            rr.process(rand1)
            logger.info("Done RR calculations.")

            if config['nn_statistic'] == 'compensated':
                logger.warning("Performing DR calculations...")
                dr = treecorr.NNCorrelation(config,logger)
                dr.process(cat1,rand1)
                logger.info("Done DR calculations.")
        else:
            logger.warning("Performing RR calculations...")
            rr = treecorr.NNCorrelation(config,logger)
            rr.process(rand1,rand2)
            logger.info("Done RR calculations.")

            if config['nn_statistic'] == 'compensated':
                logger.warning("Performing DR calculations...")
                dr = treecorr.NNCorrelation(config,logger)
                dr.process(cat1,rand2)
                logger.info("Done DR calculations.")
                rd = treecorr.NNCorrelation(config,logger)
                rd.process(rand1,cat2)
                logger.info("Done RD calculations.")
        dd.write(config['nn_file_name'],rr,dr,rd)
        logger.warning("Wrote NN correlation to %s",config['nn_file_name'])

    # Do KK correlation function if necessary
    if 'kk_file_name' in config:
        logger.warning("Performing KK calculations...")
        kk = treecorr.KKCorrelation(config,logger)
        kk.process(cat1,cat2)
        logger.info("Done KK calculations.")
        kk.write(config['kk_file_name'])
        logger.warning("Wrote KK correlation to %s",config['kk_file_name'])

    # Do NK correlation function if necessary
    if 'nk_file_name' in config:
        if len(cat2) == 0:
            raise AttributeError("file_name2 is required for nk correlation")
        logger.warning("Performing NK calculations...")
        nk = treecorr.NKCorrelation(config,logger)
        nk.process(cat1,cat2)
        logger.info("Done NK calculation.")

        rk = None
        if len(rand1) == 0:
            if config.get('nk_statistic',None) == 'compensated':
                raise AttributeError("rand_files is required for nk_statistic = compensated")
        elif config.get('nk_statistic','compensated') == 'compensated':
            rk = treecorr.NKCorrelation(config,logger)
            rk.process(rand1,cat2)
            logger.info("Done RK calculation.")

        nk.write(config['nk_file_name'], rk)
        logger.warning("Wrote NK correlation to %s",config['nk_file_name'])

    # Do KG correlation function if necessary
    if 'kg_file_name' in config:
        if len(cat2) == 0:
            raise AttributeError("file_name2 is required for kg correlation")
        logger.warning("Performing KG calculations...")
        kg = treecorr.KGCorrelation(config,logger)
        kg.process(cat1,cat2)
        logger.info("Done KG calculation.")
        kg.write(config['kg_file_name'])
        logger.warning("Wrote KG correlation to %s",config['kg_file_name'])
Example #23
    def xi_2pt(cata,
               catb=None,
               k=None,
               ga=None,
               gb=None,
               corr='GG',
               maska=None,
               maskb=None,
               wa=None,
               wb=None,
               ran=True,
               mock=False,
               erron=True,
               jkmask=None,
               label0='',
               plot=False):
        """
    This is a flexible convenience wrapper for interacting with treecorr to work on CatalogStore objects. Some basic examples are given in corr_tests() of the main testsuite.py. g1, g2 are corrected by c1, c2 if they are ellipticities and cat.bs is True. Correction by sensitivity (1+m) is applied if cat.bs is True. Weighting is applied if cat.wt is True. Other config properties for treecorr are stored in the CatalogStore object. See catalog.py or config.py. Not all correlation types are fully integrated or tested; for example, only one kappa value is currently possible. Will be updated in the future as useful.

    Use:

    :cata, catb:    CatalogStore - Must supply both cata, catb (can be same reference) if NG or NK correlation. Otherwise catb is optional.
    :k:             str - Array name in cata, catb to use for kappa correlation. 
    :ga, gb:        str - Array names for g1, g2 treecorr inputs. If None assume e1, e2.
    :corr:          str - Type of correlation for treecorr.
    :maska, maskb:  [bool] - Masking array to apply to input catalogs.
    :wa, wb:        [float] - Additional weights to apply after cat.w is used. Combined as e.g., w=sqrt(cat.w*wa).
    :ran:           bool - Use randoms in correlation calculation. If True, assumes cat.ran_ra, cat.ran_dec exist.
    :mock:          bool - If mock catalog from sims. Used when calculating covariances from sims, not currently migrated from SV code.
    :erron:         bool - Calculate jackknife or sim cov errors. If False, uses treecorr error outputs. Not currently migrated from SV code. When implemented requires cat.calc_err in ('jk', 'mock').
    :jkmask:        [bool] - For jk, mock cov calculation loop over regions/sims.
    :label0:        str - Additional (optional) label string used in some outputs.
    :plot:          bool - Plot output?

    Output (len cat.tbins):

    :theta:         [float] - Treecorr np.exp(meanlogr)
    :out:           ([float]x4) - Output of signal e.g., (xi+,xi-,xi+im,x-im). For correlations with only one xi output, (xi,0.,xi_im,0.).
    :err:           ([float]x4) - Same but for sqrt(var).
    :chi2:          ([float]x4) - Same but for chi^2 if using jk or sim covariance.

    """

        maska = catalog.CatalogMethods.check_mask(cata.coadd, maska)
        jkmask = catalog.CatalogMethods.check_mask(cata.coadd, jkmask)

        maska0 = maska & jkmask

        if wa is None:
            wa = np.ones(len(cata.coadd))

        e1, e2, w, ms = lin.linear_methods.get_lin_e_w_ms(cata,
                                                          xi=True,
                                                          mock=mock,
                                                          mask=maska0,
                                                          w1=wa)

        if catb is None:
            if corr not in ['GG', 'NN', 'KK']:
                raise UseError(
                    'Must supply both cata,catb for NG,NK correlations.')

        if ga is not None:
            e1 = getattr(cata, ga + '1')[maska]
            e2 = getattr(cata, ga + '2')[maska]
        else:
            ga = 'e'
        if catb is None:
            gb = ga

        if (corr == 'GG') | ((catb != None) & (corr == 'KG')):
            catxa = treecorr.Catalog(g1=e1,
                                     g2=e2,
                                     w=w,
                                     ra=cata.ra[maska0],
                                     dec=cata.dec[maska0],
                                     ra_units='deg',
                                     dec_units='deg')
            catma = treecorr.Catalog(k=ms,
                                     w=w,
                                     ra=cata.ra[maska0],
                                     dec=cata.dec[maska0],
                                     ra_units='deg',
                                     dec_units='deg')

        elif (corr == 'NN') | ((catb != None) & (corr in ['NG', 'NK'])):
            catxa = treecorr.Catalog(w=w,
                                     ra=cata.ra[maska0],
                                     dec=cata.dec[maska0],
                                     ra_units='deg',
                                     dec_units='deg')
            if ran:
                catra = treecorr.Catalog(w=w,
                                         ra=cata.ran_ra[maska0],
                                         dec=cata.ran_dec[maska0],
                                         ra_units='deg',
                                         dec_units='deg')

        elif corr == 'KK':
            if k is None:
                raise UseError('Must specify k for KK correlation.')
            if k not in dir(cata):
                raise UseError('Unknown k field specified.')
            catxa = treecorr.Catalog(k=getattr(cata, k)[maska0],
                                     w=w,
                                     ra=cata.ra[maska0],
                                     dec=cata.dec[maska0],
                                     ra_units='deg',
                                     dec_units='deg')

        if catb is not None:

            maskb = catalog.CatalogMethods.check_mask(catb.coadd, maskb)

            if wb is None:
                wb = np.ones(len(catb.coadd))

            e1, e2, w, ms = lin.linear_methods.get_lin_e_w_ms(catb,
                                                              xi=True,
                                                              mock=mock,
                                                              mask=maskb,
                                                              w1=wb)

            if gb is not None:
                e1 = getattr(catb, gb + '1')[maskb]
                e2 = getattr(catb, gb + '2')[maskb]
            else:
                gb = 'e'

            if corr in ['GG', 'NG', 'KG']:
                catxb = treecorr.Catalog(g1=e1,
                                         g2=e2,
                                         w=w,
                                         ra=catb.ra[maskb],
                                         dec=catb.dec[maskb],
                                         ra_units='deg',
                                         dec_units='deg')
                catmb = treecorr.Catalog(k=ms,
                                         w=w,
                                         ra=catb.ra[maskb],
                                         dec=catb.dec[maskb],
                                         ra_units='deg',
                                         dec_units='deg')
            elif corr == 'NN':
                catxb = treecorr.Catalog(w=w,
                                         ra=catb.ra[maskb],
                                         dec=catb.dec[maskb],
                                         ra_units='deg',
                                         dec_units='deg')
                if ran:
                    catrb = treecorr.Catalog(w=w,
                                             ra=catb.ran_ra[maskb],
                                             dec=catb.ran_dec[maskb],
                                             ra_units='deg',
                                             dec_units='deg')
            elif corr in ['KK', 'NK']:
                if k is None:
                    raise UseError('Must specify k for KK correlation.')
                if k not in dir(catb):
                    raise UseError('Unknown k field specified.')
                catxb = treecorr.Catalog(k=getattr(catb, k)[maskb],
                                         w=w,
                                         ra=catb.ra[maskb],
                                         dec=catb.dec[maskb],
                                         ra_units='deg',
                                         dec_units='deg')

        xim = None
        xip_im = None
        xim_im = None
        ximerr = None
        xiperr_im = None
        ximerr_im = None
        if corr == 'GG':
            gg = treecorr.GGCorrelation(nbins=cata.tbins,
                                        min_sep=cata.sep[0],
                                        max_sep=cata.sep[1],
                                        sep_units='arcmin',
                                        bin_slop=cata.slop,
                                        verbose=0)
            kk = treecorr.KKCorrelation(nbins=cata.tbins,
                                        min_sep=cata.sep[0],
                                        max_sep=cata.sep[1],
                                        sep_units='arcmin',
                                        bin_slop=cata.slop,
                                        verbose=0)
            if catb is None:
                gg.process(catxa)
                kk.process(catma)
            else:
                gg.process(catxa, catxb)
                kk.process(catma, catmb)

            xip = gg.xip / kk.xi
            xim = gg.xim / kk.xi
            xiperr = ximerr = np.sqrt(gg.varxi)
            xip_im = gg.xip_im / kk.xi
            xim_im = gg.xim_im / kk.xi
            theta = np.exp(gg.meanlogr)

        elif corr == 'NN':
            nn = treecorr.NNCorrelation(nbins=cata.tbins,
                                        min_sep=cata.sep[0],
                                        max_sep=cata.sep[1],
                                        sep_units='arcmin',
                                        bin_slop=cata.slop,
                                        verbose=0)
            if ran:
                nr = treecorr.NNCorrelation(nbins=cata.tbins,
                                            min_sep=cata.sep[0],
                                            max_sep=cata.sep[1],
                                            sep_units='arcmin',
                                            bin_slop=cata.slop,
                                            verbose=0)
                rr = treecorr.NNCorrelation(nbins=cata.tbins,
                                            min_sep=cata.sep[0],
                                            max_sep=cata.sep[1],
                                            sep_units='arcmin',
                                            bin_slop=cata.slop,
                                            verbose=0)

            if catb is None:
                nn.process(catxa)
                xip = nn.npairs
                xiperr = np.sqrt(nn.npairs)
                if ran:
                    nr.process(catxa, catra)
                    rr.process(catra)
                    xip, xiperr = nn.calculateXi(rr, nr)
                    xiperr = np.sqrt(xiperr)
            else:
                nn.process(catxa, catxb)
                xip = nn.npairs
                xiperr = np.sqrt(nn.npairs)
                if ran:
                    rn = treecorr.NNCorrelation(nbins=cata.tbins,
                                                min_sep=cata.sep[0],
                                                max_sep=cata.sep[1],
                                                sep_units='arcmin',
                                                bin_slop=cata.slop,
                                                verbose=0)
                    nr.process(catxa, catrb)
                    rn.process(catra, catxb)
                    rr.process(catra, catrb)
                    xip, xiperr = nn.calculateXi(rr, nr, rn)
                    xiperr = np.sqrt(xiperr)
            theta = np.exp(nn.meanlogr)

        elif corr == 'KK':
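            # Scalar-scalar correlation of the field selected by k.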

            kk = treecorr.KKCorrelation(nbins=cata.tbins,
                                        min_sep=cata.sep[0],
                                        max_sep=cata.sep[1],
                                        sep_units='arcmin',
                                        bin_slop=cata.slop,
                                        verbose=0)
            if catb is None:
                kk.process(catxa)
            else:
                kk.process(catxa, catxb)
            xip = kk.xi
            xiperr = np.sqrt(kk.varxi)
            theta = np.exp(kk.meanlogr)

        elif corr == 'KG':
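            # Scalar-shear cross-correlation; the companion KK run against the
            # bias-field catalog provides the response normalization.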

            kg = treecorr.KGCorrelation(nbins=cata.tbins,
                                        min_sep=cata.sep[0],
                                        max_sep=cata.sep[1],
                                        sep_units='arcmin',
                                        bin_slop=cata.slop,
                                        verbose=0)
            kk = treecorr.KKCorrelation(nbins=cata.tbins,
                                        min_sep=cata.sep[0],
                                        max_sep=cata.sep[1],
                                        sep_units='arcmin',
                                        bin_slop=cata.slop,
                                        verbose=0)
            kg.process(catxa, catxb)
            kk.process(catxa, catmb)
            xip = kg.xi / kk.xi
            xiperr = np.sqrt(kg.varxi)
            xip_im = kg.xi_im / kk.xi
            theta = np.exp(kg.meanlogr)

        elif corr == 'NG':
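            # Position-shear (tangential shear) correlation. nk against the
            # bias-field catalog gives the response; when randoms are
            # available, calculateXi subtracts the shear measured around
            # random points to suppress additive systematics.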

            ng = treecorr.NGCorrelation(nbins=cata.tbins,
                                        min_sep=cata.sep[0],
                                        max_sep=cata.sep[1],
                                        sep_units='arcmin',
                                        bin_slop=cata.slop,
                                        verbose=0)
            nk = treecorr.NKCorrelation(nbins=cata.tbins,
                                        min_sep=cata.sep[0],
                                        max_sep=cata.sep[1],
                                        sep_units='arcmin',
                                        bin_slop=cata.slop,
                                        verbose=0)
            ng.process(catxa, catxb)
            nk.process(catxa, catmb)
            xip = ng.xi / nk.xi
            xiperr = np.sqrt(ng.varxi)
            xip_im = ng.xi_im / nk.xi
            if ran:
                rg = treecorr.NGCorrelation(nbins=cata.tbins,
                                            min_sep=cata.sep[0],
                                            max_sep=cata.sep[1],
                                            sep_units='arcmin',
                                            bin_slop=cata.slop,
                                            verbose=0)
                rk = treecorr.NKCorrelation(nbins=cata.tbins,
                                            min_sep=cata.sep[0],
                                            max_sep=cata.sep[1],
                                            sep_units='arcmin',
                                            bin_slop=cata.slop,
                                            verbose=0)
                rg.process(catra, catxb)
                rk.process(catra, catmb)
                xip, xip_im, xiperr = ng.calculateXi(rg)
                tmpa, tmp = nk.calculateXi(rk)
                if np.sum(tmpa) == 0:
                    tmpa = np.ones(len(xip))
                xip /= tmpa
                xiperr = np.sqrt(xiperr)
                xip_im /= tmpa
            theta = np.exp(ng.meanlogr)

        elif corr == 'NK':
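            # Position-scalar correlation; with randoms, calculateXi
            # subtracts the mean field measured around random points.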

            nk = treecorr.NKCorrelation(nbins=cata.tbins,
                                        min_sep=cata.sep[0],
                                        max_sep=cata.sep[1],
                                        sep_units='arcmin',
                                        bin_slop=cata.slop,
                                        verbose=0)
            nk.process(catxa, catxb)
            xip = nk.xi
            xiperr = np.sqrt(nk.varxi)
            if ran:
                rk = treecorr.NKCorrelation(nbins=cata.tbins,
                                            min_sep=cata.sep[0],
                                            max_sep=cata.sep[1],
                                            sep_units='arcmin',
                                            bin_slop=cata.slop,
                                            verbose=0)
                rk.process(catra, catxb)
                xip, xiperr = nk.calculateXi(rk)
                xiperr = np.sqrt(xiperr)
            theta = np.exp(nk.meanlogr)

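        # Pack results as [xi_+, xi_-, Im(xi_+), Im(xi_-)]; only the GG branch
        # fills the xi_- slots. The imaginary components reuse the real-part
        # error estimates.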
        out = [xip, xim, xip_im, xim_im]
        err = [xiperr, ximerr, xiperr, ximerr]
        chi2 = [0., 0., 0., 0.]

        if erron:
            kwargs = {
                'catb': catb,
                'k': k,
                'corr': corr,
                'maska': maska,
                'maskb': maskb,
                'wa': wa,
                'wb': wb,
                'ran': ran
            }
            if catb is None:
                if corr in ['KK', 'NK', 'KG']:
                    label = 'xi_2pt_' + cata.name + '_' + k + '_' + corr + '_' + label0
                else:
                    label = 'xi_2pt_' + cata.name + '_' + corr + '_' + label0
            else:
                if corr in ['KK', 'NK', 'KG']:
                    label = 'xi_2pt_' + cata.name + '-' + catb.name + '_' + k + '_' + corr + '_' + label0
                else:
                    label = 'xi_2pt_' + cata.name + '-' + catb.name + '_' + corr + '_' + label0
            if cata.calc_err == 'jk':
                err, chi2 = jackknife_methods.jk(cata, xi_2pt.xi_2pt,
                                                 [xip, xim, xip_im, xim_im],
                                                 label, **kwargs)
            elif cata.calc_err == 'mock':
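                # NOTE: this branch references names (cat, ggp, ggm, ce, cb,
                # mask, cosebi, parallel) that are not defined in this scope;
                # it appears to be a stale code path kept for reference.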
                ggperr, ggmerr, chi2p, chi2m, ceerr, cberr, cechi2, cbchi2 = BCC_Methods.jk_iter_xi(
                    cat,
                    ggp,
                    ggm,
                    ce,
                    cb,
                    mask,
                    w,
                    cosebi=cosebi,
                    parallel=parallel)

        if plot:
            fig.plot_methods.fig_create_xi(cata, corr, theta, out, err,
                                           k, ga, gb)

        return theta, out, err, chi2