Example #1
    def gammat(self, lens, sources, lens_bin_idx):
        '''calculate tangential shear correlation'''
        lens_corr = self.io.df_to_corr(lens, shears=False)
        source_corr = self.io.df_to_corr(sources, shears=True)
        rand = self.io.read_randoms(self.io.path_dict['random_prefix'] +
                                    '_{}.hdf'.format(lens_bin_idx))

        corr_kwargs = {
            'min_sep': 3,
            'max_sep': 90,
            'nbins': 12,
            'sep_units': 'arcmin'
        }
        # now make the correlation functions
        GGL = treecorr.NGCorrelation(**corr_kwargs)
        GGL.process(lens_corr, source_corr)

        # calculate random signal
        GGL_rand = treecorr.NGCorrelation(**corr_kwargs)
        GGL_rand.process(rand, source_corr)
        return {
            'xi+': GGL.xi - GGL_rand.xi,   # tangential shear with the random signal subtracted
            'xi-': GGL.xi_im,              # cross (imaginary) component
            'r': np.exp(GGL.meanlogr),     # mean separation of each bin
            'sig': np.sqrt(GGL.varxi)      # shot-noise error estimate
        }
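A hedged alternative to the hand-rolled subtraction above: TreeCorr can apply the random-point compensation itself through NGCorrelation.calculateXi, as several of the later examples on this page do. A minimal sketch, assuming the lens_corr, source_corr, rand and corr_kwargs objects built inside gammat():

    ng = treecorr.NGCorrelation(**corr_kwargs)
    rg = treecorr.NGCorrelation(**corr_kwargs)
    ng.process(lens_corr, source_corr)   # lenses x sources
    rg.process(rand, source_corr)        # randoms x sources
    # calculateXi returns (xi, xi_im, varxi) with the random signal removed
    gammat, gammax, varxi = ng.calculateXi(rg=rg)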
Example #2
def xshear_lens_jk(jk_label):

    source_jk_mask = labels_source_jk == jk_label
    source_mask = (source_z>0.1)&(source_z<0.3)
    source_mask = (~source_jk_mask)&(source_mask)

    source_cat = treecorr.Catalog(x=source_ra[source_mask],
                                  y=source_dec[source_mask],
                                  g1=source_e1[source_mask],
                                  g2=-1.*source_e2[source_mask],
                                  w=source_w[source_mask],
                                  x_units='degree',
                                  y_units='degree')
    
    mask_z_lens = (lens_z_jk>0.5)&(lens_z_jk<0.7)
    lens_mask = labels_lens_jk == jk_label
    lens_mask = (~lens_mask)&(mask_z_lens)
    
    lens_cat = treecorr.Catalog(x=lens_ra_jk[lens_mask], y=lens_dec_jk[lens_mask], x_units='degree', y_units='degree')
    
    ng = treecorr.NGCorrelation(nbins = 20, min_sep=0.5, max_sep=250, sep_units='arcmin', verbose=1)
    ng.process(lens_cat, source_cat)
    r_lens_h, xt_lens_h, xx_lens_h , w_lens_h = ng.meanr , ng.xi , ng.xi_im, ng.npairs
    w_lens_h = w_lens_h / np.sum(w_lens_h)
    #print "lens", xt_lens_h

    random_mask = labels_random_jk == jk_label
    random_cat = treecorr.Catalog(x=random_ra_jk[~random_mask], y=random_dec_jk[~random_mask], x_units='degree', y_units='degree')
    
    ng = treecorr.NGCorrelation(nbins = 20, min_sep=0.5, max_sep=250, sep_units='arcmin', verbose=1)
    ng.process(random_cat, source_cat)
    r_rand_h, xt_rand_h, xx_rand_h , w_rand_h = ng.meanr , ng.xi, ng.xi_im, ng.npairs
    w_rand_h = w_rand_h / np.sum(w_rand_h)
    #print "random", xt_rand_h 

    return r_lens_h, xt_lens_h, xx_lens_h , w_lens_h, r_rand_h, xt_rand_h, xx_rand_h , w_rand_h
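The function above leaves one jackknife region out at a time. A hedged sketch (assuming the jackknife labels run from 0 to N-1 and numpy is imported as np) of turning the per-region outputs into a jackknife mean and error on the tangential shear, subtracting the random-point signal as in the other examples:

    n_jk = len(np.unique(labels_lens_jk))
    samples = []
    for k in range(n_jk):
        out = xshear_lens_jk(k)
        samples.append(out[1] - out[5])      # lens signal minus random signal
    samples = np.array(samples)
    xt_mean = samples.mean(axis=0)
    # standard leave-one-out jackknife variance
    xt_err = np.sqrt((n_jk - 1.) / n_jk * np.sum((samples - xt_mean)**2, axis=0))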
Example #3
    def calculate_shear_pos(self, data, i, j):
        import treecorr

        cat_i = self.get_shear_catalog(data, i)
        n_i = cat_i.nobj

        cat_j, rancat_j = self.get_lens_catalog(data, j)
        n_j = cat_j.nobj
        n_rand_j = rancat_j.nobj if rancat_j is not None else 0

        print(
            f"Rank {self.rank} calculating shear-position bin pair ({i},{j}): {n_i} x {n_j} objects, {n_rand_j} randoms"
        )

        ng = treecorr.NGCorrelation(self.config)
        ng.process(cat_j, cat_i)

        if rancat_j:
            rg = treecorr.NGCorrelation(self.config)
            rg.process(rancat_j, cat_i)
        else:
            rg = None

        gammat, gammat_im, gammaterr = ng.calculateXi(rg=rg)

        theta = np.exp(ng.meanlogr)
        gammaterr = np.sqrt(gammaterr)

        return theta, gammat, gammaterr, ng.npairs, ng.weight
Example #4
    def calc_pos_shear(self,i,j,verbose,num_threads):

        mask = self.lens_binning==i
        lenscat_i = treecorr.Catalog(w=self.lensweight[mask], ra=self.lens['ra'][mask], dec=self.lens['dec'][mask], ra_units='deg', dec_units='deg')

        mask = self.ran_binning==i
        rancat_i  = treecorr.Catalog(w=np.ones(np.sum(mask)), ra=self.randoms['ra'][mask], dec=self.randoms['dec'][mask], ra_units='deg', dec_units='deg')

        m1,m2,mask = self.get_m(j)
        if self.params['has_sheared']:
            cat_j = treecorr.Catalog(g1=self.shape['e1'][mask]/m1[mask], g2=self.shape['e2'][mask]/m2[mask], w=self.weight[mask], ra=self.shape['ra'][mask], dec=self.shape['dec'][mask], ra_units='deg', dec_units='deg')
        else:
            cat_j = treecorr.Catalog(g1=self.shape['e1'][mask], g2=self.shape['e2'][mask], w=self.weight[mask], ra=self.shape['ra'][mask], dec=self.shape['dec'][mask], ra_units='deg', dec_units='deg')
            biascat_j = treecorr.Catalog(k=np.sqrt(self.shape['m1'][mask]*self.shape['m2'][mask]), w=self.weight[mask], ra=self.shape['ra'][mask], dec=self.shape['dec'][mask], ra_units='deg', dec_units='deg')

        ng = treecorr.NGCorrelation(nbins=self.params['tbins'], min_sep=self.params['tbounds'][0], max_sep=self.params['tbounds'][1], sep_units='arcmin', bin_slop=self.params['slop'], verbose=verbose,num_threads=num_threads)
        rg = treecorr.NGCorrelation(nbins=self.params['tbins'], min_sep=self.params['tbounds'][0], max_sep=self.params['tbounds'][1], sep_units='arcmin', bin_slop=self.params['slop'], verbose=verbose,num_threads=num_threads)
        if self.params['has_sheared']:
            norm = 1.
        else:
            nk = treecorr.NKCorrelation(nbins=self.params['tbins'], min_sep=self.params['tbounds'][0], max_sep=self.params['tbounds'][1], sep_units='arcmin', bin_slop=self.params['slop'], verbose=verbose,num_threads=num_threads)
            nk.process(lenscat_i,biascat_j)
            norm,tmp=nk.calculateXi()
        ng.process(lenscat_i,cat_j)
        rg.process(rancat_i,cat_j)
        gammat,gammat_im,gammaterr=ng.calculateXi(rg)

        theta=np.exp(ng.meanlogr)
        if np.sum(norm)==0:
          norm=1.
        gammat/=norm
        gammat_im/=norm
        gammaterr=np.sqrt(gammaterr/norm)

        return theta, gammat, gammaterr
Example #5
def test_contiguous():
    # This unit test comes from Melanie Simet, who discovered a bug in earlier
    # versions of the code where the Catalog didn't correctly handle input arrays
    # that were not contiguous in memory.  We want to make sure this kind of
    # input works correctly.  It also checks that the input dtype doesn't have
    # to be float.

    source_data = numpy.array([
            (0.0380569697547, 0.0142782758818, 0.330845443464, -0.111049332655),
            (-0.0261291090735, 0.0863787933931, 0.122954685209, 0.40260430406),
            (-0.0261291090735, 0.0863787933931, 0.122954685209, 0.40260430406),
            (0.125086697534, 0.0283621046495, -0.208159531309, 0.142491564101),
            (0.0457709426026, -0.0299249486373, -0.0406555089425, 0.24515956887),
            (-0.00338578248926, 0.0460291122935, 0.363057738173, -0.524536297555)],
            dtype=[('ra', None), ('dec', numpy.float64), ('g1', numpy.float32),
                   ('g2', numpy.float128)])

    config = {'min_sep': 0.05, 'max_sep': 0.2, 'sep_units': 'degrees', 'nbins': 5 }

    cat1 = treecorr.Catalog(ra=[0], dec=[0], ra_units='deg', dec_units='deg') # dumb lens
    cat2 = treecorr.Catalog(ra=source_data['ra'], ra_units='deg',
                            dec=source_data['dec'], dec_units='deg',
                            g1=source_data['g1'],
                            g2=source_data['g2'])
    cat2_float = treecorr.Catalog(ra=source_data['ra'].astype(float), ra_units='deg',
                                  dec=source_data['dec'].astype(float), dec_units='deg',
                                  g1=source_data['g1'].astype(float),
                                  g2=source_data['g2'].astype(float))

    print("dtypes of original arrays: ", [source_data[key].dtype for key in ['ra','dec','g1','g2']])
    print("dtypes of cat2 arrays: ", [getattr(cat2,key).dtype for key in ['ra','dec','g1','g2']])
    print("is original g2 array contiguous?", source_data['g2'].flags['C_CONTIGUOUS'])
    print("is cat2.g2 array contiguous?", cat2.g2.flags['C_CONTIGUOUS'])
    assert not source_data['g2'].flags['C_CONTIGUOUS']
    assert cat2.g2.flags['C_CONTIGUOUS']

    ng = treecorr.NGCorrelation(config)
    ng.process(cat1,cat2)
    ng_float = treecorr.NGCorrelation(config)
    ng_float.process(cat1,cat2_float)
    numpy.testing.assert_equal(ng.xi, ng_float.xi)

    # While we're at it, check that non-1d inputs work, but emit a warning.
    if __name__ == '__main__':
        v = 1
    else:
        v = 0
    cat2_non1d = treecorr.Catalog(ra=source_data['ra'].reshape(3,2), ra_units='deg',
                                  dec=source_data['dec'].reshape(1,1,1,6), dec_units='deg',
                                  g1=source_data['g1'].reshape(6,1),
                                  g2=source_data['g2'].reshape(1,6), verbose=v)
    ng.process(cat1,cat2_non1d)
    numpy.testing.assert_equal(ng.xi, ng_float.xi)
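For context, a small standalone illustration (plain numpy behaviour, not part of the test above) of why the contiguity check matters: a field view into a structured array is strided, so it is not C-contiguous and has to be copied before it can be handed to compiled code.

    import numpy as np

    rec = np.zeros(4, dtype=[('a', np.float64), ('b', np.float64)])
    print(rec['b'].flags['C_CONTIGUOUS'])                          # False: strided view
    print(np.ascontiguousarray(rec['b']).flags['C_CONTIGUOUS'])    # True after copying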
Example #6
def xshear_lens_jk(zl1, zl2, jk_label):

    source_file = h5py.File("source_zb_" + str(zl2 + 0.1) + "_0.9.h5", "r")  # open read-only
    source_ra = source_file["ra"][:]
    source_dec = source_file["dec"][:]
    source_z = source_file["zb"][:]
    source_e1 = source_file["psf1"][:]
    source_e2 = source_file["psf2"][:]
    source_w = source_file["w"][:]
    source_size = source_file["snr"][:]
    source_file.close()

    source_cat = treecorr.Catalog(x=source_ra,
                                  y=source_dec,
                                  g1=source_e1,
                                  g2=-1. * source_e2,
                                  w=source_w,
                                  x_units='degree',
                                  y_units='degree')

    mask_z_lens = (lens_z_jk > zl1) & (lens_z_jk < zl2)
    lens_mask = labels_lens_jk == jk_label
    lens_mask = (~lens_mask) & (mask_z_lens)

    lens_cat = treecorr.Catalog(x=lens_ra_jk[lens_mask],
                                y=lens_dec_jk[lens_mask],
                                x_units='degree',
                                y_units='degree')

    ng = treecorr.NGCorrelation(nbins=25,
                                min_sep=0.3,
                                max_sep=300,
                                sep_units='arcmin',
                                verbose=1)
    ng.process(lens_cat, source_cat)  # NG order: the count (lens) catalog first, the shear catalog second
    r_lens_h, xt_lens_h = ng.meanr, ng.xi

    random_mask = labels_random_jk == jk_label
    random_cat = treecorr.Catalog(x=random_ra_jk[~random_mask],
                                  y=random_dec_jk[~random_mask],
                                  x_units='degree',
                                  y_units='degree')

    ng = treecorr.NGCorrelation(nbins=25,
                                min_sep=0.3,
                                max_sep=300,
                                sep_units='arcmin',
                                verbose=1)
    ng.process(random_cat, source_cat)  # randoms first, shear catalog second
    r_rand_h, xt_rand_h = ng.meanr, ng.xi

    return r_lens_h, xt_lens_h, r_rand_h, xt_rand_h
Example #7
    def compute_position_shear(self, i, j, cat1, cat2):
        maski = (cat1.mask) & (self.p1 == i)
        maskj = (cat2.mask) & (self.p2 == j)

        # Initialise the catalogues
        cat_i = treecorr.Catalog(ra=cat1.cols['ra'][maski],
                                 dec=cat1.cols['dec'][maski],
                                 ra_units='deg',
                                 dec_units='deg')

        rcat_i = treecorr.Catalog(ra=self.rcat1['ra'][maski],
                                  dec=self.rcat1['dec'][maski],
                                  ra_units='deg',
                                  dec_units='deg')

        namej_1, namej_2 = colnames[self.corrtype[1]]
        cat_j = treecorr.Catalog(g1=cat2.cols[namej_1][maskj],
                                 g2=cat2.cols[namej_2][maskj],
                                 ra=cat2.cols['ra'][maskj],
                                 dec=cat2.cols['dec'][maskj],
                                 ra_units='deg',
                                 dec_units='deg')

        # Set up the correlation
        ng = treecorr.NGCorrelation(nbins=cat1.info['tbins'],
                                    min_sep=cat1.info['tmin'],
                                    max_sep=cat1.info['tmax'],
                                    sep_units='arcmin',
                                    bin_slop=0.1,
                                    verbose=True,
                                    num_threads=1)
        rg = treecorr.NGCorrelation(nbins=cat1.info['tbins'],
                                    min_sep=cat1.info['tmin'],
                                    max_sep=cat1.info['tmax'],
                                    sep_units='arcmin',
                                    bin_slop=0.1,
                                    verbose=True,
                                    num_threads=1)

        # And process it
        ng.process(cat_i, cat_j)
        rg.process(rcat_i, cat_j)

        gammat, gammat_im, gammaterr = ng.calculateXi(rg)

        theta = np.exp(ng.meanlogr)
        gammaterr = np.sqrt(gammaterr)

        return theta, gammat, gammat_im, gammaterr, gammaterr
Example #8
def pos_shear_corr(pos_lens,pos_source,shear_source,k_source=None,w_lense=None,w_source=None,same_cell=False,num_threads=0):

	nbins   = 6
	min_sep = 0.05 # 3 arcmin
	max_sep = 3.0 # 180 arcmin 
	bin_size = (max_sep-min_sep)/nbins # roughly
	bin_slop = 0.05/bin_size # 0.1 -> 0.05 # 2pt_pipeline for des used bin_slop: 0.01 here: https://github.com/des-science/2pt_pipeline/blob/master/pipeline/twopt_pipeline.yaml
	logger = None

	ra_lens, dec_lens = pos_lens
	ra_source, dec_source = pos_source
	g1_source, g2_source = shear_source

	# foreground (lens)
	cat_lens = treecorr.Catalog(ra=ra_lens, dec=dec_lens, w=w_lense, ra_units='degrees', dec_units='degrees') 
	
	# background (source)
	cat_source = treecorr.Catalog(ra=ra_source, dec=dec_source, w=w_source, g1=g1_source, g2=g2_source, k=k_source, ra_units='degrees', dec_units='degrees')

	ng = treecorr.NGCorrelation(min_sep=min_sep, max_sep=max_sep, nbins=nbins, bin_slop=bin_slop, sep_units='degrees', logger=logger)
	ng.process_cross(cat_lens,cat_source,num_threads=num_threads)  # there's no process_auto for this object

	# one shear variance per pixel per source zbin
	varg = treecorr.calculateVarG(cat_source) if same_cell else np.nan 

	return ng, varg
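A hedged sketch (not from the source) of completing the measurement: process_cross only accumulates the pair sums, so finalize() has to be called with the shear variance before ng.xi is meaningful. It assumes pos_lens, pos_source and shear_source are defined as in the signature above:

    ng, varg = pos_shear_corr(pos_lens, pos_source, shear_source, same_cell=True)
    ng.finalize(varg)                    # turn the accumulated sums into weighted means
    gamma_t, gamma_x = ng.xi, ng.xi_im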
Example #9
def test_pairwise():
    # Test the same profile, but with the pairwise calculation:
    nsource = 1000000
    gamma0 = 0.05
    r0 = 10.
    L = 5. * r0
    numpy.random.seed(8675309)
    x = (numpy.random.random_sample(nsource)-0.5) * L
    y = (numpy.random.random_sample(nsource)-0.5) * L
    r2 = (x**2 + y**2)
    gammat = gamma0 * numpy.exp(-0.5*r2/r0**2)
    g1 = -gammat * (x**2-y**2)/r2
    g2 = -gammat * (2.*x*y)/r2

    dx = (numpy.random.random_sample(nsource)-0.5) * L
    dy = (numpy.random.random_sample(nsource)-0.5) * L

    lens_cat = treecorr.Catalog(x=dx, y=dy, x_units='arcmin', y_units='arcmin')
    source_cat = treecorr.Catalog(x=x+dx, y=y+dy, g1=g1, g2=g2, x_units='arcmin', y_units='arcmin')
    ng = treecorr.NGCorrelation(bin_size=0.1, min_sep=1., max_sep=25., sep_units='arcmin',
                                verbose=1, pairwise=True)
    ng.process(lens_cat, source_cat)

    r = ng.meanr
    true_gt = gamma0 * numpy.exp(-0.5*r**2/r0**2)

    print('ng.xi = ',ng.xi)
    print('ng.xi_im = ',ng.xi_im)
    print('true_gammat = ',true_gt)
    print('ratio = ',ng.xi / true_gt)
    print('diff = ',ng.xi - true_gt)
    print('max diff = ',max(abs(ng.xi - true_gt)))
    # I don't really understand why this comes out slightly less accurate.
    # I would have thought it would be slightly more accurate because it doesn't use the
    # approximations intrinsic to the tree calculation.
    assert max(abs(ng.xi - true_gt)) < 4.e-4
    assert max(abs(ng.xi_im)) < 3.e-5

    # Check that we get the same result using the corr2 executable:
    if __name__ == '__main__':
        lens_cat.write(os.path.join('data','ng_pairwise_lens.dat'))
        source_cat.write(os.path.join('data','ng_pairwise_source.dat'))
        import subprocess
        corr2_exe = get_script_name('corr2')
        p = subprocess.Popen( [corr2_exe,"ng_pairwise.yaml"] )
        p.communicate()
        corr2_output = numpy.genfromtxt(os.path.join('output','ng_pairwise.out'),names=True)
        print('ng.xi = ',ng.xi)
        print('from corr2 output = ',corr2_output['gamT'])
        print('ratio = ',corr2_output['gamT']/ng.xi)
        print('diff = ',corr2_output['gamT']-ng.xi)
        numpy.testing.assert_almost_equal(corr2_output['gamT']/ng.xi, 1., decimal=3)

        print('xi_im from corr2 output = ',corr2_output['gamX'])
        assert max(abs(corr2_output['gamX'])) < 3.e-5
Example #10
def test_single():
    # Use gamma_t(r) = gamma0 exp(-r^2/2r0^2) around a single lens
    # i.e. gamma(r) = -gamma0 exp(-r^2/2r0^2) (x+iy)^2/r^2

    nsource = 1000000
    gamma0 = 0.05
    r0 = 10.
    L = 5. * r0
    numpy.random.seed(8675309)
    x = (numpy.random.random_sample(nsource)-0.5) * L
    y = (numpy.random.random_sample(nsource)-0.5) * L
    r2 = (x**2 + y**2)
    gammat = gamma0 * numpy.exp(-0.5*r2/r0**2)
    g1 = -gammat * (x**2-y**2)/r2
    g2 = -gammat * (2.*x*y)/r2

    lens_cat = treecorr.Catalog(x=[0], y=[0], x_units='arcmin', y_units='arcmin')
    source_cat = treecorr.Catalog(x=x, y=y, g1=g1, g2=g2, x_units='arcmin', y_units='arcmin')
    ng = treecorr.NGCorrelation(bin_size=0.1, min_sep=1., max_sep=25., sep_units='arcmin',
                                verbose=1)
    ng.process(lens_cat, source_cat)

    # log(<R>) != <logR>, but it should be close:
    print('meanlogr - log(meanr) = ',ng.meanlogr - numpy.log(ng.meanr))
    numpy.testing.assert_almost_equal(ng.meanlogr, numpy.log(ng.meanr), decimal=3)

    r = ng.meanr
    true_gt = gamma0 * numpy.exp(-0.5*r**2/r0**2)

    print('ng.xi = ',ng.xi)
    print('ng.xi_im = ',ng.xi_im)
    print('true_gammat = ',true_gt)
    print('ratio = ',ng.xi / true_gt)
    print('diff = ',ng.xi - true_gt)
    print('max diff = ',max(abs(ng.xi - true_gt)))
    assert max(abs(ng.xi - true_gt)) < 4.e-4
    assert max(abs(ng.xi_im)) < 3.e-5

    # Check that we get the same result using the corr2 executable:
    if __name__ == '__main__':
        lens_cat.write(os.path.join('data','ng_single_lens.dat'))
        source_cat.write(os.path.join('data','ng_single_source.dat'))
        import subprocess
        corr2_exe = get_script_name('corr2')
        p = subprocess.Popen( [corr2_exe,"ng_single.yaml"] )
        p.communicate()
        corr2_output = numpy.genfromtxt(os.path.join('output','ng_single.out'),names=True)
        print('ng.xi = ',ng.xi)
        print('from corr2 output = ',corr2_output['gamT'])
        print('ratio = ',corr2_output['gamT']/ng.xi)
        print('diff = ',corr2_output['gamT']-ng.xi)
        numpy.testing.assert_almost_equal(corr2_output['gamT']/ng.xi, 1., decimal=3)

        print('xi_im from corr2 output = ',corr2_output['gamX'])
        assert max(abs(corr2_output['gamX'])) < 3.e-5
Example #11
def cross_corr(gal, shear, random, config):
    '''
    measure the cross-correlation between a gal catalog and a shear catalog
    '''
    min_sep = config['min_sep']
    max_sep = config['max_sep']
    units = config['units']
    nbins = config['nbins']
    bin_slop = config['bin_slop']

    gal_cat = treecorr.Catalog(ra = gal["RA"] , dec = gal["DEC"], ra_units='deg', dec_units='deg')
    shear_cat = treecorr.Catalog(ra = shear["RA"] , dec = shear["DEC"], g1 = shear["gamma1"], g2 = -1.*shear["gamma2"], ra_units='deg', dec_units='deg')
    ran_cat = treecorr.Catalog(ra = random["RA"] , dec = random["DEC"], ra_units='deg', dec_units='deg')
    ng = treecorr.NGCorrelation(min_sep = min_sep, max_sep = max_sep, nbins = nbins, sep_units = units, bin_slop = bin_slop)
    rg = treecorr.NGCorrelation(min_sep = min_sep, max_sep = max_sep, nbins = nbins, sep_units = units, bin_slop = bin_slop)
    ng.process(gal_cat, shear_cat, num_threads = 60)
    rg.process(ran_cat, shear_cat, num_threads = 60)
    theta, xi_t , xi_x , w , npairs = ng.meanr, ng.xi, ng.xi_im, ng.weight, ng.npairs
    theta_r, xi_tr , xi_xr , wr , npairs_r = rg.meanr, rg.xi, rg.xi_im, rg.weight, rg.npairs
    
    return theta, xi_t, xi_x, npairs, xi_tr, xi_xr, npairs_r
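A hypothetical call to cross_corr above; gal, shear and random are assumed to be table-like objects carrying the RA/DEC (and gamma1/gamma2) columns the function reads, and the config keys match those unpacked at the top of the function.

    config = {'min_sep': 1.0, 'max_sep': 100.0, 'units': 'arcmin', 'nbins': 15, 'bin_slop': 0.1}
    theta, xi_t, xi_x, npairs, xi_tr, xi_xr, npairs_r = cross_corr(gal, shear, random, config)
    gamma_t = xi_t - xi_tr    # subtract the signal measured around the random points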
Example #12
def get_xi(map, window_norm, mask=None, Sim_jk=None):
    self = Sim_jk
    maps = {'galaxy': map[0]}
    maps['shear'] = {0: map[1], 1: map[2]}
    if mask is None:
        mask = {}
        mask['galaxy'] = maps['galaxy'] == hp.UNSEEN
        mask['shear'] = maps['shear'][0] == hp.UNSEEN
    tree_cat_args = get_treecorr_cat_args(maps, masks=mask, nside=Sim_jk.nside)
    tree_cat = {}
    tree_cat['galaxy'] = treecorr.Catalog(w=maps['galaxy'][~mask['galaxy']],
                                          **tree_cat_args['galaxy'])
    tree_cat['shear'] = treecorr.Catalog(g1=maps['shear'][0][~mask['shear']],
                                         g2=maps['shear'][1][~mask['shear']],
                                         **tree_cat_args['shear'])
    del mask
    ndim = 3  #FIXME
    xi = np.zeros(self.n_th_bins * (self.ndim + 1))
    th_i = 0
    tree_corrs = {}
    n_th_bins = self.n_th_bins
    for corr in self.kappa_class.corrs:  #note that in treecorr npairs includes pairs with 0 weights. That affects this calc
        if corr == corr_ggl:
            tree_corrs[corr] = treecorr.NGCorrelation(**corr_config)
            tree_corrs[corr].process(tree_cat['galaxy'], tree_cat['shear'])
            xi[th_i:th_i + n_th_bins] = tree_corrs[corr].xi * tree_corrs[
                corr].weight / window_norm[corr][
                    'weight'] * -1  #sign convention
            #
            th_i += self.n_th_bins
        if corr == corr_ll:
            tree_corrs[corr] = treecorr.GGCorrelation(**corr_config)
            tree_corrs[corr].process(tree_cat['shear'])
            xi[th_i:th_i + n_th_bins] = tree_corrs[
                corr].xip  #*tree_corrs[corr].npairs/window_norm[corr]['weight']
            th_i += n_th_bins
            xi[th_i:th_i + n_th_bins] = tree_corrs[
                corr].xim  #*tree_corrs[corr].npairs/window_norm[corr]['weight']
            th_i += n_th_bins
        if corr == corr_gg:
            tree_corrs[corr] = treecorr.NNCorrelation(**corr_config)
            tree_corrs[corr].process(tree_cat['galaxy'])
            xi[th_i:th_i + n_th_bins] = tree_corrs[corr].weight / tree_corrs[
                corr].npairs  #window_norm[corr]['weight']  #
            #             xi[th_i:th_i+n_th_bins]=tree_corrs[corr].weight/window_norm[corr]
            th_i += n_th_bins


#     del tree_cat,tree_corrs
#     gc.collect()
    return xi
Example #13
def compute_SingleMock_SinglePair_NGCorrelation(cat_j, bin_i, bin_j, config):

    cat_i_name = '/home/hcamacho/lens_s1_z' + str(bin_i + 1) + '_c1.fits'
    cat_i = treecorr.Catalog(cat_i_name, config)

    ng = treecorr.NGCorrelation(config)
    ng.process(cat_i, cat_j, num_threads=ncpus)

    data_name = '/home/anderson/3x2pt/ng_data/ng_s1_z' + str(bin_i + 1) + str(
        bin_j + 1) + '_c1.dat'
    ng.write(data_name)
    ng.clear()

    return
Example #14
def compute_SinglePair_NGCorrelation(cat_j, bin_i, bin_j, config):

        cat_i_name = input_dir + 'lens-cat_z'+str(bin_i+1)+'.fits'
        cat_i = treecorr.Catalog(cat_i_name, config)

        ng = treecorr.NGCorrelation(config)
        ng.process(cat_i, cat_j, num_threads=ncpus)


        data_name = output_dir + 'ng_data/ng_z'+str(bin_i+1)+str(bin_j+1)+'.dat'
        ng.write(data_name)
        ng.clear()

        return
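The function writes its result with NGCorrelation.write and returns nothing, so downstream use means reading the file back. A hedged sketch: the exact file name depends on the bin indices, and the column layout depends on the TreeCorr version, but the gamT/gamX columns are the same ones the corr2 tests elsewhere on this page read.

    import numpy as np
    data = np.genfromtxt(output_dir + 'ng_data/ng_z11.dat', names=True)
    gamma_t, gamma_x = data['gamT'], data['gamX']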
Example #15
def xshear_lens_jk(zl1, zl2, jk_label):

    mask_z_source = source_z>zl2+0.1
    
    source_cat = treecorr.Catalog(x=source_ra[mask_z_source],
                                  y=source_dec[mask_z_source],
                                  g1=source_e1[mask_z_source],
                                  g2=-1.*source_e2[mask_z_source],
                                  w=source_w[mask_z_source],
                                  x_units='degree',
                                  y_units='degree')
       
    mask_z_lens = (lens_z_jk>zl1)&(lens_z_jk<zl2)
    lens_mask = labels_lens_jk == jk_label
    lens_mask = (~lens_mask)&(mask_z_lens)
    
    lens_cat = treecorr.Catalog(x=lens_ra_jk[lens_mask], y=lens_dec_jk[lens_mask], x_units='degree', y_units='degree')
    
    ng = treecorr.NGCorrelation(nbins = 25, min_sep=0.3, max_sep=300, sep_units='arcmin', verbose=1)
    ng.process(lens_cat, source_cat)
    r_lens_h, xt_lens_h = ng.meanr , ng.xi 
    
    random_mask = labels_random_jk == jk_label
    random_cat = treecorr.Catalog(x=random_ra_jk[~random_mask], y=random_dec_jk[~random_mask], x_units='degree', y_units='degree')
    
    ng = treecorr.NGCorrelation(nbins = 25, min_sep=0.3, max_sep=300, sep_units='arcmin', verbose=1)
    ng.process(random_cat, source_cat)
    r_rand_h, xt_rand_h = ng.meanr , ng.xi
    
    return r_lens_h, xt_lens_h, r_rand_h, xt_rand_h
Example #16
def getRandomGGL(source_table, n_resample=100):
    '''use a catalog of randoms as lens objects
    '''
    #read in the randoms and make a master table
    fields = ('F1', 'F2', 'F3', 'F4', 'F5')
    random_tables = [ascii.read(f + 'randoms.csv') for f in fields]
    master_randoms = vstack(random_tables)
    #the randoms are ~6 times bigger than the lens catalogs
    master_randoms = master_randoms[::6]

    gammat_list = []
    gammax_list = []
    r_list = []

    lens_tab_size = len(master_randoms)
    src_tab_size = len(source_table)

    #calibrate the shear bias
    source_table = shearBias(source_table)

    for i in range(0, n_resample):
        #make new catalogs by resampling input lens and src
        lens_resamp_idx = np.random.randint(0, lens_tab_size, lens_tab_size)
        src_resamp_idx = np.random.randint(0, src_tab_size, src_tab_size)
        random_cat = treecorr.Catalog(
            ra=master_randoms[lens_resamp_idx]['ra'].data,
            dec=master_randoms[lens_resamp_idx]['dec'].data,
            ra_units='radians',
            dec_units='radians')
        source_corr = astpyToCorr(source_table[src_resamp_idx])
        #now make correlation functions
        GGL = treecorr.NGCorrelation(min_sep=0.1,
                                     max_sep=90,
                                     nbins=10,
                                     sep_units='arcmin')
        GGL.process(random_cat, source_corr)
        gammat_list.append(GGL.xi)
        gammax_list.append(GGL.xi_im)
        r_list.append(np.exp(GGL.meanlogr))

    return {'gammat': gammat_list, "gammax": gammax_list, "r": r_list}
Example #17
def test_add_precompute_results_treecorr():

    try:
        import treecorr
    except ImportError:
        warnings.warn('TreeCorr is not installed. Skipping test.',
                      RuntimeWarning)
        return 0

    table_l, table_s = get_test_catalogs(1000, 10000)
    theta_bins = np.logspace(0, 1, 11)

    cat_l = treecorr.Catalog(ra=table_l['ra'],
                             dec=table_l['dec'],
                             ra_units='deg',
                             dec_units='deg')
    cat_s = treecorr.Catalog(ra=table_s['ra'],
                             dec=table_s['dec'],
                             g1=table_s['e_1'],
                             g2=table_s['e_2'],
                             ra_units='deg',
                             dec_units='deg')

    ng = treecorr.NGCorrelation(max_sep=np.amax(theta_bins),
                                min_sep=np.amin(theta_bins),
                                nbins=len(theta_bins) - 1,
                                sep_units='deg',
                                metric='Arc',
                                brute=True)
    ng.process(cat_l, cat_s)

    table_l = precompute.add_precompute_results(table_l,
                                                table_s,
                                                theta_bins,
                                                shear_mode=True,
                                                nside=32)

    assert np.all(np.array(ng.npairs, dtype=int) == number_of_pairs(table_l))
    assert np.all(
        np.isclose(ng.xi, raw_tangential_shear(table_l), atol=1e-9, rtol=0))
Example #18
    def NGCorrelation(self):
        """
        Calculates the 2D correlation function using the Catalog's ra, dec.
        Requires randcatalog to exist.

        Returns tuple (logr, meanlogr, xi, xi_im, xivar)
        """
        catN = treecorr.Catalog(ra=self.catalog["ra"], dec=self.catalog["dec"], 
                  ra_units="radians", dec_units="radians", g1=self.catalog["g1"],
                  g2=self.catalog["g2"] )
        dd=treecorr.NGCorrelation(min_sep=self.min_sep, bin_size=self.bin_size, 
                                  max_sep=self.max_sep, sep_units='arcmin')

        dd.process(catN,catN)
        logr = dd.logr 
        meanlogr = dd.meanlogr
        xi=dd.xi
        xi_im=dd.xi_im
        xivar=dd.varxi
        return (logr, meanlogr, xi, xi_im, xivar)


                                
Example #19
def CorProcess(catN,
               catG,
               sightBins,
               nbins,
               min_sep,
               max_sep,
               rpar_step,
               rpar_min,
               logfile,
               which_corr='NG',
               RR=False):
    #print('test corProcess',file=logfile)
    corList = list()
    pairCounts = np.zeros((sightBins, nbins))
    wpairCounts = np.zeros((sightBins, nbins))
    for i in np.arange(sightBins):
        if which_corr == 'NG' and not RR:
            corList.append(
                treecorr.NGCorrelation(nbins=nbins,
                                       min_sep=min_sep,
                                       max_sep=max_sep,
                                       min_rpar=i * rpar_step + rpar_min,
                                       max_rpar=(i + 1) * rpar_step +
                                       rpar_min))  #metric='Rperp'
        if which_corr == 'NN' or RR:
            #print('RR test', file=logfile)
            corList.append(
                treecorr.NNCorrelation(nbins=nbins,
                                       min_sep=min_sep,
                                       max_sep=max_sep,
                                       min_rpar=i * rpar_step + rpar_min,
                                       max_rpar=(i + 1) * rpar_step +
                                       rpar_min))  #metric='Rperp'
        corList[i].process(catN, catG, metric='Rperp')
        pairCounts[i] = corList[i].npairs
        wpairCounts[i] = corList[i].weight
    return corList, pairCounts, wpairCounts
Example #20
def getGGL(lens_table,
           source_table,
           n_resample=100,
           swap_test=True,
           cal_lens=True,
           cal_source=True,
           shape_cut=True):
    """
    calculate galaxy galaxy lensing

    parameters
    lens_table: astropy table of lens galaxies. must have positions and shear specified 
    source_table: astropy table of source galaxies. must have positions and shear specified
    n_resample: number of bootstrap resamplings to do. default is 100
    swap_test: calculate the tangential shear swapping lens and souces 

    returns a dictionary containing the following:
    gammat: tangential shear for each boostrap iterations. a list of n_resample
        python arrays
    gammax: tangential shear calculated after rotating sources 45 degrees
    r: mean center of radial bins
    gammat_flip: tangential shear calculated for swap test
    gammax_flip: tangential shear calculated after rotating sources 45 degrees
        calculated for the swap
    r_flip: mean center of radial bins 
    nullGGL : galaxy galaxy lens treecorr object. 
        swap shear and lens planes and calculate tangential shear
        nice null test for photo-zs
    """
    lens_tab_size = len(lens_table)
    gammat_list = []
    gammax_list = []
    r_list = []

    #calibrate the shear bias for both tables
    if cal_lens:
        lens_table = shearBias(lens_table)
    if shape_cut:
        source_table = shapeCut(source_table)
    if cal_source:
        source_table = shearBias(source_table)

    src_tab_size = len(source_table)

    #each iteration in the loop is a bootstrap resample
    for i in range(0, n_resample):
        #make new catalogs by resampling input lens and src
        lens_resamp_idx = np.random.randint(0, lens_tab_size, lens_tab_size)
        lens_corr = astpyToCorr(lens_table[lens_resamp_idx])
        src_resamp_idx = np.random.randint(0, src_tab_size, src_tab_size)
        source_corr = astpyToCorr(source_table[src_resamp_idx])
        #now make correlation functions
        GGL = treecorr.NGCorrelation(min_sep=0.1,
                                     max_sep=90,
                                     nbins=10,
                                     sep_units='arcmin')
        GGL.process(lens_corr, source_corr)
        gammat_list.append(GGL.xi)
        gammax_list.append(GGL.xi_im)
        r_list.append(np.exp(GGL.meanlogr))

    #option to do a one off lens source swap
    if swap_test == False:
        return {"gammat": gammat_list, "gammax": gammax_list, "r": r_list}

    else:
        nullGGL = treecorr.NGCorrelation(min_sep=0.1,
                                         max_sep=90,
                                         nbins=10,
                                         sep_units='arcmin')
        nullGGL.process(source_corr, lens_corr)
        return {
            "gammat": gammat_list,
            "gammax": gammax_list,
            "r": r_list,
            "gammat_flip": nullGGL.xi,
            "r_flip": np.exp(nullGGL.meanlogr)
        }
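A hypothetical call to getGGL above, with numpy imported as np; lens_table and source_table are assumed to be astropy Tables with the columns expected by astpyToCorr, shearBias and shapeCut.

    result = getGGL(lens_table, source_table, n_resample=100, swap_test=True)
    r = np.mean(result['r'], axis=0)                  # mean bin centres over resamples
    gammat = np.mean(result['gammat'], axis=0)        # bootstrap mean tangential shear
    gammat_err = np.std(result['gammat'], axis=0)     # bootstrap error bars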
Example #21
def corr2(config, logger=None):
    """Run the full two-point correlation function code based on the parameters in the
    given config dict.

    The function print_corr2_params() will output information about the valid parameters
    that are expected to be in the config dict.

    Optionally, a logger parameter may be given, in which case it is used for logging.
    If not given, the logging will be based on the verbose and log_file parameters.

    :param config:  The configuration dict which defines what to do.
    :param logger:  If desired, a logger object for logging. (default: None, in which case
                    one will be built according to the config dict's verbose level.)
    """
    # Setup logger based on config verbose value
    if logger is None:
        logger = treecorr.config.setup_logger(
                treecorr.config.get(config,'verbose',int,1),
                config.get('log_file',None))

    # Check that config doesn't have any extra parameters.
    # (Such values are probably typos.)
    # Also convert the given parameters to the correct type, etc.
    config = treecorr.config.check_config(config, corr2_valid_params, corr2_aliases, logger)

    import pprint
    logger.debug('Using configuration dict:\n%s',pprint.pformat(config))

    if ( 'output_dots' not in config 
          and config.get('log_file',None) is None 
          and config['verbose'] >= 2 ):
        config['output_dots'] = True

    # Set the number of threads
    num_threads = config.get('num_threads',None)
    logger.debug('From config dict, num_threads = %s',num_threads)
    treecorr.set_omp_threads(num_threads, logger)

    # Read in the input files.  Each of these is a list.
    cat1 = treecorr.read_catalogs(config, 'file_name', 'file_list', 0, logger)
    if len(cat1) == 0:
        raise AttributeError("Either file_name or file_list is required")
    cat2 = treecorr.read_catalogs(config, 'file_name2', 'file_list2', 1, logger)
    rand1 = treecorr.read_catalogs(config, 'rand_file_name', 'rand_file_list', 0, logger)
    rand2 = treecorr.read_catalogs(config, 'rand_file_name2', 'rand_file_list2', 1, logger)
    if len(cat2) == 0 and len(rand2) > 0:
        raise AttributeError("rand_file_name2 is invalid without file_name2")
    logger.info("Done reading input catalogs")

    # Do GG correlation function if necessary
    if 'gg_file_name' in config or 'm2_file_name' in config:
        logger.warning("Performing GG calculations...")
        gg = treecorr.GGCorrelation(config,logger)
        gg.process(cat1,cat2)
        logger.info("Done GG calculations.")
        if 'gg_file_name' in config:
            gg.write(config['gg_file_name'])
            logger.warning("Wrote GG correlation to %s",config['gg_file_name'])
        if 'm2_file_name' in config:
            gg.writeMapSq(config['m2_file_name'], m2_uform=config['m2_uform'])
            logger.warning("Wrote Mapsq values to %s",config['m2_file_name'])

    # Do NG correlation function if necessary
    if 'ng_file_name' in config or 'nm_file_name' in config or 'norm_file_name' in config:
        if len(cat2) == 0:
            raise AttributeError("file_name2 is required for ng correlation")
        logger.warning("Performing NG calculations...")
        ng = treecorr.NGCorrelation(config,logger)
        ng.process(cat1,cat2)
        logger.info("Done NG calculation.")

        # The default ng_statistic is compensated _iff_ rand files are given.
        rg = None
        if len(rand1) == 0:
            if config.get('ng_statistic',None) == 'compensated':
                raise AttributeError("rand_files is required for ng_statistic = compensated")
        elif config.get('ng_statistic','compensated') == 'compensated':
            rg = treecorr.NGCorrelation(config,logger)
            rg.process(rand1,cat2)
            logger.info("Done RG calculation.")

        if 'ng_file_name' in config:
            ng.write(config['ng_file_name'], rg)
            logger.warning("Wrote NG correlation to %s",config['ng_file_name'])
        if 'nm_file_name' in config:
            ng.writeNMap(config['nm_file_name'], rg, m2_uform=config['m2_uform'])
            logger.warning("Wrote NMap values to %s",config['nm_file_name'])

        if 'norm_file_name' in config:
            gg = treecorr.GGCorrelation(config,logger)
            gg.process(cat2)
            logger.info("Done GG calculation for norm")
            dd = treecorr.NNCorrelation(config,logger)
            dd.process(cat1)
            logger.info("Done DD calculation for norm")
            rr = treecorr.NNCorrelation(config,logger)
            rr.process(rand1)
            logger.info("Done RR calculation for norm")
            dr = None
            if config['nn_statistic'] == 'compensated':
                dr = treecorr.NNCorrelation(config,logger)
                dr.process(cat1,rand1)
                logger.info("Done DR calculation for norm")
            ng.writeNorm(config['norm_file_name'],gg,dd,rr,dr,rg,m2_uform=config['m2_uform'])
            logger.warning("Wrote Norm values to %s",config['norm_file_name'])

    # Do NN correlation function if necessary
    if 'nn_file_name' in config:
        if len(rand1) == 0:
            raise AttributeError("rand_file_name is required for NN correlation")
        if len(cat2) > 0 and len(rand2) == 0:
            raise AttributeError("rand_file_name2 is required for NN cross-correlation")
        logger.warning("Performing DD calculations...")
        dd = treecorr.NNCorrelation(config,logger)
        dd.process(cat1,cat2)
        logger.info("Done DD calculations.")

        dr = None
        rd = None
        if len(cat2) == 0:
            logger.warning("Performing RR calculations...")
            rr = treecorr.NNCorrelation(config,logger)
            rr.process(rand1)
            logger.info("Done RR calculations.")

            if config['nn_statistic'] == 'compensated':
                logger.warning("Performing DR calculations...")
                dr = treecorr.NNCorrelation(config,logger)
                dr.process(cat1,rand1)
                logger.info("Done DR calculations.")
        else:
            logger.warning("Performing RR calculations...")
            rr = treecorr.NNCorrelation(config,logger)
            rr.process(rand1,rand2)
            logger.info("Done RR calculations.")

            if config['nn_statistic'] == 'compensated':
                logger.warning("Performing DR calculations...")
                dr = treecorr.NNCorrelation(config,logger)
                dr.process(cat1,rand2)
                logger.info("Done DR calculations.")
                rd = treecorr.NNCorrelation(config,logger)
                rd.process(rand1,cat2)
                logger.info("Done RD calculations.")
        dd.write(config['nn_file_name'],rr,dr,rd)
        logger.warning("Wrote NN correlation to %s",config['nn_file_name'])

    # Do KK correlation function if necessary
    if 'kk_file_name' in config:
        logger.warning("Performing KK calculations...")
        kk = treecorr.KKCorrelation(config,logger)
        kk.process(cat1,cat2)
        logger.info("Done KK calculations.")
        kk.write(config['kk_file_name'])
        logger.warning("Wrote KK correlation to %s",config['kk_file_name'])

    # Do NK correlation function if necessary
    if 'nk_file_name' in config:
        if len(cat2) == 0:
            raise AttributeError("file_name2 is required for nk correlation")
        logger.warning("Performing NK calculations...")
        nk = treecorr.NKCorrelation(config,logger)
        nk.process(cat1,cat2)
        logger.info("Done NK calculation.")

        rk = None
        if len(rand1) == 0:
            if config.get('nk_statistic',None) == 'compensated':
                raise AttributeError("rand_files is required for nk_statistic = compensated")
        elif config.get('nk_statistic','compensated') == 'compensated':
            rk = treecorr.NKCorrelation(config,logger)
            rk.process(rand1,cat2)
            logger.info("Done RK calculation.")

        nk.write(config['nk_file_name'], rk)
        logger.warning("Wrote NK correlation to %s",config['nk_file_name'])

    # Do KG correlation function if necessary
    if 'kg_file_name' in config:
        if len(cat2) == 0:
            raise AttributeError("file_name2 is required for kg correlation")
        logger.warning("Performing KG calculations...")
        kg = treecorr.KGCorrelation(config,logger)
        kg.process(cat1,cat2)
        logger.info("Done KG calculation.")
        kg.write(config['kg_file_name'])
        logger.warning("Wrote KG correlation to %s",config['kg_file_name'])
Example #22
    logging.info(f"Calculating for NSIDE = {nside}")
    rcscat = treecorr.Catalog("rcslens.fits", rcsconfig)
    szcat = treecorr.Catalog(f"szmaps_masked{maskno}_{nside}.fits", szconfig)
    m1cat = treecorr.Catalog("rcslens.fits", m1config)

    kg = treecorr.KGCorrelation(corrconfig,logger=logging.getLogger())
    kg.process(szcat, rcscat)   # Calculate the cross-correlation
    kg.write(f"crosscorr{maskno}_{nside}.result")

    nk = treecorr.NKCorrelation(corrconfig,logger=logging.getLogger())
    nk.process(szcat, m1cat)
    nk.write(f"calib{maskno}_{nside}.result")

    ny = treecorr.NKCorrelation(corrconfig,logger=logging.getLogger())
    ny.process(rcscat, szcat)
    ny.write(f"ycorr{maskno}_{nside}.result")
    
    ng = treecorr.NGCorrelation(corrconfig,logger=logging.getLogger())
    ng.process(rcscat, rcscat)
    ng.write(f"shearcorr{maskno}_{nside}.result")

    gg = treecorr.GGCorrelation(corrconfig,logger=logging.getLogger())
    gg.process(rcscat, rcscat)
    gg.write(f"shear_auto_corr{maskno}_{nside}.result")

    kk = treecorr.KKCorrelation(corrconfig,logger=logging.getLogger())
    kk.process(szcat, szcat)
    kk.write(f"sz_auto_corr{maskno}_{nside}.result")

    logging.info("DONE")
Example #23
def test_rlens():
    # Same as above, except use R_lens for separation.
    # Use gamma_t(r) = gamma0 exp(-R^2/2R0^2) around a bunch of foreground lenses.

    nlens = 100
    nsource = 200000
    gamma0 = 0.05
    R0 = 10.
    L = 50. * R0
    numpy.random.seed(8675309)
    xl = (numpy.random.random_sample(nlens)-0.5) * L  # -250 < x < 250
    zl = (numpy.random.random_sample(nlens)-0.5) * L  # -250 < z < 250
    yl = numpy.random.random_sample(nlens) * 4*L + 10*L  # 5000 < y < 7000
    rl = numpy.sqrt(xl**2 + yl**2 + zl**2)
    xs = (numpy.random.random_sample(nsource)-0.5) * L
    zs = (numpy.random.random_sample(nsource)-0.5) * L
    ys = numpy.random.random_sample(nsource) * 8*L + 160*L  # 80000 < y < 84000
    rs = numpy.sqrt(xs**2 + ys**2 + zs**2)
    g1 = numpy.zeros( (nsource,) )
    g2 = numpy.zeros( (nsource,) )
    bin_size = 0.1
    # min_sep is set so the first bin doesn't have 0 pairs.
    min_sep = 1.3*R0
    # max_sep can't be too large, since the measured value starts to have shape noise for larger
    # values of separation.  We're not adding any shape noise directly, but the shear from other
    # lenses is effectively a shape noise, and that comes to dominate the measurement above ~4R0.
    max_sep = 4.*R0
    nbins = int(numpy.ceil(numpy.log(max_sep/min_sep)/bin_size))
    true_gt = numpy.zeros( (nbins,) )
    true_npairs = numpy.zeros((nbins,), dtype=int)
    print('Making shear vectors')
    for x,y,z,r in zip(xl,yl,zl,rl):
        # Use |r1 x r2| = |r1| |r2| sin(theta)
        xcross = ys * z - zs * y
        ycross = zs * x - xs * z
        zcross = xs * y - ys * x
        sintheta = numpy.sqrt(xcross**2 + ycross**2 + zcross**2) / (rs * r)
        Rlens = 2. * r * numpy.sin(numpy.arcsin(sintheta)/2)

        gammat = gamma0 * numpy.exp(-0.5*Rlens**2/R0**2)
        # For the rotation, approximate that the x,z coords are approx the perpendicular plane.
        # So just normalize back to the unit sphere and do the 2d projection calculation.
        # It's not exactly right, but it should be good enough for this unit test.
        dx = xs/rs-x/r
        dz = zs/rs-z/r
        drsq = dx**2 + dz**2
        g1 += -gammat * (dx**2-dz**2)/drsq
        g2 += -gammat * (2.*dx*dz)/drsq
        index = numpy.floor( numpy.log(Rlens/min_sep) / bin_size).astype(int)
        mask = (index >= 0) & (index < nbins)
        numpy.add.at(true_gt, index[mask], gammat[mask])
        numpy.add.at(true_npairs, index[mask], 1)
    true_gt /= true_npairs

    # Start with bin_slop == 0.  With only 100 lenses, this still runs very fast.
    lens_cat = treecorr.Catalog(x=xl, y=yl, z=zl)
    source_cat = treecorr.Catalog(x=xs, y=ys, z=zs, g1=g1, g2=g2)
    ng0 = treecorr.NGCorrelation(bin_size=bin_size, min_sep=min_sep, max_sep=max_sep, verbose=1,
                                 metric='Rlens', bin_slop=0)
    ng0.process(lens_cat, source_cat)

    Rlens = ng0.meanr
    theory_gt = gamma0 * numpy.exp(-0.5*Rlens**2/R0**2)

    print('Results with bin_slop = 0:')
    print('ng.npairs = ',ng0.npairs)
    print('true_npairs = ',true_npairs)
    print('ng.xi = ',ng0.xi)
    print('true_gammat = ',true_gt)
    print('ratio = ',ng0.xi / true_gt)
    print('diff = ',ng0.xi - true_gt)
    print('max diff = ',max(abs(ng0.xi - true_gt)))
    assert max(abs(ng0.xi - true_gt)) < 2.e-6
    print('ng.xi_im = ',ng0.xi_im)
    assert max(abs(ng0.xi_im)) < 1.e-6

    print('ng.xi = ',ng0.xi)
    print('theory_gammat = ',theory_gt)
    print('ratio = ',ng0.xi / theory_gt)
    print('diff = ',ng0.xi - theory_gt)
    print('max diff = ',max(abs(ng0.xi - theory_gt)))
    assert max(abs(ng0.xi - theory_gt)) < 4.e-5

    # Now use a more normal value for bin_slop.
    ng1 = treecorr.NGCorrelation(bin_size=bin_size, min_sep=min_sep, max_sep=max_sep, verbose=1,
                                 metric='Rlens', bin_slop=0.5)
    ng1.process(lens_cat, source_cat)
    Rlens = ng1.meanr
    theory_gt = gamma0 * numpy.exp(-0.5*Rlens**2/R0**2)

    print('Results with bin_slop = 0.5')
    print('ng.npairs = ',ng1.npairs)
    print('ng.xi = ',ng1.xi)
    print('theory_gammat = ',theory_gt)
    print('ratio = ',ng1.xi / theory_gt)
    print('diff = ',ng1.xi - theory_gt)
    print('max diff = ',max(abs(ng1.xi - theory_gt)))
    assert max(abs(ng1.xi - theory_gt)) < 5.e-5
    print('ng.xi_im = ',ng1.xi_im)
    assert max(abs(ng1.xi_im)) < 3.e-6

    # Check that we get the same result using the corr2 executable:
    if __name__ == '__main__':
        lens_cat.write(os.path.join('data','ng_rlens_lens.dat'))
        source_cat.write(os.path.join('data','ng_rlens_source.dat'))
        import subprocess
        corr2_exe = get_script_name('corr2')
        p = subprocess.Popen( [corr2_exe,"ng_rlens.yaml"] )
        p.communicate()
        corr2_output = numpy.genfromtxt(os.path.join('output','ng_rlens.out'),names=True)
        print('ng.xi = ',ng1.xi)
        print('from corr2 output = ',corr2_output['gamT'])
        print('ratio = ',corr2_output['gamT']/ng1.xi)
        print('diff = ',corr2_output['gamT']-ng1.xi)
        numpy.testing.assert_almost_equal(corr2_output['gamT'], ng1.xi, decimal=6)
        numpy.testing.assert_almost_equal(corr2_output['gamX'], ng1.xi_im, decimal=6)

    # Repeat with the sources being given as RA/Dec only.
    ral, decl = treecorr.CelestialCoord.xyz_to_radec(xl,yl,zl)
    ras, decs = treecorr.CelestialCoord.xyz_to_radec(xs,ys,zs)
    lens_cat = treecorr.Catalog(ra=ral, dec=decl, ra_units='radians', dec_units='radians', r=rl)
    source_cat = treecorr.Catalog(ra=ras, dec=decs, ra_units='radians', dec_units='radians',
                                  g1=g1, g2=g2)

    # Again, start with bin_slop == 0.
    # This version should be identical to the 3D version.  When bin_slop != 0, it won't be
    # exactly identical, since the tree construction will have different decisions along the
    # way (since everything is at the same radius here), but the results are consistent.
    ng0s = treecorr.NGCorrelation(bin_size=bin_size, min_sep=min_sep, max_sep=max_sep, verbose=1,
                                  metric='Rlens', bin_slop=0)
    ng0s.process(lens_cat, source_cat)

    Rlens = ng0s.meanr
    theory_gt = gamma0 * numpy.exp(-0.5*Rlens**2/R0**2)

    print('Results when sources have no radius information, first bin_slop=0')
    print('ng.npairs = ',ng0s.npairs)
    print('true_npairs = ',true_npairs)
    print('ng.xi = ',ng0s.xi)
    print('true_gammat = ',true_gt)
    print('ratio = ',ng0s.xi / true_gt)
    print('diff = ',ng0s.xi - true_gt)
    print('max diff = ',max(abs(ng0s.xi - true_gt)))
    assert max(abs(ng0s.xi - true_gt)) < 2.e-6
    print('ng.xi_im = ',ng0s.xi_im)
    assert max(abs(ng0s.xi_im)) < 1.e-6

    print('ng.xi = ',ng0s.xi)
    print('theory_gammat = ',theory_gt)
    print('ratio = ',ng0s.xi / theory_gt)
    print('diff = ',ng0s.xi - theory_gt)
    print('max diff = ',max(abs(ng0s.xi - theory_gt)))
    assert max(abs(ng0s.xi - theory_gt)) < 4.e-5

    assert max(abs(ng0s.xi - ng0.xi)) < 1.e-7
    assert max(abs(ng0s.xi_im - ng0.xi_im)) < 1.e-7
    assert max(abs(ng0s.npairs - ng0.npairs)) < 1.e-7

    # Now use a more normal value for bin_slop.
    ng1s = treecorr.NGCorrelation(bin_size=bin_size, min_sep=min_sep, max_sep=max_sep, verbose=1,
                                  metric='Rlens', bin_slop=0.5)
    ng1s.process(lens_cat, source_cat)
    Rlens = ng1s.meanr
    theory_gt = gamma0 * numpy.exp(-0.5*Rlens**2/R0**2)

    print('Results with bin_slop = 0.5')
    print('ng.npairs = ',ng1s.npairs)
    print('ng.xi = ',ng1s.xi)
    print('theory_gammat = ',theory_gt)
    print('ratio = ',ng1s.xi / theory_gt)
    print('diff = ',ng1s.xi - theory_gt)
    print('max diff = ',max(abs(ng1s.xi - theory_gt)))
    assert max(abs(ng1s.xi - theory_gt)) < 5.e-5
    print('ng.xi_im = ',ng1s.xi_im)
    assert max(abs(ng1s.xi_im)) < 3.e-6
Example #24
    def xi_2pt(cata,
               catb=None,
               k=None,
               ga=None,
               gb=None,
               corr='GG',
               maska=None,
               maskb=None,
               wa=None,
               wb=None,
               ran=True,
               mock=False,
               erron=True,
               jkmask=None,
               label0='',
               plot=False):
        """
    This is a flexible convenience wrapper for interacting with treecorr via CatalogStore objects. Some basic examples are given in corr_tests() of the main testsuite.py. g1, g2 are corrected by c1, c2 if they are ellipticities and cat.bs is True. Correction by sensitivity or 1+m is applied if cat.bs is True. Weighting is applied if cat.wt is True. Other treecorr config properties are stored in the CatalogStore object; see catalog.py or config.py. Not all correlation types are fully integrated or tested. For example, only one kappa value is currently possible. Will be updated in future as useful.

    Use:

    :cata, catb:    CatalogStore - Must supply both cata, catb (can be same reference) if NG or NK correlation. Otherwise catb is optional.
    :k:             str - Array name in cata, catb to use for kappa correlation. 
    :ga, gb:        str - Array names for g1, g2 treecorr inputs. If None assume e1, e2.
    :corr:          str - Type of correlation for treecorr.
    :maska, maskb:  [bool] - Masking array to apply to input catalogs.
    :wa, wb:        [float] - Additional weights to apply after cat.w is used. Combined as e.g., w=sqrt(cat.w*wa).
    :ran:           bool - Use randoms in correlation calculation. If True, assumes cat.ran_ra, cat.ran_dec exist.
    :mock:          bool - If mock catalog from sims. Used when calculating covariances from sims, not currently migrated from SV code.
    :erron:         bool - Calculate jackknife or sim cov errors. If False, uses treecorr error outputs. Not currently migrated from SV code. When implemented requires cat.calc_err in ('jk', 'mock').
    :jkmask:        [bool] - For jk, mock cov calculation loop over regions/sims.
    :label0:        str - Additional (optional) label string used in some outputs.
    :plot:          bool - Plot output?

    Output (len cat.tbins):

    :theta:         [float] - Treecorr np.exp(meanlogr)
    :out:           ([float]x4) - Output of signal e.g., (xi+,xi-,xi+im,x-im). For correlations with only one xi output, (xi,0.,xi_im,0.).
    :err:           ([float]x4) - Same but for sqrt(var).
    :chi2:          ([float]x4) - Same but for chi^2 if using jk or sim covariance.

    """

        maska = catalog.CatalogMethods.check_mask(cata.coadd, maska)
        jkmask = catalog.CatalogMethods.check_mask(cata.coadd, jkmask)

        maska0 = maska & jkmask

        if wa is None:
            wa = np.ones(len(cata.coadd))

        e1, e2, w, ms = lin.linear_methods.get_lin_e_w_ms(cata,
                                                          xi=True,
                                                          mock=mock,
                                                          mask=maska0,
                                                          w1=wa)

        if catb is None:
            if corr not in ['GG', 'NN', 'KK']:
                raise UseError(
                    'Must supply both cata,catb for NG,NK correlations.')

        if ga is not None:
            e1 = getattr(cata, ga + '1')[maska]
            e2 = getattr(cata, ga + '2')[maska]
        else:
            ga = 'e'
        if catb is None:
            gb = ga

        if (corr == 'GG') | ((catb != None) & (corr == 'KG')):
            catxa = treecorr.Catalog(g1=e1,
                                     g2=e2,
                                     w=w,
                                     ra=cata.ra[maska0],
                                     dec=cata.dec[maska0],
                                     ra_units='deg',
                                     dec_units='deg')
            catma = treecorr.Catalog(k=ms,
                                     w=w,
                                     ra=cata.ra[maska0],
                                     dec=cata.dec[maska0],
                                     ra_units='deg',
                                     dec_units='deg')

        elif (corr == 'NN') | ((catb != None) & (corr in ['NG', 'NK'])):
            catxa = treecorr.Catalog(w=w,
                                     ra=cata.ra[maska0],
                                     dec=cata.dec[maska0],
                                     ra_units='deg',
                                     dec_units='deg')
            if ran:
                catra = treecorr.Catalog(w=w,
                                         ra=cata.ran_ra[maska0],
                                         dec=cata.ran_dec[maska0],
                                         ra_units='deg',
                                         dec_units='deg')

        elif corr == 'KK':
            if k is None:
                raise UseError('Must specify k for KK correlation.')
            if k not in dir(cata):
                raise UseError('Unknown k field specified.')
            catxa = treecorr.Catalog(k=getattr(cata, k)[maska0],
                                     w=w,
                                     ra=cata.ra[maska0],
                                     dec=cata.dec[maska0],
                                     ra_units='deg',
                                     dec_units='deg')

        if catb is not None:

            maskb = catalog.CatalogMethods.check_mask(catb.coadd, maskb)

            if wb is None:
                wb = np.ones(len(catb.coadd))

            e1, e2, w, ms = lin.linear_methods.get_lin_e_w_ms(catb,
                                                              xi=True,
                                                              mock=mock,
                                                              mask=maskb,
                                                              w1=wb)

            if gb is not None:
                e1 = getattr(catb, gb + '1')[maskb]
                e2 = getattr(catb, gb + '2')[maskb]
            else:
                gb = 'e'

            if corr in ['GG', 'NG', 'KG']:
                catxb = treecorr.Catalog(g1=e1,
                                         g2=e2,
                                         w=w,
                                         ra=catb.ra[maskb],
                                         dec=catb.dec[maskb],
                                         ra_units='deg',
                                         dec_units='deg')
                catmb = treecorr.Catalog(k=ms,
                                         w=w,
                                         ra=catb.ra[maskb],
                                         dec=catb.dec[maskb],
                                         ra_units='deg',
                                         dec_units='deg')
            elif corr == 'NN':
                catxb = treecorr.Catalog(w=w,
                                         ra=catb.ra[maskb],
                                         dec=catb.dec[maskb],
                                         ra_units='deg',
                                         dec_units='deg')
                if ran:
                    catrb = treecorr.Catalog(w=w,
                                             ra=catb.ran_ra[maskb],
                                             dec=catb.ran_dec[maskb],
                                             ra_units='deg',
                                             dec_units='deg')
            elif corr in ['KK', 'NK']:
                if k is None:
                    raise UseError('Must specify k for KK correlation.')
                if k not in dir(catb):
                    raise UseError('Unknown k field specified.')
                catxb = treecorr.Catalog(k=getattr(catb, k)[maskb],
                                         w=w,
                                         ra=catb.ra[maskb],
                                         dec=catb.dec[maskb],
                                         ra_units='deg',
                                         dec_units='deg')

        xim = None
        xip_im = None
        xim_im = None
        ximerr = None
        xiperr_im = None
        ximerr_im = None
        if corr == 'GG':
            gg = treecorr.GGCorrelation(nbins=cata.tbins,
                                        min_sep=cata.sep[0],
                                        max_sep=cata.sep[1],
                                        sep_units='arcmin',
                                        bin_slop=cata.slop,
                                        verbose=0)
            kk = treecorr.KKCorrelation(nbins=cata.tbins,
                                        min_sep=cata.sep[0],
                                        max_sep=cata.sep[1],
                                        sep_units='arcmin',
                                        bin_slop=cata.slop,
                                        verbose=0)
            if catb is None:
                gg.process(catxa)
                kk.process(catma)
            else:
                gg.process(catxa, catxb)
                kk.process(catma, catmb)

            xip = gg.xip / kk.xi
            xim = gg.xim / kk.xi
            xiperr = ximerr = np.sqrt(gg.varxi)
            xip_im = gg.xip_im / kk.xi
            xim_im = gg.xim_im / kk.xi
            theta = np.exp(gg.meanlogr)

        elif corr == 'NN':
            nn = treecorr.NNCorrelation(nbins=cata.tbins,
                                        min_sep=cata.sep[0],
                                        max_sep=cata.sep[1],
                                        sep_units='arcmin',
                                        bin_slop=cata.slop,
                                        verbose=0)
            if ran:
                nr = treecorr.NNCorrelation(nbins=cata.tbins,
                                            min_sep=cata.sep[0],
                                            max_sep=cata.sep[1],
                                            sep_units='arcmin',
                                            bin_slop=cata.slop,
                                            verbose=0)
                rr = treecorr.NNCorrelation(nbins=cata.tbins,
                                            min_sep=cata.sep[0],
                                            max_sep=cata.sep[1],
                                            sep_units='arcmin',
                                            bin_slop=cata.slop,
                                            verbose=0)

            if catb is None:
                nn.process(catxa)
                xip = nn.npairs
                xiperr = np.sqrt(nn.npairs)
                if ran:
                    nr.process(catxa, catra)
                    rr.process(catra)
                    xip, xiperr = nn.calculateXi(rr, nr)
                    xiperr = np.sqrt(xiperr)
            else:
                rn = treecorr.NNCorrelation(nbins=cata.tbins,
                                            min_sep=cata.sep[0],
                                            max_sep=cata.sep[1],
                                            sep_units='arcmin',
                                            bin_slop=cata.slop,
                                            verbose=0)
                nn.process(catxa, catxb)
                xip = nn.npairs
                xiperr = np.sqrt(nn.npairs)
                if ran:
                    nr.process(catxa, catrb)
                    rn.process(catra, catxb)
                    rr.process(catra, catrb)
                    xip, xiperr = nn.calculateXi(rr, nr, rn)
                    xiperr = np.sqrt(xiperr)
            theta = np.exp(nn.meanlogr)

        elif corr == 'KK':

            kk = treecorr.KKCorrelation(nbins=cata.tbins,
                                        min_sep=cata.sep[0],
                                        max_sep=cata.sep[1],
                                        sep_units='arcmin',
                                        bin_slop=cata.slop,
                                        verbose=0)
            if catb is None:
                kk.process(catxa)
            else:
                kk.process(catxa, catxb)
            xip = kk.xi
            xiperr = np.sqrt(kk.varxi)
            theta = np.exp(kk.meanlogr)

        elif corr == 'KG':

            kg = treecorr.KGCorrelation(nbins=cata.tbins,
                                        min_sep=cata.sep[0],
                                        max_sep=cata.sep[1],
                                        sep_units='arcmin',
                                        bin_slop=cata.slop,
                                        verbose=0)
            kk = treecorr.KKCorrelation(nbins=cata.tbins,
                                        min_sep=cata.sep[0],
                                        max_sep=cata.sep[1],
                                        sep_units='arcmin',
                                        bin_slop=cata.slop,
                                        verbose=0)
            kg.process(catxa, catxb)
            kk.process(catxa, catmb)
            xip = kg.xi / kk.xi
            xiperr = np.sqrt(kg.varxi)
            xip_im = kg.xi_im / kk.xi
            theta = np.exp(kg.meanlogr)

        elif corr == 'NG':

            ng = treecorr.NGCorrelation(nbins=cata.tbins,
                                        min_sep=cata.sep[0],
                                        max_sep=cata.sep[1],
                                        sep_units='arcmin',
                                        bin_slop=cata.slop,
                                        verbose=0)
            nk = treecorr.NKCorrelation(nbins=cata.tbins,
                                        min_sep=cata.sep[0],
                                        max_sep=cata.sep[1],
                                        sep_units='arcmin',
                                        bin_slop=cata.slop,
                                        verbose=0)
            ng.process(catxa, catxb)
            nk.process(catxa, catmb)
            xip = ng.xi / nk.xi
            xiperr = np.sqrt(ng.varxi)
            xip_im = ng.xi_im / nk.xi
            if ran:
                rg = treecorr.NGCorrelation(nbins=cata.tbins,
                                            min_sep=cata.sep[0],
                                            max_sep=cata.sep[1],
                                            sep_units='arcmin',
                                            bin_slop=cata.slop,
                                            verbose=0)
                rk = treecorr.NKCorrelation(nbins=cata.tbins,
                                            min_sep=cata.sep[0],
                                            max_sep=cata.sep[1],
                                            sep_units='arcmin',
                                            bin_slop=cata.slop,
                                            verbose=0)
                rg.process(catra, catxb)
                rk.process(catra, catmb)
                xip, xip_im, xiperr = ng.calculateXi(rg)
                tmpa, tmp = nk.calculateXi(rk)
                if np.sum(tmpa) == 0:
                    tmpa = np.ones(len(xip))
                xip /= tmpa
                xiperr = np.sqrt(xiperr)
                xip_im /= tmpa
            theta = np.exp(ng.meanlogr)

        elif corr == 'NK':

            nk = treecorr.NKCorrelation(nbins=cata.tbins,
                                        min_sep=cata.sep[0],
                                        max_sep=cata.sep[1],
                                        sep_units='arcmin',
                                        bin_slop=cata.slop,
                                        verbose=0)
            nk.process(catxa, catxb)
            xip = nk.xi
            xiperr = np.sqrt(nk.varxi)
            if ran:
                rk = treecorr.NKCorrelation(nbins=cata.tbins,
                                            min_sep=cata.sep[0],
                                            max_sep=cata.sep[1],
                                            sep_units='arcmin',
                                            bin_slop=cata.slop,
                                            verbose=0)
                rk.process(catra, catxb)
                xip, xiperr = nk.calculateXi(rk)
                xiperr = np.sqrt(xiperr)
            theta = np.exp(nk.meanlogr)

        out = [xip, xim, xip_im, xim_im]
        err = [xiperr, ximerr, xiperr, ximerr]
        chi2 = [0., 0., 0., 0.]

        if erron:
            kwargs = {
                'catb': catb,
                'k': k,
                'corr': corr,
                'maska': maska,
                'maskb': maskb,
                'wa': wa,
                'wb': wb,
                'ran': ran
            }
            if catb is None:
                if corr in ['KK', 'NK', 'KG']:
                    label = 'xi_2pt_' + cata.name + '_' + k + '_' + corr + '_' + label0
                else:
                    label = 'xi_2pt_' + cata.name + '_' + corr + '_' + label0
            else:
                if corr in ['KK', 'NK', 'KG']:
                    label = 'xi_2pt_' + cata.name + '-' + catb.name + '_' + k + '_' + corr + '_' + label0
                else:
                    label = 'xi_2pt_' + cata.name + '-' + catb.name + '_' + corr + '_' + label0
            if cata.calc_err == 'jk':
                err, chi2 = jackknife_methods.jk(cata, xi_2pt.xi_2pt,
                                                 [xip, xim, xip_im, xim_im],
                                                 label, **kwargs)
            elif cata.calc_err == 'mock':
                # Legacy mock-covariance path: relies on names (cat, ggp, ggm, ce, cb,
                # mask, w, cosebi, parallel) defined elsewhere in the original module.
                ggperr, ggmerr, chi2p, chi2m, ceerr, cberr, cechi2, cbchi2 = BCC_Methods.jk_iter_xi(
                    cat,
                    ggp,
                    ggm,
                    ce,
                    cb,
                    mask,
                    w,
                    cosebi=cosebi,
                    parallel=parallel)

        if plot:
            fig.plot_methods.fig_create_xi(cata, corr, theta, out, err, k, ga,
                                           gb)

        return theta, out, err, chi2
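# Usage sketch (not part of the original module): a self-contained illustration of the
# NG + random-point-subtraction pattern implemented in the 'NG' branch above, using
# synthetic catalogs.  Catalog sizes, coordinate ranges, and bin choices are arbitrary
# assumptions made only for this sketch.
import numpy as np
import treecorr

rng = np.random.RandomState(42)
nlens, nsource, nrand = 500, 20000, 5000
lens = treecorr.Catalog(ra=rng.uniform(0., 5., nlens), dec=rng.uniform(-2.5, 2.5, nlens),
                        ra_units='deg', dec_units='deg')
src = treecorr.Catalog(ra=rng.uniform(0., 5., nsource), dec=rng.uniform(-2.5, 2.5, nsource),
                       g1=rng.normal(0., 0.2, nsource), g2=rng.normal(0., 0.2, nsource),
                       ra_units='deg', dec_units='deg')
rand = treecorr.Catalog(ra=rng.uniform(0., 5., nrand), dec=rng.uniform(-2.5, 2.5, nrand),
                        ra_units='deg', dec_units='deg')

ng = treecorr.NGCorrelation(nbins=10, min_sep=1., max_sep=60., sep_units='arcmin')
rg = treecorr.NGCorrelation(nbins=10, min_sep=1., max_sep=60., sep_units='arcmin')
ng.process(lens, src)     # lens x source
rg.process(rand, src)     # randoms x source
gammat, gammax, varxi = ng.calculateXi(rg=rg)   # random-subtracted tangential/cross shear
theta = np.exp(ng.meanlogr)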
Example No. 25
0
def test_rlens_bkg():
    # Same as above, except limit the sources to be in the background of the lens.

    nlens = 100
    nsource = 200000
    gamma0 = 0.05
    R0 = 10.
    L = 50. * R0
    numpy.random.seed(8675309)
    xl = (numpy.random.random_sample(nlens)-0.5) * L  # -250 < x < 250
    zl = (numpy.random.random_sample(nlens)-0.5) * L  # -250 < z < 250
    yl = numpy.random.random_sample(nlens) * 4*L + 10*L  # 5000 < y < 7000
    rl = numpy.sqrt(xl**2 + yl**2 + zl**2)
    xs = (numpy.random.random_sample(nsource)-0.5) * L
    zs = (numpy.random.random_sample(nsource)-0.5) * L
    ys = numpy.random.random_sample(nsource) * 12*L + 8*L  # 4000 < y < 10000
    rs = numpy.sqrt(xs**2 + ys**2 + zs**2)
    print('xl = ',numpy.min(xl),numpy.max(xl))
    print('yl = ',numpy.min(yl),numpy.max(yl))
    print('zl = ',numpy.min(zl),numpy.max(zl))
    print('xs = ',numpy.min(xs),numpy.max(xs))
    print('ys = ',numpy.min(ys),numpy.max(ys))
    print('zs = ',numpy.min(zs),numpy.max(zs))
    g1 = numpy.zeros( (nsource,) )
    g2 = numpy.zeros( (nsource,) )
    bin_size = 0.1
    # min_sep is set so the first bin doesn't have 0 pairs.
    min_sep = 1.3*R0
    # max_sep can't be too large, since the measured value starts to have shape noise for larger
    # values of separation.  We're not adding any shape noise directly, but the shear from other
    # lenses is effectively a shape noise, and that comes to dominate the measurement above ~4R0.
    max_sep = 4.*R0
    nbins = int(numpy.ceil(numpy.log(max_sep/min_sep)/bin_size))
    print('Making shear vectors')
    for x,y,z,r in zip(xl,yl,zl,rl):
        # This time, only give the true shear to the background galaxies.
        bkg = (rs > r)

        # Use |r1 x r2| = |r1| |r2| sin(theta); a standalone numeric check of this chord
        # construction is sketched after this test.
        xcross = ys[bkg] * z - zs[bkg] * y
        ycross = zs[bkg] * x - xs[bkg] * z
        zcross = xs[bkg] * y - ys[bkg] * x
        sintheta = numpy.sqrt(xcross**2 + ycross**2 + zcross**2) / (rs[bkg] * r)
        Rlens = 2. * r * numpy.sin(numpy.arcsin(sintheta)/2)

        gammat = gamma0 * numpy.exp(-0.5*Rlens**2/R0**2)
        # For the rotation, approximate that the x,z coords are approx the perpendicular plane.
        # So just normalize back to the unit sphere and do the 2d projection calculation.
        # It's not exactly right, but it should be good enough for this unit test.
        dx = (xs/rs)[bkg]-x/r
        dz = (zs/rs)[bkg]-z/r
        drsq = dx**2 + dz**2

        g1[bkg] += -gammat * (dx**2-dz**2)/drsq
        g2[bkg] += -gammat * (2.*dx*dz)/drsq

    # Slight subtlety in this test vs the previous one.  We need to build up the full g1,g2
    # arrays first before calculating the true_gt value, since we need to include the background
    # galaxies for each lens regardless of whether they had signal or not.
    true_gt = numpy.zeros( (nbins,) )
    true_npairs = numpy.zeros((nbins,), dtype=int)
    for x,y,z,r in zip(xl,yl,zl,rl):
        # Use |r1 x r2| = |r1| |r2| sin(theta)
        xcross = ys * z - zs * y
        ycross = zs * x - xs * z
        zcross = xs * y - ys * x
        sintheta = numpy.sqrt(xcross**2 + ycross**2 + zcross**2) / (rs * r)
        Rlens = 2. * r * numpy.sin(numpy.arcsin(sintheta)/2)
        dx = xs/rs-x/r
        dz = zs/rs-z/r
        drsq = dx**2 + dz**2
        gt = -g1 * (dx**2-dz**2)/drsq - g2 * (2.*dx*dz)/drsq
        bkg = (rs > r)
        index = numpy.floor( numpy.log(Rlens/min_sep) / bin_size).astype(int)
        mask = (index >= 0) & (index < nbins) & bkg
        numpy.add.at(true_gt, index[mask], gt[mask])
        numpy.add.at(true_npairs, index[mask], 1)

    true_gt /= true_npairs

    # Start with bin_slop == 0.  With only 100 lenses, this still runs very fast.
    lens_cat = treecorr.Catalog(x=xl, y=yl, z=zl)
    source_cat = treecorr.Catalog(x=xs, y=ys, z=zs, g1=g1, g2=g2)
    ng0 = treecorr.NGCorrelation(bin_size=bin_size, min_sep=min_sep, max_sep=max_sep, verbose=1,
                                 metric='Rlens', bin_slop=0, min_rpar=0)
    ng0.process(lens_cat, source_cat)

    Rlens = ng0.meanr
    theory_gt = gamma0 * numpy.exp(-0.5*Rlens**2/R0**2)

    print('Results with bin_slop = 0:')
    print('ng.npairs = ',ng0.npairs)
    print('true_npairs = ',true_npairs)
    print('ng.xi = ',ng0.xi)
    print('true_gammat = ',true_gt)
    print('ratio = ',ng0.xi / true_gt)
    print('diff = ',ng0.xi - true_gt)
    print('max diff = ',max(abs(ng0.xi - true_gt)))
    assert max(abs(ng0.xi - true_gt)) < 2.e-6

    print('ng.xi = ',ng0.xi)
    print('theory_gammat = ',theory_gt)
    print('ratio = ',ng0.xi / theory_gt)
    print('diff = ',ng0.xi - theory_gt)
    print('max diff = ',max(abs(ng0.xi - theory_gt)))
    assert max(abs(ng0.xi - theory_gt)) < 1.e-3
    print('ng.xi_im = ',ng0.xi_im)
    assert max(abs(ng0.xi_im)) < 1.e-3

    # Without min_rpar, this should fail.
    lens_cat = treecorr.Catalog(x=xl, y=yl, z=zl)
    source_cat = treecorr.Catalog(x=xs, y=ys, z=zs, g1=g1, g2=g2)
    ng0 = treecorr.NGCorrelation(bin_size=bin_size, min_sep=min_sep, max_sep=max_sep, verbose=1,
                                 metric='Rlens', bin_slop=0)
    ng0.process(lens_cat, source_cat)
    Rlens = ng0.meanr

    print('Results without min_rpar')
    print('ng.xi = ',ng0.xi)
    print('true_gammat = ',true_gt)
    print('max diff = ',max(abs(ng0.xi - true_gt)))
    assert max(abs(ng0.xi - true_gt)) > 5.e-3

    # Now use a more normal value for bin_slop.
    ng1 = treecorr.NGCorrelation(bin_size=bin_size, min_sep=min_sep, max_sep=max_sep, verbose=1,
                                 metric='Rlens', bin_slop=0.5, min_rpar=0)
    ng1.process(lens_cat, source_cat)
    Rlens = ng1.meanr
    theory_gt = gamma0 * numpy.exp(-0.5*Rlens**2/R0**2)

    print('Results with bin_slop = 0.5')
    print('ng.npairs = ',ng1.npairs)
    print('ng.xi = ',ng1.xi)
    print('theory_gammat = ',theory_gt)
    print('ratio = ',ng1.xi / theory_gt)
    print('diff = ',ng1.xi - theory_gt)
    print('max diff = ',max(abs(ng1.xi - theory_gt)))
    assert max(abs(ng1.xi - theory_gt)) < 1.e-3
    print('ng.xi_im = ',ng1.xi_im)
    assert max(abs(ng1.xi_im)) < 1.e-3

    # Check that we get the same result using the corr2 executable:
    if __name__ == '__main__':
        lens_cat.write(os.path.join('data','ng_rlens_bkg_lens.dat'))
        source_cat.write(os.path.join('data','ng_rlens_bkg_source.dat'))
        import subprocess
        corr2_exe = get_script_name('corr2')
        p = subprocess.Popen( [corr2_exe,"ng_rlens_bkg.yaml"] )
        p.communicate()
        corr2_output = numpy.genfromtxt(os.path.join('output','ng_rlens_bkg.out'),names=True)
        print('ng.xi = ',ng1.xi)
        print('from corr2 output = ',corr2_output['gamT'])
        print('ratio = ',corr2_output['gamT']/ng1.xi)
        print('diff = ',corr2_output['gamT']-ng1.xi)
        numpy.testing.assert_almost_equal(corr2_output['gamT'], ng1.xi, decimal=6)
        numpy.testing.assert_almost_equal(corr2_output['gamX'], ng1.xi_im, decimal=6)
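# Standalone numeric check (not part of the original test) of the chord construction used
# above: for lens position vector Lvec and source position vector Svec,
# sin(theta) = |Lvec x Svec| / (|Lvec||Svec|) and Rlens = 2 |Lvec| sin(theta/2).
# The two positions below are arbitrary, chosen only for this check.
import numpy

Lvec = numpy.array([3., 4000., 5.])      # hypothetical lens position
Svec = numpy.array([10., 7000., -2.])    # hypothetical source position
r_l = numpy.sqrt(numpy.sum(Lvec**2))
r_s = numpy.sqrt(numpy.sum(Svec**2))
sintheta = numpy.sqrt(numpy.sum(numpy.cross(Lvec, Svec)**2)) / (r_l * r_s)
Rlens_chord = 2. * r_l * numpy.sin(numpy.arcsin(sintheta) / 2.)

# Cross-check against the opening angle obtained from the dot product.
theta = numpy.arccos(numpy.dot(Lvec, Svec) / (r_l * r_s))
assert abs(Rlens_chord - 2. * r_l * numpy.sin(theta / 2.)) < 1.e-6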
Example No. 26
0
def xshear_lens_jk(zl1, zl2, jk_label):

    source_file = h5py.File("source_zb_" + str(zl2 + 0.1) + "_0.9.h5", "r")
    source_ra = source_file["ra"][:]
    source_dec = source_file["dec"][:]
    source_z = source_file["zb"][:]
    source_e1 = source_file["e1"][:]
    source_e2 = source_file["e2"][:]
    source_w = source_file["w"][:]
    source_size = source_file["snr"][:]  # note: the 'snr' column is used for the size split below
    size_mask = source_size > np.median(source_size)
    source_file.close()

    #### LARGE GALAXIES ####

    source_cat = treecorr.Catalog(x=source_ra[size_mask],
                                  y=source_dec[size_mask],
                                  g1=source_e1[size_mask],
                                  g2=-1. * source_e2[size_mask],
                                  w=source_w[size_mask],
                                  x_units='degree',
                                  y_units='degree')

    mask_z_lens = (lens_z_jk > zl1) & (lens_z_jk < zl2)
    lens_mask = labels_lens_jk == jk_label
    lens_mask = (~lens_mask) & (mask_z_lens)

    lens_cat = treecorr.Catalog(x=lens_ra_jk[lens_mask],
                                y=lens_dec_jk[lens_mask],
                                x_units='degree',
                                y_units='degree')

    ng = treecorr.NGCorrelation(nbins=25,
                                min_sep=0.3,
                                max_sep=300,
                                sep_units='arcmin',
                                verbose=1)
    ng.process(lens_cat, source_cat)
    r_lens_h, xt_lens_h = ng.meanr, ng.xi

    random_mask = labels_random_jk == jk_label
    random_cat = treecorr.Catalog(x=random_ra_jk[~random_mask],
                                  y=random_dec_jk[~random_mask],
                                  x_units='degree',
                                  y_units='degree')

    ng = treecorr.NGCorrelation(nbins=25,
                                min_sep=0.3,
                                max_sep=300,
                                sep_units='arcmin',
                                verbose=1)
    ng.process(random_cat, source_cat)
    r_rand_h, xt_rand_h = ng.meanr, ng.xi

    #### SMALL GALAXIES ####

    source_cat = treecorr.Catalog(x=source_ra[~size_mask],
                                  y=source_dec[~size_mask],
                                  g1=source_e1[~size_mask],
                                  g2=-1. * source_e2[~size_mask],
                                  w=source_w[~size_mask],
                                  x_units='degree',
                                  y_units='degree')

    ng = treecorr.NGCorrelation(nbins=25,
                                min_sep=0.3,
                                max_sep=300,
                                sep_units='arcmin',
                                verbose=1)
    ng.process(lens_cat, source_cat)
    r_lens_l, xt_lens_l = ng.meanr, ng.xi

    ng = treecorr.NGCorrelation(nbins=25,
                                min_sep=0.3,
                                max_sep=300,
                                sep_units='arcmin',
                                verbose=1)
    ng.process(random_cat, source_cat)
    r_rand_l, xt_rand_l = ng.meanr, ng.xi

    return r_lens_h, xt_lens_h, r_rand_h, xt_rand_h, r_lens_l, xt_lens_l, r_rand_l, xt_rand_l
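# Hypothetical driver (not in the original source) for the function above: loop over
# jackknife regions, form the random-subtracted tangential shear for the high-SNR split,
# and build a delete-one jackknife covariance.  The number of regions n_jk and the lens
# redshift cuts (0.2, 0.4) are assumptions; the function also relies on the module-level
# lens/random arrays used above.
import numpy as np

n_jk = 100  # assumed number of jackknife regions
signals = []
for jk_label in range(n_jk):
    (r_lens_h, xt_lens_h, r_rand_h, xt_rand_h,
     r_lens_l, xt_lens_l, r_rand_l, xt_rand_l) = xshear_lens_jk(0.2, 0.4, jk_label)
    signals.append(xt_lens_h - xt_rand_h)   # random-point subtraction
signals = np.array(signals)

mean_signal = signals.mean(axis=0)
# Delete-one jackknife covariance: (N-1)/N * sum_i (x_i - xbar)(x_i - xbar)^T
dev = signals - mean_signal
cov = (n_jk - 1.) / n_jk * dev.T.dot(dev)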
Example No. 27
0
def test_haloellip():
    """This is similar to the Clampitt halo ellipticity measurement, but using counts for the
    background galaxies rather than shears.

    w_aligned = Sum_i (w_i * cos(2theta)) / Sum_i (w_i)
    w_cross = Sum_i (w_i * sin(2theta)) / Sum_i (w_i)

    where theta is measured w.r.t. the coordinate system where the halo ellipticity
    is along the x-axis.  Converting this to complex notation, we obtain:

    w_a - i w_c = < exp(-2itheta) >
                = < exp(2iphi) exp(-2i(theta+phi)) >
                = < ehalo exp(-2i(theta+phi)) >

    where ehalo = exp(2iphi) is the unit-normalized shape of the halo in the normal world
    coordinate system.  Note that the combination theta+phi is the angle between the line joining
    the two points and the E-W coordinate, which means that

    w_a - i w_c = -gamma_t(n_bg, ehalo)

    i.e., the reverse of the usual galaxy-galaxy lensing order: N is the background galaxies
    and G is the halo shapes (normalized to have |ehalo| = 1).  A small numeric check of the
    expected signal follows the test below.
    """

    nhalo = 10
    nsource = 1000000  # sources per halo
    ntot = nsource * nhalo
    L = 100000.  # The side length in which the halos are placed
    R = 10.      # The (rms) radius of the associated sources from the halos
                 # In this case, we want L >> R so that most sources are only associated
                 # with the one halo we used for assigning its shear value.

    # Lenses are randomly located with random shapes.
    numpy.random.seed(86753099)
    halo_g1 = numpy.random.normal(0., 0.3, (nhalo,))
    halo_g2 = numpy.random.normal(0., 0.3, (nhalo,))
    halo_g = halo_g1 + 1j * halo_g2
    # The interpretation is simpler if they all have the same |g|, so just make them all 0.3.
    halo_g *= 0.3 / numpy.abs(halo_g)
    halo_absg = numpy.abs(halo_g)
    halo_x = (numpy.random.random_sample(nhalo)-0.5) * L
    halo_y = (numpy.random.random_sample(nhalo)-0.5) * L
    print('Made halos',len(halo_x))

    # For the sources, place nsource galaxies around each halo with the expected azimuthal pattern
    source_x = numpy.empty(ntot)
    source_y = numpy.empty(ntot)
    for i in range(nhalo):
        absg = halo_absg[i]
        # First position the sources in a Gaussian cloud around the halo center.
        dx = numpy.random.normal(0., 10., (nsource,))
        dy = numpy.random.normal(0., 10., (nsource,))
        r = numpy.sqrt(dx*dx + dy*dy)
        t = numpy.arctan2(dy,dx)
        # z = dx + idy = r exp(it)

        # Reposition the sources azimuthally so p(theta) ~ 1 + |g_halo| * cos(2 theta)
        # Currently t has p(t) = 1/2pi.
        # Let u be the new azimuthal angle with p(u) = (1/2pi) (1 + |g| cos(2u))
        # p(u) = |dt/du| p(t)
        # 1 + |g| cos(2u) = dt/du
        # t = int( (1 + |g| cos(2u)) du = u + 1/2 |g| sin(2u)

        # This doesn't have an analytic solution, but a few iterations of Newton-Raphson
        # should work well enough.
        u = t.copy()
        for _ in range(4):  # Newton-Raphson iterations (avoid shadowing the halo index i)
            u -= (u - t + 0.5 * absg * numpy.sin(2.*u)) / (1. + absg * numpy.cos(2.*u))

        z = r * numpy.exp(1j * u)
        exp2iphi = z**2 / numpy.abs(z)**2

        # Now rotate the whole system by the phase of the halo ellipticity.
        exp2ialpha = halo_g[i] / absg
        expialpha = numpy.sqrt(exp2ialpha)
        z *= expialpha
        # Place the source galaxies at this dx,dy with this shape
        source_x[i*nsource: (i+1)*nsource] = halo_x[i] + z.real
        source_y[i*nsource: (i+1)*nsource] = halo_y[i] + z.imag
    print('Made sources',len(source_x))

    source_cat = treecorr.Catalog(x=source_x, y=source_y)
    # Big fat bin to increase S/N.  The way I set it up, the signal is the same in all
    # radial bins, so just combine them together for higher S/N.
    ng = treecorr.NGCorrelation(min_sep=5, max_sep=10, nbins=1)
    halo_mean_absg = numpy.mean(halo_absg)
    print('mean_absg = ',halo_mean_absg)

    # First the original version where we only use the phase of the halo ellipticities:
    halo_cat1 = treecorr.Catalog(x=halo_x, y=halo_y,
                                 g1=halo_g.real/halo_absg, g2=halo_g.imag/halo_absg)
    ng.process(source_cat, halo_cat1)
    print('ng.npairs = ',ng.npairs)
    print('ng.xi = ',ng.xi)
    # The expected signal is
    # E(ng) = - < int( p(t) cos(2t) ) >
    #       = - < int( (1 + e_halo cos(2t)) cos(2t) ) >
    #       = -0.5 <e_halo>
    print('expected signal = ',-0.5 * halo_mean_absg)
    # These tests don't quite work at the 1% level of accuracy, but a few percent does.
    # Checking to 2 decimal places on an expected value of ~0.15 corresponds to that tolerance.
    numpy.testing.assert_almost_equal(ng.xi, -0.5 * halo_mean_absg, decimal=2)

    # Next weight the halos by their absg.
    halo_cat2 = treecorr.Catalog(x=halo_x, y=halo_y, w=halo_absg,
                                 g1=halo_g.real/halo_absg, g2=halo_g.imag/halo_absg)
    ng.process(source_cat, halo_cat2)
    print('ng.xi = ',ng.xi)
    # Now the net signal is
    # sum(w * p*cos(2t)) / sum(w)
    # = 0.5 * <absg^2> / <absg>
    halo_mean_gsq = numpy.mean(halo_absg**2)
    print('expected signal = ',0.5 * halo_mean_gsq / halo_mean_absg)
    numpy.testing.assert_almost_equal(ng.xi, -0.5 * halo_mean_gsq / halo_mean_absg, decimal=2)

    # Finally, use the unnormalized halo_g for the halo ellipticities
    halo_cat3 = treecorr.Catalog(x=halo_x, y=halo_y, g1=halo_g.real, g2=halo_g.imag)
    ng.process(source_cat, halo_cat3)
    print('ng.xi = ',ng.xi)
    # Now the net signal is
    # sum(absg * p*cos(2t)) / N
    # = 0.5 * <absg^2>
    print('expected signal = ',0.5 * halo_mean_gsq)
    numpy.testing.assert_almost_equal(ng.xi, -0.5 * halo_mean_gsq, decimal=2)
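# Small standalone check (not in the original test) of the expectation used above:
# if the azimuthal angle u is distributed as p(u) = (1/2pi)(1 + |g| cos(2u)), then
# <cos(2u)> = |g|/2, which is why the measured NG signal is -0.5 * <|g_halo|>.
import numpy

absg = 0.3
u = numpy.linspace(-numpy.pi, numpy.pi, 200000, endpoint=False)
du = u[1] - u[0]
p = (1. + absg * numpy.cos(2. * u)) / (2. * numpy.pi)
mean_cos2u = numpy.sum(p * numpy.cos(2. * u)) * du
print('numerical <cos 2u> = ', mean_cos2u, ' expected = ', absg / 2.)
assert abs(mean_cos2u - absg / 2.) < 1.e-6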
Example No. 28
0
def dsigma(lens, source):

    source_ra = source[:, 0]
    source_dec = source[:, 1]
    source_z = source[:, -1]
    source_e1 = -1. * source[:, 2]
    source_e2 = source[:, 3]
    source_w = source[:, 4]

    lens_z = lens[:, 4]
    lens_ra = lens[:, 3]
    lens_dec = lens[:, 2]

    lens_Dc = cosmo.comoving_distance(np.median(lens_z))
    source_Dc = cosmo.comoving_distance(source_z)
    lens_Da = cosmo.angular_diameter_distance(np.median(lens_z))

    source_Dc = source_Dc * 1e6
    lens_Dc = lens_Dc * 1e6
    lens_Da = lens_Da * 1e6

    DlsoDs = (source_Dc - lens_Dc) / source_Dc
    Sigma_crit = (c.value**2) / (4 * np.pi * G.value) * 1 / (lens_Da * DlsoDs)

    lens_cat = treecorr.Catalog(x=lens_ra,
                                y=lens_dec,
                                x_units='degree',
                                y_units='degree')
    source_cat = treecorr.Catalog(x=source_ra,
                                  y=source_dec,
                                  g1=source_e1 * Sigma_crit,
                                  g2=source_e2 * Sigma_crit,
                                  w=source_w * Sigma_crit**-2.,
                                  x_units='degree',
                                  y_units='degree')

    ng = treecorr.NGCorrelation(nbins=5,
                                min_sep=0.2,
                                max_sep=6,
                                sep_units='arcmin',
                                verbose=1)
    ng.process(lens_cat, source_cat)
    r, xi_t, xi_x, w, npairs = ng.meanr, ng.xi, ng.xi_im, ng.weight, ng.npairs

    print "done with lens source cross-correlation"

    # rand_z and rname (the random catalog) are assumed to be defined at module level.
    lens_Dc = cosmo.comoving_distance(np.median(rand_z))
    lens_Da = cosmo.angular_diameter_distance(np.median(rand_z))

    # source_Dc was already converted to pc above; only the new lens distances need converting.
    lens_Dc = lens_Dc * 1e6
    lens_Da = lens_Da * 1e6

    DlsoDs = (source_Dc - lens_Dc) / source_Dc
    Sigma_crit = (c.value**2) / (4 * np.pi * G.value) * 1 / (lens_Da * DlsoDs)

    random_cat = treecorr.Catalog(x=rname[:, 2],
                                  y=rname[:, 1],
                                  x_units='degree',
                                  y_units='degree')
    source_cat = treecorr.Catalog(x=source_ra,
                                  y=source_dec,
                                  g1=source_e1 * Sigma_crit,
                                  g2=source_e2 * Sigma_crit,
                                  w=source_w * Sigma_crit**-2.,
                                  x_units='degree',
                                  y_units='degree')
    ng = treecorr.NGCorrelation(nbins=5,
                                min_sep=0.2,
                                max_sep=6,
                                sep_units='arcmin',
                                verbose=1)
    ng.process(random_cat, source_cat)
    rr, xi_tr, xi_xr, wr = ng.meanr, ng.xi, ng.xi_im, ng.weight

    return r, xi_t, xi_x, w, xi_tr, xi_xr, wr, npairs, np.mean(Sigma_crit)
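# Units-explicit sketch (an illustration, not the original pipeline) of the critical
# surface density used above, Sigma_crit = c^2 / (4 pi G) * D_s / (D_l * D_ls), here with
# angular-diameter distances and astropy units.  The cosmology and the lens/source
# redshifts are assumptions made only for this sketch.
import numpy as np
import astropy.units as u
from astropy.constants import c, G
from astropy.cosmology import Planck15 as cosmo

zl, zs = 0.3, 0.8                      # hypothetical lens / source redshifts
Dl = cosmo.angular_diameter_distance(zl)
Ds = cosmo.angular_diameter_distance(zs)
Dls = cosmo.angular_diameter_distance_z1z2(zl, zs)

sigma_crit = (c**2 / (4. * np.pi * G) * Ds / (Dl * Dls)).to(u.Msun / u.pc**2)
print('Sigma_crit =', sigma_crit)      # of order a few thousand Msun / pc^2 for these redshifts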
Example No. 29
0
                                      g2=e2)

        troughcat = treecorr.Catalog(ra=RA,
                                     dec=DEC,
                                     ra_units='deg',
                                     dec_units='deg',
                                     w=weights)

        config = {
            'min_sep': Rarcmin,
            'max_sep': Rarcmax,
            'nbins': Nbins,
            'sep_units': 'arcmin',
            'verbose': 2
        }
        ng = treecorr.NGCorrelation(config)
        ng.process(troughcat, galcat)  # Compute the cross-correlation.

        output_temp = 'temp_treecor.txt'
        ng.write(output_temp)  # Write out to a file.
        shearfile = np.loadtxt(output_temp).T

        # NG output columns: R_nom, <R>, <logR>, gamT, gamX, sigma, weight, npairs;
        # 'sigma' is already an error (not a variance), so no sqrt is taken here.
        Rbins, gamma_t, gamma_x, gamma_error, Nsrc = \
            shearfile[0], shearfile[3], shearfile[4], shearfile[5], shearfile[7]
        """
        if 'pc' in Runit:
            # Translate to comoving ESD
            Rbins = Rbins*(1+troughZ)
            gamma_t, gamma_x, gamma_error = np.array([gamma_t, gamma_x, gamma_error])/(1+troughZ)**2
        """
Example No. 30
0
    def map(self, mapunit):

        if self.gt is None:
            self.gt = np.zeros((self.njack, self.nabins, self.nmbins,
                                self.nmbins1, self.nzbins * self.nzbins1))
            self.gt_rand = np.zeros((self.njack, self.nabins, self.nmbins,
                                     self.nmbins1, self.nzbins * self.nzbins1))
            self.weights = np.zeros((self.njack, self.nabins, self.nmbins,
                                     self.nmbins1, self.nzbins * self.nzbins1))
            self.weights_rand = np.zeros(
                (self.njack, self.nabins, self.nmbins, self.nmbins1,
                 self.nzbins * self.nzbins1))

            self.varg = np.zeros((self.njack, self.nmbins, self.nmbins1,
                                  self.nzbins * self.nzbins1))

            self.nzd1 = np.zeros(
                (self.njack, 200, self.nmbins, 1, self.nzbins))

            self.nzd2 = np.zeros(
                (self.njack, 200, 1, self.nmbins1, self.nzbins1))

            self.nd1 = np.zeros((self.njack, self.nmbins, self.nmbins1,
                                 self.nzbins * self.nzbins1))
            self.nd2 = np.zeros((self.njack, self.nmbins, self.nmbins1,
                                 self.nzbins * self.nzbins1))

        for i in range(self.nzbins):
            print('Finding redshift indices')

            zlidx = mapunit['redshift'].searchsorted(self.zbins[i])
            zhidx = mapunit['redshift'].searchsorted(self.zbins[i + 1])

            for i1 in range(self.nzbins1):
                if self.same_zbins:
                    zlidx1 = mapunit['redshift1'].searchsorted(self.zbins[i])
                    zhidx1 = mapunit['redshift1'].searchsorted(self.zbins[i +
                                                                          1])
                else:
                    zlidx1 = mapunit['redshift1'].searchsorted(self.zbins1[i1])
                    zhidx1 = mapunit['redshift1'].searchsorted(self.zbins1[i1 +
                                                                           1])

                if self.rbins is not None:
                    if self.same_zbins:
                        zm = (self.zbins[i] + self.zbins[i + 1]) / 2
                    else:
                        zm = (self.zbins1[i] + self.zbins1[i + 1]) / 2

                    self.abins[:, i] = self.computeAngularBinsFromRadii(
                        self.rbins, zm)
                    if self.sep_units == 'arcmin':
                        self.abins[:, i] *= 60

                if (zlidx == zhidx):
                    print("No galaxies in redshift bin {0} to {1}".format(
                        self.zbins[i], self.zbins[i + 1]))
                    print('z: {}'.format(mapunit['redshift']))

                    print("Min and max z: {0}, {1}".format(
                        np.min(mapunit['redshift']),
                        np.max(mapunit['redshift'])))
                    continue
                elif (zlidx1 == zhidx1):
                    print("No galaxies in redshift bin {0} to {1}".format(
                        self.zbins1[i], self.zbins1[i + 1]))
                    print('z1: {}'.format(mapunit['redshift1']))

                    print("Min and max z1: {0}, {1}".format(
                        np.min(mapunit['redshift1']),
                        np.max(mapunit['redshift1'])))
                    continue

                for li, j in enumerate(self.minds):
                    if self.mkey is not None:
                        if self.mcutind is not None:
                            if self.upper_limit:
                                lidx = mapunit[self.mkey][
                                    zlidx:zhidx, self.mcutind] < self.mbins[j]
                            else:
                                lidx = ((self.mbins[j] <= mapunit[self.mkey][
                                    zlidx:zhidx, self.mcutind])
                                        & (mapunit[self.mkey][zlidx:zhidx,
                                                              self.mcutind] <
                                           self.mbins[j + 1]))
                        else:
                            if self.upper_limit:
                                lidx = mapunit[
                                    self.mkey][zlidx:zhidx] < self.mbins[j]
                            else:
                                lidx = ((self.mbins[j] <=
                                         mapunit[self.mkey][zlidx:zhidx])
                                        & (mapunit[self.mkey][zlidx:zhidx] <
                                           self.mbins[j + 1]))
                    else:
                        lidx = np.ones(zhidx - zlidx, dtype=bool)

                    for li1, j1 in enumerate(self.minds1):
                        if self.mkey1 is not None:
                            if self.mcutind1 is not None:
                                if self.upper_limit1:
                                    lidx1 = mapunit[self.mkey1][
                                        zlidx1:zhidx1,
                                        self.mcutind1] < self.mbins1[j1]
                                else:
                                    lidx1 = (
                                        (self.mbins1[j1] <= mapunit[self.mkey1]
                                         [zlidx1:zhidx1, self.mcutind1])
                                        & (mapunit[self.mkey1][zlidx1:zhidx1,
                                                               self.mcutind1] <
                                           self.mbins1[j1 + 1]))
                            else:
                                if self.upper_limit1:
                                    lidx1 = mapunit[self.mkey1][
                                        zlidx1:zhidx1] < self.mbins1[j1]
                                else:
                                    lidx1 = (
                                        (self.mbins1[j1] <=
                                         mapunit[self.mkey1][zlidx1:zhidx1])
                                        & (mapunit[self.mkey1][zlidx1:zhidx1] <
                                           self.mbins1[j1 + 1]))
                        else:
                            lidx1 = np.ones(zhidx1 - zlidx1, dtype=bool)

                        rands = self.getRandoms(
                            mapunit['azim_ang1'][zlidx1:zhidx1][lidx1],
                            mapunit['polar_ang1'][zlidx1:zhidx1][lidx1],
                            mapunit['redshift1'][zlidx1:zhidx1][lidx1],
                            zmin=self.zbins1[i1],
                            zmax=self.zbins1[i1 + 1])

                        self.nd1[self.jcount, j, j1,
                                 i * self.nzbins1 + i1] = len(
                                     mapunit['azim_ang'][zlidx:zhidx][lidx])
                        self.nd2[self.jcount, j, j1,
                                 i * self.nzbins1 + i1] = len(
                                     mapunit['azim_ang1'][zlidx1:zhidx1][lidx1])

                        self.nzd1[self.jcount, :, j, 0, i], _ = np.histogram(
                            mapunit['redshift'][zlidx:zhidx][lidx],
                            self.nofzbins)
                        self.nzd2[self.jcount, :, 0, j1, i1], _ = np.histogram(
                            mapunit['redshift1'][zlidx1:zhidx1][lidx1],
                            self.nofzbins)

                        print("Number of cat1 in this z/lum bin: {0}".format(
                            np.sum(lidx)))
                        print("Number of cat2 in this z/lum bin: {0}".format(
                            np.sum(lidx1)))

                        cat1 = treecorr.Catalog(
                            g1=mapunit['gamma1'][zlidx:zhidx][lidx],
                            g2=-mapunit['gamma2'][zlidx:zhidx][lidx],
                            ra=mapunit['azim_ang'][zlidx:zhidx][lidx],
                            dec=mapunit['polar_ang'][zlidx:zhidx][lidx],
                            ra_units='deg',
                            dec_units='deg')

                        cat2 = treecorr.Catalog(
                            ra=mapunit['azim_ang1'][zlidx1:zhidx1][lidx1],
                            dec=mapunit['polar_ang1'][zlidx1:zhidx1][lidx1],
                            ra_units='deg',
                            dec_units='deg')

                        rand_cat = treecorr.Catalog(ra=rands['azim_ang'],
                                                    dec=rands['polar_ang'],
                                                    ra_units='deg',
                                                    dec_units='deg')

                        sys.stdout.flush()
                        if (self.nd1[self.jcount, j, j1, i * self.nzbins1 + i1]
                                < 1) | (self.nd2[self.jcount, j, j1,
                                                 i * self.nzbins1 + i1] < 1):
                            continue

                        print('processing position-shear correlation')
                        ng = treecorr.NGCorrelation(min_sep=self.abins[0, i],
                                                    max_sep=self.abins[-1, i],
                                                    nbins=self.nabins,
                                                    sep_units=self.sep_units,
                                                    bin_slop=self.bin_slop)
                        print(
                            'min(ra), max(ra), min(dec), max(dec): {}, {}, {}, {}'
                            .format(np.min(mapunit['azim_ang']),
                                    np.max(mapunit['azim_ang']),
                                    np.min(mapunit['polar_ang']),
                                    np.max(mapunit['polar_ang'])))

                        ng.process_cross(cat2, cat1, num_threads=self.nthreads)

                        self.gt[self.jcount, :, j, j1,
                                i * self.nzbins1 + i1] = ng.xi
                        self.weights[self.jcount, :, j, j1,
                                     i * self.nzbins1 + i1] = ng.weight
                        self.varg[self.jcount, j, j1,
                                  i * self.nzbins1 + i1] = cat1.varg

                        rg = treecorr.NGCorrelation(min_sep=self.abins[0, i],
                                                    max_sep=self.abins[-1, i],
                                                    nbins=self.nabins,
                                                    sep_units=self.sep_units,
                                                    bin_slop=self.bin_slop)

                        print(
                            'min(rand_ra), max(rand_ra), min(rand_dec), max(rand_dec): {}, {}, {}, {}'
                            .format(np.min(rands['azim_ang']),
                                    np.max(rands['azim_ang']),
                                    np.min(rands['polar_ang']),
                                    np.max(rands['polar_ang'])))

                        rg.process_cross(rand_cat,
                                         cat1,
                                         num_threads=self.nthreads)
                        self.gt_rand[self.jcount, :, j, j1,
                                     i * self.nzbins1 + i1] = rg.xi
                        self.weights_rand[self.jcount, :, j, j1,
                                          i * self.nzbins1 + i1] = rg.weight

                        if self.amean is None:
                            if (ng.meanlogr != 0.0).any():
                                self.amean = np.exp(ng.meanlogr)
                            else:
                                self.amean = np.exp(ng.logr)