def calc_2pt(data, randoms, config_fname, zvar, random_zvar,
             ra_var='RA', dec_var='DEC', random_ra_var='RA', random_dec_var='DEC'):
    config_2pt = load_config(config_fname)['2PCF']
    cat = treecorr.Catalog(
        ra=data[ra_var],
        dec=data[dec_var],
        dec_units='degrees',
        ra_units='degrees',
    )
    random_cat = treecorr.Catalog(
        ra=randoms[random_ra_var],
        dec=randoms[random_dec_var],
        dec_units='degrees',
        ra_units='degrees',
    )
    print(config_2pt)
    dd = treecorr.NNCorrelation(config=config_2pt)
    dr = treecorr.NNCorrelation(config=config_2pt)
    rr = treecorr.NNCorrelation(config=config_2pt)
    dd.process(cat, metric=config_2pt['metric'])
    dr.process(cat, random_cat, metric=config_2pt['metric'])
    rr.process(random_cat, metric=config_2pt['metric'])
    xi, varxi = dd.calculateXi(dr=dr, rr=rr)
    return xi
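# A minimal sketch of the '2PCF' block that load_config() is assumed to return in
# calc_2pt() above. The file name and the particular values are hypothetical, but each
# key is a TreeCorr configuration parameter, so the dict can be passed as config=config_2pt.
example_config_2pt = {
    'nbins': 20,          # number of separation bins
    'min_sep': 0.1,       # smallest separation
    'max_sep': 10.0,      # largest separation
    'sep_units': 'arcmin',
    'bin_slop': 0.1,
    'metric': 'Arc',      # read back out and passed to process() in calc_2pt
}
# xi = calc_2pt(data, randoms, 'config.yaml', 'Z', 'Z')  # usage, assuming data/randoms
#                                                        # are dict-like with RA/DEC columns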
def get_xi_gg(self):
    if self.xis['gg'] is None:
        fname = self.get_xifile_name('gg', 'xi', 'npz')
        if not os.path.isfile(fname):
            self.get_catalog()
            self.get_delta_mask()
            self.get_random()
            bn = self._get_binning()
            dd = tcr.NNCorrelation(config=bn, var_method='jackknife')
            dd.process(self.cat)
            dr = tcr.NNCorrelation(config=bn)
            dr.process(self.cat, self.ran)
            rr = tcr.NNCorrelation(config=bn)
            rr.process(self.ran)
            xi, varxi = dd.calculateXi(rr, dr)
            self.xis['gg'] = {
                'th': dd.rnom, 'th_mean': dd.meanr,
                'DD': dd.npairs, 'DR': dr.npairs, 'RR': rr.npairs,
                'xi': xi, 'cov': dd.cov, 'var': varxi
            }
            np.savez(fname, **(self.xis['gg']))
        else:
            d = np.load(fname)
            self.xis['gg'] = {k: d[k] for k in
                              ['th', 'th_mean', 'DD', 'DR', 'RR', 'xi', 'cov', 'var']}
    return self.xis['gg']
def CorrelationFunction(source, source_random, corrconfig=None,
                        source_ra='ra', source_dec='dec',
                        source_random_ra=None, source_random_dec=None):
    if corrconfig is None:
        corrconfig = {'sep_units': 'degrees', 'min_sep': 0.1, 'max_sep': 6,
                      'nbins': 25, 'bin_slop': 0.25, 'num_threads': 1}
    if source_random_ra is None:
        source_random_ra = source_ra
    if source_random_dec is None:
        source_random_dec = source_dec
    SourceCat = treecorr.Catalog(ra=source[source_ra], dec=source[source_dec],
                                 ra_units='degrees', dec_units='degrees')
    SourceRand = treecorr.Catalog(ra=source_random[source_random_ra],
                                  dec=source_random[source_random_dec],
                                  ra_units='degrees', dec_units='degrees')
    dd = treecorr.NNCorrelation(**corrconfig)
    dr = treecorr.NNCorrelation(**corrconfig)
    rr = treecorr.NNCorrelation(**corrconfig)
    dd.process(SourceCat)
    dr.process(SourceCat, SourceRand)
    rr.process(SourceRand)
    xi, varxi = dd.calculateXi(rr, dr=dr)
    r = np.exp(dd.logr)
    return [xi, r]
def compute_gg_treecorr(cat1, cat2, options, nbins, ijk):
    print('Using treecorr', treecorr.version)
    slop = 0.1
    # arrays to store the output
    r = np.zeros(nbins)
    DD = np.zeros_like(r)
    DR = np.zeros_like(r)
    RR = np.zeros_like(r)
    RD = np.zeros_like(r)
    # Set up the catalogues
    rcat1, rcat2, _, _ = randoms(cat1, cat2, ijk, period=options['box_size'])
    #import pdb ; pdb.set_trace()
    cat1 = treecorr.Catalog(g1=None, g2=None, x=cat1['x'], y=cat1['y'], z=cat1['z'])
    cat2 = treecorr.Catalog(g1=None, g2=None, x=cat2['x'], y=cat2['y'], z=cat2['z'])
    NR1 = rcat1.x.size * 1.0
    NR2 = rcat2.x.size * 1.0
    ND1 = cat1.x.size * 1.0
    ND2 = cat2.x.size * 1.0
    f0 = (NR1*NR2)/(ND1*ND2)
    f1 = (NR1*NR2)/(ND1*NR2)
    f2 = (NR1*NR2)/(ND2*NR1)
    print('Processing DD')
    nn = treecorr.NNCorrelation(nbins=nbins, min_sep=options['rlim'][0], max_sep=options['rlim'][1],
                                bin_slop=slop, verbose=0, period=options['box_size'])
    #nn.process(rcat1, rcat2, period=options['box_size'], metric='Periodic')
    nn.process(cat1, cat2, metric='Periodic')
    nn.finalize()
    DD = np.copy(nn.weight)
    rp_bins = np.copy(nn.rnom)
    nn.clear()
    print('Processing RD')
    nn = treecorr.NNCorrelation(nbins=nbins, min_sep=options['rlim'][0], max_sep=options['rlim'][1],
                                bin_slop=slop, verbose=0, period=options['box_size'])
    nn.process(rcat1, cat2, metric='Periodic')
    RD = np.copy(nn.weight)
    nn.clear()
    print('Processing DR')
    nn = treecorr.NNCorrelation(nbins=nbins, min_sep=options['rlim'][0], max_sep=options['rlim'][1],
                                bin_slop=slop, verbose=0, period=options['box_size'])
    nn.process(cat1, rcat2, metric='Periodic')
    DR = np.copy(nn.weight)
    nn.clear()
    print('Processing RR')
    nn = treecorr.NNCorrelation(nbins=nbins, min_sep=options['rlim'][0], max_sep=options['rlim'][1],
                                bin_slop=slop, verbose=0, period=options['box_size'])
    nn.process(rcat1, rcat2, metric='Periodic')
    RR = np.copy(nn.weight)
    nn.clear()
    gg = (f0 * DD/RR) - (f1 * DR/RR) - (f2 * RD/RR) + 1.0
    return gg
def cross_correlate(cat1, cat2, ran1, ran2):
    D1_D2 = treecorr.NNCorrelation(min_sep=1, max_sep=200, nbins=10,
                                   sep_units='arcmin', var_method='jackknife')
    D1_D2.process(cat1, cat2)
    D1_R2 = treecorr.NNCorrelation(min_sep=1, max_sep=200, nbins=10,
                                   sep_units='arcmin', var_method='jackknife')
    D1_R2.process(cat1, ran2)
    R1_D2 = treecorr.NNCorrelation(min_sep=1, max_sep=200, nbins=10,
                                   sep_units='arcmin', var_method='jackknife')
    R1_D2.process(ran1, cat2)
    R1_R2 = treecorr.NNCorrelation(min_sep=1, max_sep=200, nbins=10,
                                   sep_units='arcmin', var_method='jackknife')
    R1_R2.process(ran1, ran2)
    xi_d, var_d = D1_D2.calculateXi(R1_R2, D1_R2, R1_D2)
    cov_jk = D1_D2.estimate_cov(method='jackknife')
    logr = D1_D2.meanlogr
    theta = np.exp(logr)
    return theta, xi_d, np.sqrt(np.diag(cov_jk))
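# cross_correlate() above relies on var_method='jackknife', which TreeCorr only supports
# when the input catalogs carry patches. A minimal, self-contained usage sketch; the
# synthetic points and the npatch value are illustrative assumptions, not from the
# original code.
import numpy as np
import treecorr

rng = np.random.default_rng(42)
ra1, dec1 = rng.uniform(0, 10, 5000), rng.uniform(-5, 5, 5000)
ra2, dec2 = rng.uniform(0, 10, 5000), rng.uniform(-5, 5, 5000)
rra, rdec = rng.uniform(0, 10, 50000), rng.uniform(-5, 5, 50000)

cat1 = treecorr.Catalog(ra=ra1, dec=dec1, ra_units='deg', dec_units='deg', npatch=20)
# Re-using the data patch centers keeps the jackknife regions consistent across catalogs.
cat2 = treecorr.Catalog(ra=ra2, dec=dec2, ra_units='deg', dec_units='deg',
                        patch_centers=cat1.patch_centers)
ran1 = treecorr.Catalog(ra=rra, dec=rdec, ra_units='deg', dec_units='deg',
                        patch_centers=cat1.patch_centers)
ran2 = treecorr.Catalog(ra=rra, dec=rdec, ra_units='deg', dec_units='deg',
                        patch_centers=cat1.patch_centers)
theta, xi, err = cross_correlate(cat1, cat2, ran1, ran2)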
def correlate(self, group1=0, group2=0):
    print('Will correlate galaxies in groups %d and %d' % (group1, group2))
    data1 = self.get(table='subfind_halos', fields='groupId, x, y, z',
                     cond='subfind_halos.snapnum = %d AND subfind_halos.groupId = %d' % (self.snapshot, group1))
    data2 = self.get(table='subfind_halos', fields='groupId, x, y, z',
                     cond='subfind_halos.snapnum = %d AND subfind_halos.groupId = %d' % (self.snapshot, group2))
    # Set up the correlation
    print('Setting up correlation')
    corr = treecorr.NNCorrelation(nbins=15, min_sep=30, max_sep=4e3)
    cat1 = treecorr.Catalog(x=data1['x'], y=data1['y'], z=data1['z'])
    cat2 = treecorr.Catalog(x=data2['x'], y=data2['y'], z=data2['z'])
    print('Calculating...')
    corr.process(cat1, cat2)
    print('Random-Random')
    rr = treecorr.NNCorrelation(nbins=15, min_sep=30, max_sep=4e3)
    rx = np.random.choice(data1['x'], size=5000)
    ry = np.random.choice(data1['y'], size=5000)
    rz = np.random.choice(data1['z'], size=5000)
    rcat = treecorr.Catalog(x=rx, y=ry, z=rz)
    rr.process(rcat)
    print('Data-Random')
    dr = treecorr.NNCorrelation(nbins=15, min_sep=30, max_sep=4e3)
    dr.process(rcat, cat2)
    print('Random-Data')
    rd = treecorr.NNCorrelation(nbins=15, min_sep=30, max_sep=4e3)
    rd.process(cat1, rcat)
    xi, varxi = corr.calculateXi(rr, dr, rd)
    return np.exp(corr.logr), xi, varxi
def get_xi_window_norm(window=None, nside=None):
    window_norm = {corr: {} for corr in corrs}
    mask = {}
    for tracer in window.keys():
        # window[tracer] = kappa_class.z_bins[tracer][0]['window']
        mask[tracer] = window[tracer] == hp.UNSEEN
        # window[tracer] = window[tracer][~mask[tracer]]
        fsky = mask[tracer].mean()
    cat0 = {'fullsky': np.ones_like(mask)}
    tree_cat_args0 = get_treecorr_cat_args(cat0, masks=None, nside=nside)
    tree_cat0 = treecorr.Catalog(**tree_cat_args0['fullsky'])
    tree_corrs0 = treecorr.NNCorrelation(**corr_config)
    _ = tree_corrs0.process(tree_cat0, tree_cat0)
    npairs0 = tree_corrs0.npairs * fsky
    del cat0, tree_cat0, tree_corrs0
    tree_cat_args = get_treecorr_cat_args(window, masks=mask, nside=nside)
    tree_cat = {tracer: treecorr.Catalog(w=window[tracer][~mask[tracer]], **tree_cat_args[tracer])
                for tracer in window.keys()}
    del mask
    for corr in corrs:
        tree_corrs = treecorr.NNCorrelation(**corr_config)
        _ = tree_corrs.process(tree_cat[corr[0]], tree_cat[corr[1]])
        window_norm[corr]['weight'] = tree_corrs.weight
        window_norm[corr]['npairs'] = tree_corrs.npairs
        window_norm[corr]['npairs0'] = npairs0
    del tree_cat, tree_corrs
    return window_norm
def calculate_total(RRa, DDa_all, DRa_all, RDa_all, DDc_all, DRc_all, RDc_all, RRc_all, Njk, config):
    """
    Resum the auto and cross correlations to get total quantities.
    """
    DDt = treecorr.NNCorrelation(config)
    DRt = treecorr.NNCorrelation(config)
    RDt = treecorr.NNCorrelation(config)
    RRt = treecorr.NNCorrelation(config)
    for i in range(Njk):
        DDt += DDa_all[i]
        DRt += DRa_all[i]
        DRt += DRc_all[i]
        RDt += RDa_all[i]
        RDt += RDc_all[i]
        RRt += RRa
        DDt.weight[:] += 0.5 * DDc_all[i].weight[:]
        DDt.npairs[:] += 0.5 * DDc_all[i].npairs[:]
        DDt.tot += 0.5 * DDc_all[i].tot
        RRt.weight[:] += 0.5 * RRc_all[i].weight[:]
        RRt.npairs[:] += 0.5 * RRc_all[i].npairs[:]
        RRt.tot += 0.5 * RRc_all[i].tot
    print("\t\tResumming HMCF JK total complete.")
    return DDt, DRt, RDt, RRt
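# The resummed totals from calculate_total() would typically be combined with TreeCorr's
# Landy-Szalay estimator, xi = (DD - DR - RD + RR) / RR, with each count normalized by
# its 'tot' inside calculateXi. A hedged sketch of that final step; the helper name is
# ours, not from the original code, while the keyword call matches TreeCorr's API.
def finish_hmcf_total(DDt, DRt, RDt, RRt):
    # calculateXi handles the normalization of the four pair counts and returns (xi, varxi).
    xi_total, varxi_total = DDt.calculateXi(rr=RRt, dr=DRt, rd=RDt)
    return np.exp(DDt.meanlogr), xi_total, varxi_total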
def calc_xi_perp(self, data1, data2, min_rpar, max_rpar, nbins=20, slop=0.1, randoms=True):
    # Build a catalogue of random points drawn from the same volume
    rx = np.random.random(size=data1['x'].size) * (data1['x'].max()-data1['x'].min()) + data1['x'].mean()
    ry = np.random.random(size=data1['x'].size) * (data1['y'].max()-data1['y'].min()) + data1['y'].mean()
    rz = np.random.random(size=data1['x'].size) * (data1['z'].max()-data1['z'].min()) + data1['z'].mean()
    # Create the catalogues
    cat_i = treecorr.Catalog(x=data1['x'], y=data1['y'], z=data1['z'])
    cat_j = treecorr.Catalog(x=data2['x'], y=data2['y'], z=data2['z'])
    rancat_i = treecorr.Catalog(x=rx, y=ry, z=rz)
    rancat_j = treecorr.Catalog(x=rx, y=ry, z=rz)
    nn = treecorr.NNCorrelation(nbins=nbins, min_rpar=min_rpar, max_rpar=max_rpar,
                                min_sep=15, max_sep=10e3, bin_slop=slop)
    rn = treecorr.NNCorrelation(nbins=nbins, min_rpar=min_rpar, max_rpar=max_rpar,
                                min_sep=15, max_sep=10e3, bin_slop=slop)
    nr = treecorr.NNCorrelation(nbins=nbins, min_rpar=min_rpar, max_rpar=max_rpar,
                                min_sep=15, max_sep=10e3, bin_slop=slop)
    rr = treecorr.NNCorrelation(nbins=nbins, min_rpar=min_rpar, max_rpar=max_rpar,
                                min_sep=15, max_sep=10e3, bin_slop=slop)
    nn.process(cat_i, cat_j, metric='Rperp')        #, metric='Periodic')
    rn.process(rancat_i, cat_j, metric='Rperp')     #, metric='Periodic')
    nr.process(cat_i, rancat_j, metric='Rperp')     #, metric='Periodic')
    rr.process(rancat_i, rancat_j, metric='Rperp')  #, metric='Periodic')
    R = np.exp(nn.meanlogr)
    if randoms:
        w, werr = nn.calculateXi(rr, dr=nr, rd=rn)
    else:
        w, werr = nn.calculateXi(rr, dr=None, rd=None)
    werr = np.sqrt(werr)
    return R, w, werr
def run_treecorr(self):
    R = self.gen_Catalog_random(N=self.Nptl_random)
    D1 = self.gen_Catalog_Dhalo()
    D2 = self.gen_Catalog_Dptl()
    rmin = self.rmin
    rmax = self.rmax
    nbins = 10
    metric = 'Euclidean'  # match the units of r with units of position
    self.DD = treecorr.NNCorrelation(min_sep=rmin, max_sep=rmax, nbins=nbins)
    self.DR = treecorr.NNCorrelation(min_sep=rmin, max_sep=rmax, nbins=nbins)
    self.RR = treecorr.NNCorrelation(min_sep=rmin, max_sep=rmax, nbins=nbins)
    self.RD = treecorr.NNCorrelation(min_sep=rmin, max_sep=rmax, nbins=nbins)
    self.DD.process(D1, D2, metric=metric)
    self.DR.process(D1, R, metric=metric)
    self.RR.process(R, R, metric=metric)
    self.RD.process(R, D2, metric=metric)
    xi, varxi = self.DD.calculateXi(self.RR, self.DR, self.RD)
    return self.DD.meanr, xi, varxi
def NN3DCorrelation(self, min_sep=1, max_sep=200, bin_size=0.5):
    """
    Calculates the 3D correlation function of objects using the Catalog's ra, dec, r.
    Requires randcatalog to exist. Distance units are Mpc/h.
    Returns tuple (logr, meanlogr, xi, xivar)
    """
    catS = treecorr.Catalog(ra=self.catalog["ra"], dec=self.catalog["dec"], r=self.catalog["r"],
                            ra_units="radians", dec_units="radians")
    if self.randcatalog:
        catR = treecorr.Catalog(ra=self.randcatalog["ra"], dec=self.randcatalog["dec"],
                                r=self.randcatalog["r"], ra_units="radians", dec_units="radians")
    else:
        raise RuntimeError("Need random catalog for NN")
    dd = treecorr.NNCorrelation(min_sep=min_sep, bin_size=bin_size, max_sep=max_sep)
    dr = treecorr.NNCorrelation(min_sep=min_sep, bin_size=bin_size, max_sep=max_sep)
    rr = treecorr.NNCorrelation(min_sep=min_sep, bin_size=bin_size, max_sep=max_sep)
    dd.process(catS)
    dr.process(catS, catR)
    rr.process(catR)
    xi, xivar = dd.calculateXi(rr, dr)
    logr = dd.logr
    meanlogr = dd.meanlogr
    return (logr, meanlogr, xi, xivar)
def calcalate_hhcf_full(outpath, halopath, nbins, limits, Nh, randoms):
    """
    Calculate the halo-halo correlation function for the full volume.
    """
    redpath = halopath + "/reduced_halo_cats/reduced_halo_cat.txt"
    infile = open(redpath, "r")
    halos = np.zeros((int(Nh), 3))
    i = 0
    for line in infile:
        if line[0] == "#":
            continue
        parts = line.split()
        halos[i, :] = float(parts[x_index]), float(parts[y_index]), float(parts[z_index])
        i += 1
    infile.close()
    # Interface with treecorr
    config = {'nbins': nbins, 'min_sep': limits[0], 'max_sep': limits[1]}
    halo_cat = treecorr.Catalog(x=halos[:, 0], y=halos[:, 1], z=halos[:, 2], config=config)
    random_cat = treecorr.Catalog(x=randoms[:, 0], y=randoms[:, 1], z=randoms[:, 2], config=config)
    DD = treecorr.NNCorrelation(config)
    DR = treecorr.NNCorrelation(config)
    RR = treecorr.NNCorrelation(config)
    DD.process(halo_cat)
    DR.process(halo_cat, random_cat)
    RR.process(random_cat)
    DD.write(outpath + "/halohalo_correlation_function/full_hhcf/full_hhcf.txt", RR, DR)
    print("\tFull halo-halo correlation function complete.")
    return
def calc_pos_pos(self, i, j, verbose, num_threads):
    mask = self.lens_binning == i
    lenscat_i = treecorr.Catalog(w=self.lensweight[mask], ra=self.lens['ra'][mask],
                                 dec=self.lens['dec'][mask], ra_units='deg', dec_units='deg')
    mask = self.ran_binning == i
    rancat_i = treecorr.Catalog(w=np.ones(np.sum(mask)), ra=self.randoms['ra'][mask],
                                dec=self.randoms['dec'][mask], ra_units='deg', dec_units='deg')
    mask = self.lens_binning == j
    lenscat_j = treecorr.Catalog(w=self.lensweight[mask], ra=self.lens['ra'][mask],
                                 dec=self.lens['dec'][mask], ra_units='deg', dec_units='deg')
    mask = self.ran_binning == j
    rancat_j = treecorr.Catalog(w=np.ones(np.sum(mask)), ra=self.randoms['ra'][mask],
                                dec=self.randoms['dec'][mask], ra_units='deg', dec_units='deg')
    nn = treecorr.NNCorrelation(nbins=self.params['tbins'], min_sep=self.params['tbounds'][0],
                                max_sep=self.params['tbounds'][1], sep_units='arcmin',
                                bin_slop=self.params['slop'], verbose=verbose, num_threads=num_threads)
    rn = treecorr.NNCorrelation(nbins=self.params['tbins'], min_sep=self.params['tbounds'][0],
                                max_sep=self.params['tbounds'][1], sep_units='arcmin',
                                bin_slop=self.params['slop'], verbose=verbose, num_threads=num_threads)
    nr = treecorr.NNCorrelation(nbins=self.params['tbins'], min_sep=self.params['tbounds'][0],
                                max_sep=self.params['tbounds'][1], sep_units='arcmin',
                                bin_slop=self.params['slop'], verbose=verbose, num_threads=num_threads)
    rr = treecorr.NNCorrelation(nbins=self.params['tbins'], min_sep=self.params['tbounds'][0],
                                max_sep=self.params['tbounds'][1], sep_units='arcmin',
                                bin_slop=self.params['slop'], verbose=verbose, num_threads=num_threads)
    nn.process(lenscat_i, lenscat_j)
    rn.process(rancat_i, lenscat_j)
    nr.process(lenscat_i, rancat_j)
    rr.process(rancat_i, rancat_j)
    theta = np.exp(nn.meanlogr)
    wtheta, wthetaerr = nn.calculateXi(rr, dr=nr, rd=rn)
    wthetaerr = np.sqrt(wthetaerr)
    return theta, wtheta, wthetaerr
def calccorr(dataset, minsep=1 / 60, maxsep=6):
    nn = treecorr.NNCorrelation(nbins=16, min_sep=minsep, max_sep=maxsep,
                                sep_units='deg', var_method='jackknife')
    cat = treecorr.Catalog(ra=dataset['ra'], dec=dataset['dec'],
                           ra_units='deg', dec_units='deg', npatch=100)
    nn.process(cat)
    catrand = randomcorr(dataset, cat, minsep, maxsep)
    rr = treecorr.NNCorrelation(nbins=16, min_sep=minsep, max_sep=maxsep,
                                sep_units='deg', var_method='jackknife')
    rr.process(catrand)
    dr = treecorr.NNCorrelation(nbins=16, min_sep=minsep, max_sep=maxsep,
                                sep_units='deg', var_method='jackknife')
    dr.process(cat, catrand)
    corr = nn.calculateXi(rr, dr)
    bin_centers = nn.meanlogr
    cov = nn.estimate_cov('jackknife')
    return [corr, bin_centers, cov]
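# calccorr() depends on a helper randomcorr() that is not shown here. One plausible
# sketch, assuming it builds uniform randoms over the data footprint and reuses the data
# catalog's patch centers so the jackknife covariance is well defined. The helper name,
# oversampling factor, and bounding-box footprint are assumptions, not the original code.
def randomcorr_sketch(dataset, cat, minsep, maxsep, oversample=10):
    rng = np.random.default_rng(0)
    n_rand = oversample * len(dataset['ra'])
    # Uniform on the sphere within the data's RA/Dec bounding box (a crude footprint proxy).
    ra_r = rng.uniform(dataset['ra'].min(), dataset['ra'].max(), n_rand)
    sindec = rng.uniform(np.sin(np.radians(dataset['dec'].min())),
                         np.sin(np.radians(dataset['dec'].max())), n_rand)
    dec_r = np.degrees(np.arcsin(sindec))
    return treecorr.Catalog(ra=ra_r, dec=dec_r, ra_units='deg', dec_units='deg',
                            patch_centers=cat.patch_centers)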
def NNCorrelation(self):
    """
    Calculates the 2D correlation function using the Catalog's ra, dec.
    Requires randcatalog to exist.
    Returns tuple (logr, meanlogr, xi, xivar)
    """
    catS = treecorr.Catalog(ra=self.catalog["ra"], dec=self.catalog["dec"],
                            ra_units="radians", dec_units="radians")
    if self.randcatalog:
        catR = treecorr.Catalog(ra=self.randcatalog["ra"], dec=self.randcatalog["dec"],
                                ra_units="radians", dec_units="radians")
    else:
        raise RuntimeError("Need random catalog for NN")
    dd = treecorr.NNCorrelation(min_sep=self.min_sep, bin_size=self.bin_size,
                                max_sep=self.max_sep, sep_units='arcmin')
    dr = treecorr.NNCorrelation(min_sep=self.min_sep, bin_size=self.bin_size,
                                max_sep=self.max_sep, sep_units='arcmin')
    rr = treecorr.NNCorrelation(min_sep=self.min_sep, bin_size=self.bin_size,
                                max_sep=self.max_sep, sep_units='arcmin')
    dd.process(catS)
    dr.process(catS, catR)
    rr.process(catR)
    xi, xivar = dd.calculateXi(rr, dr)
    logr = dd.logr
    meanlogr = dd.meanlogr
    return (logr, meanlogr, xi, xivar)
def finish_nn(corr, cat1, cat2, rcat1, rcat2, options, nbins):
    if cat2 is None:
        cat2 = cat1
    rr = treecorr.NNCorrelation(min_sep=options['2pt']['rmin'], max_sep=options['2pt']['rmax'], nbins=nbins)
    rn = treecorr.NNCorrelation(min_sep=options['2pt']['rmin'], max_sep=options['2pt']['rmax'], nbins=nbins)
    nr = treecorr.NNCorrelation(min_sep=options['2pt']['rmin'], max_sep=options['2pt']['rmax'], nbins=nbins)
    # Do the pair counting
    print('Processing randoms')
    print('RR')
    rr.process(rcat1, rcat2)
    print('DR')
    nr.process(cat1, rcat2)
    print('RD')
    rn.process(rcat1, cat2)
    # Finish off
    xi, var = corr.calculateXi(rr, dr=nr, rd=rn)
    setattr(corr, 'xi', xi)
    return corr
def get_rr_dr(self, cat, rmin, rmax, nbins, niterations=1, binning="Log"):
    import treecorr
    from numpy.random import random_sample, choice
    rrcats = []
    for i in range(0, niterations):
        nhalos = cat.ntot
        boxsize = self.boxsize.value
        x2 = random_sample(nhalos * 2) * boxsize
        y2 = random_sample(nhalos * 2) * boxsize
        z2 = random_sample(nhalos * 2) * boxsize
        cat2 = treecorr.Catalog(x=x2, y=y2, z=z2)
        rrcats.append(cat2)
    dr = treecorr.NNCorrelation(min_sep=rmin, max_sep=rmax, nbins=nbins, bin_slop=0,
                                xperiod=boxsize, yperiod=boxsize, zperiod=boxsize,
                                bin_type=binning, metric="Periodic")
    rr = treecorr.NNCorrelation(min_sep=rmin, max_sep=rmax, nbins=nbins, bin_slop=0,
                                xperiod=boxsize, yperiod=boxsize, zperiod=boxsize,
                                bin_type=binning, metric="Periodic")
    dr.process(rrcats, cat, metric='Periodic')
    rr.process(rrcats, metric='Periodic')
    return rr, dr
def _get_corrs_nosep(self, data, min_sep=44, max_sep=1e6, binning='log', nbins=20,
                     ctype=('s', 's'), estimator='Landy-Szalay', verbosity=1,
                     randoms=None, method='halotools'):
    if verbosity > 0:
        print('Will construct %s - %s correlation functions' % ctype)
        print('Using %s estimator' % estimator)
    # Decide on an appropriate binning scheme
    if binning.lower() == 'log':
        rbins = np.logspace(np.log10(min_sep), np.log10(max_sep), nbins)
    elif binning.lower() == 'linear':
        rbins = np.linspace(min_sep, max_sep, nbins)
    if verbosity > 1:
        print('Will use %s binning:' % binning, rbins)
    # Parse the mask
    mask1 = util.choose_cs_mask(data, ctype[0])
    mask2 = util.choose_cs_mask(data, ctype[1])
    pos1 = pretending.return_xyz_formatted_array(data['x'], data['y'], data['z'], mask=mask1)
    pos2 = pretending.return_xyz_formatted_array(data['x'], data['y'], data['z'], mask=mask2)
    # And do the randoms
    if randoms is None:
        r1 = util.construct_random_cat(data, mask=mask1)
        r2 = util.construct_random_cat(data, mask=mask2)
    else:
        if verbosity > 0:
            print('Using random points provided for normalisation.')
        r1 = randoms
    R = np.sqrt(np.array(rbins)[1:] * np.array(rbins)[:-1])
    print('Using %s to calculate two-point correlations' % method)
    if method == 'halotools':
        return R, pretending.tpcf(pos1, rbins, sample2=pos2, randoms=r1,
                                  period=info.Lbox, estimator=estimator)
    elif method == 'treecorr':
        print('Constructing catalogues...')
        cat_i = treecorr.Catalog(x=data['x'][mask1], y=data['y'][mask1], z=data['z'][mask1])
        cat_j = treecorr.Catalog(x=data['x'][mask2], y=data['y'][mask2], z=data['z'][mask2])
        rx_1 = (np.random.random(size=data['x'][mask1].size) - 0.5) * (data['x'][mask1].max() - data['x'][mask1].min()) + data['x'][mask1].mean()
        ry_1 = (np.random.random(size=data['x'][mask1].size) - 0.5) * (data['y'][mask1].max() - data['y'][mask1].min()) + data['y'][mask1].mean()
        rz_1 = (np.random.random(size=data['x'][mask1].size) - 0.5) * (data['z'][mask1].max() - data['z'][mask1].min()) + data['z'][mask1].mean()
        rancat_1 = treecorr.Catalog(x=rx_1, y=ry_1, z=rz_1)
        print('Correlating...')
        nn = treecorr.NNCorrelation(nbins=nbins, min_sep=min_sep, max_sep=max_sep, bin_slop=0.1)
        nr = treecorr.NNCorrelation(nbins=nbins, min_sep=min_sep, max_sep=max_sep, bin_slop=0.1)
        rn = treecorr.NNCorrelation(nbins=nbins, min_sep=min_sep, max_sep=max_sep, bin_slop=0.1)
        rr = treecorr.NNCorrelation(nbins=nbins, min_sep=min_sep, max_sep=max_sep, bin_slop=0.1)
        nn.process(cat_i, cat_j)
        nr.process(rancat_1, cat_i)
        rn.process(cat_j, rancat_1)
        rr.process(rancat_1, rancat_1)
        R = np.exp(nn.meanlogr)
        w = (nn.weight - nr.weight - rn.weight + rr.weight) / rr.weight
        return R, w
def correlation_TreeCorr(data_ra, data_dec, data_r, rand_ra, rand_dec, rand_r, config):
    import time
    import numpy as np
    import treecorr
    import sys
    # Begin timing
    start = time.time()
    # Make sure arrays match
    assert data_ra.size == data_dec.size, "Data must have both RA and DEC"
    assert rand_ra.size == rand_dec.size, "Randoms must have both RA and DEC"
    # Create TreeCorr catalog objects
    dcat = treecorr.Catalog(ra=data_ra, dec=data_dec, r=data_r, ra_units='deg', dec_units='deg')
    rcat = treecorr.Catalog(ra=rand_ra, dec=rand_dec, r=rand_r, ra_units='deg', dec_units='deg')
    print('TreeCorr catalogs created')
    sys.stdout.flush()
    # Run TreeCorr processes for DD, DR, RD, and RR
    dd = treecorr.NNCorrelation(config)
    dr = treecorr.NNCorrelation(config)
    # rd = treecorr.NNCorrelation(config)
    rr = treecorr.NNCorrelation(config)
    dd.process(dcat)
    print('DD done')
    sys.stdout.flush()
    # I also need to get the bin locations for plotting
    logr = dd.logr
    dr.process(dcat, rcat)
    print('DR done')
    sys.stdout.flush()
    # rd.process(rcat, dcat)
    # print('RD done')
    # sys.stdout.flush()
    rr.process(rcat)
    print('RR done')
    sys.stdout.flush()
    # Find the correlation function and errors
    # xi, varxi = dd.calculateXi(rr, dr, rd)
    xi, varxi = dd.calculateXi(rr, dr)
    print('Correlation function and errors calculated')
    sys.stdout.flush()
    # Find elapsed time
    runtime = time.time() - start
    del start
    ## Print the time it took
    h = int(np.floor(runtime / (60.0 * 60.0)))
    m = int(np.floor((runtime - (60.0 * 60.0 * h)) / 60.0))
    s = runtime - 60.0 * 60.0 * h - 60.0 * m
    print('Elapsed time: {:>02d}:{:>02d}:{:>05.2f}'.format(h, m, s))
    sys.stdout.flush()
    del runtime, h, m, s
    # Return xi, varxi, and bin locations
    return (xi, varxi, logr)
def pos_pos_corr(pos1, pos2, posr1, posr2, w1=None, w2=None, same_zshell=False,
                 same_cell=False, unique_encounter=False, num_threads=0):
    nbins = 6
    min_sep = 0.05  # 3 arcmin
    max_sep = 3.0   # 180 arcmin
    bin_size = (max_sep - min_sep) / nbins  # roughly
    bin_slop = 0.05 / bin_size  # 0.1 -> 0.05
    # 2pt_pipeline for des used bin_slop: 0.01 here:
    # https://github.com/des-science/2pt_pipeline/blob/master/pipeline/twopt_pipeline.yaml
    # num_threads = 5  # None #0  # should query the number of cpus your computer has
    logger = None
    if same_zshell and same_cell:  # auto
        ra, dec = pos1      # either 1 or 2 works, they're the same
        ra_R, dec_R = posr1
        w = w1
        cat = treecorr.Catalog(ra=ra, dec=dec, ra_units='degrees', dec_units='degrees', w=w)
        cat_R = treecorr.Catalog(ra=ra_R, dec=dec_R, ra_units='degrees', dec_units='degrees')
    else:  # cross
        ra1, dec1 = pos1
        ra2, dec2 = pos2
        ra_R1, dec_R1 = posr1
        ra_R2, dec_R2 = posr2
        cat1 = treecorr.Catalog(ra=ra1, dec=dec1, ra_units='degrees', dec_units='degrees', w=w1)
        cat2 = treecorr.Catalog(ra=ra2, dec=dec2, ra_units='degrees', dec_units='degrees', w=w2)
        cat1_R = treecorr.Catalog(ra=ra_R1, dec=dec_R1, ra_units='degrees', dec_units='degrees')
        cat2_R = treecorr.Catalog(ra=ra_R2, dec=dec_R2, ra_units='degrees', dec_units='degrees')
    DD = treecorr.NNCorrelation(min_sep=min_sep, max_sep=max_sep, nbins=nbins,
                                bin_slop=bin_slop, sep_units='degrees', logger=logger)
    RR = treecorr.NNCorrelation(min_sep=min_sep, max_sep=max_sep, nbins=nbins,
                                bin_slop=bin_slop, sep_units='degrees', logger=logger)
    DR = treecorr.NNCorrelation(min_sep=min_sep, max_sep=max_sep, nbins=nbins,
                                bin_slop=bin_slop, sep_units='degrees', logger=logger)
    RD = treecorr.NNCorrelation(min_sep=min_sep, max_sep=max_sep, nbins=nbins,
                                bin_slop=bin_slop, sep_units='degrees', logger=logger)
    # *** two hp cells when corr is within the same zshell. c1xc2 enough, no c2xc1 later
    if same_zshell and same_cell:  # auto: same z, same pix
        DD.process_auto(cat, num_threads=num_threads)
        RR.process_auto(cat_R, num_threads=num_threads)
        DR.process_cross(cat, cat_R, num_threads=num_threads)
        RD = DR.copy()
    elif same_zshell:  # same z, 2 pix: distribute the workload fairly b/w two ranks
        if unique_encounter:
            # the following two counts shouldn't be doubled up cuz they're the same in both directions
            DD.process_cross(cat1, cat2, num_threads=num_threads)
            RR.process_cross(cat1_R, cat2_R, num_threads=num_threads)
        else:
            DR.process_cross(cat1, cat2_R, num_threads=num_threads)
            DR.process_cross(cat2, cat1_R, num_threads=num_threads)
            RD = DR.copy()
    else:
        # different z (can have different/same pix); when 2 cats have diff zshells it is
        # enough to make them different even within the same hp pix
        DD.process_cross(cat1, cat2, num_threads=num_threads)  # metric='Rperp')
        RR.process_cross(cat1_R, cat2_R, num_threads=num_threads)
        DR.process_cross(cat1, cat2_R, num_threads=num_threads)
        RD.process_cross(cat1_R, cat2, num_threads=num_threads)  # RD != DR here
    return DD, RR, DR, RD
def test_direct_partial():
    # There are two ways to specify using only parts of a catalog:
    # 1. The parameters first_row and last_row would usually be used for files, but they are a
    #    general way to use only a (contiguous) subset of the rows
    # 2. You can also set weights to 0 for the rows you don't want to use.

    # First test first_row, last_row
    ngal = 200
    s = 10.
    numpy.random.seed(8675309)
    x1 = numpy.random.normal(0, s, (ngal,))
    y1 = numpy.random.normal(0, s, (ngal,))
    cat1a = treecorr.Catalog(x=x1, y=y1, first_row=28, last_row=144)
    x2 = numpy.random.normal(0, s, (ngal,))
    y2 = numpy.random.normal(0, s, (ngal,))
    cat2a = treecorr.Catalog(x=x2, y=y2, first_row=48, last_row=129)

    min_sep = 1.
    max_sep = 50.
    nbins = 50
    dda = treecorr.NNCorrelation(min_sep=min_sep, max_sep=max_sep, nbins=nbins, bin_slop=0.)
    dda.process(cat1a, cat2a)
    print('dda.npairs = ', dda.npairs)

    log_min_sep = numpy.log(min_sep)
    log_max_sep = numpy.log(max_sep)
    true_npairs = numpy.zeros(nbins)
    bin_size = (log_max_sep - log_min_sep) / nbins
    for i in range(27, 144):
        for j in range(47, 129):
            rsq = (x1[i] - x2[j])**2 + (y1[i] - y2[j])**2
            logr = 0.5 * numpy.log(rsq)
            k = int(numpy.floor((logr - log_min_sep) / bin_size))
            if k < 0:
                continue
            if k >= nbins:
                continue
            true_npairs[k] += 1
    print('true_npairs = ', true_npairs)
    print('diff = ', dda.npairs - true_npairs)
    numpy.testing.assert_array_equal(dda.npairs, true_npairs)

    # Now check that we get the same thing with all the points, but with w=0 for the ones
    # we don't want.
    w1 = numpy.zeros(ngal)
    w1[27:144] = 1.
    w2 = numpy.zeros(ngal)
    w2[47:129] = 1.
    cat1b = treecorr.Catalog(x=x1, y=y1, w=w1)
    cat2b = treecorr.Catalog(x=x2, y=y2, w=w2)
    ddb = treecorr.NNCorrelation(min_sep=min_sep, max_sep=max_sep, nbins=nbins, bin_slop=0.)
    ddb.process(cat1b, cat2b)
    print('ddb.npairs = ', ddb.npairs)
    print('diff = ', ddb.npairs - true_npairs)
    numpy.testing.assert_array_equal(ddb.npairs, true_npairs)
def wtheta(self, table, bin_number, table2=None, bin_number_2=None):
    '''calculate position-position correlation'''
    # setup correlation objects, random catalog
    corr_kwargs = {'min_sep': 3.0, 'max_sep': 90, 'nbins': 12, 'sep_units': 'arcmin'}
    dd = treecorr.NNCorrelation(**corr_kwargs)
    rr = treecorr.NNCorrelation(**corr_kwargs)
    dr = treecorr.NNCorrelation(**corr_kwargs)
    rand = self.io.read_randoms(self.io.path_dict['random_prefix'] + '_{}.hdf'.format(bin_number))
    # assert len(table)*6 == rand.ntot, "randoms are not scaled correctly for auto"
    # deal with second catalog if need be
    if table2 is not None:
        cat = self.io.df_to_corr(table)
        cat2 = self.io.df_to_corr(table2)
        rand2 = self.io.read_randoms(self.io.path_dict['random_prefix'] + '_{}.hdf'.format(bin_number_2))
        # assert len(table2)*6 == rand2.ntot, "randoms are not scaled correctly for cross"
        rd = treecorr.NNCorrelation(**corr_kwargs)
        rr.process(rand, rand2)
        dd.process(cat, cat2)
        dr.process(cat, rand2)
        rd.process(rand, cat2)
        xi, varxi = dd.calculateXi(rr, dr, rd)
        sig = np.sqrt(varxi)
        r = np.exp(dd.meanlogr)
        # Coffset = calcC(rr)
        return {"xi": xi, "sig": sig, "r": r}
    # otherwise just deal with the auto correlation
    else:
        cat = self.io.df_to_corr(table)
        # calculate w of theta given our sanitized randoms and catalog data
        rr.process(rand)
        dd.process(cat)
        dr.process(cat, rand)
        xi, varxi = dd.calculateXi(rr, dr)
        sig = np.sqrt(varxi)
        r = np.exp(dd.meanlogr)
        # Coffset = calcC(rr)
        return {"xi": xi, "sig": sig, "r": r}
def getCrossWTheta(cat, cat2, rand_ra, rand_dec):
    """
    Calculate the angular two-point correlation function using the Landy-Szalay estimator.

    note: rand_ra and rand_dec should sample the same space on the sky as the data
    to accurately calculate w of theta

    parameters
    cat: treecorr catalog of galaxies we will calculate w of theta for.
    cat2: treecorr catalog of the second galaxy sample for the cross-correlation.
    rand_ra: numpy array. uniformly random sampled coordinates in RA space.
    rand_dec: numpy array. uniformly random sampled coordinates in DEC space

    returns:
    xi: numpy array. the angular two point correlation function
    sig: numpy array. xi's std dev noise estimated from treecorr. underestimated error
    r: numpy array of angular bins xi is calculated for
    """
    dd = treecorr.NNCorrelation(min_sep=0.1, max_sep=80, nbins=15, sep_units='arcmin')
    dd.process(cat, cat2)
    rand = treecorr.Catalog(ra=rand_ra, dec=rand_dec, ra_units='radians', dec_units='radians')
    rr = treecorr.NNCorrelation(min_sep=0.1, max_sep=80, nbins=15, sep_units='arcmin')
    rr.process(rand)
    r = np.exp(dd.meanlogr)
    dr = treecorr.NNCorrelation(min_sep=0.1, max_sep=80, nbins=15, sep_units='arcmin')
    dr.process(cat, rand)
    rd = treecorr.NNCorrelation(min_sep=0.1, max_sep=80, nbins=15, sep_units='arcmin')
    rd.process(rand, cat2)
    xi, varxi = dd.calculateXi(rr, dr, rd)
    sig = np.sqrt(varxi)
    Coffset = calcC(rr)
    return xi, sig, r, Coffset
def compute_2pt_raw(self):
    dd = treecorr.NNCorrelation(config=self.config_2pt)
    dr = treecorr.NNCorrelation(config=self.config_2pt)
    rr = treecorr.NNCorrelation(config=self.config_2pt)
    toc = time.time()
    dd.process(self.cat, metric=self.config_2pt['metric'])
    dr.process(self.cat, self.random_cat, metric=self.config_2pt['metric'])
    rr.process(self.random_cat, metric=self.config_2pt['metric'])
    self.xi, varxi = dd.calculateXi(dr=dr, rr=rr)
    tic = time.time()
    print('2PCF took', tic - toc)
    stdout.flush()
def autoCorrelation(self, dataRADEC, randRADEC, ra_units='degrees', dec_units='degrees',
                    min_sep=0.01, max_sep=10, bin_size=0.2, sep_units='degrees'):
    """
    Use TreeCorr to compute the auto-correlation of the data catalog, normalized by the randoms.

    dataRADEC - tuple: (RA, DEC) arrays of the data catalog made with makeDataCatalogs()
    randRADEC - tuple: (RA, DEC) arrays of the random catalog made with makeDataCatalogs()

    Return - r, xi, varxi, sig
    """
    self.dataRA = dataRADEC[0]
    self.dataDEC = dataRADEC[1]
    self.randRA = randRADEC[0]
    self.randDEC = randRADEC[1]
    dataCatalog = treecorr.Catalog(ra=self.dataRA, dec=self.dataDEC,
                                   ra_units=ra_units, dec_units=dec_units)
    randCatalog = treecorr.Catalog(ra=self.randRA, dec=self.randDEC,
                                   ra_units=ra_units, dec_units=dec_units)
    nn = treecorr.NNCorrelation(min_sep=min_sep, max_sep=max_sep, bin_size=bin_size, sep_units=sep_units)
    nn.process(dataCatalog)
    rr = treecorr.NNCorrelation(min_sep=min_sep, max_sep=max_sep, bin_size=bin_size, sep_units=sep_units)
    rr.process(randCatalog)
    dr = treecorr.NNCorrelation(min_sep=min_sep, max_sep=max_sep, bin_size=bin_size, sep_units=sep_units)
    dr.process(dataCatalog, randCatalog)
    r = numpy.exp(nn.meanlogr)
    xi, varxi = nn.calculateXi(rr, dr)
    sig = numpy.sqrt(varxi)
    return r, xi, varxi, sig
def auto_corr(gal, random, config, weight="False"):
    '''
    measure the auto-correlation with LS estimator using a gal catalog and a random catalog
    '''
    min_sep = config['min_sep']
    max_sep = config['max_sep']
    units = config['units']
    nbins = config['nbins']
    bin_slop = config['bin_slop']
    if weight == "True":
        gal_cat = treecorr.Catalog(ra=gal["RA"], dec=gal["DEC"], w=gal["W"],
                                   ra_units='deg', dec_units='deg')
    if weight == "False":
        gal_cat = treecorr.Catalog(ra=gal["RA"], dec=gal["DEC"],
                                   ra_units='deg', dec_units='deg')
    ran_cat = treecorr.Catalog(ra=random["RA"], dec=random["DEC"], ra_units='deg', dec_units='deg')
    nn = treecorr.NNCorrelation(min_sep=min_sep, max_sep=max_sep, nbins=nbins,
                                sep_units=units, bin_slop=bin_slop)
    rr = treecorr.NNCorrelation(min_sep=min_sep, max_sep=max_sep, nbins=nbins,
                                sep_units=units, bin_slop=bin_slop)
    dr = treecorr.NNCorrelation(min_sep=min_sep, max_sep=max_sep, nbins=nbins,
                                sep_units=units, bin_slop=bin_slop)
    nn.process(gal_cat, gal_cat, num_threads=60)
    rr.process(ran_cat, ran_cat, num_threads=60)
    dr.process(gal_cat, ran_cat, num_threads=60)
    xi, varxi = nn.calculateXi(rr, dr)
    theta = nn.meanr
    return theta, xi, varxi
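# A minimal usage sketch for auto_corr() with synthetic inputs. The column names follow
# the function ('RA', 'DEC', optional 'W'); the synthetic coordinates and the config
# values themselves are illustrative assumptions.
import numpy as np

rng = np.random.default_rng(1)
gal = {'RA': rng.uniform(0, 5, 2000), 'DEC': rng.uniform(-2, 2, 2000)}
random = {'RA': rng.uniform(0, 5, 20000), 'DEC': rng.uniform(-2, 2, 20000)}
config = {'min_sep': 0.5, 'max_sep': 60.0, 'units': 'arcmin', 'nbins': 15, 'bin_slop': 0.1}
theta, xi, varxi = auto_corr(gal, random, config, weight="False")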
def NNCorr(galaxy, rand_galaxy):
    # galaxy
    ra = galaxy[:, 0]
    dec = galaxy[:, 1]
    z = galaxy[:, 2]
    l, b = ecliptic2galactic(ra, dec)
    raa, decc, k = galactic_pixel(l, b)
    galaxy_catalogue = tc.Catalog(ra=raa, dec=decc, k=k, ra_units="deg", dec_units="deg")
    gg = tc.NNCorrelation(nbins=NBINS, min_sep=MIN_SEP, max_sep=MAX_SEP,
                          bin_slop=0.01, verbose=0, sep_units='degrees')
    gg.process(galaxy_catalogue)
    # rand_galaxy
    rra = rand_galaxy[:, 0]
    rdec = rand_galaxy[:, 1]
    rz = rand_galaxy[:, 2]
    rl, rb = ecliptic2galactic(rra, rdec)
    rraa, rdecc, rk = galactic_pixel(rl, rb)
    rand_gal_catalogue = tc.Catalog(ra=rraa, dec=rdecc, k=rk, ra_units="deg", dec_units="deg")
    rr = tc.NNCorrelation(nbins=NBINS, min_sep=MIN_SEP, max_sep=MAX_SEP,
                          bin_slop=0.01, verbose=0, sep_units='degrees')
    rr.process(rand_gal_catalogue)
    # xi-rr
    xi, varxi = gg.calculateXi(rr)
    r = np.exp(gg.meanlogr)
    sig = np.sqrt(varxi)
    plt.plot(r, xi, color='blue')
    plt.plot(r, -xi, color='blue', ls=':')
    plt.loglog()
    plt.savefig("/home/yhwu/pic/xi_rr.png", dpi=1000)
    plt.clf()
    # xi-dr
    dr = tc.NNCorrelation(nbins=NBINS, min_sep=MIN_SEP, max_sep=MAX_SEP,
                          bin_slop=0.01, verbose=0, sep_units='degrees')
    dr.process(galaxy_catalogue, rand_gal_catalogue)
    xi, varxi = gg.calculateXi(rr, dr)
    r = np.exp(gg.meanlogr)
    sig = np.sqrt(varxi)
    plt.plot(r, xi, color='blue')
    plt.plot(r, -xi, color='blue', ls=':')
    plt.loglog()
    plt.savefig("/home/yhwu/pic/xi_dr.png", dpi=1000)
def AutoCorrelationFunction(DataCatalog, RandCatalog):
    nn = treecorr.NNCorrelation(min_sep=0.01, max_sep=10, bin_size=0.2, sep_units='degrees')
    nn.process(DataCatalog)
    rr = treecorr.NNCorrelation(min_sep=0.01, max_sep=10, bin_size=0.2, sep_units='degrees')
    rr.process(RandCatalog)
    dr = treecorr.NNCorrelation(min_sep=0.01, max_sep=10, bin_size=0.2, sep_units='degrees')
    dr.process(DataCatalog, RandCatalog)
    r = numpy.exp(nn.meanlogr)
    xi, varxi = nn.calculateXi(rr, dr)
    sig = numpy.sqrt(varxi)
    return r, xi, varxi, sig
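# A minimal usage sketch for AutoCorrelationFunction(); the synthetic RA/Dec arrays and
# the random-to-data ratio are illustrative assumptions, not part of the original code.
import numpy
import treecorr

rng = numpy.random.default_rng(2)
data_cat = treecorr.Catalog(ra=rng.uniform(0, 20, 5000), dec=rng.uniform(-10, 10, 5000),
                            ra_units='degrees', dec_units='degrees')
rand_cat = treecorr.Catalog(ra=rng.uniform(0, 20, 50000), dec=rng.uniform(-10, 10, 50000),
                            ra_units='degrees', dec_units='degrees')
r, xi, varxi, sig = AutoCorrelationFunction(data_cat, rand_cat)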
def _calc_2h_cc(self, data1, data2, mask1, mask2, save=False, verbose=False, weights=None,
                nbins=20, min_sep=44, max_sep=6e3, slop=0.1):
    w2h_cc = []
    group_ids = np.unique(data1['groupId'])
    N = len(group_ids)
    for ig1 in group_ids:
        if verbose:
            print('%d/%d' % (ig1 + 1, N))
        maski = mask1 & (data1['groupId'] == ig1)
        cat_i = treecorr.Catalog(w=weights, x=data1['x'][maski], y=data1['y'][maski], z=data1['z'][maski])
        # Select all of the centrals that are not part of the same halo
        maskj = mask1 & (data1['groupId'] != ig1)
        cat_j = treecorr.Catalog(w=weights, x=data2['x'][maskj], y=data2['y'][maskj], z=data2['z'][maskj])
        rx_j = (np.random.random(size=data2['x'][maskj].size) - 0.5) * (data2['x'][maskj].max() - data2['x'][maskj].min()) + data2['x'][maskj].mean()
        ry_j = (np.random.random(size=data2['x'][maskj].size) - 0.5) * (data2['y'][maskj].max() - data2['y'][maskj].min()) + data2['y'][maskj].mean()
        rz_j = (np.random.random(size=data2['x'][maskj].size) - 0.5) * (data2['z'][maskj].max() - data2['z'][maskj].min()) + data2['z'][maskj].mean()
        rancat_j = treecorr.Catalog(x=rx_j, y=ry_j, z=rz_j)
        f = 10000
        rx_i = (np.random.random(size=data1['x'][maski].size * f) - 0.5) * (data2['x'][maskj].max() - data2['x'][maskj].min()) + data2['x'][maskj].mean()
        ry_i = (np.random.random(size=data1['x'][maski].size * f) - 0.5) * (data2['y'][maskj].max() - data2['y'][maskj].min()) + data2['y'][maskj].mean()
        rz_i = (np.random.random(size=data1['x'][maski].size * f) - 0.5) * (data2['z'][maskj].max() - data2['z'][maskj].min()) + data2['z'][maskj].mean()
        rancat_i = treecorr.Catalog(x=rx_i, y=ry_i, z=rz_i)
        nn = treecorr.NNCorrelation(nbins=nbins, min_sep=min_sep, max_sep=max_sep, bin_slop=slop)
        nr = treecorr.NNCorrelation(nbins=nbins, min_sep=min_sep, max_sep=max_sep, bin_slop=slop)
        rn = treecorr.NNCorrelation(nbins=nbins, min_sep=min_sep, max_sep=max_sep, bin_slop=slop)
        rr = treecorr.NNCorrelation(nbins=nbins, min_sep=min_sep, max_sep=max_sep, bin_slop=slop)
        nn.process(cat_i, cat_j)        #, metric='Periodic')
        nr.process(rancat_j, cat_i)     #, metric='Periodic')
        rn.process(cat_j, rancat_i)     #, metric='Periodic')
        rr.process(rancat_i, rancat_j)  #, metric='Periodic')
        R_2h_cc = np.exp(nn.meanlogr)
        coeff = 1. / f
        w = (nn.weight - nr.weight - coeff * rn.weight + coeff * rr.weight) / (coeff * rr.weight)
        w2h_cc.append(w)
    if save:
        print('Storing...')
        np.savetxt('R_2h_cc.txt', R_2h_cc)
        np.savetxt('w2h_cc.txt', w2h_cc)
    return R_2h_cc, w2h_cc
def _calc_2h_cs(self, data1, data2, mask1, mask2, save=False, verbose=False, weights=None,
                nbins=20, min_sep=44, max_sep=6e3, slop=0.1):
    """Given two numpy arrays of positions, compute the two-halo central-satellite
    real-space correlation."""
    w2h_cs = []
    group_ids = np.unique(data1['groupId'])
    N = len(group_ids)
    for ig1 in group_ids:
        if verbose:
            print('%d/%d' % (ig1 + 1, N))
        maski = mask1 & (data1['groupId'] == ig1)
        cat_i = treecorr.Catalog(w=weights, x=data1['x'][maski], y=data1['y'][maski], z=data1['z'][maski])
        maskj = mask2 & (data2['groupId'] != ig1)
        cat_j = treecorr.Catalog(w=weights, x=data2['x'][maskj], y=data2['y'][maskj], z=data2['z'][maskj])
        rx_j = (np.random.random(size=data2['x'][maskj].size) - 0.5) * (data2['x'][maskj].max() - data2['x'][maskj].min()) + data2['x'][maskj].mean()
        ry_j = (np.random.random(size=data2['x'][maskj].size) - 0.5) * (data2['y'][maskj].max() - data2['y'][maskj].min()) + data2['y'][maskj].mean()
        rz_j = (np.random.random(size=data2['x'][maskj].size) - 0.5) * (data2['z'][maskj].max() - data2['z'][maskj].min()) + data2['z'][maskj].mean()
        rancat_j = treecorr.Catalog(x=rx_j, y=ry_j, z=rz_j)
        f = 10000
        rx_i = (np.random.random(size=data1['x'][maski].size * f) - 0.5) * (data2['x'][maskj].max() - data2['x'][maskj].min()) + data2['x'][maskj].mean()
        ry_i = (np.random.random(size=data1['x'][maski].size * f) - 0.5) * (data2['y'][maskj].max() - data2['y'][maskj].min()) + data2['y'][maskj].mean()
        rz_i = (np.random.random(size=data1['x'][maski].size * f) - 0.5) * (data2['z'][maskj].max() - data2['z'][maskj].min()) + data2['z'][maskj].mean()
        rancat_i = treecorr.Catalog(x=rx_i, y=ry_i, z=rz_i)
        nn = treecorr.NNCorrelation(nbins=nbins, min_sep=min_sep, max_sep=max_sep, bin_slop=slop)
        nr = treecorr.NNCorrelation(nbins=nbins, min_sep=min_sep, max_sep=max_sep, bin_slop=slop)
        rn = treecorr.NNCorrelation(nbins=nbins, min_sep=min_sep, max_sep=max_sep, bin_slop=slop)
        rr = treecorr.NNCorrelation(nbins=nbins, min_sep=min_sep, max_sep=max_sep, bin_slop=slop)
        nn.process(cat_i, cat_j)        #, metric='Periodic')
        nr.process(rancat_j, cat_i)     #, metric='Periodic')
        rn.process(cat_j, rancat_i)     #, metric='Periodic')
        rr.process(rancat_i, rancat_j)  #, metric='Periodic')
        R_2h_cs = np.exp(nn.meanlogr)
        coeff = 1. / f
        w = (nn.weight - nr.weight - coeff * rn.weight + coeff * rr.weight) / (coeff * rr.weight)
        w2h_cs.append(w)
    if save:
        print('Storing...')
        np.savetxt('R_2h_cs.txt', R_2h_cs)
        np.savetxt('w2h_cs.txt', w2h_cs)
    return R_2h_cs, w2h_cs