def calibrate(data, method="grid", config=None, smoothing=3, sbins=16, rbins=16, nthread=1, rank=0):
    rbf = (method.lower()=="rbf")

    # Set up the wrappers
    nbc = cal.nbc()
    nbc_disc = cal.nbc()
    nbc.res = data.res
    del(data.res)
    gc.collect()
    nbc_bulge = cal.nbc()

    # Split the catalogue by morphology
    nbc_bulge.res = nbc.res[nbc.res["is_bulge"].astype(bool)]
    nbc_disc.res = nbc.res[np.invert(nbc.res["is_bulge"].astype(bool))]

    # Fit or interpolate
    if method=="polynomial":
        nbc_disc.fit("m", table="%s/nbc_data/bias_table_hoopoe-v1-fullcat-disc-%dsbins-%drbins.fits"%(config["output"]["dir"], sbins, rbins))
        nbc_disc.fit("a", table="%s/nbc_data/bias_table_hoopoe-v1-fullcat-disc-%dsbins-%drbins.fits"%(config["output"]["dir"], sbins, rbins))
        nbc_bulge.fit("m", table="%s/nbc_data/bias_table_hoopoe-v1-fullcat-bulge-%dsbins-%drbins.fits"%(config["output"]["dir"], sbins, rbins))
        nbc_bulge.fit("a", table="%s/nbc_data/bias_table_hoopoe-v1-fullcat-bulge-%dsbins-%drbins.fits"%(config["output"]["dir"], sbins, rbins))
    elif method=="rbf":
        nbc_disc.fit_rbf(table="%s/nbc_data/bias_table_hoopoe-v1-fullcat-disc-%dsbins-%drbins.fits"%(config["output"]["dir"], sbins, rbins), smoothing=smoothing)
        nbc_bulge.fit_rbf(table="%s/nbc_data/bias_table_hoopoe-v1-fullcat-bulge-%dsbins-%drbins.fits"%(config["output"]["dir"], sbins, rbins), smoothing=smoothing)
    elif method=="grid":
        nbc_disc.bias_grid = fi.FITS("%s/nbc_data/bias_table_hoopoe-v1-fullcat-disc-%dsbins-%drbins.fits"%(config["output"]["dir"], sbins, rbins))[1].read()
        nbc_bulge.bias_grid = fi.FITS("%s/nbc_data/bias_table_hoopoe-v1-fullcat-bulge-%dsbins-%drbins.fits"%(config["output"]["dir"], sbins, rbins))[1].read()

    # Apply to get a bias correction for each galaxy in the data
    nbc_disc.apply(split_half=0, scheme=method)
    print "Done disc calibration"
    nbc_bulge.apply(split_half=0, scheme=method)
    print "Done bulge calibration"

    # Merge the bulge and disc corrections back into the parent catalogue
    print "Merging bulge/disc results...",
    nbc.res = arr.add_col(nbc.res, "m")
    nbc.res = arr.add_col(nbc.res, "c1")
    nbc.res = arr.add_col(nbc.res, "c2")
    nbc.res = arr.add_col(nbc.res, "a")

    bulge = nbc.res["is_bulge"].astype(bool)
    disc = np.invert(bulge)
    nbc.res["m"][disc] = nbc_disc.res["m"]
    nbc.res["c1"][disc] = nbc_disc.res["c1"]
    nbc.res["c2"][disc] = nbc_disc.res["c2"]
    nbc.res["a"][disc] = nbc_disc.res["a"]
    nbc.res["m"][bulge] = nbc_bulge.res["m"]
    nbc.res["c1"][bulge] = nbc_bulge.res["c1"]
    nbc.res["c2"][bulge] = nbc_bulge.res["c2"]
    nbc.res["a"][bulge] = nbc_bulge.res["a"]
    print "done"

    if not os.path.exists(os.path.dirname(config["output"]["dir"])):
        os.system("mkdir -p %s"%os.path.dirname(config["output"]["dir"]))

    print "Saving calibrated catalogue to %s"%config["output"]["filename"]
    nbc.export(filename="%s/%s"%(config["output"]["dir"], config["output"]["filename"]))
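# Illustrative usage sketch for calibrate() above. Only the config keys the function
# actually reads are shown ("output" -> "dir"/"filename"); the path and filename values
# are placeholders, and the data object and the cal/fi/arr modules come from the
# surrounding package, so the call at the end is indicative rather than a standalone script.
example_output_config = {
    "output": {
        "dir": "/path/to/calibration",      # hypothetical: must contain nbc_data/bias_table_*.fits
        "filename": "y1-calibrated.fits",   # hypothetical output catalogue name
    },
}
# calibrate(data, method="grid", config=example_output_config, sbins=16, rbins=16)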
def get_phi_col(self, neighbour_cat):
    print "Computing ellipticity-ellipticity misalignment between each object and its nearest neighbour."
    eres = self.res["e1"] + 1j*self.res["e2"]
    eneigh = neighbour_cat.res["e1"] + 1j*neighbour_cat.res["e2"]
    phi_res = np.angle(eres)
    phi_neigh = np.angle(eneigh)
    dphi = (phi_res - phi_neigh)/2

    # Fold into the range [-pi/2, pi/2]
    sel1 = dphi > np.pi/2
    sel2 = dphi < -1.0*np.pi/2
    dphi[sel1] = np.pi/2 - (dphi[sel1] - np.pi/2)
    dphi[sel2] = -1.*np.pi/2 - (dphi[sel2] + np.pi/2)

    # Store in units of pi radians
    dphi /= np.pi

    self.res = arr.add_col(self.res, "dphi", dphi)
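# Minimal, self-contained check of the angle folding used in get_phi_col(): values of
# dphi outside [-pi/2, pi/2] are reflected back into that range and then expressed in
# units of pi radians. Only numpy is needed; the input values are made up for illustration.
import numpy as np

dphi_demo = np.array([0.3, 2.0, -2.5, np.pi/2])
sel1 = dphi_demo > np.pi/2
sel2 = dphi_demo < -1.0*np.pi/2
dphi_demo[sel1] = np.pi/2 - (dphi_demo[sel1] - np.pi/2)
dphi_demo[sel2] = -1.*np.pi/2 - (dphi_demo[sel2] + np.pi/2)
dphi_demo /= np.pi
print(dphi_demo)   # every value now lies in [-0.5, 0.5]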
def get_combined_calibration(self, nbc_disc, nbc_bulge, split_half=2, names=["m", "c1", "c2"]):
    print "Will combine bulge and disc calibration fits."
    if split_half==0:
        for bias in names:
            self.res = arr.add_col(self.res, bias, np.zeros_like(self.res["e1"]))
            bulge = self.res["is_bulge"].astype(bool)
            print "column : %s, bulge : %d/%d, disc : %d/%d"%(bias, self.res[bulge].size, self.res.size, self.res[np.invert(bulge)].size, self.res.size)
            try:
                self.res[bias][bulge] = nbc_bulge.res[bias][bulge]
            except:
                import pdb ; pdb.set_trace()
            self.res[bias][np.invert(bulge)] = nbc_disc.res[bias][np.invert(bulge)]
    else:
        # Build the same merge as a code string so it can act on res1 or res2,
        # depending on which half of the split catalogue is being calibrated
        com = """
for i, bias in enumerate(names):
    bulge = self.res['is_bulge'].astype(bool)
    if i==0:
        print 'bulge :', self.res[bulge].size, 'disc : ', self.res[np.invert(bulge)].size, 'total : ', self.res.size
    self.res = arr.add_col(self.res, bias, np.zeros_like(self.res['e1']))
    print 'column : ', bias
    self.res[bias][bulge] = nbc_bulge.res[bias][bulge]
    self.res[bias][np.invert(bulge)] = nbc_disc.res[bias][np.invert(bulge)]""".replace("res", "res%d"%split_half)
        exec(com)
    print "done"
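# The split_half>0 branch above builds the merge as a string and exec's it so the same
# code can act on self.res1 or self.res2. A getattr-based sketch of the same merge is
# shown below purely for illustration (it assumes the resN attributes exist on all three
# objects and reuses this module's np/arr imports); it is not the pattern the class uses.
def combine_halves_sketch(self, nbc_disc, nbc_bulge, split_half=2, names=["m", "c1", "c2"]):
    res = getattr(self, "res%d"%split_half)
    disc_res = getattr(nbc_disc, "res%d"%split_half)
    bulge_res = getattr(nbc_bulge, "res%d"%split_half)
    bulge = res["is_bulge"].astype(bool)
    for bias in names:
        # add_col returns a new array, so reassign each time
        res = arr.add_col(res, bias, np.zeros_like(res["e1"]))
        res[bias][bulge] = bulge_res[bias][bulge]
        res[bias][np.invert(bulge)] = disc_res[bias][np.invert(bulge)]
    setattr(self, "res%d"%split_half, res)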
def get_beta_col(self, ncat):
    print "Computing ellipticity-position misalignment angle between each object and its nearest neighbour."
    dx = self.truth["ra"] - ncat.truth["ra"]
    dy = self.truth["dec"] - ncat.truth["dec"]

    # Position angle of the separation vector, in sky coordinates
    # np.arctan restricts this to (-pi/2, pi/2)
    theta = np.arctan(dy/dx)

    # Position angle of the central galaxy in stamp coordinates
    eres = self.res["e1"] + 1j*self.res["e2"]
    phi = np.angle(eres)/2

    # cos(beta) = Rneigh . ecent
    # where Rneigh, ecent are unit vectors
    beta = (phi - theta)
    np.putmask(beta, np.invert(np.isfinite(beta)), 0)

    # Impose bounds as above, folding into [-pi/2, pi/2]
    sel1 = beta > np.pi/2
    sel2 = beta < -1.0*np.pi/2
    beta[sel1] = np.pi/2 - (beta[sel1] - np.pi/2)
    beta[sel2] = -1.*np.pi/2 - (beta[sel2] + np.pi/2)

    # Store in units of pi radians
    beta /= np.pi

    self.res = arr.add_col(self.res, "dbeta", beta)
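# Side note on the separation angle above: np.arctan(dy/dx) only returns values in
# (-pi/2, pi/2) and is undefined at dx=0, whereas np.arctan2(dy, dx) keeps the full
# quadrant information in (-pi, pi]. A tiny self-contained comparison (illustrative only;
# get_beta_col() deliberately folds everything into [-pi/2, pi/2] afterwards anyway):
import numpy as np

dx = np.array([1.0, -1.0, -1.0])
dy = np.array([1.0,  1.0, -1.0])
print(np.arctan(dy/dx))    # [ 0.785 -0.785  0.785]  - quadrant information lost
print(np.arctan2(dy, dx))  # [ 0.785  2.356 -2.356]  - quadrant information kept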
def apply(self, names=["m","a"], split_half=2, use_rbf=False):
    if use_rbf:
        print "Using RBF interpolation"
    else:
        print "Using polynomial fit"

    # Choose input data
    if split_half>0:
        exec "catalogue_to_calibrate = self.res%d"%split_half
    else:
        catalogue_to_calibrate = self.res

    # Apply the best-fit calibration coefficients
    for bias_name in names:
        print "creating column %s"%bias_name
        if not use_rbf:
            # Unpack the fitted polynomial coefficients into local variables a0..a17,
            # then evaluate the fitting function on (snr, rgpp/rp)
            com2 = "a0 " + ", a%d "*self.optimised_coefficients_m[1:].size + "=tuple(self.optimised_coefficients_%s)"%bias_name
            com2 = com2%tuple(np.linspace(1, 17, 17))
            exec com2
            com = "eval_%s(np.array([catalogue_to_calibrate['snr'], catalogue_to_calibrate['mean_rgpp_rp']]).T"%bias_name + ", a%d "*self.optimised_coefficients_m.size + ")"
            com = com%tuple(np.linspace(0, 17, 18))
            exec "bias=%s"%com
        else:
            try:
                bias = self.do_rbf_interpolation(bias_name, catalogue_to_calibrate)
            except:
                import pdb ; pdb.set_trace()

        if split_half>0:
            exec "self.res%d = arr.add_col(self.res%d, '%s', bias)"%(split_half, split_half, bias_name)
            if bias_name=="a":
                exec "self.res%d = arr.add_col(self.res%d, 'c1', bias*self.res%d['mean_psf_e1_sky'])"%(split_half, split_half, split_half)
                exec "self.res%d = arr.add_col(self.res%d, 'c2', bias*self.res%d['mean_psf_e2_sky'])"%(split_half, split_half, split_half)
        else:
            self.res = arr.add_col(self.res, bias_name, bias)
            if bias_name=="a":
                self.res = arr.add_col(self.res, "c1", bias*self.res["mean_psf_e1_sky"])
                self.res = arr.add_col(self.res, "c2", bias*self.res["mean_psf_e2_sky"])

    print "Finished calibration"
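# The RBF branch of apply() defers to self.do_rbf_interpolation(). As a rough, generic
# illustration of radial-basis-function interpolation of a bias measured on a coarse
# (snr, rgpp_rp) grid - not the class's actual implementation, and with made-up numbers -
# scipy can be used like this (interpolating in log10(snr) is just one possible choice):
import numpy as np
from scipy.interpolate import Rbf

# fake bias table: m measured in a handful of (snr, size) cells
snr_grid  = np.array([15., 30., 60., 120., 15., 30., 60., 120.])
rgpp_grid = np.array([1.2, 1.2, 1.2, 1.2, 1.6, 1.6, 1.6, 1.6])
m_grid    = np.array([-0.12, -0.06, -0.03, -0.01, -0.08, -0.04, -0.02, -0.005])

interpolator = Rbf(np.log10(snr_grid), rgpp_grid, m_grid, smooth=3)

# evaluate the correction at the position of each galaxy
snr_gal, rgpp_gal = np.array([25., 80.]), np.array([1.3, 1.5])
m_gal = interpolator(np.log10(snr_gal), rgpp_gal)
print(m_gal)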
def match_to_faint(self):
    import copy
    from sklearn.neighbors import NearestNeighbors

    faint = di.load_truth(self.truth_path, faint=True, add_tilename_col=True)

    indices = np.zeros(self.res.size)
    distances = np.zeros(self.res.size)
    lookup = np.linspace(0, faint.size-1, faint.size).astype(int)

    tiles = np.unique(self.truth["tilename"])
    for it in tiles:
        print "Matching in sky coordinates, tile %s"%it
        sel0 = faint["tilename"]==it
        sel1 = self.truth["tilename"]==it

        # All positions where an object was simulated
        # Restrict the search to this tile
        x_pool = faint["ra"][sel0]
        y_pool = faint["dec"][sel0]
        xy_pool = np.vstack((x_pool, y_pool))

        # Positions of those objects for which we have im3shape results
        # We want to find neighbours for these objects
        x_tar = self.truth["ra"][sel1]
        y_tar = self.truth["dec"][sel1]
        xy_tar = np.vstack((x_tar, y_tar))

        # Build a tree using the pool
        nbrs = NearestNeighbors(n_neighbors=2, algorithm="kd_tree", metric="euclidean").fit(xy_pool.T)
        # Query it for the target catalogue
        d, i = nbrs.kneighbors(xy_tar.T)
        distances[sel1], indices[sel1] = d.T[1], lookup[sel0][i.T[1]]

    # Build a neighbour catalogue aligned row-by-row with the target catalogue
    neighbour_cat = copy.deepcopy(self)
    neighbour_cat.res["id"] = faint[indices.astype(int)]["associated_object"]
    neighbour_cat.res["coadd_objects_id"] = faint[indices.astype(int)]["associated_object"]
    neighbour_cat.res["e1"] = faint[indices.astype(int)]["intrinsic_e1"] + faint[indices.astype(int)]["true_g1"]
    neighbour_cat.res["e2"] = faint[indices.astype(int)]["intrinsic_e2"] + faint[indices.astype(int)]["true_g2"]
    neighbour_cat.res["ra"] = faint[indices.astype(int)]["ra"]
    neighbour_cat.res["dec"] = faint[indices.astype(int)]["dec"]
    neighbour_cat.truth = faint[indices.astype(int)]
    neighbour_cat.truth = arr.add_col(neighbour_cat.truth, "nearest_neighbour_pixel_dist", distances)

    return neighbour_cat
def setup(load_sim, load_data, config, verbose=True):
    im3shape_columns = ["e1", "e2", "mean_hsm_psf_e1_sky", "mean_hsm_psf_e2_sky", "mean_hsm_psf_sigma", "snr", "mean_rgpp_rp", "mean_mask_fraction", "radius", "coadd_objects_id", "is_bulge", "bulge_flux", "disc_flux", "info_flag", "mag_auto_r"]
    truth_columns = ['DES_id', 'cosmos_ident', 'cosmos_photoz', 'sextractor_pixel_offset', 'true_g1', 'true_g2', 'intrinsic_e1', 'intrinsic_e2', 'ra', 'dec', 'hlr', 'mag', 'flux']

    # Load the y1 data
    if load_data:
        if verbose:
            print "Loading data %s"%config["inputs"]["i3s"]
        y1v2 = s.shapecat(res=config["inputs"]["i3s"])
        y1v2.load(truth=False, prune=True, cols=[im3shape_columns, truth_columns])
        y1v2.res = y1v2.res[y1v2.res["info_flag"]==0]

        # This should always be true, but just in case...
        sel = ((y1v2.res["snr"] > 12) & (y1v2.res["snr"] < 200) & (y1v2.res["mean_rgpp_rp"] > 1.13) & (y1v2.res["mean_rgpp_rp"] < 3.0))
        y1v2.res = y1v2.res[sel]
    else:
        if verbose:
            print "Not loading data (either it's been loaded already or it's not needed)"
        y1v2 = None  # keep the return value defined

    # And the simulation results
    if load_sim:
        if verbose:
            print "Loading simulation %s"%config["inputs"]["hoopoe"]
        hoopoe = s.shapecat(res=config["inputs"]["hoopoe"], truth=config["inputs"]["hoopoe"])
        hoopoe.res = fi.FITS(hoopoe.res_path)["i3s"].read()
        hoopoe.truth = fi.FITS(hoopoe.truth_path)["truth"].read()

        sel = np.isfinite(hoopoe.res["mean_hsm_psf_e1_sky"]) & np.isfinite(hoopoe.res["mean_hsm_psf_e2_sky"])
        hoopoe.truth = hoopoe.truth[sel]
        hoopoe.res = hoopoe.res[sel]

        if (config["selection"]["mask"].lower()!="none"):
            apply_selection = True
            selection = fi.FITS(config["selection"]["mask"])["sel"].read().astype(bool)
            weights = fi.FITS(config["selection"]["mask"])["wts"].read()
            if verbose:
                print "Applying additional cuts and weights from %s"%config["selection"]["mask"]
            hoopoe.res = hoopoe.res[selection]
            hoopoe.truth = hoopoe.truth[selection]
            weights = weights[selection]

        if (not config["selection"]["reweight"]):
            if verbose:
                print "Ignoring weights."
            weights = np.ones(hoopoe.res["coadd_objects_id"].size)

        if not config["calibration"]["zbins"]:
            if verbose:
                print "Using DES redshift bins"
            exclude = (hoopoe.res["des_bin"]!=0)
            hoopoe.truth = hoopoe.truth[exclude]
            weights = weights[exclude]
            hoopoe.res = hoopoe.res[exclude]
        else:
            if verbose:
                print "Using tophat redshift bins"

        if (config["selection"]["weights"].lower()!="none"):
            if verbose:
                print "Using im3shape weights from %s"%config["selection"]["weights"].lower()
            im3shape_weights = fi.FITS(config["selection"]["weights"])[-1].read()
            hoopoe.res = arr.add_col(hoopoe.res, "weight", im3shape_weights)

        if (config["selection"]["resample"]):
            print "Will apply resampling to match data"
            edat = np.sqrt(y1v2.res["e1"]**2 + y1v2.res["e2"]**2)
            eh = np.sqrt(hoopoe.res["e1"]**2 + hoopoe.res["e2"]**2)
            subsample = di.get_selection_to_match(edat, eh, nbins=35)
            hoopoe.res = hoopoe.res[subsample]
            hoopoe.truth = hoopoe.truth[subsample]
            weights = weights[subsample]

        print "Final selection : %d galaxies"%hoopoe.res["coadd_objects_id"].size
        print "Final selection : %d unique COSMOS IDs"%np.unique(hoopoe.truth["cosmos_ident"]).size
    else:
        if verbose:
            print "Not loading simulation."
        hoopoe, weights = None, None  # keep the return values defined

    return hoopoe, weights, y1v2
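# Illustrative config for setup() above. Only the keys this function actually reads are
# shown; all paths and values are placeholders, and in the real pipeline this structure
# is presumably read from a config file rather than defined inline. The "output" block
# used by calibrate() (shown earlier) would sit alongside these sections.
example_setup_config = {
    "inputs": {
        "i3s": "/path/to/y1v2-im3shape.fits",      # hypothetical data catalogue
        "hoopoe": "/path/to/hoopoe-results.fits",  # hypothetical simulation results/truth
    },
    "selection": {
        "mask": "none",        # or a FITS file with "sel" and "wts" extensions
        "reweight": False,
        "weights": "none",     # or a FITS file of im3shape weights
        "resample": False,
    },
    "calibration": {
        "zbins": True,         # False -> use the DES redshift bins
    },
}
# hoopoe, weights, y1v2 = setup(load_sim=True, load_data=True, config=example_setup_config)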
def get_neighbours(self):
    import copy
    from sklearn.neighbors import NearestNeighbors
    import fitsio as fi

    fulltruth = di.load_truth(self.truth_path)
    reference = fi.FITS("/home/samuroff/y1a1_16tiles_positions.fits")[1].read()
    fulltruth, reference = di.match_results(fulltruth, reference, name1="coadd_objects_id", name2="DES_id")
    fulltruth["ra"] = reference["ra"]
    fulltruth["dec"] = reference["dec"]

    meds_path = self.truth_path.replace("truth", "meds/*/*")
    meds_info = di.get_pixel_cols(meds_path)

    pool_of_possible_neighbours, fulltruth = di.match_results(meds_info, fulltruth, name1="DES_id", name2="coadd_objects_id")
    fulltruth = arr.add_col(fulltruth, "ix", pool_of_possible_neighbours["ix"])
    fulltruth = arr.add_col(fulltruth, "iy", pool_of_possible_neighbours["iy"])
    fulltruth = arr.add_col(fulltruth, "tile", pool_of_possible_neighbours["tile"])

    objects_needing_neighbours, self.truth = di.match_results(meds_info, self.truth, name1="DES_id", name2="coadd_objects_id")
    self.truth = arr.add_col(self.truth, "ix", objects_needing_neighbours["ix"])
    self.truth = arr.add_col(self.truth, "iy", objects_needing_neighbours["iy"])
    self.truth = arr.add_col(self.truth, "tile", objects_needing_neighbours["tile"])

    cut = (fulltruth["sextractor_pixel_offset"]<1.0) & (fulltruth["ra"]!=0.0)
    fulltruth = fulltruth[cut]
    pool_of_possible_neighbours = pool_of_possible_neighbours[cut]

    indices = np.zeros(self.res.size)
    distances = np.zeros(self.res.size)
    lookup = np.linspace(0, fulltruth.size-1, fulltruth.size).astype(int)

    tiles = np.unique(self.truth["tile"])
    for it in tiles:
        print "Matching in pixel coordinates, tile %s"%it
        sel0 = pool_of_possible_neighbours["tile"]==it
        sel1 = objects_needing_neighbours["tile"]==it

        # All positions where an object was simulated
        # Restrict the search to this tile
        x_pool = pool_of_possible_neighbours["ix"][sel0]
        y_pool = pool_of_possible_neighbours["iy"][sel0]
        xy_pool = np.vstack((x_pool, y_pool))

        # Positions of those objects for which we have im3shape results
        # We want to find neighbours for these objects
        x_tar = self.truth["ix"][sel1]
        y_tar = self.truth["iy"][sel1]
        xy_tar = np.vstack((x_tar, y_tar))

        # Build a tree using the pool
        nbrs = NearestNeighbors(n_neighbors=2, algorithm="kd_tree", metric="euclidean").fit(xy_pool.T)
        # Query it for the target catalogue
        d, i = nbrs.kneighbors(xy_tar.T)
        distances[sel1], indices[sel1] = d.T[1], lookup[sel0][i.T[1]]

    neighbour_cat = copy.deepcopy(self)
    neighbour_cat.res["id"] = fulltruth[indices.astype(int)]["DES_id"]
    neighbour_cat.res["coadd_objects_id"] = fulltruth[indices.astype(int)]["DES_id"]
    neighbour_cat.res["e1"] = fulltruth[indices.astype(int)]["intrinsic_e1"] + fulltruth[indices.astype(int)]["true_g1"]
    neighbour_cat.res["e2"] = fulltruth[indices.astype(int)]["intrinsic_e2"] + fulltruth[indices.astype(int)]["true_g2"]
    # Replace unphysical (sentinel) ellipticities with the neighbour's PSF ellipticity
    np.putmask(neighbour_cat.res["e1"], neighbour_cat.res["e1"]<-1, fulltruth[indices.astype(int)]["mean_psf_e1"])
    np.putmask(neighbour_cat.res["e2"], neighbour_cat.res["e2"]<-1, fulltruth[indices.astype(int)]["mean_psf_e2"])
    neighbour_cat.res["ra"] = fulltruth[indices.astype(int)]["ra"]
    neighbour_cat.res["dec"] = fulltruth[indices.astype(int)]["dec"]
    neighbour_cat.truth = fulltruth[indices.astype(int)]
    neighbour_cat.truth["nearest_neighbour_pixel_dist"] = distances

    return neighbour_cat
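# Why n_neighbors=2 in the matching loops above: the pool of candidate neighbours
# generally contains the target objects themselves, so the first match returned is the
# object itself at zero distance and the second column is the genuine nearest neighbour.
# A self-contained toy version of the pattern, using only numpy and scikit-learn:
import numpy as np
from sklearn.neighbors import NearestNeighbors

xy = np.array([[0., 0.], [1., 0.], [5., 5.]])
nbrs = NearestNeighbors(n_neighbors=2, algorithm="kd_tree", metric="euclidean").fit(xy)
d, i = nbrs.kneighbors(xy)
print(d[:, 0])   # [0. 0. 0.]  - each object matches itself first
print(i[:, 1])   # [1 0 1]     - the second column gives the true nearest neighbour
print(d[:, 1])   # corresponding separations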
def multinest_cornerplot(names, chains, colours=["purple"]*10, kde=None, plots=None, lims=[(None,None)]*10, blind=[False]*10, ls=["-"]*10, fill=[False]*10, alpha=[0.2]*10, labels=[None]*10, fontsize=18, trim=0):
    plt.style.use("y1a1")
    plt.switch_backend("pdf")
    import tools.arrays as arr

    # Attach the multinest weights to the sample arrays
    for j, chain in enumerate(chains):
        vals = arr.add_col(np.array(chain.samples), 'weight', chain.weight)
        chains[j] = vals

    parameter_labels = {
        'a_gi': r'$A_\mathrm{GI}$',
        'a_ii': r'$A_\mathrm{II}$',
        'c1': r'$A_1$',
        'c2': r'$A_2$',
        'alpha_ii': r'$\eta_\mathrm{II}$',
        'alpha_gi': r'$\eta_\mathrm{GI}$',
        'alpha_1': r'$\eta_1$',
        'alpha_2': r'$\eta_2$',
        's8': r'$S_8$',
        'omega_m': r'$\Omega_\mathrm{m}$',
        'w': r'$w_0$',
        'bias_ta': r'$b^\mathrm{src}_g$',
        'bias_tt': r'$b^\mathrm{src}_g$',
        'a1': r'$A^{(1)}$',
        'a2': r'$A^{(2)}$',
        'a3': r'$A^{(3)}$',
        'a4': r'$A^{(4)}$'}

    npar = len(names)
    naxis = npar
    ipanel = 0

    sections = {
        'a_gi': 'intrinsic_alignment_parameters',
        'a_ii': 'intrinsic_alignment_parameters',
        'alpha_ii': 'intrinsic_alignment_parameters',
        'alpha_gi': 'intrinsic_alignment_parameters',
        'c1': 'intrinsic_alignment_parameters',
        'c2': 'intrinsic_alignment_parameters',
        'alpha_1': 'intrinsic_alignment_parameters',
        'alpha_2': 'intrinsic_alignment_parameters',
        'bias_ta': 'intrinsic_alignment_parameters',
        'bias_tt': 'intrinsic_alignment_parameters',
        'a1': 'intrinsic_alignment_parameters',
        'a2': 'intrinsic_alignment_parameters',
        'a3': 'intrinsic_alignment_parameters',
        'a4': 'intrinsic_alignment_parameters',
        's8': 'cosmological_parameters',
        'w': 'cosmological_parameters',
        'omega_m': 'cosmological_parameters'}

    print("Will make corner plot of %d parameters"%npar)

    for i, name1 in enumerate(names):
        fullname1 = "%s--%s"%(sections[name1], name1)
        for j, name2 in enumerate(names):
            ipanel += 1
            fullname2 = "%s--%s"%(sections[name2], name2)
            if j>i:
                continue
            plt.subplot(naxis, naxis, ipanel, aspect="auto")
            print(i, j, fullname1, fullname2)

            likemax = 0
            likemin = 10000
            for l, chain in enumerate(chains):
                like = make_panel(i, j, name1, name2, chain, labels[l], blind[l], lims[name1], lims[name2], alpha[l], fill[l], ls[l], colours[l], kde=kde, plots=plots, trim=trim, fontsize=fontsize)
                if like is None:
                    continue
                else:
                    likemax = max(like.max(), likemax)
                    likemin = min(like.min(), likemin)

            if (blind[l]):
                plt.yticks(visible=False)
                plt.xticks(visible=False)

            show = {(0,0): [False,False],
                    (1,0): [False,True], (1,1): [False,False],
                    (2,0): [False,True], (2,1): [False,False], (2,2): [False,False],
                    (3,0): [True,True],  (3,1): [True,False],  (3,2): [True,False], (3,3): [True,False]}

            if not i==npar-1:
                plt.xticks(visible=False)
            if not show[(i,j)][0]:
                pass  #plt.xticks(visible=False)
            if not show[(i,j)][1]:
                plt.yticks(visible=False)

            if (j==0) and (i!=0):
                if not blind:
                    plt.yticks(np.arange(lims[name1][0], lims[name1][1], 1)[1:], visible=True, fontsize=fontsize/2)
                plt.ylabel(parameter_labels[name1], fontsize=fontsize)
            if i==naxis-1:
                if not blind:
                    plt.xticks(np.arange(lims[name2][0], lims[name2][1], 1)[::2][1:], visible=True, fontsize=fontsize/2)
                plt.xlabel(parameter_labels[name2], fontsize=fontsize)

            if len(lims)>0 and (i!=j):
                plt.xlim(lims[name2][0], lims[name2][1])
                plt.ylim(lims[name1][0], lims[name1][1])
            elif (i==j):
                plt.ylim(likemin, likemax)
                plt.xlim(lims[name2][0], lims[name2][1])

            #plt.axhline(0, color="k", ls=":", alpha=0.5)
            #plt.axvline(0, color="k", ls=":", alpha=0.5)

    plt.subplots_adjust(hspace=0, wspace=0)

    return 0
def get_neighbours(self):
    import copy
    from sklearn.neighbors import NearestNeighbors
    import fitsio as fi

    fulltruth = di.load_truth(self.truth_path)
    reference = fi.FITS("/share/des/disc6/samuroff/y1/hoopoe/y1a1-v2.2_10/meds/y1a1_positions.fits")[1].read()
    fulltruth, ref = di.match_results(fulltruth, reference, name1="coadd_objects_id", name2="DES_id")
    fulltruth["ra"] = ref["ra"]
    fulltruth["dec"] = ref["dec"]

    self.truth, ref = di.match_results(self.truth, reference, name1="coadd_objects_id", name2="DES_id")
    self.truth["ra"] = ref["ra"]
    self.truth["dec"] = ref["dec"]
    self.truth, self.res = di.match_results(self.truth, self.res, name1="coadd_objects_id", name2="DES_id")

    meds_path = self.truth_path.replace("truth", "meds/*/*")
    meds_info = di.get_pixel_cols(meds_path)

    pool_of_possible_neighbours, fulltruth = di.match_results(meds_info, fulltruth, name1="DES_id", name2="coadd_objects_id")
    fulltruth = arr.add_col(fulltruth, "ix", pool_of_possible_neighbours["ix"])
    fulltruth = arr.add_col(fulltruth, "iy", pool_of_possible_neighbours["iy"])
    try:
        fulltruth = arr.add_col(fulltruth, "tile", pool_of_possible_neighbours["tile"])
    except:
        fulltruth["tile"] = pool_of_possible_neighbours["tile"]

    objects_needing_neighbours, self.truth = di.match_results(meds_info, self.truth, name1="DES_id", name2="coadd_objects_id")
    self.truth = arr.add_col(self.truth, "ix", objects_needing_neighbours["ix"])
    self.truth = arr.add_col(self.truth, "iy", objects_needing_neighbours["iy"])
    try:
        self.truth = arr.add_col(self.truth, "tile", objects_needing_neighbours["tile"])
    except:
        self.truth["tile"] = objects_needing_neighbours["tile"]

    cut = (fulltruth["sextractor_pixel_offset"]<1.0) & (fulltruth["ra"]!=0.0)
    fulltruth = fulltruth[cut]
    pool_of_possible_neighbours = pool_of_possible_neighbours[cut]

    indices = np.zeros(self.res.size)
    distances = np.zeros(self.res.size)
    lookup = np.linspace(0, fulltruth.size-1, fulltruth.size).astype(int)

    tiles = np.unique(self.truth["tile"])
    for it in tiles:
        print "Matching in sky coordinates, tile %s"%it
        sel0 = pool_of_possible_neighbours["tile"]==it
        sel1 = objects_needing_neighbours["tile"]==it

        # All positions where an object was simulated
        # Restrict the search to this tile
        x_pool = fulltruth["ra"][sel0]
        y_pool = fulltruth["dec"][sel0]
        xy_pool = np.vstack((x_pool, y_pool))

        # Positions of those objects for which we have im3shape results
        # We want to find neighbours for these objects
        x_tar = self.truth["ra"][sel1]
        y_tar = self.truth["dec"][sel1]
        xy_tar = np.vstack((x_tar, y_tar))

        # Build a tree using the pool
        nbrs = NearestNeighbors(n_neighbors=2, algorithm="kd_tree", metric="euclidean").fit(xy_pool.T)
        # Query it for the target catalogue
        d, i = nbrs.kneighbors(xy_tar.T)
        distances[sel1], indices[sel1] = d.T[1], lookup[sel0][i.T[1]]

    neighbour_cat = copy.deepcopy(self)
    neighbour_cat.res["id"] = fulltruth[indices.astype(int)]["DES_id"]
    neighbour_cat.res["coadd_objects_id"] = fulltruth[indices.astype(int)]["DES_id"]
    neighbour_cat.res["e1"] = fulltruth[indices.astype(int)]["intrinsic_e1"] + fulltruth[indices.astype(int)]["true_g1"]
    neighbour_cat.res["e2"] = fulltruth[indices.astype(int)]["intrinsic_e2"] + fulltruth[indices.astype(int)]["true_g2"]
    # Replace unphysical (sentinel) ellipticities with the neighbour's PSF ellipticity
    np.putmask(neighbour_cat.res["e1"], neighbour_cat.res["e1"]<-1, fulltruth[indices.astype(int)]["mean_psf_e1"])
    np.putmask(neighbour_cat.res["e2"], neighbour_cat.res["e2"]<-1, fulltruth[indices.astype(int)]["mean_psf_e2"])
    neighbour_cat.res["ra"] = fulltruth[indices.astype(int)]["ra"]
    neighbour_cat.res["dec"] = fulltruth[indices.astype(int)]["dec"]
    neighbour_cat.truth = fulltruth[indices.astype(int)]
    neighbour_cat.truth["nearest_neighbour_pixel_dist"] = distances

    return neighbour_cat
colours = ['purple', 'purple', 'purple', 'royalblue', 'royalblue', 'royalblue', 'purple', 'purple', 'purple']
ls = ['--', ':', '-'] * 3
alpha = [0., 0., 0.3] * 3

panel_numbers = {}
npar = None

for j, path in enumerate(files):
    chain = mc.chain(path)
    chain.add_column("s8", values="sigma_8*((omega_m/0.3)**0.5)")
    vals = arr.add_col(np.array(chain.samples), 'weight', chain.weight)

    names = chain.samples.dtype.names
    npar = len(names)
    nrows = 1 + int(npar / 5)

    # Assign each unique parameter label a fixed panel number, so chains with
    # different parameter sets land in consistent panels
    ipanel = 0
    for i, name in enumerate(names):
        label = parameter_labels[name]
        if label not in panel_numbers.keys():
            ipanel += 1
            panel_numbers[label] = ipanel
        else:
            ipanel = panel_numbers[label]