Example #1
File: mbdb.py  Project: ssamuroff/mbii
    def cross_match(self, source, table, fields, match_column, match_column2='', fatal_errors=True):

        # Build the SQL query
        if match_column2=='':
            match_column2=match_column
        print 'Will cross match column %s in the table provided with %s in table %s'%(match_column,match_column2,table)
        print 'Building query...'
        sql = "SELECT %s FROM %s WHERE %s IN ("%(fields, table, match_column)

        for row in source[match_column]:
            sql+="'%d',"%int(row)
        sql = sql[:-1] + ')'
        
        try:
            # prepare a cursor for the query
            cursor = self.db.cursor()
            cursor.execute(sql)
            print("Fetching %d entries" % cursor.rowcount)
        except Exception:
            if fatal_errors:
                raise
            else:
                print("Error when running the SQL command")
                return

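        # Convert the fetched rows into a numpy record array, labelling the
        # columns with the requested field names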
        results = fromarrays(np.array(cursor.fetchall()).squeeze().T,names=fields)

        # Finally match the results
        # Without this the results of the second query are misaligned  
        sm, rm = di.match_results(source, results, name1='subfindId', name2='subfindId')
        return sm, rm
Example #2
	def add_bpz_cols(self, fil="/share/des/disc3/samuroff/y1/photoz/bpz/NSEVILLA_PHOTOZ_TPL_Y1G103_1_bpz_highzopt_2_9_16.fits", array=None, exclude=None):
		print "Loading BPZ results from %s"%fil
		if array is None:
			bpz = fio.FITS(fil)[1].read()
		else:
			bpz = array

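		# Row-match the BPZ results to the existing catalogue on coadd_objects_id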
		self.res, bpz = di.match_results(self.res, bpz, name1="coadd_objects_id", name2="coadd_objects_id")

		for colname in bpz.dtype.names:
			if colname=="coadd_objects_id":
				continue
			if (exclude is not None) and (colname in exclude):
				continue
			print "Adding column: %s"%colname
			self.res = add_col(self.res, colname, bpz[colname])
Example #3
def setup(load_sim, load_data, config, verbose=True):
	im3shape_columns = ["e1", "e2", "mean_hsm_psf_e1_sky", "mean_hsm_psf_e2_sky", "mean_hsm_psf_sigma", "mean_psf_fwhm","snr", "mean_rgpp_rp","mean_mask_fraction","radius", "coadd_objects_id", "is_bulge", "bulge_flux", "disc_flux", "mean_flux", "info_flag", "mag_auto_r", "tilename", "ra", "dec", "stamp_size", "n_exposure", "likelihood", "chi2_pixel"]
	truth_columns = ['DES_id', 'cosmos_ident', 'cosmos_photoz', 'sextractor_pixel_offset', 'true_g1', 'true_g2', 'intrinsic_e1', 'intrinsic_e2', 'ra', 'dec', 'hlr', 'mag', 'flux']

	y1v2 = None
	hoopoe = None
	weights = None

	# Load the y1 data
	if load_data:
		if verbose:
			print "Loading data %s"%config["input"]["i3s"]
		y1v2 = s.shapecat(res=config["input"]["i3s"])
		y1v2.load(truth=False, prune=True, cols=[im3shape_columns,truth_columns])
		y1v2.res = y1v2.res[y1v2.res["info_flag"]==0] # This should always be true, but just in case...
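		# Quality cuts on signal-to-noise and the PSF-relative size rgpp/rp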
		sel = ((y1v2.res["snr"] > 12) & (y1v2.res["snr"] < 200) & (y1v2.res["mean_rgpp_rp"] > 1.13) & (y1v2.res["mean_rgpp_rp"] < 3.0))
		y1v2.res=y1v2.res[sel]
		if config["selection"]["cut_glowing_edges"]:
			print "Removing glowing edges."
			glowing_edges_ids = fi.FITS("/share/des/disc7/samuroff/des/y1a1-im3shape-bad-edge-ids-sorted.fits")[-1].read()
			glowing_edges_mask = np.invert(np.in1d(y1v2.res["coadd_objects_id"], glowing_edges_ids["ids"]))
			n0 = y1v2.res["coadd_objects_id"].size
			y1v2.res = y1v2.res[glowing_edges_mask]
	else:
		if verbose:
			print "Not loading data (either it's been loaded already or it's not needed)"
	
	# And the simulation results
	if load_sim:
		if verbose:
			print "Loading simulation %s"%config["input"]["hoopoe"]
		hoopoe = s.shapecat(res=config["input"]["hoopoe"] ,truth=config["input"]["hoopoe"])
		hoopoe.res = fi.FITS(hoopoe.res_path)["i3s"].read()
		hoopoe.truth = fi.FITS(hoopoe.truth_path)["truth"].read()
		hoopoe.res, hoopoe.truth = di.match_results(hoopoe.res, hoopoe.truth, name1="DES_id")


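		# Drop objects with non-finite PSF ellipticity measurements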
		sel = np.isfinite(hoopoe.res["mean_hsm_psf_e1_sky"]) & np.isfinite(hoopoe.res["mean_hsm_psf_e2_sky"])
		hoopoe.truth = hoopoe.truth[sel]
		hoopoe.res = hoopoe.res[sel]

		if (config["selection"]["mask"].lower()!="none"):
			apply_selection = True
			selection = fi.FITS(config["selection"]["mask"])["sel"].read().astype(bool)
			weights = fi.FITS(config["selection"]["mask"])["wts"].read()

			sel = ((hoopoe.res["snr"] > 12) & (hoopoe.res["snr"] < 200) & (hoopoe.res["mean_rgpp_rp"] > 1.13) & (hoopoe.res["mean_rgpp_rp"] < 3.0))
			selection = selection & sel

			if verbose:
				print "Applying additional cuts and weights from %s"%config["selection"]["mask"]
			hoopoe.res = hoopoe.res[selection]
			hoopoe.truth = hoopoe.truth[selection]
			weights = weights[selection]
		if (not config["selection"]["reweight"]):
			if verbose:
				print "Ignoring weights."
			weights = np.ones(hoopoe.res["coadd_objects_id"].size)

		if not (config["calibration"]["ztype"]=="tophat"):
			if verbose:
				print "Using DES redshift bins"

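			# Keep only objects which were assigned to a DES redshift bin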
			exclude = (hoopoe.res["des_bin"]!=0 )
			hoopoe.truth = hoopoe.truth[exclude]  
			weights = weights[exclude]  
			hoopoe.res = hoopoe.res[exclude]
		else:
			if verbose:
				print "Using tophat redshift bins"

		if (config["selection"]["resample"]):
			print "Will apply resampling to match data"
			edat = np.sqrt(y1v2.res["e1"]**2+y1v2.res["e2"]**2)
			eh = np.sqrt(hoopoe.res["e1"]**2+hoopoe.res["e2"]**2)
			subsample = di.get_selection_to_match(edat,eh,nbins=35)
			hoopoe.res = hoopoe.res[subsample]
			hoopoe.truth = hoopoe.truth[subsample]
			weights = weights[subsample]

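		# Optionally overwrite an existing column with externally computed weights,
		# renaming it 'weight' so the array dtype is unchanged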
		if (config["selection"]["weights_file"].lower()!="none"):
			import astropy.table as tb
			wts = fi.FITS(config["selection"]["weights_file"])["i3s_weights_col"].read()
			col_to_replace = "mean_unmasked_flux_frac"
			print "Renaming column '%s' as 'weight'"%col_to_replace
			tab = tb.Table(hoopoe.res)
			tab.rename_column(col_to_replace,"weight")
			tab["weight"] = wts["weight"]
			hoopoe.res = np.array(tab)

		if config["selection"]["cut_glowing_edges"]:
			print "Removing glowing edges."
			glowing_edges_ids = fi.FITS("/share/des/disc7/samuroff/des/y1a1-im3shape-bad-edge-ids-sorted.fits")[-1].read()
			glowing_edges_mask = np.invert(np.in1d(hoopoe.res["coadd_objects_id"], glowing_edges_ids["ids"]))
			n0 = hoopoe.res["coadd_objects_id"].size
			hoopoe.truth = hoopoe.truth[glowing_edges_mask]
			hoopoe.res = hoopoe.res[glowing_edges_mask]
			weights = weights[glowing_edges_mask]
			print "cut removes %2.2fM/%2.2fM galaxies"%((n0-hoopoe.res["coadd_objects_id"].size)/1e6,n0/1e6)

		

		print "Final selection : %d galaxies"%hoopoe.res["coadd_objects_id"].size
		print "Final selection : %d unique COSMOS IDs"%np.unique(hoopoe.truth["cosmos_ident"]).size

	else: 
		if verbose:
			print "Not loading simulation."

	return hoopoe, weights, y1v2
Example #4
    def load_distance_cat(self, catalogue, path="/share/des/disc7/samuroff/des/hoopoe/neighbour_catalogue.fits"):
        print "Loading neighbour lookup catalogue from %s"%path
        self.distance_cat = fi.FITS(path)[1].read(columns=["coadd_objects_id", "neighbour_distance", "truth_index"])

        # Keep only neighbour entries present in the catalogue, then align the two row by row
        dmask = np.in1d(self.distance_cat["coadd_objects_id"], catalogue.res["coadd_objects_id"])
        self.distance_cat, catalogue.res = di.match_results(self.distance_cat[dmask], catalogue.res)
Example #5
def bad_tree(cat, bad_ids, truth=None, verbose=True, mask=None, ntiles=np.inf):
    if truth is None:
        # Default to the truth directory stored on the catalogue object
        truth = cat.truth_path

    R=[]
    ids=[]

    if verbose:
        print "Setting up..."

    if mask is None:
        mask=np.ones_like(cat.res).astype(bool)

    cat.tiles = np.unique(cat.res["tilename"][mask]).astype("S12")
    object_tiles = cat.res["tilename"][mask].astype("S12")
    columns=['id', 'DES_id', 'cosmos_ident', 'cosmos_photoz', 'spectral_type', 'flags', 'star_flag', 'nearest_neighbour_pixel_dist', 'nearest_neighbour_index', 'true_g1', 'true_g2', 'intrinsic_e1', 'intrinsic_e2', 'intrinsic_e1_hsm', 'intrinsic_e2_hsm', 'hlr',  'ra', 'dec', 'mag', 'flux', 'nexp', 'mean_psf_e1', 'mean_psf_e2', 'mean_psf_fwhm']


    print "Searching for corrupted COSMOS profiles in %d truth tables."%cat.tiles.size
    for i, tile in enumerate(cat.tiles):
        if i > ntiles:
            break

        select = (object_tiles==tile)

        print i+1, tile, object_tiles[select].size

        filename = glob.glob("%s/%s*.fz"%(truth,tile))

        if len(filename)==0:
            print "Truth table is missing."
            continue

        if len(filename)>1:
            print "Warning - multiple truth tables found." 
            print "Will use %s"%filename[0]

        filename = filename[0]
        truth_table = fitsio.FITS(filename)[1].read(columns=columns)

        filename = glob.glob(("%s/%s*_cat.fits"%(truth,tile)).replace("truth", "OPS"))
        if len(filename)==0:
            print "SExtractor catalogue is missing."
            continue
        if len(filename)>1:
            print "Warning - multiple object catalogue tables found."
            print "Will use %s"%filename[0]
        filename = filename[0]
        se_cat = fitsio.FITS(filename)[1].read()

        select_tile = cat.res["tilename"]==tile

        # Frustratingly time consuming step to match up pixel positions with the correct object ids
        xy=np.vstack((se_cat["ALPHAWIN_J2000"], se_cat["DELTAWIN_J2000"]))
        xy0 = np.vstack((cat.truth["ra"][select_tile],cat.truth["dec"][select_tile]))
        tree = sps.KDTree(xy.T)
        results = tree.query(xy0.T, k=1)
        se_cat = se_cat[results[1]]

        # Set up a KD tree structure using only the bad profiles in this tile
        select_bad = np.in1d(truth_table["cosmos_ident"], bad_ids)
        xy_im_bad = np.vstack(( se_cat["XWIN_IMAGE"][select_bad], se_cat["YWIN_IMAGE"][select_bad] ))
        bad_profile_tree = sps.KDTree(xy_im_bad.T)

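        # Keep only the positions of objects which have im3shape results in this tile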
        i3s_cuts = np.in1d(truth_table["DES_id"], cat.truth["DES_id"][select_tile])
        xy_im = np.vstack(( se_cat["XWIN_IMAGE"][i3s_cuts], se_cat["YWIN_IMAGE"][i3s_cuts] ))
        

        # Query it to find the nearest corrupted profile to each of these objects
        R_pix, idx_pix = bad_profile_tree.query(xy_im.T, k=2)

        ra = truth_table["ra"][select_bad]
        dec = truth_table["dec"][select_bad]
        xy0 = np.vstack((ra[ra!=0.0], dec[dec!=0.0]))
        if verbose:
            print "Setting up KD tree (%d objects)"%ra.size,
        tree = sps.KDTree(xy0.T)

        # Query it to get a distance from every object in the final im3shape catalogue
        # to the nearest instance of a corrupted COSMOS profile
        if verbose:
            print "querying...",
        xy = np.vstack((cat.res["ra"][mask][select], cat.res["dec"][mask][select])) 
        result = tree.query(xy.T, k=1)[0] * 60 * 60 /0.27
        with open("corruption_dist.txt", "a") as f:
            out = np.vstack((cat.truth["DES_id"][mask][select], result))
            np.savetxt(f, out.T)
        R.append(result)
        ids.append(cat.res["coadd_objects_id"][mask][select])
        if verbose:
            print "Done %2.2f %2.2f"%(result.min(), result.max())

    b = np.zeros(np.concatenate(ids).size,dtype=[("coadd_objects_id", int),("dist", float)])
    b["coadd_objects_id"] = np.concatenate(ids)
    b["dist"] = np.concatenate(R)
    b,tmp = di.match_results(b,cat.res)

   
    return b["coadd_objects_id"], b["dist"]
Example #6
	def load(self, res=True, truth=False, epoch=False,coadd=False, postprocessed=True, keyword="DES", apply_infocuts=True, ext=".fits"):
		
		if res:
			if ext in self.res_path:
				files=[self.res_path]
			else:
				files = glob.glob("%s/*%s"%(self.res_path,ext))
			print "%s/*%s"%(self.res_path,ext)
			single_file=False
			print "loading %d results file(s) from %s"%(len(files),self.res_path)

			if self.noisefree and apply_infocuts:
				self.res = pf.getdata(files[0])
				tmp, noise_free_infocuts = self.get_infocuts(exclude=["chi"], return_string=True)

				noise_free_infocuts = noise_free_infocuts.replace("cuts= ((", "cuts= ((%s['chi2_pixel']>0.004) & (%s['chi2_pixel']<0.2) & (")

			if len(files)>1:
				if apply_infocuts and self.noisefree:
					self.res, self.files, i = di.load_results(res_path =self.res_path, format=ext[1:], apply_infocuts=False, additional_cuts=noise_free_infocuts, keyword=keyword, postprocessed=postprocessed, return_filelist=True)
				else:
					self.res, self.files, i = di.load_results(res_path =self.res_path, format=ext[1:], apply_infocuts=apply_infocuts, keyword=keyword, postprocessed=postprocessed, return_filelist=True)
			else:
				if ext.lower()==".fits":
					self.res = fio.FITS(files[0])[1].read()
				elif ext.lower()==".txt":
					self.res = np.genfromtxt(files[0], names=True)
				self.files=files
				single_file=True
				i=None


		if truth:
			if ".fits" in self.truth_path:
				files=[self.truth_path]
			else:
				files = glob.glob("%s/*.fits*"%self.truth_path)
			single_file=False

			print "loading truth files from %s"%self.truth_path
			if len(files)>1:
				self.truth = di.load_truth(truth_path=self.truth_path, match=self.files, ind=i, res=self.res)
			else:
				self.truth = pf.getdata(files[0])

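		# Row-match the results to the truth table, preferring the truth positions where they disagree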
		if truth and res:
			self.res, self.truth = di.match_results(self.res,self.truth)
			if ("ra" in self.res.dtype.names): 
				if not (self.res["ra"]==self.truth["ra"]).all():
					self.res["ra"] = self.truth["ra"]
					self.res["dec"] = self.truth["dec"]

			print "Found catalogue of %d objects after matching to truth table"%len(self.res)


		if coadd:
			if ".fits" in self.coadd_path:
				files=[self.coadd_path]
			else:
				files = glob.glob("%s/*cat*.fits"%self.coadd_path)
			single_file=False

			print "loading coadd catalogue files from %s"%self.coadd_path
			if len(files)>1:
				print "update code..."
			else:
				self.coadd = pf.getdata(files[0])

			ids = self.res["row_id"]-1
			self.coadd= self.coadd[ids]

		if epoch:
			path = self.res_path.replace("main", "epoch")
			try:
				self.epoch = di.load_epoch(path)
			except:
				self.epoch = di.load_epoch(path.replace("bord", "disc"))

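		# If a truth table was loaded, drop objects with a large SExtractor position offset
		# and keep the results and coadd arrays aligned with it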
		if hasattr(self, "truth"):
			sel = self.truth["sextractor_pixel_offset"]<1.0
			self.truth = self.truth[sel]
			if hasattr(self, "res"):
				self.res = self.res[sel]
			if coadd:
				self.coadd = self.coadd[sel]
Example #7
	def get_neighbours(self):
		import copy
		from sklearn.neighbors import NearestNeighbors
		import scipy.spatial as sps

		fulltruth = di.load_truth(self.truth_path)

		import fitsio as fi
		reference=fi.FITS("/home/samuroff/y1a1_16tiles_positions.fits")[1].read()
		fulltruth,reference = di.match_results(fulltruth,reference, name1="coadd_objects_id", name2="DES_id")
		fulltruth["ra"]=reference["ra"]
		fulltruth["dec"]=reference["dec"]

		meds_path=self.truth_path.replace("truth", "meds/*/*")
		meds_info = di.get_pixel_cols(meds_path)
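		# Attach the MEDS pixel coordinates (ix, iy) and tile name to the full truth table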
		pool_of_possible_neighbours,fulltruth = di.match_results(meds_info,fulltruth, name1="DES_id", name2="coadd_objects_id" )
		fulltruth = arr.add_col(fulltruth, "ix", pool_of_possible_neighbours["ix"])
		fulltruth = arr.add_col(fulltruth, "iy", pool_of_possible_neighbours["iy"])
		fulltruth = arr.add_col(fulltruth, "tile", pool_of_possible_neighbours["tile"])

		objects_needing_neighbours,self.truth = di.match_results(meds_info,self.truth, name1="DES_id", name2="coadd_objects_id" )
		self.truth = arr.add_col(self.truth, "ix", objects_needing_neighbours["ix"])
		self.truth = arr.add_col(self.truth, "iy", objects_needing_neighbours["iy"])
		self.truth = arr.add_col(self.truth, "tile", objects_needing_neighbours["tile"])



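		# Keep only truth objects with a small SExtractor offset and a valid position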
		cut=(fulltruth["sextractor_pixel_offset"]<1.0) & (fulltruth["ra"]!=0.0)
		fulltruth = fulltruth[cut]
		pool_of_possible_neighbours = pool_of_possible_neighbours[cut]

		indices = np.zeros(self.res.size)
		distances = np.zeros(self.res.size)
		lookup = np.arange(fulltruth.size)

		tiles = np.unique(self.truth["tile"]) 

		for it in tiles:
			print "Matching in pixel coordinates, tile %s"%it
			sel0 = pool_of_possible_neighbours["tile"]==it
			sel1 = objects_needing_neighbours["tile"]==it

			# All positions where an object was simulated
			# Restrict the search to this tile
			x_pool = pool_of_possible_neighbours["ix"][sel0]
			y_pool = pool_of_possible_neighbours["iy"][sel0]
			xy_pool=np.vstack((x_pool,y_pool))

			# Positions of those objects for which we have im3shape results
			# We want to find neighbours for these objects
			x_tar = self.truth["ix"][sel1]
			y_tar = self.truth["iy"][sel1]
			xy_tar=np.vstack((x_tar,y_tar))

			# Build a tree using the pool
			nbrs = NearestNeighbors(n_neighbors=2, algorithm='kd_tree', metric="euclidean").fit(xy_pool.T)
			# Query it for the target catalogue
			d,i = nbrs.kneighbors(xy_tar.T)
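			# The closest match is the object itself, so keep the second-nearest neighbour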
			distances[sel1], indices[sel1] = d.T[1], lookup[sel0][i.T[1]]

		neighbour_cat = copy.deepcopy(self)

		neighbour_cat.res["id"]= fulltruth[indices.astype(int)]["DES_id"]
		neighbour_cat.res["coadd_objects_id"]= fulltruth[indices.astype(int)]["DES_id"]
		neighbour_cat.res["e1"]= fulltruth[indices.astype(int)]["intrinsic_e1"]+fulltruth[indices.astype(int)]["true_g1"] 
		neighbour_cat.res["e2"]= fulltruth[indices.astype(int)]["intrinsic_e2"]+fulltruth[indices.astype(int)]["true_g2"]
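		# Ellipticities below -1 are unphysical flag values; fall back to the mean PSF ellipticity there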
		np.putmask(neighbour_cat.res["e1"], neighbour_cat.res["e1"]<-1, fulltruth[indices.astype(int)]["mean_psf_e1"])
		np.putmask(neighbour_cat.res["e2"], neighbour_cat.res["e2"]<-1, fulltruth[indices.astype(int)]["mean_psf_e2"])
		neighbour_cat.res["ra"]= fulltruth[indices.astype(int)]["ra"]
		neighbour_cat.res["dec"]= fulltruth[indices.astype(int)]["dec"]
		neighbour_cat.truth= fulltruth[indices.astype(int)]
		neighbour_cat.truth["nearest_neighbour_pixel_dist"] = distances

		return neighbour_cat
Example #8
baryons1 = fi.FITS(
    '/home/ssamurof/massive_black_ii/subhalo_cat-nthreshold5.fits'
)['baryons'][:]
baryons_symm = util.gather_mpi_output(
    '/home/ssamurof/massive_black_ii/cats/symmetrised/*-masswtdmedian-stellarmasscut.fits',
    hdu='baryons')
dm1 = fi.FITS(
    '/home/ssamurof/massive_black_ii/subhalo_cat-nthreshold5.fits')['dm'][:]

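# Keep only subhaloes with more than 1000 dark matter particles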
baryons1 = baryons1[(dm1['npart'] > 1000)]

# Enforce the same selection function in both catalogues
import tools.diagnostics as di
baryons1, baryons_symm = di.match_results(baryons1,
                                          baryons_symm,
                                          name1='lambda1',
                                          name2='lambda1')
print baryons1.size, baryons_symm.size

nmax = 300
# The two catalogues hardcoded here should have identical selection functions
select = (baryons1['npart'] > nmax) & (
    np.isfinite(baryons1['x']) & np.isfinite(baryons1['y'])
    & np.isfinite(baryons1['z'])) & (baryons1['x'] < 100000) & (
        baryons1['y'] < 100000) & (baryons1['z'] < 100000) & (
            baryons1['x'] > 0) & (baryons1['y'] > 0) & (baryons1['z'] > 0)

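# Additionally require a nonzero c3 measurement before building the treecorr catalogue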
mask1 = (baryons1['c3'] != 0) & select
cat1 = treecorr.Catalog(x=baryons1['x'][mask1],
                        y=baryons1['y'][mask1],
                        z=baryons1['z'][mask1],
Example #9
	def get_neighbours(self):
		import copy
		from sklearn.neighbors import NearestNeighbors
		import scipy.spatial as sps

		fulltruth = di.load_truth(self.truth_path)

		import fitsio as fi
		reference = fi.FITS("/share/des/disc6/samuroff/y1/hoopoe/y1a1-v2.2_10/meds/y1a1_positions.fits")[1].read()
		fulltruth,ref = di.match_results(fulltruth,reference, name1="coadd_objects_id", name2="DES_id")
		fulltruth["ra"]=ref["ra"]
		fulltruth["dec"]=ref["dec"]
		self.truth,ref = di.match_results(self.truth,reference, name1="coadd_objects_id", name2="DES_id")
		self.truth["ra"]=ref["ra"]
		self.truth["dec"]=ref["dec"]
		self.truth,self.res = di.match_results(self.truth,self.res, name1="coadd_objects_id", name2="DES_id")


		meds_path=self.truth_path.replace("truth", "meds/*/*")
		meds_info = di.get_pixel_cols(meds_path)
		pool_of_possible_neighbours,fulltruth = di.match_results(meds_info,fulltruth, name1="DES_id", name2="coadd_objects_id" )
		fulltruth = arr.add_col(fulltruth, "ix", pool_of_possible_neighbours["ix"])
		fulltruth = arr.add_col(fulltruth, "iy", pool_of_possible_neighbours["iy"])
		try:
			fulltruth = arr.add_col(fulltruth, "tile", pool_of_possible_neighbours["tile"])
		except:
			fulltruth["tile"] = pool_of_possible_neighbours["tile"]

		objects_needing_neighbours,self.truth = di.match_results(meds_info,self.truth, name1="DES_id", name2="coadd_objects_id" )
		self.truth = arr.add_col(self.truth, "ix", objects_needing_neighbours["ix"])
		self.truth = arr.add_col(self.truth, "iy", objects_needing_neighbours["iy"])
		try:
			self.truth = arr.add_col(self.truth, "tile", objects_needing_neighbours["tile"])
		except:
			self.truth["tile"] = objects_needing_neighbours["tile"]



		cut=(fulltruth["sextractor_pixel_offset"]<1.0) & (fulltruth["ra"]!=0.0)
		fulltruth = fulltruth[cut]
		pool_of_possible_neighbours = pool_of_possible_neighbours[cut]

		indices = np.zeros(self.res.size)
		distances = np.zeros(self.res.size)
		lookup = np.arange(fulltruth.size)

		tiles = np.unique(self.truth["tile"]) 

		for it in tiles:
			print "Matching in pixel coordinates, tile %s"%it
			sel0 = pool_of_possible_neighbours["tile"]==it
			sel1 = objects_needing_neighbours["tile"]==it

			# All positions where an object was simulated
			# Restrict the search to this tile
			x_pool = fulltruth["ra"][sel0] #pool_of_possible_neighbours["ix"][sel0]
			y_pool = fulltruth["dec"][sel0] #pool_of_possible_neighbours["iy"][sel0]
			xy_pool=np.vstack((x_pool,y_pool))

			# Positions of those objects for which we have im3shape results
			# We want to find neighbours for these objects
			x_tar = self.truth["ra"][sel1]
			y_tar = self.truth["dec"][sel1]
			xy_tar=np.vstack((x_tar,y_tar))

			# Build a tree using the pool
			nbrs = NearestNeighbors(n_neighbors=2, algorithm='kd_tree', metric="euclidean").fit(xy_pool.T)
			# Query it for the target catalogue
			d,i = nbrs.kneighbors(xy_tar.T)
			distances[sel1], indices[sel1] = d.T[1], lookup[sel0][i.T[1]]

		neighbour_cat = copy.deepcopy(self)

		neighbour_cat.res["id"]= fulltruth[indices.astype(int)]["DES_id"]
		neighbour_cat.res["coadd_objects_id"]= fulltruth[indices.astype(int)]["DES_id"]
		neighbour_cat.res["e1"]= fulltruth[indices.astype(int)]["intrinsic_e1"]+fulltruth[indices.astype(int)]["true_g1"] 
		neighbour_cat.res["e2"]= fulltruth[indices.astype(int)]["intrinsic_e2"]+fulltruth[indices.astype(int)]["true_g2"]
		np.putmask(neighbour_cat.res["e1"], neighbour_cat.res["e1"]<-1, fulltruth[indices.astype(int)]["mean_psf_e1"])
		np.putmask(neighbour_cat.res["e2"], neighbour_cat.res["e2"]<-1, fulltruth[indices.astype(int)]["mean_psf_e2"])
		neighbour_cat.res["ra"]= fulltruth[indices.astype(int)]["ra"]
		neighbour_cat.res["dec"]= fulltruth[indices.astype(int)]["dec"]
		neighbour_cat.truth= fulltruth[indices.astype(int)]
		neighbour_cat.truth["nearest_neighbour_pixel_dist"] = distances

		return neighbour_cat
Example #10
	def load(self, res=True, truth=False, epoch=False, coadd=False, prune=False, cols=[None,None], postprocessed=True, keyword="DES", apply_infocuts=True, ext=".fits", match=[], ntiles=None):
		
		if res and (not hasattr(self, "res")):
			if ext in self.res_path:
				files=[self.res_path]
			else:
				files = glob.glob("%s/*%s"%(self.res_path,ext))

			if ntiles is not None:
				files = files[:ntiles]

			print "%s/*%s"%(self.res_path,ext)
			single_file=False
			print "loading %d results file(s) from %s"%(len(files),self.res_path)

			if self.noisefree and apply_infocuts:
				self.res = pf.getdata(files[0])
				tmp, noise_free_infocuts = self.get_infocuts(exclude=["chi"], return_string=True)

				#noise_free_infocuts = noise_free_infocuts.replace("cuts= ((", "cuts= ((%s['chi2_pixel']>0.004) & (%s['chi2_pixel']<0.2) & (")

			if len(files)>1:
				if apply_infocuts and self.noisefree:
					self.res, self.files, i = di.load_results(res_path =self.res_path, format=ext[1:], cols=cols[0], apply_infocuts=False, additional_cuts=noise_free_infocuts, keyword=keyword, postprocessed=postprocessed, return_filelist=True, match=match)
				else:
					self.res, self.files, i = di.load_results(res_path =self.res_path, format=ext[1:], cols=cols[0], ntot=len(files) ,apply_infocuts=apply_infocuts, keyword=keyword, postprocessed=postprocessed, return_filelist=True, match=match)
			else:
				if ext.lower()==".fits":
					self.res = fio.FITS(files[0])[1].read()
				elif ext.lower()==".txt":
					self.res = np.genfromtxt(files[0], names=True)
				self.files=files
				single_file=True
				i=None

			self.indices=i


		if truth:
			if ".fits" in self.truth_path:
				files=[self.truth_path]
			else:
				files = glob.glob("%s/*.fits*"%self.truth_path)
			single_file=False

			print "loading truth files from %s"%self.truth_path
			if len(files)>1:
				if res:
					self.truth = di.load_truth(truth_path=self.truth_path, cols=cols[1], apply_infocuts=apply_infocuts, match=self.files, ind=self.indices, res=self.res)
				else:
					self.truth = di.load_truth(truth_path=self.truth_path, cols=cols[1], apply_infocuts=apply_infocuts)
			else:
				self.truth = pf.getdata(files[0])

		if truth and res:
			self.res, self.truth = di.match_results(self.res,self.truth, name2="coadd_objects_id")
			if ("ra" in self.res.dtype.names): 
				if not (self.res["ra"]==self.truth["ra"]).all():
					self.res["ra"] = self.truth["ra"]
					self.res["dec"] = self.truth["dec"]

			print "Found catalogue of %d objects after matching to truth table"%len(self.res)

		if coadd:
			if ".fits" in self.coadd_path:
				files=[self.coadd_path]
			else:
				files = glob.glob("%s/*cat*.fits"%self.coadd_path)
			single_file=False

			print "loading coadd catalogue files from %s"%self.coadd_path
			if len(files)>1:
				print "update code..."
			else:
				self.coadd = pf.getdata(files[0])

			ids = self.res["row_id"]-1
			self.coadd= self.coadd[ids]

		if epoch:
			path = self.res_path.replace("main", "epoch")
			try:
				self.epoch = di.load_epoch(path)
			except:
				self.epoch = di.load_epoch(path.replace("bord", "disc"))

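		# Optionally prune objects with non-finite PSF ellipticities, keeping the truth table aligned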
		if prune:
			sel = np.isfinite(self.res["mean_psf_e1_sky"]) & np.isfinite(self.res["mean_psf_e2_sky"]) 
			self.res = self.res[sel]
			if hasattr(self, "truth"):
				self.truth = self.truth[sel]