Example #1
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("icat")
parser.add_argument("ocat")
parser.add_argument("-b", "--beam",    type=str,   default="1.4", help="Beam in arcmin or beam(l)")
parser.add_argument("-s", "--snmin",   type=float, default=5)
parser.add_argument("-v", "--verbose", action="store_true")
parser.add_argument(      "--bscale",  type=float, default=1)
parser.add_argument(      "--artrad",  type=float, default=20)
parser.add_argument(      "--artnum",  type=float, default=7)
parser.add_argument(      "--artpen",  type=float, default=2)
args = parser.parse_args()
import numpy as np
from enlib import utils, dory

beam      = dory.get_beam(args.beam)
beam_prof = dory.get_beam_profile(beam)
beam_area = dory.calc_beam_profile_area(beam_prof)

# Apply ad-hoc beam scaling
beam_prof[0] *= args.bscale
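# (beam_prof[0] is taken to be the radial coordinate, so this stretches the profile
# rather than rescaling its amplitude; an assumption based on how profiles are built below)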

cat   = dory.read_catalog(args.icat)
nread = len(cat)

# We will remove sources that are weaker than the surroundings' contribution in that area, so
# get the total flux at each source's position
flux  = dory.eval_flux_at_srcs(cat, beam_prof, verbose=args.verbose)
# We will also penalize source detection in areas with too many sources. We can
# do this with the same function, if we modify the beam a bit
r_dummy   = np.linspace(0, args.artrad*utils.arcmin, 10000)
b_dummy   = r_dummy*0+1; b_dummy[-1] = 0
cat_dummy = cat.copy()
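# Sketch of how the dummy profile could be used; the example is truncated here, so
# this continuation is an assumption rather than the original code. With a flat unit
# "beam" out to artrad and unit source fluxes, the same flux-evaluation call returns
# roughly the number of sources near each position (assuming eval_flux_at_srcs accepts
# a plain [r, b] profile like get_beam_profile returns).
cat_dummy.flux[:] = 1
nsrc = dory.eval_flux_at_srcs(cat_dummy, np.array([r_dummy, b_dummy]), verbose=args.verbose)
# Detections in crowded areas (nsrc > artnum) could then be required to pass an
# artpen times stricter cut before being written to args.ocat.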
Example #2
			raise
	if "full" in args.output:
		if len(reg_cats) > 0: my_cat = np.concatenate(reg_cats)
		else: my_cat = np.zeros([0], dory.cat_dtype)
		tot_cat  = dory.allgather_catalog(my_cat, comm)
		tot_cat  = dory.merge_duplicates(tot_cat)
		# Sort the catalog by S/N, brightest first
		tot_cat = tot_cat[np.argsort(tot_cat.amp[:,0]/tot_cat.damp[:,0])[::-1]]
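		# (amp[:,0]/damp[:,0] is the S/N; the assumption here is that column 0 is the primary band)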
		if comm.rank == 0:
			print "Writing catalogue"
			dory.write_catalog_fits(args.odir + "/cat.fits", tot_cat)
			dory.write_catalog_txt (args.odir + "/cat.txt",  tot_cat)
elif args.mode == "subtract":
	icat      = dory.read_catalog(args.icat)
	beam_prof = dory.get_beam_profile(beam)
	barea     = dory.calc_beam_profile_area(beam_prof)
	fluxconv  = utils.flux_factor(barea, args.freq*1e9)/1e6
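	# fluxconv relates the catalog's flux units to the map's peak-amplitude units
	# (assumption: flux in mJy and amplitude in uK at args.freq GHz)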
	# Reformat the catalog to the format sim_srcs takes
	srcs      = np.concatenate([[icat.dec, icat.ra], icat.flux.T/fluxconv],0).T
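	# Each row of srcs is [dec, ra, amplitude(s) in map units], the per-source layout
	# that pointsrcs.sim_srcs consumes below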
	# Evaluate the model in regions which we can mpi parallelize over
	models    = []
	omaps     = []
	for ri in range(comm.rank, len(regions), comm.size):
		reg_fid = regions[ri]
		reg_pad = dory.pad_region(reg_fid, args.pad)
		print "%3d region %3d/%d %5d %5d %6d %6d" % (comm.rank, ri+1, len(regions), reg_fid[0,0], reg_fid[1,0], reg_fid[0,1], reg_fid[1,1])
		map    = enmap.read_map(args.imap, pixbox=reg_pad)
		map    = work_around_stupid_mpi4py_bug(map)
		model  = pointsrcs.sim_srcs(map.shape, map.wcs, srcs, beam_prof, dtype=map.dtype, pixwin=True,verbose=args.verbose)
		omaps.append(map-model)
		if args.omodel: models.append(model)
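	# Sketch of how the source-subtracted regions might be written out; the example is
	# truncated here, so this continuation is an assumption and the output paths are hypothetical.
	for i, omap in enumerate(omaps):
		ri = comm.rank + i*comm.size  # region index this rank handled
		enmap.write_map(args.odir + "/omap_reg%03d.fits" % ri, omap)
		if args.omodel:
			enmap.write_map(args.odir + "/model_reg%03d.fits" % ri, models[i])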