def read_cat(ifile):
    if ifile.endswith(".fits"):
        import numpy as np
        from enlib import dory, utils
        cat = dory.read_catalog(ifile)
        # Columns: ra (deg), dec (deg), S/N, amplitude
        return np.array([cat.ra/utils.degree, cat.dec/utils.degree,
            cat.amp[:,0]/cat.damp[:,0], cat.amp[:,0]]).T
    else:
        res = []
        with open(ifile, "r") as f:
            for line in f:
                # Skip comments and blank lines
                if line.startswith("#") or len(line.strip()) == 0: continue
                toks = line.split()
                # list() is needed in python3, where map returns a lazy iterator
                res.append(list(map(float, toks[:4])))
        return res
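# A minimal usage sketch for read_cat (the file names are hypothetical, and
# this assumes the text format is whitespace-separated "ra dec sn amp" rows):
#
#   rows = read_cat("cat.txt")   # -> list of [ra_deg, dec_deg, sn, amp] rows
#   rows = read_cat("cat.fits")  # -> (N,4) array built via dory.read_catalog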
parser.add_argument("--bscale", type=float, default=1)
parser.add_argument("--artrad", type=float, default=20)
parser.add_argument("--artnum", type=float, default=7)
parser.add_argument("--artpen", type=float, default=2)
args = parser.parse_args()
import numpy as np
from enlib import utils, dory

beam      = dory.get_beam(args.beam)
beam_prof = dory.get_beam_profile(beam)
beam_area = dory.calc_beam_profile_area(beam_prof)
# Apply ad-hoc beam scaling
beam_prof[0] *= args.bscale

cat   = dory.read_catalog(args.icat)
nread = len(cat)
# We will remove sources that are weaker than the surroundings' contribution
# in that area, so get the total flux at each source's position
flux = dory.eval_flux_at_srcs(cat, beam_prof, verbose=args.verbose)
# We will also penalize source detection in areas with too many sources. We can
# do this with the same function if we modify the beam a bit: a top-hat profile
# that is 1 out to artrad and 0 beyond makes each source above snmin contribute
# exactly 1 to the evaluated "flux" at any position within artrad of it, so
# nnear is simply the number of bright sources within artrad of each source.
r_dummy = np.linspace(0, args.artrad*utils.arcmin, 10000)
b_dummy = r_dummy*0+1
b_dummy[-1] = 0
cat_dummy = cat.copy()
sn = np.abs(cat.amp[:,0])/cat.damp[:,0]
cat_dummy.flux[:,0] = sn > args.snmin
nnear = dory.eval_flux_at_srcs(cat_dummy, np.array([r_dummy,b_dummy]), verbose=args.verbose)
nmax1 = np.max(nnear)
# Use nnear to get a per-source S/N threshold
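# The excerpt ends before the threshold itself is computed. One plausible
# shape for it (an assumption, not the script's actual formula): keep the cut
# at snmin in sparse regions, and raise it by the artpen factor once more
# than artnum neighbours are found, e.g.
#
#   snlim = args.snmin * np.where(nnear > args.artnum, args.artpen, 1)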
tot_cat = dory.merge_duplicates(tot_cat)
if comm.rank == 0:
    print("Writing catalogue")
    dory.write_catalog_fits(args.odir + "/cat.fits", tot_cat)
    dory.write_catalog_txt(args.odir + "/cat.txt", tot_cat)
# Then build the full maps
if "maps" in args.output:
    for name in map_keys:
        if comm.rank == 0: print("Writing %s" % name)
        merged = dory.merge_maps_onto([result[name] for result in results],
            shape, wcs, comm, root=0, crop=args.apod+args.apod_margin)
        if comm.rank == 0: enmap.write_map(args.odir + "/%s.fits" % name, merged)
        del merged
elif args.mode == "fit":
    icat = dory.read_catalog(args.icat)
    if args.nsigma is not None:
        icat = icat[np.abs(icat.flux[:,0]) >= icat.dflux[:,0]*args.nsigma]
    if args.split:
        npre = len(icat)
        icat = dory.split_sources(icat, nimage=args.split_nimage,
            dist=args.split_dist*utils.arcmin, minflux=args.split_minflux/1e3)
        print("Added %d extra images around %d sources > %f mJy" % (
            len(icat)-npre, (len(icat)-npre)//args.split_nimage, args.split_minflux))
    beam_prof = dory.get_beam_profile(beam)
    barea = dory.calc_beam_profile_area(beam_prof)
    reg_cats = []
    utils.mkdir(args.odir)
    # Distribute regions round-robin over MPI ranks
    for ri in range(comm.rank, len(regions), comm.size):
        reg_fid = regions[ri]
        reg_pad = dory.pad_region(reg_fid, args.pad, fft=True)
        print("%3d region %3d/%d %5d %5d %6d %6d" % (comm.rank, ri+1, len(regions),
            reg_fid[0,0], reg_fid[1,0], reg_fid[0,1], reg_fid[1,1]))
        try:
            # We support polarization here, but treat each component independently
            imap = enmap.read_map(args.imap, pixbox=reg_pad).preflat[:args.ncomp]
            idiv = divdiag(enmap.read_map(args.idiv, pixbox=reg_pad))[:args.ncomp]
            if args.mask:
                mshape, mwcs = enmap.read_map_geometry(args.mask)
                mbox = enmap.pixbox_of(mwcs, imap.shape, imap.wcs)
                idiv *= (1-enmap.read_map(args.mask, pixbox=mbox).preflat[0])
            if "debug" in args.output:
                dump_prefix = args.odir + "/region_%02d_" % ri
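# divdiag is used above but not defined in this excerpt. A minimal sketch of
# what it plausibly does (an assumption, not the script's actual helper, and
# it presumes enmap and numpy are already imported as in the rest of the
# script): reduce a full per-pixel inverse-variance matrix to its diagonal so
# each component can be treated independently, passing plain maps through.
def divdiag(div):
    if div.ndim == 4:
        # (ncomp,ncomp,ny,nx) inverse covariance -> (ncomp,ny,nx) diagonal.
        # An einsum label repeated in the input but kept in the output
        # extracts the diagonal without summing over it.
        return enmap.samewcs(np.einsum("aayx->ayx", div), div)
    # Already diagonal: just ensure an explicit leading component axis
    return div.preflat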