# --- Script setup fragment: arguments, MPI, logging, geometry, tod list ---
parser.add_argument("prefix", nargs="?", help="Output file name prefix")
# NOTE(review): type=str with default=0 mixes types — args.dets is the int 0
# when the flag is absent but a string when given; confirm downstream code
# only tests truthiness or handles both.
parser.add_argument("--dets", type=str, default=0, help="Detector slice")
args = parser.parse_args()
utils.mkdir(args.odir)
comm = mpi.COMM_WORLD
# Map precision is configurable: 32-bit floats halve memory at some accuracy cost.
dtype = np.float32 if config.get("map_bits") == 32 else np.float64
ncomp = 3   # number of map components (presumably T,Q,U Stokes — confirm with map usage)
tsize = 720
# All output files share this prefix: "odir/prefix_..." or just "odir/".
root = args.odir + "/" + (args.prefix + "_" if args.prefix else "")
down = config.get("downsample")
# Set up logging: one log file per MPI rank under root + ".log"
utils.mkdir(root + ".log")
logfile = root + ".log/log%03d.txt" % comm.rank
log_level = log.verbosity2level(config.get("verbosity"))
L = log.init(level=log_level, file=logfile, rank=comm.rank, shared=True)
# Set up our geometry: read only the footprint, then prepend the component axis.
shape, wcs = enmap.read_map_geometry(args.area)
shape = (ncomp, ) + shape[-2:]
msys = config.get("map_sys")
dist = config.get("map_dist")
# Filter parameters
filter_fknee = 0.2
filter_alpha = -3
# Get our tod list
filedb.init()
ids = todinfo.get_tods(args.sel, filedb.scans)
# Dump our settings (rank 0 only; body continues beyond this fragment)
if comm.rank == 0:
# --- MPI setup and scan-pattern detection fragment ---
comm_world = mpi.COMM_WORLD
# Split the world communicator into groups of nsub ranks: comm_group connects
# corresponding ranks across groups, comm_sub connects ranks within one group.
# Floor division (//) is required: Python 3 "/" returns a float, but MPI
# Split expects integer color/key arguments.
comm_group = comm_world.Split(comm_world.rank % args.nsub, comm_world.rank // args.nsub)
comm_sub   = comm_world.Split(comm_world.rank // args.nsub, comm_world.rank % args.nsub)
ids = todinfo.get_tods(args.sel, filedb.scans)
tol = args.tol*utils.arcmin
daz = args.daz*utils.arcmin
dtype = np.float32 if config.get("map_bits") == 32 else np.float64
tods_per_map = args.group
utils.mkdir(args.odir)
root = args.odir + "/" + (args.prefix + "_" if args.prefix else "")
# Set up logging: one log file per MPI rank
utils.mkdir(root + "log")
logfile = root + "log/log%03d.txt" % comm_world.rank
log_level = log.verbosity2level(config.get("verbosity"))
L = log.init(level=log_level, file=logfile, rank=comm_world.rank, shared=False)
# Run through all tods to determine the scanning patterns
L.info("Detecting scanning patterns")
boxes = np.zeros([len(ids),2,2])
for ind in range(comm_world.rank, len(ids), comm_world.size):
	id = ids[ind]
	entry = filedb.data[id]
	try:
		d = actdata.read(entry, ["boresight","tconst","cut","cut_noiseest"])
		d = actdata.calibrate(d, exclude=["autocut"])
		if d.ndet == 0 or d.nsamp == 0: raise errors.DataMissing("no data")
	except errors.DataMissing as e:
		# str(e), not e.message: the .message attribute was removed in Python 3.
		L.debug("Skipped %s (%s)" % (ids[ind], str(e)))
		continue
	# Reorder from az,el to el,az
# --- Argument parsing and parallel scan reading fragment (parser created earlier) ---
parser.add_argument("--nt", type=int, default=10)
# NOTE(review): type=str with default=0 mixes types — args.dets is the int 0
# when the flag is absent, a string otherwise; confirm read_scans handles both.
parser.add_argument("--dets", type=str, default=0)
parser.add_argument("--ntod", type=int, default=0)
parser.add_argument("-w", "--weighted", type=int, default=1)
parser.add_argument("-D", "--deslope", type=int, default=0)
args = parser.parse_args()
comm = mpi.COMM_WORLD
filedb.init()
# First whitespace-separated token of each line in the id list file is the tod id.
ids = [line.split()[0] for line in open(args.idlist, "r")]
# Optionally truncate to the first ntod ids (0 means no limit).
if args.ntod: ids = ids[:args.ntod]
# A directory input indicates a distributed map (dmap) rather than a single enmap.
is_dmap = os.path.isdir(args.imap)
log_level = log.verbosity2level(config.get("verbosity"))
L = log.init(level=log_level, rank=comm.rank)
tshape = (720, 720)
# Read in all our scans
L.info("Reading %d scans" % len(ids))
# Round-robin distribution of scan indices over MPI tasks.
myinds = np.arange(len(ids))[comm.rank::comm.size]
myinds, myscans = scanutils.read_scans(ids, myinds, actscan.ACTScan,
	filedb.data, dets=args.dets, downsample=config.get("downsample"))
myinds = np.array(myinds, int)
# Collect scan info. This currently fails if any task has empty myinds
read_ids = [ids[ind] for ind in utils.allgatherv(myinds, comm)]
from enlib import sharp, utils, enmap, curvedsky, log, coordinates

# --- Project a HEALPix map onto the geometry of a template enmap ---
parser = argparse.ArgumentParser()
parser.add_argument("ihealmap")    # input HEALPix map (FITS)
parser.add_argument("template")    # map whose geometry to project onto
parser.add_argument("ofile")       # output file
parser.add_argument("-n", "--ncomp", type=int, default=1)      # components to read (1 or 3)
parser.add_argument("-i", "--first", type=int, default=0)      # first field index in the HEALPix file
parser.add_argument("-v", "--verbosity", type=int, default=2)
parser.add_argument("-r", "--rot", type=str, default=None)     # coordinate rotation spec (presumably, e.g. gal->equ — confirm where used)
parser.add_argument("-u", "--unit", type=float, default=1)     # input values are divided by this
parser.add_argument("-O", "--order", type=int, default=0)      # interpolation order
parser.add_argument("-s", "--scalar", action="store_true")
args = parser.parse_args()
log_level = log.verbosity2level(args.verbosity)
L = log.init(level=log_level)
ncomp = args.ncomp
assert ncomp == 1 or ncomp == 3, "Only 1 or 3 components supported"
# Read the input maps: ncomp consecutive fields starting at --first.
L.info("Reading " + args.ihealmap)
imap = np.atleast_2d(
    healpy.read_map(args.ihealmap,
        field=tuple(range(args.first, args.first + ncomp))))
nside = healpy.npix2nside(imap.shape[-1])
# healpy marks missing pixels with large negative values (UNSEEN ~ -1.6e30).
mask = imap < -1e20
dtype = imap.dtype
bsize = 100
# Unit conversion, applied only to valid pixels so the sentinel values survive.
if args.unit != 1:
    imap[~mask] /= args.unit
config.default("verbosity", 1, "Verbosity for output. Higher means more verbose. 0 outputs only errors etc. 1 outputs INFO-level and 2 outputs DEBUG-level messages.")
# --- Build small maps around each point source in a catalog ---
parser = config.ArgumentParser(os.environ["HOME"] + "/.enkirc")
parser.add_argument("filelist")
parser.add_argument("srcs")
parser.add_argument("odir")
parser.add_argument("-R", "--radius", type=float, default=5.0)       # minimap radius (arcmin, presumably — see R below)
parser.add_argument("-r", "--resolution", type=float, default=0.25)  # pixel size, same units as radius
args = parser.parse_args()
comm = mpi4py.MPI.COMM_WORLD
myid = comm.rank
nproc = comm.size
log_level = log.verbosity2level(config.get("verbosity"))
L = log.init(level=log_level, rank=myid)
# Allow filelist to take the format filename:[slice]
toks = args.filelist.split(":")
filelist, fslice = toks[0], ":".join(toks[1:])
# Skip comment lines; the first token of each remaining line is the file name.
filelist = [line.split()[0] for line in open(filelist,"r") if line[0] != "#"]
# SECURITY NOTE(review): eval on a user-supplied slice string executes arbitrary
# code. Acceptable for a trusted command line; never expose to untrusted input.
filelist = eval("filelist"+fslice)
utils.mkdir(args.odir)
srcs = np.loadtxt(args.srcs).T
# create minimaps around each source
nsrc = srcs.shape[1]
ncomp = 1
# n = pixels across a (2*radius)-wide map; R converts the radius to radians
# (degrees/60 -> the input radius is in arcmin).
n = int(np.round(2*args.radius/args.resolution))
R = args.radius*np.pi/180/60
# --- Run-metadata dump fragment (continues an open file handle from earlier) ---
f.write(argstring + "\n")
# print() call form: the Python-2-only "print argstring" statement is a syntax
# error under Python 3; the call form works in both for a single argument.
print(argstring)
with open(root + "env.txt","w") as f:
	for k,v in os.environ.items(): f.write("%s: %s\n" %(k,v))
with open(root + "ids.txt","w") as f:
	for id in filelist: f.write("%s\n" % str(id))
shutil.copyfile(filedb.cjoin(["root","dataset","filedb"]), root + "filedb.txt")
# The todinfo snapshot is optional, so a missing file is ignored.
try: shutil.copyfile(filedb.cjoin(["root","dataset","todinfo"]), root + "todinfo.hdf")
except IOError: pass
# Set up logging: one log file per MPI rank
utils.mkdir(root + "log")
logfile = root + "log/log%03d.txt" % comm.rank
log_level = log.verbosity2level(config.get("verbosity"))
L = log.init(level=log_level, file=logfile, rank=comm.rank)
# And benchmarking
utils.mkdir(root + "bench")
benchfile = root + "bench/bench%03d.txt" % comm.rank

def parse_desc(desc, default=None):
	"""Parse a comma-separated key=value description string into a dict.

	Tokens without '=' are stored under the "value" key. Splitting respects
	bracket nesting via utils.split_outside. `default` supplies initial
	entries; None (the default) means start from an empty dict.
	"""
	# Avoid the shared-mutable-default pitfall: the original default={} shared
	# one dict object across calls. Building a fresh dict per call is
	# backward-compatible because the original only ever copied it.
	res = {} if default is None else default.copy()
	# Parse normally now that the : are out of the way
	for tok in utils.split_outside(desc, ",", "[({", "])}"):
		subtoks = tok.split("=")
		if len(subtoks) == 1:
			res["value"] = subtoks[0]
		else:
			key, val = subtoks
			res[key] = val
	return res
from enlib import sharp, utils, enmap, curvedsky, log, coordinates

# --- Project a HEALPix map onto the geometry of a template enmap ---
parser = argparse.ArgumentParser()
parser.add_argument("ihealmap")    # input HEALPix map (FITS)
parser.add_argument("template")    # map whose geometry to project onto
parser.add_argument("ofile")       # output file
parser.add_argument("-n", "--ncomp", type=int, default=1)      # components to read (1 or 3)
parser.add_argument("-i", "--first", type=int, default=0)      # first field index in the HEALPix file
parser.add_argument("-v", "--verbosity", type=int, default=2)
parser.add_argument("-r", "--rot", type=str, default=None)     # coordinate rotation spec (presumably, e.g. gal->equ — confirm where used)
parser.add_argument("-u", "--unit", type=float, default=1)     # input values are divided by this
parser.add_argument("-O", "--order", type=int, default=0)      # interpolation order
parser.add_argument("-s", "--scalar", action="store_true")
args = parser.parse_args()
log_level = log.verbosity2level(args.verbosity)
L = log.init(level=log_level)
ncomp = args.ncomp
assert ncomp == 1 or ncomp == 3, "Only 1 or 3 components supported"
# Read the input maps: ncomp consecutive fields starting at --first.
L.info("Reading " + args.ihealmap)
imap = np.atleast_2d(healpy.read_map(args.ihealmap,
    field=tuple(range(args.first,args.first+ncomp))))
nside = healpy.npix2nside(imap.shape[-1])
# healpy marks missing pixels with large negative values (UNSEEN ~ -1.6e30).
mask = imap < -1e20
dtype = imap.dtype
bsize = 100
# Unit conversion, applied only to valid pixels so the sentinel values survive.
if args.unit != 1: imap[~mask]/= args.unit
# Read the template
shape, wcs = enmap.read_map_geometry(args.template)
# --- Dump run metadata for reproducibility: command line, environment, id list ---
with open(args.odir + "/args.txt","w") as f:
	f.write(" ".join([pipes.quote(a) for a in sys.argv[1:]]) + "\n")
with open(args.odir + "/env.txt","w") as f:
	for k,v in os.environ.items(): f.write("%s: %s\n" %(k,v))
with open(args.odir + "/ids.txt","w") as f:
	for id in filelist: f.write("%s\n" % id)
shutil.copyfile(filedb.cjoin(["root","dataset","filedb"]), args.odir + "/filedb.txt")
# The todinfo snapshot is optional. Catch OSError in addition to IOError:
# shutil.copyfile can raise plain OSError (in Python 3 IOError is merely an
# alias for OSError, but on Python 2 they are distinct), so catching only
# IOError could let a missing/unreadable file abort the run.
try: shutil.copyfile(filedb.cjoin(["root","dataset","todinfo"]), args.odir + "/todinfo.txt")
except (IOError, OSError): pass
# Set up logging: one log file per MPI rank
utils.mkdir(args.odir + "/log")
logfile = args.odir + "/log/log%03d.txt" % myid
log_level = log.verbosity2level(config.get("verbosity"))
L = log.init(level=log_level, file=logfile, rank=myid, shared=False)
# And benchmarking
utils.mkdir(args.odir + "/bench")
benchfile = args.odir + "/bench/bench%03d.txt" % myid
# Read our point source list
params = pointsrcs.src2param(pointsrcs.read(args.srcs))
ntot = len(params)
# Eliminate insignificant sources: keep |amplitude| above minamp
# (column 2 is presumably the amplitude in uK — see the log message below).
params = params[np.abs(params[:,2])>args.minamp]
if comm.rank == 0:
	L.info("Got %d sources, keeping %d > %d uK" % (ntot,len(params),args.minamp))
	pointsrcs.write(args.odir + "/srcs.txt", pointsrcs.param2src(params))
# Our noise model is slightly different from the main noise model,
# --- Dump run metadata for reproducibility: command line, environment, id list ---
with open(args.odir + "/args.txt","w") as f:
	f.write(" ".join([pipes.quote(a) for a in sys.argv[1:]]) + "\n")
with open(args.odir + "/env.txt","w") as f:
	for k,v in os.environ.items(): f.write("%s: %s\n" %(k,v))
with open(args.odir + "/ids.txt","w") as f:
	for id in filelist: f.write("%s\n" % id)
# Snapshot the file database; the todinfo copy is optional, so failures to
# read/copy it are deliberately ignored.
shutil.copyfile(filedb.cjoin(["root","dataset","filedb"]), args.odir + "/filedb.txt")
try: shutil.copyfile(filedb.cjoin(["root","dataset","todinfo"]), args.odir + "/todinfo.txt")
except (IOError, OSError): pass
# Set up logging: one log file per MPI rank
utils.mkdir(args.odir + "/log")
logfile = args.odir + "/log/log%03d.txt" % myid
log_level = log.verbosity2level(config.get("verbosity"))
L = log.init(level=log_level, file=logfile, rank=myid, shared=False)
# And benchmarking
utils.mkdir(args.odir + "/bench")
benchfile = args.odir + "/bench/bench%03d.txt" % myid
# Read our point source list
params = pointsrcs.src2param(pointsrcs.read(args.srcs))
ntot = len(params)
# Eliminate insignificant sources: keep |amplitude| above minamp
# (column 2 is presumably the amplitude in uK — see the log message below).
params = params[np.abs(params[:,2])>args.minamp]
if comm.rank == 0:
	L.info("Got %d sources, keeping %d > %d uK" % (ntot,len(params),args.minamp))
	pointsrcs.write(args.odir + "/srcs.txt", pointsrcs.param2src(params))
# Our noise model is slightly different from the main noise model,
comm_world.rank / args.nsub) comm_sub = comm_world.Split(comm_world.rank / args.nsub, comm_world.rank % args.nsub) ids = todinfo.get_tods(args.sel, filedb.scans) tol = args.tol * utils.arcmin daz = args.daz * utils.arcmin dtype = np.float32 if config.get("map_bits") == 32 else np.float64 tods_per_map = args.group utils.mkdir(args.odir) root = args.odir + "/" + (args.prefix + "_" if args.prefix else "") # Set up logging utils.mkdir(root + "log") logfile = root + "log/log%03d.txt" % comm_world.rank log_level = log.verbosity2level(config.get("verbosity")) L = log.init(level=log_level, file=logfile, rank=comm_world.rank, shared=False) # Run through all tods to determine the scanning patterns L.info("Detecting scanning patterns") boxes = np.zeros([len(ids), 2, 2]) for ind in range(comm_world.rank, len(ids), comm_world.size): id = ids[ind] entry = filedb.data[id] try: d = actdata.read(entry, ["boresight", "tconst", "cut", "cut_noiseest"]) d = actdata.calibrate(d, exclude=["autocut"]) if d.ndet == 0 or d.nsamp == 0: raise errors.DataMissing("no data") except errors.DataMissing as e: L.debug("Skipped %s (%s)" % (ids[ind], str(e))) continue # Reorder from az,el to el,az
# --- Rank 0 records configuration and run metadata for reproducibility ---
if myid == 0:
	config.save(root + "config.txt")
	with open(root + "args.txt", "w") as f:
		f.write(" ".join([pipes.quote(a) for a in sys.argv[1:]]) + "\n")
	with open(root + "env.txt", "w") as f:
		for k, v in os.environ.items():
			f.write("%s: %s\n" % (k, v))
	with open(root + "ids.txt", "w") as f:
		for id in filelist:
			f.write("%s\n" % id)
	shutil.copyfile(config.get("filedb"), root + "filedb.txt")
# Set up logging: one log file per MPI rank
utils.mkdir(root + "log")
logfile = root + "log/log%03d.txt" % myid
log_level = log.verbosity2level(config.get("verbosity"))
L = log.init(level=log_level, file=logfile, rank=myid)
# And benchmarking
utils.mkdir(root + "bench")
benchfile = root + "bench/bench%03d.txt" % myid
# Read in all our scans
L.info("Reading scans")
# Round-robin assignment of input files to MPI tasks.
tmpinds = np.arange(len(filelist))[myid::nproc]
myscans, myinds = [], []
for ind in tmpinds:
	try:
		# Try reading as a pre-packaged scan file first...
		d = scan.read_scan(filelist[ind])
	except IOError:
		try:
			# ...then fall back to building the scan from the ACT database.
			d = data.ACTScan(db[filelist[ind]])
		except errors.DataMissing as e:
# --- Hit-count map script: accumulate hits from each scan in a selection ---
filedb.init()
ids = filedb.scans[args.sel]
comm = mpi.COMM_WORLD
dtype = np.float64
area = enmap.read_map(args.area).astype(dtype)
utils.mkdir(args.odir)
# All output files share this prefix: "odir/prefix_..." or just "odir/".
root = args.odir + "/" + (args.prefix + "_" if args.prefix else "")
# Set up logging: one log file per MPI rank
utils.mkdir(root + "log")
logfile = root + "log/log%03d.txt" % comm.rank
log_level = log.verbosity2level(config.get("verbosity"))
L = log.init(level=log_level, file=logfile, rank=comm.rank)
L.info("Initialized")
# Loop through each scan, and compute the hits
hits = enmap.zeros((3, ) + area.shape[-2:], area.wcs, dtype=dtype)
# Round-robin distribution of scans over MPI tasks.
myinds = np.arange(comm.rank, len(ids), comm.size)
for ind in myinds:
	id = ids[ind]
	entry = filedb.data[id]
	try:
		scan = actscan.ACTScan(entry)
		if scan.ndet == 0 or scan.nsamp == 0:
			raise errors.DataMissing("Tod contains no valid data")
	except errors.DataMissing as e:
		# str(e), not e.message: the .message attribute was removed in Python 3.
		L.debug("Skipped %s (%s)" % (str(id), str(e)))
import astropy.io.fits
from enlib import enmap, fft, coordinates, utils, bunch, interpol, bench, zipper, mpi, log, wcsutils

# --- Script setup for combining/unskewing input maps ---
parser = argparse.ArgumentParser()
parser.add_argument("infos", nargs="+")
parser.add_argument("odir")
parser.add_argument("--nmax", type=int, default=0)
parser.add_argument("-d", "--downgrade", type=int, default=1)   # downgrade factor for output resolution
parser.add_argument("-O", "--order", type=int, default=0)       # interpolation order
parser.add_argument("-U", "--unskew", type=str, default="shift")
parser.add_argument("-C", "--cmode", type=int, default=0)
args = parser.parse_args()
fft.engine = "fftw"
comm = mpi.COMM_WORLD
log_level = log.verbosity2level(1)
L = log.init(level=log_level, rank=comm.rank, shared=False)
dtype = np.float32
ref_time = 55500   # reference time (presumably an MJD — confirm against time handling below)
beam_sigma = 1.4*utils.arcmin*utils.fwhm
corrfun_smoothing = 5*beam_sigma

def prepare(map, hitmap=False):
	"""Prepare a map for input by cutting off one pixel along each edge, as
	out-of-bounds data accumulates there, and downgrading to the target
	resolution."""
	# Get rid of polarization for now. Remove this later.
	if map.ndim == 3: map = map[:1]
	# Cut off edge pixels (zeroed in place; function body continues beyond this fragment)
	map[...,:1,:] = 0
	map[...,-1:,:] = 0
	map[...,:,:1] = 0
# --- Per-tod signal/div statistics script fragment (parser created earlier) ---
parser.add_argument("sel")
parser.add_argument("area")
parser.add_argument("odir")
parser.add_argument("prefix",nargs="?")
parser.add_argument("--ndet", type=int, default=0, help="Max number of detectors")
args = parser.parse_args()
filedb.init()
utils.mkdir(args.odir)
# All output files share this prefix: "odir/prefix_..." or just "odir/".
root = args.odir + "/" + (args.prefix + "_" if args.prefix else "")
log_level = log.verbosity2level(config.get("verbosity"))
# Map precision is configurable: 32-bit floats halve memory at some accuracy cost.
dtype = np.float32 if config.get("map_bits") == 32 else np.float64
area = enmap.read_map(args.area)
comm = mpi.COMM_WORLD
ids = filedb.scans[args.sel]
L = log.init(level=log_level, rank=comm.rank)
# Set up our output map.
osig = enmap.zeros((1,)+area.shape[-2:], area.wcs, dtype)
odiv = osig*0
# Per-tod statistics arrays: one entry per id (sig/div totals and medians,
# presumably — confirm against how they are filled later).
sig_all = np.zeros(len(ids))
sig_med = sig_all*0
div_all, div_med = sig_all*0, sig_med*0
# Read in all our scans, distributed round-robin over MPI tasks.
for ind in range(comm.rank, len(ids), comm.size):
	id = ids[ind]
	entry = filedb.data[id]
	try:
		d = actscan.ACTScan(entry)
		if d.ndet == 0 or d.nsamp == 0:
import numpy as np, argparse, pixie
from enlib import mpi, utils, log, fft

# Simulate PIXIE time-ordered data for a set of orbits, one orbit at a time.
parser = argparse.ArgumentParser()
parser.add_argument("orbits")
parser.add_argument("odir")
parser.add_argument("-C", "--config", type=str, default=None)
parser.add_argument("-s", "--seed", type=int, default=0)
args = parser.parse_args()
fft.engine = "fftw"
orbits = pixie.parse_ints(args.orbits)
comm = mpi.COMM_WORLD
L = log.init(rank=comm.rank, shared=False, level='DEBUG')
# Build our actual simulator. This is shared between tods
config = pixie.load_config(args.config)
sim = pixie.PixieSim(config)
utils.mkdir(args.odir)
# Iterate directly over the orbit values. The original
# "for ind, orbit in enumerate(orbits): orbit = orbits[ind]" reassignment was a
# no-op, and ind was otherwise unused.
for orbit in orbits:
	L.info("orbit %3d" % orbit)
	# Seed deterministically per (run seed, orbit) so reruns reproduce each tod.
	np.random.seed([args.seed, orbit])
	tod = sim.sim_tod(orbit, comm=comm)
	# Only rank 0 writes the assembled tod; free it before the next orbit.
	if comm.rank == 0:
		pixie.write_tod(args.odir + "/tod%03d.hdf" % orbit, tod)
	del tod
import numpy as np, argparse
from enlib import enmap, log

# Stack several maps sharing one geometry into a single multi-map output file.
parser = argparse.ArgumentParser()
parser.add_argument("ifiles", nargs="+")
parser.add_argument("ofile")
parser.add_argument("-v", "--verbose", action="store_true")
args = parser.parse_args()

# Verbose mode reports progress; otherwise only errors are shown.
verbosity = log.DEBUG if args.verbose else log.ERROR
L = log.init(level=verbosity)

stack = []
for fname in args.ifiles:
	L.info("Reading %s" % fname)
	stack.append(enmap.read_map(fname))

L.info("Stacking")
# Combine into one array carrying the shared wcs of the first map.
stack = enmap.samewcs(stack, stack[0])

L.info("Writing %s" % args.ofile)
enmap.write_map(args.ofile, stack)
beam = bunch.Bunch(profile=np.exp(-0.5 * (r / b)**2), rmax=10 * b) beam_global = b if not args.oldformat: beam_global = 1.0 print "Using new model" else: print "Using old model" # prior on beam deformations beam_rel_min = 0.5 beam_rel_max = 2.0 beam_ratio_max = 3.0 # prior on position pos_rel_max = 5 * m2r log_level = log.verbosity2level(args.verbosity) L = log.init(level=log_level, rank=comm.rank, shared=False) bench.stats.info = [("time", "%6.2f", "%6.3f", 1e-3), ("cpu", "%6.2f", "%6.3f", 1e-3), ("mem", "%6.2f", "%6.2f", 2.0**30), ("leak", "%6.2f", "%6.3f", 2.0**30)] filelist = utils.read_lines(args.filelist) srcs = pointsrcs.read(args.srcs) posi, ampi, beami = [0, 1], [2, 3, 4], [5, 6, 7] nsrc = len(srcs) utils.mkdir(args.odir) if args.oldformat:
)
# --- Build small maps around each point source in a catalog ---
parser = config.ArgumentParser(os.environ["HOME"] + "/.enkirc")
parser.add_argument("filelist")
parser.add_argument("srcs")
parser.add_argument("odir")
parser.add_argument("-R", "--radius", type=float, default=5.0)       # minimap radius (arcmin, presumably — see R below)
parser.add_argument("-r", "--resolution", type=float, default=0.25)  # pixel size, same units as radius
args = parser.parse_args()
comm = mpi4py.MPI.COMM_WORLD
myid = comm.rank
nproc = comm.size
log_level = log.verbosity2level(config.get("verbosity"))
L = log.init(level=log_level, rank=myid)
# Allow filelist to take the format filename:[slice]
toks = args.filelist.split(":")
filelist, fslice = toks[0], ":".join(toks[1:])
# Skip comment lines; the first token of each remaining line is the file name.
filelist = [line.split()[0] for line in open(filelist, "r") if line[0] != "#"]
# SECURITY NOTE(review): eval on a user-supplied slice string executes arbitrary
# code. Acceptable for a trusted command line; never expose to untrusted input.
filelist = eval("filelist" + fslice)
utils.mkdir(args.odir)
srcs = np.loadtxt(args.srcs).T
# create minimaps around each source
nsrc = srcs.shape[1]
ncomp = 1
# n = pixels across a (2*radius)-wide map; R converts the radius to radians
# (degrees/60 -> the input radius is in arcmin).
n = int(np.round(2 * args.radius / args.resolution))
R = args.radius * np.pi / 180 / 60