Example #1
def parse_src_handling():
	res = {}
	res["mode"]   = config.get("src_handling")
	if res["mode"] == "none": return None
	res["amplim"] = config.get("src_handling_lim")
	res["srcs"]   = None
	srcfile = config.get("src_handling_list")
	if srcfile:
		res["srcs"] = pointsrcs.read(srcfile)
	return res
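A minimal usage sketch for the dictionary this returns; the calling code below is hypothetical and not taken from the original project:

src_handling = parse_src_handling()
if src_handling is None:
	print("point source handling disabled")
else:
	# mode, amplitude limit and (optionally) a pre-read catalog are available
	print("mode=%s amplim=%s catalog=%s" % (src_handling["mode"], src_handling["amplim"],
		"yes" if src_handling["srcs"] is not None else "no"))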
Example #2
File: tod2map2.py Project: amaurea/tenki
def parse_src_handling():
	res = {}
	res["mode"]   = config.get("src_handling")
	if res["mode"] == "none": return None
	res["amplim"] = config.get("src_handling_lim")
	res["srcs"]   = None
	srcfile = config.get("src_handling_list")
	if srcfile:
		res["srcs"] = pointsrcs.read(srcfile)
	return res
Example #3
from __future__ import division, print_function
import numpy as np, argparse, os, glob
from scipy import ndimage
from enlib import enmap, utils, pointsrcs, mpi
from enact import files
parser = argparse.ArgumentParser()
parser.add_argument("ifile")
parser.add_argument("srcs")
parser.add_argument("beam")
parser.add_argument("ofile")
parser.add_argument("-m", "--mask", type=float, default=0)
parser.add_argument("-a", "--apod", type=int, default=16)
args = parser.parse_args()

comm = mpi.COMM_WORLD
srcs = pointsrcs.read(args.srcs)
beam = files.read_beam(args.beam)
beam[0] *= utils.degree


def build_single(ifile, srcs, beam, ofile, mask_level=0, apod_size=16):
    imap = enmap.read_map(ifile)
    omap, oslice = pointsrcs.sim_srcs(imap.shape[-2:],
                                      imap.wcs,
                                      srcs,
                                      beam,
                                      return_padded=True)
    if mask_level:
        mask = omap > mask_level
        omap = 1 - np.cos(
            np.minimum(1,
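The listing cuts this snippet off inside the masking expression. As a hedged sketch only, the cosine taper it appears to be building could look like the helper below; cosine_apod is a hypothetical name and the exact form in the original file may differ:

import numpy as np
from scipy import ndimage

def cosine_apod(mask, apod_size):
    # Distance in pixels from the masked region, clipped to apod_size and
    # turned into a taper that is 0 on the mask and rises to 1 away from it.
    dist = ndimage.distance_transform_edt(~mask)
    x = np.minimum(1, dist / float(apod_size))
    return 0.5 - 0.5 * np.cos(np.pi * x)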
Example #4
		for id in filelist:
			f.write("%s\n" % id)
	shutil.copyfile(filedb.cjoin(["root","dataset","filedb"]),  args.odir + "/filedb.txt")
	try: shutil.copyfile(filedb.cjoin(["root","dataset","todinfo"]), args.odir + "/todinfo.txt")
	except (IOError, OSError): pass
# Set up logging
utils.mkdir(args.odir + "/log")
logfile   = args.odir + "/log/log%03d.txt" % myid
log_level = log.verbosity2level(config.get("verbosity"))
L = log.init(level=log_level, file=logfile, rank=myid, shared=False)
# And benchmarking
utils.mkdir(args.odir + "/bench")
benchfile = args.odir + "/bench/bench%03d.txt" % myid

# Read our point source list
params = pointsrcs.src2param(pointsrcs.read(args.srcs))
ntot   = len(params)
# Eliminate insignificant sources
params = params[np.abs(params[:,2])>args.minamp]

if comm.rank == 0:
	L.info("Got %d sources, keeping %d > %d uK" % (ntot,len(params),args.minamp))
	pointsrcs.write(args.odir + "/srcs.txt", pointsrcs.param2src(params))

# Our noise model is slightly different from the main noise model,
# since we assume it is white and independent between detectors,
# which is not strictly true. To minimize error, we measure the
# noise level using a method which is as close as possible to
# how we will use it later
def blockify(tod, w): return tod[:tod.size//w*w].reshape(-1,w)
def get_desloped_var(blocks):
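The listing truncates get_desloped_var right after its definition. A minimal sketch of the idea described in the comment above, assuming "desloped" means removing a straight line through each block's endpoints before taking the variance; the name and details below are guesses, not the project's implementation:

import numpy as np

def get_desloped_var_sketch(blocks):
	# blocks has shape (nblock, w): subtract the line through each block's first
	# and last sample, then return the mean variance of the residuals
	blocks = np.asarray(blocks, dtype=float)
	t = np.linspace(0, 1, blocks.shape[-1])
	slope = blocks[:,-1:] - blocks[:,:1]
	desloped = blocks - blocks[:,:1] - slope*t
	return np.mean(np.var(desloped, axis=-1))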
Example #5
		for id in filelist:
			f.write("%s\n" % id)
	shutil.copyfile(filedb.cjoin(["root","dataset","filedb"]),  args.odir + "/filedb.txt")
	try: shutil.copyfile(filedb.cjoin(["root","dataset","todinfo"]), args.odir + "/todinfo.txt")
	except IOError: pass
# Set up logging
utils.mkdir(args.odir + "/log")
logfile   = args.odir + "/log/log%03d.txt" % myid
log_level = log.verbosity2level(config.get("verbosity"))
L = log.init(level=log_level, file=logfile, rank=myid, shared=False)
# And benchmarking
utils.mkdir(args.odir + "/bench")
benchfile = args.odir + "/bench/bench%03d.txt" % myid

# Read our point source list
params = pointsrcs.src2param(pointsrcs.read(args.srcs))
ntot   = len(params)
# Eliminate insignificant sources
params = params[np.abs(params[:,2])>args.minamp]

if comm.rank == 0:
	L.info("Got %d sources, keeping %d > %d uK" % (ntot,len(params),args.minamp))
	pointsrcs.write(args.odir + "/srcs.txt", pointsrcs.param2src(params))

# Our noise model is slightly different from the main noise model,
# since we assume it is white and independent between detectors,
# which is not strictly true. To minimize error, we measure the
# noise level using a method which is as close as possible to
# how we will use it later
def blockify(tod, w): return tod[:tod.size//w*w].reshape(-1,w)
def get_desloped_var(blocks):
Example #6
                           comm=comm).astype(dtype)
         if not pertod:
             filter = mapmaking.FilterBuddyDmap(myscans,
                                                mysubs,
                                                m,
                                                sys=sys,
                                                mul=-mul,
                                                tmul=tmul)
         else:
             raise NotImplementedError(
                 "FIXME: Implement per tod buddy subtraction with dmaps"
             )
 elif param["name"] == "src":
     if param["value"] == 0: continue
     if "params" not in param: params = myscans[0].pointsrcs
     else: params = pointsrcs.read(param["params"])
     params = pointsrcs.src2param(params)
     params = params.astype(np.float64)
     print "FIXME: how to handle per-source beams? Forcing to relative for now"
     params[:, 5:7] = 1
     params[:, 7] = 0
     filter = mapmaking.FilterAddSrcs(myscans,
                                      params,
                                      sys=param["sys"],
                                      mul=-float(param["mul"]))
 else:
     raise ValueError("Unrecognized fitler name '%s'" % param["name"])
 # Add to normal filters or post-noise-model filters based on parameters
 if "postnoise" in param and int(param["postnoise"]) > 0:
     print "postnosie"
     filters2.append(filter)
Example #7
def read_srcs(fname):
	data = pointsrcs.read(fname)
	return np.array([data.ra*utils.degree, data.dec*utils.degree,data.I])
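A small usage sketch; the filename is a placeholder:

srcs = read_srcs("srcs.txt")   # hypothetical catalog path
ra, dec, amp = srcs            # rows are RA and Dec in radians, then the amplitude I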
Example #8
File: tod2map2.py Project: amaurea/tenki
			# Drift is in degrees per hour, but we want it per second
			drift = float(param["drift"])/3600
			area = enmap.zeros((args.ncomp*(1+leftright),)+ashape[-2:], awcs, dtype)
			# Find the duration of each tod. We need this for the y offsets
			nactive = utils.allgather(np.array(len(active_scans)), comm)
			offs    = utils.cumsum(nactive, endpoint=True)
			durs    = np.zeros(np.sum(nactive))
			for i, scan in enumerate(active_scans): durs[offs[comm.rank]+i] = scan.nsamp/scan.srate
			durs    = utils.allreduce(durs, comm)
			ys      = utils.cumsum(durs)*drift
			my_ys   = ys[offs[comm.rank]:offs[comm.rank+1]]
			# That was surprisingly cumbersome
			signal  = mapmaking.SignalNoiseRect(active_scans, area, drift, my_ys, comm, name=effname, mode=param["mode"], ofmt=param["ofmt"], output=param["output"]=="yes")
		elif param["type"] == "srcsamp":
			if param["srcs"] == "none": srcs = None
			else: srcs = pointsrcs.read(param["srcs"])
			minamp = float(param["minamp"])
			signal = mapmaking.SignalSrcSamp(active_scans, dtype=dtype, comm=comm,
					srcs=srcs, amplim=minamp)
			signal_srcsamp = signal
		else:
			raise ValueError("Unrecognized signal type '%s'" % param["type"])
		# Hack. Special source handling for some signals
		if white_src_handler and param["type"] in ["map","dmap","fmap","fdmap"]:
			white_src_handler.add_signal(signal)
		# Add signal to our list of signals to map
		signals.append(signal)

	def matching_signals(params, signal_params, signals):
		for sparam, signal in zip(signal_params, signals):
			if sparam["name"] in params and params[sparam["name"]] == "yes":
Example #9
File: srclik_tod.py Project: amaurea/tenki
else:
	print "Using old model"
# prior on beam deformations
beam_rel_min = 0.5
beam_rel_max = 2.0
beam_ratio_max = 3.0
# prior on position
pos_rel_max = 5*m2r

log_level = log.verbosity2level(args.verbosity)
L = log.init(level=log_level, rank=comm.rank, shared=False)
bench.stats.info = [("time","%6.2f","%6.3f",1e-3),("cpu","%6.2f","%6.3f",1e-3),("mem","%6.2f","%6.2f",2.0**30),("leak","%6.2f","%6.3f",2.0**30)]

filelist = utils.read_lines(args.filelist)

srcs = pointsrcs.read(args.srcs)
posi, ampi, beami = [0,1], [2,3,4], [5,6,7]
nsrc = len(srcs)

utils.mkdir(args.odir)

if args.oldformat:
	def apply_model(tod, pflat, d, dir=1):
		ptsrc_data.pmat_model(tod, pflat, d, dir=dir)
	def pmat_thumbs(dir, tod, rhs, boxes, d):
		ptsrc_data.pmat_thumbs(dir, tod, rhs, d.point, d.phase, boxes)
else:
	def apply_model(tod, pflat, d, dir=1):
		ptsrc_data.pmat_beam_foff(tod, pflat, beam, d, dir=dir)
	def pmat_thumbs(dir, tod, rhs, boxes, d):
		ptsrc_data.pmat_thumbs_hor(dir, tod, rhs, d.point, d.phase, boxes, d.rbox, d.nbox, d.ys)
Example #10
 if not use_dmap:
     myinds = scanutils.distribute_scans2(inds, costs, comm)
 else:
     myinds, mysubs, mybbox = scanutils.distribute_scans2(
         inds, costs, comm, boxes)
 L.info("Rereading shuffled scans")
 del myscans  # scans do take up some space, even without the tod being read in
 myinds, myscans = scanutils.read_scans(
     chunk_ids,
     myinds,
     actscan.ACTScan,
     filedb.data,
     downsample=config.get("downsample"))
 if args.srcsub:
     #### 2. Prepare our point source database and the corresponding cuts
     src_override = pointsrcs.read(args.srcs) if args.srcs else None
     for scan in myscans:
         scan.srcparam = pointsrcs.src2param(
             src_override if src_override is not None else scan.
             pointsrcs)
         scan.srcparam, nmerged = planet9.merge_nearby(scan.srcparam)
         planet9.cut_srcs_rad(scan, scan.srcparam[nmerged > 1])
         hour = (scan.mjd0 % 1) * 24  # fraction of a day converted to hours
         isday = hour > 11 and hour < 23
         if isday:
             planet9.cut_bright_srcs_daytime(
                 scan,
                 scan.srcparam,
                 alim_include=args.srclim_day,
                 errbox=dayerr)
         else:
Example #11
			# Drift is in degrees per hour, but we want it per second
			drift = float(param["drift"])/3600
			area = enmap.zeros((args.ncomp*(1+leftright),)+ashape[-2:], awcs, dtype)
			# Find the duration of each tod. We need this for the y offsets
			nactive = utils.allgather(np.array(len(active_scans)), comm)
			offs    = utils.cumsum(nactive, endpoint=True)
			durs    = np.zeros(np.sum(nactive))
			for i, scan in enumerate(active_scans): durs[offs[comm.rank]+i] = scan.nsamp/scan.srate
			durs    = utils.allreduce(durs, comm)
			ys      = utils.cumsum(durs)*drift
			my_ys   = ys[offs[comm.rank]:offs[comm.rank+1]]
			# That was surprisingly cumbersome
			signal  = mapmaking.SignalNoiseRect(active_scans, area, drift, my_ys, comm, name=effname, mode=param["mode"], ofmt=param["ofmt"], output=param["output"]=="yes")
		elif param["type"] == "srcsamp":
			if param["srcs"] == "none": srcs = None
			else: srcs = pointsrcs.read(param["srcs"])
			minamp = float(param["minamp"])
			if "mask" in param: m = enmap.read_map(param["mask"]).astype(dtype)
			else: m = None
			signal = mapmaking.SignalSrcSamp(active_scans, dtype=dtype, comm=comm,
					srcs=srcs, amplim=minamp, mask=m)
			signal_srcsamp = signal
		else:
			raise ValueError("Unrecognized signal type '%s'" % param["type"])
		# Hack. Special source handling for some signals
		if white_src_handler and param["type"] in ["map","dmap","fmap","fdmap"]:
			white_src_handler.add_signal(signal)
		# Add signal to our list of signals to map
		signals.append(signal)

	def matching_signals(params, signal_params, signals):
Example #12
def read_srcs(fname):
	data = pointsrcs.read(fname)
	return np.array([data.ra*utils.degree, data.dec*utils.degree,data.I])