Example #1
def get_rangedata(id):
    entry = filedb.data[id]
    # Read the tod as usual
    with show("read"):
        d = actdata.read(entry)
    with show("calibrate"):
        # Don't apply time constant (and hence butterworth) deconvolution since we
        # will fit these ourselves
        d = actdata.calibrate(d, exclude=["autocut", "tod_fourier"])
    if d.ndet == 0 or d.nsamp < 2: raise errors.DataMissing("no data in tod")
    tod = d.tod
    del d.tod
    # Very simple white noise model
    with show("noise"):
        ivar = estimate_ivar(tod)
        asens = np.sum(ivar)**-0.5 / d.srate**0.5
    with show("planet mask"):
        # Generate planet cut
        planet_cut = cuts.avoidance_cut(d.boresight, d.point_offset, d.site,
                                        args.planet, R)
    with show("atmosphere"):
        # Subtract atmospheric model
        tod -= estimate_atmosphere(tod, planet_cut, d.srate, model_fknee,
                                   model_alpha)
        tod = tod.astype(dtype, copy=False)
    with show("extract"):
        # Should now be reasonably clean of correlated noise. Extract our range data
        rdata = build_rangedata(tod, planet_cut, d, ivar)
    return rdata
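
Example #1 calls estimate_ivar without showing it. Below is a minimal sketch consistent with the inline computation in Example #4; the function name comes from Example #1, while the block size csize and the 2**0.5 correction are carried over from Example #4, so treat the details as assumptions:

import numpy as np

def estimate_ivar(tod, csize=100):
    # Robust per-detector white-noise inverse variance, as in Example #4:
    # difference neighbouring samples to suppress slow drifts, average the
    # squared differences in blocks of csize samples, and take the median
    # over blocks so glitches don't bias the estimate.
    ndet = tod.shape[0]
    diff = tod[:, 1:] - tod[:, :-1]
    diff = diff[:, :diff.shape[-1] // csize * csize].reshape(ndet, -1, csize)
    return 1 / (np.median(np.mean(diff**2, -1), -1) / 2**0.5)
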
Example #2
def read_metadata(entry):
    '''
    Parameters
    ----------
    entry : filedb.data object

    Returns
    -------
    data : enlib.dataset.DataSet instance
    '''

    data = actdata.read(entry, exclude=['tod'])
    data = actdata.calibrate(data, exclude=['autocut'])
    if data.ndet == 0 or data.nsamp == 0:
        raise errors.DataMissing("No data in tod")
    return data
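
A hedged usage sketch for read_metadata, assuming filedb has been initialized and args.sel selects scans as in the surrounding examples:

filedb.init()
for id in filedb.scans[args.sel]:
    try:
        meta = read_metadata(filedb.data[id])
    except errors.DataMissing as e:
        print("Skipping %s (%s)" % (id, str(e)))
        continue
    # meta is a calibrated DataSet without the tod samples themselves
    print(id, meta.ndet, meta.nsamp)
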
Example #3
def scan_iterator(filelist,
                  inds,
                  reader,
                  db=None,
                  dets=None,
                  quiet=False,
                  downsample=1,
                  hwp_resample=False):
    """Given a set of ids/files and a set of indices into that list. Try
	to read each of these scans. Returns a list of successfully read scans
	and a list of their indices."""
    for ind in inds:
        try:
            if isinstance(filelist[ind], list): raise IOError
            d = enscan.read_scan(filelist[ind])
            actdata.read(filedb.data[filelist[ind]])
        except IOError:
            try:
                if isinstance(filelist[ind], list):
                    entry = [db[id] for id in filelist[ind]]
                else:
                    entry = db[filelist[ind]]
                d = reader(entry)
                if d.ndet == 0 or d.nsamp == 0:
                    raise errors.DataMissing("Tod contains no valid data")
            except errors.DataMissing as e:
                if not quiet:
                    L.debug("Skipped %s (%s)" %
                            (str(filelist[ind]), e.message))
                continue
        if dets:
            if dets.startswith("@"):
                uids = [int(w) for w in open(dets[1:], "r")]
                _, det_inds = utils.common_inds([uids, d.dets])
                d = d[det_inds]
            else:
                d = eval("d[%s]" % dets)
        hwp_active = np.any(d.hwp_phase[0] != 0)
        if hwp_resample and hwp_active:
            mapping = enscan.build_hwp_sample_mapping(d.hwp)
            d = d.resample(mapping)
        d = d[:, ::downsample]
        if not quiet: L.debug("Read %s" % str(filelist[ind]))
        yield ind, d
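
A plausible driver loop for scan_iterator, following the filedb/MPI conventions used elsewhere in these examples; passing actscan.ACTScan as the reader and the process() call are assumptions for illustration:

ids = filedb.scans[args.sel]
myinds = range(comm.rank, len(ids), comm.size)
for ind, d in scan_iterator(ids, myinds, actscan.ACTScan, db=filedb.data,
                            downsample=config.get("downsample")):
    # d is a successfully read (and possibly resampled) scan,
    # ind its index into ids
    process(ind, d)
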
Example #4
if args.equator: sys += "/0_0"
utils.mkdir(args.odir)
prefix = args.odir + "/"
if args.tag: prefix += args.tag + "_"

for ind in range(comm.rank, len(ids), comm.size):
	id    = ids[ind]
	bid   = id.replace(":","_")
	entry = filedb.data[id]
	# Read the tod as usual
	try:
		with bench.show("read"):
			d = actdata.read(entry)
		with bench.show("calibrate"):
			d = actdata.calibrate(d, exclude=["autocut"])
		if d.ndet == 0 or d.nsamp < 2: raise errors.DataMissing("no data in tod")
	except errors.DataMissing as e:
		print "Skipping %s (%s)" % (id, e.message)
		continue
	print "Processing %s" % id
	# Very simple white noise model
	with bench.show("ivar"):
		tod  = d.tod
		del d.tod
		tod -= np.mean(tod,1)[:,None]
		tod  = tod.astype(dtype)
		diff = tod[:,1:]-tod[:,:-1]
		diff = diff[:,:diff.shape[-1]//csize*csize].reshape(d.ndet,-1,csize)
		ivar = 1/(np.median(np.mean(diff**2,-1),-1)/2**0.5)
		del diff
	# Generate planet cut
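	# The snippet is truncated after this comment; based on Example #1, the
	# planet cut presumably continues along these lines (args.planet and the
	# avoidance radius R as defined there):
	with bench.show("planet cut"):
		planet_cut = cuts.avoidance_cut(d.boresight, d.point_offset, d.site,
		                                args.planet, R)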
Example #5
        abscal = d.gain_correction[entry.tag]

        if d.gain_mode == 'mce':
            abscal /= d.mce_gain
        elif d.gain_mode == 'mce_compat':
            abscal /= d.mce_gain * 1217.8583043
        else:
            raise ValueError('gain_mode {} not understood'.format(d.gain_mode))

        with bench.show("calibrate"):
            d = actdata.calibrate(d, exclude=["autocut"])

        rel_gain = d.gain_raw.copy()  # To store later on.

        if d.ndet == 0 or d.nsamp < 2:
            raise errors.DataMissing("no data in tod")
        # Select detectors if needed.
        if dbox is not None:
            mid = np.mean(utils.minmax(d.point_template, 0), 0)
            off = d.point_template - mid
            good = np.all((off > dbox[0]) & (off < dbox[1]), -1)
            d = d.restrict(dets=d.dets[good])
    except errors.DataMissing as e:
        print("Skipping %s (%s)" % (id, e.message))
        continue
    print("Processing %s" % id, d.ndet, d.nsamp)

    # Very simple white noise model.
    with bench.show("ivar"):
        tod = d.tod
        del d.tod
Example #6
    log_level = log.verbosity2level(config.get("verbosity"))
    L = log.init(level=log_level, rank=comm.rank)
    tagger = WorkspaceTagger()

    ids = filedb.scans[args.sel]
    for ind in range(comm.rank, len(ids), comm.size):
        id = ids[ind]
        entry = filedb.data[id]
        try:
            # We only need the pointing to determine the scan bounds, so
            # read just the boresight, point offsets and site information.
            d = actdata.read(entry, ["boresight", "point_offsets", "site"])
            d = actdata.calibrate(d, exclude=["autocut"])
            if d.ndet == 0 or d.nsamp == 0:
                raise errors.DataMissing("Tod contains no valid data")
            if d.nsamp < min_samps:
                raise errors.DataMissing("Tod is too short")
        except errors.DataMissing as e:
            L.debug("Skipped %s (%s)" % (id, e.message))
            continue
        L.debug(id)

        # Get the scan el and az bounds
        az1 = np.min(d.boresight[1])
        az2 = np.max(d.boresight[1])
        el = np.mean(d.boresight[2])

        if not valid_az_range(az1, az2):
            L.debug("Skipped %s (%s)" % (id, "Azimuth crosses poles"))
            continue
Example #7
def onlyfinite(a): return a[np.isfinite(a)]

# Process each scan independently
myinds = np.arange(len(filelist))[myid::nproc]
for ind in myinds:
	ofile = args.odir + "/%s.hdf" % filelist[ind]
	if args.c and os.path.isfile(ofile):
		L.info("Already done %s" % filelist[ind])
		continue
	L.info("Processing %s" % filelist[ind])
	try:
		d = scan.read_scan(filelist[ind])
	except (IOError, OSError):
		try:
			d = actscan.ACTScan(db[filelist[ind]])
			if d.ndet == 0 or d.nsamp == 0: raise errors.DataMissing("all samples cut")
		except errors.DataMissing as e:
			L.debug("Skipped %s (%s)" % (filelist[ind], str(e)))
			continue
	try:
		L.debug("Reading samples")
		tod   = d.get_samples().astype(dtype)
	except errors.DataMissing as e:
		L.debug("Skipped %s (%s)" % (filelist[ind], str(e)))
		continue

	# Measure noise
	L.debug("Noise")
	ivar = 1/np.array([np.median(onlyfinite(get_desloped_var(blockify(t,20)))) for t in tod])
	# Set up pmat for this scan
	L.debug("Pmats")
Example #8
for ind in range(len(ids)):
    id = ids[ind]
    ofile = op.join(args.odir, id.replace(":","_")+".png")
    # find sources
    sids = lib.get_sids_in_tod(id, srcpos[:,base_sids], bounds[...,ind], base_sids, src_sys=sys, pad=poly_pad)
    if len(sids) == 0:
        print(f"{id} has no sources: skipping")
        continue
    else:
        print(f"found {len(sids)} sources")
    # insert source into tod
    entry = filedb.data[id]
    try:
        scan = actscan.ACTScan(entry, verbose=verbose>=2)
        if scan.ndet < 2 or scan.nsamp < 1: raise errors.DataMissing("no data in tod")
    except errors.DataMissing as e:
        print("%s skipped: %s" % (id, e))
        continue
    scan = scan[:,::down]
    scan.tod = scan.get_samples()
    # build source lists
    # ra, dec, T, Q, U, omg, phi
    t0 = u.mjd2ctime(scan.mjd0)
    phi0 = np.mod(phi+t0*omg, 2*np.pi)
    srcs = np.array([srcpos[0], srcpos[1], amps, amps*0, amps*0, omg, phi0, D])
    # srcs = np.array([srcpos[0], srcpos[1], amps, amps*0, amps*0])
    # build pointing matrix
    P = lib.PmatTotVar(scan, srcs, perdet=False, sys=sys)
    # P = lib.PmatTot(scan, srcpos[:,sids], perdet=False, sys=sys)
    # project pulsar into the given tod
Example #9
logfile = root + "log/log%03d.txt" % comm.rank
log_level = log.verbosity2level(config.get("verbosity"))
L = log.init(level=log_level, file=logfile, rank=comm.rank)

L.info("Initialized")

# Loop through each scan, and compute the hits
hits = enmap.zeros((3, ) + area.shape[-2:], area.wcs, dtype=dtype)
myinds = np.arange(comm.rank, len(ids), comm.size)
for ind in myinds:
    id = ids[ind]
    entry = filedb.data[id]
    try:
        scan = actscan.ACTScan(entry)
        if scan.ndet == 0 or scan.nsamp == 0:
            raise errors.DataMissing("Tod contains no valid data")
    except errors.DataMissing as e:
        L.debug("Skipped %s (%s)" % (str(id), e.message))
        continue
    scan = scan[:, ::config.get("downsample")]
    L.debug("Processing %s" % str(id))

    pmap = pmat.PmatMap(scan, hits)
    pcut = pmat.PmatCut(scan)
    tod = np.full([scan.ndet, scan.nsamp], 1, dtype=dtype)
    junk = np.zeros(pcut.njunk, dtype=dtype)
    pcut.backward(tod, junk)
    pmap.backward(tod, hits)
hits = hits[0]

# Collect result
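The snippet stops at the collection step; given how utils.allreduce is used in Example #15, it plausibly continues along these lines (the output filename is an assumption):

hits = utils.allreduce(hits, comm)
if comm.rank == 0:
    enmap.write_map(args.odir + "/hits.fits", hits)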
Example #10
            ofile = root + "main_map.fits"
            if os.path.isfile(ofile): continue
        tasks.append((si, id))

# Each task processes tasks independently
for ti in range(comm.rank, len(tasks), comm.size):
    si, id = tasks[ti]
    bid = id.replace(":", "_")
    L.info("Processing src %3d id %s" % (si, id))
    root = args.odir + "/src%03d_%s_" % (si, bid)
    entry = filedb.data[id]
    osys = "hor:%.6f_%.6f:cel/0_0:hor" % tuple(srcs[:2, si])
    try:
        scans = [actscan.ACTScan(entry)]
        if scans[0].nsamp == 0 or scans[0].ndet == 0:
            raise errors.DataMissing("no data in scan")
    except errors.DataMissing as e:
        print "Skipping %s: %s" % (id, str(e))
        continue
    # Signals
    signal_cut = mapmaking.SignalCut(scans, dtype=dtype, comm=tcomm)
    signal_map = mapmaking.SignalMap(scans, area, comm=tcomm, sys=osys)
    # Weights
    weights = [mapmaking.FilterWindow(config.get("tod_window"))]
    # And equation system
    eqsys = mapmaking.Eqsys(scans, [signal_cut, signal_map],
                            weights=weights,
                            dtype=dtype,
                            comm=tcomm)
    eqsys.calc_b()
    # Precons
Example #11
			print(sids)
			print(amps)
			continue

		# Read the data
		entry = filedb.data[id]
		try:
			data = actdata.read(entry, exclude=["tod"], verbose=verbose)
			data+= actdata.read_tod(entry)
			data = actdata.calibrate(data, verbose=verbose)
			#print("fixme") # FIXME
			#data.restrict(dets=data.dets[100:150])
			# Avoid planets while building noise model
			if planet is not None:
				data.cut_noiseest *= actdata.cuts.avoidance_cut(data.boresight, data.point_offset, data.site, planet, R)
			if data.ndet < 2 or data.nsamp < 1: raise errors.DataMissing("no data in tod")
		except errors.DataMissing as e:
			print("%s skipped: %s" % (id, e))
			continue
		# Prepare our samples
		#data.tod -= np.mean(data.tod,1)[:,None]
		data.tod -= data.tod[:,None,0].copy()
		data.tod  = data.tod.astype(dtype)
		# Set up our likelihood
		L = Likelihood(data, srcpos[:,sids], amps[sids], filter=highpass)
		# Find out which sources are reliable, so we don't waste time on bad ones
		if prune_unreliable_srcs:
			_, aicov = L.fit_amp()
			good = amps[sids]**2*aicov[:,0,0,0,0] > args.minsn**2
			sids = [sid for sid,g in zip(sids,good) if g]
			nsrc = len(sids)
Example #12
# Read my scans
njunk_tot = 0
cg_rhs = area * 0
cg_rjunk = []
if args.precompute:
    prec_NNmap = {lam: area * 0 for lam in np.unique(cooldown)}
    prec_NNjunk = {lam: [] for lam in np.unique(cooldown)}
scans = []
for ind in range(comm.rank, len(ids), comm.size):
    id = ids[ind]
    entry = filedb.data[id]
    try:
        scan = actscan.ACTScan(entry)
        if scan.ndet == 0 or scan.nsamp == 0:
            raise errors.DataMissing("No samples in scan")
        if args.ndet:
            scan = scan[:args.ndet]
        if downsample > 1:
            scan = scan[:, ::downsample]
        scan.pmap = pmat.PmatMap(scan, area)
        scan.pcut = pmat.PmatCut(scan)
        # Build the noise model
        tod = scan.get_samples()
        tod -= np.mean(tod, 1)[:, None]
        tod = tod.astype(dtype)
        scan.noise = scan.noise.update(tod, scan.srate)
        scan.T = np.min(scan.noise.D) * Tscale
        scan.noise_bar = nmat.NmatDetvecs(scan.noise.D - scan.T, scan.noise.V,
                                          scan.noise.E, scan.noise.bins,
                                          scan.noise.ebins, scan.noise.dets)
Example #13
    def __init__(self, entry, subdets=None, d=None, verbose=False, dark=False):
        self.fields = [
            "gain", "mce_filter", "tags", "polangle", "tconst", "hwp", "cut",
            "point_offsets", "boresight", "site", "tod_shape", "array_info",
            "beam", "pointsrcs", "buddies"
        ]
        if dark: self.fields += ["dark"]
        if config.get("noise_model") == "file":
            self.fields += ["noise"]
        else:
            if config.get("cut_noise_whiteness"):
                self.fields += ["noise_cut"]
            if config.get("cut_spikes"):
                self.fields += ["spikes"]
        if d is None:
            d = actdata.read(entry, self.fields, verbose=verbose)
            d = actdata.calibrate(d, verbose=verbose)
            if subdets is not None:
                d.restrict(dets=d.dets[subdets])
        if d.ndet == 0 or d.nsamp == 0:
            raise errors.DataMissing("No data in scan")
        ndet = d.ndet
        # Necessary components for Scan interface
        self.mjd0 = utils.ctime2mjd(d.boresight[0, 0])
        self.boresight = np.ascontiguousarray(
            d.boresight.T.copy())  # [nsamp,{t,az,el}]
        self.boresight[:, 0] -= self.boresight[0, 0]
        self.offsets = np.zeros([ndet, self.boresight.shape[1]])
        self.offsets[:, 1:] = d.point_offset
        self.cut = d.cut.copy()
        self.cut_noiseest = d.cut_noiseest.copy()
        self.comps = np.zeros([ndet, 4])
        self.beam = d.beam
        self.pointsrcs = d.pointsrcs
        self.comps = d.det_comps
        self.hwp = d.hwp
        self.hwp_phase = d.hwp_phase
        self.dets = d.dets
        self.dgrid = (d.array_info.nrow, d.array_info.ncol)
        self.array_info = d.array_info
        self.sys = config.get("tod_sys",
                              entry.tod_sys if "tod_sys" in entry else None)
        self.site = d.site
        self.speed = d.speed
        if "noise" in d:
            self.noise = d.noise
        else:
            spikes = d.spikes[:2].T if "spikes" in d else None
            self.noise = nmat_measure.NmatBuildDelayed(
                model=config.get("noise_model"),
                spikes=spikes,
                cut=self.cut_noiseest)
        if "dark_tod" in d:
            self.dark_tod = d.dark_tod
        if "dark_cut" in d:
            self.dark_cut = d.dark_cut
        if "buddy_comps" in d:
            # Expand buddy_offs to {dt,daz,ddec}
            self.buddy_comps = d.buddy_comps
            self.buddy_offs = np.concatenate(
                [d.buddy_offs[..., :1] * 0, d.buddy_offs], -1)
        self.autocut = d.autocut if "autocut" in d else []
        # Implementation details. d is our DataSet, which we keep around
        # because we need it to read the tod consistently later. It will *not*
        # take part in any sample slicing operations, as that might make the
        # delayed tod read inconsistent with the rest. It could take part in
        # detector slicing as long as calibrate_tod operates on each detector
        # independently. This is true now, but would not be so if we did stuff
        # like common mode subtraction there. On the other hand, not doing this
        # would prevent slicing before reading from giving any speedup or memory
        # savings. I don't think allowing this should be a serious problem.
        self.d = d
        self.entry = entry

        def fmt_id(entry):
            if isinstance(entry, list):
                return "+".join([fmt_id(e) for e in entry])
            else:
                if entry.tag: return entry.id + ":" + entry.tag
                else: return entry.id

        self.id = fmt_id(entry)
        self.sampslices = []
        self.mapping = None

        # FIXME: debug test
        if config.get("dummy_cut") > 0:
            nmax = int(config.get("dummy_cut_len"))
            # Power law between 1 and nmax, with slope -1.
            # C(w) = log(w)/log(nmax)
            # P(w) = w**-1/log(nmax)
            # w(C) = n**C
            # Mean: (nmax-1)/log(nmax)
            nmean = (nmax - 1) / np.log(nmax)
            ncut = int(self.nsamp * config.get("dummy_cut") / nmean)
            cut_ranges = np.zeros([self.ndet, ncut, 2], int)
            w = (nmax**np.random.uniform(0, 1, size=[self.ndet,
                                                     ncut])).astype(int)
            w = np.clip(w, 1, nmax)
            cut_ranges[:, :, 0] = np.random.uniform(0,
                                                    self.nsamp,
                                                    size=[self.ndet,
                                                          ncut]).astype(int)
            cut_ranges[:, :, 0] = np.sort(cut_ranges[:, :, 0], 1)
            cut_ranges[:, :, 1] = cut_ranges[:, :, 0] + w
            cut_ranges[:, :, 1] = np.clip(cut_ranges[:, :, 1], 0, self.nsamp)
            cut_dummy = sampcut.from_list(cut_ranges, self.nsamp)
            print(np.mean(w), nmean, nmax, ncut)
            print("cut fraction before", float(self.cut.sum()) / self.cut.size)
            self.cut *= cut_dummy
            print("cut fraction after", float(self.cut.sum()) / self.cut.size)
Example #14
filedb.init()
ids = filedb.scans[args.sel]
ntod= len(ids)

cuts  = np.zeros([ntod,ndet],dtype=np.uint8)
stats = None
if args.full_stats: stats = np.zeros([ntod,ndet,4])
for si in range(comm.rank, ntod, comm.size):
	try:
		id    = ids[si]
		entry = filedb.data[id]
		ofile = "%s/%s.txt" % (args.odir, id)
		try:
			d     = actdata.read(entry, fields=["gain","tconst","cut","tod","boresight","hwp"])
			d     = actdata.calibrate(d, exclude=["tod_fourier","autocut"])
			if d.ndet == 0 or d.nsamp == 0: raise errors.DataMissing("empty tod")
		except (IOError, OSError, errors.DataMissing) as e:
			print "Skipped (%s)" % (str(e))
			continue
		print "Read %s" % id
		# Filter the HWP signal
		print "no hwp filter"
		#d.tod = todfilter.filter_poly_jon(d.tod, d.boresight[1], hwp=d.hwp)

		ft    = fft.rfft(d.tod)
		ps    = np.abs(ft)**2/(d.tod.shape[1]*srate)
		inds  = bins*ps.shape[1]//fmax
		bfreqs= np.mean(bins,1)

		#tod_moo = fft.irfft(ft, normalize=True)
		#ind = np.where(d.dets==640)[0]
Example #15
# Set up logging
utils.mkdir(root + "log")
logfile = root + "log/log%03d.txt" % comm_world.rank
log_level = log.verbosity2level(config.get("verbosity"))
L = log.init(level=log_level, file=logfile, rank=comm_world.rank, shared=False)

# Run through all tods to determine the scanning patterns
L.info("Detecting scanning patterns")
boxes = np.zeros([len(ids), 2, 2])
for ind in range(comm_world.rank, len(ids), comm_world.size):
    id = ids[ind]
    entry = filedb.data[id]
    try:
        d = actdata.read(entry, ["boresight", "tconst", "cut", "cut_noiseest"])
        d = actdata.calibrate(d, exclude=["autocut"])
        if d.ndet == 0 or d.nsamp == 0: raise errors.DataMissing("no data")
    except errors.DataMissing as e:
        L.debug("Skipped %s (%s)" % (ids[ind], str(e)))
        continue
    # Reorder from az,el to el,az
    boxes[ind] = [
        np.min(d.boresight[2:0:-1], 1),
        np.max(d.boresight[2:0:-1], 1)
    ]
    L.info("%5d: %s" % (ind, id))
boxes = utils.allreduce(boxes, comm_world)

# Prune null boxes
usable = np.all(boxes != 0, (1, 2))
moo = ids[usable]
cow = boxes[usable]
Example #16
cuts = np.zeros([ntod, ndet], dtype=np.uint8)
stats = None
if args.full_stats: stats = np.zeros([ntod, ndet, 4])
for si in range(comm.rank, ntod, comm.size):
    try:
        id = ids[si]
        entry = filedb.data[id]
        ofile = "%s/%s.txt" % (args.odir, id)
        try:
            d = actdata.read(
                entry,
                fields=["gain", "tconst", "cut", "tod", "boresight", "hwp"])
            d = actdata.calibrate(d, exclude=["tod_fourier", "autocut"])
            if d.ndet == 0 or d.nsamp == 0:
                raise errors.DataMissing("empty tod")
        except (IOError, errors.DataMissing) as e:
            print "Skipped (%s)" % (e.message)
            continue
        print "Read %s" % id
        # Filter the HWP signal
        print "no hwp filter"
        #d.tod = todfilter.filter_poly_jon(d.tod, d.boresight[1], hwp=d.hwp)

        ft = fft.rfft(d.tod)
        ps = np.abs(ft)**2 / (d.tod.shape[1] * srate)
        inds = bins * ps.shape[1] // fmax
        bfreqs = np.mean(bins, 1)

        #tod_moo = fft.irfft(ft, normalize=True)
        #ind = np.where(d.dets==640)[0]
Example #17
# Process each scan independently
myinds = np.arange(len(filelist))[myid::nproc]
for ind in myinds:
    ofile = args.odir + "/%s.hdf" % filelist[ind]
    if args.c and os.path.isfile(ofile):
        L.info("Already done %s" % filelist[ind])
        continue
    L.info("Processing %s" % filelist[ind])
    try:
        d = scan.read_scan(filelist[ind])
    except IOError:
        try:
            d = actscan.ACTScan(db[filelist[ind]])
            if d.ndet == 0 or d.nsamp == 0:
                raise errors.DataMissing("all samples cut")
        except errors.DataMissing as e:
            L.debug("Skipped %s (%s)" % (filelist[ind], e.message))
            continue
    try:
        L.debug("Reading samples")
        tod = d.get_samples().astype(dtype)
    except errors.DataMissing as e:
        L.debug("Skipped %s (%s)" % (filelist[ind], e.message))
        continue

    # Measure noise
    L.debug("Noise")
    ivar = 1 / np.array([
        np.median(onlyfinite(get_desloped_var(blockify(t, 20)))) for t in tod
    ])