Example #1
import numpy as np
import scipy.signal
from enlib import rangelist  # assumed import path for the Rangelist/Multirange classes

def find_spikes(tod, nsigma=10, width=15, padding=7, noise=None):
    res = []
    ftod = tod.reshape(-1, tod.shape[-1])
    # estimate_white_noise (defined elsewhere in the module) is assumed to return the per-detector variance
    if noise is None: noise = estimate_white_noise(ftod)**0.5
    for di, d in enumerate(ftod):
        # Flag samples that deviate from a running median by more than nsigma * noise
        smooth = scipy.signal.medfilt(d, width)
        bad = np.abs(d - smooth) > noise[di] * nsigma
        bad = rangelist.Rangelist(bad)
        # Widen each flagged region by `padding` samples on each side
        bad = bad.widen(padding)
        res.append(rangelist.Rangelist(bad))
    res = rangelist.Multirange(res)
    # Restore the original leading (detector) dimensions
    res.data = res.data.reshape(tod.shape[:-1])
    return res
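
For a quick sanity check of the detection step above, here is a minimal, self-contained numpy/scipy sketch. The injected spike and the crude white-noise estimate are illustrative stand-ins; the original relies on estimate_white_noise and enlib's rangelist instead:

import numpy as np
import scipy.signal

d = np.random.standard_normal(2000)
d[1000] += 50.0                        # inject an artificial spike
noise = np.std(np.diff(d)) / 2**0.5    # rough white-noise level, stand-in for estimate_white_noise
smooth = scipy.signal.medfilt(d, 15)   # running median, width=15
bad = np.abs(d - smooth) > noise * 10  # nsigma=10
print(bad.sum(), "samples flagged")    # the injected spike should be among them
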
Example #2
def find_jumps(tod,
               bsize=100,
               nsigma=10,
               margin=50,
               step=50,
               margin_step=1000):
    ndet = tod.shape[0]
    cuts = []
    for det, det_tod in enumerate(tod):
        n = len(det_tod)
        # Compute difference tod
        dtod = det_tod[1:] - det_tod[:-1]
        nsamp = dtod.size
        # Find typical standard deviation
        nblock = nsamp // bsize
        sigma = utils.medmean(
            np.var(dtod[:nblock * bsize].reshape(nblock, bsize), -1))**0.5
        # Look for samples that deviate too much from 0
        bad = np.abs(dtod) > sigma * nsigma
        # Pad back to the original tod length (the difference lost one sample)
        bad = np.concatenate([bad[:1], bad])
        # Look for steps, areas where the mean level changes dramatically on each
        # side of the jump. First find the center of each bad region
        steps = bad * 0
        labels, nlabel = scipy.ndimage.label(bad)
        # Label 0 is the background, so ask for labels 1..nlabel only
        centers = np.array(
            scipy.ndimage.center_of_mass(bad, labels,
                                         np.arange(nlabel) + 1)).astype(int)[:, 0]
        # Find mean to the left and right of each bad region
        for i, pos in enumerate(centers):
            # Use integer division so the slice bounds stay integers
            m1 = np.mean(det_tod[max(0, pos - step * 3 // 2):max(1, pos - step // 2)])
            m2 = np.mean(det_tod[min(n - 2, pos + step // 2):min(n - 1, pos + step * 3 // 2)])
            if np.abs(m2 - m1) > sigma * nsigma:
                steps[pos] = 1
        # Grow each cut by a margin
        bad = scipy.ndimage.distance_transform_edt(1 - bad) <= margin
        steps = scipy.ndimage.distance_transform_edt(1 - steps) <= margin_step
        cuts.append(rangelist.Rangelist(bad | steps))
    return rangelist.Multirange(cuts)
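
The jump test itself is easy to exercise in isolation. The sketch below is self-contained and uses a MAD-based sigma in place of utils.medmean; the drifting test signal and the injected jump are made up for illustration:

import numpy as np

det_tod = np.cumsum(np.random.standard_normal(5000)) * 0.01  # slowly drifting signal
det_tod[3000:] += 5.0                                        # inject an artificial jump
dtod = np.diff(det_tod)
# Robust sigma of the differences (MAD); the original uses utils.medmean of block variances
sigma = 1.4826 * np.median(np.abs(dtod - np.median(dtod)))
bad = np.abs(dtod) > sigma * 10
print(np.flatnonzero(bad))                                   # expect an index near 2999
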
Example #3
File: flagrange.py  Project: TevaIlan/enlib
    def to_rangelist(self):
        # Convert the stored flags into a Multirange of Rangelists of length
        # self.nsamp, one per range set returned by to_ranges().
        ranges = self.to_ranges()
        return rangelist.Multirange(
            [rangelist.Rangelist(r, n=self.nsamp) for r in ranges])
Example #4
                                          "hor",
                                          args.objname,
                                          mjd,
                                          site=scan.site)
    visible = np.any(object_pos[1] >= margin)
    if not visible:
        cut = rangelist.zeros((d.ndet, d.nsamp))
    else:
        pmap = pmat.PmatMap(scan, mask, sys="hor:%s" % args.objname)
        # Build a tod to project onto.
        tod = np.zeros((d.ndet, d.nsamp), dtype=dtype)
        # And project
        pmap.forward(tod, mask)
        # Any nonzero samples should be cut
        tod = np.rint(tod)
        cut = rangelist.Multirange([rangelist.Rangelist(t) for t in tod])
    print "%s %6.4f %d" % (id, float(cut.sum()) / cut.size, visible)
    mystats.append([ind, float(cut.sum()) / cut.size, visible])
    # Write cuts to output directory
    if args.persample:
        files.write_cut("%s/%s.cuts" % (args.odir, id),
                        d.dets,
                        cut,
                        nrow=d.array_info.nrow,
                        ncol=d.array_info.ncol)
mystats = np.array(mystats)
stats = utils.allgatherv(mystats, comm)
if comm.rank == 0:
    with open(args.odir + "/stats.txt", "w") as f:
        for stat in stats:
            f.write("%s %6.4f %d\n" %
Example #5
	def build_bins(a, nbin):
		box = np.array([np.min(a),np.max(a)])
		return np.minimum(np.floor((a-box[0])/(box[1]-box[0])*nbin).astype(int),nbin-1)
	def bin_by_pix(a, pix, nbin):
		a = np.asarray(a)
		if a.ndim == 0: a = np.full(len(pix), a)
		fa = a.reshape(-1,a.shape[-1])
		fo = np.zeros([fa.shape[0],nbin])
		for i in range(len(fa)):
			fo[i] = np.bincount(pix, fa[i], minlength=nbin)
		return fo.reshape(a.shape[:-1]+(nbin,))
	# Gapfill poltod in regions with far too few hits, to
	# avoid messing up the poltod power spectrum
	mask = poldiv[0,0] < np.mean(poldiv[0,0])*0.1
	for i in range(poltod.shape[0]):
		poltod[i] = gapfill.gapfill_copy(poltod[i], rangelist.Rangelist(mask))

	# Calc phase which is equal to az while az velocity is positive
	# and 2*max(az) - az while az velocity is negative
	x = np.arange(len(az))
	az_spline = UnivariateSpline(x, az, s=1e-4)  # scipy.interpolate.UnivariateSpline
	daz  = az_spline.derivative(1)(x)
	ddaz = az_spline.derivative(2)(x)
	phase = az.copy()
	phase[daz<0] = 2*np.max(az)-az[daz<0]
	# Bin by az and phase
	apix = build_bins(az, nbin)
	ppix = build_bins(phase, nbin)
	for i, pix in enumerate([apix, ppix]):
		tod_eq.rhs[i] += bin_by_pix(polrhs, pix, nbin)
		tod_eq.div[i] += bin_by_pix(poldiv, pix, nbin)
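
To see what bin_by_pix accumulates, the following self-contained snippet bins a test signal by azimuth with the same floor-and-clip binning and bincount accumulation; the signal and bin count are illustrative:

import numpy as np

az = np.linspace(0.0, 1.0, 1000)
sig = np.sin(2 * np.pi * az)
nbin = 8
# Same binning scheme as build_bins above
pix = np.minimum(np.floor((az - az.min()) / (az.max() - az.min()) * nbin).astype(int), nbin - 1)
binned_sum = np.bincount(pix, sig, minlength=nbin)   # what bin_by_pix accumulates per row
hits = np.bincount(pix, minlength=nbin)
print(binned_sum / hits)                             # per-bin mean, for inspection only
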
Example #6
def nocut(ndet, nsamp):
    # An empty (0, 2) range array means no samples are cut for that detector.
    return rangelist.Multirange([
        rangelist.Rangelist(np.zeros([0, 2], dtype=int), n=nsamp)
        for i in range(ndet)
    ])
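
A brief usage sketch, assuming enlib's rangelist module is importable and using the sum()/size accessors seen in Example #4:

empty = nocut(ndet=3, nsamp=1000)
print(empty.sum(), "of", empty.size, "samples cut")   # expected: 0 of 3000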