Example #1
def white_noise(ndet, nsamp, sigma):
	# White noise model: a single frequency bin with a flat, diagonal
	# variance of sigma**2 per detector and no correlated eigenmodes.
	bins  = linbin(1.0, 1)
	ebins = np.array([[0,0]])
	D     = np.zeros([1,ndet])+sigma**2
	V     = np.zeros([1,ndet])
	E     = np.zeros([1])
	return nmat.NmatDetvecs(D, V, E, bins, ebins)
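A minimal usage sketch with hypothetical values. The examples on this page assume numpy is imported as np and that linbin and nmat come from the surrounding enlib-style module; the apply call follows the pattern used in Example #5.

import numpy as np

# Hypothetical: build a flat noise model for 4 detectors and apply its
# inverse covariance to a small simulated TOD.
ndet, nsamp, sigma = 4, 1000, 0.01
noise = white_noise(ndet, nsamp, sigma)
tod   = np.random.standard_normal([ndet, nsamp]).astype(np.float32) * sigma
itod  = noise.apply(tod.copy())  # inverse-noise-weighted TOD, as in Example #5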
Example #2
def oneoverf_noise(ndet, nsamp, sigma, fknee=0.2, alpha=1):
	# Uncorrelated 1/f + white noise model: every detector gets the
	# spectrum sigma**2 * (1 + (f/fknee)**-alpha), tabulated in nbin
	# linear frequency bins, with no correlated eigenmodes.
	nbin  = 1000
	bins  = linbin(1.0, nbin)
	freq  = np.mean(bins,1)
	Nu    = np.empty([nbin,ndet])
	Nu[:,:] = ((1+(freq/fknee)**-alpha)*sigma**2)[:,None]
	#Nu[:,:] = ((0+(freq/fknee)**-alpha)*sigma**2)[:,None]  # variant without the white-noise floor
	ebins = np.zeros([nbin,2],dtype=int)
	return nmat.NmatDetvecs(Nu, np.zeros([1,ndet]), np.zeros([1]), bins, ebins)
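The tabulated spectrum is N(f) = sigma**2 * (1 + (f/fknee)**-alpha), the same for every detector. A quick, self-contained check of what that model evaluates to, with made-up numbers:

# Evaluate the modeled power at a few frequencies, in the same
# normalized units as linbin(1.0, nbin) above.
sigma, fknee, alpha = 0.01, 0.2, 1
for f in [0.01, 0.2, 0.5]:
	print(f, (1 + (f/fknee)**-alpha) * sigma**2)
# At f = fknee the 1/f term equals the white floor, giving 2*sigma**2.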
Example #3
def prepare_detvecs(D, Vlist, Elist, ibins, srate, dets):
	D = np.asarray(D)
	if dets is None: dets = np.arange(D.shape[1])
	assert len(dets) == D.shape[1]
	# Rescale integer fourier-bin edges to frequency, with the last bin
	# edge mapping to the Nyquist frequency srate/2.
	fbins = ibins*(srate/2.)/ibins[-1,-1]
	# ebins[i] is the [start,end) slice of the concatenated E and V
	# arrays belonging to frequency bin i.
	etmp = np.concatenate([[0],np.cumsum(np.array([len(e) for e in Elist]))])
	ebins= np.array([etmp[0:-1],etmp[1:]]).T
	E, V = np.hstack(Elist), np.hstack(Vlist).T
	return nmat.NmatDetvecs(D, V, E, fbins, ebins, dets)
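A sketch of the argument shapes this function expects, with made-up numbers: two frequency bins, three detectors, one eigenmode in the first bin and two in the second, so the ebins computed inside become [[0,1],[1,3]]. The Vlist entries are stacked along their second axis and then transposed, so each has shape [ndet, nvec_in_that_bin].

import numpy as np

# Hypothetical inputs for prepare_detvecs (the shapes are the point here).
D     = np.full([2, 3], 1e-4)                    # [nbin, ndet] diagonal power
Vlist = [np.ones([3, 1]), np.ones([3, 2])]       # per-bin eigenvectors, [ndet, nvec]
Elist = [np.array([1.0]), np.array([0.5, 0.2])]  # per-bin eigenvalues
ibins = np.array([[0, 50], [50, 100]])           # integer bin edges; last edge maps to srate/2
model = prepare_detvecs(D, Vlist, Elist, ibins, srate=400.0, dets=None)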
Example #4
def oneoverf_detcorr_noise(ndet, nsamp, sigma, fknee=0.2, alpha=1):
	# 1/f noise in a single, atmospheric mode common to all detectors,
	# on top of a per-detector white noise floor.
	nbin  = 1000
	bins  = linbin(1.0, nbin)
	freq  = np.mean(bins,1)
	Nu    = np.zeros([nbin,ndet])+sigma**2   # white noise floor
	E     = (freq/fknee)**-alpha * sigma**2  # 1/f power of the common mode
	V     = np.zeros([nbin,ndet])+1          # common mode: equal weight for every detector
	ebins = linbin(nbin,nbin)                # one eigenmode per frequency bin
	return nmat.NmatDetvecs(Nu, V, E, bins, ebins)
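A reasonable reading of the arguments above is that each frequency bin's detector-detector covariance is the white floor plus a rank-one common mode, N_b = diag(Nu[b]) + E[b] * V[b] V[b]'. A numpy-only sketch of that reconstruction for one bin, with hypothetical values:

import numpy as np

# Reconstruct the detector-detector covariance implied for one bin.
ndet, sigma, fknee, alpha = 3, 0.01, 0.2, 1
f  = 0.05                              # a frequency inside the chosen bin
Nu = np.full(ndet, sigma**2)           # diagonal (white) part
E  = (f/fknee)**-alpha * sigma**2      # eigenvalue of the common mode
V  = np.ones(ndet)                     # equal weight for every detector
Nb = np.diag(Nu) + E * np.outer(V, V)  # full ndet x ndet covariance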
Example #5
 if scan.ndet == 0 or scan.nsamp == 0:
     raise errors.DataMissing("No samples in scan")
 if args.ndet:
     scan = scan[:args.ndet]
 if downsample > 1:
     scan = scan[:, ::downsample]
 scan.pmap = pmat.PmatMap(scan, area)
 scan.pcut = pmat.PmatCut(scan)
 # Build the noise model
 tod = scan.get_samples()
 tod -= np.mean(tod, 1)[:, None]
 tod = tod.astype(dtype)
 scan.noise = scan.noise.update(tod, scan.srate)
 # T is a per-scan white-noise level: the smallest diagonal noise power,
 # scaled by Tscale; noise_bar below is the same model with T subtracted
 # from its diagonal.
 scan.T = np.min(scan.noise.D) * Tscale
 scan.noise_bar = nmat.NmatDetvecs(scan.noise.D - scan.T, scan.noise.V,
                                   scan.noise.E, scan.noise.bins,
                                   scan.noise.ebins, scan.noise.dets)
 # Set up cuts
 scan.cut_range = [njunk_tot, njunk_tot + scan.pcut.njunk]
 njunk_tot += scan.pcut.njunk
 # Prepare our filtered data. We do this one of two ways.
 # Either store Nb"d for each TOD (here " denotes inverse and ' denotes
 # transpose), which can end up taking a lot of memory, or precompute
 # P'(Nb"+(lT)")"Nb"d for each value of lambda. This saves memory if the
 # maps aren't too big and if the number of lambdas is reasonably small.
 # For 6 lambdas and deep56 size, we get 240 MB * 6 = 1.4 GB.
 # That corresponds to storing 4 downsampled tods.
 if args.precompute:
     iNbd = scan.noise_bar.apply(tod.copy())
     for lam in np.unique(cooldown):
         # Could cache this too, but it's fast to compute