Example #1
 def test_k_from_k3pk(self):
     ref_k_par = .2
     ref_z = f212z(119.7e6)
     ref_umag = 30 / (3e8 / (119.7 * 1e6))
     ref_k_perp = ref_umag * pspec.dk_du(ref_z)
     ref_k_mag = np.sqrt(ref_k_par**2 + ref_k_perp**2)
     _, out_k_mag, _, _ = load_k3pk(test_data_file, verbose=False)
     self.assertEqual(ref_k_mag, out_k_mag,
                      msg='Expected k_mag {0} but '
                          'returned k_mag {1}'.format(ref_k_mag, out_k_mag))
Example #2
    ndelays = len(delays)
    nkparr = n.sum(delays >= 0)
    #P = F['P'].T
    #FOLD UP THE PSPEC AROUND ZERO RELATIVE DELAY!!!
    #XXX THIS WILL BE INCORRECT FOR PHASED UP STUFF.
    wedge_X = n.sqrt(n.abs(P[nkparr:,:,0]*n.conj(P[nkparr:,:,0]) + \
        n.flipud(P[:nkparr,:,0]*n.conj(P[:nkparr,:,0]))/2))
    wedge_Y = n.sqrt(n.abs(P[nkparr:,:,1]*n.conj(P[nkparr:,:,1]) + \
        n.flipud(P[:nkparr,:,1]*n.conj(P[:nkparr,:,1]))/2))
    print n.sum(n.isnan(P)), n.sum(P < 0)

    wedge_X = n.ma.masked_invalid(n.log10(wedge_X))
    wedge_Y = n.ma.masked_invalid(n.log10(wedge_Y))

    kparr = delays[delays >= 0] * pspec.dk_deta(z)
    kperp = bl_lengths * pspec.dk_du(z)
    horizon = bl_lengths * pspec.dk_deta(z)
    print bl_lengths, pspec.dk_deta(z), z
    print horizon
    windowpad = windowpad_ns * pspec.dk_deta(z)
    KPERP, KPARR = n.meshgrid(kperp, kparr)
    # smaller of the two polarizations' mean power above the wedge, scaled by 0.8
    MIN = n.min([
        n.mean(wedge_X[KPARR > (horizon + windowpad)]),
        n.mean(wedge_Y[KPARR > (horizon + windowpad)])
    ]) * 0.8

    dfcoarse = 30 / 24. * 1e-3
    if PLOTWINDOWCONTOUR:
        #calculate danny's stat window
        BL, D = n.meshgrid(bl_lengths, delays[delays >= 0])
        coarse_chan_delay = 1 / (1.28 / 1e3)  # delay (ns) corresponding to the 1.28 MHz coarse channel width
Example #3
# compute Pk vs kpl vs bootstraps
print "   Bootstrapping..."
pk_pspecs, vals = average_bootstraps(pspecs, Nt_eff=Neff_lst,
                                     Nboots=args.nboots,
                                     avg_func=eval(args.avg_func),
                                     version=args.version)
outname = 'pspec_2d_to_1d.npz'
print '   Saving', outname  # save all values used in bootstrapping
np.savez(args.output + outname, **vals)

# Compute |k|
bl_length = np.linalg.norm(pspecs['uvw'])
wavelength = cosmo_units.c / (pspecs['freq'] * 1e9)
ubl = bl_length / wavelength
kperp = dk_du(pspecs['freq']) * ubl
print "   freq = ", pspecs['freq']
print "   kperp = ", kperp
pk_pspecs['k'] = np.sqrt(kperp**2 + pk_pspecs['kpl_fold']**2)
pk_pspecs['kperp'] = np.ma.masked_invalid(kperp)
pk_pspecs['cmd'] = pk_pspecs['cmd'].item() + ' \n ' + ' '.join(sys.argv)
if NGPS_LST > 0: pk_pspecs['nlsts_g'] = Neff_lst / NGPS_LST  # number of lsts in one group
else: pk_pspecs['nlsts_g'] = Neff_lst
if args.version == 4: pk_pspecs['nPS'] = pspecs['pCv'].shape[0] * pspecs['pCv'].shape[2]
else: pk_pspecs['nPS'] = pspecs['pCv'].shape[0]

# Important numbers
print "   Total number of bls = ", pk_pspecs['nbls']
print "      number of bl groups = ", pk_pspecs['ngps']
print "      nbls in a group = ", pk_pspecs['nbls'] / pk_pspecs['ngps']
print "   Total number of lsts = ", Neff_lst
Example #4
print files, np.argsort(x)
files = [files[xi] for xi in n.argsort(x)]
freqs = n.array(freqs)
freqs = freqs[n.argsort(x)]
noise_scale = noise_scale[np.argsort(x)]
print "Power Spectrum at {0:.2f}Mhz".format(freqs[0])

x = n.sort(x)

print "loading files", files
z = f212z(freqs * 1e6)
print "processing redshifts:", z
#redshift_files_pads = dict(zip(z,files,x))
umags = 30 / (c / (freqs * 1e6))
print "umags = ", umags
kperps = umags * pspec.dk_du(z)
Pkk = []
Pkk_err = []
k3Pk = []
k3err = []
neg = []
kpars = []
kmags = []
Pks = []
Pkerr = []
for i, FILE in enumerate(files):
    F = n.load(FILE)
    print FILE, z[i]
    k = n.sqrt(F['kpl']**2 + kperps[i]**2)
    k3Pk.append(F['k3pk'])
    Pks.append(F['pk'])
Example #5
        pspecs[key] = np.ma.masked_invalid(np.array(pspecs[key]))
    except:
        import ipdb
        ipdb.set_trace()
if args.add_pCv:
    pspecs['pk_vs_t'] += pspecs['pCv']
# compute Pk vs kpl vs bootstraps
pk_pspecs = average_bootstraps(pspecs,
                               Nt_eff=Neff_lst,
                               Nboots=100,
                               avg_func=np.mean)
# Compute |k|
pspecs['freq'] = np.mean(pspecs['afreqs'])
wavelength = cosmo_units.c / (pspecs['freq'] * 1e9)
ubl = args.bl_length / wavelength
kperp = dk_du(pspecs['freq']) * ubl
print "freq = ", pspecs['freq']
print "kperp = ", kperp
pk_pspecs['k'] = np.sqrt(kperp**2 + pk_pspecs['kpl_fold']**2)
# apply corrections to all the various channels
pspec_channels = ['pC', 'pI', 'pCv', 'pIv']
corrections = [
    1 / np.sqrt(2),  # the median overestimates by sqrt(2)
    1.39,  # beam^2
]
# for chan in pspec_channels:
#     for c in [chan,chan+'_fold']: #no need to correct error bars
#         for correction in corrections:
#             pk_pspecs[c] *= correction
# make a pspec file
for key in pk_pspecs.keys():
Example #6
N = 100
fig = figure()
#plot a fringe of constant kperp

i,j = np.indices((N,N))
kperp = np.linspace(0.0,0.35,N)
fq = np.linspace(0.12,0.18,N)

fringe = np.cos(2.*np.pi*10.*kperp[j])

ax = fig.add_subplot(111)
imshow(fringe,
       extent=[kperp[0], kperp[-1], fq[0], fq[-1]],
       origin='lower',  # put the image origin at the lower-left of the extent
       aspect='auto',
       cmap=cm.Blues,
       alpha=0.5)

for b in [30.,100.,200.,350.,500.]:
    b *= fq/0.3 #in lambda.
    b *= pspec.dk_du(7.5)
    plot(b, fq, 'k', lw=2)

ax.set_xlabel(r'$k_{||}\ [h{\rm Mpc}^{-1}]$', size=16)
ax.set_ylabel(r'${\rm Frequency}\ [{\rm GHz}]$', size=16)


fig.savefig('introduction/figures/BaselineTrack.eps', format='eps')
show()
Example #7
 nkparr = n.sum(delays>=0)
 #P = F['P'].T
 #FOLD UP THE PSPEC AROUND ZERO RELATIVE DELAY!!!
 #XXX THIS WILL BE INCORRECT FOR PHASED UP STUFF.
 wedge_X = n.sqrt(n.abs(P[nkparr:,:,0]*n.conj(P[nkparr:,:,0]) + \
     n.flipud(P[:nkparr,:,0]*n.conj(P[:nkparr,:,0]))/2))
 wedge_Y = n.sqrt(n.abs(P[nkparr:,:,1]*n.conj(P[nkparr:,:,1]) + \
     n.flipud(P[:nkparr,:,1]*n.conj(P[:nkparr,:,1]))/2))
 print n.sum(n.isnan(P)),n.sum(P<0)
 
 wedge_X = n.ma.masked_invalid(n.log10(wedge_X))
 wedge_Y = n.ma.masked_invalid(n.log10(wedge_Y))
 
 
 kparr = delays[delays>=0]*pspec.dk_deta(z)
 kperp = bl_lengths*pspec.dk_du(z)
 horizon = bl_lengths*pspec.dk_deta(z)
 print bl_lengths,pspec.dk_deta(z),z
 print horizon
 windowpad = windowpad_ns*pspec.dk_deta(z)
 KPERP,KPARR = n.meshgrid(kperp,kparr)
 MIN = n.min([n.mean(wedge_X[KPARR>(horizon+windowpad)]),
              n.mean(wedge_Y[KPARR>(horizon+windowpad)])])*0.8
 
 dfcoarse=30/24.*1e-3
 if PLOTWINDOWCONTOUR:
     #calculate danny's stat window
     BL,D = n.meshgrid(bl_lengths,delays[delays>=0])
     coarse_chan_delay = 1/(1.28/1e3)  # delay (ns) corresponding to the 1.28 MHz coarse channel width
     print "coarse_chan_delay",coarse_chan_delay
     WINDOW = n.zeros_like(D)
     WINDOW[n.logical_and(D>(windowpad_ns + bl_lengths),D<coarse_chan_delay)] = 1
Example #8
def posterior(kpl, pk, err, pkfold=None, errfold=None, f0=.151, umag=16.,
              theo_noise=None, verbose=False):
    """Find posterior of Delta^2."""
    import scipy.interpolate as interp
    k0 = n.abs(kpl).argmin()
    kpl = kpl[k0:]
    z = pspec.f2z(f0)
    kpr = pspec.dk_du(z) * umag
    k = n.sqrt(kpl**2 + kpr**2)
    if pkfold is None:
        if verbose:
            print 'Folding for posterior'
        pkfold = pk[k0:].copy()
        errfold = err[k0:].copy()
        pkpos, errpos = pk[k0 + 1:].copy(), err[k0 + 1:].copy()
        pkneg, errneg = pk[k0 - 1:0:-1].copy(), err[k0 - 1:0:-1].copy()
        pkfold[1:] = ((pkpos / errpos**2 + pkneg / errneg**2)
                      / (1. / errpos**2 + 1. / errneg**2))
        errfold[1:] = n.sqrt(1. / (1. / errpos**2 + 1. / errneg**2))
        # ind = n.logical_and(kpl>.2, kpl<.5)
    ind = n.logical_and(k > .15, k < .5)
    # ind = n.logical_and(kpl>.12, kpl<.5)
    # print kpl,pk.real,err
    k = k[ind]
    pkfold = pkfold[ind]
    errfold = errfold[ind]
    # if not theo_noise is None:
    #   theo_noise=theo_noise[ind]
    pk = pkfold
    err = errfold
    err_omit = err.copy()
    # s = n.logspace(1,3.5,100)
    s = n.linspace(-5000, 5000, 10000)
    #    print s
    data = []
    data_omit = []
    for _k, _pk, _err in zip(k, pk, err):
        if verbose:
            print _k, _pk.real, _err
    #       print '%6.3f    %9.5f     9.5f'%(_k, _pk.real, _err)
    for ss in s:
        data.append(n.exp(-.5 * n.sum((pk.real - ss)**2 / err**2)))
        data_omit.append(n.exp(-.5 * n.sum((pk.real - ss)**2 / err_omit**2)))
    #    print data[-1]
    data = n.array(data)
    data_omit = n.array(data_omit)
    # print data
    # print s
    # data/=n.sum(data)
    data /= n.max(data)
    data_omit /= n.max(data_omit)
    p.figure(5, figsize=(6.5, 5.5))
    p.plot(s, data, 'k', linewidth=2)
    # p.plot(s, data_omit, 'k--', linewidth=1)
    # use a spline interpolator to get the 1 and 2 sigma limits.
    # spline = interp.interp1d(data,s)
    # print spline
    # print max(data), min(data)
    # print spline(.68), spline(.95)
    # p.plot(spline(n.linspace(.0,1,100)),'o')
    # p.plot(s, n.exp(-.5)*n.ones_like(s))
    # p.plot(s, n.exp(-.5*2**2)*n.ones_like(s))
    data_c = n.cumsum(data)
    data_omit_c = n.cumsum(data_omit)
    data_c /= data_c[-1]
    data_omit_c /= data_omit_c[-1]
    mean = s[n.argmax(data)]
    s1lo, s1hi = s[data_c < 0.1586][-1], s[data_c > 1 - 0.1586][0]
    s2lo, s2hi = s[data_c < 0.0227][-1], s[data_c > 1 - 0.0227][0]
    if verbose:
        print 'Posterior: Mean, (1siglo,1sighi), (2siglo,2sighi)'
    if verbose:
        print 'Posterior:', mean, (s1lo, s1hi), (s2lo, s2hi)
    mean_o = s[n.argmax(data_omit)]
    s1lo_o, s1hi_o = s[data_omit_c < 0.1586][-1], s[data_omit_c > 1 - 0.1586][0]
    s2lo_o, s2hi_o = s[data_omit_c < 0.0227][-1], s[data_omit_c > 1 - 0.0227][0]
    if verbose:
        print 'Posterior (omit):', mean_o, (s1lo_o, s1hi_o), (s2lo_o, s2hi_o)

    p.vlines(s1lo, 0, 1, color=(0, 107 / 255., 164 / 255.), linewidth=2)
    p.vlines(s1hi, 0, 1, color=(0, 107 / 255., 164 / 255.), linewidth=2)

    # limits for data_omit
    p.vlines(s2lo, 0, 1, color=(1, 128 / 255., 14 / 255.), linewidth=2)
    p.vlines(s2hi, 0, 1, color=(1, 128 / 255., 14 / 255.), linewidth=2)

    if theo_noise is not None:
        s2l_theo = n.sqrt(1. / n.mean(1. / theo_noise**2))
        p.vlines(s2l_theo, 0, 1, color='black', linewidth=2)
        if verbose:
            print('Noise level: {0:0>5.3f} mK^2'.format(s2l_theo))
    p.xlabel(r'$k^3/2\pi^2\ P(k)\ [{\rm mK}^2]$', fontsize='large')
    p.ylabel('Posterior Distribution', fontsize='large')
    p.xlim(0, 700)
    p.title('z = {0:.2f}'.format(z))
    if (s2lo > 700) or (s2hi > 1000):
        p.xlim(0, 1500)
    p.grid(1)
    p.subplots_adjust(left=.15, top=.95, bottom=.15, right=.95)
    p.savefig('posterior_{0:.2f}.png'.format(z))
    f = open('posterior_{0:.2f}.txt'.format(z), 'w')
    f.write('Posterior: Mean,\t(1siglo,1sighi),\t(2siglo,2sighi)\n')
    f.write('Posterior: {0:.4f},\t({1:.4f},{2:.4f}),\t({3:.4f},'
            '{4:.4f})\n'.format(mean, s1lo, s1hi, s2lo, s2hi))
    f.write('Posterior (omit): {0:.4f},\t({1:.4f},{2:.4f}),\t({3:.4f},'
            '{4:.4f})\n'.format(mean_o, s1lo_o, s1hi_o, s2lo_o, s2hi_o))
    if theo_noise is not None:  # s2l_theo is only defined when a noise model is given
        f.write('Noise level: {0:0>5.3f} mK^2\n'.format(s2l_theo))
    f.close()
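
A minimal, hypothetical usage sketch for the posterior() function above, using synthetic inputs (the kpl grid, P(k) values, and errors are invented for illustration) and assuming the module-level imports of the original script (numpy as n, pylab as p, pspec) are in place:

# Hypothetical usage sketch with synthetic inputs -- not data from the source.
kpl_demo = n.arange(-20, 20) * 0.025                 # even-length k_parallel grid [h Mpc^-1]
pk_demo = n.random.normal(0., 100., kpl_demo.size)   # fake P(k) samples
err_demo = 50. * n.ones_like(kpl_demo)               # fake 1-sigma errors
posterior(kpl_demo, pk_demo, err_demo, f0=.151, umag=16., verbose=True)
# writes posterior_<z>.png and posterior_<z>.txt for z = pspec.f2z(0.151)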
Example #9
    npz = n.load(f_name)  # matt's data
    freq = npz['freq']
    z_bin = f2z(freq)

# load 21cmSense Noise models used to compute Beta
# Beta = (P_noise + P_inj)/P_out

noise_files = ('/home/mkolopanis/psa64/'
               '21cmsense_noise/dish_size_1/*drift_mod*.npz')
n_fqs, n_ks, noise = py21cm.load_noise_files(glob.glob(noise_files))

noise_interp = py21cm.noise_interp2d(n_fqs, n_ks, noise)

kpls_pos = n.concatenate(n.array_split(kpls, [10, 11])[1:])
umag = 30 / (3e8 / (freq * 1e9))
kpr = dk_du(z_bin) * umag
n_k = n.array(n.sqrt(kpls_pos**2 + kpr**2))
d2_n = noise_interp(freq * 1e3, n_k)
pk_n = 2 * n.pi**2 / (n_k**3) * d2_n  # * 3887./2022
p_n = n.median(pk_n)
# p_n = n.max( pk_n )

print 'This script does not Bootstrap'

# take the median over k's
# Now has shape (inj, boots*times)
pIs = n.ma.median(pIs, 1)
pCs = n.ma.median(pCs, 1)
pIvs = n.ma.median(pIvs, 1)
pCvs = n.ma.median(pCvs, 1)
pCvs_pk = n.ma.median(pCvs_pk, 1)
Example #10
            freqs.append(chan/2. + 100) #a pretty good approximation of the chan-to-freq mapping for 500 kHz channels
        except(IndexError):
            print "[FAIL] no freq found. Skipping..."
print "sorting input files"
files = files[n.argsort(freqs)]
freqs = n.sort(freqs)
print "found freqs"
freqs = n.array(freqs)
print freqs

z = f212z(freqs*1e6)
print "processing redshifts:",z
redshift_files = dict(zip(z,files))
umags = 30/(c/(freqs*1e6))
print "umags = ",umags
kperps = umags*pspec.dk_du(z)
Pkk = []
Pkk_err = []
k3Pk = []
k3err = []
neg = []
kpars = []
kmags = []
Pks = []
Pkerr =[]
for i,FILE in enumerate(files):
    F = n.load(FILE)
    print FILE,z[i]
    k = n.sqrt(F['kpl']**2 + kperps[i]**2)
    k3Pk.append(F['k3pk'])
    Pks.append(F['pk'])
Example #11
files = sort(args)

#form up the frequency
#dchan = n.array([int(F.split('/')[1].split('_')[1])-int(F.split('/')[1].split('_')[0]) for F in files])
#chans = n.array([int(F.split('/')[1].split('_')[0]) for F in files])+dchan/2
chans = n.array([f['chans'] for f in files])
freqs = n.array([f['afreqs'] for f in files])
I = n.argsort(chans)
chans = chans[I]
freqs = freqs[I]
#freqs = chans/2. + 100
print freqs
z = f212z(freqs*1e6)
umags = 30/(c/(freqs*1e6))  # 30 m baselines in wavelengths
kperps = umags*pspec.dk_du(z)
#k = n.sqrt(kpl**2 + kpr**2)
#k3 = n.abs(k**3 / (2*n.pi**2))
#print [len(p) for p in P]


#plot a single k bin
#mykpl = 0.35
#assume 30m baselines at 150Mhz
#print "plotting k_parallel = ",mykpl
Pkk = []
Pkk_err = []
k3Pk = []
k3err = []
neg = []
for i,FILE in enumerate(files):
Example #12
def posterior(kpl,
              pk,
              err,
              pkfold=None,
              errfold=None,
              f0=.151,
              umag=16.,
              theo_noise=None,
              verbose=False):
    import scipy.interpolate as interp
    k0 = n.abs(kpl).argmin()
    kpl = kpl[k0:]
    z = pspec.f2z(f0)
    kpr = pspec.dk_du(z) * umag
    k = n.sqrt(kpl**2 + kpr**2)
    if pkfold is None:
        if verbose: print 'Folding for posterior'
        pkfold = pk[k0:].copy()
        errfold = err[k0:].copy()
        pkpos, errpos = pk[k0 + 1:].copy(), err[k0 + 1:].copy()
        pkneg, errneg = pk[k0 - 1:0:-1].copy(), err[k0 - 1:0:-1].copy()
        pkfold[1:] = (pkpos / errpos**2 +
                      pkneg / errneg**2) / (1. / errpos**2 + 1. / errneg**2)
        errfold[1:] = n.sqrt(1. / (1. / errpos**2 + 1. / errneg**2))
        #ind = n.logical_and(kpl>.2, kpl<.5)
    ind = n.logical_and(k > .15, k < .5)
    #ind = n.logical_and(kpl>.12, kpl<.5)
    #print kpl,pk.real,err
    k = k[ind]
    pkfold = pkfold[ind]
    errfold = errfold[ind]
    #if not theo_noise is None:
    #    theo_noise=theo_noise[ind]
    pk = pkfold
    err = errfold
    err_omit = err.copy()
    #s = n.logspace(1,3.5,100)
    s = n.linspace(-5000, 5000, 10000)
    #    print s
    data = []
    data_omit = []
    for _k, _pk, _err in zip(k, pk, err):
        if verbose: print _k, _pk.real, _err
    #    print '%6.3f    %9.5f     9.5f'%(_k, _pk.real, _err)
    for ss in s:
        data.append(n.exp(-.5 * n.sum((pk.real - ss)**2 / err**2)))
        data_omit.append(n.exp(-.5 * n.sum((pk.real - ss)**2 / err_omit**2)))
    #    print data[-1]
    data = n.array(data)
    data_omit = n.array(data_omit)
    #print data
    #print s
    #data/=n.sum(data)
    data /= n.max(data)
    data_omit /= n.max(data_omit)
    p.figure(5, figsize=(6.5, 5.5))
    p.plot(s, data, 'k', linewidth=2)
    #    p.plot(s, data_omit, 'k--', linewidth=1)
    #use a spline interpolator to get the 1 and 2 sigma limits.
    #spline = interp.interp1d(data,s)
    #print spline
    #print max(data), min(data)
    #print spline(.68), spline(.95)
    #p.plot(spline(n.linspace(.0,1,100)),'o')
    #    p.plot(s, n.exp(-.5)*n.ones_like(s))
    #    p.plot(s, n.exp(-.5*2**2)*n.ones_like(s))
    data_c = n.cumsum(data)
    data_omit_c = n.cumsum(data_omit)
    data_c /= data_c[-1]
    data_omit_c /= data_omit_c[-1]
    mean = s[n.argmax(data)]
    s1lo, s1hi = s[data_c < 0.1586][-1], s[data_c > 1 - 0.1586][0]
    s2lo, s2hi = s[data_c < 0.0227][-1], s[data_c > 1 - 0.0227][0]
    if verbose: print 'Posterior: Mean, (1siglo,1sighi), (2siglo,2sighi)'
    if verbose: print 'Posterior:', mean, (s1lo, s1hi), (s2lo, s2hi)
    mean_o = s[n.argmax(data_omit)]
    s1lo_o, s1hi_o = s[data_omit_c < 0.1586][-1], s[data_omit_c > 1 -
                                                    0.1586][0]
    s2lo_o, s2hi_o = s[data_omit_c < 0.0227][-1], s[data_omit_c > 1 -
                                                    0.0227][0]
    if verbose:
        print 'Posterior (omit):', mean_o, (s1lo_o, s1hi_o), (s2lo_o, s2hi_o)

    p.vlines(s1lo, 0, 1, color=(0, 107 / 255., 164 / 255.), linewidth=2)
    p.vlines(s1hi, 0, 1, color=(0, 107 / 255., 164 / 255.), linewidth=2)

    # limits for data_omit
    p.vlines(s2lo, 0, 1, color=(1, 128 / 255., 14 / 255.), linewidth=2)
    p.vlines(s2hi, 0, 1, color=(1, 128 / 255., 14 / 255.), linewidth=2)

    if not theo_noise is None:
        s2l_theo = n.sqrt(1. / n.mean(1. / theo_noise**2))
        p.vlines(s2l_theo, 0, 1, color='black', linewidth=2)
        if verbose: print('Noise level: {0:0>5.3f} mK^2'.format(s2l_theo))
    p.xlabel(r'$k^3/2\pi^2\ P(k)\ [{\rm mK}^2]$', fontsize='large')
    p.ylabel('Posterior Distribution', fontsize='large')
    p.xlim(0, 700)
    p.title('z = {0:.2f}'.format(z))
    if (s2lo > 700) or (s2hi > 1000):
        p.xlim(0, 1500)
    p.grid(1)
    p.subplots_adjust(left=.15, top=.95, bottom=.15, right=.95)
    p.savefig('posterior_{0:.2f}.png'.format(z))
    f = open('posterior_{0:.2f}.txt'.format(z), 'w')
    f.write('Posterior: Mean,\t(1siglo,1sighi),\t(2siglo,2sighi)\n')
    f.write(
        'Posterior: {0:.4f},\t({1:.4f},{2:.4f}),\t({3:.4f},{4:.4f})\n'.format(
            mean, s1lo, s1hi, s2lo, s2hi))
    f.write(
        'Posterior (omit): {0:.4f}, ({1:.4f},{2:.4f}),\t({3:.4f},{4:.4f})\n'.
        format(mean_o, s1lo_o, s1hi_o, s2lo_o, s2hi_o))
    if theo_noise is not None:  # s2l_theo is only defined when a noise model is given
        f.write('Noise level: {0:0>5.3f} mK^2\n'.format(s2l_theo))
    f.close()
Example #13
                 'pspec_Jul6_noise_3Jy_inttime_44_95_115_I.npz')  # matt's data
    freq = npz['freq']
    z_bin = f2z(freq)

# load 21cmSense Noise models used to compute Beta
# Beta = (P_noise + P_inj)/P_out

n_fqs, n_ks, noise = py21cm.load_noise_files(
        glob.glob('/home/mkolopanis/psa64/21cmsense_noise/'
                  'dish_size_1/*drift_mod*.npz'))

noise_interp = py21cm.noise_interp2d(n_fqs, n_ks, noise)

kpls_pos = n.concatenate(n.array_split(kpls, [10, 11])[1:])
umag = 30/(3e8/(freq*1e9))
kpr = dk_du(z_bin) * umag
n_k = n.array(n.sqrt(kpls_pos**2 + kpr**2))
d2_n = noise_interp(freq*1e3, n_k)
pk_n = 2*n.pi**2/(n_k**3) * d2_n  # * 3887./2022
p_n = n.median(pk_n)
# p_n = n.max( pk_n )

pIvs_boot = []
pCvs_boot = []
pCs_boot = []
pIs_boot = []

print 'Bootstrapping the Bootstraps'
for nboot in xrange(NBOOT):
    if (nboot+1) % 10 == 0:
        print '   ', nboot+1, '/', NBOOT
Example #14
def kpr_from_sep(sepstr, fq, grid_spacing=[4., 32.]):
    kx, ky = uv_from_sep(sepstr, grid_spacing=grid_spacing)
    scalar = pspec.dk_du(pspec.f2z(fq)) * (fq / 0.3)
    return scalar * np.sqrt(kx**2 + ky**2)
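
A hypothetical usage sketch for kpr_from_sep() above; the separation strings and the 150 MHz frequency are assumptions for illustration, and the exact string format expected by uv_from_sep() is not confirmed by the source:

# Hypothetical usage sketch -- the separation strings below are assumptions
# about the format uv_from_sep() accepts, not values taken from the source.
for sep in ['0,1', '0,2']:
    kpr = kpr_from_sep(sep, 0.150)   # k_perp at 150 MHz [h Mpc^-1]
    print sep, kpr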
Example #15
def get_k3pk_from_npz(files=None, verbose=False):
    '''
    Load output from plot_pk_k3pk.npz and return the Delta^2 spectrum.
    Returns lists of k, Delta^2, and Delta^2_err ordered by decreasing redshift.
    Return format: z, k_magnitude, Delta^2, Delta^2_err
    '''
    if files is None:  # check that files were passed
        print 'No files given for loading'
        return 0, '', '', ''

    one_file_flag = False
    if len(n.shape(files)) == 0: files = [files]
    if len(files) == 1: one_file_flag = True
    freqs = []
    if verbose: print "parsing npz file frequencies"
    for filename in files:
        if verbose: print filename,
        try:
            if verbose: print "npz..",
            freqs.append(n.load(filename)['freq']*1e3) #load freq in MHz
            if verbose: print "[Success]"
        except(KeyError):
            if verbose: print "[FAIL]"
            try:
                if verbose: print "looking for path like RUNNAME/chan_chan/I/pspec.npz"
                dchan = int(filename.split('/')[1].split('_')[1])-int(filename.split('/')[1].split('_')[0])
                chan = int(filename.split('/')[1].split('_')[0]) + dchan/2
                freqs.append(chan/2. + 100) #a pretty good approximation of the chan-to-freq mapping for 500 kHz channels
            except(IndexError):
                if verbose: print "[FAIL] no freq found. Skipping..."

    if len(freqs) == 0:  # check if any files were loaded correctly
        print 'No parsable frequencies found'
        print 'Exiting'
        return 0,'','',''

    if verbose: print "sorting input files"
    files = n.array(files)
    files = files[n.argsort(freqs)]
    freqs = n.sort(freqs)
    if verbose: print "found freqs"
    freqs = n.array(freqs)
    if verbose: print freqs

    z = f212z(freqs*1e6)
    if verbose: print "processing redshifts:",z
    #redshift_files = dict(zip(z,files))
    umags = 30/(c/(freqs*1e6))
    # if verbose: print "umags = ",umags
    kperps = umags*pspec.dk_du(z)
    k3Pk = []
    k3err = []
    kmags = []
    for i,FILE in enumerate(files):
        F = n.load(FILE)
        if verbose: print FILE.split('/')[-1],z[i]
        # k = n.sqrt(F['kpl']**2 + kperps[i]**2)
        k3Pk.append(F['k3pk'])
        k3err.append(F['k3err'])
        kmags.append(F['k'])
    # if one_file_flag:
        # z = n.squeeze(z)
        # kmags = n.squeeze(kmags)
        # k3Pk = n.squeeze(k3Pk)
        # k3err = n.squeeze(k3err)
    return z, kmags, k3Pk, k3err
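
A hypothetical usage sketch for get_k3pk_from_npz(); the glob pattern below is an assumption about the file layout, not a path taken from the source:

# Hypothetical usage sketch -- the glob pattern is an assumed file layout.
import glob
npz_files = glob.glob('*/pspec_pk_k3pk*.npz')
z, kmags, k3Pk, k3err = get_k3pk_from_npz(files=npz_files, verbose=True)
for zi, ki, d2, d2err in zip(z, kmags, k3Pk, k3err):
    print 'z = {0:.2f}: {1} k bins'.format(zi, len(ki))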