Example 1
def test_q_eor_nocov(self):
    k1, k2 = ('a', (0, 1), 'I'), ('b', (0, 1), 'I')
    ds = oqe.DataSet({k1: self.eor, k2: self.eor})
    q = ds.q_hat(k1, k2, use_cov=False)
    self.assertTrue(np.all(q > 0))
    self.assertAlmostEqual(np.average(q).real, q.shape[0], 0)
    n1, n2 = oqe.noise(self.eor.shape), oqe.noise(self.eor.shape)
    ds = oqe.DataSet({k1: self.eor + n1, k2: self.eor + n2})
    qn = ds.q_hat(k1, k2, use_cov=False)
    self.assertFalse(np.all(qn > 0))
    self.assertAlmostEqual(np.average(qn).real, qn.shape[0], 0)
    self.assertAlmostEqual(np.average(qn).real, np.average(q).real, 0)
    ds = oqe.DataSet({k1: n1, k2: n2})
    qn = ds.q_hat(k1, k2, use_cov=False)
    self.assertFalse(np.all(qn > 0))
    self.assertAlmostEqual(np.average(qn).real, 0, 0)
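A quick aside on why np.average(q) lands near q.shape[0] in the test above: for x1 == x2 == x, an unweighted quadratic estimator of the form q_alpha = conj(FFT(x1))_alpha * FFT(x2)_alpha is non-negative, and by Parseval its average is NCHAN times the mean squared amplitude of x, i.e. about NCHAN for unit-variance noise. The sketch below is illustrative only; it is not capo's q_hat, and it assumes oqe.noise returns unit-variance complex noise.

import numpy as np

NCHAN, NSAMP = 61, 120
# stand-in for oqe.noise: unit-variance complex Gaussian noise
x = (np.random.normal(size=(NCHAN, NSAMP)) +
     1j * np.random.normal(size=(NCHAN, NSAMP))) / np.sqrt(2)
# unweighted quadratic estimator per delay mode, with x1 == x2 == x
q = np.conj(np.fft.fft(x, axis=0)) * np.fft.fft(x, axis=0)
print(np.all(q.real > 0), np.average(q).real / NCHAN)  # True, ~1.0 by Parseval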
Example 2
def load_other():
    dsets_other = {}
    if opts.Clongtime:
        dsets_final = {}
        for k in dsets:
            firstfile = dsets[k][0]
            dsets_final[k] = glob.glob(
                '/'.join(firstfile.split('/')[:-1]) + '/lst.*' +
                firstfile.split('.')[-1])  #full time range
    else:
        dsets_final = dsets
    for k in dsets_final:
        dsets_other[k] = []
        for file in dsets_final[k]:
            if opts.Cfg:
                dsets_other[k].append(
                    '../../lstbin_fg/' + file.split('/')[1] + '/' +
                    file.split('/')[-1][:-1])  #fg containing data
            elif opts.CnoFRF:
                dsets_other[k].append(file[:-1])  #gets rid of 'L' on filename
            elif opts.otherbls != None:
                oldsep = filter(lambda x: 'sep' in x, file.split('/'))[0]
                newsep = oldsep.split('p')[0] + 'p' + opts.otherbls[1:-1]
                dsets_other[k].append(file.replace(oldsep, newsep))
            elif opts.Clongtime != None:
                dsets_other[k].append(file)
    data_dict_other = {}
    flg_dict_other = {}
    conj_dict_other = {}
    lsts_other, data_other, flgs_other = {}, {}, {}
    keys_other = []
    print 'Reading in other set of data to estimate C'
    for k in days:
        lsts_other[k], data_other[k], flgs_other[k] = capo.miriad.read_files(
            dsets_other[k], antstr=antstr, polstr=POL, verbose=True)
        lsts_other[k] = n.array(lsts_other[k]['lsts'])
        for bl in data_other[k]:
            d = n.array(data_other[k][bl][POL])[:, chans] * jy2T  #extract frequency range
            flg = n.array(flgs_other[k][bl][POL])[:, chans]
            key_other = (k, bl, POL)
            keys_other.append(key_other)
            data_dict_other[key_other] = d
            flg_dict_other[key_other] = n.logical_not(flg)
            conj_dict_other[key_other[1]] = conj[key_other[1]]
    ds_other = oqe.DataSet()
    inds = oqe.lst_align(lsts_other)
    data_dict_other, flg_dict_other, lsts_other = oqe.lst_align_data(
        inds, dsets=data_dict_other, wgts=flg_dict_other, lsts=lsts_other)
    ds_other.set_data(dsets=data_dict_other,  # use the LST-aligned data
                      conj=conj_dict_other,
                      wgts=flg_dict_other)
    return keys_other, ds_other
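For orientation, oqe.lst_align above returns per-dataset index arrays that pick out the overlapping LST range, which oqe.lst_align_data then applies to the data and weight dictionaries. The function below (lst_align_sketch is an illustrative name, not capo's implementation) only sketches the idea of keeping the common LST range.

import numpy as np

def lst_align_sketch(lsts):
    # keep, for each dataset, the indices whose LSTs fall in the common range
    lo = max(l.min() for l in lsts.values())
    hi = min(l.max() for l in lsts.values())
    return dict((k, np.where((l >= lo) & (l <= hi))[0]) for k, l in lsts.items())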
Example 3
def test_q_eor_cov(self):
    k1, k2 = ('a', (0, 1), 'I'), ('b', (0, 1), 'I')
    ds = oqe.DataSet({k1: self.eor, k2: self.eor})
    C1, C2 = ds.C(k1), ds.C(k2)
    I = np.identity(C1.shape[0])
    np.testing.assert_array_almost_equal(C1, I, 1)
    np.testing.assert_array_equal(C1, C2)
    qI = ds.q_hat(k1, k2, use_cov=False)
    qC = ds.q_hat(k1, k2, use_cov=True)
    self.assertTrue(np.all(qC > 0))
    ds.set_C({k1: I, k2: I})
    qCI = ds.q_hat(k1, k2, use_cov=True)
    np.testing.assert_array_equal(qCI, qI)
    self.assertAlmostEqual(np.average(qC).real, np.average(qI).real, -1)
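The C(k) ≈ I check above works because the eor data is close to unit-variance white noise. Below is a minimal sketch of the kind of sample covariance ds.C(k) plausibly computes (sample_cov is an illustrative name, not capo's internals, and mean subtraction and weighting are omitted).

import numpy as np

def sample_cov(x):
    # <x x^dagger> averaged over time samples
    return np.dot(x, np.conj(x.T)) / x.shape[1]

x = (np.random.normal(size=(20, 2000)) +
     1j * np.random.normal(size=(20, 2000))) / np.sqrt(2)  # unit-variance white data
np.testing.assert_array_almost_equal(sample_cov(x), np.identity(20), 1)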
Example 4
        bm.shape = (-1, 1)
        fg_t = np.sin((cnt + 1) * ts)
        fg_t.shape = (1, -1)
        fg += bm * fg_ch * fg_t
    eor = .01 * oqe.noise(size=(NCHAN, NSAMP))

#f = 0.3  # inject less eor so the pspec goes below the eor signal used for computing ratios
f = 1  # full-strength eor; use a value < 1 (e.g. 0.3 above) to inject less eor
dat = {}
for k in fg:
    dat[k] = (fg[k] + f * eor[k])
NSAMP = fg[k].shape[0]
dat_cut = {}
for k in dat:
    dat_cut[k] = np.concatenate([dat[k][:54], dat[k][65:]], axis=0)
ds = oqe.DataSet(dsets=dat)
#ds = oqe.DataSet(dsets=dat_cut)

prf_c_final = {.1: 100, .15: 100, .2: 100}
prf_w_final = {.1: 100, .15: 100, .2: 100}
for boot in xrange(1):
    print boot
    # gather C,iC matrices for baselines to try cross-application
    ds.clear_cache()
    ds.set_data(dat)
    Cs, iCs = {}, {}
    for k in dat:
        #Cs[k] = ds.C(k)
        #Cs[k] = sum([ds.C(ki)+0*np.identity(NCHAN) for ki in dat])
        Cs[k] = sum(
            [ds.C(ki) + 3e-6 * np.identity(NCHAN) for ki in dat if ki != k])
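    # The sum above builds, for each key, a covariance from all the other baselines
    # (ki != k) plus a small diagonal regularizer (3e-6), presumably so a baseline is
    # not weighted by its own realization. The continuation below is a sketch only,
    # not part of the original script, reusing set_C / q_hat as in the test examples.
    ds.set_C(Cs)
    ks = list(dat.keys())
    q_cross = ds.q_hat(ks[0], ks[1], use_cov=True)  # assumes at least two keys in dat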
Example 5
import numpy as np
import capo, aipy
import capo.oqe as oqe
import sys

CH0, NCHAN = 30, 61
NSAMP = 120

for i in xrange(10):
    e = oqe.noise(size=(NCHAN, NSAMP))
    v = oqe.noise(size=(NCHAN, NSAMP))
    r = e + v

    k = ('even', (0, 1), 'I')
    k1 = ('e', (0, 1), 'I')
    k2 = ('v', (0, 1), 'I')
    ds = oqe.DataSet(dsets={k: r.T[:-20]})
    print i, np.linalg.cond(ds.C(k))
    iC_r = ds.iC(k)
    ds.set_data({k: e.T})
    q_e = ds.q_hat(k, k)
    ds.set_data({k: v.T})
    q_v = ds.q_hat(k, k)
    ds.set_data({k: r.T})
    q_r = ds.q_hat(k, k)
    F = ds.get_F(k, k)
    ds.set_data({k1: e.T, k2: v.T})
    ds.set_iC({k1: iC_r, k2: iC_r})
    q_ev = ds.q_hat(k1, k2)
    (M, W) = ds.get_MW(F)
    p_e = ds.p_hat(M, q_e)
    p_v = ds.p_hat(M, q_v)
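Example 5 runs the full chain q_hat -> get_F -> get_MW -> p_hat. get_MW accepts different normalization modes (a mode argument appears in Example 10); as an illustration only, one simple choice makes M diagonal with the inverse row-sums of F, so the window matrix W = M F has unit row sums and the band powers are p = M q. The helpers below (get_MW_sketch, p_hat_sketch) are illustrative names, not necessarily what capo returns by default.

import numpy as np

def get_MW_sketch(F):
    # diagonal normalization: rows of W = M F sum to 1
    M = np.diag(1. / np.sum(F, axis=1).real)
    W = np.dot(M, F)
    return M, W

def p_hat_sketch(M, q):
    # normalized band-power estimates, p = M q
    return np.dot(M, q)

# usage: M, W = get_MW_sketch(F); p = p_hat_sketch(M, q_r)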
Example 6
    for bl in data[k]:
        d = n.array(data[k][bl][POL])[:, chans] * jy2T  #extract frequency range
        flg = n.array(flgs[k][bl][POL])[:, chans]
        key = (k, bl, POL)
        data_dict[key] = d
        flg_dict[key] = n.logical_not(flg)
        conj_dict[key[1]] = conj[bl]
keys = data_dict.keys()
bls_master = []
for key in keys:  #populate list of baselines
    if key[0] == keys[0][0]: bls_master.append(key[1])
print 'Baselines:', len(bls_master)

#Align and create dataset
ds = oqe.DataSet(lmode=LMODE)
inds = oqe.lst_align(lsts)
data_dict, flg_dict, lsts = oqe.lst_align_data(
    inds, dsets=data_dict, wgts=flg_dict, lsts=lsts
)  #the lsts given is a dictionary with 'even','odd', etc., but the lsts returned is one array

#If data is replaced by noise
if opts.noise_only:
    if opts.same == None and opts.diff == None:
        print 'Need to specify if noise is the same on all baselines (--same) or different (--diff)'
        sys.exit()
    #Prep FRF Stuff
    ij = bls_master[0]  #ij = (1,4)
    if blconj[a.miriad.ij2bl(ij[0], ij[1])]:  #makes sure FRP will be the same whether bl is a conjugated one or not
Example 7
    for bl in data[k]:
        d = n.array(data[k][bl][POL])[:,
                                      chans] * jy2T  #extract frequency range
        flg = n.array(flgs[k][bl][POL])[:, chans]
        key = (k, bl, POL)
        data_dict[key] = d
        flg_dict[key] = n.logical_not(flg)
        conj_dict[key[1]] = conj[bl]
keys = data_dict.keys()
bls_master = []
for key in keys:  #populate list of baselines
    if key[0] == keys[0][0]: bls_master.append(key[1])
print 'Baselines:', len(bls_master)

#Align and create dataset
ds = oqe.DataSet(lmode=LMODE)
inds = oqe.lst_align(lsts)
data_dict, flg_dict, lsts = oqe.lst_align_data(inds,
                                               dsets=data_dict,
                                               wgts=flg_dict,
                                               lsts=lsts)

#Prep FRF Stuff
timelen = data_dict[keys[0]].shape[0]
ij = bls_master[0]  #ij = (1,4)
if blconj[a.miriad.ij2bl(ij[0], ij[1])]:  #makes sure FRP will be the same whether bl is a conjugated one or not
    if ij[0] < ij[1]:
        temp = (ij[1], ij[0])
        ij = temp
Example 8
    lsts[k] = n.array(lsts[k]['lsts'])
    for bl in data[k]:
        d = n.array(data[k][bl][POL])[:,chans] * jy2T  #extract frequency range
        flg = n.array(flgs[k][bl][POL])[:,chans]
        key = (k,bl,POL)
        data_dict[key] = d
        flg_dict[key] = n.logical_not(flg)
        conj_dict[key[1]] = conj[a.miriad.ij2bl(bl[0],bl[1])]
keys = data_dict.keys()
bls_master = []
for key in keys: #populate list of baselines
    if key[0] == keys[0][0]: bls_master.append(key[1])
print 'Baselines:', len(bls_master)

#Align and create dataset
ds = oqe.DataSet()
lsts,data_dict,flg_dict = ds.lst_align(lsts,dsets=data_dict,wgts=flg_dict) #the lsts given is a dictionary with 'even','odd', etc., but the lsts returned is one array

#If data is replaced by noise
if opts.noise_only:
    if opts.same == None and opts.diff == None: 
        print 'Need to specify if noise is the same on all baselines (--same) or different (--diff)'
        sys.exit()
    #Prep FRF Stuff
    ij = bls_master[0] #ij = (1,4)
    if blconj[a.miriad.ij2bl(ij[0],ij[1])]: #makes sure FRP will be the same whether bl is a conjugated one or not
        if ij[0] < ij[1]: temp = (ij[1],ij[0]); ij=temp  
    timelen = data_dict[keys[0]].shape[0] 
    bins = fringe.gen_frbins(inttime)
    frp, bins = fringe.aa_to_fr_profile(aa, ij, len(afreqs)/2, bins=bins)
    timebins, firs = fringe.frp_to_firs(frp, bins, aa.get_freqs(), fq0=aa.get_freqs()[len(afreqs)/2])
Example 9
                e_ = clip_array(e_.T, NSAMP, axis=1)
                v_1 = clip_array(v_full.T, NSAMP, axis=1)
                r_ = clip_array(r_.T, NSAMP, axis=1)

                r[k][bl][POL] = r_.T
                e[k][bl][POL] = e_.T
                v[k][bl][POL] = v_1.T
        #wij_ = np.zeros_like(r)
        #dij,wij = r , np.logical_not(wij_)
        #r,_w,_,_ = fringe.apply_frf(aa,dij,wij,ij[0],ij[1],pol=POL,bins=bins,firs=fir)

        #k = ('even',(0,1),'I')
        #k1 = ('e',(0,1),'I')
        #k2 = ('v',(0,1),'I')
        ds = oqe.DataSet(dsets=r)
        #print i, np.log10(np.linalg.cond(ds.C(k)))
        all_gps = ds.gen_bl_boots(NBOOT, ngps=NGPS)
        _qs_e, _qs_v, _qs_r = [], [], []
        _ps_e, _ps_v, _ps_r = [], [], []
        for nboot, gps in enumerate(all_gps):
            _qs_e.append([])
            _qs_v.append([])
            _qs_r.append([])
            _ps_e.append([])
            _ps_v.append([])
            _ps_r.append([])
            _tmp_c = []
            bls = [bl for gp in gps for bl in gp]
            _Cez, _Cvz, _Crz = {}, {}, {}
            _Ce, _Cv, _Cr = {}, {}, {}
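gen_bl_boots above yields NBOOT random groupings of the baselines into NGPS groups, which the loop then flattens back into a baseline list per bootstrap. The helper below (gen_bl_boots_sketch, a hypothetical name) only shows the shape of what the loop consumes; it is not capo's implementation, which may resample with replacement.

import random

def gen_bl_boots_sketch(bls, nboot, ngps):
    # yield nboot random partitions of the baseline list into ngps groups
    for _ in range(nboot):
        shuffled = list(bls)
        random.shuffle(shuffled)
        yield [shuffled[i::ngps] for i in range(ngps)]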
Example 10
                                       wij,
                                       ij[0],
                                       ij[1],
                                       pol=POL,
                                       bins=bins,
                                       firs=fir)

        e = clip_array(e.T, NSAMP, axis=1)
        v = clip_array(v.T, NSAMP, axis=1)
        r = clip_array(r.T, NSAMP, axis=1)

        k = ('even', (0, 1), 'I')
        k1 = ('e', (0, 1), 'I')
        k2 = ('v', (0, 1), 'I')

        ds = oqe.DataSet(dsets={k: r.T})
        #print i, np.log10(np.linalg.cond(ds.C(k)))
        tmp_c.append(np.log10(np.linalg.cond(ds.C(k))))
        iC_r = ds.iC(k)
        ds.set_data({k: r.T})
        q_r = ds.q_hat(k, k)
        tmp_qs_r.append(q_r)
        F = ds.get_F(k, k)
        (M, W) = ds.get_MW(F, mode=normalize_mode)
        p_r = ds.p_hat(M, q_r)
        tmp_ps_r.append(p_r)

        ds.set_data({k1: e.T})
        iC_e = ds.iC(k1)
        I_e = np.identity(iC_e.shape[0])
        ds.set_iC({k1: I_e})
Example 11
def test_q_fft(self):
    k1, k2 = ('a', (0, 1), 'I'), ('b', (0, 1), 'I')
    ds = oqe.DataSet({k1: self.eor, k2: self.eor})
    qnofft = ds.q_hat(k1, k2, use_cov=False, use_fft=False)
    qfft = ds.q_hat(k1, k2, use_cov=False, use_fft=True)
    np.testing.assert_array_almost_equal(qnofft, qfft)
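Why use_fft=True can reproduce the brute-force estimator: if each Q_alpha is the outer product of a Fourier mode with itself, the quadratic form x1^dagger Q_alpha x2 collapses to conj(FFT(x1))_alpha * FFT(x2)_alpha, so the per-mode loop becomes a single FFT. The snippet below is a self-contained check of that identity only; it is not capo's code and ignores any overall normalization q_hat may apply.

import numpy as np

NCHAN = 16
x1 = np.random.normal(size=NCHAN) + 1j * np.random.normal(size=NCHAN)
x2 = np.random.normal(size=NCHAN) + 1j * np.random.normal(size=NCHAN)
q_loop = []
for alpha in range(NCHAN):
    e = np.exp(2j * np.pi * alpha * np.arange(NCHAN) / NCHAN)  # Fourier mode alpha
    Q = np.outer(e, np.conj(e))                                # Q_alpha = e e^dagger
    q_loop.append(np.dot(np.conj(x1), np.dot(Q, x2)))          # x1^dagger Q_alpha x2
q_fft = np.conj(np.fft.fft(x1)) * np.fft.fft(x2)
np.testing.assert_array_almost_equal(np.array(q_loop), q_fft)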