Example #1
#HERA info, assuming a hex of 19 and 128 antennas
aa = a.cal.get_aa(opts.cal, n.array([.150]))
info = omni.aa_to_info(aa, fcal=True, ex_ants=[81])
infotest = omni.aa_to_info(aa, fcal=True, ubls=[(80,104),(9,22),(80,96)],ex_ants=[81])
#info = hx.hera_to_info(3, 128, connections=connection_file, ex_ants=[81])
#infotest = hx.hera_to_info(3, 128, connections=connection_file,  ex_ants=[81])
#infotest = hx.hera_to_info(3, 128, connections=connection_file, ubls=[(80,104),(9,22),(80,96)], ex_ants=[81])
reds = flatten_reds(info.get_reds())
redstest = infotest.get_reds()#for plotting 

print len(reds)
#Read in data here.
ant_string = ','.join(map(str, info.subsetant))
bl_string = ','.join(['_'.join(map(str,k)) for k in reds])
times, data, flags = arp.get_dict_of_uv_data(args, bl_string, opts.pol, verbose=True)
dataxx = {}
for (i,j) in data.keys():
    dataxx[(i,j)] = data[(i,j)]['xx']
fqs = n.linspace(.1,.2,1024)
dlys = n.fft.fftshift(n.fft.fftfreq(fqs.size, fqs[1]-fqs[0]))

#gets phase solutions per frequency.
fc = omni.FirstCal(dataxx,fqs,info)
sols = fc.run()
#import IPython; IPython.embed()
#save_gains(sols,fqs, opts.pol)
#save solutions
dataxx_c = {}
for (a1,a2) in info.bl_order():
    if (a1,a2) in dataxx.keys():
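The snippet cuts off inside the loop that walks info.bl_order(). Purely as an illustrative sketch (not the original script's code), assuming sols maps each antenna number to a single delay in units for which fqs*delay is in cycles, the per-antenna gains and corrected visibilities might be formed like this:

def apply_delay_solutions(vis_by_bl, delays, fqs):
    #hypothetical helper: divide each visibility by a delay-only gain model
    corrected = {}
    for (a1, a2), vis in vis_by_bl.items():
        g1 = n.exp(-2j * n.pi * fqs * delays[a1])
        g2 = n.exp(-2j * n.pi * fqs * delays[a2])
        corrected[(a1, a2)] = vis / (g1 * n.conj(g2))
    return corrected

#dataxx_c = apply_delay_solutions(dataxx, sols, fqs)
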
Example #2
    except: pass
for bl in opts.ubls.split(','):
    try:
        i,j = bl.split('_')
        ubls.append((int(i),int(j)))
    except: pass
print 'Excluding Antennas:',ex_ants
if len(ubls) > 0: print 'Using Unique Baselines:',ubls
info = omni.aa_to_info(aa, fcal=True, ubls=ubls, ex_ants=ex_ants)
reds = flatten_reds(info.get_reds())

print 'Number of redundant baselines:',len(reds)
#Read in data here.
ant_string = ','.join(map(str, info.subsetant))
bl_string = ','.join(['_'.join(map(str,k)) for k in reds])
times, data, flags = arp.get_dict_of_uv_data(args, bl_string, opts.pol, verbose=True)
datapack,wgtpack = {},{}
for (i,j) in data.keys():
    datapack[(i,j)] = data[(i,j)][opts.pol]
    wgtpack[(i,j)] = np.logical_not(flags[(i,j)][opts.pol])
nfreq = datapack[datapack.keys()[0]].shape[1] #XXX less hacky than previous hardcode, but always safe?
fqs = n.linspace(.1,.2,nfreq)
dlys = n.fft.fftshift(n.fft.fftfreq(fqs.size, np.diff(fqs)[0]))

#gets phase solutions per frequency.
fc = omni.FirstCal(datapack,wgtpack,fqs,info)
sols = fc.run(tune=True,verbose=opts.verbose,offset=True,plot=opts.plot)

#Save solutions
if len(args)==1: filename=args[0]
else: filename='fcgains.%s.npz'%opts.pol #if averaging a bunch of files together.
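The example stops right after choosing an output name. As a hedged sketch of one way the solutions could be written out (the per-antenna 'd%s' key naming and the npz layout are illustrative assumptions, not the script's actual save format):

def save_firstcal_npz(outfile, sols, fqs, pol):
    #hypothetical writer: one delay entry per antenna, plus the frequency axis
    out = {'freqs': fqs, 'pol': pol}
    for ant, delay in sols.items():
        out['d%s' % ant] = delay
    np.savez(outfile, **out)

#save_firstcal_npz(filename, sols, fqs, opts.pol)
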
Example #3
#! /usr/bin/env python
import aipy as a
import numpy as n
from pylab import *
import optparse, sys, os
from capo.arp import get_dict_of_uv_data

o = optparse.OptionParser()
o.set_usage("print_blflagging.py [options]")
o.set_description(__doc__)
a.scripting.add_standard_options(o, ant=True, pol=True)
o.add_option('-v', action='store_true', help='turn on verbose output')
o.add_option('--detail', action='store_true', help='print stats for each bl')
opts, args = o.parse_args(sys.argv[1:])

t, dat, flg = get_dict_of_uv_data(args, opts.ant, opts.pol, verbose=opts.v)

bls = dat.keys()
nantot = 0
flagtot = 0
ntot = 0
if opts.detail:
    print 'bl pol nancount flagfrac'
for bl in bls:
    for pol in dat[bl].keys():
        if opts.detail:
            print a.miriad.bl2ij(bl), pol, n.sum(n.isnan(dat[bl][pol])),
            print n.sum(flg[bl][pol]) / float(flg[bl][pol].size)
        nantot += n.sum(n.isnan(dat[bl][pol]))
        flagtot += n.sum(flg[bl][pol])
        ntot += n.float(flg[bl][pol].size)
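After the accumulation loop, a short summary along these lines would report the totals; the exact wording of the printout is an assumption, since the snippet ends before any summary is shown:

if ntot > 0:
    print 'total NaN samples:', nantot
    print 'total flagged fraction: %5.4f' % (flagtot / ntot)
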
Example #4
print 'These are the separations that we are going to use:', seps
    
#Get the fir filters for the separation used.
firs = {}
for sep in seps:
    c = 0 
    while c != -1:
        ij = map(int, sep2ij[sep].split(',')[c].split('_'))
        bl = a.miriad.ij2bl(*ij)
        if blconj[bl]: c+=1
        else: break
    frp, bins = fringe.aa_to_fr_profile(aa, ij, 100,frpad=opts.frpad)
    timebins, firs[sep] = fringe.frp_to_firs(frp, bins, aa.get_afreqs(), fq0=aa.get_afreqs()[100])
    
baselines = ','.join(sep2ij[sep] for sep in seps)
times, data, flags = arp.get_dict_of_uv_data(args, baselines, pol, verbose=True)
lsts = []
for jd in times:
    aa.set_jultime(jd)
    lsts.append(aa.sidereal_time())

_d = {}
_w = {}
for bl in data.keys():
    if not _d.has_key(bl): _d[bl],_w[bl] = {}, {}
    #get filter which is baseline dependent.
    sep = bl2sep[bl]
    fir = firs[sep]
    if blconj[bl]: fir = n.conj(fir)
    print map(int, a.miriad.bl2ij(bl)), sep, blconj[bl]
    for pol in data[bl].keys():
        if not _d[bl].has_key(pol): _d[bl][pol], _w[bl][pol] = {}, {}
        _d[bl][pol] = n.zeros_like(data[bl][pol])
        _w[bl][pol] = n.zeros_like(data[bl][pol])
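The snippet ends right after allocating the output arrays. A rough sketch of applying the fringe-rate FIR per frequency channel with flag weighting follows; the channel-first indexing of fir and the use of n.convolve with mode='same' are assumptions about the missing continuation, not the script's verified code:

def apply_frf(d, f, fir):
    #convolve each channel's time series with its FIR, weighting by the flags
    wgt = n.logical_not(f).astype(float)
    dout = n.zeros_like(d)
    wout = n.zeros(d.shape, dtype=float)
    for ch in range(d.shape[1]):
        dout[:, ch] = n.convolve(d[:, ch] * wgt[:, ch], fir[ch], mode='same')
        wout[:, ch] = n.convolve(wgt[:, ch], n.abs(fir[ch]), mode='same')
    return dout, wout

#_d[bl][pol], _w[bl][pol] = apply_frf(data[bl][pol], flags[bl][pol], fir)
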
Example #5
o.add_option(
    '--awesome',
    action='store_true',
    help='try to separate the weights using linear algebra instead of just \
averaging over baselines')
opts, args = o.parse_args(sys.argv[1:])
uv = a.miriad.UV(args[0])
nant = uv['nants']

jds = []
for filename in args:
    F = n.zeros((nant, nant))
    C = n.zeros_like(F)
    jds.append(file2jd(filename))
    t, dat, flg = get_dict_of_uv_data([filename],
                                      opts.ant,
                                      opts.pol,
                                      verbose=opts.v)
    bls = dat.keys()
    for bl in bls:
        (i, j) = a.miriad.bl2ij(bl)
        pols = dat[bl].keys()
        for pol in pols:
            F[i, j] += n.sum(flg[bl][pol])
            F[j, i] += n.sum(flg[bl][pol])
            F[i, i] += n.sum(flg[bl][pol])
            F[j, j] += n.sum(flg[bl][pol])
            Size = flg[bl][pol].size
            C[i, j] += 1
            C[j, i] += 1
            C[i, i] += 1
            C[j, j] += 1
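A minimal sketch of how the accumulators might be reduced at the end of each file's loop; using the last baseline's sample count (Size) as the normalization assumes every baseline in the file has the same shape, which the snippet does not guarantee:

#at the end of each file's baseline loop, one could form a per-antenna-pair flagged fraction:
flagfrac = n.zeros_like(F)
good = C > 0
flagfrac[good] = F[good] / (C[good] * Size)
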
Example #6
o.set_description(__doc__)
a.scripting.add_standard_options(o, ant=True, pol=True)
o.add_option('-v',action='store_true',help='turn on verbose output')
o.add_option('--detail',action='store_true',help='print stats for each bl')
o.add_option('--awesome',action='store_true',help='try to separate the weights using linear algebra instead of just \
averaging over baselines')
opts,args=o.parse_args(sys.argv[1:])
uv = a.miriad.UV(args[0])
nant=uv['nants']

jds = []
for filename in args:
    F = n.zeros((nant,nant))
    C = n.zeros_like(F)
    jds.append(file2jd(filename))
    t,dat,flg = get_dict_of_uv_data([filename],opts.ant,opts.pol,verbose=opts.v)
    bls = dat.keys()
    for bl in bls:
        (i,j) = a.miriad.bl2ij(bl)
        pols = dat[bl].keys()
        for pol in pols:
            F[i,j] += n.sum(flg[bl][pol])
            F[j,i] += n.sum(flg[bl][pol])
            F[i,i] += n.sum(flg[bl][pol])
            F[j,j] += n.sum(flg[bl][pol])
            Size = flg[bl][pol].size
            C[i,j] +=1
            C[j,i] +=1 
            C[i,i] +=1
            C[j,j] +=1
    #imshow(n.log(F))
Example #7
import aipy as a
import numpy as n
from pylab import *
import optparse, sys, os
from capo.arp import get_dict_of_uv_data

o=optparse.OptionParser()
o.set_usage("print_blflagging.py [options]")
o.set_description(__doc__)
a.scripting.add_standard_options(o, ant=True, pol=True)
o.add_option('-v',action='store_true',help='turn on verbose output')
o.add_option('--detail',action='store_true',help='print stats for each bl')
opts,args=o.parse_args(sys.argv[1:])


t,dat,flg = get_dict_of_uv_data(args,opts.ant,opts.pol,verbose=opts.v)

bls = dat.keys()
nantot = 0
flagtot = 0
ntot = 0
if opts.detail:
    print 'bl pol nancount flagfrac'
for bl in bls:
    for pol in dat[bl].keys():
        if opts.detail:
            print a.miriad.bl2ij(bl),pol,n.sum(n.isnan(dat[bl][pol])),
            print n.sum(flg[bl][pol])/float(flg[bl][pol].size)
        nantot += n.sum(n.isnan(dat[bl][pol]))
        flagtot += n.sum(flg[bl][pol])
        ntot += n.float(flg[bl][pol].size)
Example #8
print 'These are the separations that we are going to use:', seps
    
#Get the fir filters for the separation used.
firs = {}
for sep in seps:
    c = 0 
    while c != -1:
        ij = map(int, sep2ij[sep].split(',')[c].split('_'))
        bl = a.miriad.ij2bl(*ij)
        if blconj[bl]: c+=1
        else: break
    frp, bins = fringe.aa_to_fr_profile(aa, ij, 100,frpad=1.0)
    timebins, firs[sep] = fringe.frp_to_firs(frp, bins, aa.get_afreqs(), fq0=aa.get_afreqs()[100],mdl=skew,startprms=(.001,.001,-50),frpad=opts.frpad)
    
baselines = ','.join(sep2ij[sep] for sep in seps)
times, data, flags = arp.get_dict_of_uv_data(args, baselines, pol, verbose=True)
lsts = []
for jd in times:
    aa.set_jultime(jd)
    lsts.append(aa.sidereal_time())

_d = {}
_w = {}
for bl in data.keys():
    if not _d.has_key(bl): _d[bl],_w[bl] = {}, {}
    #get filter which is baseline dependent.
    sep = bl2sep[bl]
    fir = firs[sep]
    if blconj[bl]: fir = n.conj(fir)
    print map(int, a.miriad.bl2ij(bl)), sep, blconj[bl]
    for pol in data[bl].keys():
        if not _d[bl].has_key(pol): _d[bl][pol], _w[bl][pol] = {}, {}
        _d[bl][pol] = n.zeros_like(data[bl][pol])
        _w[bl][pol] = n.zeros_like(data[bl][pol])
Example #9
autocorrDir='/Users/aaronew/Dropbox/HERA_autocorrs/' #parent directory of autocorrelations
simDir='/Users/aaronew/Dropbox/DishPapers/DishSimulations/' #parent directory for simulations
reflectometryDir='/Users/aaronew/Dropbox/DishPapers/DishReflectometry/'#parent directory for UCB reflectometry measurements
reflectometryRBDir='/Users/aaronew/Dropbox/DishPapers/Reflectometry_RB/'#parent directory for Rich Bradley's reflectometry measurements

#************************************************************
#first load up and plot autocorrelations. This code is
#primarily taken from Aaron Parsons' arp/scripts/auto_delays.py
#************************************************************
thresh=.5
tol=1e-9
ants=[0,62,104,96]
fileList=glob.glob(autocorrDir+'*AR')
c0,c1=140,930
antstr=','.join(['%d_%d'%(i,i) for i in ants])
times,dat,flg=arp.get_dict_of_uv_data(fileList,antstr=antstr,polstr='xx',verbose=True)
print dat.keys()
div=False
colors=['']*10
g0,g1,g2={},{},{}
w0,w1,w2={},{},{}
for i,ant in enumerate(ants):
    print ant
    bl=a.miriad.ij2bl(ant,ant)
    print bl
    fqs=n.linspace(.1,.2,dat[bl]['xx'].shape[1])[c0:c1]
    tau=fft.fftfreq(fqs.size,fqs[1]-fqs[0])
    d,f=dat[bl]['xx'][:,c0:c1],flg[bl]['xx'][:,c0:c1]
    ntimes,nfreqs=d.shape
    w=n.logical_not(f).astype(n.float)
    d*=w
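The snippet stops after weighting the autocorrelation by its flags. A hedged sketch of the next step one would expect in a delay-spectrum look at these data (the Blackman-Harris window via a.dsp.gen_window and the simple ifft normalization are illustrative choices, not necessarily what auto_delays.py does):

#(continuing inside the per-antenna loop)
win = a.dsp.gen_window(nfreqs, window='blackman-harris')
_dly = n.fft.fftshift(n.fft.ifft(d * win, axis=1), axes=1)  #pairs with n.fft.fftshift(tau)
dspec = n.mean(n.abs(_dly), axis=0)  #time-averaged delay-spectrum amplitude
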
Example #10
a.scripting.add_standard_options(o, ant=True, pol=True, cal=True)
o.add_option("-v", action="store_true", help="turn on more verbs")
o.add_option(
    "--pols",
    default="xx,xx",
    type=str,
    help="pols to compare, ex xx,I compares xx in file 1 to I in file 2 [default xx,xx]",
)
# o.add_option('--plot',dest='plot',default=False, action='store_true',\
#    help='Outputs plot to X-Window and saves plot')
opts, args = o.parse_args(sys.argv[1:])
# start by assuming things are lst aligned (they aren't)
filename = args[0]
pol_A = opts.pols.split(",")[0]
print "reading", filename
t_A, dat_A, flg_A = get_dict_of_uv_data([filename], opts.ant, pol_A, verbose=opts.v)
bls_A = dat_A.keys()

uv = a.miriad.UV(filename)
aa = a.cal.get_aa(opts.cal, uv["sdf"], uv["sfreq"], uv["nchan"])
freqs_A = aa.get_afreqs()
t_A = Time(t_A, scale="utc", format="jd", location=(aa.lat, aa.long))
lst_A = t_A.sidereal_time("apparent")
if len(bls_A) == 0:
    print "no data found in file ", filename[0]
    sys.exit()
D_A = n.ma.masked_where([flg_A[bl][pol_A] for bl in bls_A], [dat_A[bl][pol_A] for bl in bls_A])

filename = args[1]
print "reading", filename
pol_B = opts.pols.split(",")[1]
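The example ends just before the second file is read. Continuing in the same pattern as the file-A block above (the _B variable names mirror the code already shown and are assumptions about the script's continuation):

t_B, dat_B, flg_B = get_dict_of_uv_data([filename], opts.ant, pol_B, verbose=opts.v)
bls_B = dat_B.keys()
if len(bls_B) == 0:
    print "no data found in file", filename
    sys.exit()
D_B = n.ma.masked_where([flg_B[bl][pol_B] for bl in bls_B],
                        [dat_B[bl][pol_B] for bl in bls_B])
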
Example #11
o.add_option(
    '--pols',
    default='xx,xx',
    type=str,
    help=
    'pols to compare, ex xx,I compares xx in file 1 to I in file 2 [default xx,xx]'
)
#o.add_option('--plot',dest='plot',default=False, action='store_true',\
#    help='Outputs plot to X-Window and saves plot')
opts, args = o.parse_args(sys.argv[1:])
#start by assuming things are lst aligned (they aren't)
filename = args[0]
pol_A = opts.pols.split(',')[0]
print "reading", filename
t_A, dat_A, flg_A = get_dict_of_uv_data([filename],
                                        opts.ant,
                                        pol_A,
                                        verbose=opts.v)
bls_A = dat_A.keys()

uv = a.miriad.UV(filename)
aa = a.cal.get_aa(opts.cal, uv['sdf'], uv['sfreq'], uv['nchan'])
freqs_A = aa.get_afreqs()
t_A = Time(t_A, scale='utc', format='jd', location=(aa.lat, aa.long))
lst_A = t_A.sidereal_time('apparent')
if len(bls_A) == 0:
    print "no data found in file ", filename[0]
    sys.exit()
D_A = n.ma.masked_where([flg_A[bl][pol_A] for bl in bls_A],
                        [dat_A[bl][pol_A] for bl in bls_A])

filename = args[1]