def group_redundant_bls(antpos):
    '''Group baselines by antenna-grid separation.

    Returns two dicts: bls maps a separation string 'drow,dcol' to the list
    of baselines sharing that separation; conj maps each baseline to a flag
    indicating whether it must be conjugated to be redundant with the rest
    of the baselines in its group.'''
    nrows, ncols = antpos.shape[0], antpos.shape[1]
    bls, conj = {}, {}
    for ri in range(nrows):
        for ci in range(ncols):
            for rj in range(nrows):
                # Start cj at ci so each separation is listed in only one
                # of its +/- orientations.
                for cj in range(ci, ncols):
                    if ri >= rj and ci == cj:
                        continue  # exclude repeat +/- listings of certain bls
                    sep = '%d,%d' % (rj - ri, cj - ci)
                    ant_i, ant_j = antpos[ri, ci], antpos[rj, cj]
                    # Canonical baseline order is (low antenna, high antenna);
                    # record whether we had to swap (i.e. conjugate) to get it.
                    needs_conj = bool(ant_i > ant_j)
                    if needs_conj:
                        ant_i, ant_j = ant_j, ant_i
                    bl = ij2bl(ant_i, ant_j)
                    bls.setdefault(sep, []).append(bl)
                    conj[bl] = needs_conj
    return bls, conj
def get_bl(i,j):
    """Return the baseline id for antennas i and j, with the pair put into
    canonical (low, high) order before encoding."""
    lo, hi = (j, i) if i > j else (i, j)
    return ij2bl(lo, hi)
# Accumulators: per-baseline delay spectra, closure quantities, the set of
# antennas seen, and the list of integration timestamps.
data = {}
closure = {}
ants = {}
times = []
for filename in args:
    print 'Reading', filename
    uv = a.miriad.UV(filename)
    NCHAN = uv['nchan']
    # Spectral window applied before the delay (ifft) transform.
    window = a.dsp.gen_window(uv['nchan'], window='blackman-harris')
    a.scripting.uv_selector(uv, ants=opts.ant)
    for (crd,t,(i,j)),d,f in uv.all(raw=True):
        # Track unique timestamps in the order encountered.
        if len(times) == 0 or times[-1] != t: times.append(t)
        # Only process the first 10 integrations; later ones are skipped.
        if len(times) > 10: continue
        ants[i] = ants[j] = None
        bl = ij2bl(i,j)
        if False: # Override inferred delays with computed delays
            # Disabled branch: derive delays from a source catalog instead of
            # inferring them from the data.  NOTE(review): requires aa/cat/srcs
            # defined elsewhere — confirm before enabling.
            taus = n.array([-aa.get_baseline(i,j,src=cat[s])[-1] for s in srcs] + [123, 456])
            taus = n.around(taus, -1) # Estimate resolution of delay bins
            #n.random.shuffle(taus) # Randomize source delay order
        else:
            # Infer delays: transform to delay domain, keep magnitude, and
            # pick the NDLY strongest delay bins (strongest first).
            _d = n.abs(n.fft.ifft(d * window))
            data[bl] = _d
            _d = n.fft.fftshift(_d)
            taubins = n.argsort(_d)
            taus = dlys[taubins[-NDLY:]][::-1]
        if False:
            # Disabled branch: score inferred delays against catalog-computed
            # truth over all orderings.
            true_taus = n.array([-aa.get_baseline(i,j,src=cat[s])[-1] for s in srcs])
            best_score,best_order = n.Inf, None
            for t in itertools.permutations(true_taus):
                # RMS error between inferred and true delays for this ordering.
                # NOTE(review): chunk appears truncated here — the loop body
                # presumably continues (best_score update) past this excerpt.
                score = n.sqrt(n.average(n.abs(taus - t)**2))
def testant4(self):
    """Test aipy.scripting.parse_ants()"""
    nants = 4
    # Map of ant_str input -> expected parse_ants() output.  Each entry is a
    # (baseline, include-flag) pair; the flag is 1 to include, 0 to exclude.
    # Entries with a pol suffix (e.g. '0x_1x') carry a third 'pol' element.
    cases = {
        'all': [],
        'auto': [('auto',1)],
        'cross': [('auto',0)],
        '0_1': [(ij2bl(0,1),1)],
        '0_1,1_2': [(ij2bl(0,1),1), (ij2bl(1,2),1)],
        '0x_1x': [(ij2bl(0,1),1,'xx')],
        '(0x,0y)_1x': [(ij2bl(0,1),1,'xx'), (ij2bl(0,1),1,'yx')],
        '(0,1)_2': [(ij2bl(0,2),1), (ij2bl(1,2),1)],
        '0_(1,2)': [(ij2bl(0,1),1), (ij2bl(0,2),1)],
        '(0,1)_(2,3)': [(ij2bl(0,2),1), (ij2bl(0,3),1), (ij2bl(1,2),1), (ij2bl(1,3),1)],
        '0_(1,-2)': [(ij2bl(0,1),1), (ij2bl(0,2),0)],
        '(-0,1)_(2,-3)': [(ij2bl(0,2),0), (ij2bl(0,3),0), (ij2bl(1,2),1), (ij2bl(1,3),0)],
        '0,1,all': [],
    }
    # A bare antenna number selects every baseline involving that antenna;
    # a leading '-' excludes them instead.
    for i in range(nants):
        cases[str(i)] = list(map(lambda x: (ij2bl(x,i),1), range(nants)))
        cases['-'+str(i)] = list(map(lambda x: (ij2bl(x,i),0), range(nants)))
    # inelegantly paste on the new pol parsing flag on the above tests
    # XXX really should add some new tests for the new pol parsing
    for k in cases:
        # Pad each expectation to 3 elements with a -1 pol placeholder.
        cases[k] = [(v+(-1,))[:3] for v in cases[k]]
    for ant_str in cases:
        self.assertEqual(a.scripting.parse_ants(ant_str, nants), cases[ant_str])
    # Nested '_' groupings are invalid and must raise.
    self.assertRaises(ValueError, a.scripting.parse_ants, '(0_1)_2', nants)
def testant4(self):
    """Test aipy.scripting.parse_ants()"""
    nants = 4
    # Map of ant_str input -> expected parse_ants() output.  Each entry is a
    # (baseline, include-flag) pair; the flag is 1 to include, 0 to exclude.
    # Entries with a pol suffix (e.g. '0x_1x') carry a third 'pol' element.
    cases = {
        'all': [],
        'auto': [('auto', 1)],
        'cross': [('auto', 0)],
        '0_1': [(ij2bl(0, 1), 1)],
        '0_1,1_2': [(ij2bl(0, 1), 1), (ij2bl(1, 2), 1)],
        '0x_1x': [(ij2bl(0, 1), 1, 'xx')],
        '(0x,0y)_1x': [(ij2bl(0, 1), 1, 'xx'), (ij2bl(0, 1), 1, 'yx')],
        '(0,1)_2': [(ij2bl(0, 2), 1), (ij2bl(1, 2), 1)],
        '0_(1,2)': [(ij2bl(0, 1), 1), (ij2bl(0, 2), 1)],
        '(0,1)_(2,3)': [(ij2bl(0, 2), 1), (ij2bl(0, 3), 1),
                        (ij2bl(1, 2), 1), (ij2bl(1, 3), 1)],
        '0_(1,-2)': [(ij2bl(0, 1), 1), (ij2bl(0, 2), 0)],
        '(-0,1)_(2,-3)': [(ij2bl(0, 2), 0), (ij2bl(0, 3), 0),
                          (ij2bl(1, 2), 1), (ij2bl(1, 3), 0)],
        '0,1,all': [],
    }
    # A bare antenna number selects every baseline involving that antenna;
    # a leading '-' excludes them instead.
    for i in range(nants):
        cases[str(i)] = list(map(lambda x: (ij2bl(x, i), 1), range(nants)))
        cases['-' + str(i)] = list(
            map(lambda x: (ij2bl(x, i), 0), range(nants)))
    # inelegantly paste on the new pol parsing flag on the above tests
    # XXX really should add some new tests for the new pol parsing
    for k in cases:
        # Pad each expectation to 3 elements with a -1 pol placeholder.
        cases[k] = [(v + (-1, ))[:3] for v in cases[k]]
    for ant_str in cases:
        self.assertEqual(a.scripting.parse_ants(ant_str, nants),
                         cases[ant_str])
    # Nested '_' groupings are invalid and must raise.
    self.assertRaises(ValueError, a.scripting.parse_ants, '(0_1)_2', nants)
def ijp2blp(i,j,pol):
    """Pack an antenna pair and a polarization code into one integer key.

    The baseline id occupies the high bits (scaled by 16) and the shifted
    polarization code (pol + 9) occupies the low 4 bits.
    """
    blp = miriad.ij2bl(i,j) * 16
    return blp + (pol + 9)
def ijp2blp(i, j, pol):
    """Pack an antenna pair and a polarization code into one integer key.

    The baseline id occupies the high bits (scaled by 16) and the shifted
    polarization code (pol + 9) occupies the low 4 bits.
    """
    pol_code = pol + 9
    return 16 * miriad.ij2bl(i, j) + pol_code
def get_bl(i, j):
    """Return the canonical baseline id for antennas i and j, insensitive
    to the order in which the antennas are given."""
    return ij2bl(min(i, j), max(i, j))
# Accumulators: per-baseline delay spectra, closure quantities, the set of
# antennas seen, and the list of integration timestamps.
data = {}
closure = {}
ants = {}
times = []
for filename in args:
    print 'Reading', filename
    uv = a.miriad.UV(filename)
    NCHAN = uv['nchan']
    # Spectral window applied before the delay (ifft) transform.
    window = a.dsp.gen_window(uv['nchan'], window='blackman-harris')
    a.scripting.uv_selector(uv, ants=opts.ant)
    for (crd, t, (i, j)), d, f in uv.all(raw=True):
        # Track unique timestamps in the order encountered.
        if len(times) == 0 or times[-1] != t:
            times.append(t)
        # Only process the first 10 integrations; later ones are skipped.
        if len(times) > 10: continue
        ants[i] = ants[j] = None
        bl = ij2bl(i, j)
        if False:  # Override inferred delays with computed delays
            # Disabled branch: derive delays from a source catalog instead of
            # inferring them from the data.  NOTE(review): requires aa/cat/srcs
            # defined elsewhere — confirm before enabling.
            taus = n.array(
                [-aa.get_baseline(i, j, src=cat[s])[-1]
                 for s in srcs] + [123, 456])
            taus = n.around(taus, -1)  # Estimate resolution of delay bins
            #n.random.shuffle(taus) # Randomize source delay order
        else:
            # Infer delays: transform to delay domain, keep magnitude, and
            # pick the NDLY strongest delay bins (strongest first).
            _d = n.abs(n.fft.ifft(d * window))
            data[bl] = _d
            _d = n.fft.fftshift(_d)
            taubins = n.argsort(_d)
            taus = dlys[taubins[-NDLY:]][::-1]
        if False:
            # Disabled branch: catalog-computed "true" delays for comparison.
            # NOTE(review): chunk appears truncated here — the comparison
            # logic presumably continues past this excerpt.
            true_taus = n.array(
                [-aa.get_baseline(i, j, src=cat[s])[-1] for s in srcs])