Code Example #1
def test_compare_headers():

    uvf  = LedaFits('data/test_lalc.fitsidi')

    lalc = LedaFits()
    lalc.filename = 'data/test_lalc.LA'
    lalc.readLfile(n_ant=256, n_chans=109)

    ok_count = 0
    # Check all header values
    h1("Testing header keywords")
    h2("Common")
    ok_count += compare_dicts(uvf.h_common, lalc.h_common)
    h2("Parameters")
    ok_count += compare_dicts(uvf.h_params, lalc.h_params)
    h2("Antenna")
    ok_count += compare_dicts(uvf.h_antenna, lalc.h_antenna)
    h2("Array Geometry")
    ok_count += compare_dicts(uvf.h_array_geometry, lalc.h_array_geometry)
    h2("Frequency")
    ok_count += compare_dicts(uvf.h_frequency, lalc.h_frequency)
    h2("Source")
    ok_count += compare_dicts(uvf.h_source, lalc.h_source)
    h2("UV DATA")
    ok_count += compare_dicts(uvf.h_uv_data, lalc.h_uv_data)

    assert ok_count == 7
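
The helpers used above (h1, h2, compare_dicts) come from the interfits test utilities and are not shown in these examples. A minimal sketch of the assumed behaviour, just to make the ok_count arithmetic clear: each heading function prints a banner, and compare_dicts is assumed to return 1 when the two header dictionaries match and 0 otherwise.

def h1(text):
    # Hypothetical top-level heading printer
    print("\n=== %s ===" % text)

def h2(text):
    # Hypothetical sub-heading printer
    print("--- %s ---" % text)

def compare_dicts(d1, d2):
    # Assumed behaviour: report mismatching keys and return 1 on a full match, 0 otherwise
    mismatches = [k for k in set(d1) | set(d2) if d1.get(k) != d2.get(k)]
    for k in mismatches:
        print("  MISMATCH %s: %r != %r" % (k, d1.get(k), d2.get(k)))
    return 0 if mismatches else 1
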
Code Example #2
File: 5_test_hdf.py  Project: jaycedowell/interfits
def test_hdf():
    idi = LedaFits('data/test_lalc.fitsidi')
    idi.exportHdf5('data/test_lalc.hdf')

    hdf = LedaFits('data/test_lalc.hdf')

    ok_count = 0
    # Check all header values
    h1("Testing header keywords")
    h2("Common")
    ok_count += compare_dicts(idi.h_common, hdf.h_common)
    h2("Params")
    ok_count += compare_dicts(idi.h_params, hdf.h_params)
    h2("Antenna")
    ok_count += compare_dicts(idi.h_antenna, hdf.h_antenna)
    h2("Array Geometry")
    ok_count += compare_dicts(idi.h_array_geometry, hdf.h_array_geometry)
    h2("Frequency")
    ok_count += compare_dicts(idi.h_frequency, hdf.h_frequency)
    h2("Source")
    ok_count += compare_dicts(idi.h_source, hdf.h_source)
    h2("UV DATA")
    ok_count += compare_dicts(idi.h_uv_data, hdf.h_uv_data)

    h1("Testing data tables")
    h2("Antenna")
    ok_count += compare_dicts(idi.d_antenna, hdf.d_antenna)
    h2("Array Geometry")
    ok_count += compare_dicts(idi.d_array_geometry, hdf.d_array_geometry)
    h2("Frequency")
    ok_count += compare_dicts(idi.d_frequency, hdf.d_frequency)
    h2("Source")
    ok_count += compare_dicts(idi.d_source, hdf.d_source)
    h2("UV DATA")
    ok_count += compare_dicts(idi.d_uv_data, hdf.d_uv_data)
    assert ok_count == 12
    print "PASS: All header and table data match"

    try:
        assert repr(hdf) == repr(idi)
        print "PASS: __repr__ match"
    except AssertionError:
        print "ERROR: __repr__ outputs do not match"
        print repr(hdf)
        print repr(idi)
        print hdf.h_uv_data
        print idi.h_uv_data
        print hdf.h_array_geometry
        print idi.h_array_geometry
        raise
Code Example #3
def test_lalc():
    """ Check that LA->interfits->LA->interfits creates identical data. """

    lalc = LedaFits()
    lalc.filename = 'data/test_lalc.LA'
    lalc.readLfile(n_ant=256, n_chans=109)

    lalc.exportFitsidi('data/test_lalc_direct.fitsidi')
    uvf  = LedaFits('data/test_lalc_direct.fitsidi')

    ok_count = 0
    # Check all header values
    h1("Testing header keywords")
    h2("Common")
    ok_count += compare_dicts(uvf.h_common, lalc.h_common)
    h2("Parameters")
    ok_count += compare_dicts(uvf.h_params, lalc.h_params)
    h2("Antenna")
    ok_count += compare_dicts(uvf.h_antenna, lalc.h_antenna)
    h2("Array Geometry")
    ok_count += compare_dicts(uvf.h_array_geometry, lalc.h_array_geometry)
    h2("Frequency")
    ok_count += compare_dicts(uvf.h_frequency, lalc.h_frequency)
    h2("Source")
    ok_count += compare_dicts(uvf.h_source, lalc.h_source)
    h2("UV DATA")
    ok_count += compare_dicts(uvf.h_uv_data, lalc.h_uv_data)

    h1("Testing data tables")
    h2("Antenna")
    ok_count += compare_dicts(uvf.d_antenna, lalc.d_antenna)
    h2("Array Geometry")
    ok_count += compare_dicts(uvf.d_array_geometry, lalc.d_array_geometry)
    h2("Frequency")
    ok_count += compare_dicts(uvf.d_frequency, lalc.d_frequency)
    h2("Source")
    ok_count += compare_dicts(uvf.d_source, lalc.d_source)

    assert ok_count == 11

    try:
        assert repr(lalc) == repr(uvf)
    except AssertionError:
        print "ERROR: __repr__ outputs do not match"
        print repr(uvf)
        print repr(lalc)

    os.remove('data/test_lalc_direct.fitsidi')
Code Example #4
def main(args):
	# Parse the command line
	config = parseConfig(args)
	filenames = config['args']
	
	# Inspect the files to try and figure out what is what
	metadataList = []
	for filename in filenames:
		uvw = LedaFits(verbose=False)
		metadataList.append( (filename, uvw.inspectFile(filename)) )
		
	# Group the files by start time and save the filenames and frequency ranges
	groups = []
	for filename,metadata in metadataList:
		tStart = metadata['tstart']
		chanBW = metadata['chanbw']
		freqStart = metadata['reffreq'] + (1                 - metadata['refpixel'])*chanBW
		freqStop  = metadata['reffreq'] + (metadata['nchan'] - metadata['refpixel'])*chanBW
		
		## See if this file represents the start of a new group or not
		new = True
		for group in groups:
			if tStart == group[0]:
				new = False
				group[1].append(filename)
				group[2].append((freqStart,freqStop,chanBW))
				break
				
		## A new group has been found
		if new:
			group = [tStart, [filename,], [(freqStart,freqStop,chanBW),]]
			groups.append(group)
			
	# Report
	print "Got %i files with groupings:" % len(filenames)
	validity = []
	for i,group in enumerate(groups):
		## Sort the group by frequency
		freqs = []
		for start,stop,cbw in group[2]:
			freqs.append(start)
		freqOrder = [j[0] for j in sorted(enumerate(freqs), key=lambda x:x[1])]
		group[1] = [group[1][j] for j in freqOrder]
		group[2] = [group[2][j] for j in freqOrder]
		
		## Report and validate
		print "  Group #%i" % (i+1,)
		print "    -> start time %s (%.2f)" % (datetime.utcfromtimestamp(group[0]), group[0])
		valid = True
		for j,(name,(start,stop,chanBW)) in enumerate(zip(group[1], group[2])):
			### Check for frequency continuity
			try:
				freqDiff = start - oldStop
			except NameError:
				freqDiff = chanBW
			oldStop = stop
			if freqDiff != chanBW:
				valid = False
				
			### Report on this file
			print "      %i: %s from %.2f to %.2f MHz" % (j+1, os.path.basename(name), start/1e6, stop/1e6)
		validity.append(valid)
		print "    -> valid set? %s" % valid
		
		## Reset the validity between groups
		del oldStop
	print " "
	
	# Combine
	for i,(valid,group) in enumerate(zip(validity,groups)):
		print "Combining group #%i..." % (i+1,)
		
		## Jump over invalid groups
		if not valid:
			print "  -> invalid, skipping"
			continue
			
		## Read in the files
		uvws = []
		for filename in group[1]:
			uvws.append( LedaFits(filename, verbose=False) )
			
		## Build the output name
		obsDate = datetime.strptime(uvws[0].date_obs, "%Y-%m-%dT%H:%M:%S")
		if len(group[1]) > 1:
			outname = "%s_%s_%s_comb%i.FITS_" % (uvws[0].instrument, uvws[0].telescope, obsDate.strftime("%Y%m%d%H%M%S"), len(uvws))
		else:
			obsFreq = int((uvws[0].formatFreqs()).mean() / 1e6)
			outname = "%s_%s_%s_%iMHz.FITS_" % (uvws[0].instrument, uvws[0].telescope, obsDate.strftime("%Y%m%d%H%M%S"), obsFreq)
		print "  -> group file basename will be '%s*'" % outname
		
		## Make a note of lowest frequency value
		freq_min = numpy.min(uvws[0].formatFreqs())
		
		## Concatenate together the various FLUX sets
		if len(uvws) > 1:
			timeBL = uvws[0].d_uv_data["FLUX"].shape[0]
			freqPolComp = uvws[0].d_uv_data["FLUX"].shape[1]
			new_uv_data = numpy.zeros((timeBL, freqPolComp*len(group[1])), dtype=uvws[0].d_uv_data["FLUX"].dtype)
			for i,uvw in enumerate(uvws):
				new_uv_data[:,i*freqPolComp:(i+1)*freqPolComp] = 1.0*uvw.d_uv_data["FLUX"]
			uvws[0].d_uv_data["FLUX"] = new_uv_data
			
		## Overwrite frequency axis keywords so that we can export UV_DATA table correctly
		uvws[0].h_common["REF_FREQ"] = freq_min
		uvws[0].h_common["REF_PIXL"] = 1
		uvws[0].h_common["NO_CHAN"]  *= len(group[1])
		uvws[0].h_params["NCHAN"] = uvws[0].h_common["NO_CHAN"] 
		uvws[0].d_frequency["TOTAL_BANDWIDTH"]  *= len(group[1])
		
		## Remove the other LedaFits instances since we only need the first one now
		while len(uvws) > 1:
			del uvws[-1]
			
		## Add in the UVW coordinates
		uvws[0].generateUVW(src='ZEN', use_stored=False, update_src=True)
		
		if config['applyPhasing']:
			## Apply the cable delays
			uvws[0].apply_cable_delays()
			
			## Phase to zenith
			uvws[0].phase_to_src(src='ZEN')
		else:
			## Update the outname to reflect the fact that no phasing has been applied
			outname = "%sNoPhasing_" % outname
			
		## Save
		if config['fullRes']:
			### Extract all possible baselines
			bls = getAllBaselines(uvws[0])
			uvws[0].select_baselines(bls)
			
			### Verify
			uvws[0].verify()
			
			### Save as FITS IDI
			uvws[0].exportFitsidi(outname+'1')
			
			### Cleanup the associated XML file
			try:
				xmlname = outname+'1.xml'
				os.unlink(xmlname)
			except OSError:
				pass
				
		if config['totalPower']:
			### Extract the total power at full resolution
			bls = getTotalPowerBaselines(uvws[0])
			uvws[0].select_baselines(bls)
			
			### Verify
			uvws[0].verify()
			
			### Save as FITS IDI
			uvws[0].exportFitsidi(outname+'TP')
			
			### Cleanup the associated XML file
			try:
				xmlname = outname+'TP.xml'
				os.unlink(xmlname)
			except OSError:
				pass
				
		if config['switching']:
			### Extract the switching baselines at full resolution
			bls = getSwitchingBaselines(uvws[0])
			uvws[0].select_baselines(bls)
			
			### Verify
			uvws[0].verify()
			
			### Save as FITS IDI
			uvws[0].exportFitsidi(outname+'SW')
			
			### Cleanup the associated XML file
			try:
				xmlname = outname+'SW.xml'
				os.unlink(xmlname)
			except OSError:
				pass
				
		if config['average']:
			### Extract the static baselines
			bls = getStaticBaselines(uvws[0])
			uvws[0].select_baselines(bls)
			
			### Decimate within a try...except block to deal with bad decimation parameters
			try:
				uvws[0].average_time_frequency(config['tDecim'], config['sDecim'], mode='nearest')
			except ValueError, e:
				print "ERROR: %s, skipping" % str(e)
				continue
				
			### Verify
			uvws[0].verify()
			
			### Save as FITS IDI
			uvws[0].exportFitsidi(outname+'AV')
			
			### Cleanup the associated XML file
			try:
				xmlname = outname+'AV.xml'
				os.unlink(xmlname)
			except OSError:
				pass
				
		## Cleanup
		del uvws[0]
		gc.collect()
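
The validity check in the grouping loop above treats a group as contiguous only when each file's start frequency sits exactly one channel width above the previous file's stop frequency (the first file in a group passes by construction, since freqDiff defaults to the channel width). A small, self-contained illustration of that rule, with made-up frequency ranges:

# Hypothetical (start, stop, channel width) tuples in Hz for three files in one group,
# already sorted by start frequency
ranges = [(30.000e6, 39.976e6, 24e3),
          (40.000e6, 49.976e6, 24e3),
          (50.000e6, 59.976e6, 24e3)]

valid = True
oldStop = None
for start, stop, chanBW in ranges:
    freqDiff = chanBW if oldStop is None else start - oldStop
    if freqDiff != chanBW:
        valid = False
    oldStop = stop

# valid stays True: each band starts exactly one channel above where the previous one stopped
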
Code Example #5
def test_compare_flux():

    uvf  = LedaFits('data/test_lalc.fitsidi')

    lalc = LedaFits()
    lalc.filename = 'data/test_lalc.LA'
    lalc.readLfile(n_ant=256, n_chans=109)


    lalc_flux = lalc.d_uv_data['FLUX']
    uvf_flux  = uvf.d_uv_data['FLUX']

    lalc_bls = lalc.d_uv_data['BASELINE']
    uvf_bls  = uvf.d_uv_data['BASELINE']

    assert np.allclose(lalc_bls, uvf_bls)
    print "PASS: BASELINE IDS MATCH"

    assert lalc_flux.shape == uvf_flux.shape
    print "PASS: FLUX SHAPE MATCH"
    print lalc_flux.shape
    print uvf_flux.shape

    try:
        assert lalc_flux.dtype == uvf_flux.dtype
        print "PASS: FLUX DTYPE MATCH"
    except AssertionError:
        print "ERROR: DTYPES DO NOT MATCH"
    print lalc_flux.dtype, uvf_flux.dtype

    print "Testing flux data..."
    for row in range(0, lalc_flux.shape[0]):
        if not row % 1000:
            print "\t %i of %i"%(row, lalc_flux.shape[0])
        try:
            xxl = lalc_flux[row][::8]**2 + lalc_flux[row][1::8]**2
            yyl = lalc_flux[row][2::8]**2 + lalc_flux[row][3::8]**2
            xyl = lalc_flux[row][4::8]**2 + lalc_flux[row][5::8]**2
            yxl = lalc_flux[row][6::8]**2 + lalc_flux[row][7::8]**2

            xxu = uvf_flux[row][::8]**2  + uvf_flux[row][1::8]**2
            yyu = uvf_flux[row][2::8]**2 + uvf_flux[row][3::8]**2
            xyu = uvf_flux[row][4::8]**2 + uvf_flux[row][5::8]**2
            yxu = uvf_flux[row][6::8]**2 + uvf_flux[row][7::8]**2

            assert np.allclose(xxl, xxu)
            assert np.allclose(yyl, yyu)
            assert np.allclose(xyl, xyu)
            assert np.allclose(yxl, yxu)

        except AssertionError:
            print "ERROR: Flux values do not agree"
            print uvf_flux[row, 0:10]
            print lalc_flux[row, 0:10]
            raise

    print "PASS: FLUX DATA MATCH"

    print "Testing stokes generator..."
    xxl, yyl, xyl, yxl = lalc.formatStokes()
    xxu, yyu, xyu, yxu = uvf.formatStokes()

    try:
        assert np.allclose(np.abs(xxl), np.abs(xxu))
        assert np.allclose(np.abs(yyl), np.abs(yyu))
        assert np.allclose(np.abs(xyl), np.abs(xyu))
        assert np.allclose(np.abs(yxl), np.abs(yxu))

    except AssertionError:
        print "ERROR: Flux values do not agree"
        raise
    print "PASS: FLUX DATA STOKES FORMAT"
Code Example #6
def main(args):
	filenames = args
	
	# Inspect the files to try and figure out what is what
	metadataList = []
	for filename in filenames:
		uvw = LedaFits()
		metadataList.append( (filename, uvw.inspectFile(filename)) )
		
	# Group the files by start time and save the filenames and frequency ranges
	groups = []
	for filename,metadata in metadataList:
		tStart = metadata['tstart']
		chanBW = metadata['chanbw']
		freqStart = metadata['reffreq'] + (1                 - metadata['refpixel'])*chanBW
		freqStop  = metadata['reffreq'] + (metadata['nchan'] - metadata['refpixel'])*chanBW
		
		## See if this file represents the start of a new group or not
		new = True
		for group in groups:
			if tStart == group[0]:
				new = False
				group[1].append(filename)
				group[2].append((freqStart,freqStop,chanBW))
				break
				
		## A new group has been found
		if new:
			group = [tStart, [filename,], [(freqStart,freqStop,chanBW),]]
			groups.append(group)
			
	# Report
	print "Got %i files with groupings:" % len(filenames)
	validity = []
	for i,group in enumerate(groups):
		## Sort the group by frequency
		freqs = []
		for start,stop,cbw in group[2]:
			freqs.append(start)
		freqOrder = [j[0] for j in sorted(enumerate(freqs), key=lambda x:x[1])]
		group[1] = [group[1][j] for j in freqOrder]
		group[2] = [group[2][j] for j in freqOrder]
		
		## Report and validate
		print "  Group #%i" % (i+1,)
		print "    -> start time %s (%.2f)" % (datetime.utcfromtimestamp(group[0]), group[0])
		valid = True
		for j,(name,(start,stop,chanBW)) in enumerate(zip(group[1], group[2])):
			### Check for frequency continuity
			try:
				freqDiff = start - oldStop
			except NameError:
				freqDiff = chanBW
			oldStop = stop
			if freqDiff != chanBW:
				valid = False
				
			### Report on this file
			print "      %i: %s from %.2f to %.2f MHz" % (j+1, os.path.basename(name), start/1e6, stop/1e6)
		validity.append(valid)
		print "    -> valid set? %s" % valid
		
		## Reset the validity between groups
		del oldStop
	print " "
	
	# Combine
	for i,(valid,group) in enumerate(zip(validity,groups)):
		print "Combining group #%i..." % (i+1,)
		
		## Jump over invalid groups
		if not valid:
			print "  -> invalid, skipping"
			continue
			
		## Read in the files
		uvws = []
		for filename in group[1]:
			uvws.append( LedaFits(filename) )
			
		## Make a note of lowest frequency value
		freq_min = numpy.min(uvws[0].formatFreqs())
		
		## Concatenate together the various FLUX sets
		if len(uvws) > 1:
			timeBL = uvws[0].d_uv_data["FLUX"].shape[0]
			freqPolComp = uvws[0].d_uv_data["FLUX"].shape[1]
			new_uv_data = numpy.zeros((timeBL, freqPolComp*len(group[1])), dtype=uvws[0].d_uv_data["FLUX"].dtype)
			for i,uvw in enumerate(uvws):
				new_uv_data[:,i*freqPolComp:(i+1)*freqPolComp] = 1.0*uvw.d_uv_data["FLUX"]
			uvws[0].d_uv_data["FLUX"] = new_uv_data
			
		## Overwrite frequency axis keywords so that we can export UV_DATA table correctly
		uvws[0].h_common["REF_FREQ"] = freq_min
		uvws[0].h_common["REF_PIXL"] = 1
		uvws[0].h_common["NO_CHAN"]  *= len(group[1])
		uvws[0].h_params["NCHAN"] = uvws[0].h_common["NO_CHAN"] 
		uvws[0].d_frequency["TOTAL_BANDWIDTH"]  *= len(group[1])
		
		## Remove the other LedaFits instances since we only need the first one now
		while len(uvws) > 1:
			del uvws[-1]
			
		## Load in the current antenna mapping to associate antenna IDs with stand names
		ant_ids = uvws[0].d_array_geometry['NOSTA']
		ant_nms = [int(_annameRE.match(nm).group('id')) for nm in uvws[0].d_array_geometry['ANNAME']]
		
		## Figure out which antenna ID each switching outrigger is
		### Stand numbers for the switching outriggers
		outriggers = [35, 257, 259]
		
		ant_ids_outriggers = []
		for outrigger in outriggers:
			ant_ids_outriggers.append( ant_ids[ant_nms.index(outrigger)] )
			
		## Extract
		import pylab
		pols = uvws[0].stokes_axis
		freqs = uvws[0].formatFreqs()
		for stand,ant_id in zip(outriggers, ant_ids_outriggers):
			print "  -> Extracting data for Stand #%i..." % stand
			timestamps, data = uvws[0].extractTotalPower(ant_id, timestamps=True)
			tRel = timestamps - timestamps[0]
			tRel *= 24*3600	# Timestamps in seconds
			data = data.real	# Autocorrelations should be real
			
			### Plot mean power over time
			pylab.figure()
			for i in xrange(data.shape[0]):
				pylab.plot(tRel, data[i,:,:].mean(axis=-1), linestyle='-', marker='o', label='%i-%s' % (stand, pols[i]))
			pylab.xlabel('Time [s]')
			pylab.ylabel('Mean PSD [arb.]')
			pylab.legend()
			pylab.draw()
			
			### Plot the spectra
			pylab.figure()
			for i in xrange(data.shape[0]):
				pylab.subplot(2, 2, i+1)
				for j in xrange(data.shape[1]):
					pylab.plot(freqs/1e6, data[i,j,:])
				pylab.title("%i-%s" % (stand, pols[i]))
				pylab.xlabel('Frequency [MHz]')
				pylab.ylabel('PSD [arb.]')
			pylab.draw()
		pylab.show()
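
The stand-name lookup above relies on a module-level regular expression, _annameRE, that is not shown in the snippet; it pulls the numeric stand ID out of an ANNAME string via a named group called id. A hypothetical version, assuming names of the form "Stand257" (the real naming convention in the project may differ):

import re

# Hypothetical pattern: any leading letters followed by the numeric stand ID
_annameRE = re.compile(r'^[A-Za-z_]*(?P<id>\d+)$')

assert int(_annameRE.match('Stand257').group('id')) == 257
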
Code Example #7
def test_lalc_json():

    lalc = LedaFits()
    lalc.filename = 'data/test_lalc.LA'
    lalc.readLfile(n_ant=256, n_chans=109)

    lalc.readJson('data/test_lalc_json/d_antenna.json')
    lalc.generateUVW()
    uu = lalc.d_uv_data["UU"]
    vv = lalc.d_uv_data["VV"]
    ww = lalc.d_uv_data["WW"]

    #print uu
    #print len(uu)

    print "Checking data...",
    try:
        assert len(uu) == len(vv) == len(ww)
    except AssertionError:
        print len(uu), len(vv), len(ww)
        raise
    try:
        assert len(uu) == len(lalc.d_uv_data["BASELINE"])
    except AssertionError:
        print len(uu), len(lalc.d_uv_data["BASELINE"])
        raise
    try:
        assert len(uu) == len(lalc.d_uv_data["FLUX"])
    except AssertionError:
        print len(uu), len(lalc.d_uv_data["FLUX"])
        raise
    print "OK"

    lalc.remove_miriad_baselines()

    h2("Exporting sewed data...")
    lalc.exportFitsidi('data/test_lalc_uvw.fitsidi', '../config/config.xml')
Code Example #8
def remove_miriad_lalc():
    uvf = LedaFits('data/test_lalc.fitsidi')
    uvf.remove_miriad_baselines()
    uvf.exportFitsidi('data/test_lalc_255.fitsidi')
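
remove_miriad_baselines presumably exists because the classic BASELINE encoding (256*ant1 + ant2, as used in FITS-IDI UV_DATA tables) cannot represent antenna numbers above 255, which matters for a 256-plus-input array like LEDA. A rough sketch of that kind of filter on the BASELINE column, assuming the classic encoding; the actual method in LedaFits may behave differently:

import numpy as np

def encodable_baseline_mask(baselines, max_ant=255):
    # Classic AIPS/MIRIAD convention: BASELINE = 256*ant1 + ant2
    codes = np.asarray(baselines).astype(int)
    ant1 = codes // 256
    ant2 = codes % 256
    return (ant1 <= max_ant) & (ant2 <= max_ant)

# mask = encodable_baseline_mask(uvf.d_uv_data['BASELINE'])
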
Code Example #9
def test_compare_idi_lalc_json():
    lalc = LedaFits()
    lalc.filename = 'data/test_lalc.LA'
    lalc.readLfile(n_ant=256, n_chans=109)
    lalc.readJson('data/test_lalc_json/d_antenna.json')
    lalc.generateUVW()