Code Example #1
    def test_source(self):
        """Test the SOURCE table."""

        testTime = time.time() - 2 * 86400.0
        testFile = os.path.join(self.testPath, 'idi-test-SO.fits')

        # Get some data
        data = self.__initData()

        # Start the file
        fits = fitsidi.Idi(testFile, ref_time=testTime)
        fits.set_stokes(['xx'])
        fits.set_frequency(data['freq'])
        fits.set_geometry(data['site'], data['antennas'])
        fits.add_data_set(testTime, 6.0, data['bl'], data['vis'])
        fits.write()

        # Open the file and examine
        hdulist = astrofits.open(testFile)
        so = hdulist['SOURCE'].data
        # Correct number of entries
        self.assertEqual(len(so.field('SOURCE_ID')), 1)

        # Correct Source ID number
        self.assertEqual(so.field('SOURCE_ID'), 1)

        hdulist.close()
Code Example #2
    def test_antenna(self):
        """Test the ANTENNA table."""

        testTime = time.time() - 2 * 86400.0
        testFile = os.path.join(self.testPath, 'idi-test-AN.fits')

        # Get some data
        data = self.__initData()

        # Start the file
        fits = fitsidi.Idi(testFile, ref_time=testTime)
        fits.set_stokes(['xx'])
        fits.set_frequency(data['freq'])
        fits.set_geometry(data['site'], data['antennas'])
        fits.add_data_set(testTime, 6.0, data['bl'], data['vis'])
        fits.write()

        # Open the file and examine
        hdulist = astrofits.open(testFile)
        an = hdulist['ANTENNA'].data
        # Correct number of stands
        self.assertEqual(len(data['antennas']), len(an.field('ANTENNA_NO')))

        # Correct FREQIDs
        for freqid in an.field('FREQID'):
            self.assertEqual(freqid, 1)

        hdulist.close()
Code Example #3
    def test_bandpass(self):
        """Test the BANDPASS table."""

        testTime = time.time() - 2 * 86400.0
        testFile = os.path.join(self.testPath, 'idi-test-BP.fits')

        # Get some data
        data = self.__initData()

        # Start the file
        fits = fitsidi.Idi(testFile, ref_time=testTime)
        fits.set_stokes(['xx'])
        fits.set_frequency(data['freq'])
        fits.set_geometry(data['site'], data['antennas'])
        fits.add_data_set(testTime, 6.0, data['bl'], data['vis'])
        fits.write()

        # Open the file and examine
        hdulist = astrofits.open(testFile)
        bp = hdulist['BANDPASS'].data
        # Correct number of entries
        self.assertEqual(len(data['antennas']), len(bp.field('ANTENNA_NO')))

        # Correct Source ID number
        for src in bp.field('SOURCE_ID'):
            self.assertEqual(src, 0)

        # Correct FREQIDs
        for freqid in bp.field('FREQID'):
            self.assertEqual(freqid, 1)

        hdulist.close()
Code Example #4
    def test_array_geometry(self):
        """Test the ARRAY_GEOMETRY table."""

        testTime = time.time() - 2 * 86400.0
        testFile = os.path.join(self.testPath, 'idi-test-AG.fits')

        # Get some data
        data = self.__initData()

        # Start the file
        fits = fitsidi.Idi(testFile, ref_time=testTime)
        fits.set_stokes(['xx'])
        fits.set_frequency(data['freq'])
        fits.set_geometry(data['site'], data['antennas'])
        fits.add_data_set(testTime, 6.0, data['bl'], data['vis'])
        fits.write()

        # Open the file and examine
        hdulist = astrofits.open(testFile)
        ag = hdulist['ARRAY_GEOMETRY'].data
        # Correct number of stands
        self.assertEqual(len(data['antennas']), len(ag.field('NOSTA')))

        # Correct stand names
        names = ['LWA%03i' % ant.stand.id for ant in data['antennas']]
        for name, anname in zip(names, ag.field('ANNAME')):
            self.assertEqual(name, anname)

        hdulist.close()
Code Example #5
File: test_fitsidi.py  Project: lwa-project/lsl
    def test_frequency(self):
        """Test the FREQUENCY table."""

        testTime = time.time() - 2*86400.0
        testFile = os.path.join(self.testPath, 'idi-test-FQ.fits')
        
        # Get some data
        data = self._init_data()
        
        # Start the file
        fits = fitsidi.Idi(testFile, ref_time=testTime)
        fits.set_stokes(['xx'])
        fits.set_frequency(data['freq'])
        fits.set_geometry(data['site'], data['antennas'])
        fits.add_data_set(unix_to_taimjd(testTime), 6.0, data['bl'], data['vis'])
        fits.write()
        fits.close()

        # Open the file and examine
        hdulist = astrofits.open(testFile)
        fq = hdulist['FREQUENCY'].data
        # Correct number of FREQIDs
        self.assertEqual(len(fq.field('FREQID')), 1)

        # Correct channel width
        self.assertAlmostEqual(fq.field('CH_WIDTH')[0], numpy.abs(data['freq'][1]-data['freq'][0]), 4)

        # Correct bandwidth
        self.assertAlmostEqual(fq.field('TOTAL_BANDWIDTH')[0], numpy.abs(data['freq'][-1]-data['freq'][0]).astype(numpy.float32), 4)

        # Correct sideband
        self.assertEqual(fq.field('SIDEBAND')[0], 1)

        hdulist.close()
Code Example #6
    def test_write_tables(self):
        """Test if the FITS IDI writer writes all of the tables."""

        testTime = time.time() - 2 * 86400.0
        testFile = os.path.join(self.testPath, 'idi-test-W.fits')

        # Get some data
        data = self.__initData()

        # Start the file
        fits = fitsidi.Idi(testFile, ref_time=testTime)
        fits.set_stokes(['xx'])
        fits.set_frequency(data['freq'])
        fits.set_geometry(data['site'], data['antennas'])
        fits.set_observer('Dowell, Jayce', 'LD009', 'Test')
        fits.add_header_keyword('EXAMPLE', 'example keyword')
        fits.add_comment('This is a comment')
        fits.add_history('This is history')
        fits.add_data_set(testTime, 6.0, data['bl'], data['vis'])
        fits.write()

        # Open the file and examine
        hdulist = astrofits.open(testFile)
        # Check that all of the extensions are there
        extNames = [hdu.name for hdu in hdulist]
        for ext in [
                'ARRAY_GEOMETRY', 'FREQUENCY', 'ANTENNA', 'BANDPASS', 'SOURCE',
                'UV_DATA'
        ]:
            self.assertTrue(ext in extNames)
        # Check header values that we set
        self.assertEqual('Dowell, Jayce', hdulist[0].header['OBSERVER'])
        self.assertEqual('LD009', hdulist[0].header['PROJECT'])
        self.assertEqual('Test', hdulist[0].header['LWATYPE'])
        self.assertEqual('example keyword', hdulist[0].header['EXAMPLE'])
        # Check the comments and history
        self.assertTrue('This is a comment' in str(
            hdulist[0].header['COMMENT']).split('\n'))
        self.assertTrue(
            'This is history' in str(hdulist[0].header['HISTORY']).split('\n'))

        hdulist.close()
Code Example #7
    def test_writer_errors(self):
        """Test that common FITS IDI error conditions are caught."""

        testTime = time.time() - 2 * 86400.0
        testFile = os.path.join(self.testPath, 'idi-test-ERR.fits')

        # Get some data
        data = self.__initData()

        for i in range(4):
            # Start the file
            fits = fitsidi.Idi(testFile, ref_time=testTime, overwrite=True)
            if i != 0:
                fits.set_stokes(['xx'])
            if i != 1:
                fits.set_frequency(data['freq'])
            if i != 2:
                fits.set_geometry(data['site'], data['antennas'])
            if i != 3:
                fits.add_data_set(testTime, 6.0, data['bl'], data['vis'])
            self.assertRaises(RuntimeError, fits.write)
Code Example #8
File: test_fitsidi.py  Project: lwa-project/lsl
    def test_mapper(self):
        """Test the NOSTA_MAPPER table."""

        testTime = time.time() - 2*86400.0
        testFile = os.path.join(self.testPath, 'idi-test-SM.fits')
        
        # Get some data
        data = self._init_data()
        
        # Start the file
        fits = fitsidi.Idi(testFile, ref_time=testTime)
        fits.set_stokes(['xx'])
        fits.set_frequency(data['freq'])
        fits.set_geometry(data['site'], data['antennas'])
        fits.add_data_set(unix_to_taimjd(testTime), 6.0, data['bl'], data['vis'])
        fits.write()
        fits.close()

        # Open the file and examine
        hdulist = astrofits.open(testFile)
        extNames = [hdu.name for hdu in hdulist]
        maxStand = -1
        for ant in data['antennas']:
            if ant.stand.id > maxStand:
                maxStand = ant.stand.id
        if maxStand > 255:
            self.assertTrue('NOSTA_MAPPER' in extNames)

            # Make sure the mapper makes sense
            mp = hdulist['NOSTA_MAPPER'].data
            ag = hdulist['ARRAY_GEOMETRY'].data
            mNoSta = mp.field('NOSTA')
            aNoSta = ag.field('NOSTA')
            mNoAct = mp.field('NOACT')
            aAnNam = ag.field('ANNAME')
            for msta, mact, asta, anam in zip(mNoSta, mNoAct, aNoSta, aAnNam):
                self.assertEqual(msta, asta)
                self.assertEqual(mact, int(anam[3:]))

        hdulist.close()
Code Example #9
def process_chunk(idf,
                  site,
                  good,
                  filename,
                  int_time=5.0,
                  pols=['xx'],
                  chunk_size=100):
    """
    Given an lsl.reader.ldp.TBNFile instance and various parameters for the
    cross-correlation, cross-correlate the data and save it to a file.
    """

    # Get antennas
    antennas = site.antennas

    # Get the metadata
    sample_rate = idf.get_info('sample_rate')
    freq = idf.get_info('freq1')

    # Create the list of good digitizers and a digitizer to Antenna instance mapping.
    # These are:
    #  toKeep  -> mapping of digitizer number to array location
    #  mapper -> mapping of Antenna instance to array location
    toKeep = [antennas[i].digitizer - 1 for i in good]
    mapper = [antennas[i] for i in good]

    # Create a list of unique stands to know what style of IDI file to create
    stands = set([antennas[i].stand.id for i in good])

    # Main loop over the input file to read in the data and organize it.  Several control
    # variables are defined for this:
    #  ref_time -> time (in seconds since the UNIX epoch) for the first data set
    #  setTime -> time (in seconds since the UNIX epoch) for the current data set
    ref_time = 0.0
    setTime = 0.0
    wallTime = time.time()
    for s in range(chunk_size):
        try:
            readT, t, data = idf.read(int_time)
        except Exception as e:
            print("Error: %s" % str(e))
            continue

        ## Prune out what we don't want
        data = data[toKeep, :, :]

        ## Split the polarizations
        antennasX, antennasY = [
            a for i, a in enumerate(antennas) if a.pol == 0 and i in toKeep
        ], [a for i, a in enumerate(antennas) if a.pol == 1 and i in toKeep]
        dataX, dataY = data[0::2, :, :], data[1::2, :, :]
        validX = numpy.ones((dataX.shape[0], dataX.shape[2]),
                            dtype=numpy.uint8)
        validY = numpy.ones((dataY.shape[0], dataY.shape[2]),
                            dtype=numpy.uint8)

        ## Apply the cable delays as phase rotations
        for i in range(dataX.shape[0]):
            gain = numpy.sqrt(antennasX[i].cable.gain(freq))
            phaseRot = numpy.exp(2j*numpy.pi*freq*(antennasX[i].cable.delay(freq) \
                                                   -antennasX[i].stand.z/speedOfLight))
            for j in range(dataX.shape[2]):
                dataX[i, :, j] *= phaseRot / gain
        for i in range(dataY.shape[0]):
            gain = numpy.sqrt(antennasY[i].cable.gain(freq))
            phaseRot = numpy.exp(2j*numpy.pi*freq*(antennasY[i].cable.delay(freq)\
                                                   -antennasY[i].stand.z/speedOfLight))
            for j in range(dataY.shape[2]):
                dataY[i, :, j] *= phaseRot / gain

        setTime = t
        if s == 0:
            ref_time = setTime

        # Set up the set time as a Python datetime instance so that it can be easily printed
        setDT = setTime.datetime
        print("Working on set #%i (%.3f seconds after set #1 = %s)" %
              ((s + 1),
               (setTime - ref_time), setDT.strftime("%Y/%m/%d %H:%M:%S.%f")))

        # Loop over polarization products
        for pol in pols:
            print("->  %s" % pol)
            if pol[0] == 'x':
                a1, d1, v1 = antennasX, dataX, validX
            else:
                a1, d1, v1 = antennasY, dataY, validY
            if pol[1] == 'x':
                a2, d2, v2 = antennasX, dataX, validX
            else:
                a2, d2, v2 = antennasY, dataY, validY

            ## Get the baselines
            baselines = uvutils.get_baselines(a1,
                                              antennas2=a2,
                                              include_auto=True,
                                              indicies=True)
            blList = []
            for bl in range(len(baselines)):
                blList.append((a1[baselines[bl][0]], a2[baselines[bl][1]]))

            ## Run the cross multiply and accumulate
            vis = XEngine2(d1, d2, v1, v2)

            # Select the right range of channels to save
            toUse = numpy.where((freq > 5.0e6) & (freq < 93.0e6))
            toUse = toUse[0]

            # If we are in the first polarization product of the first iteration, set up
            # the FITS IDI file.
            if s == 0 and pol == pols[0]:
                pol1, pol2 = fxc.pol_to_pols(pol)

                if len(stands) > 255:
                    fits = fitsidi.ExtendedIdi(filename, ref_time=ref_time)
                else:
                    fits = fitsidi.Idi(filename, ref_time=ref_time)
                fits.set_stokes(pols)
                fits.set_frequency(freq[toUse])
                fits.set_geometry(site, [a for a in mapper if a.pol == pol1])

            # Convert the setTime to a MJD and save the visibilities to the FITS IDI file
            obsTime = astro.unix_to_taimjd(setTime)
            fits.add_data_set(obsTime, readT, blList, vis[:, toUse], pol=pol)
        print("->  Cummulative Wall Time: %.3f s (%.3f s per integration)" %
              ((time.time() - wallTime), (time.time() - wallTime) / (s + 1)))

    # Cleanup after everything is done
    fits.write()
    fits.close()
    del (fits)
    del (data)
    del (vis)
    return True
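
A minimal usage sketch for the function above, assuming an LWA-1 station object from lsl.common.stations and a TBN capture opened with lsl.reader.ldp.TBNFile; the file names, antenna-selection criterion, and chunk count are illustrative assumptions rather than part of the original script:

from lsl.common import stations
from lsl.reader import ldp

# Illustrative driver for process_chunk() above; names and values are assumptions
station = stations.lwa1                      # LWA-1 station metadata bundled with LSL
antennas = station.antennas
good = [i for i, a in enumerate(antennas)
        if a.combined_status == 33]          # keep only antennas flagged as fully good
idf = ldp.TBNFile('example.tbn')             # placeholder TBN capture
process_chunk(idf, station, good, 'tbn-correlated.fits',
              int_time=5.0, pols=['xx'], chunk_size=10)
idf.close()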
Code Example #10
def process_chunk(idf,
                  site,
                  good,
                  filename,
                  int_time=5.0,
                  LFFT=64,
                  overlap=1,
                  pfb=False,
                  pols=['xx'],
                  chunk_size=100):
    """
    Given an lsl.reader.ldp.TBNFile instance and various parameters for the
    cross-correlation, cross-correlate the data and save it to a file.
    """

    # Get antennas
    antennas = []
    for a in site.antennas:
        if a.digitizer != 0:
            antennas.append(a)

    # Get the metadata
    sample_rate = idf.get_info('sample_rate')
    central_freq = idf.get_info('freq1')

    # Create the list of good digitizers and a digitizer to Antenna instance mapping.
    # These are:
    #  toKeep  -> mapping of digitizer number to array location
    #  mapper -> mapping of Antenna instance to array location
    toKeep = [antennas[i].digitizer - 1 for i in good]
    mapper = [antennas[i] for i in good]

    # Create a list of unique stands to know what style of IDI file to create
    stands = set([antennas[i].stand.id for i in good])

    # Main loop over the input file to read in the data and organize it.  Several control
    # variables are defined for this:
    #  ref_time -> time (in seconds since the UNIX epoch) for the first data set
    #  setTime -> time (in seconds since the UNIX epoch) for the current data set
    ref_time = 0.0
    setTime = 0.0
    wallTime = time.time()
    for s in range(chunk_size):
        try:
            readT, t, data = idf.read(int_time)
        except Exception as e:
            print("Error: %s" % str(e))
            continue

        ## Prune out what we don't want
        data = data[toKeep, :]

        setTime = t
        if s == 0:
            ref_time = setTime

        # Set up the set time as a Python datetime instance so that it can be easily printed
        setDT = datetime.utcfromtimestamp(setTime)
        setDT = setDT.replace(tzinfo=UTC())  # datetime.replace() returns a new object
        print("Working on set #%i (%.3f seconds after set #1 = %s)" %
              ((s + 1),
               (setTime - ref_time), setDT.strftime("%Y/%m/%d %H:%M:%S.%f")))

        # Loop over polarization products
        for pol in pols:
            print("->  %s" % pol)
            blList, freq, vis = fxc.FXMaster(data,
                                             mapper,
                                             LFFT=LFFT,
                                             overlap=overlap,
                                             pfb=pfb,
                                             include_auto=True,
                                             verbose=False,
                                             sample_rate=sample_rate,
                                             central_freq=central_freq,
                                             pol=pol,
                                             return_baselines=True,
                                             gain_correct=True)

            # Select the right range of channels to save
            toUse = numpy.where((freq > 5.0e6) & (freq < 93.0e6))
            toUse = toUse[0]

            # If we are in the first polarization product of the first iteration, set up
            # the FITS IDI file.
            if s == 0 and pol == pols[0]:
                pol1, pol2 = fxc.pol_to_pols(pol)

                if len(stands) > 255:
                    fits = fitsidi.ExtendedIdi(filename, ref_time=ref_time)
                else:
                    fits = fitsidi.Idi(filename, ref_time=ref_time)
                fits.set_stokes(pols)
                fits.set_frequency(freq[toUse])
                fits.set_geometry(site, [a for a in mapper if a.pol == pol1])

            # Convert the setTime to a MJD and save the visibilities to the FITS IDI file
            obsTime = astro.unix_to_taimjd(setTime)
            fits.add_data_set(obsTime, readT, blList, vis[:, toUse], pol=pol)
        print("->  Cummulative Wall Time: %.3f s (%.3f s per integration)" %
              ((time.time() - wallTime), (time.time() - wallTime) / (s + 1)))

    # Cleanup after everything is done
    fits.write()
    fits.close()
    del (fits)
    del (data)
    del (vis)
    return True
Code Example #11
    def test_multi_if(self):
        """Test writing more than one IF to a FITS IDI file."""

        testTime = time.time() - 2 * 86400.0
        testFile = os.path.join(self.testPath, 'idi-test-MultiIF.fits')

        # Get some data
        data = self.__initData()

        # Start the file
        fits = fitsidi.Idi(testFile, ref_time=testTime)
        fits.set_stokes(['xx'])
        fits.set_frequency(data['freq'])
        fits.set_frequency(data['freq'] + 10e6)
        fits.set_geometry(data['site'], data['antennas'])
        fits.add_data_set(
            testTime, 6.0, data['bl'],
            numpy.concatenate([data['vis'], 10 * data['vis']], axis=1))
        fits.write()

        # Open the file and examine
        hdulist = astrofits.open(testFile)
        # Check that all of the extensions are there
        extNames = [hdu.name for hdu in hdulist]
        for ext in [
                'ARRAY_GEOMETRY', 'FREQUENCY', 'ANTENNA', 'BANDPASS', 'SOURCE',
                'UV_DATA'
        ]:
            self.assertTrue(ext in extNames)

        # Load the mapper
        try:
            mp = hdulist['NOSTA_MAPPER'].data
            nosta = mp.field('NOSTA')
            noact = mp.field('NOACT')
        except KeyError:
            ag = hdulist['ARRAY_GEOMETRY'].data
            nosta = ag.field('NOSTA')
            noact = ag.field('NOSTA')
        mapper = {}
        for s, a in zip(nosta, noact):
            mapper[s] = a

        # Correct number of visibilities
        uv = hdulist['UV_DATA'].data
        self.assertEqual(len(uv.field('FLUX')), data['vis'].shape[0])

        # Correct number of frequencies
        for vis in uv.field('FLUX'):
            self.assertEqual(len(vis), 2 * 2 * len(data['freq']))

        # Correct values
        for bl, vis in zip(uv.field('BASELINE'), uv.field('FLUX')):
            # Convert mapped stands to real stands
            stand1 = mapper[(bl >> 8) & 255]
            stand2 = mapper[bl & 255]

            # Find out which visibility set in the random data corresponds to the
            # current visibility
            i = 0
            for ant1, ant2 in data['bl']:
                if ant1.stand.id == stand1 and ant2.stand.id == stand2:
                    break
                else:
                    i = i + 1

            # Extract the data and run the comparison - IF 1
            visData = numpy.zeros(2 * len(data['freq']), dtype=numpy.complex64)
            visData.real = vis[0::2]
            visData.imag = vis[1::2]
            for vd, sd in zip(visData[:len(data['freq'])], data['vis'][i, :]):
                self.assertAlmostEqual(vd, sd, 8)

            # Extract the data and run the comparison - IF 2
            visData = numpy.zeros(2 * len(data['freq']), dtype=numpy.complex64)
            visData.real = vis[0::2]
            visData.imag = vis[1::2]
            for vd, sd in zip(visData[len(data['freq']):],
                              10 * data['vis'][i, :]):
                self.assertAlmostEqual(vd, sd, 8)

        hdulist.close()
Code Example #12
    def test_uvdata(self):
        """Test the UV_DATA table."""

        testTime = time.time() - 2 * 86400.0
        testFile = os.path.join(self.testPath, 'idi-test-UV.fits')

        # Get some data
        data = self.__initData()

        # Start the file
        fits = fitsidi.Idi(testFile, ref_time=testTime)
        fits.set_stokes(['xx'])
        fits.set_frequency(data['freq'])
        fits.set_geometry(data['site'], data['antennas'])
        fits.add_data_set(testTime, 6.0, data['bl'], data['vis'])
        fits.write()

        # Open the file and examine
        hdulist = astrofits.open(testFile)
        uv = hdulist['UV_DATA'].data

        # Load the mapper
        try:
            mp = hdulist['NOSTA_MAPPER'].data
            nosta = mp.field('NOSTA')
            noact = mp.field('NOACT')
        except KeyError:
            ag = hdulist['ARRAY_GEOMETRY'].data
            nosta = ag.field('NOSTA')
            noact = ag.field('NOSTA')
        mapper = {}
        for s, a in zip(nosta, noact):
            mapper[s] = a

        # Correct number of visibilities
        self.assertEqual(len(uv.field('FLUX')), data['vis'].shape[0])

        # Correct number of frequencies
        for vis in uv.field('FLUX'):
            self.assertEqual(len(vis), 2 * len(data['freq']))

        # Correct values
        for bl, vis in zip(uv.field('BASELINE'), uv.field('FLUX')):
            # Convert mapped stands to real stands
            stand1 = mapper[(bl >> 8) & 255]
            stand2 = mapper[bl & 255]

            # Find out which visibility set in the random data corresponds to the
            # current visibility
            i = 0
            for ant1, ant2 in data['bl']:
                if ant1.stand.id == stand1 and ant2.stand.id == stand2:
                    break
                else:
                    i = i + 1

            # Extract the data and run the comparison
            visData = numpy.zeros(len(data['freq']), dtype=numpy.complex64)
            visData.real = vis[0::2]
            visData.imag = vis[1::2]
            for vd, sd in zip(visData, data['vis'][i, :]):
                self.assertAlmostEqual(vd, sd, 8)
            i = i + 1

        hdulist.close()
Code Example #13
def process_chunk(idf,
                  site,
                  good,
                  filename,
                  LFFT=64,
                  overlap=1,
                  pfb=False,
                  pols=['xx', 'yy']):
    """
    Given an lsl.reader.ldp.TBWFile instance and various parameters for the
    cross-correlation, cross-correlate the data and save it to a file.
    """

    # Get antennas
    antennas = site.antennas

    # Get the metadata
    sample_rate = idf.get_info('sample_rate')

    # Create the list of good digitizers and a digitizer to Antenna instance mapping.
    # These are:
    #  toKeep  -> mapping of digitizer number to array location
    #  mapper -> mapping of Antenna instance to array location
    toKeep = [antennas[i].digitizer - 1 for i in good]
    mapper = [antennas[i] for i in good]

    # Create a list of unique stands to know what style of IDI file to create
    stands = set([antennas[i].stand.id for i in good])

    wallTime = time.time()
    readT, t, data = idf.read()
    setTime = t
    ref_time = t

    # Set up the set time as a Python datetime instance so that it can be easily printed
    setDT = setTime.datetime
    print("Working on set #1 (%.3f seconds after set #1 = %s)" %
          ((setTime - ref_time), setDT.strftime("%Y/%m/%d %H:%M:%S.%f")))

    # In order for the TBW stuff to actually run, we need to run it with sub-
    # integrations.  8 sub-integrations (61.2 ms / 8 = 7.7 ms per section)
    # seems to work ok with a "reasonable" number of channels.
    nSec = 8
    secSize = data.shape[1] // nSec

    # Loop over polarizations (there should be only 1)
    for pol in pols:
        print("-> %s" % pol)
        try:
            tempVis *= 0  # pylint:disable=undefined-variable
        except NameError:
            pass

        # Set up the progress bar so we can keep up with how the sub-integrations
        # are progressing
        pb = ProgressBar(max=nSec)
        sys.stdout.write(pb.show() + '\r')
        sys.stdout.flush()

        # Loop over sub-integrations (set by nSec)
        for k in range(nSec):
            blList, freq, vis = fxc.FXMaster(data[toKeep, k * secSize:(k + 1) *
                                                  secSize],
                                             mapper,
                                             LFFT=LFFT,
                                             overlap=overlap,
                                             pfb=pfb,
                                             include_auto=True,
                                             verbose=False,
                                             sample_rate=sample_rate,
                                             central_freq=0.0,
                                             pol=pol,
                                             return_baselines=True,
                                             gain_correct=True)

            toUse = numpy.where((freq >= 5.0e6) & (freq <= 93.0e6))
            toUse = toUse[0]

            try:
                tempVis += vis
            except NameError:
                tempVis = vis

            pb.inc(amount=1)
            sys.stdout.write(pb.show() + '\r')
            sys.stdout.flush()

        # Average the sub-integrations together
        vis = tempVis / float(nSec)

        # Set up the FITS IDI file if we need to
        if pol == pols[0]:
            pol1, pol2 = fxc.pol_to_pols(pol)

            if len(stands) > 255:
                fits = fitsidi.ExtendedIdi(filename, ref_time=ref_time)
            else:
                fits = fitsidi.Idi(filename, ref_time=ref_time)
            fits.set_stokes(pols)
            fits.set_frequency(freq[toUse])
            fits.set_geometry(site, [a for a in mapper if a.pol == pol1])

        # Add the visibilities
        obsTime = astro.unix_to_taimjd(setTime)
        fits.add_data_set(obsTime, readT, blList, vis[:, toUse], pol=pol)
        sys.stdout.write(pb.show() + '\r')
        sys.stdout.write('\n')
        sys.stdout.flush()
    print("->  Cummulative Wall Time: %.3f s (%.3f s per integration)" %
          ((time.time() - wallTime), (time.time() - wallTime)))

    fits.write()
    fits.close()
    del (fits)
    del (data)
    del (vis)
    return True
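
Similarly, a hedged usage sketch for the TBW variant above, assuming a capture opened with lsl.reader.ldp.TBWFile; the names and values here are again illustrative assumptions, not part of the original script:

from lsl.common import stations
from lsl.reader import ldp

# Illustrative driver for the TBW process_chunk() above; names are assumptions
station = stations.lwa1
antennas = station.antennas
good = [i for i, a in enumerate(antennas)
        if a.combined_status == 33]          # keep only antennas flagged as fully good
idf = ldp.TBWFile('example.tbw')             # placeholder TBW capture
process_chunk(idf, station, good, 'tbw-correlated.fits',
              LFFT=64, pols=['xx', 'yy'])
idf.close()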