Code Example #1
def fine_tune_boundary_start(fh, start, max_frames=4, verbose=True):
    fh.seek(start)
    
    # Report
    if verbose:
        print("Tuning boundary at %i of '%s'..." % (start, os.path.basename(fh.name)))
        
    # Align on the start of a Mark5C packet...
    while True:
        try:
            junkFrame = drx.read_frame(fh)
            try:
                # ... that has a valid decimation
                srate = junkFrame.sample_rate
                break
            except ZeroDivisionError:
                pass
        except errors.SyncError:
            fh.seek(-drx.FRAME_SIZE+1, 1)
    fh.seek(-drx.FRAME_SIZE, 1)
    # ... and save that location
    frame_begin = junkFrame.payload.timetag
    file_begin = fh.tell()
    if verbose:
        print("  start @ %i with %i" % (file_begin, frame_begin))
        
    # Get how much the timetags should change and other basic information
    fh.seek(file_begin)
    ids = []
    for i in xrange(24*8):
        junkFrame = drx.read_frame(fh)
        b,t,p = junkFrame.id
        id = (t,p)
        if id not in ids:
            ids.append(id)
    ttStep = 4096*junkFrame.header.decimation
    if verbose:
        print("  %i frames with a timetag step of %i" % (len(ids), ttStep))
        
    # Load in the times to figure out what to do
    fh.seek(file_begin)
    timetags = []
    for i in xrange(max_frames):
        junkFrame = drx.read_frame(fh)
        timetags.append( junkFrame.payload.timetag )
    skips = [timetags[i]-timetags[i-1] for i in xrange(1, max_frames)]
    try:
        offset = min([skips.index(0), skips.index(ttStep)])
    except ValueError:
        try:
            offset = skips.index(0)
        except ValueError:
            offset = 0
    if verbose:
        print("  -> shifting boundary by %i frame(s)" % offset)
    start += drx.FRAME_SIZE*offset
    return start
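
A minimal usage sketch (not part of the original source), assuming fine_tune_boundary_start() above and the lsl.reader import path are available; the file name below is hypothetical:

from lsl.reader import drx

# Hypothetical DRX capture file; fine_tune_boundary_start() from above is assumed in scope
with open('056777_000123456', 'rb') as fh:
    rough = 1000 * drx.FRAME_SIZE     # a candidate boundary, in bytes
    tuned = fine_tune_boundary_start(fh, rough, max_frames=4, verbose=True)
    print("Boundary moved from byte %i to byte %i" % (rough, tuned))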
Code Example #2
    def test_write_frame(self):
        """Test that the DRX data writer works."""

        testFile = os.path.join(self.testPath, 'drx-test-W.dat')

        nFrames = os.path.getsize(drxFile) // drxReader.FRAME_SIZE

        # Read in the DRX frames from the test file
        fh = open(drxFile, 'rb')
        origFrames = []
        for i in range(nFrames):
            origFrames.append(drxReader.read_frame(fh))
        fh.close()

        # Write the data to a DRX test file
        fh = open(testFile, 'wb')
        for origFrame in origFrames:
            rawFrame = drxWriter.frame_to_frame(origFrame)
            rawFrame.tofile(fh)
        fh.close()

        # Read the frames back in from the test file
        fh = open(testFile, 'rb')
        fakeFrames = []
        for i in range(nFrames):
            fakeFrames.append(drxReader.read_frame(fh))
        fh.close()

        for fakeFrame, origFrame in zip(fakeFrames, origFrames):
            # Test values returned by info functions
            self.assertEqual(fakeFrame.id[0], origFrame.id[0])
            self.assertEqual(fakeFrame.id[1], origFrame.id[1])
            self.assertEqual(fakeFrame.id[2], origFrame.id[2])
            self.assertAlmostEqual(fakeFrame.sample_rate,
                                   origFrame.sample_rate, 4)

            # Test raw header values
            self.assertEqual(fakeFrame.header.second_count,
                             origFrame.header.second_count)
            self.assertEqual(fakeFrame.header.decimation,
                             origFrame.header.decimation)
            self.assertEqual(fakeFrame.header.time_offset,
                             origFrame.header.time_offset)

            # Test raw data values
            self.assertEqual(fakeFrame.payload.timetag,
                             origFrame.payload.timetag)
            self.assertEqual(fakeFrame.payload.flags, origFrame.payload.flags)
            for i in range(4096):
                self.assertEqual(fakeFrame.payload.data[i].real,
                                 origFrame.payload.data[i].real)
                self.assertEqual(fakeFrame.payload.data[i].imag,
                                 origFrame.payload.data[i].imag)
Code Example #3
    def test_basic_drx(self):
        """Test building a basic DRX signal"""

        testFile = os.path.join(self.testPath, 'drx.dat')

        fh = open(testFile, 'wb')
        dp.basic_signal(fh,
                        numpy.array([1, 2, 3, 4]),
                        10,
                        mode='DRX',
                        filter=6,
                        ntuning=2,
                        start_time=1000)
        fh.close()

        # Check the file size
        fileSize = os.path.getsize(testFile)
        nSamples = fileSize // drx.FRAME_SIZE
        self.assertEqual(nSamples, 10 * 4 * 2 * 2)

        # Check the first frame
        fh = open(testFile, 'rb')
        frame = drx.read_frame(fh)
        fh.close()
        self.assertEqual(frame.payload.timetag, 1000 * dp_common.fS)
        self.assertEqual(frame.header.frame_count, 0)
        self.assertEqual(frame.header.second_count, 0)
Code Example #4
    def test_drx_read(self):
        """Test reading in a frame from a DRX file."""

        fh = open(drxFile, 'rb')
        # First frame is really DRX and stores the IDs
        frame1 = drx.read_frame(fh)
        beam, tune, pol = frame1.id
        self.assertEqual(beam, 4)
        self.assertEqual(tune, 1)
        self.assertEqual(pol, 1)
        # Second frame
        frame2 = drx.read_frame(fh)
        beam, tune, pol = frame2.id
        self.assertEqual(beam, 4)
        self.assertEqual(tune, 2)
        self.assertEqual(pol, 0)
        fh.close()
Code Example #5
    def test_drx_reorder(self):
        """Test the reorder function of the TBN ring buffer."""

        fh = open(drxFile, 'rb')
        junkFrame = drx.read_frame(fh)
        b, t, p = junkFrame.id
        fh.seek(-drx.FRAME_SIZE, 1)

        # Create the FrameBuffer instance
        frameBuffer = buffer.DRXFrameBuffer(beams=[
            b,
        ],
                                            tunes=[1, 2],
                                            pols=[0, 1],
                                            nsegments=2,
                                            reorder=True)

        # Go
        while True:
            try:
                cFrame = drx.read_frame(fh)
            except errors.EOFError:
                break
            except errors.SyncError:
                continue

            frameBuffer.append(cFrame)
            cFrames = frameBuffer.get()

            if cFrames is None:
                continue

            # Make sure it has the right number of frames
            self.assertEqual(len(cFrames), 4)

            # Check the order
            for i in range(1, len(cFrames)):
                pB, pT, pP = cFrames[i - 1].id
                cB, cT, cP = cFrames[i].id

                pID = 4 * pB + 2 * (pT - 1) + pP
                cID = 4 * cB + 2 * (cT - 1) + cP
                self.assertTrue(cID > pID)

        fh.close()
Code Example #6
    def test_drx_buffer_flush(self):
        """Test the DRX ring buffer's flush() function."""

        fh = open(drxFile, 'rb')
        junkFrame = drx.read_frame(fh)
        b, t, p = junkFrame.id
        fh.seek(-drx.FRAME_SIZE, 1)

        # Create the FrameBuffer instance
        frameBuffer = buffer.DRXFrameBuffer(beams=[
            b,
        ],
                                            tunes=[1, 2],
                                            pols=[0, 1],
                                            nsegments=2)

        # Go
        while True:
            try:
                cFrame = drx.read_frame(fh)
            except errors.EOFError:
                break
            except errors.SyncError:
                continue

            frameBuffer.append(cFrame)
            cFrames = frameBuffer.get()

            if cFrames is None:
                continue

        fh.close()

        # Flush the buffer
        for cFrames in frameBuffer.flush():
            # Make sure the dump has one of the expected time tags
            self.assertTrue(
                cFrames[0].payload.timetag in (257355782095346056, ))

            # Make sure it has the right number of frames
            self.assertEqual(len(cFrames), 4)
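
Code Examples #5, #6, and #17 all exercise the same ring-buffer pattern. As a condensed sketch (assuming the lsl.reader.drx, lsl.reader.errors, and lsl.reader.buffer import paths, and a hypothetical file name), the production-style loop looks roughly like this:

from lsl.reader import drx, errors, buffer

with open('056777_000123456', 'rb') as fh:          # hypothetical DRX capture
    frame = drx.read_frame(fh)
    fh.seek(-drx.FRAME_SIZE, 1)

    frameBuffer = buffer.DRXFrameBuffer(beams=[frame.id[0]], tunes=[1, 2],
                                        pols=[0, 1], nsegments=2, reorder=True)
    while True:
        try:
            frameBuffer.append(drx.read_frame(fh))
        except errors.EOFError:
            break
        except errors.SyncError:
            continue

        cFrames = frameBuffer.get()
        if cFrames is None:
            continue
        # ... process the four time-aligned frames in cFrames here ...

    # Drain whatever is left in the buffer at the end of the file
    for cFrames in frameBuffer.flush():
        pass  # ... handle any partially filled sets ...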
Code Example #7
    def test_sim_frame(self):
        """Test the drx.SimFrame class."""

        # Read in a DRX frame from the test file
        fh = open(drxFile, 'rb')
        origFrame = drxReader.read_frame(fh)
        fh.close()

        fakeFrame = drxWriter.SimFrame()
        fakeFrame.load_frame(origFrame)
        # Test the validity of the SimFrame
        self.assertTrue(fakeFrame.is_valid())
Code Example #8
    def test_drx_rate(self):
        """Test finding out the DRX sample rate."""

        fh = open(drxFile, 'rb')
        cFrame = drx.read_frame(fh)
        fh.seek(0)

        # Sample rate
        self.assertEqual(cFrame.sample_rate, drx.get_sample_rate(fh))

        # Filter code
        self.assertEqual(cFrame.filter_code,
                         drx.get_sample_rate(fh, filter_code=True))
        fh.close()
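
The sample rate recovered here drives the frame-count arithmetic in the later scripts: each DRX frame carries 4096 samples for one beam/tuning/pol stream. A rough sketch of that bookkeeping, assuming the lsl.reader.drx import path and that FILTER_CODES maps filter codes to sample rates (as its use in Code Example #19 suggests):

from lsl.reader import drx

def frames_per_second(filter_code, beampols=4):
    """Approximate DRX frame rate for a file: 4096 samples per frame per
    beam/tuning/pol stream, times the number of streams being recorded."""
    srate = drx.FILTER_CODES[filter_code]    # filter code -> samples per second
    return srate / 4096.0 * beampols         # frames per second in the file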
Code Example #9
    def test_drx_sort(self):
        """Test sorting DRX frames by time tags."""

        fh = open(drxFile, 'rb')
        # Frames 1 through 10
        frames = []
        for i in range(1, 11):
            frames.append(drx.read_frame(fh))
        fh.close()

        frames.sort()
        frames = frames[::-1]

        for i in range(1, len(frames)):
            self.assertTrue(frames[i - 1] >= frames[i])
Code Example #10
    def test_drx_comps(self):
        """Test the DRX frame comparison operators (>, <, etc.) for time tags."""

        fh = open(drxFile, 'rb')
        # Frames 1 through 10
        frames = []
        for i in range(1, 11):
            frames.append(drx.read_frame(fh))
        fh.close()

        self.assertTrue(0 < frames[0])
        self.assertFalse(0 > frames[0])
        self.assertTrue(frames[-1] >= frames[0])
        self.assertFalse(frames[-1] <= frames[0])
        self.assertTrue(frames[0] == frames[0])
        self.assertFalse(frames[0] == frames[-1])
        self.assertFalse(frames[0] != frames[0])
Code Example #11
    def test_drx_errors(self):
        """Test reading in all frames from a truncated DRX file."""

        fh = open(drxFile, 'rb')
        # Frames 1 through 32
        for i in range(1, 33):
            frame = drx.read_frame(fh)

        # Last frame should be an error (errors.EOFError)
        self.assertRaises(errors.EOFError, drx.read_frame, fh)
        fh.close()

        # If we offset into the file by 1 byte, we should get a
        # sync error (errors.SyncError).
        fh = open(drxFile, 'rb')
        fh.seek(1)
        self.assertRaises(errors.SyncError, drx.read_frame, fh)
        fh.close()
Code Example #12
    def test_frame_header_errors(self):
        """Test the header error scenarios when validating a DRX SimFrame."""

        # Read in a DRX frame from the test file
        fh = open(drxFile, 'rb')
        origFrame = drxReader.read_frame(fh)
        fh.close()

        # Try to validate frame with the wrong beam number
        fakeFrame = drxWriter.SimFrame()
        fakeFrame.load_frame(copy.deepcopy(origFrame))
        fakeFrame.beam = 5
        self.assertRaises(ValueError, fakeFrame.is_valid, raise_errors=True)

        # Try to validate frame with the wrong tuning number
        fakeFrame = drxWriter.SimFrame()
        fakeFrame.load_frame(copy.deepcopy(origFrame))
        fakeFrame.tune = 3
        self.assertRaises(ValueError, fakeFrame.is_valid, raise_errors=True)
Code Example #13
File: test_reader.py  Project: lwa-project/lsl
    def test_drx_math(self):
        """Test mathematical operations on DRX frame data via frames."""

        fh = open(drxFile, 'rb')
        # Frames 1 through 10
        frames = []
        for i in range(1, 11):
            frames.append(drx.read_frame(fh))
        fh.close()

        # Multiplication
        frameT = frames[0] * 2.0
        numpy.testing.assert_allclose(frameT.payload.data,
                                      2 * frames[0].payload.data,
                                      atol=1e-6)
        frameT *= 2.0
        numpy.testing.assert_allclose(frameT.payload.data,
                                      4 * frames[0].payload.data,
                                      atol=1e-6)
        frameT = frames[0] * frames[1]
        numpy.testing.assert_allclose(frameT.payload.data,
                                      frames[0].payload.data *
                                      frames[1].payload.data,
                                      atol=1e-6)

        # Addition
        frameA = frames[0] + 2.0
        numpy.testing.assert_allclose(frameA.payload.data,
                                      2 + frames[0].payload.data,
                                      atol=1e-6)
        frameA += 2.0
        numpy.testing.assert_allclose(frameA.payload.data,
                                      4 + frames[0].payload.data,
                                      atol=1e-6)
        frameA = frames[0] + frames[1]
        numpy.testing.assert_allclose(frameA.payload.data,
                                      frames[0].payload.data +
                                      frames[1].payload.data,
                                      atol=1e-6)
Code Example #14
    def test_drx_math(self):
        """Test mathematical operations on DRX frame data via frames."""

        fh = open(drxFile, 'rb')
        # Frames 1 through 10
        frames = []
        for i in range(1, 11):
            frames.append(drx.read_frame(fh))
        fh.close()

        # Multiplication
        frameT = frames[0] * 2.0
        for i in range(4096):
            self.assertAlmostEqual(frameT.payload.data[i],
                                   2 * frames[0].payload.data[i], 2)
        frameT *= 2.0
        for i in range(4096):
            self.assertAlmostEqual(frameT.payload.data[i],
                                   4 * frames[0].payload.data[i], 2)
        frameT = frames[0] * frames[1]
        for i in range(4096):
            self.assertAlmostEqual(
                frameT.payload.data[i],
                frames[0].payload.data[i] * frames[1].payload.data[i], 2)

        # Addition
        frameA = frames[0] + 2.0
        for i in range(4096):
            self.assertAlmostEqual(frameA.payload.data[i],
                                   2 + frames[0].payload.data[i], 2)
        frameA += 2.0
        for i in range(4096):
            self.assertAlmostEqual(frameA.payload.data[i],
                                   4 + frames[0].payload.data[i], 2)
        frameA = frames[0] + frames[1]
        for i in range(4096):
            self.assertAlmostEqual(
                frameA.payload.data[i],
                frames[0].payload.data[i] + frames[1].payload.data[i], 2)
Code Example #15
    def test_frame_data_errors(self):
        """Test the data error scenarios when validating a DRX SimFrame."""

        # Read in a DRX frame from the test file
        fh = open(drxFile, 'rb')
        origFrame = drxReader.read_frame(fh)
        fh.close()

        # Try to validate frame with the wrong data type
        fakeFrame = drxWriter.SimFrame()
        fakeFrame.load_frame(copy.deepcopy(origFrame))
        fakeFrame.data = fakeFrame.payload.data.real
        self.assertRaises(ValueError, fakeFrame.is_valid, raise_errors=True)

        # Try to validate frame with the wrong data size
        fakeFrame = drxWriter.SimFrame()
        fakeFrame.load_frame(copy.deepcopy(origFrame))
        fakeFrame.data = None
        self.assertRaises(ValueError, fakeFrame.is_valid, raise_errors=True)
        fakeFrame = drxWriter.SimFrame()
        fakeFrame.load_frame(copy.deepcopy(origFrame))
        fakeFrame.data = fakeFrame.payload.data[0:50]
        self.assertRaises(ValueError, fakeFrame.is_valid, raise_errors=True)
Code Example #16
def main(args):
    LFFT = args.fft_length

    stand1 = int(args.dipole_id_x)
    stand2 = int(args.dipole_id_y)
    filenames = args.filename

    # Build up the station
    if args.lwasv:
        site = stations.lwasv
    else:
        site = stations.lwa1

    # Figure out which antennas we need
    antennas = []
    for ant in site.antennas:
        if ant.stand.id == stand1 and ant.pol == 0:
            antennas.append(ant)
    for ant in site.antennas:
        if ant.stand.id == stand2 and ant.pol == 0:
            antennas.append(ant)

    # Loop through the input files...
    for filename in filenames:
        fh = open(filename, "rb")
        nFramesFile = os.path.getsize(filename) // drx.FRAME_SIZE
        #junkFrame = drx.read_frame(fh)
        #fh.seek(0)
        while True:
            try:
                junkFrame = drx.read_frame(fh)
                try:
                    srate = junkFrame.sample_rate
                    t0 = junkFrame.time
                    break
                except ZeroDivisionError:
                    pass
            except errors.SyncError:
                fh.seek(-drx.FRAME_SIZE + 1, 1)

        fh.seek(-drx.FRAME_SIZE, 1)

        beam, tune, pol = junkFrame.id
        srate = junkFrame.sample_rate

        tunepols = drx.get_frames_per_obs(fh)
        tunepols = tunepols[0] + tunepols[1] + tunepols[2] + tunepols[3]
        beampols = tunepols

        # Offset in frames for beampols beam/tuning/pol. sets
        offset = int(args.skip * srate / 4096 * beampols)
        offset = int(1.0 * offset / beampols) * beampols
        fh.seek(offset * drx.FRAME_SIZE, 1)

        # Iterate on the offsets until we reach the right point in the file.  This
        # is needed to deal with files that start with only one tuning and/or a
        # different sample rate.
        while True:
            ## Figure out where in the file we are and what the current tuning/sample
            ## rate is
            junkFrame = drx.read_frame(fh)
            srate = junkFrame.sample_rate
            t1 = junkFrame.time
            tunepols = drx.get_frames_per_obs(fh)
            tunepol = tunepols[0] + tunepols[1] + tunepols[2] + tunepols[3]
            beampols = tunepol
            fh.seek(-drx.FRAME_SIZE, 1)

            ## See how far off the current frame is from the target
            tDiff = t1 - (t0 + args.skip)

            ## Use a fraction of that to come up with a new seek parameter
            tCorr = -tDiff / 8.0
            cOffset = int(tCorr * srate / 4096 * beampols)
            cOffset = int(1.0 * cOffset / beampols) * beampols
            offset += cOffset

            ## If the offset is zero, we are done.  Otherwise, apply the offset
            ## and check the location in the file again.
            if cOffset == 0:
                break
            fh.seek(cOffset * drx.FRAME_SIZE, 1)

        # Update the offset actually used
        args.skip = t1 - t0
        offset = int(round(args.skip * srate / 4096 * beampols))
        offset = int(1.0 * offset / beampols) * beampols

        tnom = junkFrame.header.time_offset
        tStart = junkFrame.time

        # Get the DRX frequencies
        cFreq1 = 0.0
        cFreq2 = 0.0
        for i in xrange(4):
            junkFrame = drx.read_frame(fh)
            b, t, p = junkFrame.id
            if p == 0 and t == 1:
                cFreq1 = junkFrame.central_freq
            elif p == 0 and t == 2:
                cFreq2 = junkFrame.central_freq
            else:
                pass
        fh.seek(-4 * drx.FRAME_SIZE, 1)

        # Align the files as close as possible by the time tags and then make sure that
        # the first frame processed is from tuning 1, pol 0.
        junkFrame = drx.read_frame(fh)
        beam, tune, pol = junkFrame.id
        pair = 2 * (tune - 1) + pol
        j = 0
        while pair != 0:
            junkFrame = drx.read_frame(fh)
            beam, tune, pol = junkFrame.id
            pair = 2 * (tune - 1) + pol
            j += 1
        fh.seek(-drx.FRAME_SIZE, 1)
        print("Shifted beam %i data by %i frames (%.4f s)" %
              (beam, j, j * 4096 / srate / 4))

        # Set integration time
        tInt = args.avg_time
        nFrames = int(round(tInt * srate / 4096))
        tInt = nFrames * 4096 / srate

        # Total file duration
        tFile = nFramesFile / 4 * 4096 / srate

        # Report
        print("Filename: %s" % filename)
        print("  Sample Rate: %i Hz" % srate)
        print("  Tuning 1: %.1f Hz" % cFreq1)
        print("  Tuning 2: %.1f Hz" % cFreq2)
        print("  ===")
        print("  Integration Time: %.3f s" % tInt)
        print("  Integrations in File: %i" % int(tFile / tInt))

        nChunks = int(tFile / tInt)
        pb = ProgressBar(max=nChunks)
        for i in xrange(nChunks):
            junkFrame = drx.read_frame(fh)
            tStart = junkFrame.time
            fh.seek(-drx.FRAME_SIZE, 1)

            count1 = [0, 0]
            data1 = numpy.zeros((2, 4096 * nFrames), dtype=numpy.complex64)
            count2 = [0, 0]
            data2 = numpy.zeros((2, 4096 * nFrames), dtype=numpy.complex64)
            for j in xrange(nFrames):
                for k in xrange(4):
                    cFrame = drx.read_frame(fh)
                    beam, tune, pol = cFrame.id
                    pair = 2 * (tune - 1) + pol

                    if tune == 1:
                        data1[pol, count1[pol] * 4096:(count1[pol] + 1) *
                              4096] = cFrame.payload.data
                        count1[pol] += 1
                    else:
                        data2[pol, count2[pol] * 4096:(count2[pol] + 1) *
                              4096] = cFrame.payload.data
                        count2[pol] += 1

            # Correlate
            blList1, freq1, vis1 = fxc.FXMaster(data1,
                                                antennas,
                                                LFFT=LFFT,
                                                overlap=1,
                                                include_auto=True,
                                                verbose=False,
                                                sample_rate=srate,
                                                central_freq=cFreq1,
                                                pol='XX',
                                                return_baselines=True,
                                                gain_correct=False,
                                                clip_level=0)

            blList2, freq2, vis2 = fxc.FXMaster(data2,
                                                antennas,
                                                LFFT=LFFT,
                                                overlap=1,
                                                include_auto=True,
                                                verbose=False,
                                                sample_rate=srate,
                                                central_freq=cFreq2,
                                                pol='XX',
                                                return_baselines=True,
                                                gain_correct=False,
                                                clip_level=0)

            if nChunks != 1:
                outfile = os.path.split(filename)[1]
                outfile = os.path.splitext(outfile)[0]
                outfile = "%s-vis-%04i.npz" % (outfile, i + 1)
            else:
                outfile = os.path.split(filename)[1]
                outfile = os.path.splitext(outfile)[0]
                outfile = "%s-vis.npz" % outfile
            numpy.savez(outfile,
                        srate=srate,
                        freq1=freq1,
                        vis1=vis1,
                        freq2=freq2,
                        vis2=vis2,
                        tStart=tStart,
                        tInt=tInt,
                        stands=numpy.array([stand1, stand2]))

            del data1
            del data2

            pb.inc(amount=1)
            sys.stdout.write(pb.show() + '\r')
            sys.stdout.flush()

        sys.stdout.write(pb.show() + '\r')
        sys.stdout.write('\n')
        sys.stdout.flush()

        # Plot
        fig = plt.figure()
        i = 0
        for bl, vi in zip(blList1, vis1):
            ax = fig.add_subplot(4, 3, i + 1)
            ax.plot(freq1 / 1e6, numpy.unwrap(numpy.angle(vi)))
            ax.set_title('Stand %i - Stand %i' %
                         (bl[0].stand.id, bl[1].stand.id))
            ax = fig.add_subplot(4, 3, i + 4)
            ax.plot(freq1 / 1e6, numpy.abs(vi))
            i += 1

            coeff = numpy.polyfit(freq1, numpy.unwrap(numpy.angle(vi)), 1)
            #print(coeff[0]/2/numpy.pi*1e9, coeff[1]*180/numpy.pi)

        i = 6
        for bl, vi in zip(blList2, vis2):
            ax = fig.add_subplot(4, 3, i + 1)
            ax.plot(freq2 / 1e6, numpy.unwrap(numpy.angle(vi)))
            ax.set_title('Stand %i - Stand %i' %
                         (bl[0].stand.id, bl[1].stand.id))
            ax = fig.add_subplot(4, 3, i + 4)
            ax.plot(freq2 / 1e6, numpy.abs(vi))
            i += 1

            coeff = numpy.polyfit(freq2, numpy.unwrap(numpy.angle(vi)), 1)
Code Example #17
    def test_drx_basic(self):
        """Test the DRX ring buffer."""

        fh = open(drxFile, 'rb')
        junkFrame = drx.read_frame(fh)
        b, t, p = junkFrame.id
        fh.seek(-drx.FRAME_SIZE, 1)

        # Create the FrameBuffer instance
        frameBuffer = buffer.DRXFrameBuffer(beams=[
            b,
        ],
                                            tunes=[1, 2],
                                            pols=[0, 1],
                                            nsegments=2)

        # Go
        dumped = []
        while True:
            try:
                cFrame = drx.read_frame(fh)
            except errors.EOFError:
                break
            except errors.SyncError:
                continue

            frameBuffer.append(cFrame)
            cFrames = frameBuffer.get()

            if cFrames is None:
                continue

            dumped.append(cFrames[0].payload.timetag)

        fh.close()

        # Make sure we have the right number of frames in the buffer
        nFrames = 0
        for key in frameBuffer.buffer.keys():
            nFrames = nFrames + len(frameBuffer.buffer[key])
        self.assertEqual(nFrames, 1)
        self.assertEqual(nFrames + len(dumped) * 4, 32 + 1)

        # Make sure nothing has happened that shouldn't have
        self.assertEqual(frameBuffer.full, 7)
        self.assertEqual(frameBuffer.partial, 1)
        self.assertEqual(frameBuffer.missing, 1)
        self.assertEqual(frameBuffer.dropped, 0)

        # Make sure we have the right keys
        for key in dumped:
            self.assertTrue(key in (257355782095018376, 257355782095059336,
                                    257355782095100296, 257355782095141256,
                                    257355782095182216, 257355782095223176,
                                    257355782095264136, 257355782095305096))

        for key in frameBuffer.buffer.keys():
            self.assertTrue(key in (257355782095346056, ))

        # Make sure the buffer keys have the right sizes
        self.assertEqual(len(frameBuffer.buffer[257355782095346056]), 1)
Code Example #18
def main(args):
    fh = open(args.filename, "rb")
    nFramesFile = os.path.getsize(args.filename) // drx.FRAME_SIZE

    while True:
        junkFrame = drx.read_frame(fh)
        try:
            srate = junkFrame.sample_rate
            break
        except ZeroDivisionError:
            pass
    fh.seek(-drx.FRAME_SIZE, 1)

    print(junkFrame.header.time_offset)
    beams = drx.get_beam_count(fh)
    tunepols = drx.get_frames_per_obs(fh)
    tunepol = tunepols[0] + tunepols[1] + tunepols[2] + tunepols[3]
    beampols = tunepol

    # Offset in frames for beampols beam/tuning/pol. sets
    offset = int(round(args.skip * srate / 4096 * beampols))
    offset = int(1.0 * offset / beampols) * beampols
    args.skip = 1.0 * offset / beampols * 4096 / srate
    fh.seek(offset * drx.FRAME_SIZE)

    # Make sure that the file chunk size is an integer multiple
    # of beampols.
    maxFrames = int(19144 / beampols) * beampols

    # Number of frames to integrate over
    toClip = False
    oldAverage = args.plot_range
    if args.plot_range < 4096 / srate:
        toClip = True
        args.plot_range = 4096 / srate
    nFrames = int(args.plot_range * srate / 4096 * beampols)
    nFrames = int(1.0 * nFrames / beampols) * beampols
    args.plot_range = 1.0 * nFrames / beampols * 4096 / srate

    # Number of remaining chunks
    nChunks = int(math.ceil(1.0 * (nFrames) / maxFrames))

    # File summary
    print("Filename: %s" % args.filename)
    print("Beams: %i" % beams)
    print("Tune/Pols: %i %i %i %i" % tunepols)
    print("Sample Rate: %i Hz" % srate)
    print("Frames: %i (%.3f s)" %
          (nFramesFile, 1.0 * nFramesFile / beampols * 4096 / srate))
    print("---")
    print("Offset: %.3f s (%i frames)" % (args.skip, offset))
    print("Plot time: %.3f s (%i frames; %i frames per beam/tune/pol)" %
          (args.plot_range, nFrames, nFrames // beampols))
    print("Chunks: %i" % nChunks)

    # Sanity check
    if offset > nFramesFile:
        raise RuntimeError("Requested offset is greater than file length")
    if nFrames > (nFramesFile - offset):
        raise RuntimeError(
            "Requested integration time+offset is greater than file length")

    junkFrame = drx.read_frame(fh)
    b, t, p = junkFrame.id
    while 2 * (t - 1) + p != 0:
        junkFrame = drx.read_frame(fh)
        b, t, p = junkFrame.id
        print(b, t, p)
    print(fh.tell())
    fh.seek(-drx.FRAME_SIZE, 1)

    # Master loop over all of the file chunks
    standMapper = []
    for i in range(nChunks):
        # Find out how many frames remain in the file.  If this number is larger
        # than the maximum number of frames we can work with at a time (maxFrames),
        # only deal with that chunk
        framesRemaining = nFrames - i * maxFrames
        if framesRemaining > maxFrames:
            framesWork = maxFrames
        else:
            framesWork = framesRemaining
        print("Working on chunk %i, %i frames remaining" %
              (i, framesRemaining))

        count = {}
        data = numpy.zeros((beampols, framesWork * 4096 // beampols),
                           dtype=numpy.csingle)

        # Inner loop that actually reads the frames into the data array
        print("Working on %.1f ms of data" %
              ((framesWork * 4096 / beampols / srate) * 1000.0))
        t0 = time.time()

        for j in xrange(framesWork):
            # Read in the next frame and anticipate any problems that could occur
            try:
                cFrame = drx.read_frame(fh, verbose=False)
            except errors.EOFError:
                break
            except errors.SyncError:
                #print("WARNING: Mark 5C sync error on frame #%i" % (int(fh.tell())/drx.FRAME_SIZE-1))
                continue

            beam, tune, pol = cFrame.id
            aStand = 4 * (beam - 1) + 2 * (tune - 1) + pol
            #print(aStand, beam, tune, pol)
            if aStand not in standMapper:
                standMapper.append(aStand)
                oStand = 1 * aStand
                aStand = standMapper.index(aStand)
                print(
                    "Mapping beam %i, tune. %1i, pol. %1i (%2i) to array index %3i"
                    % (beam, tune, pol, oStand, aStand))
            else:
                aStand = standMapper.index(aStand)

            if aStand not in count.keys():
                count[aStand] = 0
            #if cFrame.header.frame_count % 10000 == 0 and args.verbose:
            #	print("%2i,%1i,%1i -> %2i  %5i  %i" % (beam, tune, pol, aStand, cFrame.header.frame_count, cFrame.payload.timetag))

            #print(data.shape, count[aStand]*4096, (count[aStand]+1)*4096, cFrame.payload.data.shape)
            data[aStand, count[aStand] * 4096:(count[aStand] + 1) *
                 4096] = cFrame.payload.data
            # Update the counters so that we can average properly later on
            count[aStand] += 1

        # The plots:  This is setup for the current configuration of 20 beampols
        fig = plt.figure()
        figsX = int(round(math.sqrt(beampols)))
        figsY = beampols // figsX

        t1X = 1
        t1Y = 1

        offset = 0
        samples = 65536
        for sec in xrange(data.shape[1] // samples):
            if toClip:
                print("Plotting only the first %i samples (%.3f ms) of data" %
                      (samples, oldAverage * 1000.0))

            sortedMapper = sorted(standMapper)
            for k, aStand in enumerate(sortedMapper):
                i = standMapper.index(aStand)

                if standMapper[i] % 2 == 0:
                    ref = data[0, :]
                    t1R = t1X
                else:
                    ref = data[1, :]
                    t1R = t1Y

                (lag, cc), junkI, junkQ = crossCorrelate(
                    data[i, sec * samples:(sec + 1) * samples],
                    ref[offset + sec * samples:offset + (sec + 1) * samples])
                best = numpy.where(cc == cc.max())[0][0]
                if args.verbose:
                    print('tune %i pol. %s' %
                          (standMapper[i] % 4 // 2 + 1, standMapper[i] % 2))
                    print(' -> best peak of %.0f at a lag of %i samples' %
                          (cc.max(), lag[best]))
                    print(' -> NCM with tuning 1 of %.3f' % (cc.max() / t1R))

                # Plot
                ax = fig.add_subplot(figsX, figsY, k + 1)
                ax.plot(lag, cc, label='Same', color='blue')

                # Center on the peak
                best = numpy.where(cc == cc.max())[0][0]
                ax.set_xlim([lag[best - 50], lag[best + 50]])

                ax.set_title('Beam %i, Tune. %i, Pol. %i' %
                             (standMapper[i] // 4 + 1,
                              standMapper[i] % 4 // 2 + 1, standMapper[i] % 2))
                ax.set_xlabel('Lag [samples]')
                ax.set_ylabel('Analysis Sets')

                # Save the tuning 1 values for the peak of the CC function
                if standMapper[i] % 4 // 2 + 1 == 1:
                    if standMapper[i] % 2 == 0:
                        t1X = cc.max()
                    else:
                        t1Y = cc.max()

        plt.show()

        # Save image if requested
        if args.output is not None:
            fig.savefig(args.output)
Code Example #19
def main(args):
    # Get the file names
    meta = args.metadata
    data = args.filename

    # Get all observations and their start times
    try:
        ## LWA-1
        sdf = metabundle.get_sdf(meta)
        ses = metabundle.get_session_spec(meta)
        obs = metabundle.get_observation_spec(meta)
    except:
        ## LWA-SV
        ### Try again
        sdf = metabundleADP.get_sdf(meta)
        ses = metabundleADP.get_session_spec(meta)
        obs = metabundleADP.get_observation_spec(meta)
    obs.sort(_obs_comp)
    tStart = []
    oDetails = []
    for i,o in enumerate(obs):
        tStart.append( mjdmpm_to_datetime(o['mjd'], o['mpm']) )
        oDetails.append( {'m': o['mode'], 'd': o['dur'] / 1000.0, 'f': o['bw'], 
                          'p': o['project_id'], 's': o['session_id'], 'o': o['obs_id'], 
                          't': sdf.sessions[0].observations[o['obs_id']-1].target} )

        print("Observation #%i" % (o['obs_id']))
        print(" Start: %i, %i -> %s" % (o['mjd'], o['mpm'], tStart[-1]))
        print(" Mode: %s" % mode_to_string(o['mode']))
        print(" BW: %i" % o['bw'])
        print(" Target: %s" % sdf.sessions[0].observations[o['obs_id']-1].target)
    print(" ")

    # Figure out where in the file the various bits are.
    fh = open(data, 'rb')
    lf = drx.read_frame(fh)
    beam, j, k = lf.id
    if beam != obs[0]['drx_beam']:
        print('ERROR: Beam mis-match, metadata is for #%i, file is for #%i' % (obs[0]['drx_beam'], beam))
        sys.exit()
    firstFrame = lf.time.datetime
    if abs(firstFrame - min(tStart)) > timedelta(seconds=30):
        print('ERROR: Time mis-match, metadata is for %s, file is for %s' % (min(tStart), firstFrame))
        sys.exit()
    fh.seek(0)

    for i in range(len(tStart)):
        eof = False

        ## Get observation properties
        oStart = tStart[i]
        oMode = mode_to_string(oDetails[i]['m'])
        oDur  = oDetails[i]['d']
        oBW   = oDetails[i]['f']
        print("Seeking %s observation of %.3f seconds at %s" % (oMode, oDur, oStart))

        ## Get the correct reader to use
        if oMode == 'TBW':
            reader = tbw
            bwKey = None
            bwMult = 520.0 / 400
            fCount = 400
        elif oMode == 'TBN':
            reader = tbn
            bwKey = tbn.FILTER_CODES
            bwMult = 520.0 / 512
            fCount = 512
        else:
            reader = drx
            bwKey = drx.FILTER_CODES
            bwMult = 4.0 / 4096
            fCount = 4096

        ## Jump ahead to where the next frame should be, if needed
        if i != 0:
            pDur  = oDetails[i-1]['d']
            pBW   = oDetails[i-1]['f']

            nFramesSkip = int(pDur*bwKey[pBW]*bwMult)
            fh.seek(nFramesSkip*reader.FRAME_SIZE, 1)
            if fh.tell() >= os.path.getsize(data):
                fh.seek(-10*reader.FRAME_SIZE, 2)
                
        ## Figure out where we are and make sure we line up on a frame
        ## NOTE: This should never be needed
        fail = True
        while fail:
            try:
                frame = reader.read_frame(fh)
                fail = False
            except errors.SyncError:
                fh.seek(1, 1)
            except errors.EOFError:
                break
        fh.seek(-reader.FRAME_SIZE, 1)	

        ## Go in search of the start of the observation
        if frame.time.datetime < oStart:
            ### We aren't at the beginning yet, seek forwards
            print("-> At byte %i, time is %s < %s" % (fh.tell(), frame.time.datetime, oStart))

            while frame.time.datetime < oStart:
                try:
                    frame = reader.read_frame(fh)
                except errors.SyncError:		
                    fh.seek(1, 1)
                except errors.EOFError:
                    break
                #print(frame.time.datetime, oStart)

        elif frame.time.datetime > oStart:
            ### We've gone too far, seek backwards
            print("-> At byte %i, time is %s > %s" % (fh.tell(), frame.time.datetime, oStart))

            while frame.time.datetime > oStart:
                if fh.tell() == 0:
                    break
                fh.seek(-2*reader.FRAME_SIZE, 1)
                try:
                    frame = reader.read_frame(fh)
                except errors.SyncError:		
                    fh.seek(-1, 1)
                except errors.EOFError:
                    break
                #print(frame.time.datetime, oStart)
                
        else:
            ### We're there already
            print("-> At byte %i, time is %s = %s" % (fh.tell(), frame.time.datetime, oStart))
            
        ## Jump back exactly one frame so that the filehandle is in a position 
        ## to read the first frame that is part of the observation
        try:
            frame = reader.read_frame(fh)
            print("-> At byte %i, time is %s = %s" % (fh.tell(), frame.time.datetime, oStart))
            fh.seek(-reader.FRAME_SIZE, 1)
        except errors.EOFError:
            pass
            
        ## Update the bytes ranges
        if fh.tell() < os.path.getsize(data):
            oDetails[i]['b'] = fh.tell()
            oDetails[i]['e'] = -1
        else:
            oDetails[i]['b'] = -1
            oDetails[i]['e'] = -1

        if i != 0:
            oDetails[i-1]['e'] = fh.tell()

        ## Progress report
        if oDetails[i]['b'] >= 0:
            print('-> Obs.', oDetails[i]['o'], 'starts at byte', oDetails[i]['b'])
        else:
            print('-> Obs.', oDetails[i]['o'], 'starts after the end of the file')
    print(" ")

    # Report
    for i in range(len(tStart)):
        if oDetails[i]['b'] < 0:
            print("%s, Session %i, Observation %i: not found" % (oDetails[i]['p'], oDetails[i]['s'], oDetails[i]['o']))

        else:
            print("%s, Session %i, Observation %i: %i to %i (%i bytes)" % (oDetails[i]['p'], oDetails[i]['s'], oDetails[i]['o'], oDetails[i]['b'], oDetails[i]['e'], (oDetails[i]['e'] - oDetails[i]['b'])))
    print(" ")

    # Split
    if not args.list:
        for i in range(len(tStart)):
            if oDetails[i]['b'] < 0:
                continue
                
            ## Report
            print("Working on Observation %i" % (i+1,))
            
            ## Create the output name
            if args.source:
                outname = '%s_%i_%s.dat' % (oDetails[i]['p'], oDetails[i]['s'], oDetails[i]['t'].replace(' ', '').replace('/','').replace('&','and'))
            else:
                outname = '%s_%i_%i.dat' % (oDetails[i]['p'], oDetails[i]['s'], oDetails[i]['o'])
                
            oMode = mode_to_string(oDetails[i]['m'])

            ## Get the correct reader to use
            if oMode == 'TBW':
                reader = tbw

            elif oMode == 'TBN':
                reader = tbn
            else:
                reader = drx

            ## Get the number of frames
            if oDetails[i]['e'] > 0:
                nFramesRead = (oDetails[i]['e'] - oDetails[i]['b']) // reader.FRAME_SIZE
            else:
                nFramesRead = (os.path.getsize(data) - oDetails[i]['b']) // reader.FRAME_SIZE

            ## Split
            if os.path.exists(outname):
                if not args.force:
                    yn = input("WARNING: '%s' exists, overwrite? [Y/n] " % outname)
                else:
                    yn = 'y'
                    
                if yn not in ('n', 'N'):
                    os.unlink(outname)
                else:
                    print("WARNING: output file '%s' already exists, skipping" % outname)
                    continue
                    
            fh.seek(oDetails[i]['b'])
            
            t0 = time.time()
            oh = open(outname, 'wb')
            for sl in [2**i for i in range(17)[::-1]]:
                while nFramesRead >= sl:
                    temp = fh.read(sl*reader.FRAME_SIZE)
                    oh.write(temp)
                    nFramesRead -= sl
            oh.close()
            t1 = time.time()
            print("  Copied %i bytes in %.3f s (%.3f MB/s)" % (os.path.getsize(outname), t1-t0, os.path.getsize(outname)/1024.0**2/(t1-t0)))
    print(" ")
Code Example #20
def identify_section(fh, start=0, stop=-1, strict=True, min_frames=4096, verbose=True):
    if stop <= start:
        stop = os.path.getsize(fh.name)
    fh.seek(start)
    
    # Report
    if verbose:
        print("Working on %i to %i of '%s'..." % (start, stop, os.path.basename(fh.name)))
        
    # Make sure this is enough to work with
    if stop-start < drx.FRAME_SIZE*min_frames:
        if verbose:
            print("  too small for analysis, skipping")
        return None
        
    # Align on the start of a Mark5C packet...
    while True:
        try:
            junkFrame = drx.read_frame(fh)
            try:
                # ... that has a valid decimation
                srate = junkFrame.sample_rate
                break
            except ZeroDivisionError:
                pass
        except errors.SyncError:
            fh.seek(-drx.FRAME_SIZE+1, 1)
    fh.seek(-drx.FRAME_SIZE, 1)
    # ... and save that location
    frame_begin = junkFrame.payload.timetag
    file_begin = fh.tell()
    if verbose:
        print("  start @ %i with %i" % (file_begin, frame_begin))
        
    # Find the last valid Mark5C packet...
    fh.seek(stop-drx.FRAME_SIZE)
    while True:
        try:
            junkFrame = drx.read_frame(fh)
            try:
                # ... that has a valid decimation
                srate = junkFrame.sample_rate
                break
            except ZeroDivisionError:
                pass
        except errors.SyncError:
            fh.seek(-drx.FRAME_SIZE-1, 1)
    fh.seek(-drx.FRAME_SIZE, 1)
    # ... and save that location
    frame_end = junkFrame.payload.timetag
    file_end = fh.tell() + drx.FRAME_SIZE
    if verbose:
        print("  stop  @ %i with %i" % (file_end, frame_end))
        
    # Get how much the timetags should change and other basic information
    fh.seek(file_begin)
    ids = []
    for i in xrange(24*8):
        junkFrame = drx.read_frame(fh)
        b,t,p = junkFrame.id
        id = (t,p)
        if id not in ids:
            ids.append(id)
    ttStep = 4096*junkFrame.header.decimation
    if verbose:
        print("  %i frames with a timetag step of %i" % (len(ids), ttStep))
        
    # Difference
    nBytes = file_end - file_begin
    nFrames = nBytes // drx.FRAME_SIZE
    ttDiffFound = frame_end - frame_begin
    ttDiffExpected = nFrames // len(ids) * ttStep
    if verbose:
        print("  -> found timetag difference of    %i" % ttDiffFound)
        print("  -> expected timetag difference is %i" % ttDiffExpected)
        
    # Decide what to do
    if abs(ttDiffFound - ttDiffExpected) > ttStep*(1-strict):
        if verbose:
            print("  ====> mis-match, subsampling")
        file_middle = file_begin + (nFrames // 2) * drx.FRAME_SIZE
        parts0 = identify_section(fh, file_begin, file_middle, strict=strict, min_frames=min_frames, verbose=verbose)
        parts1 = identify_section(fh, file_middle, file_end, strict=strict, min_frames=min_frames, verbose=verbose)
        
    else:
        if verbose:
            print("  ====> good, done")
        parts0 = [[file_begin, file_end],]
        parts1 = None
        
    # Sort and merge
    partsList = []
    for parts in (parts0, parts1):
        if parts is None:
            continue
        for part in parts:
            partsList.append( part )
    partsList.sort()
        
    # Merge
    parts = []
    if len(partsList) > 0:
        parts.append( partsList[0] )
        for part in partsList[1:]:
            if part[0] == parts[-1][1]:
                parts[-1][1] = part[1]
            else:
                parts.append(part)
                
    return parts
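
A minimal usage sketch (not part of the original source): identify_section() returns a list of [begin, end] byte ranges whose time tags advance as expected, and each range start could then be refined with fine_tune_boundary_start() from Code Example #1. The file name below is hypothetical.

import os

# Hypothetical DRX capture file; identify_section() and fine_tune_boundary_start()
# (Code Example #1) are assumed to be in scope
with open('056777_000123456', 'rb') as fh:
    parts = identify_section(fh, 0, os.path.getsize(fh.name), verbose=True)
    for begin, end in (parts or []):
        begin = fine_tune_boundary_start(fh, begin)
        print("Usable section: bytes %i to %i" % (begin, end))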
Code Example #21
def main(args):
    filename = args.filename

    fh = open(filename, "rb")
    nFramesFile = os.path.getsize(filename) // drx.FRAME_SIZE
    while True:
        try:
            junkFrame = drx.read_frame(fh)
            try:
                srate = junkFrame.sample_rate
                break
            except ZeroDivisionError:
                pass
        except errors.SyncError:
            fh.seek(-drx.FRAME_SIZE + 1, 1)

    fh.seek(-drx.FRAME_SIZE, 1)

    beam, tune, pol = junkFrame.id
    tunepols = max(drx.get_frames_per_obs(fh))

    # Date & Central Frequency
    beginDate = junkFrame.time.datetime
    central_freq1 = 0.0
    central_freq2 = 0.0
    for i in xrange(32):
        junkFrame = drx.read_frame(fh)
        b, t, p = junkFrame.id
        if p == 0 and t == 1:
            central_freq1 = junkFrame.central_freq
        elif p == 0 and t == 2:
            central_freq2 = junkFrame.central_freq
        else:
            pass
    fh.seek(-32 * drx.FRAME_SIZE, 1)

    # Report on the file
    print("Filename: %s" % filename)
    print("Date of First Frame: %s" % str(beginDate))
    print("Beam: %i" % beam)
    print("Tune/Pols: %i" % tunepols)
    print("Sample Rate: %i Hz" % srate)
    print("Tuning Frequency: %.3f Hz (1); %.3f Hz (2)" %
          (central_freq1, central_freq2))
    print(" ")

    # Convert chunk length to total frame count
    chunkLength = int(args.length * srate / 4096 * tunepols)
    chunkLength = int(1.0 * chunkLength / tunepols) * tunepols

    # Convert chunk skip to total frame count
    chunkSkip = int(args.skip * srate / 4096 * tunepols)
    chunkSkip = int(1.0 * chunkSkip / tunepols) * tunepols

    # Output arrays
    clipFraction = []
    meanPower = []

    # Go!
    i = 1
    done = False
    print("   |           Clipping              |          Power          |")
    print("   |      1X      1Y      2X      2Y |    1X    1Y    2X    2Y |")
    print("---+---------------------------------+-------------------------+")

    while True:
        count = {0: 0, 1: 0, 2: 0, 3: 0}
        data = numpy.empty((4, chunkLength * 4096 // tunepols),
                           dtype=numpy.csingle)
        for j in xrange(chunkLength):
            # Read in the next frame and anticipate any problems that could occur
            try:
                cFrame = drx.read_frame(fh, verbose=False)
            except errors.EOFError:
                done = True
                break
            except errors.SyncError:
                continue

            beam, tune, pol = cFrame.id
            aStand = 2 * (tune - 1) + pol

            try:
                data[aStand, count[aStand] * 4096:(count[aStand] + 1) *
                     4096] = cFrame.payload.data

                # Update the counters so that we can average properly later on
                count[aStand] += 1
            except ValueError:
                pass

        if done:
            break

        else:
            data = numpy.abs(data)**2
            data = data.astype(numpy.int32)

            clipFraction.append(numpy.zeros(4))
            meanPower.append(data.mean(axis=1))
            for j in xrange(4):
                bad = numpy.nonzero(data[j, :] > args.trim_level)[0]
                clipFraction[-1][j] = 1.0 * len(bad) / data.shape[1]

            clip = clipFraction[-1]
            power = meanPower[-1]
            print(
                "%2i | %6.2f%% %6.2f%% %6.2f%% %6.2f%% | %5.2f %5.2f %5.2f %5.2f |"
                % (i, clip[0] * 100.0, clip[1] * 100.0, clip[2] * 100.0,
                   clip[3] * 100.0, power[0], power[1], power[2], power[3]))

            i += 1
            fh.seek(drx.FRAME_SIZE * chunkSkip, 1)

    clipFraction = numpy.array(clipFraction)
    meanPower = numpy.array(meanPower)

    clip = clipFraction.mean(axis=0)
    power = meanPower.mean(axis=0)

    print("---+---------------------------------+-------------------------+")
    print("%2s | %6.2f%% %6.2f%% %6.2f%% %6.2f%% | %5.2f %5.2f %5.2f %5.2f |" %
          ('M', clip[0] * 100.0, clip[1] * 100.0, clip[2] * 100.0,
           clip[3] * 100.0, power[0], power[1], power[2], power[3]))
Code Example #22
File: createConfigFile.py  Project: lwa-project/eLWA
def main(args):
    # Parse the command line
    filenames = args.filename

    # Check if the first argument on the command line is a directory.  If so,
    # use what is in that directory
    if os.path.isdir(filenames[0]):
        filenames = [
            os.path.join(filenames[0], filename)
            for filename in os.listdir(filenames[0])
        ]
        filenames.sort()

    # Convert the filenames to absolute paths
    filenames = [os.path.abspath(filename) for filename in filenames]

    # Open the database connection to NRAO to find the antenna locations
    try:
        db = database('params')
    except Exception as e:
        sys.stderr.write("WARNING: %s" % str(e))
        sys.stderr.flush()
        db = None

    # Pass 1 - Get the LWA metadata so we know where we are pointed
    context = {
        'observer': 'Unknown',
        'project': 'Unknown',
        'session': None,
        'vlaref': None
    }
    setup = None
    sources = []
    metadata = {}
    lwasite = {}
    for filename in filenames:
        # Figure out what to do with the file
        ext = os.path.splitext(filename)[1]
        if ext == '.tgz':
            ## LWA Metadata
            try:
                ## Extract the SDF
                if len(sources) == 0:
                    try:
                        sdf = metabundle.get_sdf(filename)
                    except Exception as e:
                        sdf = metabundleADP.get_sdf(filename)

                    context['observer'] = sdf.observer.name
                    context['project'] = sdf.id
                    context['session'] = sdf.sessions[0].id

                    comments = sdf.project_office.sessions[0]
                    mtch = CORR_CHANNELS.search(comments)
                    if mtch is not None:
                        corr_channels = int(mtch.group('channels'), 10)
                    else:
                        corr_channels = None
                    mtch = CORR_INTTIME.search(comments)
                    if mtch is not None:
                        corr_inttime = float(mtch.group('inttime'))
                    else:
                        corr_inttime = None
                    mtch = CORR_BASIS.search(comments)
                    if mtch is not None:
                        corr_basis = mtch.group('basis')
                    else:
                        sys.stderr.write(
                            "WARNING: No output correlation polarization basis defined, assuming 'linear'.\n"
                        )
                        corr_basis = 'linear'
                    if corr_channels is not None and corr_inttime is not None:
                        setup = {
                            'channels': corr_channels,
                            'inttime': corr_inttime,
                            'basis': corr_basis
                        }
                    else:
                        sys.stderr.write(
                            "WARNING: No or incomplete correlation configuration defined, setting to be defined at correlation time.\n"
                        )

                    for o, obs in enumerate(sdf.sessions[0].observations):
                        if type(obs).__name__ == 'Solar':
                            name = 'Sun'
                            intent = 'target'
                            ra = None
                            dec = None
                        elif type(obs).__name__ == 'Jovian':
                            name = 'Jupiter'
                            intent = 'target'
                            ra = None
                            dec = None
                        else:
                            name = obs.target
                            intent = obs.name
                            ra = ephem.hours(str(obs.ra))
                            dec = ephem.degrees(str(obs.dec))
                        tStart = mjdmpm_to_datetime(obs.mjd, obs.mpm)
                        tStop = mjdmpm_to_datetime(obs.mjd, obs.mpm + obs.dur)
                        sources.append({
                            'name': name,
                            'intent': intent,
                            'ra2000': ra,
                            'dec2000': dec,
                            'start': tStart,
                            'stop': tStop
                        })

                        ### Alternate phase centers
                        comments = sdf.project_office.observations[0][o]

                        alts = {}
                        for mtch in ALT_TARGET.finditer(comments):
                            alt_id = int(mtch.group('id'), 10)
                            alt_name = mtch.group('target')
                            try:
                                alts[alt_id]['name'] = alt_name
                            except KeyError:
                                alts[alt_id] = {
                                    'name': alt_name,
                                    'intent': 'dummy',
                                    'ra': None,
                                    'dec': None
                                }
                        for mtch in ALT_INTENT.finditer(comments):
                            alt_id = int(mtch.group('id'), 10)
                            alt_intent = mtch.group('intent')
                            try:
                                alts[alt_id]['intent'] = alt_intent
                            except KeyError:
                                alts[alt_id] = {
                                    'name': None,
                                    'intent': alt_intent,
                                    'ra': None,
                                    'dec': None
                                }
                        for mtch in ALT_RA.finditer(comments):
                            alt_id = int(mtch.group('id'), 10)
                            alt_ra = ephem.hours(mtch.group('ra'))
                            try:
                                alts[alt_id]['ra'] = alt_ra
                            except KeyError:
                                alts[alt_id] = {
                                    'name': None,
                                    'intent': 'dummy',
                                    'ra': alt_ra,
                                    'dec': None
                                }
                        for mtch in ALT_DEC.finditer(comments):
                            alt_id = int(mtch.group('id'), 10)
                            alt_dec = ephem.degrees(mtch.group('dec'))
                            try:
                                alts[alt_id]['dec'] = alt_dec
                            except KeyError:
                                alts[alt_id] = {
                                    'name': None,
                                    'intent': 'dummy',
                                    'ra': None,
                                    'dec': alt_dec
                                }
                        for alt_id in sorted(alts.keys()):
                            alt_name = alts[alt_id]['name']
                            alt_intent = alts[alt_id]['intent']
                            alt_ra = alts[alt_id]['ra']
                            alt_dec = alts[alt_id]['dec']
                            if alt_name is None or alt_ra is None or alt_dec is None:
                                sys.stderr.write(
                                    "WARNING: Incomplete alternate phase center %i, skipping.\n"
                                    % alt_id)
                            else:
                                sources.append({
                                    'name': alt_name,
                                    'intent': alt_intent,
                                    'ra2000': alt_ra,
                                    'dec2000': alt_dec,
                                    'start': tStart,
                                    'stop': tStop
                                })

                ## Extract the file information so that we can pair things together
                fileInfo = metabundle.get_session_metadata(filename)
                for obsID in fileInfo.keys():
                    metadata[fileInfo[obsID]['tag']] = filename

                ## Figure out LWA1 vs LWA-SV
                try:
                    cs = metabundle.get_command_script(filename)
                    for c in cs:
                        if c['subsystem_id'] == 'DP':
                            site = 'LWA1'
                            break
                        elif c['subsystem_id'] == 'ADP':
                            site = 'LWA-SV'
                            break
                except (RuntimeError, ValueError):
                    site = 'LWA-SV'
                for obsID in fileInfo.keys():
                    lwasite[fileInfo[obsID]['tag']] = site

            except Exception as e:
                sys.stderr.write("ERROR reading metadata file: %s\n" % str(e))
                sys.stderr.flush()

    # Setup what we need to write out a configuration file
    corrConfig = {
        'context': context,
        'setup': setup,
        'source': {
            'name': '',
            'ra2000': '',
            'dec2000': ''
        },
        'inputs': []
    }

    metadata = {}
    for filename in filenames:
        #print("%s:" % os.path.basename(filename))

        # Skip over empty files
        if os.path.getsize(filename) == 0:
            continue

        # Open the file
        fh = open(filename, 'rb')

        # Figure out what to do with the file
        ext = os.path.splitext(filename)[1]
        if ext == '':
            ## DRX
            try:
                ## Get the site
                try:
                    sitename = lwasite[os.path.basename(filename)]
                except KeyError:
                    sitename = 'LWA1'

                ## Get the location so that we can set site-specific parameters
                if sitename == 'LWA1':
                    xyz = LWA1_ECEF
                    off = args.lwa1_offset
                elif sitename == 'LWA-SV':
                    xyz = LWASV_ECEF
                    off = args.lwasv_offset
                else:
                    raise RuntimeError("Unknown LWA site '%s'" % site)

                ## Move into the LWA1 coordinate system
                ### ECEF to LWA1
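                ### (the rotation takes the ECEF offset into a south, east, zenith
                ### vector; the axis swap and sign flip below convert that into an
                ### east, north, up offset relative to LWA1)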
                rho = xyz - LWA1_ECEF
                sez = numpy.dot(LWA1_ROT, rho)
                enz = sez[[1, 0, 2]]  # pylint: disable=invalid-sequence-index
                enz[1] *= -1

                ## Read in the first few frames to get the start time
                frames = [drx.read_frame(fh) for i in xrange(1024)]
                streams = []
                freq1, freq2 = 0.0, 0.0
                for frame in frames:
                    beam, tune, pol = frame.id
                    if tune == 1:
                        freq1 = frame.central_freq
                    else:
                        freq2 = frame.central_freq
                    if (beam, tune, pol) not in streams:
                        streams.append((beam, tune, pol))
                tStart = frames[0].time.datetime
                tStartAlt = (frames[-1].time - 1023 // len(streams) * 4096 /
                             frames[-1].sample_rate).datetime
                tStartDiff = tStart - tStartAlt
                if abs(tStartDiff) > timedelta(microseconds=10000):
                    sys.stderr.write(
                        "WARNING: Stale data found at the start of '%s', ignoring\n"
                        % os.path.basename(filename))
                    sys.stderr.flush()
                    tStart = tStartAlt
                ### ^ Adjustment to the start time to deal with occasional problems
                ###   with stale data in the DR buffers at LWA-SV

                ## Read in the last few frames to find the end time
                fh.seek(os.path.getsize(filename) - 1024 * drx.FRAME_SIZE)
                backed = 0
                while backed < 2 * drx.FRAME_SIZE:
                    try:
                        drx.read_frame(fh)
                        fh.seek(-drx.FRAME_SIZE, 1)
                        break
                    except errors.SyncError:
                        backed += 1
                        fh.seek(-drx.FRAME_SIZE - 1, 1)
                for i in xrange(32):
                    try:
                        frame = drx.read_frame(fh)
                        beam, tune, _ = frame.id
                        if tune == 1:
                            freq1 = frame.central_freq
                        else:
                            freq2 = frame.central_freq
                    except errors.SyncError:
                        continue
                tStop = frame.time.datetime

                ## Save
                corrConfig['inputs'].append({
                    'file': filename,
                    'type': 'DRX',
                    'antenna': sitename,
                    'pols': 'X, Y',
                    'location': (enz[0], enz[1], enz[2]),
                    'clockoffset': (off, off),
                    'fileoffset': 0,
                    'beam': beam,
                    'tstart': tStart,
                    'tstop': tStop,
                    'freq': (freq1, freq2)
                })

            except Exception as e:
                sys.stderr.write("ERROR reading DRX file: %s\n" % str(e))
                sys.stderr.flush()

        elif ext == '.vdif':
            ## VDIF
            try:
                ## Read in the GUPPI header
                header = vdif.read_guppi_header(fh)

                ## Read in the first frame
                vdif.FRAME_SIZE = vdif.get_frame_size(fh)
                frame = vdif.read_frame(fh)
                antID = frame.id[0] - 12300
                tStart = frame.time.datetime
                nThread = vdif.get_thread_count(fh)

                ## Read in the last frame
                nJump = int(os.path.getsize(filename) / vdif.FRAME_SIZE)
                nJump -= 30
                fh.seek(nJump * vdif.FRAME_SIZE, 1)
                mark = fh.tell()
                while True:
                    try:
                        frame = vdif.read_frame(fh)
                        tStop = frame.time.datetime
                    except Exception as e:
                        break

                ## Find the antenna location
                pad, edate = db.get_pad('EA%02i' % antID, tStart)
                x, y, z = db.get_xyz(pad, tStart)
                #print("  Pad: %s" % pad)
                #print("  VLA relative XYZ: %.3f, %.3f, %.3f" % (x,y,z))

                ## Move into the LWA1 coordinate system
                ### relative to ECEF
                xyz = numpy.array([x, y, z])
                xyz += VLA_ECEF
                ### ECEF to LWA1
                rho = xyz - LWA1_ECEF
                sez = numpy.dot(LWA1_ROT, rho)
                enz = sez[[1, 0, 2]]  # pylint: disable=invalid-sequence-index
                enz[1] *= -1

                ## Set an apparent position if WiDAR is already applying a delay model
                apparent_enz = (None, None, None)
                if args.no_vla_delay_model:
                    apparent_xyz = VLA_ECEF
                    apparent_rho = apparent_xyz - LWA1_ECEF
                    apparent_sez = numpy.dot(LWA1_ROT, apparent_rho)
                    apparent_enz = apparent_sez[[1, 0, 2]]  # pylint: disable=invalid-sequence-index
                    apparent_enz[1] *= -1

                ## VLA time offset
                off = args.vla_offset

                ## Save
                corrConfig['context']['observer'] = header['OBSERVER']
                try:
                    corrConfig['context']['project'] = header[
                        'BASENAME'].split('_')[0]
                    corrConfig['context']['session'] = header[
                        'BASENAME'].split('_')[1].replace('sb', '')
                except IndexError:
                    corrConfig['context']['project'] = header[
                        'BASENAME'].split('.')[0]
                    corrConfig['context']['session'] = header[
                        'BASENAME'].split('.')[1].replace('sb', '')
                corrConfig['context']['vlaref'] = re.sub(
                    r'\.[0-9]+\.[0-9]+\.[AB][CD]-.*', '', header['BASENAME'])
                corrConfig['source']['name'] = header['SRC_NAME']
                corrConfig['source']['intent'] = 'target'
                corrConfig['source']['ra2000'] = header['RA_STR']
                corrConfig['source']['dec2000'] = header['DEC_STR']
                corrConfig['inputs'].append({
                    'file': filename,
                    'type': 'VDIF',
                    'antenna': 'EA%02i' % antID,
                    'pols': 'Y, X',
                    'location': (enz[0], enz[1], enz[2]),
                    'apparent_location': (apparent_enz[0], apparent_enz[1], apparent_enz[2]),
                    'clockoffset': (off, off),
                    'fileoffset': 0,
                    'pad': pad,
                    'tstart': tStart,
                    'tstop': tStop,
                    'freq': header['OBSFREQ']
                })

            except Exception as e:
                sys.stderr.write("ERROR reading VDIF file: %s\n" % str(e))
                sys.stderr.flush()

        elif ext == '.tgz':
            ## LWA Metadata
            try:
                ## Extract the file information so that we can pair things together
                fileInfo = metabundle.get_session_metadata(filename)
                for obsID in fileInfo.keys():
                    metadata[fileInfo[obsID]['tag']] = filename

            except Exception as e:
                sys.stderr.write("ERROR reading metadata file: %s\n" % str(e))
                sys.stderr.flush()

        # Done
        fh.close()

    # Close out the connection to NRAO
    try:
        db.close()
    except AttributeError:
        pass

    # Choose a VDIF reference file, if there is one, and mark whether or
    # not DRX files were found
    vdifRefFile = None
    isDRX = False
    for cinp in corrConfig['inputs']:
        if cinp['type'] == 'VDIF':
            if vdifRefFile is None:
                vdifRefFile = cinp
        elif cinp['type'] == 'DRX':
            isDRX = True

    # Purge DRX files that don't make sense and set a state variable so that
    # we can generate a warning about missing DRX files
    toPurge = []
    drxFound = False
    lwasvFound = False
    for cinp in corrConfig['inputs']:
        ### Sort out multiple DRX files - this only works if we have only one LWA station
        if cinp['type'] == 'DRX':
            if vdifRefFile is not None:
                l0, l1 = cinp['tstart'], cinp['tstop']
                v0, v1 = vdifRefFile['tstart'], vdifRefFile['tstop']
                ve = (v1 - v0).total_seconds()
                overlapWithVDIF = (v0 >= l0 and v0 < l1) or (l0 >= v0
                                                             and l0 < v1)
                lvo = (min([v1, l1]) - max([v0, l0])).total_seconds()
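                ### Keep a DRX file only if it overlaps the VDIF reference file
                ### and covers at least 25% of the VDIF time range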
                if not overlapWithVDIF or lvo < 0.25 * ve:
                    toPurge.append(cinp)
                drxFound = True
            if cinp['antenna'] == 'LWA-SV':
                lwasvFound = True
    for cinp in toPurge:
        del corrConfig['inputs'][corrConfig['inputs'].index(cinp)]

    # Sort the inputs based on the antenna name - this puts LWA1 first,
    # LWA-SV second, and the VLA at the end in 'EA' antenna order, i.e.,
    # EA01, EA02, etc.
    corrConfig['inputs'].sort(key=lambda x: 0 if x['antenna'] == 'LWA1' else (
        1 if x['antenna'] == 'LWA-SV' else int(x['antenna'][2:], 10)))

    # VDIF/DRX warning check/report
    if vdifRefFile is not None and isDRX and not drxFound:
        sys.stderr.write(
            "WARNING: DRX files provided but none overlapped with VDIF data")

    # Duplicate antenna check
    antCounts = {}
    for cinp in corrConfig['inputs']:
        try:
            antCounts[cinp['antenna']] += 1
        except KeyError:
            antCounts[cinp['antenna']] = 1
    for ant in antCounts.keys():
        if antCounts[ant] != 1:
            sys.stderr.write("WARNING: Antenna '%s' is defined %i times" %
                             (ant, antCounts[ant]))

    # Update the file offsets to get things lined up better
    tMax = max([cinp['tstart'] for cinp in corrConfig['inputs']])
    for cinp in corrConfig['inputs']:
        diff = tMax - cinp['tstart']
        offset = diff.days * 86400 + diff.seconds + diff.microseconds / 1e6
        cinp['fileoffset'] = max([0, offset])

    # Reconcile the source lists for when we have eLWA data.  This is needed so
    # that we use the source information contained in the VDIF files rather than
    # the stub information contained in the SDFs
    if len(sources) <= 1:
        if corrConfig['source']['name'] != '':
            ## Update the source information with what comes from the VLA
            try:
                sources[0] = corrConfig['source']
            except IndexError:
                sources.append(corrConfig['source'])
    # Update the dwell time using the minimum on-source time for all inputs if
    # there is only one source, i.e., for full eLWA runs
    if len(sources) == 1:
        sources[0]['start'] = max(
            [cinp['tstart'] for cinp in corrConfig['inputs']])
        sources[0]['stop'] = min(
            [cinp['tstop'] for cinp in corrConfig['inputs']])

    # Render the configuration
    startRef = sources[0]['start']
    s = 0
    for source in sources:
        startOffset = source['start'] - startRef
        startOffset = startOffset.total_seconds()

        dur = source['stop'] - source['start']
        dur = dur.total_seconds()

        ## Skip over dummy scans and scans that start after the files end
        if source['intent'] in (None, 'dummy'):
            continue
        if source['start'] > max(
            [cinp['tstop'] for cinp in corrConfig['inputs']]):
            print(
                "Skipping scan of %s which starts at %s, %.3f s after the data end"
                % (source['name'], source['start'],
                   (source['start'] -
                    max([cinp['tstop']
                         for cinp in corrConfig['inputs']])).total_seconds()))
            continue

        ## Small correction for the first scan to compensate for stale data at LWA-SV
        if lwasvFound and s == 0:
            startOffset += 10.0
            dur -= 10.0

        ## Skip over scans that are too short
        if dur < args.minimum_scan_length:
            continue

        ## Setup
        if args.output is None:
            fh = sys.stdout
        else:
            outname = args.output
            if len(sources) > 1:
                outname += str(s + 1)
            fh = open(outname, 'w')

        try:
            repo = git.Repo(os.path.dirname(os.path.abspath(__file__)))
            try:
                branch = repo.active_branch.name
                hexsha = repo.active_branch.commit.hexsha
            except TypeError:
                branch = '<detached>'
                hexsha = repo.head.commit.hexsha
            shortsha = hexsha[-7:]
            dirty = ' (dirty)' if repo.is_dirty() else ''
        except git.exc.GitError:
            branch = 'unknown'
            hexsha = 'unknown'
            shortsha = 'unknown'
            dirty = ''

        ## Preamble
        fh.write("# Created\n")
        fh.write("#  on %s\n" % datetime.now())
        fh.write("#  using %s, revision %s.%s%s\n" %
                 (os.path.basename(__file__), branch, shortsha, dirty))
        fh.write("\n")
        ## Observation context
        fh.write("Context\n")
        fh.write("  Observer  %s\n" % corrConfig['context']['observer'])
        fh.write("  Project   %s\n" % corrConfig['context']['project'])
        if corrConfig['context']['session'] is not None:
            fh.write("  Session   %s\n" % corrConfig['context']['session'])
        if corrConfig['context']['vlaref'] is not None:
            fh.write("  VLARef    %s\n" % corrConfig['context']['vlaref'])
        fh.write("EndContext\n")
        fh.write("\n")
        ## Configuration, if present
        if corrConfig['setup'] is not None:
            fh.write("Configuration\n")
            fh.write("  Channels     %i\n" % corrConfig['setup']['channels'])
            fh.write("  IntTime      %.3f\n" % corrConfig['setup']['inttime'])
            fh.write("  PolBasis     %s\n" % corrConfig['setup']['basis'])
            fh.write("EndConfiguration\n")
            fh.write("\n")
        ## Source
        fh.write("Source\n")
        fh.write("# Observation start is %s\n" % source['start'])
        fh.write("# Duration is %s\n" % (source['stop'] - source['start'], ))
        fh.write("  Name     %s\n" % source['name'])
        fh.write("  Intent   %s\n" % source['intent'].lower())
        if source['name'] not in ('Sun', 'Jupiter'):
            fh.write("  RA2000   %s\n" % source['ra2000'])
            fh.write("  Dec2000  %s\n" % source['dec2000'])
        fh.write("  Duration %.3f\n" % dur)
        fh.write("SourceDone\n")
        fh.write("\n")
        ## Input files
        for cinp in corrConfig['inputs']:
            fh.write("Input\n")
            fh.write("# Start time is %s\n" % cinp['tstart'])
            fh.write("# Stop time is %s\n" % cinp['tstop'])
            try:
                fh.write("# Beam is %i\n" % cinp['beam'])
            except KeyError:
                pass
            try:
                fh.write("# VLA pad is %s\n" % cinp['pad'])
            except KeyError:
                pass
            try:
                fh.write("# Frequency tuning 1 is %.3f Hz\n" % cinp['freq'][0])
                fh.write("# Frequency tuning 2 is %.3f Hz\n" % cinp['freq'][1])
            except TypeError:
                fh.write("# Frequency tuning is %.3f Hz\n" % cinp['freq'])
            fh.write("  File             %s\n" % cinp['file'])
            try:
                metaname = metadata[os.path.basename(cinp['file'])]
                fh.write("  MetaData         %s\n" % metaname)
            except KeyError:
                if cinp['type'] == 'DRX':
                    sys.stderr.write(
                        "WARNING: No metadata found for '%s', source %i\n" %
                        (os.path.basename(cinp['file']), s + 1))
                    sys.stderr.flush()
                pass
            fh.write("  Type             %s\n" % cinp['type'])
            fh.write("  Antenna          %s\n" % cinp['antenna'])
            fh.write("  Pols             %s\n" % cinp['pols'])
            fh.write("  Location         %.6f, %.6f, %.6f\n" %
                     cinp['location'])
            try:
                if cinp['apparent_location'][0] is not None:
                    fh.write("  ApparentLocation %.6f, %.6f, %.6f\n" %
                             cinp['apparent_location'])
            except KeyError:
                pass
            fh.write("  ClockOffset      %s, %s\n" % cinp['clockoffset'])
            fh.write("  FileOffset       %.3f\n" %
                     (startOffset + cinp['fileoffset'], ))
            fh.write("InputDone\n")
            fh.write("\n")
        if fh != sys.stdout:
            fh.close()

        # Increment the source/file counter
        s += 1
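
The coordinate handling above relies on a precomputed LWA1_ROT matrix to take ECEF offsets into the topocentric frame.  As a rough, self-contained sketch (not part of the script above), the equivalent east/north/up rotation can be written directly from a site latitude and longitude; the function name and arguments here are illustrative only:

# Minimal sketch of an ECEF -> east/north/up rotation about a reference site.
# The script above uses LWA1_ECEF and LWA1_ROT defined elsewhere instead.
import numpy

def ecef_to_enu(xyz, site_ecef, lat_rad, lon_rad):
    """Return the (east, north, up) offset of `xyz` relative to `site_ecef`."""
    rho = numpy.asarray(xyz, dtype=numpy.float64) - numpy.asarray(site_ecef, dtype=numpy.float64)
    sin_lat, cos_lat = numpy.sin(lat_rad), numpy.cos(lat_rad)
    sin_lon, cos_lon = numpy.sin(lon_rad), numpy.cos(lon_rad)
    rot = numpy.array([
        [-sin_lon,           cos_lon,          0.0    ],   # east
        [-sin_lat*cos_lon,  -sin_lat*sin_lon,  cos_lat],   # north
        [ cos_lat*cos_lon,   cos_lat*sin_lon,  sin_lat],   # up
    ])
    return numpy.dot(rot, rho)
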
Code Example #23
def main(args):
    filename = args[0]

    fh = open(filename, "rb")
    nFramesFile = os.path.getsize(filename) // drx.FRAME_SIZE
    junkFrame = drx.read_frame(fh)
    beam, tune, pol = junkFrame.id
    while 2 * (tune - 1) + pol != 0:
        junkFrame = drx.read_frame(fh)
        beam, tune, pol = junkFrame.id
    fh.seek(fh.tell() - drx.FRAME_SIZE)

    srate = junkFrame.sample_rate
    beams = drx.get_beam_count(fh)
    tunepols = drx.get_frames_per_obs(fh)
    tunepol = tunepols[0] + tunepols[1] + tunepols[2] + tunepols[3]
    beampols = tunepol

    # File summary
    out = "Filename: %s" % filename
    out += "\nBeams: %i" % beams
    out += "\nTune/Pols: %i %i %i %i" % tunepols
    out += "\nSample Rate: %i Hz" % srate
    out += "\nFrames: %i (%.3f s)" % (nFramesFile, 1.0 * nFramesFile /
                                      beampols * 4096 / srate)
    out += "\n==="
    print(out)

    tuningOffset = numpy.zeros(nFramesFile // 8, dtype=numpy.int64)
    try:
        screen = curses.initscr()
        curses.noecho()
        curses.cbreak()
        screen.nodelay(1)

        strdict = {'preamble': out}
        for i in xrange(tuningOffset.size):
            screen.clear()

            beamIDs = [0, 0, 0, 0]
            timetags = numpy.zeros(4, dtype=numpy.int64) - 1
            time_offsets = numpy.zeros(4, dtype=numpy.int64) - 1
            timeValues = numpy.zeros(4, dtype=numpy.float64)
            for j in xrange(4):
                # Read in the next frame and anticipate any problems that could occur
                try:
                    cFrame = drx.read_frame(fh, verbose=False)
                except errors.EOFError:
                    break
                except errors.SyncError:
                    #print("WARNING: Mark 5C sync error on frame #%i" % (int(fh.tell())/drx.FRAME_SIZE-1))
                    continue

                ## Save the time tag, time offset, and computed time values
                beam, tune, pol = cFrame.id
                aStand = 2 * (tune - 1) + pol
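                ## (aStand maps the four DRX streams onto 0..3: tuning 1/pol 0 -> 0,
                ##  tuning 1/pol 1 -> 1, tuning 2/pol 0 -> 2, tuning 2/pol 1 -> 3)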
                if timetags[aStand] == -1:
                    beamIDs[aStand] = (beam, tune, pol)
                    timetags[aStand] = cFrame.payload.timetag
                    time_offsets[aStand] = cFrame.header.time_offset
                    timeValues[aStand] = cFrame.time

            k = 0
            for id, tt, to, tv in zip(beamIDs, timetags, time_offsets,
                                      timeValues):
                strdict['b%i' % k] = id[0]
                strdict['t%i' % k] = id[1]
                strdict['p%i' % k] = id[2]

                strdict['tt%i' % k] = tt
                strdict['os%i' % k] = to
                strdict['tv%i' % k] = tv

                k += 1

            t1t = timetags[0] - time_offsets[0]
            t2t = timetags[3] - time_offsets[3]
            tuningOffset[i] = t2t - t1t

            strdict['ttd'] = t2t - t1t
            strdict['tvd'] = (t2t - t1t) / fS

            screen.addstr(0, 0, display.safe_substitute(strdict))
            screen.refresh()

            # Check for keypress and exit if Q
            c = screen.getch()
            if (c > 0):
                if chr(c) == 'q':
                    break
                if chr(c) == 'Q':
                    break

        curses.nocbreak()
        curses.echo()
        curses.endwin()

    except KeyboardInterrupt:
        curses.nocbreak()
        curses.echo()
        curses.endwin()

        tuningOffset = tuningOffset[0:i]

    print(display.safe_substitute(strdict))

    print(
        "T2-T1 time tag offset range: %i to %i (based on %i sets of frames)" %
        (tuningOffset.min(), tuningOffset.max(), len(tuningOffset)))
Code Example #24
def main(args):
    skip = args.skip
    fh = open(args.filename, "rb")
    
    # Get the first frame and find out what the first time tag is, what the
    # first frame number is, and what the sample rate is.  From the sample
    # rate, estimate how the time tag should advance between frames.
    while True:
        junkFrame = drx.read_frame(fh)
        try:
            sample_rate = junkFrame.sample_rate
            break
        except ZeroDivisionError:
            pass
    tagSkip = int(fS / sample_rate * junkFrame.payload.data.shape[0])
    fh.seek(-drx.FRAME_SIZE, 1)

    # Store the information about the first frame.
    prevTime = junkFrame.payload.timetag
    prevDate = junkFrame.time.datetime
    prevFrame = junkFrame.header.frame_count

    # Skip ahead
    fh.seek(int(skip*sample_rate/4096)*4*drx.FRAME_SIZE)

    # Report on the file
    print("Filename: %s" % os.path.basename(args.filename))
    print("Date of first frame: %i -> %s" % (prevTime, str(prevDate)))
    print("Sample rate: %i Hz" % sample_rate)
    print("Time tag skip per frame: %i" % tagSkip)
    if skip != 0:
        print("Skipping ahead %i frames (%.6f seconds)" % (int(skip*sample_rate/4096)*4, int(skip*sample_rate/4096)*4096/sample_rate))

    k = 0
    #k = 1
    prevTime = [0, 0, 0, 0]
    prevDate = ['', '', '', '']
    prevNumb = [0, 0, 0, 0]
    for i in xrange(4):
        currFrame = drx.read_frame(fh)
        beam, tune, pol = currFrame.id
        rID = 2*(tune-1) + pol

        prevTime[rID] = currFrame.payload.timetag
        prevDate[rID] = currFrame.time.datetime
        prevNumb[rID] = 1 + k // 4
        #prevNumb[rID] = k
        
        k += 1
    
    while True:
        try:
            currFrame = drx.read_frame(fh)
        except errors.EOFError:
            break
        except errors.SyncError:
            currNumb = 1 + k // 4
            
            print("ERROR: invalid frame (sync. word error) @ frame %8i" % currNumb)
            continue
        
        beam, tune, pol = currFrame.id
        rID = 2*(tune-1) + pol
        currTime = currFrame.payload.timetag
        currDate = currFrame.time.datetime
        currNumb = 1 + k // 4
        #currNumb = k

        if tune == 1 and pol == 0 and currNumb % 50000 == 0:
            print("Beam %i, tune %i, pol %i: frame %8i -> %i (%s)" % (beam, tune, pol, currNumb, currTime, currDate))

        if currTime < prevTime[rID]:
            print("ERROR: t.t. %i @ frame %i < t.t. %i @ frame %i" % (currTime, currNumb, prevTime[rID], prevNumb[rID]))
            print("       -> difference: %i (%.3f frames; %.5f seconds); %s" % (currTime-prevTime[rID], float(currTime-prevTime[rID])/tagSkip, float(currTime-prevTime[rID])/fS, str(currDate)))
            print("       -> beam %i, tuning %i, pol %i" % (beam, tune, pol))
        elif currTime > (prevTime[rID] + tagSkip):
            print("ERROR: t.t. %i @ frame %i > t.t. %i @ frame %i + skip" % (currTime, currNumb, prevTime[rID], prevNumb[rID]))
            print("       -> difference: %i (%.3f frames; %.5f seconds); %s" % (currTime-prevTime[rID], float(currTime-prevTime[rID])/tagSkip, float(currTime-prevTime[rID])/fS, str(currDate)))
            print("       -> beam %i, tuning %i, pol %i" % (beam, tune, pol))
        elif currTime < (prevTime[rID] + tagSkip):
            print("ERROR: t.t %i @ frame %i < t.t. %i @ frame %i + skip" % (currTime, currNumb, prevTime[rID], prevNumb[rID]))
            print("       -> difference: %i (%.3f frames; %.5f seconds; %s" % (currTime-prevTime[rID], float(currTime-prevTime[rID])/tagSkip, float(currTime-prevTime[rID])/fS, str(currDate)))
            print("       -> beam %i, tuning %i, pol %i" % (beam, tune, pol))
        else:
            pass
        
        prevTime[rID] = currTime
        prevDate[rID] = currDate
        prevNumb[rID] = currNumb
        k += 1
        
        del currFrame
        
    fh.close()
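
For reference, the per-frame time tag step that this script checks against is just the 4096 samples per frame counted at the fS = 196 MHz DP clock; a small worked example, assuming a 19.6 MS/s tuning rather than anything read from a real file:

# Worked example (assumed values): expected DRX time tag advance per frame.
fS = 196.0e6                               # DP sample clock, Hz
sample_rate = 19.6e6                       # assumed tuning sample rate, Hz
tagSkip = int(fS / sample_rate * 4096)     # -> 40960 clock ticks per frame
print(tagSkip, tagSkip / fS)               # 40960 ticks, about 209 microseconds
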
Code Example #25
def main(args):
    # Set the site
    site = None
    if args.lwa1:
        site = 'lwa1'
    elif args.lwasv:
        site = 'lwasv'
        
    # Open the file and find good data (not raw DRX data)
    fh = open(args.filename, 'rb')

    try:
        for i in xrange(5):
            junkFrame = drx.read_frame(fh)
        raise RuntimeError("ERROR: '%s' appears to be a raw DRX file, not a DR spectrometer file" % args.filename)
    except errors.SyncError:
        fh.seek(0)
        
    # Interrogate the file to figure out what frame sizes to expect, how many
    # frames there are, and what the transform length is
    FRAME_SIZE = drspec.get_frame_size(fh)
    nFrames = os.path.getsize(args.filename) // FRAME_SIZE
    nChunks = nFrames
    LFFT = drspec.get_transform_size(fh)

    # Read in the first frame to figure out the DP information
    junkFrame = drspec.read_frame(fh)
    fh.seek(-FRAME_SIZE, 1)
    srate = junkFrame.sample_rate
    t0 = junkFrame.time
    tInt = junkFrame.header.nints*LFFT/srate
    
    # Offset into the file, in spectrometer frames
    offset = int(round(args.skip / tInt))
    fh.seek(offset*FRAME_SIZE, 1)
    
    # Iterate on the offsets until we reach the right point in the file.  This
    # is needed to deal with files that start with only one tuning and/or a 
    # different sample rate.  
    while True:
        ## Figure out where in the file we are and what the current tuning/sample 
        ## rate is
        junkFrame = drspec.read_frame(fh)
        srate = junkFrame.sample_rate
        t1 = junkFrame.time
        tInt = junkFrame.header.nints*LFFT/srate
        fh.seek(-FRAME_SIZE, 1)
        
        ## See how far off the current frame is from the target
        tDiff = t1 - (t0 + args.skip)
        
        ## Half that to come up with a new seek parameter
        tCorr = -tDiff / 2.0
        cOffset = int(round(tCorr / tInt))
        offset += cOffset
        
        ## If the offset is zero, we are done.  Otherwise, apply the offset
        ## and check the location in the file again.
        if cOffset == 0:
            break
        fh.seek(cOffset*FRAME_SIZE, 1)
        
    # Update the offset actually used
    args.skip = t1 - t0
    nChunks = (os.path.getsize(args.filename) - fh.tell()) // FRAME_SIZE
    
    # Update the file contents
    beam = junkFrame.id
    central_freq1, central_freq2 = junkFrame.central_freq
    srate = junkFrame.sample_rate
    data_products = junkFrame.data_products
    t0 = junkFrame.time
    tInt = junkFrame.header.nints*LFFT/srate
    beginDate = junkFrame.time.datetime
        
    # Report
    print("Filename: %s" % args.filename)
    if args.metadata is not None:
        print("Metadata: %s" % args.metadata)
    elif args.sdf is not None:
        print("SDF: %s" % args.sdf)
    print("Date of First Frame: %s" % beginDate)
    print("Beam: %i" % beam)
    print("Sample Rate: %i Hz" % srate)
    print("Tuning Frequency: %.3f Hz (1); %.3f Hz (2)" % (central_freq1, central_freq2))
    print("Data Products: %s" % ','.join(data_products))
    print("Frames: %i (%.3f s)" % (nFrames, nFrames*tInt))
    print("---")
    print("Offset: %.3f s (%i frames)" % (args.skip, offset))
    print("Transform Length: %i" % LFFT)
    print("Integration: %.3f s" % tInt)
    
    # Setup the output file
    outname = os.path.split(args.filename)[1]
    outname = os.path.splitext(outname)[0]
    outname = '%s-waterfall.hdf5' % outname
    
    if os.path.exists(outname):
        if not args.force:
            yn = raw_input("WARNING: '%s' exists, overwrite? [Y/n] " % outname)
        else:
            yn = 'y'
            
        if yn not in ('n', 'N'):
            os.unlink(outname)
        else:
            raise RuntimeError("Output file '%s' already exists" % outname)
            
    f = hdfData.create_new_file(outname)
    obsList = {}
    if args.metadata is not None:
        try:
            project = metabundle.get_sdf(args.metadata)
        except Exception as e:
            if adpReady:
                project = metabundleADP.get_sdf(args.metadata)
            else:
                raise e
                
        sdfBeam  = project.sessions[0].drx_beam
        spcSetup = project.sessions[0].spcSetup
        if sdfBeam != beam:
            raise RuntimeError("Metadata is for beam #%i, but data is from beam #%i" % (sdfBeam, beam))
            
        for i,obs in enumerate(project.sessions[0].observations):
            sdfStart = mcs.mjdmpm_to_datetime(obs.mjd, obs.mpm)
            sdfStop  = mcs.mjdmpm_to_datetime(obs.mjd, obs.mpm + obs.dur)
            obsChunks = int(numpy.ceil(obs.dur/1000.0 * drx.FILTER_CODES[obs.filter] / (spcSetup[0]*spcSetup[1])))
            
            obsList[i+1] = (sdfStart, sdfStop, obsChunks)
            
        hdfData.fill_from_metabundle(f, args.metadata)
        
    elif args.sdf is not None:
        try:
            project = sdf.parse_sdf(args.sdf)
        except Exception as e:
            if adpReady:
                project = sdfADP.parse_sdf(args.sdf)
            else:
                raise e
                
        sdfBeam  = project.sessions[0].drx_beam
        spcSetup = project.sessions[0].spcSetup
        if sdfBeam != beam:
            raise RuntimeError("Metadata is for beam #%i, but data is from beam #%i" % (sdfBeam, beam))
            
        for i,obs in enumerate(project.sessions[0].observations):
            sdfStart = mcs.mjdmpm_to_datetime(obs.mjd, obs.mpm)
            sdfStop  = mcs.mjdmpm_to_datetime(obs.mjd, obs.mpm + obs.dur)
            obsChunks = int(numpy.ceil(obs.dur/1000.0 * drx.FILTER_CODES[obs.filter] / (spcSetup[0]*spcSetup[1])))
            
            obsList[i+1] = (sdfStart, sdfStop, obsChunks)
            
        hdfData.fill_from_sdf(f, args.sdf, station=site)
        
    else:
        obsList[1] = (beginDate, datetime(2222,12,31,23,59,59), nChunks)
        
        hdfData.fill_minimum(f, 1, beam, srate, station=site)
        
    data_products = junkFrame.data_products
    for o in sorted(obsList.keys()):
        for t in (1,2):
            hdfData.create_observation_set(f, o, t, numpy.arange(LFFT, dtype=numpy.float64), obsList[o][2], data_products)
            
    f.attrs['FileGenerator'] = 'drspec2hdf.py'
    f.attrs['InputData'] = os.path.basename(args.filename)
    
    # Create the various HDF group holders
    ds = {}
    for o in sorted(obsList.keys()):
        obs = hdfData.get_observation_set(f, o)
        
        ds['obs%i' % o] = obs
        ds['obs%i-time' % o] = hdfData.get_time(f, o)
        
        for t in (1,2):
            ds['obs%i-freq%i' % (o, t)] = hdfData.get_data_set(f, o, t, 'freq')
            for p in data_products:
                ds["obs%i-%s%i" % (o, p, t)] = hdfData.get_data_set(f, o, t, p)
            ds['obs%i-Saturation%i' % (o, t)] = hdfData.get_data_set(f, o, t, 'Saturation')
            
    # Loop over DR spectrometer frames to fill in the HDF5 file
    pbar = progress.ProgressBar(max=nChunks)
    o = 1
    j = 0
    
    firstPass = True
    for i in xrange(nChunks):
        frame = drspec.read_frame(fh)
        
        cTime = frame.time.datetime
        if cTime > obsList[o][1]:
            # Increment to the next observation
            o += 1
            
            # If we have reached the end, exit...
            try:
                obsList[o]
                
                firstPass = True
            except KeyError:
                sys.stdout.write('%s\r' % (' '*pbar.span))
                sys.stdout.flush()
                print("End of observing block according to SDF, exiting")
                break
                
        if cTime < obsList[o][0]:
            # Skip over data that occurs before the start of the observation
            continue
            
        try:
            if frame.time > oTime + 1.001*tInt:
                print('Warning: Time tag error at frame %i; %.3f > %.3f + %.3f' % (i, frame.time, oTime, tInt))
        except NameError:
            pass
        oTime = frame.time
        
        if firstPass:
            # Otherwise, continue on...
            central_freq1, central_freq2 = frame.central_freq
            srate = frame.sample_rate
            tInt  = frame.header.nints*LFFT/srate
            
            freq = numpy.fft.fftshift( numpy.fft.fftfreq(LFFT, d=1.0/srate) )
            freq = freq.astype(numpy.float64)
            
            sys.stdout.write('%s\r' % (' '*pbar.span))
            sys.stdout.flush()
            print("Switching to Obs. #%i" % o)
            print("-> Tunings: %.1f Hz, %.1f Hz" % (central_freq1, central_freq2))
            print("-> Sample Rate: %.1f Hz" % srate)
            print("-> Integration Time: %.3f s" % tInt)
            sys.stdout.write(pbar.show()+'\r')
            sys.stdout.flush()
            
            j = 0
            ds['obs%i-freq1' % o][:] = freq + central_freq1
            ds['obs%i-freq2' % o][:] = freq + central_freq2
            
            obs = ds['obs%i' % o]
            obs.attrs['tInt'] = tInt
            obs.attrs['tInt_Units'] = 's'
            obs.attrs['LFFT'] = LFFT
            obs.attrs['nChan'] = LFFT
            obs.attrs['RBW'] = freq[1]-freq[0]
            obs.attrs['RBW_Units'] = 'Hz'
            
            firstPass = False
            
        # Load the data from the spectrometer frame into the HDF5 group
        ds['obs%i-time' % o][j] = (frame.time[0], frame.time[1])
        
        ds['obs%i-Saturation1' % o][j,:] = frame.payload.saturations[0:2]
        ds['obs%i-Saturation2' % o][j,:] = frame.payload.saturations[2:4]
        
        for t in (1,2):
            for p in data_products:
                ds['obs%i-%s%i' % (o, p, t)][j,:] = getattr(frame.payload, "%s%i" % (p, t-1), None)
        j += 1
        
        # Update the progress bar
        pbar.inc()
        if i % 10 == 0:
            sys.stdout.write(pbar.show()+'\r')
            sys.stdout.flush()
            
    sys.stdout.write(pbar.show()+'\n')
    sys.stdout.flush()
    
    # Done
    fh.close()

    # Save the output to a HDF5 file
    f.close()
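
The per-observation frequency axes written above are simply the shifted FFT bin frequencies offset by each tuning frequency; a minimal sketch of that step, using assumed example values rather than anything read from a real file:

# Build an absolute frequency axis for one tuning (assumed example values).
import numpy

LFFT, srate, central_freq1 = 1024, 19.6e6, 74.0e6
freq = numpy.fft.fftshift(numpy.fft.fftfreq(LFFT, d=1.0/srate))
freq = freq.astype(numpy.float64)
freq1 = freq + central_freq1               # absolute frequencies in Hz
print(freq1.min()/1e6, freq1.max()/1e6)    # roughly 64.2 to 83.8 MHz
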
Code Example #26
    def test_drx_buffer_reorder_flush(self):
        """Test the DRX ring buffer's flush() function with reordering."""

        fh = open(drxFile, 'rb')
        junkFrame = drx.read_frame(fh)
        b, t, p = junkFrame.id
        fh.seek(-drx.FRAME_SIZE, 1)

        # Create the FrameBuffer instance
        frameBuffer = buffer.DRXFrameBuffer(beams=[b,], tunes=[1, 2],
                                            pols=[0, 1], nsegments=2,
                                            reorder=True)

        # Go
        while True:
            try:
                cFrame = drx.read_frame(fh)
            except errors.EOFError:
                break
            except errors.SyncError:
                continue

            frameBuffer.append(cFrame)
            cFrames = frameBuffer.get()

            if cFrames is None:
                continue

            # Make sure it has the right number of frames
            self.assertEqual(len(cFrames), 4)

            # Check the order
            for i in range(1, len(cFrames)):
                pB, pT, pP = cFrames[i - 1].id
                cB, cT, cP = cFrames[i].id

                pID = 4 * pB + 2 * (pT - 1) + pP
                cID = 4 * cB + 2 * (cT - 1) + cP
                self.assertTrue(cID > pID)

        fh.close()

        # Flush the buffer
        for cFrames in frameBuffer.flush():
            # Make sure the dump has one of the expected time tags
            self.assertTrue(
                cFrames[0].payload.timetag in (257355782095346056, ))

            # Make sure it has the right number of frames
            self.assertEqual(len(cFrames), 4)

            # Check the order
            for i in range(1, len(cFrames)):
                pB, pT, pP = cFrames[i - 1].id
                cB, cT, cP = cFrames[i].id

                pID = 4 * pB + 2 * (pT - 1) + pP
                cID = 4 * cB + 2 * (cT - 1) + cP
                self.assertTrue(cID > pID)
Code Example #27
def main(args):
    fh = open(args.filename, "rb")

    try:
        for i in xrange(5):
            junkFrame = drx.read_frame(fh)
        raise RuntimeError(
            "ERROR: '%s' appears to be a raw DRX file, not a DR spectrometer file"
            % args.filename)
    except errors.SyncError:
        fh.seek(0)

    # Interrogate the file to figure out what frame sizes to expect, how many
    # frames there are, and what the transform length is
    FRAME_SIZE = drspec.get_frame_size(fh)
    nFrames = os.path.getsize(args.filename) // FRAME_SIZE
    nChunks = nFrames
    LFFT = drspec.get_transform_size(fh)

    # Read in the first frame to figure out the DP information
    junkFrame = drspec.read_frame(fh)
    fh.seek(-FRAME_SIZE, 1)
    srate = junkFrame.sample_rate
    t0 = junkFrame.time
    tInt = junkFrame.header.nints * LFFT / srate

    # Update the file contents
    beam = junkFrame.id
    central_freq1, central_freq2 = junkFrame.central_freq
    srate = junkFrame.sample_rate
    data_products = junkFrame.data_products
    t0 = junkFrame.time
    tInt = junkFrame.header.nints * LFFT / srate
    beginDate = junkFrame.time.datetime

    # Report
    print("Filename: %s" % args.filename)
    print("Date of First Frame: %s" % beginDate)
    print("Beam: %i" % beam)
    print("Sample Rate: %i Hz" % srate)
    print("Tuning Frequency: %.3f Hz (1); %.3f Hz (2)" %
          (central_freq1, central_freq2))
    print("Data Products: %s" % ','.join(data_products))
    print("Frames: %i (%.3f s)" % (nFrames, nFrames * tInt))
    print("---")
    print("Transform Length: %i" % LFFT)
    print("Integration: %.3f s" % tInt)

    # Convert chunk length to total frame count
    chunkLength = int(args.length / tInt)

    # Convert chunk skip to total frame count
    chunkSkip = int(args.skip / tInt)

    # Output arrays
    clipFraction = []
    meanPower = []

    # Go!
    i = 1
    done = False
    print("   |%sClipping%s |%sPower %s |" %
          (" " * (8 * len(data_products) - 4), " " *
           (8 * len(data_products) - 4), " " *
           (6 * len(data_products) - 3), " " * (6 * len(data_products) - 3)))
    out = "   |      1X      1Y      2X      2Y |"
    for t in (1, 2):
        for dp in data_products:
            out += "%6s" % ("%i%s" % (t, dp))
    out += " |"
    print(out)
    print("-" * len(out))

    while True:
        count = {0: 0, 1: 0, 2: 0, 3: 0}
        sats = numpy.empty((4, chunkLength), dtype=numpy.float32)
        data = numpy.empty((2 * len(data_products), chunkLength * LFFT),
                           dtype=numpy.float32)
        for j in xrange(chunkLength):
            # Read in the next frame and anticipate any problems that could occur
            try:
                cFrame = drspec.read_frame(fh)
            except errors.EOFError:
                done = True
                break
            except errors.SyncError:
                continue

            for t in (1, 2):
                for p, dp in enumerate(data_products):
                    l = len(data_products) * (t - 1) + p
                    data[l, j * LFFT:(j + 1) * LFFT] = getattr(
                        cFrame.payload, '%s%i' % (dp, t - 1))
            sats[:,
                 j] = numpy.array(cFrame.payload.saturations) / (tInt * srate)

        if done:
            break

        else:
            clipFraction.append(sats.mean(axis=1))
            meanPower.append(data.mean(axis=1))

            clip = clipFraction[-1]
            power = meanPower[-1]

            out = "%2i | %6.2f%% %6.2f%% %6.2f%% %6.2f%% |" % (
                i, clip[0] * 100.0, clip[1] * 100.0, clip[2] * 100.0,
                clip[3] * 100.0)
            for t in (1, 2):
                for p in xrange(len(data_products)):
                    out += " %5.2f" % (power[len(data_products) *
                                             (t - 1) + p], )
            out += " |"
            print(out)

            i += 1
            fh.seek(FRAME_SIZE * chunkSkip, 1)

    clipFraction = numpy.array(clipFraction)
    meanPower = numpy.array(meanPower)

    clip = clipFraction.mean(axis=0)
    power = meanPower.mean(axis=0)

    print("-" * len(out))
    out = "%2s | %6.2f%% %6.2f%% %6.2f%% %6.2f%% |" % (
        'M', clip[0] * 100.0, clip[1] * 100.0, clip[2] * 100.0,
        clip[3] * 100.0)
    for t in (1, 2):
        for p in xrange(len(data_products)):
            out += " %5.2f" % (power[len(data_products) * (t - 1) + p], )
    out += " |"
    print(out)
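
The chunking above turns the requested args.length and args.skip durations into frame counts through the per-frame integration time; a small worked example with assumed values, not taken from a real file:

# Worked example (assumed values): DR spectrometer frames per requested chunk.
nints, LFFT, srate = 768, 1024, 19.6e6     # assumed frame parameters
tInt = nints * LFFT / srate                # seconds of data per frame (~0.040 s)
chunkLength = int(10.0 / tInt)             # frames needed to cover ~10 s (~249)
chunkSkip = int(30.0 / tInt)               # frames to skip for a ~30 s gap (~747)
print(tInt, chunkLength, chunkSkip)
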
Code Example #28
def main(args):
    for filename in args:
        with open(filename, 'rb') as fh:
            nframe = os.path.getsize(filename) // drx.FRAME_SIZE
            frame0 = drx.read_frame(fh)
            fh.seek((nframe-1)*drx.FRAME_SIZE, 0)
            frameN = drx.read_frame(fh)
            fh.seek(0)
            
            print('Time Range:')
            print('  Start:', frame0.time.dp_timetag, '->', frame0.time.datetime)
            print('  Stop: ', frameN.time.dp_timetag, '->', frameN.time.datetime)
            print('  Total frames:', nframe)
            
            sets = []
            times = {}
            tuning_word1, tuning_word2 = None, None
            central_freq1, central_freq2 = None, None
            sample_rate1, sample_rate2 = None, None
            for i in range(1024):
                try:
                    frame = drx.read_frame(fh)
                except errors.EOFError:
                    break
                    
                id = frame.id
                if id not in sets:
                    sets.append(id)
                    
                tt = frame.time.dp_timetag
                try:
                    times[id].append(tt)
                except KeyError:
                    times[id] = [tt,]
                    
                beam, tune, pol = id
                if tune == 1:
                    tuning_word1 = frame.payload.tuning_word
                    central_freq1 = frame.central_freq
                    sample_rate1 = frame.sample_rate
                else:
                    tuning_word2 = frame.payload.tuning_word
                    central_freq2 = frame.central_freq
                    sample_rate2 = frame.sample_rate
            sets.sort()
            expected_step = int(round(4096 * (196e6 / sample_rate1)))
            
            print('Frequency Range:')
            if central_freq1 is not None:
                print('  Tuning 1:', tuning_word1, '->', '%.3f MHz' % (central_freq1/1e6,), '@', '%.3f MHz' % (sample_rate1/1e6,))
            else:
                print('  Tuning 1:', 'not found')
            if central_freq2 is not None:
                print('  Tuning 2:', tuning_word2, '->', '%.3f MHz' % (central_freq2/1e6,), '@', '%.3f MHz' % (sample_rate2/1e6,))
            else:
                print('  Tuning 2:', 'not found')
                
            print('Frame Sets:')
            for id in sets:
                print('  Beam %i, tuning %i, pol. %i' % id)
                
            print('Time Flow:')
            for id in sets:
                tts = times[id]
                offsets = {}
                for i in range(1, len(tts)):
                    step = tts[i] - tts[i-1]
                    try:
                        offsets[step] += 1
                    except KeyError:
                        offsets[step] = 1
                print('  Beam %i, tuning %i, pol. %i' % id)
                for step in sorted(list(offsets.keys())):
                    print('    Timetag step of %i:' % step, '%i occurrences' % offsets[step], '*' if step == expected_step else '')
Code Example #29
def main(args):
    fh = open(args.filename, "rb")
    nFramesFile = os.path.getsize(args.filename) // drx.FRAME_SIZE

    while True:
        junkFrame = drx.read_frame(fh)
        try:
            srate = junkFrame.sample_rate
            break
        except ZeroDivisionError:
            pass
    fh.seek(-drx.FRAME_SIZE, 1)

    beams = drx.get_beam_count(fh)
    tunepols = drx.get_frames_per_obs(fh)
    tunepol = tunepols[0] + tunepols[1] + tunepols[2] + tunepols[3]
    beampols = tunepol

    # Offset in frames for beampols beam/tuning/pol. sets
    offset = int(round(args.offset * srate / 4096 * beampols))
    offset = int(1.0 * offset / beampols) * beampols
    args.offset = 1.0 * offset / beampols * 4096 / srate
    fh.seek(offset * drx.FRAME_SIZE)

    # Make sure that the file chunk size is an integer multiple
    # of the number of beampols.
    maxFrames = int((19144 * 4) / beampols) * beampols

    # Setup the statistics data set
    if args.stats:
        if args.plot_range < 0.1:
            args.plot_range = 0.5

    # Number of frames to integrate over
    nFrames = int(args.plot_range * srate / 4096 * beampols)
    nFrames = int(1.0 * nFrames / beampols) * beampols
    args.plot_range = 1.0 * nFrames / beampols * 4096 / srate

    # Number of remaining chunks
    nChunks = int(math.ceil(1.0 * (nFrames) / maxFrames))

    # File summary
    print("Filename: %s" % args.filename)
    print("Beams: %i" % beams)
    print("Tune/Pols: %i %i %i %i" % tunepols)
    print("Sample Rate: %i Hz" % srate)
    print("Frames: %i (%.3f s)" %
          (nFramesFile, 1.0 * nFramesFile / beampols * 4096 / srate))
    print("---")
    print("Offset: %.3f s (%i frames)" % (args.offset, offset))
    print("Plot time: %.3f s (%i frames; %i frames per beam/tune/pol)" %
          (args.plot_range, nFrames, nFrames // beampols))
    print("Chunks: %i" % nChunks)

    # Sanity check
    if offset > nFramesFile:
        raise RuntimeError("Requested offset is greater than file length")
    if nFrames > (nFramesFile - offset):
        raise RuntimeError(
            "Requested integration time+offset is greater than file length")

    # Align the file handle so that the first frame read in the
    # main analysis loop is from tuning 1, polarization 0
    junkFrame = drx.read_frame(fh)
    b, t, p = junkFrame.id
    while 2 * (t - 1) + p != 0:
        junkFrame = drx.read_frame(fh)
        b, t, p = junkFrame.id
    fh.seek(-drx.FRAME_SIZE, 1)

    # Master loop over all of the file chunks
    standMapper = []
    for i in xrange(nChunks):
        # Find out how many frames remain in the file.  If this number is larger
        # than the maximum of frames we can work with at a time (maxFrames),
        # only deal with that chunk
        framesRemaining = nFrames - i * maxFrames
        if framesRemaining > maxFrames:
            framesWork = maxFrames
        else:
            framesWork = framesRemaining
        print("Working on chunk %i, %i frames remaining" %
              (i, framesRemaining))

        count = {0: 0, 1: 0, 2: 0, 3: 0}
        data = numpy.zeros((beampols, framesWork * 4096 // beampols),
                           dtype=numpy.csingle)

        # Inner loop that actually reads the frames into the data array
        print("Working on %.1f ms of data" %
              ((framesWork * 4096 / beampols / srate) * 1000.0))
        t0 = time.time()

        for j in xrange(framesWork):
            # Read in the next frame and anticipate any problems that could occur
            try:
                cFrame = drx.read_frame(fh, verbose=False)
            except errors.EOFError:
                break
            except errors.SyncError:
                #print("WARNING: Mark 5C sync error on frame #%i" % (int(fh.tell())/drx.FRAME_SIZE-1))
                continue

            beam, tune, pol = cFrame.id
            aStand = 2 * (tune - 1) + pol

            data[aStand, count[aStand] * 4096:(count[aStand] + 1) *
                 4096] = numpy.abs(cFrame.payload.data)**2

            # Update the counters so that we can average properly later on
            count[aStand] += 1

        # Statistics
        print("Running robust statistics")
        means = [robust.mean(data[i, :]) for i in xrange(data.shape[0])]
        stds = [robust.std(data[i, :]) for i in xrange(data.shape[0])]

        if args.stats:
            ## Report statistics
            print("Mean: %s" % ' '.join(["%.3f" % m for m in means]))
            print("StdD: %s" % ' '.join(["%.3f" % s for s in stds]))
            print("Levels:")

            ## Count'em up
            j = 0
            counts = [
                1,
            ] * data.shape[0]
            while (means[i] + j * stds[i] <= 98) and max(counts) != 0:
                counts = [
                    len(
                        numpy.where(
                            numpy.abs(data[i, :] - means[i]) >= j *
                            stds[i])[0]) for i in xrange(data.shape[0])
                ]
                print(" %2isigma (%5.1f%%): %s" %
                      (j, 100.0 * (1 - erf(j / numpy.sqrt(2))), ' '.join([
                          "%7i (%5.1f%%)" % (c, 100.0 * c / data.shape[1])
                          for c in counts
                      ])))
                j += 1

            ## Why j-2?  Well, j is 1 more than the last iteration.  So, that last iteration
            ## is j-1,  which is always filled with 0s by construction.  So, the last crazy
            ## bin is j-2.
            jP = j - 2
            if jP > 20:
                counts = [
                    len(
                        numpy.where(
                            numpy.abs(data[i, :] - means[i]) >= jP *
                            stds[i])[0]) for i in xrange(data.shape[0])
                ]
                for i in xrange(data.shape[0]):
                    if counts[i] > 0:
                        break

                if counts[i] == 1:
                    print(
                        " -> Clip-o-rama likely occuring with %i %i-sigma detection on tuning %i, pol %i"
                        % (counts[i], jP, i // 2 + 1, i % 2))
                else:
                    print(
                        " -> Clip-o-rama likely occuring with %i %i-sigma detections on tuning %i, pol %i"
                        % (counts[i], jP, i // 2 + 1, i % 2))

        else:
            outfile = os.path.splitext(args.filename)[0]
            outfile = '%s.txt' % outfile
            outfh = open(outfile, 'w')

            # Plot possible clip-o-rama and flag it
            print("Computing power derivatives w.r.t. time")
            deriv = numpy.zeros_like(data)
            for i in xrange(data.shape[0]):
                deriv[i, :] = numpy.roll(data[i, :], -1) - data[i, :]

            # The plots:  This is setup for the current configuration of 20 beampols
            print("Plotting")
            fig = plt.figure()
            figsX = int(round(math.sqrt(beampols)))
            figsY = beampols // figsX

            for i in xrange(data.shape[0]):
                ax = fig.add_subplot(figsX, figsY, i + 1)
                ax.plot(args.offset + numpy.arange(0, data.shape[1]) / srate,
                        data[i, :])

                ## Mark areas of crazy derivatives
                bad = numpy.where(
                    deriv[i, :] > 20 * stds[i] * numpy.sqrt(2))[0]
                for j in bad:
                    outfh.write(
                        "Clip-o-rama on tuning %i, pol. %i at %.6f seconds\n" %
                        (i // 2 + 1, i % 2, args.offset + j / srate))
                    print("Clip-o-rama on tuning %i, pol. %i at %.6f seconds" %
                          (i // 2 + 1, i % 2, args.offset + j / srate))
                    ax.vlines(args.offset + j / srate,
                              -10,
                              100,
                              linestyle='--',
                              color='red',
                              linewidth=2.0)

                ## Mark areas of crazy power levels
                bad = numpy.where(data[i, :] == 98)[0]
                for j in bad:
                    fh.write(
                        "Saturation on tuning %i, pol. %i at %.6f seconds\n" %
                        (i // 2 + 1, i % 2, args.offset + j / srate))
                    print("Saturation on tuning %i, pol. %i at %.6f seconds" %
                          (i // 2 + 1, i % 2, args.offset + j / srate))
                    ax.vlines(args.offset + j / srate,
                              -10,
                              100,
                              linestyle='-.',
                              color='red')

                ax.set_ylim([-10, 100])

                ax.set_title('Beam %i, Tune. %i, Pol. %i' %
                             (beam, i // 2 + 1, i % 2))
                ax.set_xlabel('Time [seconds]')
                ax.set_ylabel('I$^2$ + Q$^2$')

            fh.close()
            if args.do_plot:
                plt.show()
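
As an aside, the sigma-level table printed above is just a comparison of the observed outlier counts against the Gaussian expectation 1 - erf(j/sqrt(2)). Below is a minimal standalone sketch of that comparison using plain (non-robust) statistics; the names 'sigma_level_table' and 'power' are illustrative and not part of the script above.

def sigma_level_table(power, saturation=98):
    # Count samples beyond j*sigma and compare each count with the fraction
    # a Gaussian distribution would place that far out
    from math import erf, sqrt
    import numpy
    mean, std = power.mean(), power.std()
    j = 0
    observed = power.size
    while mean + j*std <= saturation and observed > 0:
        observed = int((numpy.abs(power - mean) >= j*std).sum())
        expected = 100.0 * (1 - erf(j / sqrt(2)))
        print("%2i-sigma: %7i observed (%5.1f%% of samples expected)" % (j, observed, expected))
        j += 1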
Code example #30
0
def main(args):
    LFFT = args.fft_length

    stand1 = 0
    stand2 = int(args.dipole_id_y)
    filenames = args.filename

    # Build up the station
    if args.lwasv:
        site = stations.lwasv
    else:
        site = stations.lwa1

    # Get the antennas we need (and a fake one for the beam)
    rawAntennas = site.antennas

    antennas = []

    dipole = None
    xyz = numpy.zeros((len(rawAntennas), 3))
    i = 0
    for ant in rawAntennas:
        if ant.stand.id == stand2 and ant.pol == 0:
            dipole = ant
        xyz[i, 0] = ant.stand.x
        xyz[i, 1] = ant.stand.y
        xyz[i, 2] = ant.stand.z
        i += 1
    arrayX = xyz[:, 0].mean()
    arrayY = xyz[:, 1].mean()
    arrayZ = xyz[:, 2].mean()

    ## Fake one down here...
    beamStand = stations.Stand(0, arrayX, arrayY, arrayZ)
    beamFEE = stations.FEE('Beam', 0, gain1=0, gain2=0, status=3)
    beamCable = stations.Cable('Beam', 0, vf=1.0)
    beamAntenna = stations.Antenna(0,
                                   stand=beamStand,
                                   pol=0,
                                   theta=0,
                                   phi=0,
                                   status=3)
    beamAntenna.fee = beamFEE
    beamAntenna.feePort = 1
    beamAntenna.cable = beamCable

    antennas.append(beamAntenna)

    ## Dipole down here...
    ### NOTE
    ### Here we zero out the cable length for the dipole since the delay
    ### setup that is used for these observations already takes the
    ### cable/geometric delays into account.  We shouldn't need anything
    ### else to get good fringes.
    dipole.cable.length = 0
    antennas.append(dipole)

    # Loop over the input files...
    for filename in filenames:
        fh = open(filename, "rb")
        nFramesFile = os.path.getsize(filename) // drx.FRAME_SIZE
        #junkFrame = drx.read_frame(fh)
        #fh.seek(0)
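        # Align on the start of a frame that also has a valid decimation -
        # reading sample_rate raises ZeroDivisionError while the header is
        # not yet valid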
        while True:
            try:
                junkFrame = drx.read_frame(fh)
                try:
                    srate = junkFrame.sample_rate
                    t0 = junkFrame.time
                    break
                except ZeroDivisionError:
                    pass
            except errors.SyncError:
                fh.seek(-drx.FRAME_SIZE + 1, 1)

        fh.seek(-drx.FRAME_SIZE, 1)

        beam, tune, pol = junkFrame.id
        srate = junkFrame.sample_rate

        tunepols = drx.get_frames_per_obs(fh)
        tunepols = tunepols[0] + tunepols[1] + tunepols[2] + tunepols[3]
        beampols = tunepols

        # Offset in frames for beampols beam/tuning/pol. sets
        offset = int(args.skip * srate / 4096 * beampols)
        offset = int(1.0 * offset / beampols) * beampols
        fh.seek(offset * drx.FRAME_SIZE, 1)

        # Iterate on the offsets until we reach the right point in the file.  This
        # is needed to deal with files that start with only one tuning and/or a
        # different sample rate.
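        # Only a fraction of the time error is removed on each pass (see the
        # tCorr calculation below), so the search converges even when the
        # tuning setup or sample rate changes partway through the file.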
        while True:
            ## Figure out where in the file we are and what the current tuning/sample
            ## rate is
            junkFrame = drx.read_frame(fh)
            srate = junkFrame.sample_rate
            t1 = junkFrame.time
            tunepols = drx.get_frames_per_obs(fh)
            tunepol = tunepols[0] + tunepols[1] + tunepols[2] + tunepols[3]
            beampols = tunepol
            fh.seek(-drx.FRAME_SIZE, 1)

            ## See how far off the current frame is from the target
            tDiff = t1 - (t0 + args.skip)

            ## Take a fraction of that (an eighth) as the new seek correction
            tCorr = -tDiff / 8.0
            cOffset = int(tCorr * srate / 4096 * beampols)
            cOffset = int(1.0 * cOffset / beampols) * beampols
            offset += cOffset

            ## If the offset is zero, we are done.  Otherwise, apply the offset
            ## and check the location in the file again.
            if cOffset == 0:
                break
            fh.seek(cOffset * drx.FRAME_SIZE, 1)

        # Update the offset actually used
        args.skip = t1 - t0
        offset = int(round(args.skip * srate / 4096 * beampols))
        offset = int(1.0 * offset / beampols) * beampols

        tnom = junkFrame.header.time_offset
        tStart = junkFrame.time

        # Get the DRX frequencies
        cFreq1 = 0.0
        cFreq2 = 0.0
        for i in xrange(32):
            junkFrame = drx.read_frame(fh)
            b, t, p = junkFrame.id
            if p == 0 and t == 1:
                cFreq1 = junkFrame.central_freq
            elif p == 0 and t == 2:
                cFreq2 = junkFrame.central_freq
            else:
                pass
        fh.seek(-32 * drx.FRAME_SIZE, 1)

        # Align the files as close as possible by the time tags and then make sure that
        # the first frame processed is from tuning 1, pol 0.
        junkFrame = drx.read_frame(fh)
        beam, tune, pol = junkFrame.id
        pair = 2 * (tune - 1) + pol
        j = 0
        while pair != 0:
            junkFrame = drx.read_frame(fh)
            beam, tune, pol = junkFrame.id
            pair = 2 * (tune - 1) + pol
            j += 1
        fh.seek(-drx.FRAME_SIZE, 1)
        print("Shifted beam %i data by %i frames (%.4f s)" %
              (beam, j, j * 4096 / srate / 4))

        # Set integration time
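        # (rounded below to a whole number of 4096-sample DRX frames)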
        tInt = args.avg_time
        nFrames = int(round(tInt * srate / 4096))
        tInt = nFrames * 4096 / srate

        # Total file duration in seconds
        tFile = nFramesFile / 4 * 4096 / srate

        # Report
        print("Filename: %s" % filename)
        print("  Sample Rate: %i Hz" % srate)
        print("  Tuning 1: %.1f Hz" % cFreq1)
        print("  Tuning 2: %.1f Hz" % cFreq2)
        print("  ===")
        print("  Integration Time: %.3f s" % tInt)
        print("  Integrations in File: %i" % int(tFile / tInt))
        print("  Duration of File: %f" % tFile)
        print("  Offset: %f s" % offset)

        if args.duration != 0:
            nChunks = int(round(args.duration / tInt))
        else:
            nChunks = int(tFile / tInt)

        print("Processing: %i integrations" % nChunks)

        # Here we start the HDF5 file
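        # Layout: /Time/Timesteps holds (start time, integration length,
        # elapsed seconds) per integration, /Frequencies/Tuning{1,2} hold the
        # two frequency axes, and /Visibilities/Tuning{1,2} hold the three
        # baselines (two autocorrelations plus the beam x dipole cross) per
        # integration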
        outname = os.path.split(filename)[1]
        outname = os.path.splitext(outname)[0]
        outname = "%s.hdf5" % outname
        outfile = h5py.File(outname, 'w')
        group1 = outfile.create_group("Time")
        group2 = outfile.create_group("Frequencies")
        group3 = outfile.create_group("Visibilities")
        out = raw_input("Target Name: ")
        outfile.attrs["OBJECT"] = out
        out = raw_input("Polarization (X/Y): ")
        outfile.attrs["POLARIZATION"] = out
        dset1 = group1.create_dataset("Timesteps", (nChunks, 3),
                                      numpy.float64,
                                      maxshape=(nChunks, 3))
        dset2 = group2.create_dataset("Tuning1", (LFFT, ),
                                      numpy.float64,
                                      maxshape=(LFFT, ))
        dset3 = group2.create_dataset("Tuning2", (LFFT, ),
                                      numpy.float64,
                                      maxshape=(LFFT, ))
        dset4 = group3.create_dataset("Tuning1", (nChunks, 3, LFFT),
                                      numpy.complex64,
                                      maxshape=(nChunks, 3, LFFT))
        dset5 = group3.create_dataset("Tuning2", (nChunks, 3, LFFT),
                                      numpy.complex64,
                                      maxshape=(nChunks, 3, LFFT))

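        # The frame buffer reorders incoming frames so that get() returns a
        # complete, time-aligned set across both tunings and polarizations
        # (or None when a full set is not yet available)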
        drxBuffer = buffer.DRXFrameBuffer(beams=[beam,], tunes=[1, 2], pols=[0, 1])
        data = numpy.zeros((2, 2, 4096 * nFrames), dtype=numpy.complex64)

        pb = ProgressBarPlus(max=nChunks)
        tsec = numpy.zeros(1, dtype=numpy.float64)
        for i in xrange(nChunks):
            j = 0
            while j < nFrames:
                for k in xrange(4):
                    try:
                        cFrame = drx.read_frame(fh)
                        drxBuffer.append(cFrame)
                    except errors.SyncError:
                        pass

                cFrames = drxBuffer.get()
                if cFrames is None:
                    continue

                for cFrame in cFrames:
                    if j == 0:
                        tStart = cFrame.time
                    beam, tune, pol = cFrame.id
                    pair = 2 * (tune - 1) + pol

                    if tune == 1:
                        data[0, pol,
                             j * 4096:(j + 1) * 4096] = cFrame.payload.data
                    else:
                        data[1, pol,
                             j * 4096:(j + 1) * 4096] = cFrame.payload.data

                j += 1

            # Correlate
            blList1, freq1, vis1 = fxc.FXMaster(data[0, :, :],
                                                antennas,
                                                LFFT=LFFT,
                                                overlap=1,
                                                include_auto=True,
                                                verbose=False,
                                                sample_rate=srate,
                                                central_freq=cFreq1,
                                                pol='XX',
                                                return_baselines=True,
                                                gain_correct=False,
                                                clip_level=0)

            blList2, freq2, vis2 = fxc.FXMaster(data[1, :, :],
                                                antennas,
                                                LFFT=LFFT,
                                                overlap=1,
                                                include_auto=True,
                                                verbose=False,
                                                sample_rate=srate,
                                                central_freq=cFreq2,
                                                pol='XX',
                                                return_baselines=True,
                                                gain_correct=False,
                                                clip_level=0)

            if i == 0:
                tsec = tInt / 2
                outfile.attrs["STANDS"] = numpy.array([stand1, stand2])
                outfile.attrs["SRATE"] = srate
                date = datetime.fromtimestamp(tStart).date()
                outfile.attrs["DATE"] = str(date)
                dset2.write_direct(freq1)
                dset3.write_direct(freq2)
            else:
                tsec += tInt

            temp = numpy.zeros(3, dtype=numpy.float64)
            temp[0] = tStart
            temp[1] = tInt
            temp[2] = tsec
            dset1.write_direct(temp, dest_sel=numpy.s_[i])
            dset4.write_direct(vis1, dest_sel=numpy.s_[i])
            dset5.write_direct(vis2, dest_sel=numpy.s_[i])

            pb.inc(amount=1)
            sys.stdout.write(pb.show() + '\r')
            sys.stdout.flush()

        sys.stdout.write(pb.show() + '\r')
        sys.stdout.write('\n')
        sys.stdout.flush()
        outfile.close()

        # Plot
        fig = plt.figure()
        i = 0
        for bl, vi in zip(blList1, vis1):
            ax = fig.add_subplot(4, 3, i + 1)
            ax.plot(freq1 / 1e6, numpy.unwrap(numpy.angle(vi)))
            ax.set_title('Stand %i - Stand %i' %
                         (bl[0].stand.id, bl[1].stand.id))
            ax = fig.add_subplot(4, 3, i + 4)
            ax.plot(freq1 / 1e6, numpy.abs(vi))
            i += 1

            coeff = numpy.polyfit(freq1, numpy.unwrap(numpy.angle(vi)), 1)
            #print(coeff[0]/2/numpy.pi*1e9, coeff[1]*180/numpy.pi)
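            # Note: for a pure delay tau the cross-correlation phase is
            # phase(f) = 2*pi*f*tau + phi0, so coeff[0]/(2*pi) is the residual
            # delay in seconds (the commented-out print above uses 1e9 to
            # quote it in ns) and coeff[1] is the phase offset in radians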

        i = 6
        for bl, vi in zip(blList2, vis2):
            ax = fig.add_subplot(4, 3, i + 1)
            ax.plot(freq2 / 1e6, numpy.unwrap(numpy.angle(vi)))
            ax.set_title('Stand %i - Stand %i' %
                         (bl[0].stand.id, bl[1].stand.id))
            ax = fig.add_subplot(4, 3, i + 4)
            ax.plot(freq2 / 1e6, numpy.abs(vi))
            i += 1

            coeff = numpy.polyfit(freq2, numpy.unwrap(numpy.angle(vi)), 1)