Example #1
    def test_drspec_metadata(self):
        """Test finding out the DR spectrometer metadata."""

        fh = open(drspecFile, 'rb')
        cFrame = drspec.read_frame(fh)
        fh.seek(0)

        # Beam
        self.assertEqual(cFrame.id, 1)

        # Sample rate
        self.assertAlmostEqual(cFrame.sample_rate, 19.6e6, 1)
        self.assertAlmostEqual(cFrame.sample_rate, drspec.get_sample_rate(fh),
                               1)

        # Filter code
        self.assertEqual(cFrame.filter_code, 7)
        self.assertEqual(cFrame.filter_code,
                         drspec.get_sample_rate(fh, filter_code=True))

        # FFT windows per integration
        self.assertEqual(cFrame.ffts_per_integration, 6144)
        self.assertEqual(cFrame.ffts_per_integration,
                         drspec.get_ffts_per_integration(fh))

        # Transform size
        self.assertEqual(cFrame.transform_size, 1024)
        self.assertEqual(cFrame.transform_size, drspec.get_transform_size(fh))

        # Integration time
        self.assertAlmostEqual(cFrame.integration_time, 0.32099265, 8)
        self.assertAlmostEqual(cFrame.integration_time,
                               drspec.get_integration_time(fh), 8)

        fh.close()
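
The metadata helpers exercised in this test can also be used on their own. A minimal sketch, assuming the drspec module comes from lsl.reader (as these examples imply) and that 'mydata.drspec' is a placeholder file name:

from lsl.reader import drspec  # assumed import path


def summarize_drspec(filename='mydata.drspec'):
    """Print the basic DR spectrometer metadata for a file (illustrative)."""
    with open(filename, 'rb') as fh:
        frame = drspec.read_frame(fh)
        fh.seek(0)
        print("Beam:              %i" % frame.id)
        print("Sample rate:       %.1f Hz" % drspec.get_sample_rate(fh))
        print("Filter code:       %i" % drspec.get_sample_rate(fh, filter_code=True))
        print("FFTs/integration:  %i" % drspec.get_ffts_per_integration(fh))
        print("Transform size:    %i" % drspec.get_transform_size(fh))
        print("Integration time:  %.3f s" % drspec.get_integration_time(fh))
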
Example #2
    def test_drspec_sort(self):
        """Test sorting DR spectrometer frames by time tags."""

        fh = open(drspecFile, 'rb')
        # Frames 1 through 7
        frames = []
        for i in range(1, 8):
            frames.append(drspec.read_frame(fh))

        frames.sort()
        frames = frames[::-1]

        for i in range(1, len(frames)):
            self.assertTrue(frames[i - 1] >= frames[i])
        fh.close()
Example #3
    def test_drspec_read(self):
        """Test reading in a frame from a DR spectrometer file."""

        fh = open(drspecFile, 'rb')
        # First frame is really from the DR spectrometer and stores the beam ID
        frame1 = drspec.read_frame(fh)
        beam = frame1.id
        self.assertEqual(beam, 1)

        # Second frame
        frame2 = drspec.read_frame(fh)
        beam = frame2.id
        self.assertEqual(beam, 1)
        fh.close()

        # The special "data" attribute
        data = frame2.payload.data
        self.assertEqual(len(data.shape), 1)
        self.assertEqual(data.shape[0], 1024)
        for attr in ('XX0', 'XX1', 'YY0', 'YY1'):
            d0 = getattr(frame2.payload, attr, None)
            d1 = data[attr]
            for i in range(1024):
                self.assertAlmostEqual(d0[i], d1[i], 6)
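
The record-array 'data' view and the per-product attributes checked above expose the same values, so either can be used when processing a file. A short sketch, assuming the same field names ('XX0', 'XX1', 'YY0', 'YY1') and import path; mean_xx0 is a hypothetical helper:

import numpy

from lsl.reader import drspec  # assumed import path


def mean_xx0(filename, nframes=10):
    """Average the tuning-1 XX spectra over the first few frames (illustrative)."""
    spectra = []
    with open(filename, 'rb') as fh:
        for _ in range(nframes):
            frame = drspec.read_frame(fh)
            # frame.payload.XX0 and frame.payload.data['XX0'] hold the same values
            spectra.append(frame.payload.data['XX0'])
    return numpy.mean(spectra, axis=0)
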
Example #4
    def test_drspec_comps(self):
        """Test the DR spectrometer frame comparison operators (>, <, etc.) for time tags."""

        fh = open(drspecFile, 'rb')
        # Frames 1 through 7
        frames = []
        for i in range(1, 8):
            frames.append(drspec.read_frame(fh))
        fh.close()

        self.assertTrue(0 < frames[0])
        self.assertFalse(0 > frames[0])
        self.assertTrue(frames[-1] >= frames[0])
        self.assertFalse(frames[-1] <= frames[0])
        self.assertTrue(frames[0] == frames[0])
        self.assertFalse(frames[0] == frames[-1])
        self.assertFalse(frames[0] != frames[0])
Example #5
    def test_drspec_errors(self):
        """Test reading in all frames from a truncated DR spectrometer file."""

        fh = open(drspecFile, 'rb')
        # Frames 1 through 7
        for i in range(1, 8):
            frame = drspec.read_frame(fh)

        # Last frame should be an error (errors.EOFError)
        self.assertRaises(errors.EOFError, drspec.read_frame, fh)
        fh.close()

        # If we offset into the file by 1 byte, we should get a
        # sync error (errors.SyncError).
        fh = open(drspecFile, 'rb')
        fh.seek(1)
        self.assertRaises(errors.SyncError, drspec.read_frame, fh)
        fh.close()
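
The two error types tested here suggest a defensive read loop: stop cleanly on errors.EOFError and resynchronize on errors.SyncError, which is the same pattern the scripts further down use. A minimal sketch under those assumptions; read_all_frames is a hypothetical helper:

from lsl.reader import drspec, errors  # assumed import paths


def read_all_frames(fh):
    """Read frames until the file is exhausted, skipping unsynchronized reads."""
    frames = []
    while True:
        try:
            frames.append(drspec.read_frame(fh))
        except errors.EOFError:
            # Truncated or exhausted file: stop cleanly
            break
        except errors.SyncError:
            # Lost sync: retry from the next read position
            continue
    return frames
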
Example #6
    def test_drspec_math(self):
        """Test mathematical operations on DR spectrometer frame data via frames."""

        fh = open(drspecFile, 'rb')
        # Frames 1 through 7
        frames = []
        for i in range(1, 8):
            frames.append(drspec.read_frame(fh))
        fh.close()

        npts = frames[0].payload.XX0.size

        # Multiplication
        frameT = frames[0] * 2.0
        for i in range(npts):
            self.assertAlmostEqual(frameT.payload.XX0[i],
                                   2 * frames[0].payload.XX0[i], 2)
        frameT *= 2.0
        for i in range(npts):
            self.assertAlmostEqual(frameT.payload.XX1[i],
                                   4 * frames[0].payload.XX1[i], 2)
        frameT = frames[0] * frames[1]
        for i in range(npts):
            self.assertAlmostEqual(
                frameT.payload.YY0[i],
                frames[0].payload.YY0[i] * frames[1].payload.YY0[i], 2)

        # Addition
        frameA = frames[0] + 2.0
        for i in range(npts):
            self.assertAlmostEqual(frameA.payload.XX0[i],
                                   2 + frames[0].payload.XX0[i], 2)
        frameA += 2.0
        for i in range(npts):
            self.assertAlmostEqual(frameA.payload.XX1[i],
                                   4 + frames[0].payload.XX1[i], 2)
        frameA = frames[0] + frames[1]
        for i in range(npts):
            self.assertAlmostEqual(
                frameA.payload.YY0[i],
                frames[0].payload.YY0[i] + frames[1].payload.YY0[i], 2)
Example #7
    def test_drspec_math(self):
        """Test mathematical operations on DR spectrometer frame data via frames."""

        fh = open(drspecFile, 'rb')
        # Frames 1 through 7
        frames = []
        for i in range(1, 8):
            frames.append(drspec.read_frame(fh))
        fh.close()

        npts = frames[0].payload.XX0.size

        # Multiplication
        frameT = frames[0] * 2.0
        numpy.testing.assert_allclose(frameT.payload.XX0,
                                      2 * frames[0].payload.XX0,
                                      atol=1e-6)
        frameT *= 2.0
        numpy.testing.assert_allclose(frameT.payload.XX1,
                                      4 * frames[0].payload.XX1,
                                      atol=1e-6)
        frameT = frames[0] * frames[1]
        numpy.testing.assert_allclose(frameT.payload.YY0,
                                      frames[0].payload.YY0 *
                                      frames[1].payload.YY0,
                                      atol=1e-6)

        # Addition
        frameA = frames[0] + 2.0
        numpy.testing.assert_allclose(frameA.payload.XX0,
                                      2 + frames[0].payload.XX0,
                                      atol=1e-6)
        frameA += 2.0
        numpy.testing.assert_allclose(frameA.payload.XX0,
                                      4 + frames[0].payload.XX0,
                                      atol=1e-6)
        frameA = frames[0] + frames[1]
        numpy.testing.assert_allclose(frameA.payload.YY0,
                                      frames[0].payload.YY0 +
                                      frames[1].payload.YY0,
                                      atol=1e-6)
Example #8
def main(args):
    # Set the site
    site = None
    if args.lwa1:
        site = 'lwa1'
    elif args.lwasv:
        site = 'lwasv'
        
    # Open the file and find good data (not raw DRX data)
    fh = open(args.filename, 'rb')

    try:
        for i in xrange(5):
            junkFrame = drx.read_frame(fh)
        raise RuntimeError("ERROR: '%s' appears to be a raw DRX file, not a DR spectrometer file" % args.filename)
    except errors.SyncError:
        fh.seek(0)
        
    # Interrogate the file to figure out what frame sizes to expect, how many
    # frames there are, and what the transform length is
    FRAME_SIZE = drspec.get_frame_size(fh)
    nFrames = os.path.getsize(args.filename) // FRAME_SIZE
    nChunks = nFrames
    LFFT = drspec.get_transform_size(fh)

    # Read in the first frame to figure out the DP information
    junkFrame = drspec.read_frame(fh)
    fh.seek(-FRAME_SIZE, 1)
    srate = junkFrame.sample_rate
    t0 = junkFrame.time
    tInt = junkFrame.header.nints*LFFT/srate
    
    # Offset into the file, in frames
    offset = int(round(args.skip / tInt))
    fh.seek(offset*FRAME_SIZE, 1)
    
    # Iterate on the offsets until we reach the right point in the file.  This
    # is needed to deal with files that start with only one tuning and/or a 
    # different sample rate.  
    while True:
        ## Figure out where in the file we are and what the current tuning/sample 
        ## rate is
        junkFrame = drspec.read_frame(fh)
        srate = junkFrame.sample_rate
        t1 = junkFrame.time
        tInt = junkFrame.header.nints*LFFT/srate
        fh.seek(-FRAME_SIZE, 1)
        
        ## See how far off the current frame is from the target
        tDiff = t1 - (t0 + args.skip)
        
        ## Half that to come up with a new seek parameter
        tCorr = -tDiff / 2.0
        cOffset = int(round(tCorr / tInt))
        offset += cOffset
        
        ## If the offset is zero, we are done.  Otherwise, apply the offset
        ## and check the location in the file again.
        if cOffset == 0:
            break
        fh.seek(cOffset*FRAME_SIZE, 1)
        
    # Update the offset actually used
    args.skip = t1 - t0
    nChunks = (os.path.getsize(args.filename) - fh.tell()) // FRAME_SIZE
    
    # Update the file contents
    beam = junkFrame.id
    central_freq1, central_freq2 = junkFrame.central_freq
    srate = junkFrame.sample_rate
    data_products = junkFrame.data_products
    t0 = junkFrame.time
    tInt = junkFrame.header.nints*LFFT/srate
    beginDate = junkFrame.time.datetime
        
    # Report
    print("Filename: %s" % args.filename)
    if args.metadata is not None:
        print("Metadata: %s" % args.metadata)
    elif args.sdf is not None:
        print("SDF: %s" % args.sdf)
    print("Date of First Frame: %s" % beginDate)
    print("Beam: %i" % beam)
    print("Sample Rate: %i Hz" % srate)
    print("Tuning Frequency: %.3f Hz (1); %.3f Hz (2)" % (central_freq1, central_freq2))
    print("Data Products: %s" % ','.join(data_products))
    print("Frames: %i (%.3f s)" % (nFrames, nFrames*tInt))
    print("---")
    print("Offset: %.3f s (%i frames)" % (args.skip, offset))
    print("Transform Length: %i" % LFFT)
    print("Integration: %.3f s" % tInt)
    
    # Setup the output file
    outname = os.path.split(args.filename)[1]
    outname = os.path.splitext(outname)[0]
    outname = '%s-waterfall.hdf5' % outname
    
    if os.path.exists(outname):
        if not args.force:
            yn = raw_input("WARNING: '%s' exists, overwrite? [Y/n] " % outname)
        else:
            yn = 'y'
            
        if yn not in ('n', 'N'):
            os.unlink(outname)
        else:
            raise RuntimeError("Output file '%s' already exists" % outname)
            
    f = hdfData.create_new_file(outname)
    obsList = {}
    if args.metadata is not None:
        try:
            project = metabundle.get_sdf(args.metadata)
        except Exception as e:
            if adpReady:
                project = metabundleADP.get_sdf(args.metadata)
            else:
                raise e
                
        sdfBeam  = project.sessions[0].drx_beam
        spcSetup = project.sessions[0].spcSetup
        if sdfBeam != beam:
            raise RuntimeError("Metadata is for beam #%i, but data is from beam #%i" % (sdfBeam, beam))
            
        for i,obs in enumerate(project.sessions[0].observations):
            sdfStart = mcs.mjdmpm_to_datetime(obs.mjd, obs.mpm)
            sdfStop  = mcs.mjdmpm_to_datetime(obs.mjd, obs.mpm + obs.dur)
            obsChunks = int(numpy.ceil(obs.dur/1000.0 * drx.FILTER_CODES[obs.filter] / (spcSetup[0]*spcSetup[1])))
            
            obsList[i+1] = (sdfStart, sdfStop, obsChunks)
            
        hdfData.fill_from_metabundle(f, args.metadata)
        
    elif args.sdf is not None:
        try:
            project = sdf.parse_sdf(args.sdf)
        except Exception as e:
            if adpReady:
                project = sdfADP.parse_sdf(args.sdf)
            else:
                raise e
                
        sdfBeam  = project.sessions[0].drx_beam
        spcSetup = project.sessions[0].spcSetup
        if sdfBeam != beam:
            raise RuntimeError("Metadata is for beam #%i, but data is from beam #%i" % (sdfBeam, beam))
            
        for i,obs in enumerate(project.sessions[0].observations):
            sdfStart = mcs.mjdmpm_to_datetime(obs.mjd, obs.mpm)
            sdfStop  = mcs.mjdmpm_to_datetime(obs.mjd, obs.mpm + obs.dur)
            obsChunks = int(numpy.ceil(obs.dur/1000.0 * drx.FILTER_CODES[obs.filter] / (spcSetup[0]*spcSetup[1])))
            
            obsList[i+1] = (sdfStart, sdfStop, obsChunks)
            
        hdfData.fill_from_sdf(f, args.sdf, station=site)
        
    else:
        obsList[1] = (beginDate, datetime(2222,12,31,23,59,59), nChunks)
        
        hdfData.fill_minimum(f, 1, beam, srate, station=site)
        
    data_products = junkFrame.data_products
    for o in sorted(obsList.keys()):
        for t in (1,2):
            hdfData.create_observation_set(f, o, t, numpy.arange(LFFT, dtype=numpy.float64), obsList[o][2], data_products)
            
    f.attrs['FileGenerator'] = 'drspec2hdf.py'
    f.attrs['InputData'] = os.path.basename(args.filename)
    
    # Create the various HDF group holders
    ds = {}
    for o in sorted(obsList.keys()):
        obs = hdfData.get_observation_set(f, o)
        
        ds['obs%i' % o] = obs
        ds['obs%i-time' % o] = hdfData.get_time(f, o)
        
        for t in (1,2):
            ds['obs%i-freq%i' % (o, t)] = hdfData.get_data_set(f, o, t, 'freq')
            for p in data_products:
                ds["obs%i-%s%i" % (o, p, t)] = hdfData.get_data_set(f, o, t, p)
            ds['obs%i-Saturation%i' % (o, t)] = hdfData.get_data_set(f, o, t, 'Saturation')
            
    # Loop over DR spectrometer frames to fill in the HDF5 file
    pbar = progress.ProgressBar(max=nChunks)
    o = 1
    j = 0
    
    firstPass = True
    for i in xrange(nChunks):
        frame = drspec.read_frame(fh)
        
        cTime = frame.time.datetime
        if cTime > obsList[o][1]:
            # Increment to the next observation
            o += 1
            
            # If we have reached the end, exit...
            try:
                obsList[o]
                
                firstPass = True
            except KeyError:
                sys.stdout.write('%s\r' % (' '*pbar.span))
                sys.stdout.flush()
                print("End of observing block according to SDF, exiting")
                break
                
        if cTime < obsList[o][0]:
            # Skip over data that occurs before the start of the observation
            continue
            
        try:
            if frame.time > oTime + 1.001*tInt:
                print('Warning: Time tag error at frame %i; %.3f > %.3f + %.3f' % (i, frame.time, oTime, tInt))
        except NameError:
            pass
        oTime = frame.time
        
        if firstPass:
            # First frame of this observation: record its tuning and sample rate setup
            central_freq1, central_freq2 = frame.central_freq
            srate = frame.sample_rate
            tInt  = frame.header.nints*LFFT/srate
            
            freq = numpy.fft.fftshift( numpy.fft.fftfreq(LFFT, d=1.0/srate) )
            freq = freq.astype(numpy.float64)
            
            sys.stdout.write('%s\r' % (' '*pbar.span))
            sys.stdout.flush()
            print("Switching to Obs. #%i" % o)
            print("-> Tunings: %.1f Hz, %.1f Hz" % (central_freq1, central_freq2))
            print("-> Sample Rate: %.1f Hz" % srate)
            print("-> Integration Time: %.3f s" % tInt)
            sys.stdout.write(pbar.show()+'\r')
            sys.stdout.flush()
            
            j = 0
            ds['obs%i-freq1' % o][:] = freq + central_freq1
            ds['obs%i-freq2' % o][:] = freq + central_freq2
            
            obs = ds['obs%i' % o]
            obs.attrs['tInt'] = tInt
            obs.attrs['tInt_Units'] = 's'
            obs.attrs['LFFT'] = LFFT
            obs.attrs['nChan'] = LFFT
            obs.attrs['RBW'] = freq[1]-freq[0]
            obs.attrs['RBW_Units'] = 'Hz'
            
            firstPass = False
            
        # Load the data from the spectrometer frame into the HDF5 group
        ds['obs%i-time' % o][j] = (frame.time[0], frame.time[1])
        
        ds['obs%i-Saturation1' % o][j,:] = frame.payload.saturations[0:2]
        ds['obs%i-Saturation2' % o][j,:] = frame.payload.saturations[2:4]
        
        for t in (1,2):
            for p in data_products:
                ds['obs%i-%s%i' % (o, p, t)][j,:] = getattr(frame.payload, "%s%i" % (p, t-1), None)
        j += 1
        
        # Update the progress bar
        pbar.inc()
        if i % 10 == 0:
            sys.stdout.write(pbar.show()+'\r')
            sys.stdout.flush()
            
    sys.stdout.write(pbar.show()+'\n')
    sys.stdout.flush()
    
    # Done
    fh.close()

    # Save the output to a HDF5 file
    f.close()
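
The offset-refinement loop in this script (seek, re-read, step by half of the remaining time error) can be factored into a helper. A hedged sketch that reuses only the drspec calls and frame attributes appearing above; seek_to_offset is a hypothetical name:

from lsl.reader import drspec  # assumed import path


def seek_to_offset(fh, skip):
    """Seek fh so the next frame starts roughly `skip` seconds after the first."""
    FRAME_SIZE = drspec.get_frame_size(fh)
    LFFT = drspec.get_transform_size(fh)

    junkFrame = drspec.read_frame(fh)
    fh.seek(-FRAME_SIZE, 1)
    t0 = junkFrame.time
    tInt = junkFrame.header.nints * LFFT / junkFrame.sample_rate

    # Initial guess, in whole frames
    offset = int(round(skip / tInt))
    fh.seek(offset * FRAME_SIZE, 1)

    while True:
        junkFrame = drspec.read_frame(fh)
        fh.seek(-FRAME_SIZE, 1)
        tInt = junkFrame.header.nints * LFFT / junkFrame.sample_rate

        # Step by half of the remaining time error, expressed in frames
        tDiff = junkFrame.time - (t0 + skip)
        cOffset = int(round(-tDiff / 2.0 / tInt))
        if cOffset == 0:
            break
        offset += cOffset
        fh.seek(cOffset * FRAME_SIZE, 1)

    return offset, junkFrame.time - t0
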
Example #9
def main(args):
    fh = open(args.filename, "rb")

    try:
        for i in xrange(5):
            junkFrame = drx.read_frame(fh)
        raise RuntimeError(
            "ERROR: '%s' appears to be a raw DRX file, not a DR spectrometer file"
            % args.filename)
    except errors.SyncError:
        fh.seek(0)

    # Interrogate the file to figure out what frame sizes to expect, how many
    # frames there are, and what the transform length is
    FRAME_SIZE = drspec.get_frame_size(fh)
    nFrames = os.path.getsize(args.filename) // FRAME_SIZE
    nChunks = nFrames
    LFFT = drspec.get_transform_size(fh)

    # Read in the first frame to figure out the DP information
    junkFrame = drspec.read_frame(fh)
    fh.seek(-FRAME_SIZE, 1)
    srate = junkFrame.sample_rate
    t0 = junkFrame.time
    tInt = junkFrame.header.nints * LFFT / srate

    # Update the file contents
    beam = junkFrame.id
    central_freq1, central_freq2 = junkFrame.central_freq
    srate = junkFrame.sample_rate
    data_products = junkFrame.data_products
    t0 = junkFrame.time
    tInt = junkFrame.header.nints * LFFT / srate
    beginDate = junkFrame.time.datetime

    # Report
    print("Filename: %s" % args.filename)
    print("Date of First Frame: %s" % beginDate)
    print("Beam: %i" % beam)
    print("Sample Rate: %i Hz" % srate)
    print("Tuning Frequency: %.3f Hz (1); %.3f Hz (2)" %
          (central_freq1, central_freq2))
    print("Data Products: %s" % ','.join(data_products))
    print("Frames: %i (%.3f s)" % (nFrames, nFrames * tInt))
    print("---")
    print("Transform Length: %i" % LFFT)
    print("Integration: %.3f s" % tInt)

    # Convert chunk length to total frame count
    chunkLength = int(args.length / tInt)

    # Convert chunk skip to total frame count
    chunkSkip = int(args.skip / tInt)

    # Output arrays
    clipFraction = []
    meanPower = []

    # Go!
    i = 1
    done = False
    print("   |%sClipping%s |%sPower %s |" %
          (" " * (8 * len(data_products) - 4), " " *
           (8 * len(data_products) - 4), " " *
           (6 * len(data_products) - 3), " " * (6 * len(data_products) - 3)))
    out = "   |      1X      1Y      2X      2Y |"
    for t in (1, 2):
        for dp in data_products:
            out += "%6s" % ("%i%s" % (t, dp))
    out += " |"
    print(out)
    print("-" * len(out))

    while True:
        count = {0: 0, 1: 0, 2: 0, 3: 0}
        sats = numpy.empty((4, chunkLength), dtype=numpy.float32)
        data = numpy.empty((2 * len(data_products), chunkLength * LFFT),
                           dtype=numpy.float32)
        for j in xrange(chunkLength):
            # Read in the next frame and anticipate any problems that could occur
            try:
                cFrame = drspec.read_frame(fh)
            except errors.EOFError:
                done = True
                break
            except errors.SyncError:
                continue

            for t in (1, 2):
                for p, dp in enumerate(data_products):
                    l = len(data_products) * (t - 1) + p
                    data[l, j * LFFT:(j + 1) * LFFT] = getattr(
                        cFrame.payload, '%s%i' % (dp, t - 1))
            sats[:, j] = numpy.array(cFrame.payload.saturations) / (tInt * srate)

        if done:
            break

        else:
            clipFraction.append(sats.mean(axis=1))
            meanPower.append(data.mean(axis=1))

            clip = clipFraction[-1]
            power = meanPower[-1]

            out = "%2i | %6.2f%% %6.2f%% %6.2f%% %6.2f%% |" % (
                i, clip[0] * 100.0, clip[1] * 100.0, clip[2] * 100.0,
                clip[3] * 100.0)
            for t in (1, 2):
                for p in xrange(len(data_products)):
                    out += " %5.2f" % (power[len(data_products) *
                                             (t - 1) + p], )
            out += " |"
            print(out)

            i += 1
            fh.seek(FRAME_SIZE * chunkSkip, 1)

    clipFraction = numpy.array(clipFraction)
    meanPower = numpy.array(meanPower)

    clip = clipFraction.mean(axis=0)
    power = meanPower.mean(axis=0)

    print("-" * len(out))
    out = "%2s | %6.2f%% %6.2f%% %6.2f%% %6.2f%% |" % (
        'M', clip[0] * 100.0, clip[1] * 100.0, clip[2] * 100.0,
        clip[3] * 100.0)
    for t in (1, 2):
        for p in xrange(len(data_products)):
            out += " %5.2f" % (power[len(data_products) * (t - 1) + p], )
    out += " |"
    print(out)
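
The saturation counters read above are turned into clipping fractions by dividing by tInt * srate, the number of raw samples per integration. A tiny sketch of that normalization, assuming the 1X, 1Y, 2X, 2Y ordering implied by the table header; clip_fractions is a hypothetical helper:

import numpy


def clip_fractions(frame, tInt, srate):
    """Return the 1X, 1Y, 2X, 2Y clipping fractions for a single frame."""
    sats = numpy.array(frame.payload.saturations, dtype=numpy.float64)
    # Each counter is a number of clipped raw samples out of tInt * srate total
    return sats / (tInt * srate)
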
Example #10
def main(args):
    # Length of the FFT and the window to use
    LFFT = args.fft_length
    if args.bartlett:
        window = numpy.bartlett
    elif args.blackman:
        window = numpy.blackman
    elif args.hanning:
        window = numpy.hanning
    else:
        window = fxc.null_window
    args.window = window

    # Open the file and find good data (not spectrometer data)
    fh = open(args.filename, "rb")

    try:
        for i in xrange(5):
            junkFrame = drspec.read_frame(fh)
        raise RuntimeError(
            "ERROR: '%s' appears to be a DR spectrometer file, not a raw DRX file"
            % args.filename)
    except errors.SyncError:
        fh.seek(0)

    # Good, we seem to have a real DRX file, switch over to the LDP interface
    fh.close()
    idf = LWA1DataFile(args.filename,
                       ignore_timetag_errors=args.ignore_time_errors)

    # Metadata
    nFramesFile = idf.get_info('nframe')
    beam = idf.get_info('beam')
    srate = idf.get_info('sample_rate')
    beampols = idf.get_info('nbeampol')
    beams = max([1, beampols // 4])

    # Number of frames to integrate over
    nFramesAvg = int(args.average * srate / 4096) * beampols
    nFramesAvg = int(1.0 * (nFramesAvg // beampols) * 4096 /
                     float(LFFT)) * LFFT // 4096 * beampols
    args.average = 1.0 * (nFramesAvg // beampols) * 4096 / srate
    maxFrames = nFramesAvg

    # Offset into the file, if needed
    offset = idf.offset(args.skip)

    # Number of remaining chunks (and the correction to the number of
    # frames to read in).
    if args.metadata is not None:
        args.duration = 0
    if args.duration == 0:
        args.duration = 1.0 * nFramesFile / beampols * 4096 / srate
        args.duration -= args.skip
    else:
        args.duration = int(
            round(args.duration * srate * beampols / 4096) / beampols * 4096 /
            srate)
    nChunks = int(round(args.duration / args.average))
    if nChunks == 0:
        nChunks = 1
    nFrames = nFramesAvg * nChunks

    # Date & Central Frequency
    t1 = idf.get_info('start_time')
    beginDate = t1.datetime
    central_freq1 = idf.get_info('freq1')
    central_freq2 = idf.get_info('freq2')

    # File summary
    print("Filename: %s" % args.filename)
    print("Date of First Frame: %s" % str(beginDate))
    print("Beams: %i" % beams)
    print("Tune/Pols: %i" % beampols)
    print("Sample Rate: %i Hz" % srate)
    print("Tuning Frequency: %.3f Hz (1); %.3f Hz (2)" %
          (central_freq1, central_freq2))
    print("Frames: %i (%.3f s)" %
          (nFramesFile, 1.0 * nFramesFile / beampols * 4096 / srate))
    print("---")
    print("Offset: %.3f s (%i frames)" % (args.skip, offset))
    print("Integration: %.3f s (%i frames; %i frames per beam/tune/pol)" %
          (args.average, nFramesAvg, nFramesAvg / beampols))
    print("Duration: %.3f s (%i frames; %i frames per beam/tune/pol)" %
          (args.average * nChunks, nFrames, nFrames / beampols))
    print("Chunks: %i" % nChunks)
    print(" ")

    # Estimate clip level (if needed)
    if args.estimate_clip_level:
        estimate = idf.estimate_levels(fh, sigma=5.0)
        clip1 = (estimate[0] + estimate[1]) / 2.0
        clip2 = (estimate[2] + estimate[3]) / 2.0
    else:
        clip1 = args.clip_level
        clip2 = args.clip_level

    # Make the pseudo-antennas for Stokes calculation
    antennas = []
    for i in xrange(4):
        if i // 2 == 0:
            newAnt = stations.Antenna(1)
        else:
            newAnt = stations.Antenna(2)

        if i % 2 == 0:
            newAnt.pol = 0
        else:
            newAnt.pol = 1

        antennas.append(newAnt)

    # Setup the output file
    outname = os.path.split(args.filename)[1]
    outname = os.path.splitext(outname)[0]
    outname = '%s-waterfall.hdf5' % outname

    if os.path.exists(outname):
        if not args.force:
            yn = raw_input("WARNING: '%s' exists, overwrite? [Y/n] " % outname)
        else:
            yn = 'y'

        if yn not in ('n', 'N'):
            os.unlink(outname)
        else:
            raise RuntimeError("Output file '%s' already exists" % outname)

    f = hdfData.create_new_file(outname)

    # Look at the metadata and come up with a list of observations.  If
    # there are no metadata, create a single "observation" that covers the
    # whole file.
    obsList = {}
    if args.metadata is not None:
        try:
            project = metabundle.get_sdf(args.metadata)
        except Exception as e:
            project = metabundleADP.get_sdf(args.metadata)

        sdfBeam = project.sessions[0].drx_beam
        spcSetup = project.sessions[0].spcSetup
        if sdfBeam != beam:
            raise RuntimeError(
                "Metadata is for beam #%i, but data is from beam #%i" %
                (sdfBeam, beam))

        for i, obs in enumerate(project.sessions[0].observations):
            sdfStart = mcs.mjdmpm_to_datetime(obs.mjd, obs.mpm)
            sdfStop = mcs.mjdmpm_to_datetime(obs.mjd, obs.mpm + obs.dur)
            obsDur = obs.dur / 1000.0
            obsSR = drx.FILTER_CODES[obs.filter]

            obsList[i + 1] = (sdfStart, sdfStop, obsDur, obsSR)

        print("Observations:")
        for i in sorted(obsList.keys()):
            obs = obsList[i]
            print(" #%i: %s to %s (%.3f s) at %.3f MHz" %
                  (i, obs[0], obs[1], obs[2], obs[3] / 1e6))
        print(" ")

        hdfData.fill_from_metabundle(f, args.metadata)

    elif args.sdf is not None:
        try:
            project = sdf.parse_sdf(args.sdf)
        except Exception as e:
            project = sdfADP.parse_sdf(args.sdf)

        sdfBeam = project.sessions[0].drx_beam
        spcSetup = project.sessions[0].spcSetup
        if sdfBeam != beam:
            raise RuntimeError(
                "Metadata is for beam #%i, but data is from beam #%i" %
                (sdfBeam, beam))

        for i, obs in enumerate(project.sessions[0].observations):
            sdfStart = mcs.mjdmpm_to_datetime(obs.mjd, obs.mpm)
            sdfStop = mcs.mjdmpm_to_datetime(obs.mjd, obs.mpm + obs.dur)
            obsDur = obs.dur / 1000.0
            obsSR = drx.FILTER_CODES[obs.filter]

            obsList[i + 1] = (sdfStart, sdfStop, obsDur, obsSR)

        site = 'lwa1'
        if args.lwasv:
            site = 'lwasv'
        hdfData.fill_from_sdf(f, args.sdf, station=site)

    else:
        obsList[1] = (beginDate,
                      datetime(2222, 12, 31, 23, 59, 59), args.duration, srate)

        site = 'lwa1'
        if args.lwasv:
            site = 'lwasv'
        hdfData.fill_minimum(f, 1, beam, srate, station=site)

    if (not args.stokes):
        data_products = ['XX', 'YY']
    else:
        data_products = ['I', 'Q', 'U', 'V']

    for o in sorted(obsList.keys()):
        for t in (1, 2):
            hdfData.create_observation_set(
                f, o, t, numpy.arange(LFFT, dtype=numpy.float64),
                int(round(obsList[o][2] / args.average)), data_products)

    f.attrs['FileGenerator'] = 'hdfWaterfall.py'
    f.attrs['InputData'] = os.path.basename(args.filename)

    # Create the various HDF group holders
    ds = {}
    for o in sorted(obsList.keys()):
        obs = hdfData.get_observation_set(f, o)

        ds['obs%i' % o] = obs
        ds['obs%i-time' % o] = hdfData.get_time(f, o)

        for t in (1, 2):
            ds['obs%i-freq%i' % (o, t)] = hdfData.get_data_set(f, o, t, 'freq')
            for p in data_products:
                ds["obs%i-%s%i" % (o, p, t)] = hdfData.get_data_set(f, o, t, p)
            ds['obs%i-Saturation%i' % (o, t)] = hdfData.get_data_set(
                f, o, t, 'Saturation')

    # Load in the correct analysis function
    if (not args.stokes):
        processDataBatch = processDataBatchLinear
    else:
        processDataBatch = processDataBatchStokes

    # Go!
    for o in sorted(obsList.keys()):
        try:
            processDataBatch(idf,
                             antennas,
                             obsList[o][0],
                             obsList[o][2],
                             obsList[o][3],
                             args,
                             ds,
                             obsID=o,
                             clip1=clip1,
                             clip2=clip2)
        except RuntimeError as e:
            print("Observation #%i: %s, abandoning this observation" %
                  (o, str(e)))

    # Save the output to a HDF5 file
    f.close()

    # Close out the data file
    idf.close()
def main(args):
    skip = args.skip
    fh = open(args.filename, "rb")

    try:
        for i in xrange(5):
            junkFrame = drx.read_frame(fh)
        raise RuntimeError(
            "ERROR: '%s' appears to be a raw DRX file, not a DR spectrometer file"
            % args.filename)
    except errors.SyncError:
        fh.seek(0)

    # Interrogate the file to figure out what frame sizes to expect, how many
    # frames there are, and what the transform length is
    FRAME_SIZE = drspec.get_frame_size(fh)
    nFrames = os.path.getsize(args.filename) // FRAME_SIZE
    nChunks = nFrames
    LFFT = drspec.get_transform_size(fh)

    # Read in the first frame to figure out the DP information
    junkFrame = drspec.read_frame(fh)
    fh.seek(-FRAME_SIZE, 1)
    srate = junkFrame.sample_rate
    t0 = junkFrame.time
    tInt = junkFrame.header.nints * LFFT / srate

    # Offset into the file, in frames
    offset = int(round(skip / tInt))
    fh.seek(offset * FRAME_SIZE, 1)

    # Iterate on the offsets until we reach the right point in the file.  This
    # is needed to deal with files that start with only one tuning and/or a
    # different sample rate.
    while True:
        ## Figure out where in the file we are and what the current tuning/sample
        ## rate is
        junkFrame = drspec.read_frame(fh)
        srate = junkFrame.sample_rate
        t1 = junkFrame.time
        tInt = junkFrame.header.nints * LFFT / srate
        fh.seek(-FRAME_SIZE, 1)

        ## See how far off the current frame is from the target
        tDiff = t1 - (t0 + skip)

        ## Half that to come up with a new seek parameter
        tCorr = -tDiff / 2.0
        cOffset = int(round(tCorr / tInt))
        offset += cOffset

        ## If the offset is zero, we are done.  Otherwise, apply the offset
        ## and check the location in the file again.
        if cOffset == 0:
            break
        fh.seek(cOffset * FRAME_SIZE, 1)

    # Update the offset actually used
    skip = t1 - t0
    nChunks = (os.path.getsize(args.filename) - fh.tell()) // FRAME_SIZE

    # Update the file contents
    beam = junkFrame.id
    central_freq1, central_freq2 = junkFrame.central_freq
    srate = junkFrame.sample_rate
    data_products = junkFrame.data_products
    t0 = junkFrame.time
    tInt = junkFrame.header.nints * LFFT / srate
    beginDate = junkFrame.time.datetime

    # Report
    print("Filename: %s" % args.filename)
    print("Date of First Frame: %s" % beginDate)
    print("Beam: %i" % beam)
    print("Sample Rate: %i Hz" % srate)
    print("Tuning Frequency: %.3f Hz (1); %.3f Hz (2)" %
          (central_freq1, central_freq2))
    print("Data Products: %s" % ','.join(data_products))
    print("Frames: %i (%.3f s)" % (nFrames, nFrames * tInt))
    print("---")
    print("Transform Length: %i" % LFFT)
    print("Integration: %.3f s" % tInt)

    for i in xrange(nChunks):
        frame = drspec.read_frame(fh)

        cTime = frame.time
        if i % 1000 == 0:
            print("Frame %i: %s" % (i, datetime.utcfromtimestamp(cTime)))

        try:
            if cTime > oTime + 1.001 * tInt:
                print(
                    'Warning: Time tag error at frame %i; %.3f > %.3f + %.3f' %
                    (i, cTime, oTime, tInt))
        except NameError:
            pass
        oTime = frame.time

        cFreq1, cFreq2 = frame.central_freq
        try:
            if cFreq1 != oFreq1:
                print(
                    'Warning: Tuning 1 frequency changed at frame %i; %.3f Hz != %.3f Hz'
                    % (i, cFreq1, oFreq1))
            if cFreq2 != oFreq2:
                print(
                    'Warning: Tuning 2 frequency changed at frame %i; %.3f Hz != %.3f Hz'
                    % (i, cFreq2, oFreq2))
        except NameError:
            pass
        oFreq1, oFreq2 = frame.central_freq

        del frame
        if i % 100 == 0:
            gc.collect()

    # Done
    fh.close()
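
The per-frame checks in the final loop (time-tag continuity and tuning-frequency changes) can be written as a small generator of warning messages. A hedged sketch, assuming the same frame attributes (time, central_freq) and reader modules used above; check_continuity is a hypothetical name:

from lsl.reader import drspec, errors  # assumed import paths


def check_continuity(fh, tInt, tol=1.001):
    """Yield a warning for every frame whose time tag or tuning looks suspect."""
    oTime = None
    oFreqs = None
    i = 0
    while True:
        try:
            frame = drspec.read_frame(fh)
        except errors.EOFError:
            break

        if oTime is not None and frame.time > oTime + tol * tInt:
            yield 'time tag error at frame %i; %.3f > %.3f + %.3f' % (i, frame.time, oTime, tInt)
        oTime = frame.time

        cFreq1, cFreq2 = frame.central_freq
        if oFreqs is not None and (cFreq1, cFreq2) != oFreqs:
            yield 'tuning change at frame %i' % i
        oFreqs = (cFreq1, cFreq2)

        i += 1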