# Imports assumed by this section (Python 2 / LSL 1.x-era API).  parseOptions,
# estimateClipLevel, processDataBatchLinear, and the hdfData helper are defined
# elsewhere in, or alongside, this script.
import os
import sys
import time
import numpy
import ephem
from datetime import datetime

from lsl.reader import drx, drspec, errors
from lsl.correlator import fx as fxc
from lsl.statistics import robust
from lsl.astro import unix_to_utcjd, DJD_OFFSET
from lsl.common import stations, mcs, metabundle


def main(args):
    t0 = time.time()
    #nChunks = 10000   # the size of a file
    #nFramesAvg = 1*4  # 50 frames per pol, the sub-integration time
    nChunks = 3000     # the size of a file
    nFramesAvg = 4*16  # the integration time
    fcl = 6000 + 7000  # first FFT channel to keep
    fch = fcl + 3343   # last FFT channel to keep

    for offset_i in range(0, 1409):  # one offset = nChunks*nFramesAvg frames skipped
        #for offset_i in range(1500*2, 1500*3):
        #for offset_i in range(1500*4, 1500*5):
        offset = nChunks*nFramesAvg*offset_i

        # Parse command line options
        config = parseOptions(args)

        # Length of the FFT
        #LFFT = config['LFFT']
        LFFT = 4096*16

        # Open the DRX file
        try:
            fh = open(config['args'][0], "rb")
            nFramesFile = os.path.getsize(config['args'][0]) / drx.FrameSize
        except (IOError, OSError):
            print config['args']
            sys.exit(1)

        # Find the first frame with a good sample rate
        while True:
            try:
                junkFrame = drx.readFrame(fh)
                try:
                    srate = junkFrame.getSampleRate()
                    break
                except ZeroDivisionError:
                    pass
            except errors.syncError:
                fh.seek(-drx.FrameSize+1, 1)
        fh.seek(-drx.FrameSize, 1)

        beam,tune,pol = junkFrame.parseID()
        beams = drx.getBeamCount(fh)
        tunepols = drx.getFramesPerObs(fh)
        tunepol = tunepols[0] + tunepols[1] + tunepols[2] + tunepols[3]
        beampols = tunepol

        config['offset'] = offset/srate/beampols*4096
        if offset != 0:
            fh.seek(offset*drx.FrameSize, 1)

        # Make sure that the file chunk size is an integer multiple of the FFT
        # length so that no data gets dropped.  This needs to take into account
        # the number of beampols in the data, the FFT length, and the number of
        # samples per frame.
        maxFrames = int(1.0*config['maxFrames']/beampols*4096/float(LFFT))*LFFT/4096*beampols

        # Number of frames to integrate over
        #nFramesAvg = int(config['average'] * srate / 4096 * beampols)
        #nFramesAvg = int(1.0 * nFramesAvg / beampols*4096/float(LFFT))*LFFT/4096*beampols
        config['average'] = 1.0 * nFramesAvg / beampols * 4096 / srate
        maxFrames = nFramesAvg

        # Number of remaining chunks (and the correction to the number of
        # frames to read in).
        #nChunks = int(round(config['duration'] / config['average']))
        config['duration'] = nChunks*config['average']
        if nChunks == 0:
            nChunks = 1
        nFrames = nFramesAvg*nChunks

        # Date & Central Frequency
        beginDate = ephem.Date(unix_to_utcjd(junkFrame.getTime()) - DJD_OFFSET)
        centralFreq1 = 0.0
        centralFreq2 = 0.0
        for i in xrange(4):
            junkFrame = drx.readFrame(fh)
            b,t,p = junkFrame.parseID()
            if p == 0 and t == 1:
                try:
                    centralFreq1 = junkFrame.getCentralFreq()
                except AttributeError:
                    from lsl.common.dp import fS
                    centralFreq1 = fS * ((junkFrame.data.flags>>32) & (2**32-1)) / 2**32
            elif p == 0 and t == 2:
                try:
                    centralFreq2 = junkFrame.getCentralFreq()
                except AttributeError:
                    from lsl.common.dp import fS
                    centralFreq2 = fS * ((junkFrame.data.flags>>32) & (2**32-1)) / 2**32
            else:
                pass
        fh.seek(-4*drx.FrameSize, 1)

        config['freq1'] = centralFreq1
        config['freq2'] = centralFreq2

        # File summary
        print "Filename: %s" % config['args'][0]
        print "Date of First Frame: %s" % str(beginDate)
        print "Beams: %i" % beams
        print "Tune/Pols: %i %i %i %i" % tunepols
        print "Sample Rate: %i Hz" % srate
        print "Tuning Frequency: %.3f Hz (1); %.3f Hz (2)" % (centralFreq1, centralFreq2)
        print "Frames: %i (%.3f s)" % (nFramesFile, 1.0 * nFramesFile / beampols * 4096 / srate)
        print "---"
        print "Offset: %.3f s (%i frames)" % (config['offset'], offset)
        print "Integration: %.3f s (%i frames; %i frames per beam/tune/pol)" % (config['average'], nFramesAvg, nFramesAvg / beampols)
        print "Duration: %.3f s (%i frames; %i frames per beam/tune/pol)" % (config['average']*nChunks, nFrames, nFrames / beampols)
        print "Chunks: %i" % nChunks
        #sys.exit()

        # Sanity check
        if nFrames > (nFramesFile - offset):
            raise RuntimeError("Requested integration time+offset is greater than file length")

        # Estimate clip level (if needed)
        if config['estimate']:
            filePos = fh.tell()

            # Read in the first 100 frames for each tuning/polarization
            count = {0:0, 1:0, 2:0, 3:0}
            data = numpy.zeros((4, 4096*100), dtype=numpy.csingle)
            for i in xrange(4*100):
                try:
                    cFrame = drx.readFrame(fh, Verbose=False)
                except errors.eofError:
                    break
                except errors.syncError:
                    continue

                beam,tune,pol = cFrame.parseID()
                aStand = 2*(tune-1) + pol
                data[aStand, count[aStand]*4096:(count[aStand]+1)*4096] = cFrame.data.iq
                count[aStand] += 1

            # Go back to where we started
            fh.seek(filePos)

            # Compute the robust mean and standard deviation for I and Q for
            # each tuning/polarization (tuning 1 only)
            meanI = []
            meanQ = []
            stdsI = []
            stdsQ = []
            #for i in xrange(4):
            for i in xrange(2):
                meanI.append( robust.mean(data[i,:].real) )
                meanQ.append( robust.mean(data[i,:].imag) )
                stdsI.append( robust.std(data[i,:].real) )
                stdsQ.append( robust.std(data[i,:].imag) )

            # Come up with the clip levels based on 5 sigma above the mean;
            # only tuning 1 is estimated here and clip2 is left at zero
            clip1 = (meanI[0] + meanI[1] + meanQ[0] + meanQ[1]) / 4.0
            #clip2 = (meanI[2] + meanI[3] + meanQ[2] + meanQ[3]) / 4.0
            clip2 = 0
            clip1 += 5*(stdsI[0] + stdsI[1] + stdsQ[0] + stdsQ[1]) / 4.0
            #clip2 += 5*(stdsI[2] + stdsI[3] + stdsQ[2] + stdsQ[3]) / 4.0
            clip2 += 0
            clip1 = int(round(clip1))
            clip2 = int(round(clip2))
        else:
            clip1 = config['clip']
            clip2 = config['clip']

        # Master loop over all of the file chunks
        #masterSpectra = numpy.zeros((nChunks, 4, LFFT-1))
        masterSpectra = numpy.zeros((nChunks, 4, fch-fcl))
        masterTimes = numpy.zeros(nChunks)
        for i in xrange(nChunks):
            # Find out how many frames remain in the file.  If this number is
            # larger than the maximum of frames we can work with at a time
            # (maxFrames), only deal with that chunk
            framesRemaining = nFrames - i*maxFrames
            if framesRemaining > maxFrames:
                framesWork = maxFrames
            else:
                framesWork = framesRemaining
            if framesRemaining % (nFrames/10) == 0:
                print "Working on chunk %i, %i frames remaining" % (i, framesRemaining)

            count = {0:0, 1:0, 2:0, 3:0}
            data = numpy.zeros((4, framesWork*4096/beampols), dtype=numpy.csingle)

            # If there are fewer frames than we need to fill an FFT, skip this chunk
            if data.shape[1] < LFFT:
                break

            # Inner loop that actually reads the frames into the data array
            if framesRemaining % (nFrames/10) == 0:
                print "Working on %.1f ms of data" % ((framesWork*4096/beampols/srate)*1000.0)

            for j in xrange(framesWork):
                # Read in the next frame and anticipate any problems that could occur
                try:
                    cFrame = drx.readFrame(fh, Verbose=False)
                except errors.eofError:
                    print "EOF Error"
                    break
                except errors.syncError:
                    print "Sync Error"
                    continue

                beam,tune,pol = cFrame.parseID()
                aStand = 2*(tune-1) + pol
                if j == 0:
                    cTime = cFrame.getTime()

                data[aStand, count[aStand]*4096:(count[aStand]+1)*4096] = cFrame.data.iq
                count[aStand] += 1

            # Calculate the spectra for this block of data; clipping is
            # currently disabled (the ClipLevel variants are kept commented out)
            #freq, tempSpec1 = fxc.SpecMaster(data[:2,:], LFFT=LFFT, window=config['window'], verbose=config['verbose'], SampleRate=srate, ClipLevel=clip1)
            freq, tempSpec1 = fxc.SpecMaster(data[:2,:], LFFT=LFFT, window=config['window'], verbose=config['verbose'], SampleRate=srate)
            #freq, tempSpec2 = fxc.SpecMaster(data[2:,:], LFFT=LFFT, window=config['window'], verbose=config['verbose'], SampleRate=srate, ClipLevel=clip2)
            freq, tempSpec2 = fxc.SpecMaster(data[2:,:], LFFT=LFFT, window=config['window'], verbose=config['verbose'], SampleRate=srate)

            # Save the results to the various master arrays, keeping only the
            # fcl:fch channel sub-band
            masterTimes[i] = cTime
            masterSpectra[i,0,:] = tempSpec1[0,fcl:fch]
            masterSpectra[i,1,:] = tempSpec1[1,fcl:fch]
            masterSpectra[i,2,:] = tempSpec2[0,fcl:fch]
            masterSpectra[i,3,:] = tempSpec2[1,fcl:fch]

            # We don't really need the data array anymore, so delete it
            del data

        # Average the two polarizations of each tuning and save tuning 2 to a
        # .npy file named from the last 46 characters of the output stem
        outname = "%s_%i_fft_offset_%.9i_frames" % (config['args'][0], beam, offset)
        #numpy.savez(outname, freq=freq, freq1=freq+config['freq1'], freq2=freq+config['freq2'], times=masterTimes, spec=masterSpectra, tInt=(maxFrames*4096/beampols/srate), srate=srate, standMapper=[4*(beam-1) + i for i in xrange(masterSpectra.shape[1])])
        masterSpectra[:,0,:] = masterSpectra[:,0:2,:].mean(1)
        masterSpectra[:,1,:] = masterSpectra[:,2:4,:].mean(1)
        numpy.save(outname[-46:], masterSpectra[:,1,:])
        #numpy.save(outname[-46:], masterSpectra[:,0:2,:])
        #numpy.save(outname[-46:], masterSpectra)

        print time.time()-t0
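# ---------------------------------------------------------------------------
# Hedged sketch (added for illustration, not part of the original pipeline):
# the offset/integration arithmetic above converts between DRX frame counts
# and seconds using 4096 samples per frame and `beampols` interleaved
# beam/tuning/pol streams.  A minimal, self-contained version of that
# bookkeeping, with assumed example values in the comment below:

def framesToSeconds(nFrames, srate, beampols=4):
    """Wall-clock seconds covered by nFrames interleaved DRX frames."""
    return 1.0 * nFrames / beampols * 4096 / srate

def secondsToFrames(tInt, srate, beampols=4):
    """Frame count covering tInt seconds, rounded to whole beampols sets."""
    nFrames = int(round(tInt * srate / 4096.0 * beampols))
    return (nFrames // beampols) * beampols  # keep complete frame sets only

# Example (assumed srate of 19.6 MHz): one integration of nFramesAvg = 4*16 =
# 64 frames spans framesToSeconds(64, 19.6e6) ~ 3.3 ms, and one offset step of
# nChunks*nFramesAvg = 192000 frames spans ~10.0 s.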
# NOTE: this second definition of main() (the HDF5 waterfall variant) shadows
# the one above if both are kept in the same file.
def main(args):
    # Parse command line options
    config = parseOptions(args)

    # Length of the FFT
    LFFT = config['LFFT']

    # Open the file and find good data (not spectrometer data)
    filename = config['args'][0]
    fh = open(filename, "rb")
    nFramesFile = os.path.getsize(filename) / drx.FrameSize

    try:
        for i in xrange(5):
            junkFrame = drspec.readFrame(fh)
        raise RuntimeError("ERROR: '%s' appears to be a DR spectrometer file, not a raw DRX file" % filename)
    except errors.syncError:
        fh.seek(0)

    while True:
        try:
            junkFrame = drx.readFrame(fh)
            try:
                srate = junkFrame.getSampleRate()
                t0 = junkFrame.getTime()
                break
            except ZeroDivisionError:
                pass
        except errors.syncError:
            fh.seek(-drx.FrameSize+1, 1)
    fh.seek(-drx.FrameSize, 1)

    beam,tune,pol = junkFrame.parseID()
    beams = drx.getBeamCount(fh)
    tunepols = drx.getFramesPerObs(fh)
    tunepol = tunepols[0] + tunepols[1] + tunepols[2] + tunepols[3]
    beampols = tunepol

    # Offset, in whole beam/tuning/pol frame sets
    inoffset = config['offset']
    offset = int(config['offset'] * srate / 4096 * beampols)
    offset = int(1.0 * offset / beampols) * beampols
    fh.seek(offset*drx.FrameSize, 1)

    # Iterate on the offsets until we reach the right point in the file.  This
    # is needed to deal with files that start with only one tuning and/or a
    # different sample rate.
    while True:
        ## Figure out where in the file we are and what the current tuning/sample
        ## rate is
        junkFrame = drx.readFrame(fh)
        srate = junkFrame.getSampleRate()
        t1 = junkFrame.getTime()
        tunepols = drx.getFramesPerObs(fh)
        tunepol = tunepols[0] + tunepols[1] + tunepols[2] + tunepols[3]
        beampols = tunepol
        fh.seek(-drx.FrameSize, 1)

        ## See how far off the current frame is from the target
        tDiff = t1 - (t0 + config['offset'])

        ## Halve that to come up with a new seek parameter
        tCorr = -tDiff / 2.0
        cOffset = int(tCorr * srate / 4096 * beampols)
        cOffset = int(1.0 * cOffset / beampols) * beampols
        offset += cOffset

        ## If the correction is zero, we are done.  Otherwise, apply the offset
        ## and check the location in the file again.
        if cOffset == 0:
            break
        fh.seek(cOffset*drx.FrameSize, 1)

    # Update the offset actually used
    config['offset'] = t1 - t0
    offset = int(round(config['offset'] * srate / 4096 * beampols))
    offset = int(1.0 * offset / beampols) * beampols

    # Make sure that the file chunk size is an integer multiple of the FFT
    # length so that no data gets dropped.  This needs to take into account the
    # number of beampols in the data, the FFT length, and the number of samples
    # per frame.
    maxFrames = int(1.0*config['maxFrames']/beampols*4096/float(LFFT))*LFFT/4096*beampols

    # Number of frames to integrate over
    print "Line 673: config['average']", config['average'], ' sample rate ', srate, ' beampols ', beampols
    nFramesAvg = int(config['average'] * srate / 4096 * beampols)
    if nFramesAvg == 0:
        nFramesAvg = 1 * beampols
    else:
        nFramesAvg = int(1.0 * nFramesAvg / beampols*4096/float(LFFT))*LFFT/4096*beampols
    config['average'] = 1.0 * nFramesAvg / beampols * 4096 / srate
    maxFrames = nFramesAvg
    print "Line 678: config['average']", config['average'], ' sample rate ', srate, ' beampols ', beampols, " nFramesAvg ", nFramesAvg

    # Number of remaining chunks (and the correction to the number of
    # frames to read in).
    if config['metadata'] is not None:
        config['duration'] = 0
    if config['duration'] == 0:
        config['duration'] = 1.0 * nFramesFile / beampols * 4096 / srate
    else:
        config['duration'] = int(round(config['duration'] * srate * beampols / 4096) / beampols * 4096 / srate)
    nChunks = int(round(config['duration'] / config['average']))
    if nChunks == 0:
        nChunks = 1
    nFrames = nFramesAvg*nChunks
    print "Line 693: config['average']", config['average'], ' sample rate ', srate, ' beampols ', beampols, " nFramesAvg ", nFramesAvg, " nChunks ", nChunks

    # Date & Central Frequency
    t1 = junkFrame.getTime()
    beginDate = ephem.Date(unix_to_utcjd(junkFrame.getTime()) - DJD_OFFSET)
    centralFreq1 = 0.0
    centralFreq2 = 0.0
    for i in xrange(4):
        junkFrame = drx.readFrame(fh)
        b,t,p = junkFrame.parseID()
        if p == 0 and t == 1:
            try:
                centralFreq1 = junkFrame.getCentralFreq()
            except AttributeError:
                from lsl.common.dp import fS
                centralFreq1 = fS * ((junkFrame.data.flags>>32) & (2**32-1)) / 2**32
        elif p == 0 and t == 2:
            try:
                centralFreq2 = junkFrame.getCentralFreq()
            except AttributeError:
                from lsl.common.dp import fS
                centralFreq2 = fS * ((junkFrame.data.flags>>32) & (2**32-1)) / 2**32
        else:
            pass
    fh.seek(-4*drx.FrameSize, 1)

    config['freq1'] = centralFreq1
    config['freq2'] = centralFreq2

    # File summary
    print "Filename: %s" % filename
    print "Date of First Frame: %s" % str(beginDate)
    print "Beams: %i" % beams
    print "Tune/Pols: %i %i %i %i" % tunepols
    print "Sample Rate: %i Hz" % srate
    print "Tuning Frequency: %.3f Hz (1); %.3f Hz (2)" % (centralFreq1, centralFreq2)
    print "Frames: %i (%.3f s)" % (nFramesFile, 1.0 * nFramesFile / beampols * 4096 / srate)
    print "---"
    print "Offset: %.3f s (%i frames)" % (config['offset'], offset)
    print "Integration: %.6f s (%i frames; %i frames per beam/tune/pol)" % (config['average'], nFramesAvg, nFramesAvg / beampols)
    print "Duration: %.3f s (%i frames; %i frames per beam/tune/pol)" % (config['average']*nChunks, nFrames, nFrames / beampols)
    print "Chunks: %i" % nChunks
    print " "

    # Estimate clip level (if needed)
    if config['estimate']:
        clip1, clip2 = estimateClipLevel(fh, beampols)
    else:
        clip1 = config['clip']
        clip2 = config['clip']

    # Make the pseudo-antennas for the Stokes calculation
    antennas = []
    for i in xrange(4):
        if i // 2 == 0:
            newAnt = stations.Antenna(1)
        else:
            newAnt = stations.Antenna(2)

        if i % 2 == 0:
            newAnt.pol = 0
        else:
            newAnt.pol = 1

        antennas.append(newAnt)

    # Setup the output file
    outname = os.path.split(filename)[1]
    outname = os.path.splitext(outname)[0]
    if config['return'] == 'FFT':
        outname = '%s-%d-waterfall-complex.hdf5' % (outname, inoffset)
    else:
        outname = '%s-waterfall.hdf5' % outname

    if os.path.exists(outname):
        #yn = raw_input("WARNING: '%s' exists, overwrite? [Y/n] " % outname)
        #if yn not in ('n', 'N'):
        #    os.unlink(outname)
        #else:
        raise RuntimeError("Output file '%s' already exists" % outname)

    f = hdfData.createNewFile(outname)

    # Look at the metadata and come up with a list of observations.  If
    # there are no metadata, create a single "observation" that covers the
    # whole file.
    obsList = {}
    if config['metadata'] is not None:
        sdf = metabundle.getSessionDefinition(config['metadata'])

        sdfBeam = sdf.sessions[0].drxBeam
        spcSetup = sdf.sessions[0].spcSetup
        if sdfBeam != beam:
            raise RuntimeError("Metadata is for beam #%i, but data is from beam #%i" % (sdfBeam, beam))

        for i,obs in enumerate(sdf.sessions[0].observations):
            sdfStart = mcs.mjdmpm2datetime(obs.mjd, obs.mpm)
            sdfStop = mcs.mjdmpm2datetime(obs.mjd, obs.mpm + obs.dur)
            obsDur = obs.dur/1000.0
            obsSR = drx.filterCodes[obs.filter]
            obsList[i+1] = (sdfStart, sdfStop, obsDur, obsSR)

        print "Observations:"
        for i in sorted(obsList.keys()):
            obs = obsList[i]
            print " #%i: %s to %s (%.3f s) at %.3f MHz" % (i, obs[0], obs[1], obs[2], obs[3]/1e6)
        print " "

        hdfData.fillFromMetabundle(f, config['metadata'])
    else:
        obsList[1] = (datetime.utcfromtimestamp(t1), datetime(2222,12,31,23,59,59), config['duration'], srate)
        hdfData.fillMinimum(f, 1, beam, srate)

    if config['linear']:
        dataProducts = ['XX', 'YY']
    else:
        dataProducts = ['I', 'Q', 'U', 'V']

    for o in sorted(obsList.keys()):
        for t in (1,2):
            hdfData.createDataSets(f, o, t, numpy.arange(LFFT-1 if float(fxc.__version__) < 0.8 else LFFT, dtype=numpy.float32), int(round(obsList[o][2]/config['average'])), dataProducts, dataOut=config['return'])
    f.attrs['FileGenerator'] = 'hdfWaterfall.py'
    f.attrs['InputData'] = os.path.basename(filename)

    # Create the various HDF group holders
    ds = {}
    for o in sorted(obsList.keys()):
        obs = hdfData.getObservationSet(f, o)

        ds['obs%i' % o] = obs
        ds['obs%i-time' % o] = obs.create_dataset('time', (int(round(obsList[o][2]/config['average'])),), 'f8')

        for t in (1,2):
            ds['obs%i-freq%i' % (o, t)] = hdfData.getDataSet(f, o, t, 'freq')
            for p in dataProducts:
                if config['return'] == 'PSD':
                    ds["obs%i-%s%i" % (o, p, t)] = hdfData.getDataSet(f, o, t, p)
                else:
                    ds["obs%i-%s%imag" % (o, p, t)] = hdfData.getDataSet(f, o, t, p+'mag')
                    ds["obs%i-%s%iphase" % (o, p, t)] = hdfData.getDataSet(f, o, t, p+'phase')
            ds['obs%i-Saturation%i' % (o, t)] = hdfData.getDataSet(f, o, t, 'Saturation')

    # Load in the correct analysis function
    if config['linear']:
        processDataBatch = processDataBatchLinear
    else:
        processDataBatch = processDataBatchStokes

    # Go!
    for o in sorted(obsList.keys()):
        try:
            processDataBatch(fh, antennas, obsList[o][0], obsList[o][2], obsList[o][3], config, ds, obsID=o, clip1=clip1, clip2=clip2)
        except RuntimeError, e:
            print "Observation #%i: %s, abandoning this observation" % (o, str(e))
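# ---------------------------------------------------------------------------
# Hedged sketch (illustration only): the offset loop in main() above converges
# on the requested start time by repeatedly halving the remaining time error
# and seeking by the corresponding whole number of frame sets.  The same idea
# on plain numbers, assuming ideal contiguous frames and no file I/O:

def convergeOnOffset(t0, target, srate, beampols=4):
    """Frame offset that lands closest to t0+target seconds, by bisection."""
    offset = int(target * srate / 4096 * beampols)
    offset = int(1.0 * offset / beampols) * beampols
    while True:
        t1 = t0 + 1.0 * offset / beampols * 4096 / srate  # time at this offset
        tCorr = -(t1 - (t0 + target)) / 2.0               # halve the error
        cOffset = int(tCorr * srate / 4096 * beampols)
        cOffset = int(1.0 * cOffset / beampols) * beampols
        if cOffset == 0:  # correction smaller than one frame set: done
            break
        offset += cOffset
    return offset

# Example (assumed values): convergeOnOffset(0.0, 1.0, 19.6e6) returns 19140
# frames, i.e. 4785 frames per stream, which spans 0.99997 s at 19.6 MHz.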
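# ---------------------------------------------------------------------------
# Note (added for clarity): the batch processors below map each frame onto a
# row of their 4-row data arrays via aStand = 2*(tune-1) + pol:
#   row 0 -> tuning 1, pol X    row 1 -> tuning 1, pol Y
#   row 2 -> tuning 2, pol X    row 3 -> tuning 2, pol Y
# which is why data[:2,:] selects tuning 1 and data[2:,:] selects tuning 2 in
# the SpecMaster/StokesMaster calls.

def aStandIndex(tune, pol):
    """Row index used throughout this file for a (tuning, polarization) pair."""
    return 2*(tune-1) + pol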
def processDataBatchStokes(fh, antennas, tStart, duration, sampleRate, config, dataSets, obsID=1, clip1=0, clip2=0):
    """
    Process a chunk of data in a raw DRX file into Stokes parameters and
    add the contents to an HDF5 file.
    """

    # Length of the FFT
    LFFT = config['LFFT']

    # Find the start of the observation
    junkFrame = drx.readFrame(fh)
    srate = junkFrame.getSampleRate()
    t0 = junkFrame.getTime()
    fh.seek(-drx.FrameSize, 1)

    print 'Looking for #%i at %s with sample rate %.1f Hz...' % (obsID, tStart, sampleRate)
    while datetime.utcfromtimestamp(t0) < tStart or srate != sampleRate:
        junkFrame = drx.readFrame(fh)
        srate = junkFrame.getSampleRate()
        t0 = junkFrame.getTime()
    print '... Found #%i at %s with sample rate %.1f Hz' % (obsID, datetime.utcfromtimestamp(t0), srate)
    tDiff = datetime.utcfromtimestamp(t0) - tStart
    try:
        duration = duration - tDiff.total_seconds()
    except AttributeError:
        # timedelta.total_seconds() is missing before Python 2.7
        duration = duration - (tDiff.seconds + tDiff.microseconds/1e6)

    beam,tune,pol = junkFrame.parseID()
    beams = drx.getBeamCount(fh)
    tunepols = drx.getFramesPerObs(fh)
    tunepol = tunepols[0] + tunepols[1] + tunepols[2] + tunepols[3]
    beampols = tunepol

    # Make sure that the file chunk size is an integer multiple of the FFT
    # length so that no data gets dropped.  This needs to take into account the
    # number of beampols in the data, the FFT length, and the number of samples
    # per frame.
    maxFrames = int(1.0*config['maxFrames']/beampols*4096/float(LFFT))*LFFT/4096*beampols

    # Number of frames to integrate over
    print "Line 455: config['average']", config['average'], ' sample rate ', srate, ' beampols ', beampols
    nFramesAvg = int(round(config['average'] * srate / 4096 * beampols))
    nFramesAvg = int(1.0 * nFramesAvg / beampols*4096/float(LFFT))*LFFT/4096*beampols
    config['average'] = 1.0 * nFramesAvg / beampols * 4096 / srate
    maxFrames = nFramesAvg
    print "Line 460: config['average']", config['average'], ' sample rate ', srate, ' beampols ', beampols, " nFramesAvg ", nFramesAvg

    # Number of remaining chunks (and the correction to the number of
    # frames to read in).
    nChunks = int(round(duration / config['average']))
    if nChunks == 0:
        nChunks = 1
    nFrames = nFramesAvg*nChunks
    print "Line 468: config['average']", config['average'], ' sample rate ', srate, ' beampols ', beampols, " nFramesAvg ", nFramesAvg, " nChunks ", nChunks

    # Date & Central Frequency
    beginDate = ephem.Date(unix_to_utcjd(junkFrame.getTime()) - DJD_OFFSET)
    centralFreq1 = 0.0
    centralFreq2 = 0.0
    for i in xrange(4):
        junkFrame = drx.readFrame(fh)
        b,t,p = junkFrame.parseID()
        if p == 0 and t == 1:
            try:
                centralFreq1 = junkFrame.getCentralFreq()
            except AttributeError:
                from lsl.common.dp import fS
                centralFreq1 = fS * ((junkFrame.data.flags>>32) & (2**32-1)) / 2**32
        elif p == 0 and t == 2:
            try:
                centralFreq2 = junkFrame.getCentralFreq()
            except AttributeError:
                from lsl.common.dp import fS
                centralFreq2 = fS * ((junkFrame.data.flags>>32) & (2**32-1)) / 2**32
        else:
            pass
    fh.seek(-4*drx.FrameSize, 1)

    freq = numpy.fft.fftshift(numpy.fft.fftfreq(LFFT, d=1.0/srate))
    if float(fxc.__version__) < 0.8:
        freq = freq[1:]

    dataSets['obs%i-freq1' % obsID][:] = freq + centralFreq1
    dataSets['obs%i-freq2' % obsID][:] = freq + centralFreq2

    obs = dataSets['obs%i' % obsID]
    obs.attrs['tInt'] = config['average']
    obs.attrs['tInt_Unit'] = 's'
    obs.attrs['LFFT'] = LFFT
    obs.attrs['nChan'] = LFFT-1 if float(fxc.__version__) < 0.8 else LFFT
    obs.attrs['RBW'] = freq[1]-freq[0]
    obs.attrs['RBW_Units'] = 'Hz'

    dataProducts = ['I', 'Q', 'U', 'V']
    done = False
    for i in xrange(nChunks):
        # Find out how many frames remain in the file.  If this number is
        # larger than the maximum of frames we can work with at a time
        # (maxFrames), only deal with that chunk
        framesRemaining = nFrames - i*maxFrames
        if framesRemaining > maxFrames:
            framesWork = maxFrames
        else:
            framesWork = framesRemaining
        print "Working on chunk %i, %i frames remaining" % (i+1, framesRemaining)

        count = {0:0, 1:0, 2:0, 3:0}
        data = numpy.zeros((4, framesWork*4096/beampols), dtype=numpy.csingle)

        # If there are fewer frames than we need to fill an FFT, skip this chunk
        if data.shape[1] < LFFT:
            break

        # Inner loop that actually reads the frames into the data array
        print "Working on %.1f ms of data" % ((framesWork*4096/beampols/srate)*1000.0)

        for j in xrange(framesWork):
            # Read in the next frame and anticipate any problems that could occur
            try:
                cFrame = drx.readFrame(fh, Verbose=False)
            except errors.eofError:
                done = True
                break
            except errors.syncError:
                continue

            beam,tune,pol = cFrame.parseID()
            aStand = 2*(tune-1) + pol
            if j == 0:
                cTime = cFrame.getTime()

            try:
                data[aStand, count[aStand]*4096:(count[aStand]+1)*4096] = cFrame.data.iq
                count[aStand] += 1
            except ValueError:
                raise RuntimeError("Invalid Shape")

        # Save out some easy stuff
        dataSets['obs%i-time' % obsID][i] = cTime

        if config['countSats']:
            sats = ((data.real**2 + data.imag**2) >= 49).sum(axis=1)
            dataSets['obs%i-Saturation1' % obsID][i,:] = sats[0:2]
            dataSets['obs%i-Saturation2' % obsID][i,:] = sats[2:4]
        else:
            dataSets['obs%i-Saturation1' % obsID][i,:] = -1
            dataSets['obs%i-Saturation2' % obsID][i,:] = -1

        # Calculate the Stokes spectra for this block of data
        if clip1 == clip2:
            freq, tempSpec1 = fxc.StokesMaster(data, antennas, LFFT=LFFT, window=config['window'], verbose=config['verbose'], SampleRate=srate, ClipLevel=clip1)

            for t in (1,2):
                for l,p in enumerate(dataProducts):
                    dataSets['obs%i-%s%i' % (obsID, p, t)][i,:] = tempSpec1[l,t-1,:]
        else:
            freq, tempSpec1 = fxc.StokesMaster(data[:2,:], antennas[:2], LFFT=LFFT, window=config['window'], verbose=config['verbose'], SampleRate=srate, ClipLevel=clip1)
            freq, tempSpec2 = fxc.StokesMaster(data[2:,:], antennas[2:], LFFT=LFFT, window=config['window'], verbose=config['verbose'], SampleRate=srate, ClipLevel=clip2)

            for l,p in enumerate(dataProducts):
                dataSets['obs%i-%s%i' % (obsID, p, 1)][i,:] = tempSpec1[l,0,:]
                dataSets['obs%i-%s%i' % (obsID, p, 2)][i,:] = tempSpec2[l,0,:]

        # We don't really need the data array anymore, so delete it
        del data

        # Are we done yet?
        if done:
            break

    return True
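# ---------------------------------------------------------------------------
# Hedged sketch (illustration, not the LSL implementation): StokesMaster
# returns [I, Q, U, V] spectra per tuning.  For linear feeds the usual
# definitions in terms of the per-channel voltage spectra X and Y are shown
# below; note that sign conventions for U and V vary between references.

def stokesFromXY(X, Y):
    """Stokes I, Q, U, V from complex voltage spectra X and Y (numpy arrays)."""
    XX = numpy.abs(X)**2           # auto-power, X polarization
    YY = numpy.abs(Y)**2           # auto-power, Y polarization
    XY = X * Y.conj()              # cross-power
    return XX + YY, XX - YY, 2.0*XY.real, -2.0*XY.imag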
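# ---------------------------------------------------------------------------
# Hedged sketch (assumed, not present in this section): LSL command-line
# scripts of this era normally finish with an entry-point guard that hands
# the command-line arguments to main(), e.g.:

if __name__ == "__main__":
    main(sys.argv[1:])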