def parse_gnuradio_header(header_file, verbose=False):
    headers = []
    index = 0
    rx_time = datetime.timedelta(seconds=0)
    with open(header_file, "rb") as handle:
        file_length = os.path.getsize(header_file)
        while True:
            if file_length - handle.tell() < parse_file_metadata.HEADER_LENGTH:
                break

            # read and deserialize the next base header
            header_str = handle.read(parse_file_metadata.HEADER_LENGTH)
            try:
                header = pmt.deserialize_str(header_str)
            except RuntimeError:
                break
            info = parse_file_metadata.parse_header(header, verbose)

            if info["nbytes"] == 0:
                break

            # read and deserialize the extra header, if present
            if info["extra_len"] > 0:
                extra_str = handle.read(info["extra_len"])
                if len(extra_str) == 0:
                    break
                try:
                    extra = pmt.deserialize_str(extra_str)
                except RuntimeError:
                    break
                parse_file_metadata.parse_extra_dict(extra, info, verbose)

            # derive rx_time and sample index of this segment from the previous one
            if len(headers) > 0:
                last_rx_time = headers[-1]["rx_time"]
                samples_delta = headers[-1]["nitems"] / headers[-1]["rx_rate"]
                samples_delta = datetime.timedelta(seconds=samples_delta)
                info["rx_time"] = last_rx_time + samples_delta
                info["index"] = index
                index = index + info["nitems"]
            else:
                info["rx_time"] = datetime.timedelta(seconds=0.0)
                info["index"] = 0
                index = info["nitems"]

            headers.append(info)
    return headers
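# Hedged usage sketch for parse_gnuradio_header() above (not from the original
# source). It assumes the imports the function itself relies on and a
# hypothetical detached-header file "capture.raw.hdr"; it simply prints the
# sample index, item count, start time, and rate of each metadata segment.
import datetime
import os

import pmt
from gnuradio.blocks import parse_file_metadata

segments = parse_gnuradio_header("capture.raw.hdr", verbose=False)
for seg in segments:
    print("index=%d nitems=%d rx_time=%s rate=%.0f Hz"
          % (seg["index"], seg["nitems"], seg["rx_time"], seg["rx_rate"]))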
def find_package(self):
    while (int(math.floor(self.new_timestamp)) - int(math.ceil(self.info['rx_time']))) > 0:
        # read out next header bytes
        self.hdr_start = self.handle.tell()
        self.header_str = self.handle.read(parse_file_metadata.HEADER_LENGTH)
        if len(self.header_str) == 0:
            break

        # Convert from string to PMT (should be a dictionary)
        try:
            print("\033[1;0H")
            self.header = pmt.deserialize_str(self.header_str)
        except Exception as e:
            print(ERROR, e)
        else:
            print(SUCCESS, "Found base header.")

        # Get base header info
        self.info = parse_file_metadata.parse_header(self.header, True)

        # Get extra header length
        if self.info["extra_len"] > 0:
            self.extra_str = self.handle.read(self.info["extra_len"])
            if len(self.extra_str) == 0:
                break
            # Read extra header
            try:
                self.extra = pmt.deserialize_str(self.extra_str)
            except Exception as e:
                print(ERROR, e)
            else:
                print(SUCCESS, "Found extra header.")
                print("\nExtra Header:")
                self.extra_info = parse_file_metadata.parse_extra_dict(
                    self.extra, self.info, True)

        # move pointer past this header block and its payload
        self.nread += parse_file_metadata.HEADER_LENGTH + self.info["extra_len"] + self.info['nbytes']
        self.handle.seek(self.nread, 0)
        print("\n\n")
def parseHeaders(self):
    # read out header bytes into a string
    header_str = self.inFile.read(parse_file_metadata.HEADER_LENGTH)

    # Convert from created string to PMT dict
    try:
        header = pmt.deserialize_str(header_str)
    except RuntimeError:
        logging.info(
            f"[Process: {self.radioNum}] Could not deserialize header\n")
        self.stop()

    # Convert from PMT dict to Python dict
    info = parse_file_metadata.parse_header(header)

    if info["extra_len"] > 0:
        extra_str = self.inFile.read(info["extra_len"])

        # Extra header info
        try:
            extra = pmt.deserialize_str(extra_str)
        except RuntimeError:
            logging.info(
                f"[Process: {self.radioNum}] Could not deserialize extra headers\n")
            self.stop()

        info = parse_file_metadata.parse_extra_dict(extra, info)

    return info
def create_package(self):
    if int(math.floor(self.new_timestamp)) - int(math.ceil(self.info['rx_time'])) == 0:
        self.package_name = self.package_prefix + "_{0}.raw".format(self.new_timestamp)
        self.package_fullpath = self.package_path + self.package_name
        self.package = open(self.package_fullpath, "wb")

        for i in range(0, self.package_amount):
            # read out next header bytes
            self.hdr_start = self.handle.tell()
            self.header_str = self.handle.read(parse_file_metadata.HEADER_LENGTH)
            if len(self.header_str) == 0:
                break

            # Convert from string to PMT (should be a dictionary)
            try:
                print("\033[1;0H")
                self.header = pmt.deserialize_str(self.header_str)
            except Exception as e:
                print(ERROR, e)
            else:
                self.package.write(self.header_str)
                print(SUCCESS, "Write base header.")

            # Get base header info
            self.info = parse_file_metadata.parse_header(self.header, True)

            # Get extra header length
            if self.info["extra_len"] > 0:
                self.extra_str = self.handle.read(self.info["extra_len"])
                if len(self.extra_str) == 0:
                    break
                # Read extra header
                try:
                    self.extra = pmt.deserialize_str(self.extra_str)
                except Exception as e:
                    print(ERROR, e)
                else:
                    self.package.write(self.extra_str)
                    print(SUCCESS, "Write extra header.")
                    print("\nExtra Header:")
                    extra_info = parse_file_metadata.parse_extra_dict(
                        self.extra, self.info, True)

            # copy the payload into the package file
            self.data_str = self.handle.read(self.info['nbytes'])
            self.package.write(self.data_str)

            # move pointer past this header block and its payload
            self.nread += parse_file_metadata.HEADER_LENGTH + self.info["extra_len"] + self.info['nbytes']
            self.handle.seek(self.nread, 0)
            print("\n\n")

        self.package.close()
        print("\033[21;0H%15s %15s %15s" %
              ("| Timestamp|", "|Upload_Bytes|", "| All_Bytes|"))

        # hand the finished package off to the S3 upload thread
        self.uploadtask = S3_uploadtask(self.config, self.uploadtaskID,
                                        self.new_timestamp, self.package_name,
                                        self.package_fullpath)
        self.uploadtask.setDaemon(True)
        self.uploadtask.start()
        self.uploadtaskID += 1
        self.old_timestamp = self.new_timestamp
def read_gnuradio_header_element(file_handle):
    """
    Read a header element/header structure from the current position of a
    GNU Radio header file. The header file contains multiple header elements,
    one for each issued stream tag.

    Parameters
    ----------
    file_handle:
        File handle for the header file, as obtained using open().

    Returns
    -------
    info: dict
        Header structure.
    header_length: int
        Length of the header element in bytes.
    """
    header_str = file_handle.read(parse_file_metadata.HEADER_LENGTH)
    if len(header_str) == 0:
        return None, 0

    header = pmt.deserialize_str(header_str)
    info = parse_file_metadata.parse_header(header, False)

    # get extra information
    if info["extra_len"] > 0:
        extra_str = file_handle.read(info["extra_len"])
        extra = pmt.deserialize_str(extra_str)
        extra_info = parse_file_metadata.parse_extra_dict(extra, info, False)

    return info, parse_file_metadata.HEADER_LENGTH + info["extra_len"]
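# Hedged usage sketch for read_gnuradio_header_element() above (not from the
# original source): "capture.raw.hdr" is a hypothetical detached-header file.
# The loop walks the header file one element at a time until the function
# signals end-of-file by returning (None, 0).
with open("capture.raw.hdr", "rb") as hdr_handle:
    while True:
        info, hdr_len = read_gnuradio_header_element(hdr_handle)
        if info is None:
            break
        print("segment of %d items starting at %.6f s" % (info["nitems"], info["rx_time"]))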
def main(filename):
    # init
    nheaders = 0
    nread = 0
    while True:
        # open the file and seek to the next unread header
        handle = open(filename, "rb")
        handle.seek(nread, 0)
        hdr_start = handle.tell()

        # read the base header
        header_str = handle.read(parse_file_metadata.HEADER_LENGTH)
        header = pmt.deserialize_str(header_str)
        print("Package {0} Header".format(nheaders))
        info = parse_file_metadata.parse_header(header, True)

        # read the extra header
        if info["extra_len"] > 0:
            extra_str = handle.read(info["extra_len"])
            if len(extra_str) == 0:
                break
            extra = pmt.deserialize_str(extra_str)
            extra_info = parse_file_metadata.parse_extra_dict(extra, info, True)

        # read the data
        data_str = handle.read(info['nbytes'])

        # prepare for next package
        if info['nbytes'] != 0:
            nheaders += 1
            nread += parse_file_metadata.HEADER_LENGTH + info["extra_len"] + info['nbytes']
        handle.close()

        # create package file
        package_name = "Package_{0}.raw".format(int(math.ceil(info['rx_time'])))
        package_path = "./Data/package/" + package_name
        package = open(package_path, "wb")
        package.write(header_str)
        package.write(extra_str)
        package.write(data_str)
        package.close()

        # wait until the next complete package has been appended to the input file
        while True:
            size = os.path.getsize(filename)
            if parse_file_metadata.HEADER_LENGTH + info["extra_len"] + info['nbytes'] < size - nread:
                break
            time.sleep(0.5)
        os.system("clear")
def __init__(self):
    gr.top_block.__init__(self, "Meta To Data")

    ##################################################
    # Variables
    ##################################################
    meta_file = "./Data/meta.raw"
    data_file = "./Data/data.raw"
    header_file = "./Data/header.txt"

    ##################################################
    # Blocks
    ##################################################
    self.blocks_file_sink_0 = blocks.file_sink(gr.sizeof_gr_complex * 1,
                                               data_file, False)
    self.blocks_file_sink_0.set_unbuffered(False)
    self.blocks_file_meta_source_0 = blocks.file_meta_source(
        meta_file, True, False, '')

    ##################################################
    # Connections
    ##################################################
    self.connect((self.blocks_file_meta_source_0, 0),
                 (self.blocks_file_sink_0, 0))

    ##################################################
    # Get header info
    ##################################################
    print("##################################################")
    print("#                  Header Info                   #")
    print("##################################################")
    handle = open(meta_file, "rb")
    hdr_start = handle.tell()
    header_str = handle.read(parse_file_metadata.HEADER_LENGTH)
    header = pmt.deserialize_str(header_str)
    info = parse_file_metadata.parse_header(header, True)
    if info["extra_len"] > 0:
        extra_str = handle.read(info["extra_len"])
        extra = pmt.deserialize_str(extra_str)
        extra_info = parse_file_metadata.parse_extra_dict(extra, info, True)
    handle.close()

    # dump the rate, frequency and start time to a plain-text header file
    header = open(header_file, "w")
    rx_rate = extra_info['rx_rate']
    rx_freq = pmt.to_python(extra_info['rx_freq'])
    rx_time = extra_info['rx_time']
    header.write("%.00f\n" % rx_rate)
    header.write("%.00f\n" % rx_freq)
    header.write("%.22f\n" % rx_time)
    header.close()
    print()
def parse_metadata(self, filepath):
    head_file = filepath + ".hdr"
    hlen = parse_file_metadata.HEADER_LENGTH
    headers = []
    extra_tags = []
    overs = []
    if not os.path.isfile(head_file):
        return {'total power': {}}, []

    with open(head_file, 'rb') as fd:
        # read one base header at a time until the file is exhausted
        for h_str in iter(partial(fd.read, hlen), b''):
            h_pmt = pmt.deserialize_str(h_str)
            h_parsed = parse_file_metadata.parse_header(h_pmt, False)
            headers.append(h_parsed)
            if h_parsed["extra_len"] > 0:
                extra_str = fd.read(h_parsed["extra_len"])
                if len(extra_str) == 0:
                    break
                extra = pmt.deserialize_str(extra_str)
                e_parsed = parse_file_metadata.parse_extra_dict(extra, h_parsed, False)
                extra_tags.append(e_parsed)

    # Load the extra data into the tagging system for the LightCurve.
    tags = pd.DataFrame({'total power': [{} for _ in range(self.data_len)]})
    nums_done = 0
    segment_start_time = headers[0]['rx_time']
    segments = 1
    for i in range(len(extra_tags)):
        j = int(nums_done + extra_tags[i]['nitems'])
        if not extra_tags[i]['rx_time'] == segment_start_time:
            # the stream restarted: record the gap between the expected and
            # the reported rx_time
            should = segment_start_time + j / extra_tags[i]['rx_rate']
            miss_sec = extra_tags[i]['rx_time'] - should
            overs.append({'new_seg': j, 'new_time': extra_tags[i]['rx_time']})
            segment_start_time = extra_tags[i]['rx_time']
            segments += 1
        tags['total power'][j] = extra_tags[i]
        nums_done += extra_tags[i]['nitems']
    new = self.import_tags(extra_tags, 'total power')
    return {'total power': headers[0]}, overs
def main(filename, detached=False):
    handle = open(filename, "rb")
    nheaders = 0
    nread = 0
    while True:
        # read out next header bytes
        hdr_start = handle.tell()
        header_str = handle.read(parse_file_metadata.HEADER_LENGTH)
        if len(header_str) == 0:
            break

        # Convert from string to PMT (should be a dictionary)
        try:
            header = pmt.deserialize_str(header_str)
        except RuntimeError:
            sys.stderr.write(
                "Could not deserialize header: invalid or corrupt data file.\n")
            sys.exit(1)

        print("HEADER {0}".format(nheaders))
        info = parse_file_metadata.parse_header(header, True)
        print("%.22f" % (info["rx_time"]))

        if info["extra_len"] > 0:
            extra_str = handle.read(info["extra_len"])
            if len(extra_str) == 0:
                break
            try:
                extra = pmt.deserialize_str(extra_str)
            except RuntimeError:
                sys.stderr.write(
                    "Could not deserialize extras: invalid or corrupt data file.\n")
                sys.exit(1)

            print("\nExtra Header:")
            extra_info = parse_file_metadata.parse_extra_dict(extra, info, True)

        # advance past the header (and the payload too, unless detached)
        nheaders += 1
        nread += parse_file_metadata.HEADER_LENGTH + info["extra_len"]
        if not detached:
            nread += info['nbytes']
        handle.seek(nread, 0)
        print("\n\n")
def gr2fits(filename, merge=True, verbose=False):
    try:
        handle = open(filename, 'rb')
    except IOError:
        raise IOError('File %s does not exist' % filename)
    nheaders = 0
    nread = 0
    baseband = []
    fitsout = fits.HDUList()
    while True:
        """
        /opt/local/bin/gr_read_file_metadata
        note that there can be > 1 metadata blocks
        I think they can come every 1e6 items
        """
        # read out next header bytes
        hdr_start = handle.tell()
        header_str = handle.read(parse_file_metadata.HEADER_LENGTH)
        if len(header_str) == 0:
            break

        # Convert from string to PMT (should be a dictionary)
        try:
            header = pmt.deserialize_str(header_str)
        except RuntimeError:
            raise IOError(
                "Could not deserialize header: invalid or corrupt data file.\n")

        if verbose:
            print("HEADER {0}".format(nheaders))
        info = parse_file_metadata.parse_header(header, verbose)

        if info["extra_len"] > 0:
            extra_str = handle.read(info["extra_len"])
            if len(extra_str) == 0:
                break
            try:
                extra = pmt.deserialize_str(extra_str)
            except RuntimeError:
                sys.stderr.write(
                    "Could not deserialize extras: invalid or corrupt data file.\n")
                break
            if verbose:
                print("\nExtra Header:")
            extra_info = parse_file_metadata.parse_extra_dict(extra, info, verbose)

        nheaders += 1
        nread += parse_file_metadata.HEADER_LENGTH + info["extra_len"]
        handle.seek(nread, 0)

        h = extra_info
        if h['size'] == 8 and h['cplx']:
            # 8-byte complex samples: read the payload as complex64
            dtype = np.complex64
            d = np.fromfile(handle, dtype=dtype, count=h['nitems'])
            t0 = np.arange(2 * len(d)) / h['rx_rate'] / 2
            t = np.arange(len(d)) / h['rx_rate']
            nread += info['nbytes']
            handle.seek(nread, 0)
            fitsout.append(fits.ImageHDU(data=np.c_[d.real, d.imag]))
            fitsout[-1].header['NITEMS'] = (h['nitems'],
                                            'Number of complex samples')
            fitsout[-1].header['RATE'] = (h['rx_rate'], '[Hz] sample rate')
            fitsout[-1].header['RX_FREQ'] = (pmt.to_float(h['rx_freq']) / 1e6,
                                             '[MHz] Radio frequency')
            fitsout[-1].header['RX_TIME'] = (h['rx_time'],
                                             '[s] Time of start of block')

    if merge:
        # concatenate all of the per-block HDUs into a single primary HDU
        totallen = 0
        for i in range(0, len(fitsout)):
            totallen += fitsout[i].header['NAXIS2']
        d = np.zeros((totallen, 2), dtype=fitsout[1].data.dtype)
        nmax = 0
        for i in range(0, len(fitsout)):
            d[nmax:nmax + fitsout[i].header['NAXIS2']] = fitsout[i].data
            nmax += fitsout[i].header['NAXIS2']
        newfitsout = fits.HDUList()
        newfitsout.append(fits.PrimaryHDU(data=d))
        newfitsout[0].header = fitsout[1].header
        newfitsout[0].header['NITEMS'] = totallen
        newfitsout[0].header['EXPTIME'] = (d.shape[0] / newfitsout[0].header['RATE'],
                                           '[s] Duration of file')
        fitsout = newfitsout

    fitsout.verify('silentfix')
    if os.path.exists(filename + '.fits'):
        os.remove(filename + '.fits')
    fitsout.writeto(filename + '.fits')
    print('Wrote %s.fits' % filename)
    return fitsout
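# Hedged usage sketch for gr2fits() above (not from the original source):
# "capture.raw" is a hypothetical inline-metadata capture. The call converts
# it to "capture.raw.fits" and returns the resulting astropy HDUList; with
# merge=True the primary HDU carries the EXPTIME keyword written above.
hdulist = gr2fits("capture.raw", merge=True, verbose=True)
print(hdulist[0].header['EXPTIME'])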