def test_converted_data_content():
    """
    Compare the converted (Kluster) dataset against the raw .all file it was built from: the
    converted ping, attitude and navigation record counts must match the raw file's counts.
    """
    if not os.path.exists(datapath):
        print('Please run test_process_testfile first')
        return  # nothing to compare against; bail out instead of crashing in reload_data below
    out = reload_data(datapath)
    testfile_path, expected_output = get_testfile_paths()
    ad = par3.AllRead(testfile_path)
    ad.mapfile()
    try:
        # assert that they have the same number of pings (datagram type 78 in the raw file)
        assert out.multibeam.raw_ping[0].time.shape[0] == ad.map.getnum(78)
        # assert that there are the same number of attitude packets (type 65), summed
        # across every packet since each packet carries multiple entries
        totatt = 0
        for i in range(ad.map.getnum(65)):
            rec = ad.getrecord(65, i)
            totatt += rec.data['Time'].shape[0]
        assert out.multibeam.raw_att.time.shape[0] == totatt
        # assert that there are the same number of navigation packets (type 110)
        totnav = 0
        for i in range(ad.map.getnum(110)):
            rec = ad.getrecord(110, i)
            totnav += rec.data['Time'].shape[0]
        assert out.multibeam.raw_nav.time.shape[0] == totnav
    finally:
        ad.close()  # previously leaked the open raw-file handle
        out.close()
    out = None
def fast_read_multibeam_metadata(multibeam_file: str, gather_times: bool = True, gather_serialnumber: bool = True):
    """
    Return metadata from a multibeam file using the fast read methods.  Fast read methods allow getting small
    amounts of data without reading the entire file.  These include: the start and end time of the file in utc
    seconds, the serial number(s) of the multibeam sonar in the file.  Use gather_times and gather_serialnumber
    to select which/both of these options.

    Multibeam file must be one of the multibeam files that we support in Kluster, see
    kluster_variables.supported_multibeam

    Parameters
    ----------
    multibeam_file
        multibeam file
    gather_times
        if True, returns the start and end time of the file
    gather_serialnumber
        if True, returns the serial number(s) of the multibeam sonar in the file

    Returns
    -------
    str
        the type of multibeam file discovered, i.e. 'kongsberg_all'
    list
        [UTC start time in seconds, UTC end time in seconds] or None if gather_times is False
    list
        [serialnumber: int, secondaryserialnumber: int, sonarmodelnumber: str] or None if gather_serialnumber is False
    """
    _check_multibeam_file(multibeam_file)
    extension = os.path.splitext(multibeam_file)[1]
    if extension == '.all':
        mtype = 'kongsberg_all'
        reader = par3.AllRead(multibeam_file)
        start_end = reader.fast_read_start_end_time() if gather_times else None
        serialnums = reader.fast_read_serial_number() if gather_serialnumber else None
        reader.close()
    elif extension == '.kmall':
        mtype = 'kongsberg_kmall'
        reader = kmall.kmall(multibeam_file)
        start_end = reader.fast_read_start_end_time() if gather_times else None
        serialnums = reader.fast_read_serial_number() if gather_serialnumber else None
        reader.closeFile()
    else:
        raise NotImplementedError(
            'fqpr_drivers: {} is supported by kluster, but not currently supported by fast_read_multibeam_metadata'
            .format(multibeam_file))
    return mtype, start_end, serialnums
def file_browse(self, e):
    """
    Browse to a raw file, detect its type from the file extension, build the matching driver
    object and populate the function combobox with the actions supported for that type.

    Parameters
    ----------
    e
        Qt event, unused
    """
    msg, file_path = RegistryHelpers.GetFilenameFromUserQT(self, RegistryKey='Kluster',
                                                           Title='Select a raw file to read', AppName='\\analyzer',
                                                           bMulti=False, bSave=False, fFilter='all files (*.*)')
    if file_path:
        self.fil_text.setText(file_path)
        self.filename = file_path
        fext = os.path.splitext(self.filename)[1]
        self.functioncombobox.clear()
        if fext == '.all':
            self.filetype = 'kongsberg_all'
            self.fileobject = par3.AllRead(self.filename)
            self.functioncombobox.addItems(['read_first_fifty_records', 'kluster_read_test', 'bscorr_generation'])
        elif fext == '.kmall':
            self.filetype = 'kongsberg_kmall'
            self.fileobject = kmall.kmall(self.filename)
            self.functioncombobox.addItems(['read_first_fifty_records'])
        elif fext in ['.out', '.sbet', '.smrmsg']:  # was 'smrmsg' (no dot), which splitext can never produce
            if sbet.is_sbet(file_path):
                self.filetype = 'applanix_sbet'
                self.fileobject = sbet.read(file_path, numcolumns=17)
                self.functioncombobox.addItems(['read_first_fifty_records'])
            elif sbet.is_smrmsg(file_path):
                self.filetype = 'applanix_smrmsg'
                self.fileobject = sbet.read(file_path, numcolumns=10)
                self.functioncombobox.addItems(['read_first_fifty_records'])
            else:
                print(f'Not a recognized file type, tried sbet and smrmsg: {self.filename}')
                self.filetype = ''
                self.fileobject = None
        else:
            try:
                int(fext[1:])  # posmv files use a numeric extension; raises ValueError otherwise
                self.filetype = 'posmv'
                self.fileobject = PCSio.PCSFile(self.filename)
                self.functioncombobox.addItems(['read_first_fifty_records'])
            except Exception:  # was a bare except; don't swallow SystemExit/KeyboardInterrupt
                print(f'Not a recognized file type: {self.filename}')
                self.filetype = ''
                self.fileobject = None
        self.ftypelabel.setText(f'File Type: {self.filetype}')
def sequential_read_multibeam(multibeam_file: str, start_pointer: int = 0, end_pointer: int = 0,
                              first_installation_rec: bool = False):
    """
    Run the sequential read function built in to all multibeam drivers in Kluster.  Sequential read takes a
    multibeam file (with an optional start/end pointer in bytes) and reads all the datagrams of interest
    sequentially, skipping any that are not in the required datagram lookups.

    Parameters
    ----------
    multibeam_file
        multibeam file of interest
    start_pointer
        the start pointer that we start the read at
    end_pointer
        the end pointer where we finish the read
    first_installation_rec
        if True, will just read the installation parameters entry and finish

    Returns
    -------
    dict
        nested dictionary object containing all the numpy arrays for the data of interest
    """
    _check_multibeam_file(multibeam_file)
    extension = os.path.splitext(multibeam_file)[1]
    if extension == '.all':
        reader = par3.AllRead(multibeam_file, start_ptr=start_pointer, end_ptr=end_pointer)
        records = reader.sequential_read_records(first_installation_rec=first_installation_rec)
        reader.close()
        return records
    if extension == '.kmall':
        reader = kmall.kmall(multibeam_file)
        # kmall doesnt have ping-wise serial number in header, we have to provide it from install params
        translator = reader.fast_read_serial_number_translator()
        records = reader.sequential_read_records(start_ptr=start_pointer, end_ptr=end_pointer,
                                                 first_installation_rec=first_installation_rec,
                                                 serial_translator=translator)
        reader.closeFile()
        return records
    raise NotImplementedError(
        'fqpr_drivers: {} is supported by kluster, but not currently supported by sequential_read_multibeam'
        .format(multibeam_file))
def test_converted_data_content(self):
    """
    The converted dataset must carry the same number of pings and attitude entries as the raw
    .all file it was converted from.
    """
    converted = reload_data(self.datapath)
    rawfile = par3.AllRead(self.testfile)
    rawfile.mapfile()
    # ping count: converted time dimension vs number of type-78 records in the raw file
    assert converted.multibeam.raw_ping[0].time.shape[0] == rawfile.map.getnum(78)
    # attitude count: total entries across every type-65 packet in the raw file
    attitude_total = sum(rawfile.getrecord(65, idx).data['Time'].shape[0]
                         for idx in range(rawfile.map.getnum(65)))
    assert converted.multibeam.raw_att.time.shape[0] == attitude_total
    rawfile.close()
    converted.close()
def file_browse_two(self, e):
    """
    Browse to a second raw file for comparison.  The second file must be the same type as the
    first (self.filetype); on success builds the matching driver object for it.

    Parameters
    ----------
    e
        Qt event, unused
    """
    msg, file_path = RegistryHelpers.GetFilenameFromUserQT(self, RegistryKey='Kluster',
                                                           Title='Select a raw file to read', AppName='\\analyzer',
                                                           bMulti=False, bSave=False, fFilter='all files (*.*)')
    if file_path:
        fext = os.path.splitext(file_path)[1]
        if self.filetype == 'kongsberg_all' and fext != '.all':
            print('Expected .all file, got {}'.format(file_path))
        elif self.filetype == 'kongsberg_kmall' and fext != '.kmall':
            print('Expected .kmall file, got {}'.format(file_path))
        elif self.filetype in ['applanix_sbet', 'applanix_smrmsg'] and fext not in ['.out', '.sbet', '.smrmsg']:
            # was a copy-paste of the .kmall message; report the extensions actually expected here
            print('Expected .out/.sbet/.smrmsg file, got {}'.format(file_path))
        else:
            self.fil_two_text.setText(file_path)
            self.filenametwo = file_path
            if fext == '.all':
                self.fileobjecttwo = par3.AllRead(file_path)
            elif fext == '.kmall':
                self.fileobjecttwo = kmall.kmall(file_path)
            elif fext in ['.out', '.sbet', '.smrmsg']:  # was 'smrmsg' (no dot), which splitext can never produce
                if sbet.is_sbet(file_path):
                    self.fileobjecttwo = sbet.read(file_path, numcolumns=17)
                elif sbet.is_smrmsg(file_path):
                    self.fileobjecttwo = sbet.read(file_path, numcolumns=10)
                else:
                    print(f'Not a recognized file type, tried sbet and smrmsg: {file_path}')
                    self.fileobjecttwo = None
            else:
                try:
                    int(fext[1:])  # posmv files use a numeric extension; raises ValueError otherwise
                    # was PCSio.PCSFile(self.filename), which re-opened the FIRST file instead of the one just chosen
                    self.fileobjecttwo = PCSio.PCSFile(file_path)
                except Exception:  # was a bare except; don't swallow SystemExit/KeyboardInterrupt
                    print(f'Not a recognized file type: {file_path}')
                    self.fileobjecttwo = None
def _xyz_from_allfile(filname: str):
    """
    function using par to pull out the xyz88 datagram and return the xyz for each ping.  Times returned are a
    sum of ping time and delay time (to match Kluster, I do this so that times are unique across sector
    identifiers).

    Parameters
    ----------
    filname
        str, path to .all file

    Returns
    -------
    np.array
        2d numpy array (time, beam) of the alongtrack offsets from the xyz88 record
    np.array
        2d numpy array (time, beam) of the acrosstrack offsets from the xyz88 record
    np.array
        2d numpy array (time, beam) of the depth offsets from the xyz88 record
    np.array
        numpy array of the times from the xyz88 record
    np.array
        numpy array of the ping counter index from the xyz88 record
    """
    pfil = par3.AllRead(filname)
    pfil.mapfile()
    # one entry in packdir['88'] per xyz88 datagram; beam count taken from the first record
    num88 = len(pfil.map.packdir['88'])
    numbeams = pfil.getrecord(88, 0).data['Depth'].shape[0]
    # preallocate (ping, beam) arrays, filled in place below
    dpths = np.zeros((num88, numbeams))
    xs = np.zeros((num88, numbeams))
    ys = np.zeros((num88, numbeams))
    tms = np.zeros(num88)
    cntrs = np.zeros(num88)
    for i in range(num88):
        try:
            # the type-88 and type-78 records are paired by the same index i so the delay
            # from the 78 record can be added to the 88 timestamp
            rec88 = pfil.getrecord(88, i)
            rec78 = pfil.getrecord(78, i)
            dpths[i, :] = rec88.data['Depth']
            ys[i, :] = rec88.data['AcrossTrack']
            xs[i, :] = rec88.data['AlongTrack']
            tms[i] = rec88.time + rec78.tx_data.Delay[
                0]  # match par sequential_read, ping time = timestamp + delay
            cntrs[i] = rec88.Counter
        except IndexError:
            # fewer 78 records than 88 records; stop pairing and keep what we have
            break
    # ideally this would do it, but we have to sort by prim/stbd arrays when cntr/times are equal between heads for dual head
    cntrsorted = np.argsort(cntrs)
    # reorder every output array by ping counter so they stay aligned with each other
    tms = tms[cntrsorted]
    xs = xs[cntrsorted]
    ys = ys[cntrsorted]
    dpths = dpths[cntrsorted]
    cntrs = cntrs[cntrsorted]
    pfil.close()
    return xs, ys, dpths, tms, cntrs