def time_from_string(
    date_string, time_string, ms_time_string, timezone_offset, timeoffset
):
    # read in date
    yyyy = int(date_string[0:4])
    mm = int(date_string[4:6])
    dd = int(date_string[6:8])

    # read in time
    hour = int(time_string[0:2])
    mins = int(time_string[2:4])
    secs = int(time_string[4:6])

    # fractional seconds are given either as microseconds (6 digits)
    # or milliseconds (3 digits)
    if len(ms_time_string) == 6:
        usec = int(ms_time_string[0:6])
    elif len(ms_time_string) == 3:
        usec = int(ms_time_string[0:3]) * 1000
    else:
        # guard against unexpected field lengths, which previously left usec undefined
        usec = 0

    if yyyy < 2000:
        return 0

    epoch_time = date_time_to_epoch(yyyy, mm, dd, hour, mins, secs, timezone_offset)
    epoch_timestamp = float(epoch_time + usec / 1e6 + timeoffset)
    return epoch_timestamp
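# A minimal usage sketch for time_from_string (hypothetical values, assuming the
# date field is "YYYYMMDD", the time field is "HHMMSS" and the fractional field is
# either microseconds, e.g. "123456", or milliseconds, e.g. "789"):
#
# >>> time_from_string("20190906", "123456", "789", timezone_offset=0, timeoffset=0)
# returns date_time_to_epoch(2019, 9, 6, 12, 34, 56, 0) + 0.789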
def parse_NOC_nmea(mission, vehicle, category, ftype, outpath):
    # parser metadata
    sensor_string = "autosub"
    output_format = ftype
    if category == Category.USBL:
        filepath = mission.usbl.filepath
        timezone = mission.usbl.timezone
        beacon_id = mission.usbl.label
        timeoffset = mission.usbl.timeoffset
        timezone_offset = read_timezone(timezone)
        latitude_reference = mission.origin.latitude
        longitude_reference = mission.origin.longitude

        usbl = Usbl(
            mission.usbl.std_factor,
            mission.usbl.std_offset,
            latitude_reference,
            longitude_reference,
        )
        usbl.sensor_string = sensor_string

        path = get_raw_folder(outpath / ".." / filepath)
        file_list = get_file_list(path)

        data_list = []
        for file in file_list:
            with file.open("r", errors="ignore") as nmea_file:
                for line in nmea_file.readlines():
                    parts = line.split("\t")
                    if len(parts) < 2:
                        continue
                    msg = pynmea2.parse(parts[1])
                    if int(msg.ref_station_id) != beacon_id:
                        continue
                    date_str = line.split(" ")[0]
                    hour_str = str(parts[1]).split(",")[1]
                    yyyy = int(date_str[6:10])
                    mm = int(date_str[3:5])
                    dd = int(date_str[0:2])
                    hour = int(hour_str[0:2])
                    mins = int(hour_str[2:4])
                    secs = int(hour_str[4:6])
                    msec = int(hour_str[7:10])
                    epoch_time = date_time_to_epoch(
                        yyyy, mm, dd, hour, mins, secs, timezone_offset
                    )
                    epoch_timestamp = epoch_time + msec / 1000 + timeoffset
                    msg.timestamp = epoch_timestamp
                    usbl.from_nmea(msg)
                    data = usbl.export(output_format)
                    data_list.append(data)
        return data_list
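# Illustrative note on the input layout assumed by parse_NOC_nmea (hypothetical
# line, not taken from a real log): each record is expected to start with a
# "DD/MM/YYYY ..." prefix, followed by a tab and the NMEA sentence whose second
# field carries "HHMMSS.sss", e.g.
#
#   06/09/2019 ...\t$GPGGA,123456.789,...
#
# which the slicing above turns into yyyy=2019, mm=9, dd=6, hour=12, mins=34,
# secs=56, msec=789 before building the epoch timestamp.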
def convert(self, date, timestr):
    # date is expected as "DD/MM/YYYY" and timestr as "HH:MM:SS"
    yyyy = int(date[6:10])
    mm = int(date[3:5])
    dd = int(date[0:2])
    hour = int(timestr[0:2])
    mins = int(timestr[3:5])
    secs = int(timestr[6:8])
    if hour < 0:
        hour = 0
        mins = 0
        secs = 0
    epoch_time = date_time_to_epoch(yyyy, mm, dd, hour, mins, secs, 0)
    return epoch_time
def parse_NOC_polpred(mission, vehicle, category, ftype, outpath):
    # parser metadata
    sensor_string = "autosub"
    output_format = ftype
    if category == Category.TIDE:
        filepath = mission.tide.filepath
        timezone = mission.tide.timezone
        timeoffset = mission.tide.timeoffset
        timezone_offset = read_timezone(timezone)

        tide = Tide(mission.tide.std_offset)
        tide.sensor_string = sensor_string

        path = get_raw_folder(outpath / ".." / filepath)
        file_list = get_file_list(path)

        data_list = []
        Console.info("... parsing NOC tide data")

        # Data format sample:
        #      Date     Time    Level   Speed  Direc'n
        #                         m      m/s     deg
        #    6/ 9/2019  00:00    0.74    0.14     51
        #    6/ 9/2019  01:00    0.58    0.15     56
        for file in file_list:
            with file.open("r", errors="ignore") as tide_file:
                # skip the six header lines of the file
                for line in tide_file.readlines()[6:]:
                    dd = int(line[0:2])
                    mm = int(line[3:5])
                    yyyy = int(line[6:10])
                    hour = int(line[12:14])
                    mins = int(line[15:17])
                    # current models only provide resolution in minutes
                    secs = 0
                    msec = 0
                    epoch_time = date_time_to_epoch(
                        yyyy, mm, dd, hour, mins, secs, timezone_offset
                    )
                    epoch_timestamp = epoch_time + msec / 1000 + timeoffset
                    tide.epoch_timestamp = epoch_timestamp
                    tide.height = float(line[22:28])
                    tide.height_std = tide.height * tide.height_std_factor
                    data = tide.export(output_format)
                    data_list.append(data)
        return data_list
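# Worked example of the fixed-column slicing above, using the sample row from the
# data-format comment: line[0:2] -> day, line[3:5] -> month, line[6:10] -> year,
# line[12:14] -> hour, line[15:17] -> minutes, and line[22:28] -> tide height in
# metres. The column positions are inferred from that sample; files with a
# different layout would need the slices adjusting.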
def timestamp_from_filename(filename, timezone_offset, timeoffset):
    # read in date
    yyyy = 2000 + int(filename[0:2])
    mm = int(filename[2:4])
    dd = int(filename[4:6])

    # read in time
    hour = int(filename[7:9])
    mins = int(filename[9:11])
    secs = int(filename[11:13])
    msec = int(filename[14:18])

    epoch_time = date_time_to_epoch(yyyy, mm, dd, hour, mins, secs, timezone_offset)
    epoch_timestamp = float(epoch_time + msec / 1000 + timeoffset)
    return epoch_timestamp
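# Sketch of the filename layout this helper assumes (hypothetical example): a
# "YYMMDD_HHMMSS_ffff..." pattern where characters 0-5 encode the date, 7-12 the
# time and 14-17 a millisecond counter, e.g.
#
# >>> timestamp_from_filename("190906_123456_0789_xxx.raw", 0, 0)
# is date_time_to_epoch(2019, 9, 6, 12, 34, 56, 0) + 0.789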
def acfr_timestamp_from_filename(filename, timezone_offset, timeoffset):
    filename_split = filename.strip().split("_")
    date_string = filename_split[1]
    time_string = filename_split[2]
    ms_time_string = filename_split[3]

    # read in date
    yyyy = int(date_string[0:4])
    mm = int(date_string[4:6])
    dd = int(date_string[6:8])

    # read in time
    hour = int(time_string[0:2])
    mins = int(time_string[2:4])
    secs = int(time_string[4:6])
    msec = int(ms_time_string[0:3])

    epoch_time = date_time_to_epoch(yyyy, mm, dd, hour, mins, secs, timezone_offset)
    epoch_timestamp = float(epoch_time + msec / 1000 + timeoffset)
    return epoch_timestamp
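# A hedged example of the "_"-separated ACFR-style name this helper expects
# (hypothetical filename): "PR_20190906_123456_789_xxx.tif" splits into the date
# "20190906", the time "123456" and the milliseconds "789", giving
# date_time_to_epoch(2019, 9, 6, 12, 34, 56, timezone_offset) + 0.789 + timeoffset.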
def get(self, hour, mins, secs, msec):
    epoch_time = date_time_to_epoch(
        self.year, self.month, self.day, hour, mins, secs, self.tz_offset
    )
    return epoch_time + msec / 1000 + self.offset
def parse_seaxerocks_images(mission, vehicle, category, ftype, outpath):
    data_list = []
    if ftype == "acfr":
        data_list = ""

    # parser metadata
    class_string = "measurement"
    frame_string = "body"
    category_stereo = "image"
    category_laser = "laser"
    sensor_string = "seaxerocks_3"

    timezone = mission.image.timezone
    timeoffset = mission.image.timeoffset
    camera1_filepath = mission.image.cameras[0].path
    camera2_filepath = mission.image.cameras[1].path
    camera3_filepath = mission.image.cameras[2].path
    camera1_label = mission.image.cameras[0].name
    camera2_label = mission.image.cameras[1].name
    camera3_label = mission.image.cameras[2].name

    epoch_timestamp_stereo = []
    epoch_timestamp_laser = []
    epoch_timestamp_camera1 = []
    epoch_timestamp_camera2 = []
    epoch_timestamp_camera3 = []
    stereo_index = []
    laser_index = []
    camera1_index = []
    camera2_index = []
    camera3_index = []
    camera1_filename = []
    camera2_filename = []
    camera3_filename = []

    # build serial strings from the camera labels, replacing "/" with "_"
    camera1_serial = list(camera1_label)
    camera2_serial = list(camera2_label)
    camera3_serial = list(camera3_label)
    for i in range(1, len(camera1_label)):
        if camera1_label[i] == "/":
            camera1_serial[i] = "_"
    for i in range(1, len(camera2_label)):
        if camera2_label[i] == "/":
            camera2_serial[i] = "_"
    for i in range(1, len(camera3_label)):
        if camera3_label[i] == "/":
            camera3_serial[i] = "_"
    camera1_serial = "".join(camera1_serial)
    camera2_serial = "".join(camera2_serial)
    camera3_serial = "".join(camera3_serial)

    i = 0
    # read in timezone
    # TODO change ALL timezones to integers
    if isinstance(timezone, str):
        if timezone == "utc" or timezone == "UTC":
            timezone_offset = 0
        elif timezone == "jst" or timezone == "JST":
            timezone_offset = 9
    else:
        try:
            timezone_offset = float(timezone)
        except ValueError:
            print(
                "Error: timezone",
                timezone,
                "in mission.cfg not recognised, please enter value from UTC in hours",
            )
            return

    Console.info(" Parsing " + sensor_string + " images...")

    cam1_path = get_raw_folder(outpath / ".." / camera1_filepath / "..")
    cam1_filetime = cam1_path / "FileTime.csv"
    with cam1_filetime.open("r", encoding="utf-8", errors="ignore") as filein:
        for line in filein.readlines():
            stereo_index_timestamps = line.strip().split(",")

            index_string = stereo_index_timestamps[0]
            date_string = stereo_index_timestamps[1]
            time_string = stereo_index_timestamps[2]
            ms_time_string = stereo_index_timestamps[3]

            # read in date
            if date_string != "date":  # ignore header
                stereo_index.append(index_string)
                if len(date_string) != 8:
                    Console.warn(
                        "Date string ({}) in FileTime.csv file has "
                        "unexpected length. Expected length: 8.".format(date_string)
                    )
                yyyy = int(date_string[0:4])
                mm = int(date_string[4:6])
                dd = int(date_string[6:8])

                # read in time
                if len(time_string) != 6:
                    Console.warn(
                        "Time string ({}) in FileTime.csv file has "
                        "unexpected length. Expected length: 6.".format(time_string)
                    )
                hour = int(time_string[0:2])
                mins = int(time_string[2:4])
                secs = int(time_string[4:6])
                msec = int(ms_time_string[0:3])

                epoch_time = date_time_to_epoch(
                    yyyy, mm, dd, hour, mins, secs, timezone_offset
                )
                epoch_timestamp_stereo.append(
                    float(epoch_time + msec / 1000 + timeoffset)
                )

    camera1_list = ["{}.raw".format(i) for i in stereo_index]
    camera2_list = ["{}.raw".format(i) for i in stereo_index]

    for i in range(len(camera1_list)):
        camera1_image = camera1_list[i].split(".")
        camera2_image = camera2_list[i].split(".")
        camera1_index.append(camera1_image[0])
        camera2_index.append(camera2_image[0])

    j = 0
    for i in range(len(camera1_list)):
        # find corresponding timestamp even if some images are deleted
        if camera1_index[i] == stereo_index[j]:
            epoch_timestamp_camera1.append(epoch_timestamp_stereo[j])
            epoch_timestamp_camera2.append(epoch_timestamp_stereo[j])
            date = epoch_to_day(epoch_timestamp_stereo[0])
            if ftype == "acfr":
                camera1_filename.append(
                    "sx3_"
                    + date[2:4]
                    + date[5:7]
                    + date[8:10]
                    + "_image"
                    + str(camera1_index[i])
                    + "_FC.png"
                )
                camera2_filename.append(
                    "sx3_"
                    + date[2:4]
                    + date[5:7]
                    + date[8:10]
                    + "_image"
                    + str(camera2_index[i])
                    + "_AC.png"
                )
            j = j + 1
        elif stereo_index[j] > camera1_index[i]:
            j = j + 1
        else:
            j = j - 1

    if ftype == "oplab":
        camera1_filename = [line for line in camera1_list]
        camera2_filename = [line for line in camera2_list]

    for i in range(len(camera1_list)):
        if ftype == "acfr":
            data = (
                "VIS: "
                + str(float(epoch_timestamp_camera1[i]))
                + " ["
                + str(float(epoch_timestamp_camera1[i]))
                + "] "
                + str(camera1_filename[i])
                + " exp: 0\n"
            )
            data_list += data
            data = (
                "VIS: "
                + str(float(epoch_timestamp_camera2[i]))
                + " ["
                + str(float(epoch_timestamp_camera2[i]))
                + "] "
                + str(camera2_filename[i])
                + " exp: 0\n"
            )
            data_list += data

        if ftype == "oplab":
            data = {
                "epoch_timestamp": float(epoch_timestamp_camera1[i]),
                "class": class_string,
                "sensor": sensor_string,
                "frame": frame_string,
                "category": category_stereo,
                "camera1": [
                    {
                        "epoch_timestamp": float(epoch_timestamp_camera1[i]),
                        "serial": camera1_serial,
                        "filename": str(camera1_filepath + "/" + camera1_filename[i]),
                    }
                ],
                "camera2": [
                    {
                        "epoch_timestamp": float(epoch_timestamp_camera2[i]),
                        "serial": camera2_serial,
                        "filename": str(camera2_filepath + "/" + camera2_filename[i]),
                    }
                ],
            }
            data_list.append(data)

    cam3_path = get_raw_folder(outpath / ".." / camera3_filepath)
    cam3_filetime = cam3_path / "FileTime.csv"
    with cam3_filetime.open("r", encoding="utf-8", errors="ignore") as filein:
        for line in filein.readlines():
            laser_index_timestamps = line.strip().split(",")

            if len(laser_index_timestamps) < 4:
                Console.warn("The laser FileTime.csv is apparently corrupt...")
                continue
            index_string = laser_index_timestamps[0]
            date_string = laser_index_timestamps[1]
            time_string = laser_index_timestamps[2]
            ms_time_string = laser_index_timestamps[3]

            # read in date
            if date_string != "date":  # ignore header
                laser_index.append(index_string)
                yyyy = int(date_string[0:4])
                mm = int(date_string[4:6])
                dd = int(date_string[6:8])

                # read in time
                hour = int(time_string[0:2])
                mins = int(time_string[2:4])
                secs = int(time_string[4:6])
                msec = int(ms_time_string[0:3])

                epoch_time = date_time_to_epoch(
                    yyyy, mm, dd, hour, mins, secs, timezone_offset
                )
                epoch_timestamp_laser.append(
                    float(epoch_time + msec / 1000 + timeoffset)
                )

    # TODO: try to use pandas for all parsers, which should be faster
    camera3_list = ["{}".format(i) for i in laser_index]

    # The LM165 images are saved either as jpg or as tif, and are split into
    # subfolders either at every 1000 or every 10000 images. Find out which
    # convention is used in the current dataset by looking at the files.
    if len(camera3_list) > 0:
        s, extension = determine_extension_and_images_per_folder(
            cam3_path, camera3_list, camera3_label
        )

    for i in range(len(camera3_list)):
        camera3_filename.append(
            "{}/image{}.{}".format(
                camera3_list[i][s:s + 3],
                camera3_list[i],
                extension,
            )
        )
        camera3_index.append(camera3_list[i])

    j = 0
    # find corresponding timestamp even if some images are deleted
    for i in range(len(camera3_filename)):
        if camera3_index[i] == laser_index[j]:
            epoch_timestamp_camera3.append(epoch_timestamp_laser[j])
            j = j + 1
        # Jin: incomplete? it means that laser data is missing for this image
        # file, so no epoch_timestamp data, and do what when this happens?
        elif laser_index[j] > camera3_index[i]:
            j = j + 1
        else:
            # Jin: incomplete and possibly wrong? it means that this laser
            # data is extra, with no accompanying image file, so it should be
            # j+1 till index match?
            j = j - 1

        if ftype == "oplab":
            data = {
                "epoch_timestamp": float(epoch_timestamp_camera3[i]),
                "class": class_string,
                "sensor": sensor_string,
                "frame": frame_string,
                "category": category_laser,
                "serial": camera3_serial,
                "filename": camera3_filepath + "/" + str(camera3_filename[i]),
            }
            data_list.append(data)

    Console.info(" ...done parsing " + sensor_string + " images.")

    return data_list
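# Illustrative FileTime.csv rows assumed by the parser above (hypothetical values):
#
#   index,date,time,ms
#   000123,20190906,123456,789
#
# i.e. an index string followed by "YYYYMMDD", "HHMMSS" and a millisecond field,
# from which the epoch timestamps are built exactly as in the loops above.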
def parse_gaps(mission, vehicle, category, ftype, outpath):
    Console.info(" Parsing GAPS data...")

    # parser metadata
    class_string = "measurement"
    sensor_string = "gaps"
    frame_string = "inertial"

    timezone = mission.usbl.timezone
    timeoffset = mission.usbl.timeoffset
    filepath = mission.usbl.filepath
    usbl_id = mission.usbl.label
    latitude_reference = mission.origin.latitude
    longitude_reference = mission.origin.longitude

    # define headers used in phins
    header_absolute = "$PTSAG"  # georeferenced strings
    header_heading = "$HEHDT"

    # gaps std models
    distance_std_factor = mission.usbl.std_factor
    distance_std_offset = mission.usbl.std_offset
    broken_packet_flag = False

    # read in timezone
    timezone_offset = read_timezone(timezone)

    # determine file paths
    path = (outpath / ".." / filepath).absolute()
    filepath = get_raw_folder(path)
    all_list = os.listdir(str(filepath))
    gaps_list = [line for line in all_list if ".dat" in line]
    Console.info(" " + str(len(gaps_list)) + " GAPS file(s) found")

    # extract data from files
    data_list = []
    if ftype == "acfr":
        data_list = ""
    for i in range(len(gaps_list)):
        path_gaps = filepath / gaps_list[i]
        with path_gaps.open("r", errors="ignore") as gaps:
            # initialise flag
            flag_got_time = 0
            for line in gaps.readlines():
                line_split = line.strip().split("*")
                line_split_no_checksum = line_split[0].strip().split(",")
                broken_packet_flag = False
                # keep on updating ship position to find the prior
                # interpolation value of ship coordinates
                if header_absolute in line_split_no_checksum[0]:
                    # start with a ship coordinate
                    if (
                        line_split_no_checksum[6] == str(usbl_id)
                        and flag_got_time == 2
                    ):
                        if (
                            line_split_no_checksum[11] == "F"
                            and line_split_no_checksum[13] == "1"
                        ):
                            # read in date
                            yyyy = int(line_split_no_checksum[5])
                            mm = int(line_split_no_checksum[4])
                            dd = int(line_split_no_checksum[3])

                            # read in time
                            time_string = str(line_split_no_checksum[2])
                            try:
                                hour = int(time_string[0:2])
                                mins = int(time_string[2:4])
                                secs = int(time_string[4:6])
                                msec = int(time_string[7:10])
                            except ValueError:
                                broken_packet_flag = True

                            if secs >= 60:
                                mins += 1
                                secs = 0
                                broken_packet_flag = True
                            if mins >= 60:
                                hour += 1
                                mins = 0
                                broken_packet_flag = True
                            if hour >= 24:
                                dd += 1
                                hour = 0
                                broken_packet_flag = True

                            epoch_time = date_time_to_epoch(
                                yyyy, mm, dd, hour, mins, secs, timezone_offset
                            )
                            epoch_timestamp = epoch_time + msec / 1000 + timeoffset

                            # get position
                            latitude_negative_flag = False
                            longitude_negative_flag = False

                            latitude_string = line_split_no_checksum[7]
                            latitude_degrees = int(latitude_string[0:2])
                            latitude_minutes = float(latitude_string[2:10])
                            if line_split_no_checksum[8] == "S":
                                latitude_negative_flag = True

                            longitude_string = line_split_no_checksum[9]
                            longitude_degrees = int(longitude_string[0:3])
                            longitude_minutes = float(longitude_string[3:11])
                            if line_split_no_checksum[10] == "W":
                                longitude_negative_flag = True

                            depth = float(line_split_no_checksum[12])

                            latitude = latitude_degrees + latitude_minutes / 60.0
                            longitude = longitude_degrees + longitude_minutes / 60.0
                            if latitude_negative_flag:
                                latitude = -latitude
                            if longitude_negative_flag:
                                longitude = -longitude

                            # flag raised to proceed
                            flag_got_time = 3
                        else:
                            flag_got_time = 0

                    if line_split_no_checksum[6] == "0":
                        if flag_got_time < 3:
                            # read in date
                            yyyy = int(line_split_no_checksum[5])
                            mm = int(line_split_no_checksum[4])
                            dd = int(line_split_no_checksum[3])

                            # read in time
                            time_string = str(line_split_no_checksum[2])
                            hour = int(time_string[0:2])
                            mins = int(time_string[2:4])
                            secs = int(time_string[4:6])
                            try:
                                msec = int(time_string[7:10])
                            except ValueError:
                                broken_packet_flag = True

                            if secs >= 60:
                                mins += 1
                                secs = 0
                                broken_packet_flag = True
                            if mins >= 60:
                                hour += 1
                                mins = 0
                                broken_packet_flag = True
                            if hour >= 24:
                                dd += 1
                                hour = 0
                                broken_packet_flag = True

                            epoch_time = date_time_to_epoch(
                                yyyy, mm, dd, hour, mins, secs, timezone_offset
                            )
                            epoch_timestamp_ship_prior = (
                                epoch_time + msec / 1000 + timeoffset
                            )

                            # get position
                            latitude_string = line_split_no_checksum[7]
                            latitude_degrees_ship_prior = int(latitude_string[0:2])
                            latitude_minutes_ship_prior = float(
                                latitude_string[2:10]
                            )
                            latitude_prior = (
                                latitude_degrees_ship_prior
                                + latitude_minutes_ship_prior / 60.0
                            )
                            if line_split_no_checksum[8] == "S":
                                latitude_prior = -latitude_prior

                            longitude_string = line_split_no_checksum[9]
                            longitude_degrees_ship_prior = int(
                                longitude_string[0:3]
                            )
                            longitude_minutes_ship_prior = float(
                                longitude_string[3:11]
                            )
                            longitude_prior = (
                                longitude_degrees_ship_prior
                                + longitude_minutes_ship_prior / 60.0
                            )
                            if line_split_no_checksum[10] == "W":
                                longitude_prior = -longitude_prior

                            # flag raised to proceed
                            if flag_got_time < 2:
                                flag_got_time = flag_got_time + 1

                        elif flag_got_time >= 3:
                            if line_split_no_checksum[6] == "0":
                                # read in date
                                yyyy = int(line_split_no_checksum[5])
                                mm = int(line_split_no_checksum[4])
                                dd = int(line_split_no_checksum[3])

                                # read in time
                                time_string = str(line_split_no_checksum[2])
                                hour = int(time_string[0:2])
                                mins = int(time_string[2:4])
                                secs = int(time_string[4:6])
                                msec = int(time_string[7:10])

                                # calculate epoch time
                                epoch_time = date_time_to_epoch(
                                    yyyy,
                                    mm,
                                    dd,
                                    hour,
                                    mins,
                                    secs,
                                    timezone_offset,
                                )
                                epoch_timestamp_ship_posterior = (
                                    epoch_time + msec / 1000 + timeoffset
                                )

                                # get position
                                latitude_string = line_split_no_checksum[7]
                                latitude_degrees_ship_posterior = int(
                                    latitude_string[0:2]
                                )
                                latitude_minutes_ship_posterior = float(
                                    latitude_string[2:10]
                                )
                                latitude_posterior = (
                                    latitude_degrees_ship_posterior
                                    + latitude_minutes_ship_posterior / 60.0
                                )
                                if line_split_no_checksum[8] == "S":
                                    latitude_posterior = -latitude_posterior

                                longitude_string = line_split_no_checksum[9]
                                longitude_degrees_ship_posterior = int(
                                    longitude_string[0:3]
                                )
                                longitude_minutes_ship_posterior = float(
                                    longitude_string[3:11]
                                )
                                longitude_posterior = (
                                    longitude_degrees_ship_posterior
                                    + longitude_minutes_ship_posterior / 60.0
                                )
                                if line_split_no_checksum[10] == "W":
                                    longitude_posterior = -longitude_posterior

                                # flag raised to proceed
                                flag_got_time = flag_got_time + 1

                if header_heading in line_split_no_checksum[0]:
                    if flag_got_time < 3:
                        heading_ship_prior = float(line_split_no_checksum[1])
                        if flag_got_time < 2:
                            flag_got_time = flag_got_time + 1
                    else:
                        heading_ship_posterior = float(line_split_no_checksum[1])
                        flag_got_time = flag_got_time + 1

                if flag_got_time >= 5:
                    # interpolate for the ship's location and heading
                    inter_time = (epoch_timestamp - epoch_timestamp_ship_prior) / (
                        epoch_timestamp_ship_posterior - epoch_timestamp_ship_prior
                    )
                    longitude_ship = (
                        inter_time * (longitude_posterior - longitude_prior)
                        + longitude_prior
                    )
                    latitude_ship = (
                        inter_time * (latitude_posterior - latitude_prior)
                        + latitude_prior
                    )
                    heading_ship = (
                        inter_time * (heading_ship_posterior - heading_ship_prior)
                        + heading_ship_prior
                    )
                    while heading_ship > 360:
                        heading_ship = heading_ship - 360
                    while heading_ship < 0:
                        heading_ship = heading_ship + 360

                    lateral_distance, bearing = latlon_to_metres(
                        latitude, longitude, latitude_ship, longitude_ship
                    )

                    # determine range to input to uncertainty model
                    distance = math.sqrt(
                        lateral_distance * lateral_distance + depth * depth
                    )
                    distance_std = (
                        distance_std_factor * distance + distance_std_offset
                    )

                    # determine uncertainty in terms of latitude and longitude
                    latitude_offset, longitude_offset = metres_to_latlon(
                        abs(latitude),
                        abs(longitude),
                        distance_std,
                        distance_std,
                    )
                    latitude_std = abs(abs(latitude) - latitude_offset)
                    longitude_std = abs(abs(longitude) - longitude_offset)

                    # calculate in metres from reference
                    lateral_distance_ship, bearing_ship = latlon_to_metres(
                        latitude_ship,
                        longitude_ship,
                        latitude_reference,
                        longitude_reference,
                    )
                    eastings_ship = (
                        math.sin(bearing_ship * math.pi / 180.0)
                        * lateral_distance_ship
                    )
                    northings_ship = (
                        math.cos(bearing_ship * math.pi / 180.0)
                        * lateral_distance_ship
                    )

                    lateral_distance_target, bearing_target = latlon_to_metres(
                        latitude,
                        longitude,
                        latitude_reference,
                        longitude_reference,
                    )
                    eastings_target = (
                        math.sin(bearing_target * math.pi / 180.0)
                        * lateral_distance_target
                    )
                    northings_target = (
                        math.cos(bearing_target * math.pi / 180.0)
                        * lateral_distance_target
                    )

                    if not broken_packet_flag:
                        if ftype == "oplab" and category == Category.USBL:
                            data = {
                                "epoch_timestamp": float(epoch_timestamp),
                                "class": class_string,
                                "sensor": sensor_string,
                                "frame": frame_string,
                                "category": category,
                                "data_ship": [
                                    {
                                        "latitude": float(latitude_ship),
                                        "longitude": float(longitude_ship),
                                    },
                                    {
                                        "northings": float(northings_ship),
                                        "eastings": float(eastings_ship),
                                    },
                                    {"heading": float(heading_ship)},
                                ],
                                "data_target": [
                                    {
                                        "latitude": float(latitude),
                                        "latitude_std": float(latitude_std),
                                    },
                                    {
                                        "longitude": float(longitude),
                                        "longitude_std": float(longitude_std),
                                    },
                                    {
                                        "northings": float(northings_target),
                                        "northings_std": float(distance_std),
                                    },
                                    {
                                        "eastings": float(eastings_target),
                                        "eastings_std": float(distance_std),
                                    },
                                    {
                                        "depth": float(depth),
                                        "depth_std": float(distance_std),
                                    },
                                    {"distance_to_ship": float(distance)},
                                ],
                            }
                            data_list.append(data)
                        elif ftype == "oplab" and category == Category.DEPTH:
                            data = {
                                "epoch_timestamp": float(epoch_timestamp),
                                "epoch_timestamp_depth": float(epoch_timestamp),
                                "class": class_string,
                                "sensor": sensor_string,
                                "frame": "inertial",
                                "category": Category.DEPTH,
                                "data": [
                                    {
                                        "depth": float(depth),
                                        "depth_std": float(distance_std),
                                    }
                                ],
                            }
                            data_list.append(data)
                        if ftype == "acfr":
                            data = (
                                "SSBL_FIX: "
                                + str(float(epoch_timestamp))
                                + " ship_x: "
                                + str(float(northings_ship))
                                + " ship_y: "
                                + str(float(eastings_ship))
                                + " target_x: "
                                + str(float(northings_target))
                                + " target_y: "
                                + str(float(eastings_target))
                                + " target_z: "
                                + str(float(depth))
                                + " target_hr: "
                                + str(float(lateral_distance))
                                + " target_sr: "
                                + str(float(distance))
                                + " target_bearing: "
                                + str(float(bearing))
                                + "\n"
                            )
                            data_list += data
                    else:
                        Console.warn("Badly formatted packet (GAPS TIME)")
                        Console.warn(line)

                    # reset flag
                    flag_got_time = 0

    Console.info(" ...done parsing GAPS data.")
    return data_list
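# Worked sketch of the ship interpolation used in parse_gaps (made-up numbers):
# with a prior ship fix at t0 = 100 s and a posterior fix at t1 = 110 s, a beacon
# detection at t = 104 s gives inter_time = (104 - 100) / (110 - 100) = 0.4, so
# latitude_ship = 0.4 * (latitude_posterior - latitude_prior) + latitude_prior,
# and likewise for longitude and heading (with the heading wrapped back into
# the [0, 360) range by the while loops).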
def parse_usbl_dump(mission, vehicle, category, ftype, outpath):
    # parser metadata
    class_string = "measurement"
    sensor_string = "jamstec_usbl"
    frame_string = "inertial"

    # gaps std models
    distance_std_factor = mission.usbl.std_factor
    distance_std_offset = mission.usbl.std_offset

    timezone = mission.usbl.timezone
    timeoffset = mission.usbl.timeoffset
    filepath = mission.usbl.filepath
    filename = mission.usbl.filename
    label = mission.usbl.label
    filepath = get_raw_folder(outpath / ".." / filepath)

    latitude_reference = mission.origin.latitude
    longitude_reference = mission.origin.longitude

    # read in timezone
    timezone_offset = read_timezone(timezone)

    # extract data from files
    Console.info("... parsing usbl dump")
    data_list = []
    if ftype == "acfr":
        data_list = ""

    usbl_file = filepath / filename
    with usbl_file.open("r", encoding="utf-8", errors="ignore") as filein:
        for line in filein.readlines():
            line_split = line.strip().split(",")
            if line_split[2] == label:
                date = line_split[0].split("-")

                # read in date
                yyyy = int(date[0])
                mm = int(date[1])
                dd = int(date[2])

                timestamp = line_split[1].split(":")

                # read in time
                hour = int(timestamp[0])
                mins = int(timestamp[1])
                secs = int(timestamp[2])
                msec = 0

                epoch_time = date_time_to_epoch(
                    yyyy, mm, dd, hour, mins, secs, timezone_offset
                )
                epoch_timestamp = epoch_time + msec / 1000 + timeoffset

                if line_split[6] != "":
                    # get position
                    latitude_full = line_split[6].split(" ")
                    latitude_list = latitude_full[0].split("-")
                    latitude_degrees = int(latitude_list[0])
                    latitude_minutes = float(latitude_list[1])
                    if latitude_full[1] != "N":
                        # southern hemisphere
                        latitude_degrees = latitude_degrees * -1
                    latitude = latitude_degrees + latitude_minutes / 60

                    longitude_full = line_split[7].split(" ")
                    longitude_list = longitude_full[0].split("-")
                    longitude_degrees = int(longitude_list[0])
                    longitude_minutes = float(longitude_list[1])
                    if longitude_full[1] != "E":
                        # western hemisphere
                        longitude_degrees = longitude_degrees * -1
                    longitude = longitude_degrees + longitude_minutes / 60

                    depth_full = line_split[8].split("=")
                    depth = float(depth_full[1])

                    distance_full = line_split[11].split("=")
                    distance = float(distance_full[1])
                    distance_std = (
                        distance_std_factor * distance + distance_std_offset
                    )

                    lateral_distance_full = line_split[9].split("=")
                    lateral_distance = float(lateral_distance_full[1])

                    bearing_full = line_split[10].split("=")
                    bearing = float(bearing_full[1])

                    # determine uncertainty in terms of latitude and longitude
                    latitude_offset, longitude_offset = metres_to_latlon(
                        latitude, longitude, distance_std, distance_std
                    )
                    latitude_std = latitude - latitude_offset
                    longitude_std = longitude - longitude_offset

                    # calculate in metres from reference
                    # (no ship fix is available in the dump, so ship fields are
                    # left at zero)
                    eastings_ship = 0
                    northings_ship = 0
                    latitude_ship = 0
                    longitude_ship = 0
                    heading_ship = 0

                    lateral_distance_target, bearing_target = latlon_to_metres(
                        latitude,
                        longitude,
                        latitude_reference,
                        longitude_reference,
                    )
                    eastings_target = (
                        math.sin(bearing_target * math.pi / 180.0)
                        * lateral_distance_target
                    )
                    northings_target = (
                        math.cos(bearing_target * math.pi / 180.0)
                        * lateral_distance_target
                    )

                    if ftype == "oplab":
                        data = {
                            "epoch_timestamp": float(epoch_timestamp),
                            "class": class_string,
                            "sensor": sensor_string,
                            "frame": frame_string,
                            "category": category,
                            "data_ship": [
                                {
                                    "latitude": float(latitude_ship),
                                    "longitude": float(longitude_ship),
                                },
                                {
                                    "northings": float(northings_ship),
                                    "eastings": float(eastings_ship),
                                },
                                {"heading": float(heading_ship)},
                            ],
                            "data_target": [
                                {
                                    "latitude": float(latitude),
                                    "latitude_std": float(latitude_std),
                                },
                                {
                                    "longitude": float(longitude),
                                    "longitude_std": float(longitude_std),
                                },
                                {
                                    "northings": float(northings_target),
                                    "northings_std": float(distance_std),
                                },
                                {
                                    "eastings": float(eastings_target),
                                    "eastings_std": float(distance_std),
                                },
                                {
                                    "depth": float(depth),
                                    "depth_std": float(distance_std),
                                },
                                {"distance_to_ship": float(distance)},
                            ],
                        }
                        data_list.append(data)

                    if ftype == "acfr":
                        data = (
                            "SSBL_FIX: "
                            + str(float(epoch_timestamp))
                            + " ship_x: "
                            + str(float(northings_ship))
                            + " ship_y: "
                            + str(float(eastings_ship))
                            + " target_x: "
                            + str(float(northings_target))
                            + " target_y: "
                            + str(float(eastings_target))
                            + " target_z: "
                            + str(float(depth))
                            + " target_hr: "
                            + str(float(lateral_distance))
                            + " target_sr: "
                            + str(float(distance))
                            + " target_bearing: "
                            + str(float(bearing))
                            + "\n"
                        )
                        data_list += data

    return data_list