def parse(src: SourceFile) -> (list, TemplateFile):
    content = src.load()
    sensors = []
    # ... code goes here ...
    parsed_src = TemplateFile(src, '<platformID>')
    return sensors, parsed_src
def parse(src: SourceFile) -> (list, MiMaFile):
    content = src.load()
    sensors = []
    parsed_src = MiMaFile(src, '<platformID>')
    return [], parsed_src
def parse(src: SourceFile): """ Parses mdr files Args: src (SourceFile): specifies the file to parse Returns: an empty list an MDR_File """ filestring = src.load() # cleaning up and splitting mdr file filestring = normalize_newlines(filestring) # newlines are different on windows and linux if filestring[-1:] != '\n': filestring += '\n' content = filestring.split('PORT: 1\n') #content example: ['STARTOFFSET: 0\nENDOFFSET: 4608\n', 'STARTOFFSET: 4864\nENDOFFSET: 5376\n', 'STARTOFFSET: 6400\nENDOFFSET: 6656\n', 'STARTOFFSET: 7128\nENDOFFSET: 608920\n', ''] if '' in content: content.remove('') if '\n' in content: content.remove('\n') # parsing chunked data gaps = [] for elem in content: try: startoffset = int( elem[elem.index(':')+1 : elem.index('\n')] ) endoffset = int( elem[elem.rindex(':')+1 : elem.rindex('\n')] ) gaps.append( (startoffset,endoffset) ) except Exception as e: print( 'could not parse mdr element', repr(elem), e) mdr = MDR_File(src, gaps) return [], mdr
def parse(src: SourceFile): """ Parses mdr files Args: src (SourceFile): specifies the file to parse Returns: a list of Sensors a SurfLog class instance """ surflog = src.load() # TODO: everything below this isParag = False struct = {} sensors = {} errwarnodd = {} surf = [] for line in surflog: line = line.strip() if grex.START.search(line) and isParag == False: isParag = True struct["surflog"] = surflog.geturl() elif grex.END.search(line) and isParag == True: isParag = False struct["sensors"] = sensors.copy() struct["errwarnodd"] = errwarnodd.copy() try: struct["inGliderdos?"] except: struct["inGliderdos?"] = False try: struct["sensors"]["m_water_vy"] struct["sensors"]["m_water_vx"] driftCalc(struct) # adds drift params to struct except: pass surf.append(struct.copy()) struct = {} sensors = {} errwarnodd = {} if isParag: GLIDER = grex.GLIDER.search(line) MISSION = grex.MISSION.search(line) REASON = grex.REASON.search(line) GPS = grex.GPS.search(line) SENSOR = grex.SENSOR.search(line) TIMESTAMP = grex.TIMESTAMP.search(line) ERRWARNODD = grex.ERRWARNODD.search(line) WAYPOINT = grex.WAYPOINT.search(line) GLIDERDOS = grex.GLIDERDOS.search(line) if GLIDER: struct["glidername"] = GLIDER.group("glidername") if MISSION: struct["mission"] = MISSION.group("mission") struct["filename"] = MISSION.group("filename") struct["the8x3_filename"] = MISSION.group("the8x3_filename") if TIMESTAMP: year = TIMESTAMP.group("year") month = TIMESTAMP.group("month") month = str(list(calendar.month_abbr).index(month)) day = TIMESTAMP.group("day") hours = TIMESTAMP.group("hours") minutes = TIMESTAMP.group("minutes") seconds = TIMESTAMP.group("seconds") dt_obj = datetime.datetime(int(year), int(month), int(day), int(hours), int(minutes), int(seconds)) time_tuple = dt_obj.timetuple() timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time_tuple) epoch = calendar.timegm(time_tuple) struct["timestamp"] = timestamp struct["epoch"] = epoch # seconds since 1970-01-01 00:00:00 +0000 (UTC) # year+'-'+month+'-'+day+' '+hours+':'+minutes+':'+seconds if GPS: struct["gps_lat"] = float(dm2dd(GPS.group("current_lat"))) struct["gps_lon"] = float(dm2dd(GPS.group("current_lon"))) struct["gps_secs_ago"] = GPS.group("gps_secs_ago") struct["coord"] = {"lat": struct["gps_lat"], "lon": struct["gps_lon"], "alt": 0} if SENSOR: sensorName = SENSOR.group("sensor") sensors[sensorName] = float(SENSOR.group("value")) sensors[sensorName + " secs_ago"] = SENSOR.group("secs_ago") # beware the float: 1e+308 secs ago if WAYPOINT: struct["wpt_lat"] = WAYPOINT.group("waypoint_lat") struct["wpt_lon"] = WAYPOINT.group("waypoint_lon") struct["wpt_range"] = WAYPOINT.group("waypoint_range") struct["wpt_bearing"] = WAYPOINT.group("waypoint_bearing") if REASON: struct["surface_reason"] = REASON.group("surface_reason") if ERRWARNODD: errwarnodd["total_errors"] = ERRWARNODD.group("total_errors") errwarnodd["segment_errors"] = ERRWARNODD.group("segment_errors") errwarnodd["total_warnings"] = ERRWARNODD.group("total_warnings") errwarnodd["mission_warnings"] = ERRWARNODD.group("mission_warnings") errwarnodd["segment_warnings"] = ERRWARNODD.group("segment_warnings") errwarnodd["total_oddities"] = ERRWARNODD.group("total_oddities") errwarnodd["mission_oddities"] = ERRWARNODD.group("mission_oddities") errwarnodd["segment_oddities"] = ERRWARNODD.group("segment_oddities") if GLIDERDOS: struct["inGliderdos?"] = True struct["sensors"] = sensors.copy() struct["errwarnodd"] = errwarnodd.copy() try: struct["inGliderdos?"] except: struct["inGliderdos?"] = False try: 
struct["sensors"]["m_water_vy"] struct["sensors"]["m_water_vx"] driftCalc(struct) except: pass surf.append(struct.copy()) return surf
def parse(src: SourceFile): """ Parses asc or mrg files generated from sbd, tbd, mbd, nbd, dbd, ebd files. 'xbd' refers to a file containing sbd-file content, tbd-file content or both (mrg) Binary data is not parsed by this function so for practical purposes this function can only accept content from .mrg, _sbd.asc, or _tbd.asc files Args: src (SourceFile): specifies the file to parse Returns: a list of Sensors an XBD_File class containing src data and additional metadata Bug-ish: because of the way .mrg files are made sci_m_present_time entries are unanimously jammed into the m_present_time timestream. This may possibly cause a bug when the science and flight computers become out of sync. """ xbd_filestring = src.load() line = xbd_filestring.split('\n') def getVal(line): return line[line.index(':')+2:] #Collect XBD file metadata rawline = getVal(line[4]) # xbd file metadata onboard_filename = rawline[rawline.index('-')+1:] platformID = rawline[:rawline.index('-')] the8x3_filename = getVal(line[5]) mission = getVal(line[8]) sensors_per_cycle = int(getVal(line[10])) """ Data in an xbd file is stored like in csv spreadsheet. Each column is a respective sensor The first row is a sensor's name. This is line 14 of a xbd file. The second row is a sensor's unit. This is line 15 of a xbd file. The subsequent rows are the data (datarow). The data for a given row was all captured at them same time as the timestamp datum featured in that row. """ sensors = line[14].split() # list of reported sensors units = line[15].split() # associated list of units for a given sensor values = [] for datarow in line[17:]: # line17 is where the data starts if datarow == '': continue # skips empty lines, usually the last line datarow = datarow.strip().split(' ') # each element of datarow is a single sensor value. values.append(datarow) """ Here the raw data is grabbed out from the spreadsheet-like format we got it in, and associated with the appropriate sensor name. Data is not yet time-associated. """ sensor_dicts = [] timedata = [] for i in range(sensors_per_cycle): #sensors_per_cycle = the number of differnt sensors being reported ydata = [] for datarow in values: ydata.append(float(datarow[i])) if units[i] == 'timestamp': # the term timestamp is used extensively otherwise in this program so best to avoid it. units[i] = 'epoch-seconds' # moreover, epoch-seconds is a more clearer unit name for this sensor if timedata == []: # m_present_time and sci_m_present_time are the only sensors with 'timestamp' as the unit. They ARE the xdata. timedata = ydata # ... since m_present_time is TYPICALLY before sci_m_present_time and includes sci_m_present_time # ... it should be safe to use this operation to set timedata for sbd's tbd's and mrg's. start_time = min(timedata) end_time = max(timedata) sensor = {'name':sensors[i], 'units':units[i], 'y': ydata} sensor_dicts.append(sensor) """ Associate timestamps to sensor datapoints Generate folio leafs for each sensor """ sensors =[] for sensor in sensor_dicts: # compare times and values, strip out entries with nan times,yvals = delNaN(timedata,sensor['y']) sensor = Sensor(sensor['name'], sensor['units'], list(zip(times, yvals)), PARSER_TAG, ['raw']) sensors.append(sensor) xbdfile = XBD_File(src, platformID, mission, start_time, end_time, onboard_filename, the8x3_filename) return sensors, xbdfile