def getNameMMSI(logfile, outfile):
    """Extract (MMSI, ship name) pairs from AIS msg 5 (shipdata) NMEA lines.

    Writes one "mmsi name" line to outfile for each payload that is the first
    sentence of a sequence and long enough to contain the name field.

    @param logfile: file like object of NMEA AIS lines
    @param outfile: file like object that receives "mmsi name" lines
    """
    for line in logfile:
        fields = line.split(',')[:6]
        # Robustness fix: non-AIS lines can have fewer than six comma
        # separated fields; the original raised IndexError on them.
        if len(fields) < 6:
            continue
        if '1' != fields[2]:  # Must be the start of a sequence
            continue
        if len(fields[5]) < 39:
            continue  # Too short to hold the name bits
        bv = binary.ais6tobitvec(fields[5][:39])  # Hacked for speed - only decode the leading bits
        mmsi = ais_msg_5.decodeUserID(bv)
        name = aisstring.unpad(ais_msg_5.decodename(bv))
        outfile.write(str(mmsi) + ' ' + str(name) + '\n')
def parse_msgs(infile, verbose=False): for line in infile: line = line.strip() try: match = uscg_ais_nmea_regex.search(line).groupdict() except AttributeError: continue msg_type = match['body'][0] if msg_type not in ('6', '8'): continue if msg_type == '6' and len(match['body']) < 15: continue if msg_type == '8' and len(match['body']) < 10: continue try: bv = binary.ais6tobitvec(match['body'][:15]) except ValueError: sys.stderr.write('bad msg: %s\n' % line.strip()) continue r = {} r['MessageID']=int(bv[0:6]) r['UserID']=int(bv[8:38]) if '6' == msg_type: dac = int(bv[72:82]) fi = int(bv[82:88]) elif '8' == msg_type: dac = int(bv[40:50]) fi = int(bv[50:56]) elif verbose: print 'not a bbm:', line if verbose: print msg_type, dac, fi, r['UserID'], line.rstrip() else: print msg_type, dac, fi, r['UserID'], match['station']
def getPosition(logfile, outfile, minDist=None): ''' Pull the positions from the log file @param logfile: file like object @param outfile: file like object destination @param minDist: how far apart points must be apart to be considered unique ''' # FIX: use the right utm zone. 14 is the central US so it will kind of work params = {'proj':'utm', 'zone':14} #int(options.zone)} proj = None if minDist != None: proj = Proj(params) positions = {} # Last recoded ship position for line in logfile: fields = line.split(',') # FIX: use regex instead if '!AIVDM' != fields[0]: continue if '1' != fields[1] and '1' != fields[2]: # Must be the start of a sequence continue if fields[5][0] not in ('1','2','3'): continue bv = binary.ais6tobitvec(fields[5][:39]) # Hacked for speed timestamp = fields[-1].strip() mmsi = str(ais_msg_1.decodeUserID(bv)) lon = ais_msg_1.decodelongitude(bv) lat = ais_msg_1.decodelatitude(bv) d = None if mmsi not in positions: positions[mmsi] = (lon, lat) elif minDist != None: lonOld, latOld = positions[mmsi] oldUTM = proj(lonOld, latOld) newUTM = proj(lon, lat) d = dist(oldUTM[0], oldUTM[1], newUTM[0], newUTM[1]) if str(d)=='nan': continue #pass # FIX: Print but do not save nan values??? elif d < minDist: continue else: positions[mmsi] = (lon, lat) lon = str(lon) lat = str(lat) if len(mmsi) < 9: mmsi += ' '*(9-len(mmsi)) fLen = 12 # field length ... how much space if len(lon)>fLen: lon = lon[:fLen] if len(lon)<fLen: lon += ' '*(fLen-len(lon)) if len(lat)>fLen: lat = lat[:fLen] if len(lat)<fLen: lat += ' '*(fLen-len(lat)) outfile.write(timestamp+' '+str(mmsi)+' '+lon+' '+lat+'\n')
def build_dist_database(database_filename, log_files, verbose=False): cx = sqlite3.connect(database_filename) print 'WARNING: not saving the station name' cx.execute(''' CREATE TABLE IF NOT EXISTS distance ( -- Save space, no key -- ts INTEGER, -- Save more space julian_day INTEGER, -- x REAL, -- y REAL, dist_km REAL --, --station VARCHAR(15) ); ''') cu = cx.cursor() counts = {'nogps': 0} for filename in log_files: if verbose: print 'file:', filename sys.stdout.flush() for line_num, line in enumerate(file(filename)): if 'AIVDM,1,1' not in line: continue match = uscg_ais_nmea_regex.search(line).groupdict() message_id = match['body'][0] # First letter is the message type if message_id not in ('1', '2', '3'): continue if len(match['body']) != 28: # 6 bits per character raise AisErrorBadNumBits('expected 168, got %d' % len(match['body']) / 6) bits = binary.ais6tobitvec( match['body'][:20] ) # Don't need any of the other bits, so do not waste time x = binary.signedIntFromBV(bits[61:89]) / 600000. y = binary.signedIntFromBV(bits[89:116]) / 600000. 
if x > 180 or y > 90: counts['nogps'] += 1 continue station = match['station'] julian_day = int( datetime.datetime.utcfromtimestamp(int( match['timeStamp'])).strftime('%j')) d_km = dist_utm_km((x, y), station_locations[station]) #cu.execute('INSERT INTO distance VALUES (:julian_day, :x, :y, :dist_km, :station)', #{'julian_day': julian_day, 'x':x, 'y':y, 'dist_km': d_km, 'station':station} ) #cu.execute('INSERT INTO distance VALUES (:julian_day, :x, :y, :dist_km)', # {'julian_day': julian_day, 'x':x, 'y':y, 'dist_km': d_km, } ) cu.execute('INSERT INTO distance VALUES (:julian_day, :dist_km)', { 'julian_day': julian_day, 'dist_km': d_km, }) if line_num % 10000 == 9999: cx.commit() cx.commit() if False: print 'Creating indexes' try: cx.execute('CREATE INDEX idx_dist_day ON distance(julian_day);') cx.execute('CREATE INDEX idx_dist_dist ON distance(dist_km);') #cx.execute('CREATE INDEX idx_dist_station ON distance(station);') cx.commit() except sqlite3.OperationalError: print 'Appears indexes were already created' return cx, counts
def decode_aivdm(msg): bv = binary.ais6tobitvec(msg.split(',')[5]) print decode(bv)
buf = StringIO.StringIO() for field in options.fieldList: buf.write(field + ',') result = buf.getvalue() if result[-1] == ',': print result[:-1] else: print result if options.doDecode: if len(args) == 0: args = sys.stdin for msg in args: bv = None if msg[0] in ('$', '!') and msg[3:6] in ('VDM', 'VDO'): # Found nmea # FIX: do checksum bv = binary.ais6tobitvec(msg.split(',')[5]) else: # either binary or nmeapayload... expect mostly nmeapayloads # assumes that an all 0 and 1 string can not be a nmeapayload binaryMsg = True for c in msg: if c not in ('0', '1'): binaryMsg = False break if binaryMsg: bv = BitVector(bitstring=msg) else: # nmeapayload bv = binary.ais6tobitvec(msg) params = decode(bv) #print 'params:',params printFields(params,
def main():
    """Command line driver: encode/decode this message type and emit tables/SQL.

    Parses options, optionally runs doctests/unittests, then performs the
    requested actions: encode a message, print SQL create / LaTeX / text
    tables, print the csv field list, or decode messages from args/stdin.
    """
    from optparse import OptionParser
    parser = OptionParser(usage="%prog [options]")
    parser.add_option('--doc-test',dest='doctest',default=False,action='store_true',
        help='run the documentation tests')
    parser.add_option('--unit-test',dest='unittest',default=False,action='store_true',
        help='run the unit tests')
    parser.add_option('-v','--verbose',dest='verbose',default=False,action='store_true',
        help='Make the test output verbose')
    # FIX: remove nmea from binary messages.  No way to build the whole packet?
    # FIX: or build the surrounding msg 8 for a broadcast?
    typeChoices = ('binary','nmeapayload','nmea') # FIX: what about a USCG type message?
    parser.add_option('-t', '--type', choices=typeChoices, type='choice',
        dest='ioType', default='nmeapayload',
        help='What kind of string to write for encoding ('+', '.join(typeChoices)+') [default: %default]')
    outputChoices = ('std','html','csv','sql' , 'kml','kml-full')
    parser.add_option('-T', '--output-type', choices=outputChoices, type='choice',
        dest='outputType', default='std',
        help='What kind of string to output ('+', '.join(outputChoices)+') '
        '[default: %default]')
    parser.add_option('-o','--output',dest='outputFileName',default=None,
        help='Name of the python file to write [default: stdout]')
    parser.add_option('-f', '--fields', dest='fieldList', default=None,
        action='append', choices=fieldList,
        help='Which fields to include in the output.  Currently only for csv '
        'output [default: all]')
    parser.add_option('-p', '--print-csv-field-list', dest='printCsvfieldList',
        default=False,action='store_true',
        help='Print the field name for csv')
    parser.add_option('-c', '--sql-create', dest='sqlCreate', default=False,
        action='store_true',
        help='Print out an sql create command for the table.')
    parser.add_option('--latex-table', dest='latexDefinitionTable',
        default=False,action='store_true',
        help='Print a LaTeX table of the type')
    parser.add_option('--text-table', dest='textDefinitionTable', default=False,
        action='store_true',
        help='Print delimited table of the type (for Word table importing)')
    parser.add_option('--delimt-text-table', dest='delimTextDefinitionTable',
        default=' ',
        help='Delimiter for text table [default: \'%default\'] '
        '(for Word table importing)')
    dbChoices = ('sqlite','postgres')
    parser.add_option('-D', '--db-type', dest='dbType', default='postgres',
        choices=dbChoices,type='choice',
        help='What kind of database ('+', '.join(dbChoices)+') '
        '[default: %default]')
    addMsgOptions(parser)
    (options,args) = parser.parse_args()
    success = True
    if options.doctest:
        import os; print os.path.basename(sys.argv[0]), 'doctests ...',
        sys.argv = [sys.argv[0]]
        if options.verbose: sys.argv.append('-v')
        numfail, numtests = doctest.testmod()
        if not numfail:
            print 'ok'
        else:
            print 'FAILED'
            success = False
    if not success: sys.exit('Something Failed')
    del success # Hide success from epydoc
    if options.unittest:
        sys.argv = [sys.argv[0]]
        if options.verbose: sys.argv.append('-v')
        unittest.main()
    outfile = sys.stdout
    if None!=options.outputFileName:
        outfile = file(options.outputFileName,'w')
    if options.doEncode:
        # Make sure all non required options are specified.
        if None==options.time_monthField: parser.error("missing value for time_monthField")
        if None==options.time_dayField: parser.error("missing value for time_dayField")
        if None==options.time_hourField: parser.error("missing value for time_hourField")
        if None==options.time_minField: parser.error("missing value for time_minField")
        if None==options.stationidField: parser.error("missing value for stationidField")
        if None==options.pos_longitudeField: parser.error("missing value for pos_longitudeField")
        if None==options.pos_latitudeField: parser.error("missing value for pos_latitudeField")
        if None==options.flowField: parser.error("missing value for flowField")
        msgDict = {
            'time_month': options.time_monthField,
            'time_day': options.time_dayField,
            'time_hour': options.time_hourField,
            'time_min': options.time_minField,
            'stationid': options.stationidField,
            'pos_longitude': options.pos_longitudeField,
            'pos_latitude': options.pos_latitudeField,
            'flow': options.flowField,
            'reserved': '0',
        }
        bits = encode(msgDict)
        if 'binary' == options.ioType:
            print str(bits)
        elif 'nmeapayload'==options.ioType:
            # FIX: figure out if this might be necessary at compile time
            bitLen=len(bits)
            if bitLen % 6 != 0:
                bits = bits + BitVector(size=(6 - (bitLen%6))) # Pad out to multiple of 6
            print binary.bitvectoais6(bits)[0]
        # FIX: Do not emit this option for the binary message payloads.  Does not make sense.
        elif 'nmea' == options.ioType:
            nmea = uscg.create_nmea(bits)
            print nmea
        else:
            sys.exit('ERROR: unknown ioType.  Help!')
    if options.sqlCreate:
        sqlCreateStr(outfile,options.fieldList,dbType=options.dbType)
    if options.latexDefinitionTable:
        latexDefinitionTable(outfile)
    # For conversion to word tables
    if options.textDefinitionTable:
        textDefinitionTable(outfile,options.delimTextDefinitionTable)
    if options.printCsvfieldList:
        # Make a csv separated list of fields that will be displayed for csv
        if None == options.fieldList:
            options.fieldList = fieldList
        import StringIO
        buf = StringIO.StringIO()
        for field in options.fieldList:
            buf.write(field+',')
        result = buf.getvalue()
        if result[-1] == ',': print result[:-1]
        else: print result
    if options.doDecode:
        if len(args)==0: args = sys.stdin
        for msg in args:
            bv = None
            if msg[0] in ('$','!') and msg[3:6] in ('VDM','VDO'):
                # Found nmea
                # FIX: do checksum
                bv = binary.ais6tobitvec(msg.split(',')[5])
            else:
                # either binary or nmeapayload... expect mostly nmeapayloads
                # assumes that an all 0 and 1 string can not be a nmeapayload
                binaryMsg=True
                for c in msg:
                    if c not in ('0','1'):
                        binaryMsg=False
                        break
                if binaryMsg:
                    bv = BitVector(bitstring=msg)
                else: # nmeapayload
                    bv = binary.ais6tobitvec(msg)
            printFields(decode(bv)
                ,out=outfile
                ,format=options.outputType
                ,fieldList=options.fieldList
                ,dbType=options.dbType
                )
def load_data(cx, datafile=sys.stdin, verbose=False, uscg=True): """Try to read data from an open file object. Not yet well tested. @param cx: database connection @param verbose: pring out more if true @param uscg: Process uscg tail information to get timestamp and receive station @rtype: None @return: Nothing @note: can not handle multiline AIS messages. They must be normalized first. """ v = verbose # Hmm... "v"... the irony cu = cx.cursor() lineNum = 0 next_key = 0 max_key = get_max_key(cx) if max_key is not None: next_key = max_key + 1 print 'keys_starting_at:',next_key message_set = (1,2,3,4,5,18,19) counts = {} for msg_num in message_set: counts[msg_num] = 0 counts['checksum_failed'] = 0 track_dups = TrackDuplicates(lookback_length=1000) for line in datafile: lineNum += 1 if lineNum%1000==0: print lineNum cx.commit() if len(line)<15 or line[3:6] not in ('VDM|VDO'): continue # Not an AIS VHF message #print 'FIX: validate checksum' if not nmea.checksum.isChecksumValid(line): print >> sys.stderr, 'WARNING: invalid checksum:\n\t',line, print >> sys.stderr, ' ',nmea.checksum.checksumStr(line) counts['checksum_failed'] += 1 fields=line.split(',') # FIX: use this split throughout below... try: msg_num = int(binary.ais6tobitvec(fields[5][0])) except: print 'line would not decode',line continue if verbose: print 'msg_num:',msg_num if msg_num not in message_set: if verbose: print 'skipping',line print ' not in msg set:',str(message_set) continue try: bv = binary.ais6tobitvec(fields[5]) except: print >> sys.stderr, 'ERROR: Unable to decode bits in line:\n\t',line traceback.print_exc(file=sys.stderr) continue # FIX: need to take padding into account ... 
right before the * if msg_num in (1,2,3,4,18): if len(bv) != 168: print 'ERROR: skipping bad one slot message, line:',lineNum print ' ',line, print ' Got length',len(bv), 'expected', 168 continue elif msg_num == 5: # 426 has 2 pad bits if len(bv) not in (424,426): print 'ERROR: skipping bad shipdata message, line:',lineNum print ' ',line, print ' Got length',len(bv), 'expected', 424 continue ins = None try: if msg_num== 1: ins = ais.ais_msg_1_handcoded.sqlInsert(ais.ais_msg_1_handcoded.decode(bv),dbType='sqlite') elif msg_num== 2: ins = ais.ais_msg_2_handcoded.sqlInsert(ais.ais_msg_2_handcoded.decode(bv),dbType='sqlite') elif msg_num== 3: ins = ais.ais_msg_3_handcoded.sqlInsert(ais.ais_msg_3_handcoded.decode(bv),dbType='sqlite') elif msg_num== 4: ins = ais.ais_msg_4_handcoded.sqlInsert(ais.ais_msg_4_handcoded.decode(bv),dbType='sqlite') elif msg_num== 5: ins = ais.ais_msg_5.sqlInsert(ais.ais_msg_5.decode(bv),dbType='sqlite') elif msg_num==18: ins = ais.ais_msg_18.sqlInsert(ais.ais_msg_18.decode(bv),dbType='sqlite') # Class B position elif msg_num==19: ins = ais.ais_msg_19.sqlInsert(ais.ais_msg_19.decode(bv),dbType='sqlite') # Class B position else: print 'Warning... 
not handling type',msg_num,'line:',lineNum continue except: print 'ERROR: some decode error?','line:',lineNum print ' ',line continue counts[msg_num] += 1 if uscg: from aisutils.uscg import uscg_ais_nmea_regex match = uscg_ais_nmea_regex.search(line).groupdict() try: cg_sec = int(float(match['timeStamp'])) ins.add('cg_sec', cg_sec) #ins.add('cg_sec', int(float(match['timeStamp'])) ) ins.add('cg_timestamp', str(datetime.datetime.utcfromtimestamp(float(match['timeStamp']))) ) ins.add('cg_r', match['station'] ) except: print >> sys.stderr, match print >> sys.stderr, 'bad uscg sections',line, continue # Optional fields that are not always there if match['time_of_arrival'] is not None: try: ins.add('cg_t_arrival', float(match['time_of_arrival'])) except: print >> sys.stderr, 'WARNING: corrupted time of arrival (T) in line. T ignored\n\t',line pass # Not critical if corrupted if match['slot'] is not None: ins.add('cg_s_slotnum', int(match['slot']) ) if msg_num in (1,2,3,4): pkt_id,dup_flag = track_dups.check_packet(cg_sec,fields[5]) # Pass in the NMEA payload string of data if v: print 'dup_check:',pkt_id,dup_flag,fields[5] ins.add('pkt_id',pkt_id) ins.add('dup_flag',dup_flag) ins.add('key',next_key) next_key += 1 if verbose: print str(ins) try: cu.execute(str(ins)) except pysqlite2.dbapi2.OperationalError, params: #except OperationalError, params: if -1 != params.message.find('no such table'): print 'ERROR:',params.message sys.exit('You probably need to run with --with-create') print 'params',params print type(params) print 'ERROR: sql error?','line:',lineNum print ' ', str(ins) print ' ',line if False: # Give some debugging flexibility from IPython.Shell import IPShellEmbed ipshell = IPShellEmbed(argv=[]) ipshell() sys.exit('Gave up')
''' import sys from aisutils.BitVector import BitVector from aisutils import binary import ais_msg_8 import sls.waterlevel if __name__=='__main__': print 'FIX: broken' vdm='!AIVDM,1,1,4,A,8030ot1?0@>PSpPPPC<2<oURAU=>T08f@02PSpPPP3C3<oU=d5<U00BH@02PSpPPP3C3EoU:A5<TwPPO@02PSpPPP2hk<oRWU5;si0Pl@02O<0PPPP3D<oPPEU;M418g@02PSpPPP2hlEoRQgU;j@17p@00,2*32' msg=vdm.split(',')[5] bvMsg = binary.ais6tobitvec(msg) msg8 = ais_msg_8.decode(bvMsg) bvMsg8 = msg8['BinaryData'] del msg8['BinaryData'] ais_msg_8.printFields(msg8) print # Now deal with the St Lawrence Seaway Header slsHdr = sls_header.decode(bvMsg8) bvHdr = slsHdr['BinaryData'] del slsHdr['BinaryData'] sls_header.printFields(slsHdr) #print slsHdr.keys()
def loadData(cx, datafile, verbose=False, uscg=True):
    ''' Try to read data from an open file object.  Not yet well tested.

    Decodes AIS messages 1-5, 18 and 19 from single-line NMEA sentences and
    inserts them into a sqlite database, then prints per-type counts.

    @param cx: database connection
    @param verbose: print out more if true
    @param uscg: Process uscg tail information to get timestamp and receive station
    @rtype: None
    @return: Nothing
    @note: can not handle multiline AIS messages.  They must be normalized first.
    '''
    cu = cx.cursor()
    lineNum = 0
    #counts = {1:0,2:0,3:0,4:0,5:0}
    counts = {}
    countsTotal = {} # Includes ignored messages
    for i in range(64):
        counts[i] = 0
        countsTotal[i] = 0
    for line in datafile:
        lineNum += 1
        if lineNum % 1000 == 0:
            print lineNum
            cx.commit()
        # if lineNum>3000:
        #     print 'Early exit from load'
        #     break
        # NOTE(review): ('VDM|VDO') is a single string, so this is a substring
        # containment test, not tuple membership - likely meant ('VDM','VDO').
        if line[3:6] not in ('VDM|VDO'):
            # if verbose:
            #     sys.stderr.write
            continue # Not an AIS VHF message
        try:
            # First armored character of the payload gives the message type.
            msgNum = int(binary.ais6tobitvec(line.split(',')[5][0]))
        except:
            print 'line would not decode', line
            continue
        countsTotal[msgNum] += 1
        if verbose:
            print 'msgNum:', msgNum
        # if msgNum not in (1,2,3,4,5):
        #     if verbose: print 'skipping',line
        #     continue
        payload = bv = binary.ais6tobitvec(line.split(',')[5])
        # FIX: need to take badding into account ... right before the *
        if msgNum in (1, 2, 3):
            # if len(bv) != 168:
            if len(bv) < 168:
                print 'ERROR: skipping bad position message, line:', lineNum
                print '  ', line,
                print '  Got length', len(bv), 'expected', 168
                continue
        # elif msgNum == 4:
        elif msgNum == 5:
            # if len(bv) != 424:
            if len(bv) < 424:
                print 'ERROR: skipping bad shipdata message, line:', lineNum
                print '  ', line,
                print '  Got length', len(bv), 'expected', 424
                continue
        fields = line.split(',')
        cg_timestamp = None
        cg_station = None
        if uscg:
            try:
                cg_sec = int(float(fields[-1])) # US Coast Guard time stamp.
                # NOTE(review): unconditional debug print - probably leftover.
                print 'cg_sec:', cg_sec, type(cg_sec)
                cg_timestamp = datetime.datetime.utcfromtimestamp(
                    float(cg_sec))
            except:
                print 'ERROR getting timestamp for', lineNum, line
            #print len(fields),fields
            # Scan backwards through the trailing USCG fields for the
            # receive-station field (starts with 'r').
            for i in range(len(fields) - 1, 5, -1):
                if 0 < len(fields[i]) and 'r' == fields[i][0]:
                    cg_station = fields[i]
                    break # Found it so ditch the for loop
            #print station
            #sys.exit('stations please work')
        ins = None
        try:
            if msgNum == 1:
                ins = ais.ais_msg_1.sqlInsert(ais.ais_msg_1.decode(bv),
                                              dbType='sqlite')
            elif msgNum == 2:
                ins = ais.ais_msg_2.sqlInsert(ais.ais_msg_2.decode(bv),
                                              dbType='sqlite')
            elif msgNum == 3:
                ins = ais.ais_msg_3.sqlInsert(ais.ais_msg_3.decode(bv),
                                              dbType='sqlite')
            elif msgNum == 4:
                ins = ais.ais_msg_4.sqlInsert(ais.ais_msg_4.decode(bv),
                                              dbType='sqlite')
            elif msgNum == 5:
                ins = ais.ais_msg_5.sqlInsert(ais.ais_msg_5.decode(bv),
                                              dbType='sqlite')
            elif msgNum == 18:
                ins = ais.ais_msg_18.sqlInsert(ais.ais_msg_18.decode(bv),
                                               dbType='sqlite')
            elif msgNum == 19:
                ins = ais.ais_msg_19.sqlInsert(ais.ais_msg_19.decode(bv),
                                               dbType='sqlite')
            else:
                if verbose:
                    print 'Warning... not handling type', msgNum, 'line:', lineNum
                continue
        except:
            print 'ERROR: some decode error?', 'line:', lineNum
            print '  ', line
            continue
        counts[msgNum] += 1
        if uscg:
            # FIX: make cg_timestamp work
            if None != cg_timestamp:
                ins.add('cg_timestamp', cg_timestamp)
            if None != cg_station:
                ins.add('cg_r', cg_station)
        if verbose:
            print str(ins)
        # FIX: redo this correctly???
        #try:
        print str(ins)
        cu.execute(str(ins))
        #except:
        #    sys.stderr.write('FIX: write some sort of exception handler\n')
        # except pysqlite2.dbapi2.OperationalError, params:
        # #except OperationalError, params:
        #     if -1 != params.message.find('no such table'):
        #         print 'ERROR:',params.message
        #         sys.exit('You probably need to run with --with-create')
        #     print 'params',params
        #     print type(params)
        #     print 'ERROR: sql error?','line:',lineNum
        #     print '  ', str(ins)
        #     print '  ',line
        # if False:
        #     # Give some debugging flexibility
        #     from IPython.Shell import IPShellEmbed
        #     ipshell = IPShellEmbed(argv=[])
        #     ipshell()
        #     sys.exit('Gave up')
    #print counts
    print '\nMessages found:'
    for key in countsTotal:
        if countsTotal[key] > 0:
            print str(key) + ':', countsTotal[key]
    print '\nMessages processed:'
    for key in counts:
        if counts[key] > 0:
            print str(key) + ':', counts[key]
    cx.commit()
def main():
    """Command line driver for the waterlevel msg 8 (dac 366 / fid 63) type.

    Parses options, optionally runs unittests, then performs the requested
    actions: encode a message, print SQL create / LaTeX / text tables, print
    the csv field list, or decode messages from args/stdin.
    """
    from optparse import OptionParser

    parser = OptionParser(usage="%prog [options]")
    parser.add_option("--unit-test", dest="unittest", default=False, action="store_true", help="run the unit tests")
    parser.add_option(
        "-v", "--verbose", dest="verbose", default=False, action="store_true", help="Make the test output verbose"
    )
    # FIX: remove nmea from binary messages. No way to build the whole packet?
    # FIX: or build the surrounding msg 8 for a broadcast?
    typeChoices = ("binary", "nmeapayload", "nmea")  # FIX: what about a USCG type message?
    parser.add_option(
        "-t",
        "--type",
        choices=typeChoices,
        type="choice",
        dest="ioType",
        default="nmeapayload",
        help="What kind of string to write for encoding (" + ", ".join(typeChoices) + ") [default: %default]",
    )
    outputChoices = ("std", "html", "csv", "sql")
    parser.add_option(
        "-T",
        "--output-type",
        choices=outputChoices,
        type="choice",
        dest="outputType",
        default="std",
        help="What kind of string to output (" + ", ".join(outputChoices) + ") " "[default: %default]",
    )
    parser.add_option(
        "-o", "--output", dest="outputFileName", default=None, help="Name of the python file to write [default: stdout]"
    )
    parser.add_option(
        "-f",
        "--fields",
        dest="fieldList",
        default=None,
        action="append",
        choices=fieldList,
        help="Which fields to include in the output.  Currently only for csv " "output [default: all]",
    )
    parser.add_option(
        "-p",
        "--print-csv-field-list",
        dest="printCsvfieldList",
        default=False,
        action="store_true",
        help="Print the field name for csv",
    )
    parser.add_option(
        "-c",
        "--sql-create",
        dest="sqlCreate",
        default=False,
        action="store_true",
        help="Print out an sql create command for the table.",
    )
    parser.add_option(
        "--latex-table",
        dest="latexDefinitionTable",
        default=False,
        action="store_true",
        help="Print a LaTeX table of the type",
    )
    parser.add_option(
        "--text-table",
        dest="textDefinitionTable",
        default=False,
        action="store_true",
        help="Print delimited table of the type (for Word table importing)",
    )
    parser.add_option(
        "--delimt-text-table",
        dest="delimTextDefinitionTable",
        default=" ",
        help="Delimiter for text table [default: '%default'] " "(for Word table importing)",
    )
    dbChoices = ("sqlite", "postgres")
    parser.add_option(
        "-D",
        "--db-type",
        dest="dbType",
        default="postgres",
        choices=dbChoices,
        type="choice",
        help="What kind of database (" + ", ".join(dbChoices) + ") " "[default: %default]",
    )
    addMsgOptions(parser)
    options, args = parser.parse_args()
    if options.unittest:
        sys.argv = [sys.argv[0]]
        if options.verbose:
            sys.argv.append("-v")
        unittest.main()
    outfile = sys.stdout
    if None != options.outputFileName:
        outfile = file(options.outputFileName, "w")
    if options.doEncode:
        # Make sure all non required options are specified.
        if None == options.RepeatIndicatorField:
            parser.error("missing value for RepeatIndicatorField")
        if None == options.UserIDField:
            parser.error("missing value for UserIDField")
        if None == options.monthField:
            parser.error("missing value for monthField")
        if None == options.dayField:
            parser.error("missing value for dayField")
        if None == options.hourField:
            parser.error("missing value for hourField")
        if None == options.minField:
            parser.error("missing value for minField")
        if None == options.stationidField:
            parser.error("missing value for stationidField")
        if None == options.waterlevelField:
            parser.error("missing value for waterlevelField")
        if None == options.datumField:
            parser.error("missing value for datumField")
        if None == options.sigmaField:
            parser.error("missing value for sigmaField")
        if None == options.sourceField:
            parser.error("missing value for sourceField")
        msgDict = {
            "MessageID": "8",
            "RepeatIndicator": options.RepeatIndicatorField,
            "UserID": options.UserIDField,
            "Spare": "0",
            "dac": "366",
            "fid": "63",
            "month": options.monthField,
            "day": options.dayField,
            "hour": options.hourField,
            "min": options.minField,
            "stationid": options.stationidField,
            "waterlevel": options.waterlevelField,
            "datum": options.datumField,
            "sigma": options.sigmaField,
            "source": options.sourceField,
        }
        bits = encode(msgDict)
        if "binary" == options.ioType:
            print str(bits)
        elif "nmeapayload" == options.ioType:
            # FIX: figure out if this might be necessary at compile time
            bitLen = len(bits)
            if bitLen % 6 != 0:
                bits = bits + BitVector(size=(6 - (bitLen % 6)))  # Pad out to multiple of 6
            print binary.bitvectoais6(bits)[0]
        # FIX: Do not emit this option for the binary message payloads.  Does not make sense.
        elif "nmea" == options.ioType:
            nmea = uscg.create_nmea(bits)
            print nmea
        else:
            sys.exit("ERROR: unknown ioType.  Help!")
    if options.sqlCreate:
        sqlCreateStr(outfile, options.fieldList, dbType=options.dbType)
    if options.latexDefinitionTable:
        latexDefinitionTable(outfile)
    # For conversion to word tables
    if options.textDefinitionTable:
        textDefinitionTable(outfile, options.delimTextDefinitionTable)
    if options.printCsvfieldList:
        # Make a csv separated list of fields that will be displayed for csv
        if None == options.fieldList:
            options.fieldList = fieldList
        import StringIO

        buf = StringIO.StringIO()
        for field in options.fieldList:
            buf.write(field + ",")
        result = buf.getvalue()
        if result[-1] == ",":
            print result[:-1]
        else:
            print result
    if options.doDecode:
        if len(args) == 0:
            args = sys.stdin
        for msg in args:
            bv = None
            if msg[0] in ("$", "!") and msg[3:6] in ("VDM", "VDO"):
                # Found nmea
                # FIX: do checksum
                bv = binary.ais6tobitvec(msg.split(",")[5])
            else:
                # either binary or nmeapayload... expect mostly nmeapayloads
                # assumes that an all 0 and 1 string can not be a nmeapayload
                binaryMsg = True
                for c in msg:
                    if c not in ("0", "1"):
                        binaryMsg = False
                        break
                if binaryMsg:
                    bv = BitVector(bitstring=msg)
                else:  # nmeapayload
                    bv = binary.ais6tobitvec(msg)
            printFields(
                decode(bv),
                out=outfile,
                format=options.outputType,
                fieldList=options.fieldList,
                dbType=options.dbType,
            )
def load_data(cx, datafile=sys.stdin, verbose=False, uscg=True):
    """Try to read data from an open file object.  Not yet well tested.

    Decodes AIS messages 1-5, 18 and 19 and inserts them into a sqlite
    database, adding USCG timestamp/station fields and duplicate-packet
    tracking for the position reports.

    @param cx: database connection
    @param verbose: print out more if true
    @param uscg: Process uscg tail information to get timestamp and receive station
    @rtype: None
    @return: Nothing
    @note: can not handle multiline AIS messages.  They must be normalized first.
    """
    v = verbose  # Hmm... "v"... the irony
    cu = cx.cursor()
    lineNum = 0
    next_key = 0
    max_key = get_max_key(cx)
    if max_key is not None:
        next_key = max_key + 1
    print 'keys_starting_at:', next_key
    message_set = (1, 2, 3, 4, 5, 18, 19)
    counts = {}
    for msg_num in message_set:
        counts[msg_num] = 0
    counts['checksum_failed'] = 0
    track_dups = TrackDuplicates(lookback_length=1000)
    for line in datafile:
        lineNum += 1
        if lineNum % 1000 == 0:
            print lineNum
            cx.commit()
        # NOTE(review): ('VDM|VDO') is a single string, so this is a substring
        # containment test, not tuple membership - likely meant ('VDM','VDO').
        if len(line) < 15 or line[3:6] not in ('VDM|VDO'):
            continue  # Not an AIS VHF message
        #print 'FIX: validate checksum'
        if not nmea.checksum.isChecksumValid(line):
            # NOTE(review): warned and counted, but the line is still processed.
            print >> sys.stderr, 'WARNING: invalid checksum:\n\t', line,
            print >> sys.stderr, ' ', nmea.checksum.checksumStr(line)
            counts['checksum_failed'] += 1
        fields = line.split(',')  # FIX: use this split throughout below...
        try:
            # First armored character of the payload gives the message type.
            msg_num = int(binary.ais6tobitvec(fields[5][0]))
        except:
            print 'line would not decode', line
            continue
        if verbose:
            print 'msg_num:', msg_num
        if msg_num not in message_set:
            if verbose:
                print 'skipping', line
                print '  not in msg set:', str(message_set)
            continue
        try:
            bv = binary.ais6tobitvec(fields[5])
        except:
            print >> sys.stderr, 'ERROR: Unable to decode bits in line:\n\t', line
            traceback.print_exc(file=sys.stderr)
            continue
        # FIX: need to take padding into account ... right before the *
        if msg_num in (1, 2, 3, 4, 18):
            if len(bv) != 168:
                print 'ERROR: skipping bad one slot message, line:', lineNum
                print '  ', line,
                print '  Got length', len(bv), 'expected', 168
                continue
        elif msg_num == 5:
            # 426 has 2 pad bits
            if len(bv) not in (424, 426):
                print 'ERROR: skipping bad shipdata message, line:', lineNum
                print '  ', line,
                print '  Got length', len(bv), 'expected', 424
                continue
        ins = None
        try:
            if msg_num == 1:
                ins = ais.ais_msg_1_handcoded.sqlInsert(
                    ais.ais_msg_1_handcoded.decode(bv), dbType='sqlite')
            elif msg_num == 2:
                ins = ais.ais_msg_2_handcoded.sqlInsert(
                    ais.ais_msg_2_handcoded.decode(bv), dbType='sqlite')
            elif msg_num == 3:
                ins = ais.ais_msg_3_handcoded.sqlInsert(
                    ais.ais_msg_3_handcoded.decode(bv), dbType='sqlite')
            elif msg_num == 4:
                ins = ais.ais_msg_4_handcoded.sqlInsert(
                    ais.ais_msg_4_handcoded.decode(bv), dbType='sqlite')
            elif msg_num == 5:
                ins = ais.ais_msg_5.sqlInsert(ais.ais_msg_5.decode(bv),
                                              dbType='sqlite')
            elif msg_num == 18:
                ins = ais.ais_msg_18.sqlInsert(
                    ais.ais_msg_18.decode(bv), dbType='sqlite')  # Class B position
            elif msg_num == 19:
                ins = ais.ais_msg_19.sqlInsert(
                    ais.ais_msg_19.decode(bv), dbType='sqlite')  # Class B position
            else:
                print 'Warning... not handling type', msg_num, 'line:', lineNum
                continue
        except:
            print 'ERROR: some decode error?', 'line:', lineNum
            print '  ', line
            continue
        counts[msg_num] += 1
        if uscg:
            from aisutils.uscg import uscg_ais_nmea_regex
            match = uscg_ais_nmea_regex.search(line).groupdict()
            try:
                cg_sec = int(float(match['timeStamp']))
                ins.add('cg_sec', cg_sec)
                #ins.add('cg_sec', int(float(match['timeStamp'])) )
                ins.add(
                    'cg_timestamp',
                    str(
                        datetime.datetime.utcfromtimestamp(
                            float(match['timeStamp']))))
                ins.add('cg_r', match['station'])
            except:
                print >> sys.stderr, match
                print >> sys.stderr, 'bad uscg sections', line,
                continue
            # Optional fields that are not always there
            if match['time_of_arrival'] is not None:
                try:
                    ins.add('cg_t_arrival', float(match['time_of_arrival']))
                except:
                    print >> sys.stderr, 'WARNING: corrupted time of arrival (T) in line. T ignored\n\t', line
                    pass  # Not critical if corrupted
            if match['slot'] is not None:
                ins.add('cg_s_slotnum', int(match['slot']))
            if msg_num in (1, 2, 3, 4):
                pkt_id, dup_flag = track_dups.check_packet(
                    cg_sec, fields[5])  # Pass in the NMEA payload string of data
                if v:
                    print 'dup_check:', pkt_id, dup_flag, fields[5]
                ins.add('pkt_id', pkt_id)
                ins.add('dup_flag', dup_flag)
        ins.add('key', next_key)
        next_key += 1
        if verbose:
            print str(ins)
        try:
            cu.execute(str(ins))
        except pysqlite2.dbapi2.OperationalError, params:
        #except OperationalError, params:
            if -1 != params.message.find('no such table'):
                print 'ERROR:', params.message
                sys.exit('You probably need to run with --with-create')
            print 'params', params
            print type(params)
            print 'ERROR: sql error?', 'line:', lineNum
            print '  ', str(ins)
            print '  ', line
            if False:
                # Give some debugging flexibility
                from IPython.Shell import IPShellEmbed
                ipshell = IPShellEmbed(argv=[])
                ipshell()
                sys.exit('Gave up')
def loadData(cx,datafile,verbose=False , uscg=True): ''' Try to read data from an open file object. Not yet well tested. @param cx: database connection @param verbose: pring out more if true @param uscg: Process uscg tail information to get timestamp and receive station @rtype: None @return: Nothing @note: can not handle multiline AIS messages. They must be normalized first. ''' cu = cx.cursor() lineNum = 0 import psycopg2 # For ProgrammingError exception counts = {1:0,2:0,3:0,5:0} # buf=[] for line in datafile: lineNum += 1 if lineNum%1000==0: print lineNum if line[3:6] not in ('VDM|VDO'): continue # Not an AIS VHF message try: msgNum = int(binary.ais6tobitvec(line.split(',')[5][0])) except: print '# line would not decode',line continue if verbose: print '# msgNum:',msgNum if msgNum not in (1,2,3,5): if verbose: print '# skipping',line continue payload = bv = binary.ais6tobitvec(line.split(',')[5]) # TODO(schwehr): Need to take padding into account. if msgNum in (1,2,3): if len(bv) < 168: print '# ERROR: skipping bad position message, line:',lineNum print '# ',line, print '# Got length',len(bv), 'expected', 168 continue elif msgNum == 5: if len(bv) < 424: print '# ERROR: skipping bad shipdata message, line:',lineNum print '# ',line, print '# Got length',len(bv), 'expected', 424 continue fields=line.split(',') cg_sec = None cg_station = None if uscg: cg_sec = int(float(fields[-1])) # US Coast Guard time stamp. 
#print len(fields),fields for i in range(len(fields)-1,5,-1): if 0<len(fields[i]) and 'r' == fields[i][0]: cg_station = fields[i] break # Found it so ditch the for loop ins = None # FIX: redo this for all messages using the new aisutils structure if True: if msgNum==1: ins = ais.ais_msg_1.sqlInsert(ais.ais_msg_1.decode(bv),dbType='postgres') elif msgNum==2: ins = ais.ais_msg_2.sqlInsert(ais.ais_msg_2.decode(bv),dbType='postgres') elif msgNum==3: ins = ais.ais_msg_3.sqlInsert(ais.ais_msg_3.decode(bv),dbType='postgres') elif msgNum==5: params = ais.ais_msg_5.decode(bv) #print params # FIX: make this a command line option params['name'] = params['name'].replace('"','').replace('\\','').strip('@').strip() params['callsign'] = params['callsign'].replace('"','').replace('\\','').strip('@').strip() params['destination'] = params['destination'].replace('"','').replace('\\','').strip('@').strip() #params.callsign = params.callsign.strip() #params. = params..strip() ins = ais.ais_msg_5.sqlInsert(params,dbType='postgres') else: print '# Warning... not handling type',msgNum,'line:',lineNum continue counts[msgNum] += 1 if uscg: if None != cg_sec: ins.add('cg_sec', cg_sec) if None != cg_timestamp: ins.add('cg_timestamp', cg_timestamp) if None != cg_station: ins.add('cg_r', cg_station) if verbose: print str(ins) print '# line:',line try: cu.execute(str(ins)) except Exception, e: print params # TODO(schwehr): Give a better error message. print '# exception:',str(type(Exception)), str(e) print '# ERROR: sql error?','line:',lineNum print '# ', str(ins) print '# ',line sys.exit('EARLY!!!') if lineNum%5000==0: if verbose: print '# committing batch' cx.commit()
# NOTE(review): fragment — the matching 'if' of this 'else' and the definition
# of `success`, `options`, `args` precede this chunk and are not visible here.
else:
    print 'FAILED'
    success = False
if not success:
    sys.exit('Something Failed')
del success  # Hide success from epydoc

if options.unittest:
    # Re-run argv so unittest.main() does not see our own options.
    sys.argv = [sys.argv[0]]
    if options.verbose:
        sys.argv.append('-v')
    unittest.main()

outfile = sys.stdout
if None != options.outputFileName:
    outfile = file(options.outputFileName, 'w')

bv = None
for msg in args:
    # Build a BitVector from whichever input representation was requested.
    if 'binary' == options.inputType:
        bv = BitVector(bitstring=msg)
    elif 'nmeapayload' == options.inputType:
        bv = binary.ais6tobitvec(msg)
    elif 'nmea' == options.inputType:
        # Payload is the 6th comma-separated NMEA field.
        bv = binary.ais6tobitvec(msg.split(',')[5])
    else:
        sys.exit('ERROR: unknown inputType. Help!')
    #import ais.ais_msg_8 as m8
    #m8dict = m8.decode(bv)
    import ais.waterlevel as wl
    #wl.printFields(wl.decode(m8dict['BinaryData']),out=outfile,format=options.outputType)
    wl.printFields(wl.decode(bv), out=outfile, format=options.outputType)
# NOTE(review): fragment — `match`, `uscg`, `cg_s`, `dt`, `time_of_arrival`
# are assigned earlier, outside this chunk.
dt_time_of_arrival = 'N/A'.rjust(8)
time_of_arrival_prev = time_of_arrival
try:
    # SOTDMA slot number -> seconds into the minute (2250 slots/minute).
    slot_num = int(match['slot'])
    slot_t = slot_num / 2250. * 60
    slot_t = '%5.2f' % slot_t
except:
    slot_num = 'N/A'
    slot_t = 'N/A'
# Trailing comma: the per-message columns below continue this output line.
print '|', uscg, '|', cg_s, '|', dt, '|', time_of_arrival, '|', dt_time_of_arrival, '|', match['t_recver_hhmmss'], '|', slot_num, '|', slot_t, '|',
if match['body'][0] in ('1', '2', '3'):
    bits = binary.ais6tobitvec(match['body'])
    msg = ais_msg_1.decode(bits)
    #print msg.keys()
    #all_keys.update(set(msg.keys()))
    msg_slot = 'N/A'
    # Normalize optional comm-state fields so the format() below never KeyErrors.
    if 'slot_number' not in msg:
        msg['slot_number'] = 'N/A'
        msg['slot_time'] = 'N/A'
    else:
        msg['slot_time'] = msg['slot_number'] / 2250. * 60
    if 'commstate_utc_hour' not in msg:
        msg['commstate_utc_hour'] = msg['commstate_utc_min'] = 'N/A'
    print '{slot_number}|{slot_time}|{commstate_utc_hour}|{commstate_utc_min}|{TimeStamp}|{UserID}|'.format(**msg)
# NOTE(review): the body of this branch continues beyond this chunk.
elif match['body'][0] == '4':
from aisutils.BitVector import BitVector
from aisutils import binary

import ais_msg_1
import ais_msg_8
import sls.waterlevel

if __name__ == '__main__':
    # Try to parse some binary message
    # NOTE(review): everything below sits under `if False:` as reconstructed
    # here — i.e. this demo is disabled; confirm the original guard extent.
    if False:
        nmeaStr = '!AIVDM,1,1,,A,85OpLV1Kf98p96dWWPLSViUfJlU@SV>cDF2Wq5>`=u8CnEFGCIOq,0*70,r003669983,1165795916'
        msgPayload = nmeaStr.split(',')[5]
        print 'nmea string: ', nmeaStr
        print 'message payload:', msgPayload
        bv = binary.ais6tobitvec(msgPayload)
        print len(bv), bv
        msgDict = ais_msg_8.bin_broadcastDecode(bv)
        ais_msg_8.bin_broadcastPrintFields(msgDict)
        # Walk the binary payload: skip the 39-bit msg-8 header, then
        # peel off DAC (10 bits) and FI (6 bits).
        bv = bv[39:]
        print 'dac: ', bv[:10], int(bv[:10])
        bv = bv[10:]
        print 'fid: ', bv[:6], int(bv[:6])
        bv = bv[6:]
        print 'bits:', bv[:16], int(bv[:10])
        bv = bv[10:]
        print 'len: ', len(bv)
'Append the nmea string after the line [default: do not print the line]' ) parser.add_option('-o', '--output', dest='outputFilename', default=None, help='Name of the file to write [default: stdout]') (options, args) = parser.parse_args() o = sys.stdout if None != options.outputFilename: o = open(options.outFilename, 'w') print args for filename in args: print filename for line in file(filename): if line[0] == '#': continue fields = line.split(',')[:6] if '1' != fields[2]: # Must be the start of a sequence continue if len(fields[5]) < 7: continue bv = binary.ais6tobitvec(fields[5][:7]) # Hacked for speed int(bv[8:38]) mmsi = str(int(bv[8:38])) o.write(mmsi) if options.dumpLine: o.write(' ' + line.strip()) o.write('\n')
def loadData(cx, datafile, verbose=False, uscg=True):
    '''
    Try to read data from an open file object.

    Not yet well tested.

    @param cx: database connection
    @param verbose: pring out more if true
    @param uscg: Process uscg tail information to get timestamp and receive station
    @rtype: None
    @return: Nothing

    @note: can not handle multiline AIS messages.  They must be normalized first.
    '''
    cu = cx.cursor()
    lineNum = 0
    #counts = {1:0,2:0,3:0,4:0,5:0}
    counts = {}       # Messages actually inserted, by type.
    countsTotal = {}  # Includes ignored messages
    for i in range(64):
        counts[i] = 0
        countsTotal[i] = 0
    for line in datafile:
        lineNum += 1
        if lineNum % 1000 == 0:
            print lineNum
            cx.commit()
        # NOTE(review): ('VDM|VDO') is a plain string, so this is a substring
        # test on 'VDM|VDO'; it happens to accept exactly 'VDM' and 'VDO'.
        if line[3:6] not in ('VDM|VDO'):
            continue  # Not an AIS VHF message.
        try:
            # First 6-bit character of the payload encodes the message type.
            msgNum = int(binary.ais6tobitvec(line.split(',')[5][0]))
        except:
            print 'line would not decode', line
            continue
        countsTotal[msgNum] += 1
        if verbose:
            print 'msgNum:', msgNum
        payload = bv = binary.ais6tobitvec(line.split(',')[5])
        # TODO(schwehr): Take padding into account.x
        if msgNum in (1, 2, 3):
            if len(bv) < 168:
                print 'ERROR: skipping bad position message, line:', lineNum
                print '  ', line,
                print '  Got length', len(bv), 'expected', 168
                continue
        elif msgNum == 5:
            if len(bv) < 424:
                print 'ERROR: skipping bad shipdata message, line:', lineNum
                print '  ', line,
                print '  Got length', len(bv), 'expected', 424
                continue
        fields = line.split(',')
        cg_timestamp = None
        cg_station = None
        if uscg:
            try:
                cg_sec = int(float(fields[-1]))  # US Coast Guard time stamp.
                print 'cg_sec:', cg_sec, type(cg_sec)
                cg_timestamp = datetime.datetime.utcfromtimestamp(float(cg_sec))
            except:
                # NOTE(review): the station search only runs when the timestamp
                # parse FAILS; other loadData variants search unconditionally.
                # The __main__ below prints that cg handling is broken.
                #print len(fields),fields
                for i in range(len(fields) - 1, 5, -1):
                    if 0 < len(fields[i]) and 'r' == fields[i][0]:
                        cg_station = fields[i]
                        break  # Found it so ditch the for loop
        ins = None
        try:
            # Build the SQL INSERT for each supported message type.
            if msgNum == 1:
                ins = ais.ais_msg_1.sqlInsert(ais.ais_msg_1.decode(bv), dbType='sqlite')
            elif msgNum == 2:
                ins = ais.ais_msg_2.sqlInsert(ais.ais_msg_2.decode(bv), dbType='sqlite')
            elif msgNum == 3:
                ins = ais.ais_msg_3.sqlInsert(ais.ais_msg_3.decode(bv), dbType='sqlite')
            elif msgNum == 4:
                ins = ais.ais_msg_4.sqlInsert(ais.ais_msg_4.decode(bv), dbType='sqlite')
            elif msgNum == 5:
                ins = ais.ais_msg_5.sqlInsert(ais.ais_msg_5.decode(bv), dbType='sqlite')
            elif msgNum == 18:
                ins = ais.ais_msg_18.sqlInsert(ais.ais_msg_18.decode(bv), dbType='sqlite')
            elif msgNum == 19:
                ins = ais.ais_msg_19.sqlInsert(ais.ais_msg_19.decode(bv), dbType='sqlite')
            else:
                if verbose:
                    print 'Warning... not handling type', msgNum, 'line:', lineNum
                continue
        except:
            print 'ERROR: some decode error?', 'line:', lineNum
            print '  ', line
            continue
        counts[msgNum] += 1
        if uscg:
            # FIX: make cg_timestamp work
            if None != cg_timestamp:
                ins.add('cg_timestamp', cg_timestamp)
            if None != cg_station:
                ins.add('cg_r', cg_station)
        if verbose:
            print str(ins)
        # FIX: redo this correctly???
        #try:
        print str(ins)
        cu.execute(str(ins))
    # Summary of what was seen vs what was inserted.
    print '\nMessages found:'
    for key in countsTotal:
        if countsTotal[key] > 0:
            print str(key) + ':', countsTotal[key]
    print '\nMessages processed:'
    for key in counts:
        if counts[key] > 0:
            print str(key) + ':', counts[key]
    cx.commit()

############################################################
if __name__ == '__main__':
    from optparse import OptionParser
    parser = OptionParser(usage="%prog [options] file1.ais [file2.ais ...]",
                          version='%prog ')
    parser.add_option('-d', '--database-file', dest='databaseFilename', default='ais.db3',
                      help='Name of the sqlite3 database file to write [default: %default]')
    parser.add_option('-C', '--with-create', dest='createTables', default=False, action='store_true',
                      help='Create the tables in the database')
    parser.add_option('--without-uscg', dest='uscgTail', default=True, action='store_false',
                      help='Do not look for timestamp and receive station at the end of each line [default: with-uscg]')
    parser.add_option('-v', '--verbose', dest='verbose', default=False, action='store_true',
                      help='Make program output more verbose info as it runs')
    (options, args) = parser.parse_args()

    cx = sqlite3.connect(options.databaseFilename)
    print 'FIX: cg_sec and cg_timestamp are currently broken'
    if options.createTables:
        createTables(cx, verbose=options.verbose)
    if len(args) == 0:
        print 'processing from stdin'
        loadData(cx, sys.stdin, verbose=options.verbose, uscg=options.uscgTail)
    else:
        for filename in args:
            print 'processing file:', filename
            loadData(cx, file(filename, 'r'), verbose=options.verbose, uscg=options.uscgTail)
# NOTE(review): fragment — `ws_report` (spreadsheet worksheet?), `col`,
# `ws_report_row`, `args`, and `m5` are defined before this chunk.
ws_report.write(ws_report_row, col, 'destination'); col += 1
ws_report.write(ws_report_row, col, 'first reported (UTCsec)'); col += 1
ws_report_row += 1

# Group every raw NMEA line by the MMSI decoded from its msg-5 payload.
msgsByShip = {}
# FIX: error checking?
for filename in args:
    linenum = 0
    for line in file(filename):
        line = line.strip()
        linenum += 1
        if linenum % 1000 == 0:
            print linenum  # Progress indicator
        fields = line.split(',')
        bv = binary.ais6tobitvec(fields[5][:38])
        mmsi = m5.decodeUserID(bv)
        timestamp = fields[-1]
        if mmsi in msgsByShip:
            #if line not in msgsByShip:
            msgsByShip[mmsi].append(line)
            #if line in timeByMsgs
        else:
            msgsByShip[mmsi] = [line]

print 'Finished scan. Now processing ships.\n'
ships = msgsByShip.keys()
ships.sort()
from aisutils import binary

import ais_msg_1
import ais_msg_8
import sls.waterlevel

if __name__ == "__main__":
    # Try to parse some binary message
    # NOTE(review): everything below sits under `if False:` as reconstructed
    # here — i.e. this demo is disabled; confirm the original guard extent.
    if False:
        nmeaStr = "!AIVDM,1,1,,A,85OpLV1Kf98p96dWWPLSViUfJlU@SV>cDF2Wq5>`=u8CnEFGCIOq,0*70,r003669983,1165795916"
        msgPayload = nmeaStr.split(",")[5]
        print "nmea string: ", nmeaStr
        print "message payload:", msgPayload
        bv = binary.ais6tobitvec(msgPayload)
        print len(bv), bv
        msgDict = ais_msg_8.bin_broadcastDecode(bv)
        ais_msg_8.bin_broadcastPrintFields(msgDict)
        # Walk the binary payload: skip the 39-bit msg-8 header, then
        # peel off DAC (10 bits) and FI (6 bits).
        bv = bv[39:]
        print "dac: ", bv[:10], int(bv[:10])
        bv = bv[10:]
        print "fid: ", bv[:6], int(bv[:6])
        bv = bv[6:]
        print "bits:", bv[:16], int(bv[:10])
        bv = bv[10:]
        print "len: ", len(bv)
'''
import sys
from aisutils.BitVector import BitVector
from aisutils import binary

import ais_msg_8
import sls.waterlevel

if __name__ == '__main__':
    print 'FIX: broken'
    vdm = '!AIVDM,1,1,4,A,8030ot1?0@>PSpPPPC<2<oURAU=>T08f@02PSpPPP3C3<oU=d5<U00BH@02PSpPPP3C3EoU:A5<TwPPO@02PSpPPP2hk<oRWU5;si0Pl@02O<0PPPP3D<oPPEU;M418g@02PSpPPP2hlEoRQgU;j@17p@00,2*32'
    msg = vdm.split(',')[5]
    bvMsg = binary.ais6tobitvec(msg)
    # Decode the message-8 wrapper, then pull out the inner binary payload.
    msg8 = ais_msg_8.decode(bvMsg)
    bvMsg8 = msg8['BinaryData']
    del msg8['BinaryData']
    ais_msg_8.printFields(msg8)
    print

    # Now deal with the St Lawrence Seaway Header
    # NOTE(review): `sls_header` is never imported in this chunk — this is a
    # NameError at runtime (consistent with the 'FIX: broken' above); the
    # correct module path needs to be confirmed before fixing.
    slsHdr = sls_header.decode(bvMsg8)
    bvHdr = slsHdr['BinaryData']
    del slsHdr['BinaryData']
    sls_header.printFields(slsHdr)
    #print slsHdr.keys()
def build_dist_database(database_filename, log_files, verbose=False): cx = sqlite3.connect(database_filename) print 'WARNING: not saving the station name' cx.execute(''' CREATE TABLE IF NOT EXISTS distance ( -- Save space, no key -- ts INTEGER, -- Save more space julian_day INTEGER, -- x REAL, -- y REAL, dist_km REAL --, --station VARCHAR(15) ); ''') cu = cx.cursor() counts = {'nogps': 0} for filename in log_files: if verbose: print 'file:',filename sys.stdout.flush() for line_num, line in enumerate(file(filename)): if 'AIVDM,1,1' not in line: continue match = uscg_ais_nmea_regex.search(line).groupdict() message_id = match['body'][0] # First letter is the message type if message_id not in ('1','2','3'): continue if len(match['body']) != 28: # 6 bits per character raise AisErrorBadNumBits('expected 168, got %d' % len(match['body']) / 6) bits = binary.ais6tobitvec(match['body'][:20]) # Don't need any of the other bits, so do not waste time x = binary.signedIntFromBV(bits[61:89]) / 600000. y = binary.signedIntFromBV(bits[89:116]) / 600000. 
if x > 180 or y > 90: counts['nogps'] += 1 continue station = match['station'] julian_day = int(datetime.datetime.utcfromtimestamp(int(match['timeStamp'])).strftime('%j')) d_km = dist_utm_km( (x,y), station_locations[station] ) #cu.execute('INSERT INTO distance VALUES (:julian_day, :x, :y, :dist_km, :station)', #{'julian_day': julian_day, 'x':x, 'y':y, 'dist_km': d_km, 'station':station} ) #cu.execute('INSERT INTO distance VALUES (:julian_day, :x, :y, :dist_km)', # {'julian_day': julian_day, 'x':x, 'y':y, 'dist_km': d_km, } ) cu.execute('INSERT INTO distance VALUES (:julian_day, :dist_km)', {'julian_day': julian_day, 'dist_km': d_km, } ) if line_num % 10000 == 9999: cx.commit() cx.commit() if False: print 'Creating indexes' try: cx.execute('CREATE INDEX idx_dist_day ON distance(julian_day);') cx.execute('CREATE INDEX idx_dist_dist ON distance(dist_km);') #cx.execute('CREATE INDEX idx_dist_station ON distance(station);') cx.commit() except sqlite3.OperationalError: print 'Appears indexes were already created' return cx, counts
def loadData(cx, datafile, verbose=False, uscg=True): ''' Try to read data from an open file object. Not yet well tested. @param cx: database connection @param verbose: pring out more if true @param uscg: Process uscg tail information to get timestamp and receive station @rtype: None @return: Nothing @note: can not handle multiline AIS messages. They must be normalized first. ''' cu = cx.cursor() lineNum = 0 import psycopg2 # For ProgrammingError exception counts = {1: 0, 2: 0, 3: 0, 5: 0} # buf=[] for line in datafile: lineNum += 1 if lineNum % 1000 == 0: print lineNum if line[3:6] not in ('VDM|VDO'): continue # Not an AIS VHF message try: msgNum = int(binary.ais6tobitvec(line.split(',')[5][0])) except: print '# line would not decode', line continue if verbose: print '# msgNum:', msgNum if msgNum not in (1, 2, 3, 5): if verbose: print '# skipping', line continue payload = bv = binary.ais6tobitvec(line.split(',')[5]) # TODO(schwehr): Need to take padding into account. if msgNum in (1, 2, 3): if len(bv) < 168: print '# ERROR: skipping bad position message, line:', lineNum print '# ', line, print '# Got length', len(bv), 'expected', 168 continue elif msgNum == 5: if len(bv) < 424: print '# ERROR: skipping bad shipdata message, line:', lineNum print '# ', line, print '# Got length', len(bv), 'expected', 424 continue fields = line.split(',') cg_sec = None cg_station = None if uscg: cg_sec = int(float(fields[-1])) # US Coast Guard time stamp. 
#print len(fields),fields for i in range(len(fields) - 1, 5, -1): if 0 < len(fields[i]) and 'r' == fields[i][0]: cg_station = fields[i] break # Found it so ditch the for loop ins = None # FIX: redo this for all messages using the new aisutils structure if True: if msgNum == 1: ins = ais.ais_msg_1.sqlInsert(ais.ais_msg_1.decode(bv), dbType='postgres') elif msgNum == 2: ins = ais.ais_msg_2.sqlInsert(ais.ais_msg_2.decode(bv), dbType='postgres') elif msgNum == 3: ins = ais.ais_msg_3.sqlInsert(ais.ais_msg_3.decode(bv), dbType='postgres') elif msgNum == 5: params = ais.ais_msg_5.decode(bv) #print params # FIX: make this a command line option params['name'] = params['name'].replace('"', '').replace( '\\', '').strip('@').strip() params['callsign'] = params['callsign'].replace( '"', '').replace('\\', '').strip('@').strip() params['destination'] = params['destination'].replace( '"', '').replace('\\', '').strip('@').strip() #params.callsign = params.callsign.strip() #params. = params..strip() ins = ais.ais_msg_5.sqlInsert(params, dbType='postgres') else: print '# Warning... not handling type', msgNum, 'line:', lineNum continue counts[msgNum] += 1 if uscg: if None != cg_sec: ins.add('cg_sec', cg_sec) if None != cg_timestamp: ins.add('cg_timestamp', cg_timestamp) if None != cg_station: ins.add('cg_r', cg_station) if verbose: print str(ins) print '# line:', line try: cu.execute(str(ins)) except Exception, e: print params # TODO(schwehr): Give a better error message. print '# exception:', str(type(Exception)), str(e) print '# ERROR: sql error?', 'line:', lineNum print '# ', str(ins) print '# ', line sys.exit('EARLY!!!') if lineNum % 5000 == 0: if verbose: print '# committing batch' cx.commit()
# NOTE(review): fragment — `options`, `args`, `outfile`, `printFields`,
# `decode`, and the StringIO import precede this chunk; the printFields(...)
# call at the bottom continues beyond it.
buf = StringIO.StringIO()
for field in options.fieldList:
    buf.write(field + ',')
result = buf.getvalue()
# Drop the trailing comma from the joined field list.
if result[-1] == ',':
    print result[:-1]
else:
    print result

if options.doDecode:
    if len(args) == 0:
        args = sys.stdin
    for msg in args:
        bv = None
        if msg[0] in ('$', '!') and msg[3:6] in ('VDM', 'VDO'):
            # Found nmea
            # FIX: do checksum
            bv = binary.ais6tobitvec(msg.split(',')[5])
        else:
            # either binary or nmeapayload... expect mostly nmeapayloads
            # assumes that an all 0 and 1 string can not be a nmeapayload
            binaryMsg = True
            for c in msg:
                if c not in ('0', '1'):
                    binaryMsg = False
                    break
            if binaryMsg:
                bv = BitVector(bitstring=msg)
            else:  # nmeapayload
                bv = binary.ais6tobitvec(msg)
        printFields(decode(bv)
                    , out=outfile
                    , format=options.outputType
#try: match_obj = uscg_ais_nmea_regex.search(line) if match_obj is None: sys.stderr.write(line) continue station = match_obj.group('station') #except: # sys.stderr.write('bad line: %s\n' %line) # continue fields = line.split(',')[:6] if '1'!=fields[2]: # Must be the start of a sequence #if verbose: # print 'skipping based on field 2',line continue if len(fields[5])<39: #if verbose: # print 'skipping',line continue bv = binary.ais6tobitvec(fields[5][:39]) # Hacked for speed #print int(bv[8:38]),aisstring.decode(bv[112:232],True) name = aisstring.decode(bv[112:232],True).strip('@ ') mmsi = str(int(bv[8:38])) imo = str(int(bv[40:70])) #if len(name)<1 or name[0]=='X': print 'TROUBLE with line:',line if len(name)<1: print 'TROUBLE with line:',line o.write (mmsi+' '+imo+' '+station+' '+name+'\n')
def main():
    '''Command-line driver: encode/decode this message type and emit
    schema/table descriptions, per the selected options.'''
    from optparse import OptionParser
    parser = OptionParser(usage="%prog [options]")
    parser.add_option('--unit-test', dest='unittest', default=False, action='store_true',
                      help='run the unit tests')
    parser.add_option('-v', '--verbose', dest='verbose', default=False, action='store_true',
                      help='Make the test output verbose')
    # FIX: remove nmea from binary messages.  No way to build the whole packet?
    # FIX: or build the surrounding msg 8 for a broadcast?
    typeChoices = ('binary', 'nmeapayload', 'nmea')  # FIX: what about a USCG type message?
    parser.add_option('-t', '--type', choices=typeChoices, type='choice',
                      dest='ioType', default='nmeapayload',
                      help='What kind of string to write for encoding ('+', '.join(typeChoices)+') [default: %default]')
    outputChoices = ('std', 'html', 'csv', 'sql')
    parser.add_option('-T', '--output-type', choices=outputChoices, type='choice',
                      dest='outputType', default='std',
                      help='What kind of string to output ('+', '.join(outputChoices)+') '
                      '[default: %default]')
    parser.add_option('-o', '--output', dest='outputFileName', default=None,
                      help='Name of the python file to write [default: stdout]')
    parser.add_option('-f', '--fields', dest='fieldList', default=None, action='append',
                      choices=fieldList,
                      help='Which fields to include in the output. Currently only for csv '
                      'output [default: all]')
    parser.add_option('-p', '--print-csv-field-list', dest='printCsvfieldList',
                      default=False, action='store_true',
                      help='Print the field name for csv')
    parser.add_option('-c', '--sql-create', dest='sqlCreate', default=False,
                      action='store_true',
                      help='Print out an sql create command for the table.')
    parser.add_option('--latex-table', dest='latexDefinitionTable',
                      default=False, action='store_true',
                      help='Print a LaTeX table of the type')
    parser.add_option('--text-table', dest='textDefinitionTable', default=False,
                      action='store_true',
                      help='Print delimited table of the type (for Word table importing)')
    parser.add_option('--delimt-text-table', dest='delimTextDefinitionTable',
                      default=' ',
                      help='Delimiter for text table [default: \'%default\'] '
                      '(for Word table importing)')
    dbChoices = ('sqlite', 'postgres')
    parser.add_option('-D', '--db-type', dest='dbType', default='postgres',
                      choices=dbChoices, type='choice',
                      help='What kind of database ('+', '.join(dbChoices)+') '
                      '[default: %default]')
    addMsgOptions(parser)

    options, args = parser.parse_args()
    if options.unittest:
        # Strip our own options so unittest.main() does not choke on them.
        sys.argv = [sys.argv[0]]
        if options.verbose:
            sys.argv.append('-v')
        unittest.main()
    outfile = sys.stdout
    if None != options.outputFileName:
        outfile = file(options.outputFileName, 'w')

    if options.doEncode:
        # Make sure all non required options are specified.
        if None == options.unavail_uintField:
            parser.error("missing value for unavail_uintField")
        if None == options.anUIntField:
            parser.error("missing value for anUIntField")
        if None == options.anIntField:
            parser.error("missing value for anIntField")
        if None == options.aBoolField:
            parser.error("missing value for aBoolField")
        if None == options.aStrField:
            parser.error("missing value for aStrField")
        if None == options.anUDecimalField:
            parser.error("missing value for anUDecimalField")
        if None == options.aDecimalField:
            parser.error("missing value for aDecimalField")
        if None == options.aFloatField:
            parser.error("missing value for aFloatField")
        msgDict = {
            'dac': '366',
            'reqDecimal': '122',
            'unavail_uint': options.unavail_uintField,
            'anUInt': options.anUIntField,
            'anInt': options.anIntField,
            'aBool': options.aBoolField,
            'aStr': options.aStrField,
            'anUDecimal': options.anUDecimalField,
            'aDecimal': options.aDecimalField,
            'aFloat': options.aFloatField,
        }
        bits = encode(msgDict)
        if 'binary' == options.ioType:
            print str(bits)
        elif 'nmeapayload' == options.ioType:
            # FIX: figure out if this might be necessary at compile time
            bitLen = len(bits)
            if bitLen % 6 != 0:
                bits = bits + BitVector(size=(6 - (bitLen % 6)))  # Pad out to multiple of 6
            print binary.bitvectoais6(bits)[0]
        # FIX: Do not emit this option for the binary message payloads.  Does not make sense.
        elif 'nmea' == options.ioType:
            nmea = uscg.create_nmea(bits)
            print nmea
        else:
            sys.exit('ERROR: unknown ioType. Help!')

    if options.sqlCreate:
        sqlCreateStr(outfile, options.fieldList, dbType=options.dbType)
    if options.latexDefinitionTable:
        latexDefinitionTable(outfile)
    # For conversion to word tables
    if options.textDefinitionTable:
        textDefinitionTable(outfile, options.delimTextDefinitionTable)
    if options.printCsvfieldList:
        # Make a csv separated list of fields that will be displayed for csv
        if None == options.fieldList:
            options.fieldList = fieldList
        import StringIO
        buf = StringIO.StringIO()
        for field in options.fieldList:
            buf.write(field + ',')
        result = buf.getvalue()
        # Drop the trailing comma from the joined field list.
        if result[-1] == ',':
            print result[:-1]
        else:
            print result
    if options.doDecode:
        if len(args) == 0:
            args = sys.stdin
        for msg in args:
            bv = None
            if msg[0] in ('$', '!') and msg[3:6] in ('VDM', 'VDO'):
                # Found nmea
                # FIX: do checksum
                bv = binary.ais6tobitvec(msg.split(',')[5])
            else:
                # either binary or nmeapayload... expect mostly nmeapayloads
                # assumes that an all 0 and 1 string can not be a nmeapayload
                binaryMsg = True
                for c in msg:
                    if c not in ('0', '1'):
                        binaryMsg = False
                        break
                if binaryMsg:
                    bv = BitVector(bitstring=msg)
                else:  # nmeapayload
                    bv = binary.ais6tobitvec(msg)
            printFields(decode(bv)
                        , out=outfile
                        , format=options.outputType
                        , fieldList=options.fieldList
                        , dbType=options.dbType
                        )
dt_time_of_arrival = 'N/A'.rjust(8) time_of_arrival_prev = time_of_arrival try: slot_num = int(match['slot']) slot_t = slot_num / 2250. * 60 slot_t = '%5.2f' % slot_t except: slot_num = 'N/A' slot_t = 'N/A' print '|',uscg,'|',cg_s,'|',dt,'|',time_of_arrival,'|', dt_time_of_arrival,'|', match['t_recver_hhmmss'], '|',slot_num, '|',slot_t , '|', if match['body'][0] in ('1','2','3'): bits = binary.ais6tobitvec(match['body']) msg = ais_msg_1.decode(bits) #print msg.keys() #all_keys.update(set(msg.keys())) msg_slot = 'N/A' if 'slot_number' not in msg: msg['slot_number'] = 'N/A' msg['slot_time'] = 'N/A' else: msg['slot_time'] = msg['slot_number'] / 2250. * 60 if 'commstate_utc_hour' not in msg: msg['commstate_utc_hour'] = msg['commstate_utc_min'] = 'N/A' print '{slot_number}|{slot_time}|{commstate_utc_hour}|{commstate_utc_min}|{TimeStamp}|{UserID}|'.format(**msg) elif match['body'][0] == '4': bits = binary.ais6tobitvec(match['body'])