def add_file(self, filename):
    # Scan one log file and feed the per-line USCG timestamp (the last
    # comma-separated field) into self.up.add_time().  Short lines and
    # '#' comment lines are skipped; lines whose last field does not
    # parse as a float are reported to stderr and skipped.
    for line_num, line in enumerate(file(filename)):
        if len(line) < 10 or line[0] == '#':
            continue
        line = line.rstrip()
        #if line_num > 100:
        #    break
        #if line_num % 5000 == 0:
        #    print '%s: line %d' % (filename,line_num)
        try:
            # USCG receivers append a unix timestamp as the final field.
            timestamp = float(line.split(',')[-1])
        except:
            sys.stderr.write('skipping line: %s\n' % (line, ) )
            continue
        self.up.add_time(timestamp)
        if False:
            # Disabled pass: decode the payload and collect position
            # statistics for the same line.
            match = uscg_ais_nmea_regex.search(line).groupdict()
            if match['senNum'] != '1':
                #print 'skipping senNum',match['senNum'],line.strip()
                continue # later sentences in an un-normalized stream
            bits = binary.ais6tobitvec(match['body'])
            try:
                self.pos_stats.add_message(match, bits)
            except AisErrorPositionTooFar, e:
                print 'ERROR: too far', str(e)
                print ' Line:', line.rstrip()
                continue
            except AisErrorBadNumBits, e:
                print 'ERROR: bad bit count', str(e)
                print ' Line:', line.rstrip()
                continue
def parse_msgs(infile, verbose=False):
    # Print dac / fid / UserID for every AIS message 8 (binary broadcast)
    # line in infile.  With verbose=True the raw line is echoed; otherwise
    # the receive station from the USCG tail is printed instead.
    for line in infile:
        try:
            match = uscg_ais_nmea_regex.search(line).groupdict()
        except AttributeError:
            # search() returned None -- not a USCG-flavored NMEA line.
            continue
        #if not match: continue
        if match['body'][0]!='8': continue  # first payload char encodes the msg type
        #print line,
        bv = binary.ais6tobitvec(match['body'])
        r = {}
        r['MessageID']=int(bv[0:6])
        r['RepeatIndicator']=int(bv[6:8])
        r['UserID']=int(bv[8:38])
        # NOTE(review): bv[38:50] overlaps the dac slice below; a msg-8
        # spare field is normally only bits 38..40 -- confirm before
        # relying on r['Spare'].
        r['Spare']=int(bv[38:50])
        #r['BinaryData']=bv[40:]
        r['dac']=int(bv[40:50])
        r['fid']=int(bv[50:56])
        #if 34==r['fid']:
        if verbose:
            print r['dac'], r['fid'], r['UserID'], line.rstrip()
        else:
            print r['dac'], r['fid'], r['UserID'], match['station']
def filter_file(infile, outfile, polygonWKT, verbose=False): ''' For messages 1,2, and 3, see if the message is within the bounding box and send it to outfile if it is. Polygon should look something like this... 'POLYGON ((-1.0 50.5, -0.5 51.2, 0.3 50.9, -1 50.5))' param polygon: bounding region for the query type polygon: WKT polygon string ''' import ais.ais_msg_1 as ais_msg_1 import ais.binary as binary try: from cartography.geometry import Geometry except: sys.exit('ERROR: need to install pcl-core for cartography.geometry.Geometry') poly = Geometry.fromWKT(polygonWKT) bbox = poly.envelope() minx = bbox.minx # for speed, throw out points as soon as possible maxx = bbox.maxx miny = bbox.miny maxy = bbox.maxy if verbose: print 'minLon maxLon minLat maxLat filename' print minx, maxx, miny, maxy count = 0 linenum=0 for line in infile: linenum += 1 if linenum%1000==0: sys.stderr.write('line '+str(linenum)+' -- count='+str(count)+'\n') # Trick: Only handle the first 19 characters since that contains the lon/lat txt = line.split(',')[5][:25] #print txt bv = binary.ais6tobitvec(txt) #line[5][:19] # Try to throw out points as soon as possible. Use float rather than decimal. faster?? Maybe not #lon = ais_msg_1.decodelongitude(bv) lon = binary.signedIntFromBV(bv[61:89])/600000.0 if lon<minx or lon>maxx: continue #print 'close1:',lon #lat = ais_msg_1.decodelatitude(bv) lat = binary.signedIntFromBV(bv[89:116])/600000.0 if lat<miny or lat>maxy: continue #print 'close2: POINT ('+str(lon)+' '+str(lat)+')' point = Geometry.fromWKT('POINT ('+str(lon)+' '+str(lat)+')') inside = point.within(poly) if 1==inside: outfile.write(line) count+= 1 return count
def getNameMMSI(logfile,outfile):
    # Emit "mmsi name" for each single-part message-5 (shipdata) line.
    for raw in logfile:
        parts = raw.split(',')[:6]
        # Only the first sentence of a sequence carries the header bits.
        if parts[2] != '1':
            continue
        payload = parts[5]
        if len(payload) < 39:
            continue
        # Decode just enough characters to cover UserID and name fields.
        bits = binary.ais6tobitvec(payload[:39]) # Hacked for speed
        user_id = ais_msg_5.decodeUserID(bits)
        ship_name = aisstring.unpad(ais_msg_5.decodename(bits))
        outfile.write('%s %s\n' % (user_id, ship_name))
def parse_msgs(infile, verbose=False): for line in infile: line = line.strip() try: match = uscg_ais_nmea_regex.search(line).groupdict() except AttributeError: continue msg_type = match['body'][0] if msg_type not in ('6', '8'): continue print 'cp 1' if msg_type == '6' and len(match['body']) < 15: continue if msg_type == '8' and len(match['body']) < 10: continue try: bv = binary.ais6tobitvec(match['body'][:15]) except ValueError: sys.stderr.write('bad msg: %s\n' % line.strip()) continue r = {} r['MessageID']=int(bv[0:6]) r['UserID']=int(bv[8:38]) if '6' == msg_type: dac = int(bv[72:82]) fi = int(bv[82:88]) elif '8' == msg_type: dac = int(bv[40:50]) fi = int(bv[50:56]) elif verbose: print 'not a bbm:', line if verbose: print msg_type, dac, fi, r['UserID'], line.rstrip() else: print msg_type, dac, fi, r['UserID'], match['station']
def filter_box(infile, outfile, west, east, lower, upper, verbose=False): ''' Do a straight box clip that should be faster than using the WKT. Use geographic coordinates what run +/- 180 east west and +/-90 north south. @bug: Good luck at the +/-180 boundary! ''' assert (upper>lower) assert (west<east) import ais.ais_msg_1 as ais_msg_1 import ais.binary as binary if verbose: print 'xrange:',west,east print 'yrange:',lower,upper count = 0 linenum=0 for line in infile: linenum += 1 if linenum%1000==0: sys.stderr.write('line '+str(linenum)+'\n') # Trick: Only handle the first 19 characters since that contains the lon/lat txt = line.split(',')[5][:25] #print txt bv = binary.ais6tobitvec(txt) #line[5][:19] lon = float(ais_msg_1.decodelongitude(bv)) lat = float(ais_msg_1.decodelatitude(bv)) if west>lon or lon>east: #print 'skip on lon',type(west),type(lon),type(east), west>lon,lon>east continue if lower>lat or lat>upper: continue if verbose: print 'ACCEPT',lon,lat outfile.write(line) count+=1
dt_time_of_arrival = 'N/A'.rjust(8) time_of_arrival_prev = time_of_arrival try: slot_num = int(match['slot']) slot_t = slot_num / 2250. * 60 slot_t = '%5.2f' % slot_t except: slot_num = 'N/A' slot_t = 'N/A' print '|',uscg,'|',cg_s,'|',dt,'|',time_of_arrival,'|', dt_time_of_arrival,'|', match['t_recver_hhmmss'], '|',slot_num, '|',slot_t , '|', if match['body'][0] in ('1','2','3'): bits = binary.ais6tobitvec(match['body']) msg = ais_msg_1.decode(bits) #print msg.keys() #all_keys.update(set(msg.keys())) msg_slot = 'N/A' if 'slot_number' not in msg: msg['slot_number'] = 'N/A' msg['slot_time'] = 'N/A' else: msg['slot_time'] = msg['slot_number'] / 2250. * 60 if 'commstate_utc_hour' not in msg: msg['commstate_utc_hour'] = msg['commstate_utc_min'] = 'N/A' print '{slot_number}|{slot_time}|{commstate_utc_hour}|{commstate_utc_min}|{TimeStamp}|{UserID}|'.format(**msg) elif match['body'][0] == '4': bits = binary.ais6tobitvec(match['body'])
match_obj = uscg_ais_nmea_regex.search(line) if match_obj is None: sys.stderr.write(line) continue station = match_obj.group('station') #except: # sys.stderr.write('bad line: %s\n' %line) # continue fields = line.split(',')[:6] if '1'!=fields[2]: # Must be the start of a sequence #if verbose: # print 'skipping based on field 2',line continue if len(fields[5])<39: #if verbose: # print 'skipping',line continue bv = binary.ais6tobitvec(fields[5][:39]) # Hacked for speed #print int(bv[8:38]),aisstring.decode(bv[112:232],True) name = aisstring.decode(bv[112:232],True).strip('@ ') mmsi = str(int(bv[8:38])) imo = str(int(bv[40:70])) #if len(name)<1 or name[0]=='X': print 'TROUBLE with line:',line if len(name)<1: print 'TROUBLE with line:',line o.write (mmsi+' '+imo+' '+station+' '+name+'\n')
def loadData(cx,datafile,verbose=False , uscg=True): ''' Try to read data from an open file object. Not yet well tested. @param cx: database connection @param verbose: pring out more if true @param uscg: Process uscg tail information to get timestamp and receive station @rtype: None @return: Nothing @note: can not handle multiline AIS messages. They must be normalized first. ''' cu = cx.cursor() lineNum = 0 #counts = {1:0,2:0,3:0,4:0,5:0} counts = {} countsTotal = {} # Includes ignored messages for i in range(64): counts[i]=0 countsTotal[i]=0 for line in datafile: lineNum += 1 if lineNum%1000==0: print lineNum cx.commit() # if lineNum>3000: # print 'Early exit from load' # break if line[3:6] not in ('VDM|VDO'): # if verbose: # sys.stderr.write continue # Not an AIS VHF message try: msgNum = int(binary.ais6tobitvec(line.split(',')[5][0])) except: print 'line would not decode',line continue countsTotal[msgNum]+=1 if verbose: print 'msgNum:',msgNum # if msgNum not in (1,2,3,4,5): # if verbose: print 'skipping',line # continue payload = bv = binary.ais6tobitvec(line.split(',')[5]) # FIX: need to take badding into account ... right before the * if msgNum in (1,2,3): # if len(bv) != 168: if len(bv) < 168: print 'ERROR: skipping bad position message, line:',lineNum print ' ',line, print ' Got length',len(bv), 'expected', 168 continue # elif msgNum == 4: elif msgNum == 5: # if len(bv) != 424: if len(bv) < 424: print 'ERROR: skipping bad shipdata message, line:',lineNum print ' ',line, print ' Got length',len(bv), 'expected', 424 continue fields=line.split(',') cg_timestamp = None cg_station = None if uscg: try: cg_sec = int(float(fields[-1])) # US Coast Guard time stamp. 
print 'cg_sec:',cg_sec,type(cg_sec) cg_timestamp = datetime.datetime.utcfromtimestamp(float(cg_sec)) except: print 'ERROR getting timestamp for',lineNum,line #print len(fields),fields for i in range(len(fields)-1,5,-1): if 0<len(fields[i]) and 'r' == fields[i][0]: cg_station = fields[i] break # Found it so ditch the for loop #print station #sys.exit('stations please work') ins = None try: if msgNum==1: ins = ais.ais_msg_1.sqlInsert(ais.ais_msg_1.decode(bv),dbType='sqlite') elif msgNum==2: ins = ais.ais_msg_2.sqlInsert(ais.ais_msg_2.decode(bv),dbType='sqlite') elif msgNum==3: ins = ais.ais_msg_3.sqlInsert(ais.ais_msg_3.decode(bv),dbType='sqlite') elif msgNum==4: ins = ais.ais_msg_4.sqlInsert(ais.ais_msg_4.decode(bv),dbType='sqlite') elif msgNum==5: ins = ais.ais_msg_5.sqlInsert(ais.ais_msg_5.decode(bv),dbType='sqlite') elif msgNum==18: ins = ais.ais_msg_18.sqlInsert(ais.ais_msg_18.decode(bv),dbType='sqlite') elif msgNum==19: ins = ais.ais_msg_19.sqlInsert(ais.ais_msg_19.decode(bv),dbType='sqlite') else: if verbose: print 'Warning... not handling type',msgNum,'line:',lineNum continue except: print 'ERROR: some decode error?','line:',lineNum print ' ',line continue counts[msgNum] += 1 if uscg: # FIX: make cg_timestamp work if None != cg_timestamp: ins.add('cg_timestamp',cg_timestamp) if None != cg_station: ins.add('cg_r', cg_station) if verbose: print str(ins) # FIX: redo this correctly??? 
#try: print str(ins) cu.execute(str(ins)) #except: # sys.stderr.write('FIX: write some sort of exception handler\n') # except pysqlite2.dbapi2.OperationalError, params: # #except OperationalError, params: # if -1 != params.message.find('no such table'): # print 'ERROR:',params.message # sys.exit('You probably need to run with --with-create') # print 'params',params # print type(params) # print 'ERROR: sql error?','line:',lineNum # print ' ', str(ins) # print ' ',line # if False: # # Give some debugging flexibility # from IPython.Shell import IPShellEmbed # ipshell = IPShellEmbed(argv=[]) # ipshell() # sys.exit('Gave up') #print counts print '\nMessages found:' for key in countsTotal: if countsTotal[key]>0: print str(key)+':',countsTotal[key] print '\nMessages processed:' for key in counts: if counts[key]>0: print str(key)+':',counts[key] cx.commit()
def build_dist_database(database_filename, log_files, verbose=False): cx = sqlite3.connect(database_filename) print 'WARNING: not saving the station name' cx.execute(''' CREATE TABLE IF NOT EXISTS distance ( -- Save space, no key -- ts INTEGER, -- Save more space julian_day INTEGER, -- x REAL, -- y REAL, dist_km REAL --, --station VARCHAR(15) ); ''') cu = cx.cursor() counts = {'nogps': 0} for filename in log_files: if verbose: print 'file:',filename sys.stdout.flush() for line_num, line in enumerate(file(filename)): if 'AIVDM,1,1' not in line: continue match = uscg_ais_nmea_regex.search(line).groupdict() message_id = match['body'][0] # First letter is the message type if message_id not in ('1','2','3'): continue if len(match['body']) != 28: # 6 bits per character raise AisErrorBadNumBits('expected 168, got %d' % len(match['body']) / 6) bits = binary.ais6tobitvec(match['body'][:20]) # Don't need any of the other bits, so do not waste time x = binary.signedIntFromBV(bits[61:89]) / 600000. y = binary.signedIntFromBV(bits[89:116]) / 600000. 
if x > 180 or y > 90: counts['nogps'] += 1 continue station = match['station'] julian_day = int(datetime.datetime.utcfromtimestamp(int(match['timeStamp'])).strftime('%j')) d_km = dist_utm_km( (x,y), station_locations[station] ) #cu.execute('INSERT INTO distance VALUES (:julian_day, :x, :y, :dist_km, :station)', #{'julian_day': julian_day, 'x':x, 'y':y, 'dist_km': d_km, 'station':station} ) #cu.execute('INSERT INTO distance VALUES (:julian_day, :x, :y, :dist_km)', # {'julian_day': julian_day, 'x':x, 'y':y, 'dist_km': d_km, } ) cu.execute('INSERT INTO distance VALUES (:julian_day, :dist_km)', {'julian_day': julian_day, 'dist_km': d_km, } ) if line_num % 10000 == 9999: cx.commit() cx.commit() if False: print 'Creating indexes' try: cx.execute('CREATE INDEX idx_dist_day ON distance(julian_day);') cx.execute('CREATE INDEX idx_dist_dist ON distance(dist_km);') #cx.execute('CREATE INDEX idx_dist_station ON distance(station);') cx.commit() except sqlite3.OperationalError: print 'Appears indexes were already created' return cx, counts
# print r['dimC'],r['dimD'],r['ETAminute'],r['ETAhour'],r['ETAday'],r['ETAmonth'],r['draught'], # print r['destination'] ws_report_row += 1 msgsByShip = {} # timeByMsg={} # FIX: error checking? for filename in args: linenum = 0 for line in file(filename): line = line.strip() linenum += 1 if linenum % 1000 == 0: print linenum fields = line.split(",") bv = binary.ais6tobitvec(fields[5][:38]) mmsi = m5.decodeUserID(bv) timestamp = fields[-1] if mmsi in msgsByShip: # if line not in msgsByShip: msgsByShip[mmsi].append(line) # if line in timeByMsgs else: msgsByShip[mmsi] = [line] print "Finished scan. Now processing ships.\n" ships = msgsByShip.keys() ships.sort()
def load_data(cx, datafile=sys.stdin, verbose=False, uscg=True): # uscg=True, payload_table=False): ''' Try to read data from an open file object. Not yet well tested. @param cx: database connection @param verbose: pring out more if true @param uscg: Process uscg tail information to get timestamp and receive station @rtype: None @return: Nothing @note: can not handle multiline AIS messages. They must be normalized first. ''' # @param payload_table: Create a table that contains the nmea payload text (useful for distinguisig unique messages) v = verbose # Hmm... "v"... the irony cu = cx.cursor() lineNum = 0 next_key = 0 max_key = get_max_key(cx) if max_key is not None: next_key = max_key + 1 print 'keys_starting_at:',next_key message_set = (1,2,3,4,5,18,19) counts = {} for msg_num in message_set: counts[msg_num] = 0 counts['checksum_failed'] = 0 track_dups = TrackDuplicates(lookback_length=1000) for line in datafile: lineNum += 1 if lineNum%1000==0: print lineNum cx.commit() if len(line)<15 or line[3:6] not in ('VDM|VDO'): continue # Not an AIS VHF message #print 'FIX: validate checksum' if not nmea.checksum.isChecksumValid(line): print >> sys.stderr, 'WARNING: invalid checksum:\n\t',line, print >> sys.stderr, ' ',nmea.checksum.checksumStr(line) counts['checksum_failed'] += 1 fields=line.split(',') # FIX: use this split throughout below... try: msg_num = int(binary.ais6tobitvec(fields[5][0])) except: print 'line would not decode',line continue if verbose: print 'msg_num:',msg_num if msg_num not in message_set: if verbose: print 'skipping',line print ' not in msg set:',str(message_set) continue try: bv = binary.ais6tobitvec(fields[5]) except: print >> sys.stderr, 'ERROR: Unable to decode bits in line:\n\t',line traceback.print_exc(file=sys.stderr) continue # FIX: need to take padding into account ... 
right before the * if msg_num in (1,2,3,4,18): if len(bv) != 168: print 'ERROR: skipping bad one slot message, line:',lineNum print ' ',line, print ' Got length',len(bv), 'expected', 168 continue elif msg_num == 5: # 426 has 2 pad bits if len(bv) not in (424,426): print 'ERROR: skipping bad shipdata message, line:',lineNum print ' ',line, print ' Got length',len(bv), 'expected', 424 continue ins = None try: if msg_num== 1: ins = ais.ais_msg_1_handcoded.sqlInsert(ais.ais_msg_1_handcoded.decode(bv),dbType='sqlite') elif msg_num== 2: ins = ais.ais_msg_2_handcoded.sqlInsert(ais.ais_msg_2_handcoded.decode(bv),dbType='sqlite') elif msg_num== 3: ins = ais.ais_msg_3_handcoded.sqlInsert(ais.ais_msg_3_handcoded.decode(bv),dbType='sqlite') elif msg_num== 4: ins = ais.ais_msg_4_handcoded.sqlInsert(ais.ais_msg_4_handcoded.decode(bv),dbType='sqlite') elif msg_num== 5: ins = ais.ais_msg_5.sqlInsert(ais.ais_msg_5.decode(bv),dbType='sqlite') elif msg_num==18: ins = ais.ais_msg_18.sqlInsert(ais.ais_msg_18.decode(bv),dbType='sqlite') # Class B position elif msg_num==19: ins = ais.ais_msg_19.sqlInsert(ais.ais_msg_19.decode(bv),dbType='sqlite') # Class B position else: print 'Warning... 
not handling type',msg_num,'line:',lineNum continue except: print 'ERROR: some decode error?','line:',lineNum print ' ',line continue counts[msg_num] += 1 if uscg: from aisutils.uscg import uscg_ais_nmea_regex match = uscg_ais_nmea_regex.search(line).groupdict() try: cg_sec = int(float(match['timeStamp'])) ins.add('cg_sec', cg_sec) #ins.add('cg_sec', int(float(match['timeStamp'])) ) ins.add('cg_timestamp', str(datetime.datetime.utcfromtimestamp(float(match['timeStamp']))) ) ins.add('cg_r', match['station'] ) except: print >> sys.stderr, match print >> sys.stderr, 'bad uscg sections',line, continue # Optional fields that are not always there if match['time_of_arrival'] is not None: try: ins.add('cg_t_arrival', float(match['time_of_arrival'])) except: print >> sys.stderr, 'WARNING: corrupted time of arrival (T) in line. T ignored\n\t',line pass # Not critical if corrupted if match['slot'] is not None: ins.add('cg_s_slotnum', int(match['slot']) ) if msg_num in (1,2,3,4): pkt_id,dup_flag = track_dups.check_packet(cg_sec,fields[5]) # Pass in the NMEA payload string of data if v: print 'dup_check:',pkt_id,dup_flag,fields[5] ins.add('pkt_id',pkt_id) ins.add('dup_flag',dup_flag) ins.add('key',next_key) next_key += 1 if verbose: print str(ins) try: cu.execute(str(ins)) except pysqlite2.dbapi2.OperationalError, params: #except OperationalError, params: if -1 != params.message.find('no such table'): print 'ERROR:',params.message sys.exit('You probably need to run with --with-create') print 'params',params print type(params) print 'ERROR: sql error?','line:',lineNum print ' ', str(ins) print ' ',line if False: # Give some debugging flexibility from IPython.Shell import IPShellEmbed ipshell = IPShellEmbed(argv=[]) ipshell() sys.exit('Gave up')
def loadData(cx,datafile,verbose=False , uscg=True): ''' Try to read data from an open file object. Not yet well tested. @param cx: database connection @param verbose: pring out more if true @param uscg: Process uscg tail information to get timestamp and receive station @rtype: None @return: Nothing @note: can not handle multiline AIS messages. They must be normalized first. ''' cu = cx.cursor() lineNum = 0 import psycopg2 # For ProgrammingError exception counts = {1:0,2:0,3:0,5:0} # buf=[] for line in datafile: lineNum += 1 if lineNum%1000==0: print lineNum # if lineNum%1000==0: # try: # cu.execute('BEGIN;'+';'.join(buf)+'COMMIT;') # except psycopg2.ProgrammingError: # # FIX: how do display the exception? # print 'psycopg2.ProgrammingError:\n ',line # continue # buf=[] # cx.commit() # if lineNum>3000: # print 'Early exit from load' # break if line[3:6] not in ('VDM|VDO'): continue # Not an AIS VHF message try: msgNum = int(binary.ais6tobitvec(line.split(',')[5][0])) except: print '# line would not decode',line continue if verbose: print '# msgNum:',msgNum if msgNum not in (1,2,3,5): if verbose: print '# skipping',line continue payload = bv = binary.ais6tobitvec(line.split(',')[5]) # FIX: need to take padding into account ... right before the * if msgNum in (1,2,3): # if len(bv) != 168: if len(bv) < 168: print '# ERROR: skipping bad position message, line:',lineNum print '# ',line, print '# Got length',len(bv), 'expected', 168 continue elif msgNum == 5: # if len(bv) != 424: if len(bv) < 424: print '# ERROR: skipping bad shipdata message, line:',lineNum print '# ',line, print '# Got length',len(bv), 'expected', 424 continue fields=line.split(',') cg_sec = None cg_station = None if uscg: cg_sec = int(float(fields[-1])) # US Coast Guard time stamp. 
cg_timestamp = sqlhelp.sec2timestamp(cg_sec) #print len(fields),fields for i in range(len(fields)-1,5,-1): if 0<len(fields[i]) and 'r' == fields[i][0]: cg_station = fields[i] break # Found it so ditch the for loop #print station #sys.exit('stations please work') ins = None # FIX: redo this for all messages using the new aisutils structure # try: if True: if msgNum==1: ins = ais.ais_msg_1.sqlInsert(ais.ais_msg_1.decode(bv),dbType='postgres') elif msgNum==2: ins = ais.ais_msg_2.sqlInsert(ais.ais_msg_2.decode(bv),dbType='postgres') elif msgNum==3: ins = ais.ais_msg_3.sqlInsert(ais.ais_msg_3.decode(bv),dbType='postgres') elif msgNum==5: params = ais.ais_msg_5.decode(bv) #print params # FIX: make this a command line option params['name'] = params['name'].replace('"','').replace('\\','').strip('@').strip() params['callsign'] = params['callsign'].replace('"','').replace('\\','').strip('@').strip() params['destination'] = params['destination'].replace('"','').replace('\\','').strip('@').strip() #params.callsign = params.callsign.strip() #params. = params..strip() ins = ais.ais_msg_5.sqlInsert(params,dbType='postgres') else: print '# Warning... not handling type',msgNum,'line:',lineNum continue # except: # print '# ERROR: some decode error?','line:',lineNum # print '# ',line # continue counts[msgNum] += 1 if uscg: if None != cg_sec: ins.add('cg_sec', cg_sec) if None != cg_timestamp: ins.add('cg_timestamp', cg_timestamp) if None != cg_station: ins.add('cg_r', cg_station) if verbose: print str(ins) print '# line:',line #print str(ins) try: cu.execute(str(ins)) #buf.append(str(ins)) except Exception, e: print params # # FIX: give a better error message print '# exception:',str(type(Exception)), str(e) print '# ERROR: sql error?','line:',lineNum print '# ', str(ins) print '# ',line sys.exit('EARLY!!!') if lineNum%5000==0: if verbose: print '# committing batch' cx.commit()
else: print 'FAILED' success=False if not success: sys.exit('Something Failed') del success # Hide success from epydoc if options.unittest: sys.argv = [sys.argv[0]] if options.verbose: sys.argv.append('-v') unittest.main() outfile = sys.stdout if None!=options.outputFileName: outfile = file(options.outputFileName,'w') bv=None for msg in args: if 'binary' == options.inputType: bv = BitVector(bitstring=msg) elif 'nmeapayload' == options.inputType: bv = binary.ais6tobitvec(msg) elif 'nmea' == options.inputType: bv = binary.ais6tobitvec(msg.split(',')[5]) else: sys.exit('ERROR: unknown inputType. Help!') #import ais.ais_msg_8 as m8 #m8dict = m8.decode(bv) import ais.waterlevel as wl #wl.printFields(wl.decode(m8dict['BinaryData']),out=outfile,format=options.outputType) wl.printFields(wl.decode(bv),out=outfile,format=options.outputType)
def getPosition(logfile, outfile, minDist=None): ''' Pull the positions from the log file @param logfile: file like object @param outfile: file like object destination @param minDist: how far apart points must be apart to be considered unique ''' # FIX: use the right utm zone. 14 is the central US so it will kind of work params = {'proj':'utm', 'zone':14} #int(options.zone)} proj = None if minDist != None: proj = Proj(params) positions = {} # Last recoded ship position for line in logfile: fields = line.split(',') # FIX: use regex instead if '!AIVDM' != fields[0]: continue if '1' != fields[1] and '1' != fields[2]: # Must be the start of a sequence continue if fields[5][0] not in ('1','2','3'): continue bv = binary.ais6tobitvec(fields[5][:39]) # Hacked for speed timestamp = fields[-1].strip() mmsi = str(ais_msg_1.decodeUserID(bv)) lon = ais_msg_1.decodelongitude(bv) lat = ais_msg_1.decodelatitude(bv) d = None if mmsi not in positions: positions[mmsi] = (lon, lat) elif minDist != None: lonOld, latOld = positions[mmsi] oldUTM = proj(lonOld, latOld) newUTM = proj(lon, lat) d = dist(oldUTM[0], oldUTM[1], newUTM[0], newUTM[1]) if str(d)=='nan': continue #pass # FIX: Print but do not save nan values??? elif d < minDist: continue else: positions[mmsi] = (lon, lat) lon = str(lon) lat = str(lat) if len(mmsi) < 9: mmsi += ' '*(9-len(mmsi)) fLen = 12 # field length ... how much space if len(lon)>fLen: lon = lon[:fLen] if len(lon)<fLen: lon += ' '*(fLen-len(lon)) if len(lat)>fLen: lat = lat[:fLen] if len(lat)<fLen: lat += ' '*(fLen-len(lat)) outfile.write(timestamp+' '+str(mmsi)+' '+lon+' '+lat+'\n')