Example #1
def decode(nmea=sys.stdin,
           errorcb=ErrorPrinter,
           keep_nmea=False,
           stats=None,
           **kw):
    """Decodes a stream of AIS messages. Takes the same arguments as normalize."""

    if stats is None: stats = {}

    def report_error(e):
        add_error_to_stats(e, stats)
        errorcb(e, stats, **kw)

    for tagblock, line, origline in normalize(nmea=nmea,
                                              errorcb=errorcb,
                                              stats=stats,
                                              **kw):
        try:
            body = ''.join(line.split(',')[5])
            pad = int(line.split('*')[0][-1])
            res = ais.decode(body, pad)
            res.update(tagblock)
            if keep_nmea:
                res['nmea'] = origline
            yield res
        except TooManyErrorsError:
            raise
        except Exception as e:
            report_error(e)
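A minimal usage sketch for the decode() generator above (the counting helper and the stdin default are illustrative, not part of the original example):
import sys

def count_position_reports(stream=sys.stdin):
    # ids 1-3 are class A position reports; keep_nmea attaches the raw line.
    count = 0
    for msg in decode(nmea=stream, keep_nmea=True):
        if msg.get('id') in (1, 2, 3):
            count += 1
    return count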
Example #2
def decode_message(lines, first_line_num):
    info = {}
    comment, message = parse_information_sentence(lines[first_line_num])
    if not message:
        return 1, info

    if "timestamp" in comment:
        info["timestamp"] = comment["timestamp"]
    if "source" in comment:
        info["source"] = comment["source"]
    if "quality" in comment:
        info["quality"] = comment["quality"]
    if "channel" in message:
        info["channel"] = message["channel"]
    to_decode = message["encoded_message"]
    fill_bits = message["fill_bits"]

    tot_lines = 1
    if "group" in comment:
        tot_lines = comment["group"]["sentence_tot"]

    for linenum in range(first_line_num + 1, first_line_num + tot_lines):
        _, tempmsg = parse_information_sentence(lines[linenum])
        to_decode += tempmsg["encoded_message"]
        fill_bits = tempmsg["fill_bits"]

    try:
        info["data"] = ais.decode(to_decode, fill_bits)
    except Exception:
        print("Error: could not decode data " + to_decode +
              " with fill bits " + str(fill_bits))

    return tot_lines, info
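A hedged driver sketch for decode_message above: it walks a list of raw sentences and advances by the number of lines each call reports as consumed (decode_all is illustrative; parse_information_sentence and ais are assumed to be available as in the example):
def decode_all(lines):
    results = []
    line_num = 0
    while line_num < len(lines):
        lines_used, info = decode_message(lines, line_num)
        if info:
            results.append(info)
        line_num += lines_used
    return results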
Example #3
def storedata(request):
    conn = sqlite3.connect(r"ais.db")

    with conn:

        cur = conn.cursor()
        cur.execute("DROP TABLE IF EXISTS AIVDM")
        cur.execute(
            "CREATE TABLE IF NOT EXISTS AIVDM(mmsi INT, x DEC, y DEC, nav_status INT, true_heading INT, timestamp INT)")

        with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "nmea-sample.txt"), 'r') as f:
            for line in f:
                m = re.search(r'!AIVDM,[\w]?,[\w]?,[\w]?,[\w]?,(?P<ais>[^,]*)', line)
                if m:
                    aisData = m.group('ais')
                    #resp.write("<br /><br />")
                    try:
                        aisDecoded = ais.decode(aisData, 0)

                        if aisDecoded:
                            if aisDecoded.get('mmsi') <= 0 or aisDecoded.get('x') > 180 or aisDecoded.get('y') > 90:
                                continue

                            cur.execute("INSERT INTO AIVDM VALUES(" + str(aisDecoded.get('mmsi')) + ","
                                        + str(aisDecoded.get('x')) + "," + str(aisDecoded.get('y')) + ","
                                        + str(aisDecoded.get('nav_status')) + "," + str(
                                aisDecoded.get('true_heading')) + ","
                                        + str(aisDecoded.get('timestamp'))
                                        + ");")
                    except Exception as e:
                        print(e)

    return HttpResponse("Done")
Example #4
def decode(filename):

	file  = gzip.open(filename, 'r')
	global decoded_observations
	global all_observations
	timestamp = '0'

	for line in file:
		all_observations = all_observations +1
		try:
			line_list = line.split(",")

			field_0 = line_list[0]
			if(field_0[0] != "!"):
				# timestamp is present
				timestamp = field_0[:17]
			field_6 = line_list[6].rstrip('\r\n')
			if (str(field_6)[0] == "0"):
				decoded = str((ais.decode(str(line_list[5]), 0)))
				## insert current timestamp value
				timestamp_position = (decoded).find("timestamp") + 12
				decoded_timestamp = decoded[:timestamp_position] + str(timestamp) + decoded[timestamp_position+3:]
				if (all_observations%10 == 0):	#specify if want to only run through a part
					plot_on_map(decoded_timestamp)
					decoded_observations = decoded_observations+1	
			else:
				file.readline()      # skip next line as it is a part of previous observation
		except Exception:
			pass
	
	print ("Successfully decoded observations: " + str(decoded_observations) +"/" + str(all_observations) + " (" + str(float(decoded_observations)/float(all_observations)) + ")")	
Example #5
def decode_message(lines, first_line_num):
    info = {}
    comment, message = parse_information_sentence(lines[first_line_num])
    if not message:
        return 1, info

    if "timestamp" in comment:
        info["timestamp"] = comment["timestamp"]
    if "source" in comment:
        info["source"] = comment["source"]
    if "quality" in comment:
        info["quality"] = comment["quality"]
    if "channel" in message:
        info["channel"] = message["channel"]
    to_decode = message["encoded_message"]
    fill_bits = message["fill_bits"]

    tot_lines = 1
    if "group" in comment:
        tot_lines = comment["group"]["sentence_tot"]

    for linenum in range(first_line_num + 1, first_line_num + tot_lines):
        _, tempmsg = parse_information_sentence(lines[linenum])
        to_decode += tempmsg["encoded_message"]
        fill_bits = tempmsg["fill_bits"]

    try:
        info["data"] = ais.decode(to_decode, fill_bits)
    except Exception:
        print("Error: could not decode data " + to_decode + " with fill bits " + str(fill_bits))

    return tot_lines, info
Example #6
def DecodeTagSingle(tag_block_message):
  """Decode the payload of one (but NOT more) NMEA TAG block.

  Args:
    tag_block_message: dict, A dictionary with a matches entry.

  Returns:
    A message dictionary compatible with vdm.BareQueue.
  """
  line = tag_block_message['matches'][0]['payload']
  match = vdm.Parse(line)
  if not match:
    logging.info('Single line NMEA TAG block decode failed for: %s',
                 tag_block_message)
    return

  sentence_total = int(match['sen_tot'])
  if sentence_total != 1:
    logging.error('Multi-line message? %s', tag_block_message)
    return

  body = match['body']
  fill_bits = int(match['fill_bits'])
  try:
    decoded = ais.decode(body, fill_bits)
  except ais.DecodeError as error:
    logging.error('Unable to decode: %s', error)
    return

  decoded['md5'] = hashlib.md5(body.encode('utf-8')).hexdigest()
  return decoded
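A minimal usage sketch for DecodeTagSingle above; the 'matches'/'payload' layout follows the docstring and the sentence is borrowed from Example #30 below (values are illustrative):
tag_block_message = {
    'matches': [{'payload': '!AIVDM,1,1,,B,169A91O005KbT4gUoUl9d;5j0D0U,0*2D'}]
}
decoded = DecodeTagSingle(tag_block_message)  # dict with an added 'md5' key, or None on failure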
Example #7
def decode(nmea=sys.stdin,
           errorcb=ErrorPrinter,
           keep_nmea=False,
           stats=None,
           **kw):
  """Decodes a stream of AIS messages. Takes the same arguments as normalize."""

  if stats is None: stats={}

  def report_error(e):
    add_error_to_stats(e, stats)
    errorcb(e, stats, **kw)

  for tagblock, line, origline in normalize(nmea=nmea, errorcb=errorcb, stats=stats, **kw):
    try:
      body = ''.join(line.split(',')[5])
      pad = int(line.split('*')[0][-1])
      res = ais.decode(body, pad)
      res.update(tagblock)
      if keep_nmea:
        res['nmea'] = origline
      yield res
    except TooManyErrorsError:
      raise
    except Exception as e:
      report_error(e)
Example #8
def decode_line(line, timestamp):
    decoded = 'empty'

    try:
        line_list = line.split(",")
        field_0 = line_list[0]

        if(field_0[0] != "!"):
                # timestamp is present
                timestamp = field_0[:17]

        # the message to be decoded is in the 5th field of the list
        # field 6 contains information on the type; types starting with 0 can be successfully decoded
        field_6 = line_list[6].rstrip('\r\n')

        if (str(field_6)[0] == "0"):
            decoded_without_timestamp = str((ais.decode(str(line_list[5]), 0)))
            ## insert current timestamp value
            timestamp_position = (decoded_without_timestamp).find("timestamp") + 12
            decoded = decoded_without_timestamp[:timestamp_position] + (timestamp) + decoded_without_timestamp[timestamp_position+3:]
        else:
            pass
            # message type invalid
    except Exception:
        pass
    return decoded, timestamp
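A hedged sketch of the same idea without editing the repr string: decode to a dict and set the timestamp key directly (assumes the same comma layout, with the payload in field 5 and the fill-bit count leading field 6):
import ais

def decode_line_dict(line, timestamp):
    fields = line.split(',')
    if fields[0][0] != '!':
        # leading field carries a timestamp instead of the '!AIVDM' talker
        timestamp = fields[0][:17]
    try:
        decoded = ais.decode(fields[5], int(fields[6][0]))
    except (ais.DecodeError, ValueError, IndexError):
        return 'empty', timestamp
    decoded['timestamp'] = timestamp
    return decoded, timestamp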
Example #9
def DecodeTagSingle(tag_block_message):
  """Decode the payload of one (but NOT more) NMEA TAG block.

  Args:
    tag_block_message: dict, A dictionary with a matches entry.

  Returns:
    A message dictionary compatible with vdm.BareQueue.
  """
  line = tag_block_message['matches'][0]['payload']
  match = vdm.Parse(line)
  if not match:
    logging.info('Single line NMEA TAG block decode failed for: %s',
                 tag_block_message)
    return

  sentence_total = int(match['sen_tot'])
  if sentence_total != 1:
    logging.error('Multi-line message? %s', tag_block_message)
    return

  body = match['body']
  fill_bits = int(match['fill_bits'])
  try:
    decoded = ais.decode(body, fill_bits)
  except ais.DecodeError as error:
    logging.error('Unable to decode: %s', error)
    return

  decoded['md5'] = hashlib.md5(body.encode('utf-8')).hexdigest()
  return decoded
Example #10
    def loop(self):
        print ('Started processing thread...')

        while self.running:
            try:
                line = self.line_queue.get(True,1) # Block until we have a line
            except Queue.Empty:
                continue # just a timeout with no data
            #print ('line:',line)
            if len(line) < 15 or '!AIVDM' != line[:6]: continue # try to avoid the regex if possible

            try:
                match = uscg_ais_nmea_regex.search(line).groupdict()
            except AttributeError:
                if 'AIVDM' in line:
                    print ('BAD_MATCH:',line)
                continue

            self.norm_queue.put(match)

            # FIX: possibly decouple here
            while self.norm_queue.qsize()>0:

                try:
                    result = self.norm_queue.get(False)
                except Queue.Empty:
                    continue

                if len(result['body'])<10 or result['body'][0] not in ('1', '2', '3', '5', 'B', 'C', 'H') :
                    continue

                try:
                     msg = ais.decode(result['body'])
                except Exception as e:
                    if 'not yet handled' in str(e) or 'not known' in str(e): continue
                    print ('BAD Decode:',result['body'][0]) 
                    print ('\tE:',Exception)
                    print ('\te:',e)
                    continue

                if msg['id'] in (1,2,3,18,19):
                    try:
                        msg['time_stamp'] = float(result['time_stamp'])
                    except TypeError as e:
                        print ('BAD time_stamp:',str(e))
                        traceback.print_exc(file=sys.stderr)
                        continue
                    
                    self.pos_cache.update(msg)
                    continue

                if msg['id'] == 24:
                    self.vessel_names.update_partial(msg)
                    continue

                if msg['id'] in (5,19):
                    msg['name'] = msg['name'].strip(' @')
                    if len(msg['name']) == 0: continue # Skip blank names
                    self.vessel_names.update(msg)
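The filter on result['body'][0] above works because the first armored character of the payload encodes the AIS message id; a small helper illustrating the standard 6-bit de-armoring (an assumption-free restatement of that mapping, not part of the example):
def payload_char_to_msg_id(c):
    # '1'->1, '2'->2, '3'->3, '5'->5, 'B'->18, 'C'->19, 'H'->24
    value = ord(c) - 48
    if value > 40:
        value -= 8
    return value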
Example #11
    def loop(self):
        print ('Started processing thread...')

        while self.running:
            try:
                line = self.line_queue.get(True,1) # Block until we have a line
            except Queue.Empty:
                continue # just a timeout with no data
            #print ('line:',line)
            if len(line) < 15 or '!AIVDM' != line[:6]: continue # try to avoid the regex if possible

            try:
                match = uscg_ais_nmea_regex.search(line).groupdict()
            except AttributeError:
                if 'AIVDM' in line:
                    print ('BAD_MATCH:',line)
                continue

            self.norm_queue.put(match)

            # TODO(schwehr): possibly decouple here
            while self.norm_queue.qsize()>0:

                try:
                    result = self.norm_queue.get(False)
                except Queue.Empty:
                    continue

                if len(result['body'])<10 or result['body'][0] not in ('1', '2', '3', '5', 'B', 'C', 'H') :
                    continue

                try:
                     msg = ais.decode(result['body'])
                except Exception as e:
                    if 'not yet handled' in str(e) or 'not known' in str(e): continue
                    print ('BAD Decode:',result['body'][0])
                    print ('\tE:',Exception)
                    print ('\te:',e)
                    continue

                if msg['id'] in (1,2,3,18,19):
                    try:
                        msg['time_stamp'] = float(result['time_stamp'])
                    except TypeError as e:
                        print ('BAD time_stamp:',str(e))
                        traceback.print_exc(file=sys.stderr)
                        continue

                    self.pos_cache.update(msg)
                    continue

                if msg['id'] == 24:
                    self.vessel_names.update_partial(msg)
                    continue

                if msg['id'] in (5,19):
                    msg['name'] = msg['name'].strip(' @')
                    if len(msg['name']) == 0: continue # Skip blank names
                    self.vessel_names.update(msg)
Example #12
 def testAll(self):
     """Decode one of each top level message"""
     # TODO: message 20
     for entry in test_data.top_level:
         body = ''.join([line.split(',')[5] for line in entry['nmea']])
         pad = int(entry['nmea'][-1].split('*')[0][-1])
         msg = ais.decode(body, pad)
         self.assertDictEqual(msg, entry['result'])
Example #13
def decode(s1, s2):
    try:
        msg = ais.decode(s1, int(s2[0]))
        if 'mmsi' in msg and 'x' in msg and 'y' in msg and msg['mmsi']:
            return (str(msg['mmsi']), msg['x'], msg['y'])
    except ais.DecodeError as e:
        pass
    return None
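Hypothetical usage of the helper above on the sentence from Example #30 (field 5 is the payload, field 6 starts with the fill-bit digit):
fields = '!AIVDM,1,1,,B,169A91O005KbT4gUoUl9d;5j0D0U,0*2D'.split(',')
position = decode(fields[5], fields[6])  # ('412371205', lon, lat) or None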
Example #14
 def testAll(self):
   """Decode one of each top level message"""
   # TODO: message 20
   for entry in test_data.top_level:
     body = ''.join([line.split(',')[5] for line in entry['nmea']])
     pad = int(entry['nmea'][-1].split('*')[0][-1])
     msg = ais.decode(body, pad)
     self.assertDictEqual(msg, entry['result'])
Example #15
def parse_ais(ais_str):
    if 'AIVDM' not in ais_str:
        raise ValueError('Missing AIVDM header')

    match = uscg_ais_nmea_regex.search(ais_str)
    if not match:
        raise ValueError('Not well formed')
    result = match.groupdict()
        
    return ais.decode(result['body'], 0)
Example #16
def parse_ais(ais_str):
    if 'AIVDM' not in ais_str:
        raise ValueError('Missing AIVDM header')

    match = uscg_ais_nmea_regex.search(ais_str)
    if not match:
        raise ValueError('Not well formed')
    result = match.groupdict()

    return ais.decode(result['body'], 0)
Example #17
    def main(self):
        if not SER_DEBUG:
            line = self.ser.readline()
            if line != b'':
                rospy.loginfo(line.decode("ascii"))
                self.f.write(line.decode("ascii"))
                self.f.write("\n")
                try:
                    data = line.decode("ascii").split(",")[5]
                    parsed = ais.decode(data, 0)

                    trajectory_info = TrajInfo()

                    trajectory_info.nb_sec = int(rospy.get_time())
                    trajectory_info.latitude = parsed['y']
                    trajectory_info.longitude = parsed['x']
                    trajectory_info.vitesse_nd = parsed['sog']
                    trajectory_info.heading = parsed['true_heading']
                    trajectory_info.imo = parsed['mmsi']
                    self.pub_trajInfo.publish(trajectory_info)
                except Exception as e:
                    rospy.logerr(e)
        else:
            line = self.lines[self.i % len(self.lines)]
            self.i += 1
            if line != b'':
                try:
                    data = line.split(",")[5]
                    parsed = ais.decode(data, 0)
                    rospy.loginfo(parsed)

                    trajectory_info = TrajInfo()

                    trajectory_info.nb_sec = int(rospy.get_time())
                    trajectory_info.latitude = parsed['y']
                    trajectory_info.longitude = parsed['x']
                    trajectory_info.vitesse_nd = parsed['sog']
                    trajectory_info.heading = parsed['cog']
                    trajectory_info.imo = parsed['mmsi']
                    self.pub_trajInfo.publish(trajectory_info)
                    time.sleep(3)
                except Exception as e:
                    rospy.logerr(e)
Example #18
def serialhandle():
    while True:
        line = ser.readline()
        line = line.decode("ISO-8859-1")
        if re.match("\!AIVDM,1", line):
            aismsg = line.split(",")
            aisdata = ais.decode(aismsg[5], int(aismsg[6][:1]))
            if aisdata["mmsi"] == 258968000:
                lat = aisdata["y"]
                lon = aisdata["x"]
                now = datetime.datetime.now()
                dataq.put([lat, lon, now])
Example #19
 def testAll(self):
   """Decode one of each top level message"""
   # TODO: message 20
   for entry in test_data.top_level:
     body = ''.join([line.split(',')[5] for line in entry['nmea']])
     pad = int(entry['nmea'][-1].split('*')[0][-1])
     msg = ais.decode(body, pad)
     expected = entry['result']
     if msg.keys() != expected.keys():
       sys.stderr.write('key mismatch: %s\n' % set(msg).symmetric_difference(set(expected)))
     self.assertDictEqual(msg, expected,
                          'Mismatch for id:%d\n%s\n%s' % (msg['id'] ,msg, expected))
Example #20
def decode_file(file_path, decoded_file):

    global number_of_exceptions
    global number_of_lines

    with gzip.open(file_path, 'rb') as f:
        for line in f:
            number_of_lines += 1
            try:
                #lines without time stamp
                if line.startswith(split_lines):
                    decoded_file.write(
                        str(
                            ais.decode(
                                line.split(split_message)[5],
                                int(line.split(split_message)[6][:1]))) +
                        '\n\n')
                #lines with time stamp
                else:
                    list1 = line.split(split_time)
                    decoded_file.write(
                        str(datetime.datetime.fromtimestamp(float(list1[0]))) +
                        '\n\n')
                    try:
                        decoded_file.write(
                            str(
                                ais.decode(
                                    line.split(split_message)[5],
                                    int(line.split(split_message)[6][:1]))) +
                            '\n\n')

                    #uncommon messages which start with time stamp
                    except:
                        number_of_exceptions += 1
                        pass

            #uncommon messages
            except:
                number_of_exceptions += 1
                pass
Example #21
def decode(message):

    decoded = None

    # immediately attempt to extract payload from single part messages
    if '\r\n' not in message:
        try:
            payload = message.split(',')[5]
        except Exception as e:
            log.error('{} trying to parse message {}'.format(e, message))
            return

        try:
            decoded = ais.decode(payload, 0)
        except Exception as e:
            log.error('{} trying to decode message {}'.format(e, message))
            return

    # unpack and assemble payload from multipart messages
    else:
        fragments = message.split('\r\n')
        try:
            payload = ''.join(fragment.split(',')[5] for fragment in fragments)
        except Exception as e:
            log.error('{} trying to parse multipart message {}'.format(
                e, message))
            return
        # The digit before the '*' checksum in the last fragment is the number of
        # fill (pad) bits that ais.decode needs to trim from the 6-bit payload.
        # See: https://github.com/schwehr/libais/blob/master/test/test_decode.py#L20
        pad = int(fragments[-1].split('*')[0][-1])
        try:
            decoded = ais.decode(payload, pad)
        except Exception as e:
            log.error('{} trying to decode multipart message {}'.format(
                e, message))
            log.debug('Payload: {}'.format(payload))
            log.debug('Pad: {}'.format(pad))
            return

    return decoded
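A concrete illustration of the pad extraction used above, on the sentence from Example #30: the digit just before the '*' checksum is the fill-bit count passed to ais.decode:
sentence = '!AIVDM,1,1,,B,169A91O005KbT4gUoUl9d;5j0D0U,0*2D'
pad = int(sentence.split('*')[0][-1])  # -> 0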
Example #22
 def handle(self):
     data = 'dummy'
     while data:
         data = self.request.recv(1024)
         clean_data=data.strip()
         if clean_data == 'bye':
             return
         try:
             decoded_ais_data = ais.decode(clean_data)
             jdump = json.dumps(decoded_ais_data)
             self.request.send(jdump)
          except Exception:
              print("couldn't decode packet: ", clean_data)
Example #23
def decode(message):

    decoded = None

    # immediately attempt to extract payload from single part messages
    if '\r\n' not in message:
        try:
            payload = message.split(',')[5]
        except Exception as e:
            log.error('{} trying to parse message {}'.format(e, message))
            return

        try:
            decoded = ais.decode(payload, 0)
        except Exception as e:
            log.error('{} trying to decode message {}'.format(e, message))
            return

    # unpack and assemble payload from multipart messages
    else:
        fragments = message.split('\r\n')
        try:
            payload = ''.join(fragment.split(',')[5] for fragment in fragments)
        except Exception as e:
            log.error('{} trying to parse multipart message {}'.format(e, message))
            return
        # The digit before the '*' checksum in the last fragment is the number of
        # fill (pad) bits that ais.decode needs to trim from the 6-bit payload.
        # See: https://github.com/schwehr/libais/blob/master/test/test_decode.py#L20
        pad = int(fragments[-1].split('*')[0][-1])
        try:
            decoded = ais.decode(payload, pad)
        except Exception as e:
            log.error('{} trying to decode multipart message {}'.format(e, message))
            log.debug('Payload: {}'.format(payload))
            log.debug('Pad: {}'.format(pad))
            return

    return decoded
Example #24
 def testAll(self):
     """Decode one of each top level message"""
     # TODO: message 20
     for entry in test_data.top_level:
         body = ''.join([line.split(',')[5] for line in entry['nmea']])
         pad = int(entry['nmea'][-1].split('*')[0][-1])
         msg = ais.decode(body, pad)
         expected = entry['result']
         if msg.keys() != expected.keys():
             sys.stderr.write('key mismatch: %s\n' %
                              set(msg).symmetric_difference(set(expected)))
         self.assertDictEqual(
             msg, expected, 'Mismatch for id:%d\n%s\n%s\n  From: %s' %
             (msg['id'], msg, expected, entry['nmea']))
Example #25
def decodeAis(message):
    words = list()
    i = 0
    for msg in message:
        msg_list = msg.rstrip().split(',')
        words.append(msg_list[5])
        i += 1
    if i > 1:
        fill = 2
    else:
        fill = 0
    sentence = ''.join(words)
    try:
        return ais.decode(sentence, fill)
    except Exception as e:
        return {'id': 999, 'error': str(e)}
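The example above guesses two fill bits for any multi-sentence message; a hedged variant reads the actual count from the last sentence (the digit in field 6, before the checksum):
import ais

def decode_ais_exact(message):
    sentences = [m.rstrip() for m in message]
    payload = ''.join(s.split(',')[5] for s in sentences)
    fill = int(sentences[-1].split(',')[6][0])
    return ais.decode(payload, fill)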
Example #26
def decode(nmea=sys.stdin,
           errorcb=ErrorPrinter,
           keep_nmea=False,
           **kw):
  """Decodes a stream of AIS messages. Takes the same arguments as normalize."""

  for tagblock, line in normalize(nmea=nmea, errorcb=errorcb, **kw):
    body = ''.join(line.split(',')[5])
    pad = int(line.split('*')[0][-1])
    try:
      res = ais.decode(body, pad)
      res.update(tagblock)
      if keep_nmea:
        res['nmea'] = line
      yield res
    except ais.DecodeError as e:
      errorcb(e)
Example #27
 def handle(self):
   data = self.rfile.readline()
   while data:
     data = data.strip().decode("utf-8")
     ais_arr = data.split(',')
     num_fragments = int(ais_arr[1])
     fragment_number = int(ais_arr[2])
     sentence_id = ais_arr[3]
     channel = ais_arr[4]
     data_payload = ais_arr[5]
     if num_fragments == 1:
       decoded_message = ais.decode(data_payload,0)
       self.indexDoc(doc=decoded_message)
     elif fragment_number < num_fragments:
       if sentence_id in self._messagemap:
         self._messagemap[sentence_id] = self._messagemap[sentence_id] + data_payload
       else:
Example #28
def rawdata(request):
    resp = HttpResponse()
    mmsis = []
    aiss = []

    with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "nmea-sample.txt"), 'r') as f:
        for line in f:
            m = re.search(r'!AIVDM,[\w]?,[\w]?,[\w]?,[\w]?,(?P<ais>[^,]*)', line)
            if m:
                aisData = m.group('ais')
                try:
                    resp.write(ais.decode(aisData, 0))
                except:
                    pass
                    #resp.write("Could not decode " + aisData)
                resp.write("<br/><br />")

    return resp
Example #29
 def ParseAIS_String(self, ais_str):
     aivdm_m = re.match(
         r'!AIVDM,([0-9]*),([0-9]*),([0-9]*),([AB]+),([\w]+),([0-9]*)([*A-Z0-9]+)',
         ais_str)
     if (aivdm_m):
         self.frag_count = aivdm_m.group(1)
         self.frag_num = aivdm_m.group(2)
         self.seq_msgid = aivdm_m.group(3)
         self.radio_chan = aivdm_m.group(4)
         self.payload = aivdm_m.group(5)
         self.fill_bits = aivdm_m.group(6)
         self.nmea_chksum = aivdm_m.group(7)
         try:
             self.decodedMsg = ais.decode(self.payload)
             #print self.decodedMsg
             return self.decodedMsg
         except:
             self.decodedMsg = None
     return None
Example #30
  def testMsg1(self):
    fields = '!AIVDM,1,1,,B,169A91O005KbT4gUoUl9d;5j0D0U,0*2D'.split(',')
    decoded = ais.decode(fields[5], int(fields[6][0]))
    mangled = self.mangle(decoded)
    expected = {
        'type': 1,
        'repeat': 0,
        'mmsi': 412371205,
        'status': 15,
        'status_text': 'Not defined',
        'turn': 0,
        'speed': 0.5,
        'accuracy': False,
        'course': 248.0,
        'heading': 354,
        'second': 57,
        'maneuver': 0,
        'raim': False}
    self.assertDictContainsSubset(expected, mangled)

    # Float values will not match, so just test existence.
    for field in ('lat', 'lon'):
      self.assertIn(field, mangled)
Example #31
def convert(f):
    print "Converting"
    buffer = ''
    for line in f:
        if line.startswith("\\"):
            line = line[1:]
            header, nmea = line.split("\\", 1)
            header = dict(item.upper().split(":") for item in header.split("*")[0].split(","))
        else:
            nmea = line
            header = {}

        buffer += nmea.split(',')[5]
        pad = int(nmea.split('*')[0][-1])

        try:
            msg = ais.decode(buffer, pad)
        except:
            pass
        else:
            buffer = ''
            msg.update(header)
            yield msg
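A hypothetical driver for convert() above (the log file name is illustrative; lines may carry a backslash-delimited tag block, as the example assumes):
with open('nmea.log') as f:  # illustrative path
    for msg in convert(f):
        print(msg.get('mmsi'), msg.get('x'), msg.get('y'))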
Example #32
    def testMsg1(self):
        fields = '!AIVDM,1,1,,B,169A91O005KbT4gUoUl9d;5j0D0U,0*2D'.split(',')
        decoded = ais.decode(fields[5], int(fields[6][0]))
        mangled = self.mangle(decoded)
        expected = {
            'type': 1,
            'repeat': 0,
            'mmsi': 412371205,
            'status': 15,
            'status_text': 'Not defined',
            'turn': 0,
            'speed': 0.5,
            'accuracy': False,
            'course': 248.0,
            'heading': 354,
            'second': 57,
            'maneuver': 0,
            'raim': False
        }
        self.assertDictContainsSubset(expected, mangled)

        # Float values will not match, so just test existence.
        for field in ('lat', 'lon'):
            self.assertIn(field, mangled)
Example #33
def main():
    (options,args) = get_parser().parse_args()
    v = options.verbose
   
    match_count = 0
    counters = {}
    for i in range(30):
        counters[i] = 0

    if v: print ('connecting to db')
    options_dict = vars(options) # Turn options into a real dictionary

    if options.drop_database:   drop_database(  options.database_name, v)
    if options.create_database: create_database(options.database_name, v)

    cx_str = "dbname='{database_name}' user='******' host='{database_host}'".format(**options_dict)
    if v: print ('cx_str:',cx_str)
    cx = psycopg2.connect(cx_str)

    if options.drop_tables: drop_tables(cx, v)
    if options.create_tables: create_tables(cx, v)

    if options.delete_table_entries: delete_table_entries(cx, v)


    if v: print ('initializing caches...')
    
    # Caches
    vessel_names = VesselNames(cx, verbose = v)
    pos_cache = PositionCache(db_cx=cx, verbose=v)

    if options.preload_names is not None: vessel_names.preload_db(options.preload_names)
    

    # FIFOs

    line_queue = LineQueue(maxsize = 1000) # If we have to drop, drop early
    norm_queue = NormQueue()

    if len(args) == 0:
        print ('GOING_LIVE: no log files specified for reading')

        run_network_app(
            host_name = options.in_host,
            port_num = options.in_port,
            vessel_names = vessel_names,
            pos_cache = pos_cache,
            line_queue = line_queue,
            norm_queue = norm_queue,
            verbose=v
            )

        print ('GOODBYE... main thread ending')
        return
        
    print ('USING_LOG_FILES: non-threaded')
    for infile in args:
        if v: print ('reading data from ...',infile)
        last_time = time.time()
        last_count = 0
        last_match_count = 0
        
        last_ais_msg_cnt = 0 # FULL AIS message decoded
        ais_msg_cnt = 0
        
        for line_num, text in enumerate(open(infile)):
            
            #if line_num > 300: break
            #print ()
            if line_num % 10000 == 0:
                print ("line: %d   %d" % (line_num,match_count),
                       #'\tupdate_rate:',(match_count - last_match_count) / (time.time() - last_time), '(lines/sec)'
                       #'\tupdate_rate:',(line_num - last_count) / (time.time() - last_time), '(msgs/sec)'
                       '\tupdate_rate:',(ais_msg_cnt - last_ais_msg_cnt) / (time.time() - last_time), '(msgs/sec)'
                       )
                last_time = time.time()
                last_count = line_num
                last_match_count = match_count
                last_ais_msg_cnt = ais_msg_cnt
                
            if 'AIVDM' not in text:
                continue

            line_queue.put(text)

            while line_queue.qsize() > 0:
                line = line_queue.get(False)
                if len(line) < 15 or '!' != line[0]: continue # Try to go faster

                #print ('line:',line)
                try:
                    match = uscg_ais_nmea_regex.search(line).groupdict()
                    match_count += 1
                except AttributeError:
                    if 'AIVDM' in line:
                        print ('BAD_MATCH:',line)
                    continue

                norm_queue.put(match)

                while norm_queue.qsize()>0:

                    try:
                        result = norm_queue.get(False)
                    except Queue.Empty:
                        continue

                    if len(result['body']) < 10: continue
                    # FIX: make sure we have all the critical messages

                    # FIX: add 9
                    if result['body'][0] not in ('1', '2', '3', '5', 'B', 'C', 'H') :
                        #print( 'skipping',result['body'])
                        continue
                    #print( 'not skipping',result['body'])

                    try:
                         msg = ais.decode(result['body'])
                    #except ais.decode.error:
                    except Exception as e:
                        #print ()

                        if 'not yet handled' in str(e):
                            continue
                        if ' not known' in str(e): continue

                        print ('BAD Decode:',result['body'][0]) #result
                            #continue
                        print ('E:',Exception)
                        print ('e:',e)
                        continue
                        #pass # Bad or unhandled message

                    #continue #  FIX: Remove after debugging

                    counters[msg['id']] += 1

                    if msg['id'] in (1,2,3,5,18,19,24): ais_msg_cnt += 1

                    #continue  # Skip all the database stuff

                    if msg['id'] in (1,2,3,18,19):
                        # for field in ('received_stations', 'rot', 'raim', 'spare','timestamp', 'position_accuracy', 'rot_over_range', 'special_manoeuvre','slot_number',
                        #               'utc_spare', 'utc_min', 'slots_to_allocate', 'slot_increment','commstate_flag', 'mode_flag', 'utc_hour', 'band_flag', 'keep_flag',
                        #               ):
                        #     try:
                        #         msg.pop(field)
                        #     except:
                        #         pass
                        #print (msg['mmsi'])
                        #print (','.join(["'%s'" %(key,)for key in msg.keys()]))
                        #print (result)
                        msg['time_stamp'] = float(result['time_stamp'])
                        #if msg['mmsi'] in (304606000, 366904910, 366880220): dump_file.write(str(msg)+',\n')
                        pos_cache.update(msg)
                        continue

                    #continue # FIX for debugging


                    if msg['id'] == 24:
                        #print24(msg)
                        vessel_names.update_partial(msg)

                    #continue # FIX remove

                    if msg['id'] in (5,19):
                        msg['name'] = msg['name'].strip(' @')
                        if len(msg['name']) == 0: continue # Skip blank names
                        #print ('UPDATING vessel name', msg)
                        #vessel_names.update(msg['mmsi'], msg['name'].rstrip('@'), msg['type_and_cargo'])
                        #if msg['mmsi'] == 367178330:
                        #    print (' CHECK:', msg['mmsi'], msg['name'])
                        vessel_names.update(msg)
                        #print ()
                        
    print ('match_count:',match_count)
    #print (counters)
    for key in counters:
        if counters[key] < 1: continue
        print ('%d: %d' % (key,counters[key]))
Example #34
def main():
    (options, args) = get_parser().parse_args()
    v = options.verbose

    match_count = 0
    counters = {}
    for i in range(30):
        counters[i] = 0

    if v: print('connecting to db')
    options_dict = vars(options)  # Turn options into a real dictionary

    if options.drop_database: drop_database(options.database_name, v)
    if options.create_database: create_database(options.database_name, v)

    cx_str = "dbname='{database_name}' user='******' host='{database_host}'".format(
        **options_dict)
    if v: print('cx_str:', cx_str)
    cx = psycopg2.connect(cx_str)

    if options.drop_tables: drop_tables(cx, v)
    if options.create_tables: create_tables(cx, v)

    if options.delete_table_entries: delete_table_entries(cx, v)

    if v: print('initializing caches...')

    # Caches
    vessel_names = VesselNames(cx, verbose=v)
    pos_cache = PositionCache(db_cx=cx, verbose=v)

    if options.preload_names is not None:
        vessel_names.preload_db(options.preload_names)

    # FIFOs

    line_queue = LineQueue(maxsize=1000)  # If we have to drop, drop early
    norm_queue = NormQueue()

    if len(args) == 0:
        print('GOING_LIVE: no log files specified for reading')

        run_network_app(host_name=options.in_host,
                        port_num=options.in_port,
                        vessel_names=vessel_names,
                        pos_cache=pos_cache,
                        line_queue=line_queue,
                        norm_queue=norm_queue,
                        verbose=v)

        print('GOODBYE... main thread ending')
        return

    print('USING_LOG_FILES: non-threaded')
    for infile in args:
        if v: print('reading data from ...', infile)
        last_time = time.time()
        last_count = 0
        last_match_count = 0

        last_ais_msg_cnt = 0  # FULL AIS message decoded
        ais_msg_cnt = 0

        for line_num, text in enumerate(open(infile)):

            #if line_num > 300: break
            #print ()
            if line_num % 10000 == 0:
                print(
                    "line: %d   %d" % (line_num, match_count),
                    #'\tupdate_rate:',(match_count - last_match_count) / (time.time() - last_time), '(lines/sec)'
                    #'\tupdate_rate:',(line_num - last_count) / (time.time() - last_time), '(msgs/sec)'
                    '\tupdate_rate:',
                    (ais_msg_cnt - last_ais_msg_cnt) /
                    (time.time() - last_time),
                    '(msgs/sec)')
                last_time = time.time()
                last_count = line_num
                last_match_count = match_count
                last_ais_msg_cnt = ais_msg_cnt

            if 'AIVDM' not in text:
                continue

            line_queue.put(text)

            while line_queue.qsize() > 0:
                line = line_queue.get(False)
                if len(line) < 15 or '!' != line[0]:
                    continue  # Try to go faster

                #print ('line:',line)
                try:
                    match = uscg_ais_nmea_regex.search(line).groupdict()
                    match_count += 1
                except AttributeError:
                    if 'AIVDM' in line:
                        print('BAD_MATCH:', line)
                    continue

                norm_queue.put(match)

                while norm_queue.qsize() > 0:

                    try:
                        result = norm_queue.get(False)
                    except Queue.Empty:
                        continue

                    if len(result['body']) < 10: continue
                    # TODO(schwehr): make sure we have all the critical messages

                    # FIX: add 9
                    if result['body'][0] not in ('1', '2', '3', '5', 'B', 'C',
                                                 'H'):
                        continue

                    try:
                        msg = ais.decode(result['body'])
                    # TODO(schwehr): except ais.decode.error:
                    except Exception as e:
                        if 'not yet handled' in str(e):
                            continue
                        if ' not known' in str(e): continue

                        print('BAD Decode:', result['body'][0])
                        print('E:', Exception)
                        print('e:', e)
                        continue

                    counters[msg['id']] += 1

                    if msg['id'] in (1, 2, 3, 5, 18, 19, 24): ais_msg_cnt += 1

                    if msg['id'] in (1, 2, 3, 18, 19):
                        msg['time_stamp'] = float(result['time_stamp'])
                        pos_cache.update(msg)
                        continue

                    if msg['id'] == 24:
                        vessel_names.update_partial(msg)

                    if msg['id'] in (5, 19):
                        msg['name'] = msg['name'].strip(' @')
                        if len(msg['name']) == 0: continue  # Skip blank names
                        vessel_names.update(msg)

    print('match_count:', match_count)
    for key in counters:
        if counters[key] < 1: continue
        print('%d: %d' % (key, counters[key]))
Example #35
    def put(self, line, line_num=None):
        if line_num is not None:
            self.line_num = line_num
        else:
            self.line_num += 1

        line = line.rstrip()
        metadata_match = Parse(line)
        match = vdm.Parse(line)

        if not match:
            logging.info("not match")
            msg = {"line_nums": [self.line_num], "lines": [line]}
            if metadata_match:
                msg["match"] = metadata_match
            Queue.Queue.put(self, msg)
            return

        if not metadata_match:
            logging.info("not metadata match")
            self.unknown_queue.put(line)
            if not self.unknown_queue.empty():
                msg = Queue.Queue.get(self)
                self.put(msg)
            return

        match.update(metadata_match)

        if "station" not in match:
            match["station"] = "rUnknown"

        sentence_tot = int(match["sen_tot"])

        if sentence_tot == 1:
            body = match["body"]
            fill_bits = int(match["fill_bits"])
            try:
                decoded = ais.decode(body, fill_bits)
            except ais.DecodeError as error:
                logging.error("Unable to decode message: %s\n  %d %s", error, self.line_num, line)
                return
            decoded["md5"] = hashlib.md5(body.encode("utf-8")).hexdigest()
            Queue.Queue.put(self, {"line_nums": [line_num], "lines": [line], "decoded": decoded, "matches": [match]})
            return

        station = match["station"] or "rUnknown"
        sentence_num = int(match["sen_num"])
        sequence_id = match["seq_id"] or ""
        group_id = station + str(sequence_id)
        time = util.MaybeToNumber(match["time"])

        if group_id not in self.groups:
            self.groups[group_id] = []

        if not self.groups[group_id]:
            if sentence_num != 1:
                # Drop a partial AIS message.
                return

        if sentence_num == 1:
            self.groups[group_id] = {"line_nums": [self.line_num], "lines": [line], "matches": [match], "times": [time]}
            return

        entry = self.groups[group_id]
        entry["line_nums"].append(self.line_num)
        entry["lines"].append(line)
        entry["matches"].append(match)
        entry["times"].append(time)

        if sentence_num != sentence_tot:
            # Found the middle part of a message.
            return

        decoded = DecodeMultiple(entry)

        if decoded:
            entry["decoded"] = decoded
        else:
            logging.info("Unable to process: %s", entry)
        Queue.Queue.put(self, entry)
        self.groups.pop(group_id)
Example #36
  def put(self, line, line_num=None):
    if line_num is not None:
      self.line_num = line_num
    else:
      self.line_num += 1

    line = line.rstrip()
    metadata_match = Parse(line)
    match = vdm.Parse(line)

    if not match:
      logging.info('not match')
      msg = {
          'line_nums': [self.line_num],
          'lines': [line],
      }
      if metadata_match:
        msg['match'] = metadata_match
      Queue.Queue.put(self, msg)
      return

    if not metadata_match:
      logging.info('not metadata match')
      self.unknown_queue.put(line)
      if not self.unknown_queue.empty():
        msg = Queue.Queue.get(self)
        self.put(msg)
      return

    match.update(metadata_match)

    if 'station' not in match:
      match['station'] = 'rUnknown'

    sentence_tot = int(match['sen_tot'])

    if sentence_tot == 1:
      body = match['body']
      fill_bits = int(match['fill_bits'])
      try:
        decoded = ais.decode(body, fill_bits)
      except ais.DecodeError as error:
        logging.error(
            'Unable to decode message: %s\n  %d %s', error, self.line_num, line)
        return
      decoded['md5'] = hashlib.md5(body.encode('utf-8')).hexdigest()
      Queue.Queue.put(self, {
          'line_nums': [line_num],
          'lines': [line],
          'decoded': decoded,
          'matches': [match]
      })
      return

    station = match['station'] or 'rUnknown'
    sentence_num = int(match['sen_num'])
    sequence_id = match['seq_id'] or ''
    group_id = station + str(sequence_id)
    time = util.MaybeToNumber(match['time'])

    if group_id not in self.groups:
      self.groups[group_id] = []

    if not self.groups[group_id]:
      if sentence_num != 1:
        # Drop a partial AIS message.
        return

    if sentence_num == 1:
      self.groups[group_id] = {
          'line_nums': [self.line_num],
          'lines': [line],
          'matches': [match],
          'times': [time],
      }
      return

    entry = self.groups[group_id]
    entry['line_nums'].append(self.line_num)
    entry['lines'].append(line)
    entry['matches'].append(match)
    entry['times'].append(time)

    if sentence_num != sentence_tot:
      # Found the middle part of a message.
      return

    decoded = DecodeMultiple(entry)

    if decoded:
      entry['decoded'] = decoded
    else:
      logging.info('Unable to process: %s', entry)
    Queue.Queue.put(self, entry)
    self.groups.pop(group_id)
Example #37
def ais_decode(output_data):
    utc = ''
    msg = ''
    flag = 0
    length = 0
    ais_data = {}

    station_types = {
        0: 'All types of mobiles',
        1: 'Reserved for future use',
        2: 'All types of Class B mobile stations',
        3: 'SAR airborne mobile station',
        4: 'Aid to Navigation station',
        5: 'Class B shipborne mobile station (IEC62287 only)',
        6: 'Regional use and inland waterways',
        7: 'Regional use and inland waterways',
        8: 'Regional use and inland waterways',
        9: 'Regional use and inland waterways',
        10: 'Reserved for future use',
        11: 'Reserved for future use',
        12: 'Reserved for future use',
        13: 'Reserved for future use',
        14: 'Reserved for future use',
        15: 'Reserved for future use'
    }

    aton_types = {
        0: 'Default, Type of Aid to Navigation not specified',
        1: 'Reference point',
        2: 'RACON (radar transponder marking a navigation hazard)',
        3:
        'Fixed structure off shore, such as oil platforms, wind farms, rigs.',
        4: 'Spare, Reserved for future use.',
        5: 'Light, without sectors',
        6: 'Light, with sectors',
        7: 'Leading Light Front',
        8: 'Leading Light Rear',
        9: 'Beacon, Cardinal N',
        10: 'Beacon, Cardinal E',
        11: 'Beacon, Cardinal S',
        12: 'Beacon, Cardinal W',
        13: 'Beacon, Port hand',
        14: 'Beacon, Starboard hand',
        15: 'Beacon, Preferred Channel port hand',
        16: 'Beacon, Preferred Channel starboard hand',
        17: 'Beacon, Isolated danger',
        18: 'Beacon, Safe water',
        19: 'Beacon, Special mark',
        20: 'Cardinal Mark N',
        21: 'Cardinal Mark E',
        22: 'Cardinal Mark S',
        23: 'Cardinal Mark W',
        24: 'Port hand Mark',
        25: 'Starboard hand Mark',
        26: 'Preferred Channel Port hand',
        27: 'Preferred Channel Starboard hand',
        28: 'Isolated danger',
        29: 'Safe Water',
        30: 'Special Mark',
        31: 'Light Vessel / LANBY / Rigs'
    }

    fix_types = {
        0: 'Undefined',
        1: 'GPS',
        2: 'GLONASS',
        3: 'Combined GPS/GLONASS',
        4: 'Loran-C',
        5: 'Chayka',
        6: 'Integrated navigation system',
        7: 'Surveyed',
        8: 'Galileo'
    }

    # Match the output of gpsd 3.11.
    nav_statuses = {
        0: 'Under way using engine',
        1: 'At anchor',
        2: 'Not under command',
        3: 'Restricted manoeuverability',  # Maneuverability.
        4: 'Constrained by her draught',
        5: 'Moored',
        6: 'Aground',
        7: 'Engaged in fishing',
        8: 'Under way sailing',
        # Reserved for future amendment of navigational status for ships
        # carrying DG, HS, or MP, or IMO hazard or pollutant category C,
        # high speed craft (HSC).
        9: 'Reserved for HSC',
        # Reserved for future amendment of navigational status for ships
        # carrying dangerous goods (DG), harmful substances (HS) or marine
        # pollutants (MP), or IMO hazard or pollutant category A, wing in
        # ground (WIG).
        10: 'Reserved for WIG',
        # Power-driven vessel towing astern (regional use).
        11: 'Reserved',
        # Power-driven vessel pushing ahead or towing alongside (regional use).
        12: 'Reserved',
        # Reserved for future use.
        13: 'Reserved',
        # AIS-SART (active), MOB-AIS, EPIRB-AIS,
        14: 'Reserved',
        # Default (also used by AIS-SART, MOB-AIS and EPIRB-AIS under test).
        15: 'Not defined'
    }

    ship_types = {
        0: 'Not available',
        1: 'Reserved for future use',
        2: 'Reserved for future use',
        3: 'Reserved for future use',
        4: 'Reserved for future use',
        5: 'Reserved for future use',
        6: 'Reserved for future use',
        7: 'Reserved for future use',
        8: 'Reserved for future use',
        9: 'Reserved for future use',
        10: 'Reserved for future use',
        11: 'Reserved for future use',
        12: 'Reserved for future use',
        13: 'Reserved for future use',
        14: 'Reserved for future use',
        15: 'Reserved for future use',
        16: 'Reserved for future use',
        17: 'Reserved for future use',
        18: 'Reserved for future use',
        19: 'Reserved for future use',
        20: 'Wing in ground (WIG), all ships of this type',
        21: 'Wing in ground (WIG), Hazardous category A',
        22: 'Wing in ground (WIG), Hazardous category B',
        23: 'Wing in ground (WIG), Hazardous category C',
        24: 'Wing in ground (WIG), Hazardous category D',
        25: 'Wing in ground (WIG), Reserved for future use',
        26: 'Wing in ground (WIG), Reserved for future use',
        27: 'Wing in ground (WIG), Reserved for future use',
        28: 'Wing in ground (WIG), Reserved for future use',
        29: 'Wing in ground (WIG), Reserved for future use',
        30: 'Fishing',
        31: 'Towing',
        32: 'Towing: length exceeds 200m or breadth exceeds 25m',
        33: 'Dredging or underwater ops',
        34: 'Diving ops',
        35: 'Military ops',
        36: 'Sailing',
        37: 'Pleasure Craft',
        38: 'Reserved',
        39: 'Reserved',
        40: 'High speed craft (HSC), all ships of this type',
        41: 'High speed craft (HSC), Hazardous category A',
        42: 'High speed craft (HSC), Hazardous category B',
        43: 'High speed craft (HSC), Hazardous category C',
        44: 'High speed craft (HSC), Hazardous category D',
        45: 'High speed craft (HSC), Reserved for future use',
        46: 'High speed craft (HSC), Reserved for future use',
        47: 'High speed craft (HSC), Reserved for future use',
        48: 'High speed craft (HSC), Reserved for future use',
        49: 'High speed craft (HSC), No additional information',
        50: 'Pilot Vessel',
        51: 'Search and Rescue vessel',
        52: 'Tug',
        53: 'Port Tender',
        54: 'Anti-pollution equipment',
        55: 'Law Enforcement',
        56: 'Spare - Local Vessel',
        57: 'Spare - Local Vessel',
        58: 'Medical Transport',
        59: 'Noncombatant ship according to RR Resolution No. 18',
        60: 'Passenger, all ships of this type',
        61: 'Passenger, Hazardous category A',
        62: 'Passenger, Hazardous category B',
        63: 'Passenger, Hazardous category C',
        64: 'Passenger, Hazardous category D',
        65: 'Passenger, Reserved for future use',
        66: 'Passenger, Reserved for future use',
        67: 'Passenger, Reserved for future use',
        68: 'Passenger, Reserved for future use',
        69: 'Passenger, No additional information',
        70: 'Cargo, all ships of this type',
        71: 'Cargo, Hazardous category A',
        72: 'Cargo, Hazardous category B',
        73: 'Cargo, Hazardous category C',
        74: 'Cargo, Hazardous category D',
        75: 'Cargo, Reserved for future use',
        76: 'Cargo, Reserved for future use',
        77: 'Cargo, Reserved for future use',
        78: 'Cargo, Reserved for future use',
        79: 'Cargo, No additional information',
        80: 'Tanker, all ships of this type',
        81: 'Tanker, Hazardous category A',
        82: 'Tanker, Hazardous category B',
        83: 'Tanker, Hazardous category C',
        84: 'Tanker, Hazardous category D',
        85: 'Tanker, Reserved for future use',
        86: 'Tanker, Reserved for future use',
        87: 'Tanker, Reserved for future use',
        88: 'Tanker, Reserved for future use',
        89: 'Tanker, No additional information',
        90: 'Other Type, all ships of this type',
        91: 'Other Type, Hazardous category A',
        92: 'Other Type, Hazardous category B',
        93: 'Other Type, Hazardous category C',
        94: 'Other Type, Hazardous category D',
        95: 'Other Type, Reserved for future use',
        96: 'Other Type, Reserved for future use',
        97: 'Other Type, Reserved for future use',
        98: 'Other Type, Reserved for future use',
        99: 'Other Type, no additional information'
    }

    for data in output_data:
        if data[1][0] == '!AIVDM':
            if data[1][1] == '1':
                msg = data[1][5]
                length = 0
                flag = 0
            else:
                if flag == 0:
                    msg = data[1][5]
                    flag = 1
                else:
                    msg += data[1][5]
                    flag = 0
                    length = 2
            if flag == 0:
                try:
                    decode = ais.decode(msg, length)
                    decode['utc'] = data[0]
                    if 'station_type' in decode:
                        decode['station_type_text'] = station_types[
                            decode['station_type']]

                    if 'aton_type' in decode:
                        decode['aton_type_text'] = aton_types[
                            decode['aton_type']]

                    if 'fix_type' in decode:
                        decode['epfd_text(fix_type)'] = fix_types[
                            decode['fix_type']]

                    if 'nav_status' in decode:
                        decode['nav_status_text'] = nav_statuses[
                            decode['nav_status']]

                    if 'type_and_cargo' in decode:
                        decode['shiptype_text(type_and_cargo)'] = ship_types[
                            decode['type_and_cargo']]

                    # Message Type ID
                    if decode['id'] < 4:
                        decode['type'] = 123
                    else:
                        decode['type'] = decode['id']

                    if len(utc) > 0:
                        if decode['type'] in ais_data:
                            if utc in ais_data[decode['type']]:
                                ais_data[decode['type']][utc].append(decode)
                            else:
                                ais_data[decode['type']].update(
                                    {utc: [decode]})
                        else:
                            ais_data[decode['type']] = {utc: [decode]}
                except:
                    continue
        else:
            # '$GPGGA, $GPRMC --> UTC Time'
            utc = data[0]

    return ais_data
Example #38
    msg.vitesse = float(data['sog'])
    msg.cap = float(data['cog'])
    msg.vitesseRot = float(data['rot'])
    msg.statut = float(data['special_manoeuvre'])

    print(data)
    pub.publish(msg)
    rate.sleep()

buff = ''  # for first part of 2-lines msgs
while not rospy.is_shutdown():

    data = ser.readline()
    data = data.split(',')
    # msg in one line
    if data[0] == '!AIVDM' and data[1] == '1':
        publish(ais.decode(data[5], 0))

    # msg in two lines, part 1
    elif data[0] == '!AIVDM' and data[1] == '2' and data[2] == '1':
        buff = data[5]

    # msg in two lines, part 2
    elif data[0] == '!AIVDM' and data[1] == '2' and data[2] == '2':
        if len(buff) == 56 and len(data[5]) == 15:
            total = buff + data[5]
            publish(ais.decode(total, 2))
            buff = ''
        else:
            publish('error', 'biz')
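
The two-line handling above assumes the two parts of a group arrive back-to-back and match the payload lengths of a Type 5 message (56 + 15 characters). A more general sketch (hypothetical helper, assuming `import ais` and in-order delivery) keys the buffer on the sequential message id (fields[3]), so interleaved groups are not mixed together:

import ais

fragments = {}  # sequential message id -> payload fragments received so far

def feed(fields):
    """fields: an already comma-split !AIVDM sentence; returns a decoded dict or None."""
    total, num, seq_id = int(fields[1]), int(fields[2]), fields[3]
    payload = fields[5]
    fill_bits = int(fields[6].split('*')[0])
    if total == 1:
        return ais.decode(payload, fill_bits)
    fragments.setdefault(seq_id, []).append(payload)
    if num == total:
        # Final part of the group: join the payloads and decode with the last part's fill bits.
        return ais.decode(''.join(fragments.pop(seq_id)), fill_bits)
    return None
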
Example #39
0
    def process_RAIS(self, ais_message, for_send, ships_mat):
        # Error printing was removed here while troubleshooting a different
        # part of the code.  The error messages seen from the Cambridge Pixel
        # feed are as follows:
        # AIS_1 exception Ais5: AIS_ERR_BAD_BIT_COUNT
        # nmea_receive AIS_0 exception ais.decode: unknown message - Q
        # AIS_1 exception ais.decode: unknown message - 0

        s = ais_message

        ais_message = s.split(',')  # [5]

        try:
            p = ais_message[5]
            q = ais.decode(p, 0)

        except Exception:
            # self.log.error(f'nmea_receive AIS_0 exception {e}')
            # self.log.error(p)

            try:
                q = ais.decode(p, 2)
            # self.log.info("process_RAIS Success! 1")
            # self.log.info(q)
            except Exception as e2:
                # self.log.error(f' AIS_1 exception {s}')
                # self.log.error(f' AIS_1 exception {e2}')
                return (for_send, ships_mat)

        # self.log.info(q)

        mmsi = float(q['mmsi'])

        if mmsi == self.my_mmsi:  # this is our ship

            return (for_send, ships_mat)

        if 'heading' in q:
            heading_ais = q['heading']

        elif 'cog' in q:
            heading_ais = q['cog']

        else:
            heading_ais = 0

        if 'sog' in q:
            sog = q['sog']

        else:
            sog = 0

        if 'y' in q:
            self.lat_2 = q['y']
            # self.log.info(f"nmea_receive  AIS : {self.lat_2}")
        else:
            return (for_send, ships_mat)

        if 'x' in q:
            self.lng_2 = q['x']

        else:
            return (for_send, ships_mat)

        self.ship_update[0] = nt.get_time()  #
        self.ship_update[1] = mmsi
        self.ship_update[2] = self.lat_2
        self.ship_update[3] = self.lng_2
        self.ship_update[4] = heading_ais
        self.ship_update[5] = sog

        # self.log.info(f"nmea_receive self.ship_update = {self.ship_update}")
        # Only merge this ship into ships_mat when it lies within the region of
        # interest around the reference position stored at ships_mat[0, 0].
        if (ships_mat[0, 0, 3] - self.ROI_ships < self.lng_2 < ships_mat[0, 0, 3] + self.ROI_ships
                and ships_mat[0, 0, 2] - self.ROI_ships < self.lat_2 < ships_mat[0, 0, 2] + self.ROI_ships):
            ships_mat = nav_mat.ship_update(ships_mat, self.ship_update)
        # else:
        #     self.log.info("nmea_receive: No nearby ships detected")

        # self.log.info(ships_mat)

        return (for_send, ships_mat)
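
The nested try/except above retries the same payload with a fill-bit value of 2 whenever a decode with 0 fails. The same fallback as a compact sketch (hypothetical helper; ais.DecodeError is the exception type used by the other examples in this collection):

import ais

def decode_with_fallback(payload, pads=(0, 2)):
    """Try each candidate fill-bit value in turn; return None if none of them decodes."""
    for pad in pads:
        try:
            return ais.decode(payload, pad)
        except ais.DecodeError:
            continue
    return None
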
Example #40
0
File: vdm.py Project: jbrahy/libais
  def put(self, line, line_num=None):
    """Add a line of NMEA or raw text to the queue."""

    if line_num is not None:
      self.line_num = line_num
    else:
      self.line_num += 1
      line_num = self.line_num

    line = line.rstrip()
    match = Parse(line)

    if not match:
      logging.info('No VDM match for line: %d, %s', line_num, line)
      msg = {
          'line_nums': [line_num],
          'lines': [line]}
      decoded = nmea_messages.Decode(line)
      if decoded:
        msg['decoded'] = decoded
      else:
        logging.info('No NMEA match for line: %d, %s', line_num, line)
      Queue.Queue.put(self, msg)
      return

    sentence_total = int(match['sen_tot'])
    if sentence_total == 1:
      body = match['body']
      fill_bits = int(match['fill_bits'])
      try:
        decoded = ais.decode(body, fill_bits)
      except ais.DecodeError as error:
        logging.error(
            'Unable to decode message: %s\n  %d %s', error, line_num, line)
        return
      decoded['md5'] = hashlib.md5(body.encode('utf-8')).hexdigest()
      Queue.Queue.put(self, {
          'line_nums': [line_num],
          'lines': [line],
          'decoded': decoded,
          'matches': [match]
      })
      return

    sentence_num = int(match['sen_num'])
    group_id = int(match['seq_id'])

    if sentence_num == 1:
      if group_id in self.groups:
        logging.error('Incomplete message overwritten by new start.  '
                      'Dropped:\n  %s', self.groups[group_id])
      self.groups[group_id] = {
          'line_nums': [line_num],
          'lines': [line],
          'matches': [match]
      }
      return

    if group_id not in self.groups:
      logging.error('Do not have the prior lines in group_id %d. '
                    'Dropping: \n  %s', group_id, line)
      return

    entry = self.groups[group_id]
    if len(entry['lines']) != sentence_num - 1:
      logging.error('Out of sequence message.  Dropping: %d != %d \n %s',
                    len(entry['lines']), sentence_num - 1, line)
      return

    entry['lines'].append(line)
    entry['matches'].append(match)
    entry['line_nums'].append(line_num)

    if sentence_num != sentence_total:
      # Nothing more to do in the middle of a sequence of sentences.
      return

    body = ''.join([match['body'] for match in entry['matches']])
    fill_bits = int(entry['matches'][-1]['fill_bits'])
    try:
      decoded = ais.decode(body, fill_bits)
    except ais.DecodeError as error:
      logging.error(
          'Unable to decode message: %s\n%s', error, entry)
      return
    decoded['md5'] = hashlib.md5(body.encode('utf-8')).hexdigest()
    entry['decoded'] = decoded

    # Found the final message in a group.
    Queue.Queue.put(self, entry)
    self.groups.pop(group_id)
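
A minimal usage sketch for a queue like the one above (VdmQueue is a placeholder name; the actual class in vdm.py may differ): raw NMEA lines go in through put(), and assembled, decoded entries come back out through get().

queue = VdmQueue()  # placeholder for the Queue.Queue subclass whose put() is shown above

with open('nmea.log') as handle:  # assumed sample log of !AIVDM sentences
    for number, line in enumerate(handle, 1):
        queue.put(line, line_num=number)

while not queue.empty():
    entry = queue.get()
    if 'decoded' in entry:
        print(entry['decoded'].get('mmsi'))
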
Example #41
0
        fname = '.'.join(t.split())+'.kml'
        fld = KML.Folder()


    start_aisdeco(args.port, args.error)

    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect(('localhost', args.port))
    while 1:
        try:
            line = s.recv(2048)
            line = line.strip()
            body = ''.join(line.split(',')[5])
            pad = int(line.split(',')[-1].split('*')[0][-1])
            try:
                msg = decode(body, pad)
                if args.verbose:
                    alert(msg)
                if args.kml:
                    addKML(msg,fld)
                try:
                    if args.mmsi and msg['mmsi'] in args.mmsi:
                        print "\n#########BEGIN ALERT#########\n"
                        alert(msg)
                        print "#########END ALERT#########\n"
                    if args.name and msg['name'] in args.name:
                        print "\n#########BEGIN ALERT#########"
                        alert(msg)
                        print "#########END ALERT#########\n"
                except KeyError:
                    pass
Example #42
0
    msglength = int(payload[1])
    msgpart = int(payload[2])
    msgseqid = 0
    if payload[3]:
        msgseqid = int(payload[3])
    msg = payload[5]
    return (msg, pad, msgpart, msglength, msgseqid)


while True:

    try:

        msg, pad, msgpart, msglength, msgseqid = getmessage()
        if msglength == 1:
            decodedmessage = ais.decode(msg, pad)
            aisprocessor.process(decodedmessage)
        else:
            while msglength != msgpart:
                msgfragment, pad, msgpart, msglength, msgseqid = getmessage()
                msg += msgfragment

                if msglength == msgpart:  # Is this the final part?
                    decodedmessage = ais.decode(
                        msg[0:71], 2
                    )  # libais only accepts these AIS 5 messages when the payload is 71 chars with a pad of 2
                    aisprocessor.process_ais5(decodedmessage)
    except DecodeError:
        pass
    except:
        logging.error(traceback.format_exc())
Example #43
0
def main():
    (options,args) = get_parser().parse_args()
    v = options.verbose

    match_count = 0
    counters = {}
    for i in range(30):
        counters[i] = 0

    if v: print ('connecting to db')
    options_dict = vars(options) # Turn options into a real dictionary

    if options.drop_database:   drop_database(  options.database_name, v)
    if options.create_database: create_database(options.database_name, v)

    cx_str = "dbname='{database_name}' user='******' host='{database_host}'".format(**options_dict)
    if v: print ('cx_str:',cx_str)
    cx = psycopg2.connect(cx_str)

    if options.drop_tables: drop_tables(cx, v)
    if options.create_tables: create_tables(cx, v)

    if options.delete_table_entries: delete_table_entries(cx, v)


    if v: print ('initializing caches...')

    # Caches
    vessel_names = VesselNames(cx, verbose = v)
    pos_cache = PositionCache(db_cx=cx, verbose=v)

    if options.preload_names is not None: vessel_names.preload_db(options.preload_names)


    # FIFOs

    line_queue = LineQueue(maxsize = 1000) # If we have to drop, drop early
    norm_queue = NormQueue()

    if len(args) == 0:
        print ('GOING_LIVE: no log files specified for reading')

        run_network_app(
            host_name = options.in_host,
            port_num = options.in_port,
            vessel_names = vessel_names,
            pos_cache = pos_cache,
            line_queue = line_queue,
            norm_queue = norm_queue,
            verbose=v
            )

        print ('GOODBYE... main thread ending')
        return

    print ('USING_LOG_FILES: non-threaded')
    for infile in args:
        if v: print ('reading data from ...',infile)
        last_time = time.time()
        last_count = 0
        last_match_count = 0

        last_ais_msg_cnt = 0 # FULL AIS message decoded
        ais_msg_cnt = 0

        for line_num, text in enumerate(open(infile)):

            #if line_num > 300: break
            #print ()
            if line_num % 10000 == 0:
                print ("line: %d   %d" % (line_num,match_count),
                       #'\tupdate_rate:',(match_count - last_match_count) / (time.time() - last_time), '(lines/sec)'
                       #'\tupdate_rate:',(line_num - last_count) / (time.time() - last_time), '(msgs/sec)'
                       '\tupdate_rate:',(ais_msg_cnt - last_ais_msg_cnt) / (time.time() - last_time), '(msgs/sec)'
                       )
                last_time = time.time()
                last_count = line_num
                last_match_count = match_count
                last_ais_msg_cnt = ais_msg_cnt

            if 'AIVDM' not in text:
                continue

            line_queue.put(text)

            while line_queue.qsize() > 0:
                line = line_queue.get(False)
                if len(line) < 15 or '!' != line[0]: continue # Try to go faster

                #print ('line:',line)
                try:
                    match = uscg_ais_nmea_regex.search(line).groupdict()
                    match_count += 1
                except AttributeError:
                    if 'AIVDM' in line:
                        print ('BAD_MATCH:',line)
                    continue

                norm_queue.put(match)

                while norm_queue.qsize()>0:

                    try:
                        result = norm_queue.get(False)
                    except Queue.Empty:
                        continue

                    if len(result['body']) < 10: continue
                    # TODO(schwehr): make sure we have all the critical messages

                    # FIX: add 9
                    if result['body'][0] not in ('1', '2', '3', '5', 'B', 'C', 'H') :
                        continue

                    try:
                         msg = ais.decode(result['body'])
                    except ais.DecodeError as e:
                        if 'not yet handled' in str(e):
                            continue
                        if ' not known' in str(e): continue

                        print ('BAD Decode:',result['body'][0])
                        print ('E:',Exception)
                        print ('e:',e)
                        continue

                    counters[msg['id']] += 1

                    if msg['id'] in (1,2,3,5,18,19,24): ais_msg_cnt += 1

                    if msg['id'] in (1,2,3,18,19):
                        msg['time_stamp'] = float(result['time_stamp'])
                        pos_cache.update(msg)
                        continue

                    if msg['id'] == 24:
                        vessel_names.update_partial(msg)

                    if msg['id'] in (5,19):
                        msg['name'] = msg['name'].strip(' @')
                        if len(msg['name']) == 0: continue # Skip blank names
                        vessel_names.update(msg)

    print ('match_count:',match_count)
    for key in counters:
        if counters[key] < 1: continue
        print ('%d: %d' % (key,counters[key]))
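
The filter on result['body'][0] above works because the first armored character of the payload encodes the message type. A short sketch of the standard 6-bit de-armoring (not part of the example) showing why '1'-'3', '5', 'B', 'C' and 'H' correspond to types 1-3, 5, 18, 19 and 24:

def message_type(first_char):
    """Decode the AIS message type from the first armored payload character."""
    value = ord(first_char) - 48
    if value > 40:
        value -= 8
    return value

# message_type('1') == 1, message_type('5') == 5,
# message_type('B') == 18, message_type('C') == 19, message_type('H') == 24
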
Example #44
0
def input(request, collection_label = None):
	if(len(request.body) == 0 or not aidvm_match.match(request.body)):
		return render(request, 'error.html')
	messages = request.body.splitlines(False)

	if(collection_label is not None):
		collection_label, created = CollectionLabel.objects.get_or_create(name=collection_label)

	for message in messages:
		split_message = message.split(',')
		sentence = split_message[5]
		fill_bits = int(split_message[-1].split('*')[0])


		if(int(split_message[1]) > 1): # fragmented message
			message_id = split_message[3]
			this_frag = int(split_message[2])
			total_frags = int(split_message[1])

			if(message_id not in fragment_buffer.keys()):
				fragment_buffer[message_id] = [0] * total_frags
			fragment_buffer[message_id][this_frag-1] = split_message
			if(this_frag == total_frags): # last fragment of a fragmented message
				if(len(fragment_buffer[message_id]) != total_frags): # missed a message
					# purge fragments and error out
					del fragment_buffer[message_id]
					return render(request, 'error.html')
				sentence = ''
				for i in range(0, total_frags):
					sentence += fragment_buffer[message_id][i][5]
				del fragment_buffer[message_id]
			else:
				return render(request, 'blank.html')
		
		# it should now be safe to assume the sentence is complete
		report_type = ord(sentence[0])-48
		if report_type > 48:
			report_type = report_type - 8
		# catch potential Type 24 bug
		if(report_type == 24 and len(sentence) == 27):
			sentence += '0'
		try:
			decoded = ais.decode(sentence, fill_bits)
			decoded['mmsi'] = format(decoded['mmsi'], '09')
			#if(report_type == 5 or report_type == 24):
			#	logger.error(decoded)
			contact, created = Contact.objects.get_or_create(mmsi=decoded['mmsi'])
			if(collection_label not in contact.collection_labels.all()):
				contact.collection_labels.add(collection_label)
				contact.save()
			report = Report.objects.create(sentence=sentence, fill_bits=fill_bits, contact=contact, report_type=report_type, collection_label=collection_label, decoded=decoded)
			
			# Publish to stream
			if(report_type in util.position_types):
				heading = report.decoded['true_heading']
				if(heading == 511 and report.decoded['cog'] > 0):
					heading = report.decoded['cog']
				message = RedisMessage(json.dumps({'type': 'position', 'collection_label': collection_label.name, 'mmsi': report.contact.mmsi, 'lat': report.decoded['y'], 'lng': report.decoded['x'], 'speed': report.decoded['sog'], 'heading': heading}))
				RedisPublisher(facility='jsonStream', broadcast=True).publish_message(message)
		except ais.DecodeError:
			# message could not be parsed. add a RawReport instead of Report
			logger.error("Decode error! Message: %s" % message)
			raw_report = RawReport(sentence=sentence, fill_bits=fill_bits, decode_error=True)
			raw_report.save()
	return render(request, 'blank.html')
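
Both this example (the len(sentence) == 27 branch) and the next one (if padding == 27: padding = 2) work around the same quirk: Type 24 Part A payloads often arrive as 27 characters, which libais rejects. A hedged sketch of that normalization (hypothetical helper that mirrors the appended-'0' workaround above, not a spec-perfect fix):

def pad_type24_part_a(payload, fill_bits):
    """Append a filler character to a 27-character Type 24 Part A payload.

    'H' is the armored character for message type 24; fill_bits is returned unchanged.
    """
    if payload.startswith('H') and len(payload) == 27:
        return payload + '0', fill_bits
    return payload, fill_bits
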
Example #45
0
            append_next = False

            # We need the whole payload to validate the decoder, so replace
            # the payload field of the shorter line with the reassembled
            # message.  Note this does mean that checksums will fail.
            deconstructed_line = line.split(',')
            deconstructed_line[6] = raw_message
            line = ','.join(deconstructed_line)

        # Workaround for message type 24: the padding calculated above comes
        # out as 27 for these messages, so force it to 2.
        if padding == 27:
            padding = 2

        try:
            data = ais.decode(raw_message, padding)  # Attempt the decode

            target_file = str(data['id']) + '.csv'

            if data['id'] <= 3:
                # Merge types 1,2,3 together - as they're the same for our
                # purposes
                target_file = '123.csv'

                # The output fields vary a bit, we only want the core AIS ones
                # or it messes up the csv
                unwanted = set(data.keys()) - set(m123keys)
                for unwanted_key in unwanted:
                    del data[unwanted_key]

                # In Scala we map the nav_status to a string description, so do
Example #46
0
    def put(self, line, line_num=None):
        """Add a line of NMEA or raw text to the queue."""

        if line_num is not None:
            self.line_num = line_num
        else:
            self.line_num += 1
            line_num = self.line_num

        line = line.rstrip()
        match = Parse(line)

        if not match:
            logger.info('No VDM match for line: %d, %s', line_num, line)
            msg = {'line_nums': [line_num], 'lines': [line]}
            decoded = nmea_messages.Decode(line)
            if decoded:
                msg['decoded'] = decoded
            else:
                logger.info('No NMEA match for line: %d, %s', line_num, line)
            Queue.Queue.put(self, msg)
            return

        sentence_total = int(match['sen_tot'])
        if sentence_total == 1:
            body = match['body']
            fill_bits = int(match['fill_bits'])
            try:
                decoded = ais.decode(body, fill_bits)
            except ais.DecodeError as error:
                logger.error('Unable to decode message: %s\n  %d %s', error,
                             line_num, line)
                return
            decoded['md5'] = hashlib.md5(body.encode('utf-8')).hexdigest()
            Queue.Queue.put(
                self, {
                    'line_nums': [line_num],
                    'lines': [line],
                    'decoded': decoded,
                    'matches': [match]
                })
            return

        sentence_num = int(match['sen_num'])
        group_id = int(match['seq_id'])

        if sentence_num == 1:
            if group_id in self.groups:
                logger.error(
                    'Incomplete message overwritten by new start.  '
                    'Dropped:\n  %s', self.groups[group_id])
            self.groups[group_id] = {
                'line_nums': [line_num],
                'lines': [line],
                'matches': [match]
            }
            return

        if group_id not in self.groups:
            logger.error(
                'Do not have the prior lines in group_id %d. '
                'Dropping: \n  %s', group_id, line)
            return

        entry = self.groups[group_id]
        if len(entry['lines']) != sentence_num - 1:
            logger.error('Out of sequence message.  Dropping: %d != %d \n %s',
                         len(entry['lines']), sentence_num - 1, line)
            return

        entry['lines'].append(line)
        entry['matches'].append(match)
        entry['line_nums'].append(line_num)

        if sentence_num != sentence_total:
            # Nothing more to do in the middle of a sequence of sentences.
            return

        body = ''.join([match['body'] for match in entry['matches']])
        fill_bits = int(entry['matches'][-1]['fill_bits'])
        try:
            decoded = ais.decode(body, fill_bits)
        except ais.DecodeError as error:
            logger.error('Unable to decode message: %s\n%s', error, entry)
            return
        decoded['md5'] = hashlib.md5(body.encode('utf-8')).hexdigest()
        entry['decoded'] = decoded

        # Found the final message in a group.
        Queue.Queue.put(self, entry)
        self.groups.pop(group_id)
Example #47
0
def parser(filepath):

    ##print(ais.decode('55NBjP01mtGIL@CW;SM<D60P5Ld000000000000P0`<3557l0<50@kk@K5h@00000000000', 2))
    ##
    ##print(ais.decode('15O<@N001sG:hgHL6F?m;T6D00S@', 0))
    ##
    ##print(ais.decode('D030p<QoDN?b<`N00A=O6D0', 0)

    ##print(ais.decode('15O<@N001sG:hgHL6F?m;T6D00S@', 0))
    ##print(ais.decode('8@30oni?0@=@Rh2531>3Boep75Cn7P4dh01@RhkeB9F00ode?UCJ604lh000', 0))
    ##print(ais.decode('D030p<QoDN?b<`N00A=O6D0', 2))

    #print(ais.decode('54eGUdT1r?uAH63OS7@M9DF0@E>0ThE>222222152hH7576B052@Ap3CkU3@AkVH888888034eGP`UP00JBhV`HhM=tbwvB0000',2))
    #print(ais.decode('54eGUdT1r?uAH63OS7@M9DF0@E>0ThE>222222152hH7576B052@Ap3CkU3@AkVH8888880',2))

    parsedData = {
        "MMSI": {
            "region": "Region",
            "stationLocation": "Station Location",
            "channel": "Channel",
            "date": "Date",
            "time": "Time",
            "imoNum": "IMO Number",
            "callsign": "Callsign",
            "vesselName": "Vessel Name",
            "cargoType": "CargoType",
            "dimBow": "Dimension Bow",
            "dimStern": "Dimension Stern",
            "dimPort": "Dimension Port",
            "dimStar": "Dimension Starbord",
            "draught": "Draught",
            "destination": "Destination"
        }
    }

    errorCount = 0
    time1 = time.time()
    with open('CCG_AIS_Log_2018-05-01.csv', newline='\n') as csvfile:
        csv_reader = csv.reader(csvfile, delimiter=",")
        count = 0
        MMSIArray = []
        for row in csv_reader:
            count += 1

            # if count == 10000:
            #     break
            try:
                #print(text)
                #print(len(row))
                if (len(row) < 8):
                    rowPos = 5
                    text = str(row[rowPos])
                    sentencePos = 1
                    channel = str(row[4])
                    stationLocation = "NA"
                    region = 'NA'

                else:
                    rowPos = 7
                    text = str(row[rowPos])
                    sentencePos = 3
                    tempRegion = (str(row[2]))
                    region = tempRegion[2]
                    stationLocation = tempRegion[tempRegion.rindex("-") +
                                                 1:tempRegion.rindex("*")]
                    channel = str(row[6])

                if (row[sentencePos] == "1"):
                    decodeTxt = ais.decode(text, 0)

                else:
                    for j in range(int(row[sentencePos]) - 1):
                        count += 1
                        row = csv_reader.__next__()
                        text += str(row[rowPos])
                        ##print(text)
                    decodeTxt = ais.decode(text, int(row[sentencePos]))

                ## STATIC INFO
                mmsi = decodeTxt.get('mmsi')
                year = decodeTxt.get('year')
                month = decodeTxt.get('month')
                day = decodeTxt.get('day')
                hour = decodeTxt.get('hour')
                minute = decodeTxt.get('minute')
                second = decodeTxt.get('second')
                imoNum = decodeTxt.get('imo_num')
                callsign = decodeTxt.get('callsign')
                vesselName = decodeTxt.get('name')
                typeCargo = decodeTxt.get('type_and_cargo')
                dimBow = decodeTxt.get('dim_a')
                dimStern = decodeTxt.get('dim_b')
                dimPort = decodeTxt.get('dim_c')
                dimStarbord = decodeTxt.get('dim_d')
                draught = decodeTxt.get('draught')
                destination = decodeTxt.get('destination')

                # with open('test.csv', mode='a', newline="") as test:
                #     test_writer = csv.writer(test, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
                #     test_writer.writerow([region, stationLocation, channel, year, month, day, hour, minute, second, mmsi,
                #                           imoNum, callsign, vesselName, typeCargo, dimBow, dimStern, dimPort, dimStarbord,
                #                           draught, destination])

                listKey = [
                    "region", "stationLocation", "channel", "date", "time",
                    "imoNum", "callsign", "vesselName", "cargoType", "dimBow",
                    "dimStern", "dimPort", "dimStar", "draught", "destination"
                ]

                dicKey = {
                    "region": region,
                    "stationLocation": stationLocation,
                    "channel": channel,
                    "date": (str(day) + "/" + str(month) + "/" + str(year)),
                    "time":
                    (str(hour) + ":" + str(minute) + ":" + str(second)),
                    "imoNum": imoNum,
                    "callsign": callsign,
                    "vesselName": vesselName,
                    "cargoType": typeCargo,
                    "dimBow": dimBow,
                    "dimStern": dimStern,
                    "dimPort": dimPort,
                    "dimStar": dimStarbord,
                    "draught": draught,
                    "destination": destination
                }

                try:
                    if mmsi not in parsedData.keys():

                        MMSIArray.append(mmsi)
                        parsedData[mmsi] = {
                            "region":
                            region,
                            "stationLocation":
                            stationLocation,
                            "channel":
                            channel,
                            "date":
                            (str(day) + "/" + str(month) + "/" + str(year)),
                            "time": (str(hour) + ":" + str(minute) + ":" +
                                     str(second)),
                            "imoNum":
                            imoNum,
                            "callsign":
                            callsign,
                            "vesselName":
                            vesselName,
                            "cargoType":
                            typeCargo,
                            "dimBow":
                            dimBow,
                            "dimStern":
                            dimStern,
                            "dimPort":
                            dimPort,
                            "dimStar":
                            dimStarbord,
                            "draught":
                            draught,
                            "destination":
                            destination
                        }
                    else:
                        for tempVar in listKey:
                            if (parsedData[mmsi][tempVar] == ""):
                                parsedData[mmsi][tempVar] = dicKey[tempVar]

                except:
                    print("F**k")

                # print(decodeTxt)

                ##print(str(mmsi))
            except:
                errorCount += 1
                ##print("error")
                ##print(count)
                ##print(text)
                ##print(len(row))
    with open('test.csv', mode='w', newline="") as test:
        test_writer = csv.writer(test,
                                 delimiter=',',
                                 quotechar='"',
                                 quoting=csv.QUOTE_MINIMAL)
        test_writer.writerow([
            "MMSI", parsedData["MMSI"]["region"],
            parsedData["MMSI"]["stationLocation"],
            parsedData["MMSI"]["channel"], parsedData["MMSI"]["date"],
            parsedData["MMSI"]["time"], parsedData["MMSI"]["imoNum"],
            parsedData["MMSI"]["callsign"], parsedData["MMSI"]["vesselName"],
            parsedData["MMSI"]["cargoType"], parsedData["MMSI"]["dimBow"],
            parsedData["MMSI"]["dimStern"], parsedData["MMSI"]["dimPort"],
            parsedData["MMSI"]["dimStar"], parsedData["MMSI"]["draught"],
            parsedData["MMSI"]["destination"]
        ])

    with open('test.csv', mode='a', newline="") as test:
        test_writer = csv.writer(test,
                                 delimiter=',',
                                 quotechar='"',
                                 quoting=csv.QUOTE_MINIMAL)

        for i in range(len(MMSIArray)):
            test_writer.writerow([
                MMSIArray[i], parsedData[MMSIArray[i]]["region"],
                parsedData[MMSIArray[i]]["stationLocation"],
                parsedData[MMSIArray[i]]["channel"],
                parsedData[MMSIArray[i]]["date"],
                parsedData[MMSIArray[i]]["time"],
                parsedData[MMSIArray[i]]["imoNum"],
                parsedData[MMSIArray[i]]["callsign"],
                parsedData[MMSIArray[i]]["vesselName"],
                parsedData[MMSIArray[i]]["cargoType"],
                parsedData[MMSIArray[i]]["dimBow"],
                parsedData[MMSIArray[i]]["dimStern"],
                parsedData[MMSIArray[i]]["dimPort"],
                parsedData[MMSIArray[i]]["dimStar"],
                parsedData[MMSIArray[i]]["draught"],
                parsedData[MMSIArray[i]]["destination"]
            ])
    time2 = time.time()
    timefinal = time2 - time1
    print("end ", timefinal)
    print(errorCount)
    print(count)
    print(errorCount / count)
Example #48
0
    global file_path_out
    global nr_lines_r
    global nr_lines_wr
    observation_list = ""

    file_in = file_path_in + filename
    file_out = file_path_out + filename
    with open(file_in, 'r') as f_in:
        with open(file_out, 'w') as f_out:
            for line in f_in:
                try:
                    # lines without time stamp
                    if line.startswith(split_lines):
                        # valid messages end with the value of 0
                        if line.split(split_message)[6][:1] == "0":
                            observation_list = str(ais.decode(line.split(split_message)[5],
                                                              int(line.split(split_message)[6][:1])))
                            nr_lines_r += 1
                    # lines with time stamp
                    else:
                        list1 = line.split(split_time)
                        try:
                            # valid messages end with the value of 0
                            if line.split(split_message)[6][:1] == "0":
                                observation_list = str(ais.decode(line.split(split_message)[5],
                                                                  int(line.split(split_message)[6][:1])))
                                nr_lines_r += 1
                        except ValueError:  # uncommon messages which start with time stamp
                            number_of_exceptions += 1
                # uncommon messages
                except:
                    number_of_exceptions += 1
Example #49
0
  def put(self, line, line_num=None):
    if line_num is not None:
      self.line_num = line_num
    else:
      self.line_num += 1

    line = line.rstrip()
    metadata_match = Parse(line)
    match = vdm.Parse(line)

    if not match:
      logger.info('not match')
      msg = {
          'line_nums': [self.line_num],
          'lines': [line],
      }
      if metadata_match:
        msg['match'] = metadata_match
      Queue.Queue.put(self, msg)
      return

    if not metadata_match:
      logger.info('not metadata match')
      self.unknown_queue.put(line)
      if not self.unknown_queue.empty():
        msg = Queue.Queue.get()
        self.put(msg)
      return

    match.update(metadata_match)

    if 'station' not in match:
      match['station'] = 'rUnknown'

    sentence_tot = int(match['sen_tot'])

    if sentence_tot == 1:
      body = match['body']
      fill_bits = int(match['fill_bits'])
      try:
        decoded = ais.decode(body, fill_bits)
      except ais.DecodeError as error:
        logger.error(
            'Unable to decode message: %s\n  %d %s', error, self.line_num, line)
        return
      decoded['md5'] = hashlib.md5(body.encode('utf-8')).hexdigest()
      Queue.Queue.put(self, {
          'line_nums': [self.line_num],
          'lines': [line],
          'decoded': decoded,
          'matches': [match]
      })
      return

    station = match['station'] or 'rUnknown'
    sentence_num = int(match['sen_num'])
    sequence_id = match['seq_id'] or ''
    group_id = station + str(sequence_id)
    time = util.MaybeToNumber(match['time'])

    if group_id not in self.groups:
      self.groups[group_id] = []

    if not self.groups[group_id]:
      if sentence_num != 1:
        # Drop a partial AIS message.
        return

    if sentence_num == 1:
      self.groups[group_id] = {
          'line_nums': [self.line_num],
          'lines': [line],
          'matches': [match],
          'times': [time],
      }
      return

    entry = self.groups[group_id]
    entry['line_nums'].append(self.line_num)
    entry['lines'].append(line)
    entry['matches'].append(match)
    entry['times'].append(time)

    if sentence_num != sentence_tot:
      # Found the middle part of a message.
      return

    decoded = DecodeMultiple(entry)

    if decoded:
      entry['decoded'] = decoded
    else:
      logger.info('Unable to process: %s', entry)
    Queue.Queue.put(self, entry)
    self.groups.pop(group_id)