def get_alert_counts():
    """Return a list of {"term", "count"} dicts for every monitored alert term.

    Terms present in the alertStats table keep their stored counts; any
    monitored term missing from the table is reported with a count of 0.
    Returns an empty list only if there are no terms and no stored rows.
    """
    global alert_terms
    result_list = []
    session = None  # guard: db_session() may raise before assignment
    try:
        session = db_session()
        result = session.query(alertStats).order_by(alertStats.count)
        if result.count() > 0:
            result_list = [query_to_dict(d) for d in result]
            # Backfill zero-count entries for monitored terms not yet in the DB.
            for term in alert_terms:
                found = False
                for item in result_list:
                    if item["term"] == term:
                        found = True
                        break  # was `continue`; once matched there is nothing left to scan for
                if not found:
                    result_list.append({"term": term, "count": 0})
        else:
            # No stored stats at all: report every monitored term as zero.
            for term in alert_terms:
                result_list.append({"term": term, "count": 0})
    except Exception as e:
        acarshub_logging.acars_traceback(e, "database")
    finally:
        if session:
            session.close()
    return result_list
def prune_database():
    """Delete messages older than the configured retention windows.

    Removes rows from `messages` older than DB_SAVE_DAYS and rows from
    `messages_saved` (alert matches) older than DB_ALERT_SAVE_DAYS.
    """
    session = None  # guard: db_session() may raise before assignment
    try:
        acarshub_logging.log("Pruning database", "database")
        cutoff = (
            datetime.datetime.now()
            - datetime.timedelta(days=acarshub_configuration.DB_SAVE_DAYS)
        ).timestamp()
        session = db_session()
        result = session.query(messages).filter(messages.time < cutoff).delete()
        acarshub_logging.log("Pruned %s messages" % result, "database")
        session.commit()
        acarshub_logging.log("Pruning alert database", "database")
        cutoff = (
            datetime.datetime.now()
            - datetime.timedelta(days=acarshub_configuration.DB_ALERT_SAVE_DAYS)
        ).timestamp()
        result = (
            session.query(messages_saved).filter(messages_saved.time < cutoff).delete()
        )
        acarshub_logging.log("Pruned %s messages" % result, "database")
        session.commit()
    except Exception as e:
        acarshub_logging.acars_traceback(e, "database")
    finally:
        if session:
            session.close()
def get_freq_count():
    """Return message counts per frequency, sorted by type then count (desc).

    Each entry is {"freq_type", "freq", "count"} with freq_type/freq as strings.
    """
    freq_count = []
    found_freq = []
    session = None  # guard: db_session() may raise before assignment
    try:
        session = db_session()
        for item in session.query(messagesFreq).all():
            if item.freq not in found_freq:
                # Track seen frequencies so duplicates are skipped. Previously
                # nothing was ever appended here, making the guard above dead code.
                found_freq.append(item.freq)
                freq_count.append(
                    {
                        "freq_type": f"{item.freq_type}",
                        "freq": f"{item.freq}",
                        "count": item.count,
                    }
                )
    except Exception as e:
        acarshub_logging.acars_traceback(e, "database")
    finally:
        if session:
            session.close()
    if len(freq_count) == 0:
        return []
    return sorted(
        freq_count,
        reverse=True,
        key=lambda freq: (freq["freq_type"], freq["count"]),
    )
def get_errors():
    """Return message-count statistics for logged and non-logged messages.

    Returns a dict with keys: non_empty_total, non_empty_errors,
    empty_total, empty_errors. Missing counter rows default to 0.
    """
    count_total, count_errors, nonlogged_good, nonlogged_errors = 0, 0, 0, 0
    session = None  # guard: db_session() may raise before assignment
    try:
        session = db_session()
        count = session.query(messagesCount).first()
        nonlogged = session.query(messagesCountDropped).first()
        if count is not None:
            count_total = count.total
            count_errors = count.errors
        if nonlogged is not None:
            nonlogged_good = nonlogged.nonlogged_good
            nonlogged_errors = nonlogged.nonlogged_errors
    except Exception as e:
        acarshub_logging.acars_traceback(e, "database")
    finally:
        if session:
            session.close()
    return {
        "non_empty_total": count_total,
        "non_empty_errors": count_errors,
        "empty_total": nonlogged_good,
        "empty_errors": nonlogged_errors,
    }
def set_alert_terms(terms=None):
    """Replace the monitored alert-term list and sync the alertStats table.

    Adds a zero-count stats row for any new term, and removes stats and
    saved alert messages for terms that are no longer monitored.
    A `None` argument is a no-op.
    """
    if terms is None:
        return
    global alert_terms
    alert_terms = terms
    session = None  # guard: db_session() may raise before assignment
    try:
        session = db_session()
        # we need to do two things. First is to loop through all of the terms we
        # should be monitoring and make sure the db has them; next is to loop
        # through what is in the db and make sure it should still be there
        for item in terms:
            result = session.query(alertStats).filter(alertStats.term == item).count()
            if result == 0:
                session.add(alertStats(term=item, count=0))
        result = session.query(alertStats).all()
        for item in result:
            if item.term not in terms:
                session.query(alertStats).filter(alertStats.term == item.term).delete()
                session.query(messages_saved).filter(
                    messages_saved.term == item.term
                ).delete()
        session.commit()
    except Exception as e:
        acarshub_logging.acars_traceback(e, "database")
    finally:
        if session:
            session.close()
def show_all(page=0):
    """Return one 50-row page of messages, newest first within the page.

    Returns (rows, total_count); returns (None, 50) when the table is empty
    (callers rely on that sentinel, so it is preserved).
    """
    result = None
    processed_results = []
    count = 0
    session = None  # guard: db_session() may raise before assignment
    try:
        session = db_session()
        result = (
            session.query(messages)
            .order_by(messages.time.desc())
            .limit(50)
            .offset(page * 50)
        )
        count = session.query(messages).count()
        if count > 0:
            processed_results = [query_to_dict(d) for d in result]
            # Page was fetched newest-first; reverse so the page reads oldest-first.
            processed_results.reverse()
    except Exception as e:
        acarshub_logging.acars_traceback(e, "database")
    finally:
        if session:
            session.close()
    if count == 0:
        return (None, 50)
    return (processed_results, count)
def update_db(vdlm=0, acars=0, error=0):
    """Record one sample (ACARS, VDLM, combined total, errors) in the RRD file."""
    combined = vdlm + acars
    sample = f"N:{acars}:{vdlm}:{combined}:{error}"
    try:
        rrdtool.update("/run/acars/acarshub.rrd", sample)
        acarshub_logging.log(
            f"rrdtool.update: {sample}",
            "rrdtool",
            level=LOG_LEVEL["DEBUG"],
        )
    except Exception as e:
        acarshub_logging.acars_traceback(e, "rrdtool")
def reset_alert_counts():
    """Zero the match counter on every alertStats row."""
    session = None  # guard: db_session() may raise before assignment
    try:
        session = db_session()
        result = session.query(alertStats).all()
        for item in result:
            item.count = 0
        session.commit()
    except Exception as e:
        acarshub_logging.acars_traceback(e, "database")
    finally:
        if session:
            session.close()
def database_search(search_term, page=0):
    """Full-text search over messages via the messages_fts table.

    search_term is a dict of column -> value; empty/None values are skipped.
    Returns (rows, total_count) on success, [None, 0] when nothing matches,
    on empty criteria, or on error.
    """
    result = None
    session = None  # guard: db_session() may raise; previously an unbound
    # `session` in the except path raised NameError and masked the real error
    try:
        acarshub_logging.log(
            f"[database] Searching database for {search_term}",
            "database",
            level=LOG_LEVEL["DEBUG"],
        )
        # Build the quoted FTS MATCH expression, e.g. 'tail:"N123"* AND flight:"UAL1"*'
        match_string = ""
        for key in search_term:
            if search_term[key] is not None and search_term[key] != "":
                if match_string == "":
                    match_string += f'\'{key}:"{search_term[key]}"*'
                else:
                    match_string += f' AND {key}:"{search_term[key]}"*'
        if match_string == "":
            return [None, 0]
        match_string += "'"
        session = db_session()
        # NOTE(review): user-supplied values are interpolated straight into SQL.
        # The surrounding FTS double quotes limit what can break out, but this
        # should be parameterized (MATCH :match) when feasible.
        result = session.execute(
            f"SELECT * FROM messages WHERE id IN (SELECT rowid FROM messages_fts WHERE messages_fts MATCH {match_string} ORDER BY rowid DESC LIMIT 50 OFFSET {page * 50})"
        )
        count = session.execute(
            f"SELECT COUNT(*) FROM messages_fts WHERE messages_fts MATCH {match_string}"
        )
        processed_results = []
        final_count = 0
        for row in count:
            final_count = row[0]
        if final_count == 0:
            return [None, 0]
        for row in result:
            processed_results.append(dict(row))
        return (processed_results, final_count)
    except Exception as e:
        acarshub_logging.acars_traceback(e, "database")
        return [None, 0]
    finally:
        if session:
            session.close()
def grab_most_recent(count):
    """Return up to `count` most recent messages (highest id first) as dicts."""
    output = []
    session = None  # guard: db_session() may raise before assignment
    try:
        session = db_session()
        result = session.query(messages).order_by(desc("id")).limit(count)
        if result.count() > 0:
            output = [query_to_dict(d) for d in result]
    except Exception as e:
        acarshub_logging.acars_traceback(e, "database")
    finally:
        if session:
            session.close()
    return output
def reset_alert_counts(message, namespace):
    """Socket.IO handler: zero alert counters on request, then broadcast fresh counts."""
    if not message["reset_alerts"]:
        return
    acarshub_helpers.acarshub_database.reset_alert_counts()
    try:
        fresh_counts = acarshub_helpers.acarshub_database.get_alert_counts()
        socketio.emit(
            "alert_terms",
            {"data": fresh_counts},
            namespace="/main",
        )
    except Exception as e:
        acarshub_logging.log(f"Main Connect: Error sending alert_terms: {e}", "webapp")
        acarshub_logging.acars_traceback(e, "webapp")
def database_get_row_count():
    """Return (row_count, file_size_bytes) for the messages database.

    Either element may be None if the corresponding lookup failed.
    """
    result = None
    size = None
    session = None  # guard: db_session() may raise before assignment
    try:
        session = db_session()
        result = session.query(messages).count()
        try:
            # db_path is a SQLAlchemy URL; [10:] strips the leading
            # "sqlite:///" scheme to get the filesystem path — TODO confirm prefix
            size = os.path.getsize(db_path[10:])
        except Exception as e:
            acarshub_logging.acars_traceback(e, "database")
    except Exception as e:
        acarshub_logging.acars_traceback(e, "database")
    finally:
        if session:
            session.close()
    return (result, size)
def get_signal_levels():
    """Return all signal-level histogram rows ordered by level, as dicts."""
    output = []
    session = None  # guard: db_session() may raise before assignment
    try:
        session = db_session()
        result = session.query(messagesLevel).order_by(messagesLevel.level)
        if result.count() > 0:
            output = [query_to_dict(d) for d in result]
    except Exception as e:
        acarshub_logging.acars_traceback(e, "database")
    finally:
        if session:
            session.close()
    # Preserved original contract: always a list, possibly empty.
    return output if len(output) > 0 else []
def set_alert_ignore(terms=None):
    """Replace the alert ignore-term list in memory and in the database.

    The ignoreAlertTerms table is cleared and repopulated from `terms`.
    A `None` argument is a no-op.
    """
    if terms is None:
        return
    global alert_terms_ignore
    alert_terms_ignore = terms
    session = None  # guard: db_session() may raise before assignment
    try:
        session = db_session()
        session.query(ignoreAlertTerms).delete()
        for t in terms:
            session.add(ignoreAlertTerms(term=t))
        session.commit()
    except Exception as e:
        acarshub_logging.acars_traceback(e, "database")
    finally:
        if session:
            session.close()
def create_db():
    """Create the RRD statistics database if it does not exist yet.

    One sample per minute, with averaged archives at 1-min/5-min/1-hour/6-hour
    resolution. Logs "Database found" when the file already exists.
    """
    try:
        if not os.path.exists("/run/acars/acarshub.rrd"):
            acarshub_logging.log("creating the RRD Database", "rrdtool")
            rrdtool.create(
                "/run/acars/acarshub.rrd",
                "--start",
                "N",
                "--step",
                "60",
                "DS:ACARS:GAUGE:120:U:U",
                "DS:VDLM:GAUGE:120:U:U",
                "DS:TOTAL:GAUGE:120:U:U",
                "DS:ERROR:GAUGE:120:U:U",
                "RRA:AVERAGE:0.5:1:1500",  # 25 hours at 1 minute reso
                "RRA:AVERAGE:0.5:5:8640",  # 1 month at 5 minute reso
                "RRA:AVERAGE:0.5:60:4320",  # 6 months at 1 hour reso
                "RRA:AVERAGE:0.5:360:4380",  # 3 year at 6 hour reso
            )
        else:
            # `else` now belongs to the existence check; previously it was a
            # try/else, so "Database found" also logged right after creation.
            acarshub_logging.log("Database found", "rrdtool")
    except Exception as e:
        acarshub_logging.acars_traceback(e, "rrdtool")
def init():
    """Start-up sequence: preload recent messages, ensure the RRD DB, start listeners."""
    global list_of_recent_messages
    # grab recent messages from db and fill the most recent array
    # then turn on the listeners
    acarshub_logging.log("Grabbing most recent messages from database", "init")
    results = None  # guard: referenced below even when the DB query fails
    try:
        results = acarshub_helpers.acarshub_database.grab_most_recent(
            list_of_recent_messages_max
        )
    except Exception as e:
        acarshub_logging.log(
            f"Startup Error grabbing most recent messages {e}",
            "init",
            level=LOG_LEVEL["ERROR"],
        )
        acarshub_logging.acars_traceback(e, "init")
    if not acarshub_configuration.LOCAL_TEST:
        try:
            acarshub_logging.log("Initializing RRD Database", "init")
            # make sure the RRD DB is created / there
            acarshub_rrd_database.create_db()
        except Exception as e:
            acarshub_logging.log(f"Startup Error creating RRD Database {e}", "init")
            acarshub_logging.acars_traceback(e, "init")
    if results is not None:
        # Replay DB rows oldest-last into the in-memory recent-message cache.
        for json_message in results:
            try:
                que_type = getQueType(json_message["message_type"])
                client_message = generateClientMessage(que_type, json_message)
                list_of_recent_messages.insert(0, client_message)
            except Exception as e:
                acarshub_logging.log(
                    f"Startup Error adding message to recent messages {e}", "init"
                )
                acarshub_logging.acars_traceback(e, "init")
    acarshub_logging.log(
        "Completed grabbing messages from database, starting up rest of services",
        "init",
    )
    init_listeners()
for data in f: message_json = [] if data.count("}\n") == 1: message_json.append(data) elif data.count("}\n") == 0 and data.count("}{") == 0: message_json.append(data + "\n") elif data.count("}{") > 0: split_json = data.split("}{") count = 0 for j in split_json: if len(j) > 1: msg = j if not msg.startswith("{"): msg = "{" + msg if not count == len(split_json) - 1: msg = msg + "}" message_json.append(msg) count += 1 print(len(message_json)) for msg in message_json: try: vdlm2_message = format_acars_message(json.loads(msg)) except Exception as e: print(e) print(msg) print(vdlm2_message) except Exception as e: acarshub_logging.acars_traceback(e, "acars_formatter")
def update_keys(json_message):
    """Normalize a message dict (live or DB-sourced) in place for the frontend.

    Drops empty/None values, renames DB columns to frontend names
    (msg_text -> text, time -> timestamp), formats libacars output, derives
    hex ICAO addresses, resolves flight/ground-station/label lookups.
    """
    # Santiztize the message of any empty/None vales
    # This won't occur for live messages but if the message originates from a DB query
    # It will return all keys, even ones where the original message didn't have a value
    stale_keys = []
    for key in json_message:
        if not has_specified_key_not_none(json_message, key):
            stale_keys.append(key)
    # Delete after the scan: mutating while iterating would break the loop above.
    for key in stale_keys:
        del json_message[key]
    # Now we process individual keys, if that key is present
    # database tablename for the message text doesn't match up with typescript-decoder (needs it to be text)
    # so we rewrite the key
    if has_specified_key(json_message, "msg_text"):
        json_message["text"] = json_message["msg_text"]
        del json_message["msg_text"]
    if has_specified_key(json_message, "time"):
        json_message["timestamp"] = json_message["time"]
        del json_message["time"]
    if has_specified_key(json_message, "libacars"):
        json_message["libacars"] = libacars_formatted(json_message["libacars"])
    if has_specified_key(json_message, "icao"):
        # icao arrives as a decimal int; frontend wants upper-case hex.
        try:
            json_message["icao_hex"] = format(int(json_message["icao"]), "X")
        except Exception as e:
            acarshub_logging.log(
                f"Unable to convert icao to hex: {json_message['icao']}",
                "update_keys",
                LOG_LEVEL["WARNING"],
            )
            acarshub_logging.acars_traceback(e, "update_keys")
    # Flight lookup prefers having both callsign and hex; falls back to either alone.
    if has_specified_key(json_message, "flight") and has_specified_key(
        json_message, "icao_hex"
    ):
        json_message["flight"], json_message["icao_flight"] = flight_finder(
            callsign=json_message["flight"], hex_code=json_message["icao_hex"]
        )
    elif has_specified_key(json_message, "flight"):
        json_message["flight"], json_message["icao_flight"] = flight_finder(
            callsign=json_message["flight"], url=False
        )
    elif has_specified_key(json_message, "icao_hex"):
        json_message["icao_url"] = flight_finder(hex_code=json_message["icao_hex"])
    if has_specified_key(json_message, "toaddr"):
        # NOTE(review): unlike the icao conversion above, this int() is not wrapped
        # in try/except — a non-numeric toaddr would propagate. Confirm upstream
        # guarantees before relying on it.
        json_message["toaddr_hex"] = format(int(json_message["toaddr"]), "X")
        toaddr_icao, toaddr_name = acarshub_database.lookup_groundstation(
            json_message["toaddr_hex"]
        )
        if toaddr_icao is not None:
            json_message["toaddr_decoded"] = f"{toaddr_name} ({toaddr_icao})"
    if has_specified_key(json_message, "fromaddr"):
        json_message["fromaddr_hex"] = format(int(json_message["fromaddr"]), "X")
        fromaddr_icao, fromaddr_name = acarshub_database.lookup_groundstation(
            json_message["fromaddr_hex"]
        )
        if fromaddr_icao is not None:
            json_message["fromaddr_decoded"] = f"{fromaddr_name} ({fromaddr_icao})"
    if has_specified_key(json_message, "label"):
        label_type = acarshub_database.lookup_label(json_message["label"])
        if label_type is not None:
            json_message["label_type"] = label_type
        else:
            json_message["label_type"] = "Unknown Message Label"
def service_check():
    """Run the container health-check script and parse its output into globals.

    Populates `decoders`, `servers`, `receivers`, `stats`, `external_formats`
    with per-component "Ok"/"Bad"/"Unknown" statuses and sets `system_error`
    when anything is unhealthy.
    """
    import re

    global decoders
    global servers
    global receivers
    global system_error
    global stats
    global start_time
    global external_formats
    if os.getenv("LOCAL_TEST", default=False):
        healthcheck = subprocess.Popen(
            ["../../tools/healthtest.sh"],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
    else:
        healthcheck = subprocess.Popen(
            ["/scripts/healthcheck.sh"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
        )
    stdout, stderr = healthcheck.communicate()
    healthstatus = stdout.decode()
    decoders = dict()
    servers = dict()
    receivers = dict()
    stats = dict()
    external_formats = dict()
    system_error = False
    for line in healthstatus.split("\n"):
        try:
            # Decoder name registration lines, e.g. "acarsdec-xxx ="
            match = re.search("(?:acarsdec|dumpvdl2)-.+ =", line)
            if match:
                if match.group(0).strip(" =") not in decoders:
                    decoders[match.group(0).strip(" =")] = dict()
                continue
            else:
                for decoder in decoders:
                    if line.find(decoder) != -1:
                        # Fixed: first branch used a bare truthiness test on
                        # find() (-1 is truthy), inverting the match; all three
                        # branches now consistently require the prefix at pos 0.
                        if line.find(f"Decoder {decoder}") == 0 and line.endswith(
                            "UNHEALTHY"
                        ):
                            decoders[decoder]["Status"] = "Bad"
                            system_error = True
                        elif line.find(f"Decoder {decoder}") == 0 and line.endswith(
                            "HEALTHY"
                        ):
                            decoders[decoder]["Status"] = "Ok"
                        elif line.find(f"Decoder {decoder}") == 0:
                            system_error = True
                            decoders[decoder]["Status"] = "Unknown"
                        continue
            match = re.search("^(?:acars|vdlm2)_server", line)
            if match:
                if match.group(0) not in servers:
                    servers[match.group(0)] = dict()
                if line.find("listening") != -1 and line.endswith("UNHEALTHY"):
                    servers[match.group(0)]["Status"] = "Bad"
                    system_error = True
                elif line.find("listening") != -1 and line.endswith("HEALTHY"):
                    servers[match.group(0)]["Status"] = "Ok"
                elif line.find("listening") != -1:
                    system_error = True
                    servers[match.group(0)]["Status"] = "Unknown"
                elif line.find("python") != -1 and line.endswith("UNHEALTHY"):
                    system_error = True
                    servers[match.group(0)]["Web"] = "Bad"
                elif line.find("python") != -1 and line.endswith("HEALTHY"):
                    servers[match.group(0)]["Web"] = "Ok"
                elif line.find("python") != -1:
                    system_error = True
                    servers[match.group(0)]["Web"] = "Unknown"
                continue
            match = re.search("\\d+\\s+(?:ACARS|VDLM2) messages", line)
            if match:
                if line.find("ACARS") != -1 and "ACARS" not in receivers:
                    receivers["ACARS"] = dict()
                    receivers["ACARS"]["Count"] = line.split(" ")[0]
                    if line.endswith("UNHEALTHY"):
                        # Grace period: no messages in the first 5 minutes is normal.
                        if time.time() - start_time > 300.0:
                            system_error = True
                            receivers["ACARS"]["Status"] = "Bad"
                        else:
                            receivers["ACARS"]["Status"] = "Waiting for first message"
                    elif line.endswith("HEALTHY"):
                        receivers["ACARS"]["Status"] = "Ok"
                    else:
                        system_error = True
                        receivers["ACARS"]["Status"] = "Unknown"
                if line.find("VDLM2") != -1 and "VDLM2" not in receivers:
                    receivers["VDLM2"] = dict()
                    receivers["VDLM2"]["Count"] = line.split(" ")[0]
                    if line.endswith("UNHEALTHY"):
                        if time.time() - start_time > 300.0:
                            system_error = True
                            receivers["VDLM2"]["Status"] = "Bad"
                        else:
                            receivers["VDLM2"]["Status"] = "Waiting for first message"
                    elif line.endswith("HEALTHY"):
                        receivers["VDLM2"]["Status"] = "Ok"
                    else:
                        system_error = True
                        receivers["VDLM2"]["Status"] = "Unknown"
                continue
            match = re.search("^(acars|vdlm2)_stats", line)
            if match:
                if match.group(0) not in stats:
                    stats[match.group(0)] = dict()
                if line.endswith("UNHEALTHY"):
                    system_error = True
                    stats[match.group(0)]["Status"] = "Bad"
                elif line.endswith("HEALTHY"):
                    stats[match.group(0)]["Status"] = "Ok"
                else:
                    system_error = True
                    stats[match.group(0)]["Status"] = "Unknown"
            match = re.search("^planeplotter", line)
            if match:
                if line.find("vdl2") != -1:
                    pp_decoder = "VDLM2"
                else:
                    pp_decoder = "ACARS"
                if pp_decoder not in external_formats:
                    external_formats[pp_decoder] = []
                if line.endswith("UNHEALTHY"):
                    system_error = True
                    external_formats[pp_decoder].append(
                        {"type": "planeplotter", "Status": "Bad"}
                    )
                elif line.endswith("HEALTHY"):
                    external_formats[pp_decoder].append(
                        {"type": "planeplotter", "Status": "Ok"}
                    )
                else:
                    system_error = True
                    external_formats[pp_decoder].append(
                        {"type": "planeplotter", "Status": "Unknown"}
                    )
            match = re.search("dumpvdl2 and planeplotter", line)
            if match:
                if line.find("vdl2") != -1:
                    pp_decoder = "VDLM2"
                else:
                    pp_decoder = "ACARS"
                if pp_decoder not in external_formats:
                    external_formats[pp_decoder] = []
                if line.endswith("UNHEALTHY"):
                    system_error = True
                    external_formats[pp_decoder].append(
                        {"type": "dumpvdl2 to planeplotter", "Status": "Bad"}
                    )
                elif line.endswith("HEALTHY"):
                    external_formats[pp_decoder].append(
                        {"type": "dumpvdl2 to planeplotter", "Status": "Ok"}
                    )
                else:
                    system_error = True
                    external_formats[pp_decoder].append(
                        {"type": "dumpvdl2 to planeplotter", "Status": "Unknown"}
                    )
        except Exception as e:
            acarshub_logging.log(e, "service_check", level=LOG_LEVEL["ERROR"])
            acarshub_logging.acars_traceback(e)
def default_error_handler(e):
    """Catch-all Socket.IO error handler: log the traceback under "server"."""
    acarshub_logging.acars_traceback(e, "server")
def error_handler_main(e):
    """Error handler for the /main namespace: log the traceback under "server-main"."""
    acarshub_logging.acars_traceback(e, "server-main")
def error_handler(e):
    """Generic namespace error handler: log the traceback under "server-error"."""
    acarshub_logging.acars_traceback(e, "server-error")
def create_db_safe_params(message_from_json):
    """Map a decoded ACARS/VDLM JSON message onto the DB column dict.

    Returns a dict with every messages-table column present; columns absent
    from the input keep their defaults ("" for most, 0 for error). Unknown
    keys are logged at DEBUG so new decoder fields get noticed.
    """
    params = {
        "time": "",
        "station_id": "",
        "toaddr": "",
        "fromaddr": "",
        "depa": "",
        "dsta": "",
        "eta": "",
        "gtout": "",
        "gtin": "",
        "wloff": "",
        "wlin": "",
        "lat": "",
        "lon": "",
        "alt": "",
        "text": "",
        "tail": "",
        "flight": "",
        "icao": "",
        "freq": "",
        "ack": "",
        "mode": "",
        "label": "",
        "block_id": "",
        "msgno": "",
        "is_response": "",
        "is_onground": "",
        "error": 0,
        "libacars": "",
        "level": "",
    }
    # Keys copied verbatim into the column of the same name.
    direct_copy = {
        "station_id", "toaddr", "fromaddr", "depa", "dsta", "eta", "gtout",
        "gtin", "wloff", "wlin", "lat", "lon", "alt", "text", "tail",
        "flight", "icao", "ack", "mode", "label", "block_id", "msgno",
        "is_response", "is_onground", "error", "level",
    }
    for index, value in message_from_json.items():
        if index in direct_copy:
            params[index] = value
        elif index == "timestamp":
            params["time"] = value
        elif index == "data":
            # acarsdec uses "data" for the message body; store it as text.
            params["text"] = value
        elif index == "freq":
            # normalizing frequency to 7 decimal places
            params["freq"] = str(value).ljust(7, "0")
        elif index == "libacars":
            try:
                params["libacars"] = json.dumps(value)
            except Exception as e:
                acarshub_logging.acars_traceback(e, "database")
        # skip these
        elif index == "channel":
            pass
        elif index == "end":
            pass
        # FIXME: acarsdec now appears to support message reassembly?
        # https://github.com/TLeconte/acarsdec/commit/b2d0a4c27c6092a1c38943da48319a3406db74f2
        # do we need to do anything here for reassembled messages?
        elif index == "assstat":
            acarshub_logging.log(
                f"assstat key: {index}: {value}", "database", level=LOG_LEVEL["DEBUG"]
            )
            acarshub_logging.log(message_from_json, "database", level=LOG_LEVEL["DEBUG"])
        # We have a key that we aren't saving the database. Log it
        else:
            acarshub_logging.log(
                f"Unidentified key: {index}: {value}",
                "database",
                level=LOG_LEVEL["DEBUG"],
            )
            acarshub_logging.log(message_from_json, "database", level=LOG_LEVEL["DEBUG"])
    return params
def main_connect():
    """Socket.IO connect handler for /main: push initial state to the new client.

    Emits (in order) feature flags, alert/ignore terms, message labels, the
    recent-message backlog, system status, DB row count/size, signal levels,
    alert counts and the version, then ensures the HTML generator thread runs.
    Each emit group has its own try/except so one failure does not block the rest.
    """
    pt = time.time()  # wall-clock start, for the duration log at the end
    import sys

    # need visibility of the global thread object
    global thread_html_generator
    global thread_adsb
    global thread_adsb_stop_event
    # Flags sent with each backlog message; done_loading flips on the last one.
    recent_options = {"loading": True, "done_loading": False}
    requester = request.sid
    try:
        socketio.emit(
            "features_enabled",
            {
                "vdlm": acarshub_configuration.ENABLE_VDLM,
                "acars": acarshub_configuration.ENABLE_ACARS,
                "arch": acarshub_configuration.ARCH,
                "adsb": {
                    "enabled": acarshub_configuration.ENABLE_ADSB,
                    "lat": acarshub_configuration.ADSB_LAT,
                    "lon": acarshub_configuration.ADSB_LON,
                    "url": acarshub_configuration.ADSB_URL,
                    "bypass": acarshub_configuration.ADSB_BYPASS_URL,
                    "range_rings": acarshub_configuration.ENABLE_RANGE_RINGS,
                },
            },
            to=requester,
            namespace="/main",
        )
        socketio.emit(
            "terms",
            {
                "terms": acarshub_helpers.acarshub_database.get_alert_terms(),
                "ignore": acarshub_helpers.acarshub_database.get_alert_ignore(),
            },
            to=requester,
            namespace="/main",
        )
    except Exception as e:
        acarshub_logging.log(f"Main Connect: Error sending features_enabled: {e}", "webapp")
        acarshub_logging.acars_traceback(e, "webapp")
    try:
        socketio.emit(
            "labels",
            {"labels": acarshub_helpers.acarshub_database.get_message_label_json()},
            to=requester,
            namespace="/main",
        )
    except Exception as e:
        acarshub_logging.log(f"Main Connect: Error sending labels: {e}", "webapp")
        acarshub_logging.acars_traceback(e, "webapp")
    # Replay the in-memory backlog; mark the final message so the client
    # knows the initial load is complete.
    msg_index = 1
    for json_message in list_of_recent_messages:
        if msg_index == len(list_of_recent_messages):
            recent_options["done_loading"] = True
        msg_index += 1
        try:
            socketio.emit(
                "acars_msg",
                {
                    "msghtml": json_message,
                    **recent_options,
                },
                to=requester,
                namespace="/main",
            )
        except Exception as e:
            acarshub_logging.log(f"Main Connect: Error sending acars_msg: {e}", "webapp")
            acarshub_logging.acars_traceback(e, "webapp")
    try:
        socketio.emit(
            "system_status",
            {"status": acarshub_helpers.get_service_status()},
            to=requester,
            namespace="/main",
        )
    except Exception as e:
        acarshub_logging.log(f"Main Connect: Error sending system_status: {e}", "webapp")
        acarshub_logging.acars_traceback(e, "webapp")
    try:
        # Row count / DB size are expensive; cached for 30 seconds.
        rows, size = get_cached(
            acarshub_helpers.acarshub_database.database_get_row_count, 30
        )
        socketio.emit(
            "database",
            {"count": rows, "size": size},
            to=requester,
            namespace="/main",
        )
    except Exception as e:
        acarshub_logging.log(f"Main Connect: Error sending database: {e}", "webapp")
        acarshub_logging.acars_traceback(e, "webapp")
    try:
        socketio.emit(
            "signal",
            {
                "levels": get_cached(
                    acarshub_helpers.acarshub_database.get_signal_levels, 30
                )
            },
            to=requester,
            namespace="/main",
        )
        socketio.emit(
            "alert_terms",
            {
                "data": get_cached(
                    acarshub_helpers.acarshub_database.get_alert_counts, 30
                )
            },
            to=requester,
            namespace="/main",
        )
        send_version()
    except Exception as e:
        acarshub_logging.log(f"Main Connect: Error sending signal levels: {e}", "webapp")
        acarshub_logging.acars_traceback(e, "webapp")
    # Start the htmlGenerator thread only if the thread has not been started before.
    if not thread_html_generator.is_alive():
        sys.stdout.flush()
        thread_html_generator_event.clear()
        thread_html_generator = socketio.start_background_task(htmlListener)
    pt = time.time() - pt
    acarshub_logging.log(
        f"main_connect took {pt * 1000:.0f}ms", "htmlListener", level=LOG_LEVEL["DEBUG"]
    )
def message_listener(message_type=None, ip="127.0.0.1", port=None):
    """Background loop: stream JSON messages from a decoder TCP feed.

    Connects (and reconnects) to ip:port, splits the byte stream into JSON
    objects, reassembles objects split across reads, and pushes each decoded
    message onto the processing/database queues and the recent-message cache.
    Runs until thread_message_listener_stop_event is set.
    message_type is expected to be "VDLM2" or "ACARS" — TODO confirm other callers.
    """
    import time
    import socket
    import json

    global error_messages_last_minute
    if message_type == "VDLM2":
        global vdlm_messages_last_minute
    elif message_type == "ACARS":
        global acars_messages_last_minute
    disconnected = True
    receiver = socket.socket(family=socket.AF_INET, type=socket.SOCK_STREAM)
    acarshub_logging.log(
        f"message_listener starting: {message_type.lower()}",
        "message_listener",
        level=LOG_LEVEL["DEBUG"],
    )
    # Holds a trailing JSON fragment from the previous read, if any.
    partial_message = None
    # Run while requested...
    while not thread_message_listener_stop_event.isSet():
        data = None
        # acarshub_logging.log(f"recv_from ...", "message_listener", level=LOG_LEVEL["DEBUG"])
        try:
            if disconnected:
                # Fresh socket on every reconnect; the old one was closed.
                receiver = socket.socket(family=socket.AF_INET, type=socket.SOCK_STREAM)
                # Set socket timeout 1 seconds
                receiver.settimeout(1)
                # Connect to the sender
                receiver.connect((ip, port))
                disconnected = False
                acarshub_logging.log(
                    f"{message_type.lower()}_receiver connected to {ip}:{port}",
                    f"{message_type.lower()}Generator",
                    level=LOG_LEVEL["DEBUG"],
                )
            if acarshub_configuration.LOCAL_TEST is True:
                data, addr = receiver.recvfrom(65527)
            else:
                data, addr = receiver.recvfrom(65527, socket.MSG_WAITALL)
        except socket.timeout:
            # Expected once per second when idle; just poll the stop event again.
            continue
        except socket.error as e:
            acarshub_logging.log(
                f"Error to {ip}:{port}. Reattempting...",
                f"{message_type.lower()}Generator",
                level=LOG_LEVEL["ERROR"],
            )
            acarshub_logging.acars_traceback(e, f"{message_type.lower()}Generator")
            disconnected = True
            receiver.close()
            time.sleep(1)
            continue
        except Exception as e:
            acarshub_logging.acars_traceback(e, f"{message_type.lower()}Generator")
            disconnected = True
            receiver.close()
            time.sleep(1)
            continue
        # acarshub_logging.log(f"{message_type.lower()}: got data", "message_listener", level=LOG_LEVEL["DEBUG"])
        if data is not None:
            decoded = data.decode()
        else:
            decoded = ""
        if decoded == "":
            # Empty read means the peer closed the connection; reconnect.
            disconnected = True
            receiver.close()
            continue
        # Decode json
        # There is a rare condition where we'll receive two messages at once
        # We will cover this condition off by ensuring each json message is
        # broken apart and handled individually
        # acarsdec or vdlm2dec single message ends with a newline so no additional processing required
        # acarsdec or vdlm2dec multi messages ends with a newline and each message has a newline but the decoder
        # breaks with more than one JSON object
        # in case of back to back objects, add a newline to split on
        decoded = decoded.replace("}{", "}\n{")
        # split on newlines
        split_json = decoded.splitlines()
        # try and reassemble messages that were received separately
        if partial_message is not None and len(split_json) > 0:
            combined = partial_message + split_json[0]
            try:
                # check if we can decode the json
                json.loads(combined)
                # no exception, json decoded fine, reassembly succeeded
                # replace the first string in the list with the reassembled string
                split_json[0] = combined
                acarshub_logging.log(
                    "Reassembly successful, message not skipped after all!",
                    f"{message_type.lower()}Generator",
                    1,
                )
            except Exception as e:
                # reassembly didn't work, don't do anything but print an error when debug is enabled
                acarshub_logging.log(
                    f"Reassembly failed {e}: {combined}",
                    f"{message_type.lower()}Generator",
                    level=LOG_LEVEL["DEBUG"],
                )
        # forget the partial message, it can't be useful anymore
        partial_message = None
        for part in split_json:
            # acarshub_logging.log(f"{message_type.lower()}: part: {part}", "message_listener", level=LOG_LEVEL["DEBUG"])
            if len(part) == 0:
                continue
            msg = None
            try:
                msg = json.loads(part)
            except ValueError as e:
                if part == split_json[-1]:
                    # last element in the list, could be a partial json object
                    partial_message = part
                acarshub_logging.log(
                    f"JSON Error: {e}", f"{message_type.lower()}Generator", 1
                )
                acarshub_logging.log(
                    f"Skipping Message: {part}", f"{message_type.lower()}Generator", 1
                )
                continue
            except Exception as e:
                acarshub_logging.log(
                    f"Unknown Error with JSON input: {e}",
                    f"{message_type.lower()}Generator",
                    level=LOG_LEVEL["ERROR"],
                )
                acarshub_logging.acars_traceback(e, f"{message_type.lower()}Generator")
                continue
            que_type = getQueType(message_type)
            if message_type == "VDLM2":
                vdlm_messages_last_minute += 1
            elif message_type == "ACARS":
                acars_messages_last_minute += 1
            if "error" in msg:
                if msg["error"] > 0:
                    error_messages_last_minute += msg["error"]
            # NOTE(review): format_acars_message is called three times per message
            # here; each queue gets its own formatted copy — presumably deliberate
            # so consumers never share a mutable object. Confirm before hoisting.
            que_messages.append((que_type, acars_formatter.format_acars_message(msg)))
            que_database.append((que_type, acars_formatter.format_acars_message(msg)))
            if len(list_of_recent_messages) >= list_of_recent_messages_max:
                # Keep the que size down
                del list_of_recent_messages[0]
            if not acarshub_configuration.QUIET_MESSAGES:
                print(f"MESSAGE:{message_type.lower()}Generator: {msg}")
            client_message = generateClientMessage(
                que_type, acars_formatter.format_acars_message(msg)
            )
            # add to recent message que for anyone fresh loading the page
            list_of_recent_messages.append(client_message)
def add_message(params, message_type, message_from_json, backup=False):
    """Persist one message plus its derived statistics.

    Writes the message row (always when DB_SAVEALL, otherwise only when
    non-empty), updates total/error counters, the per-level histogram, and
    alert-term match counts; matching messages are also stored in
    messages_saved. `backup=True` uses the backup DB session.
    """
    global database
    global alert_terms
    session = None  # guard: session factory may raise before assignment
    try:
        if backup:
            session = db_session_backup()
        else:
            session = db_session()
        update_frequencies(params["freq"], message_type, session)
        if acarshub_configuration.DB_SAVEALL or is_message_not_empty(message_from_json):
            # write the message
            session.add(messages(message_type=message_type, **params))
        # Now lets decide where to log the message count to
        # First we'll see if the message is not blank
        if is_message_not_empty(message_from_json):
            count = session.query(messagesCount).first()
            if count is not None:
                count.total += 1
                if params["error"] > 0:
                    count.errors += 1
                else:
                    count.good += 1
            else:
                session.add(
                    messagesCount(
                        total=1,
                        good=0 if params["error"] > 0 else 1,
                        errors=1 if params["error"] > 0 else 0,
                    )
                )
        else:
            count = session.query(messagesCountDropped).first()
            if count is not None:
                if params["error"] > 0:
                    count.nonlogged_errors += 1
                else:
                    count.nonlogged_good += 1
            else:
                session.add(
                    messagesCountDropped(
                        nonlogged_good=1 if params["error"] == 0 else 0,
                        nonlogged_errors=1 if params["error"] > 0 else 0,
                    )
                )
        # Log the level count
        # We'll see if the level is in the database already, and if so, increment the counter
        # If not, we'll add it in
        found_level = (
            session.query(messagesLevel)
            .filter(messagesLevel.level == params["level"])
            .first()
        )
        if found_level is not None:
            found_level.count += 1
        else:
            session.add(messagesLevel(level=params["level"], count=1))
        if len(params["text"]) > 0 and alert_terms:
            for search_term in alert_terms:
                # NOTE(review): terms are interpolated into the regex unescaped;
                # a term containing regex metacharacters would misbehave —
                # consider re.escape() if terms are user-supplied.
                if re.findall(r"\b{}\b".format(search_term), params["text"]):
                    should_add = True
                    for ignore_term in alert_terms_ignore:
                        if re.findall(r"\b{}\b".format(ignore_term), params["text"]):
                            should_add = False
                            break
                    if should_add:
                        found_term = (
                            session.query(alertStats)
                            .filter(alertStats.term == search_term.upper())
                            .first()
                        )
                        if found_term is not None:
                            found_term.count += 1
                        else:
                            session.add(alertStats(term=search_term.upper(), count=1))
                        session.add(
                            messages_saved(
                                message_type=message_type,
                                **params,
                                term=search_term.upper(),
                                type_of_match="text",
                            )
                        )
                        session.commit()
        # commit the db change and close the session
        session.commit()
    except Exception as e:
        acarshub_logging.acars_traceback(e, "database")
    finally:
        if session:
            session.close()
def search_alerts(icao=None, tail=None, flight=None):
    """Return up to 50 recent messages matching alert criteria, or None.

    icao/tail/flight are iterables of values OR'd into an FTS query; results
    are merged (UNION) with the stored messages_saved alert matches when
    alert_terms is set. Returns None when there is nothing to search for,
    nothing matches, or an error occurs.
    """
    result = None
    global alert_terms
    if (
        icao is None
        and tail is None
        and flight is None
        and alert_terms is None
    ):
        return None
    session = None  # guard: db_session() may raise before assignment
    try:
        session = db_session()
        search_term = {
            "icao": icao,
            # "msg_text": alert_terms,
            "flight": flight,
            "tail": tail,
        }
        query_string = ""
        for key in search_term:
            if search_term[key] is not None and search_term[key] != "":
                # assumes each value is a list of terms — TODO confirm callers
                for term in search_term[key]:
                    if query_string == "":
                        query_string += f'{key}:"{term}"*'
                    else:
                        query_string += f' OR {key}:"{term}"*'
        if query_string != "":
            query_string = f"SELECT * FROM messages WHERE id IN (SELECT rowid FROM messages_fts WHERE messages_fts MATCH '{query_string}')"
        if alert_terms is not None:
            terms_string = """SELECT id, message_type, msg_time, station_id, toaddr, fromaddr, depa, dsta, eta, gtout, gtin, wloff, wlin, lat, lon, alt, msg_text, tail, flight, icao, freq, ack, mode, label, block_id, msgno, is_response, is_onground, error, libacars, level FROM messages_saved"""
        else:
            terms_string = ""
        if query_string != "" and terms_string != "":
            joiner = " UNION "
        else:
            joiner = ""
        if query_string != "" or terms_string != "":
            result = session.execute(
                f"{query_string}{joiner}{terms_string} ORDER BY msg_time DESC LIMIT 50 OFFSET 0"
            )
        else:
            acarshub_logging.log("SKipping alert search", "database")
            return None
        processed_results = []
        for row in result:
            processed_results.insert(0, dict(row))
        if len(processed_results) == 0:
            return None
        # insert(0, ...) above already reversed the rows; this restores query order.
        processed_results.reverse()
        return processed_results
    except Exception as e:
        acarshub_logging.acars_traceback(e, "database")
        return None
    finally:
        # Previously the session leaked on the exception and empty-result paths.
        if session:
            session.close()
and str(os.getenv("AUTO_VACUUM")).upper() == "TRUE"): acarshub_logging.log("Reclaiming disk space", "db_upgrade", level=LOG_LEVEL["INFO"]) cur.execute("PRAGMA auto_vacuum = '0';") cur.execute("VACUUM;") conn.commit() if upgraded: acarshub_logging.log( "Completed upgrading database structure", "db_upgrade", level=LOG_LEVEL["INFO"], ) acarshub_logging.log( "Database structure did not require upgrades", "db_upgrade", level=LOG_LEVEL["INFO"], ) except Exception as e: acarshub_logging.acars_traceback(e, "db_upgrade", level=LOG_LEVEL["ERROR"]) exit_code = 1 finally: if conn: conn.close() sys.exit(exit_code)
# Load ground-station metadata from the bundled JSON file into the
# module-level groundStations dict, keyed by numeric station id.
try:
    acarshub_logging.log("Downloading Station IDs", "database")
    with open("./data/ground-stations.json", "r") as f:
        groundStations_json = json.load(f)
    for station in groundStations_json["ground_stations"]:
        stationId = station.get("id")
        if stationId:
            groundStations[stationId] = {
                "icao": station["airport"]["icao"],
                "name": station["airport"]["name"],
            }
    acarshub_logging.log("Completed loading Station IDs", "database")
except Exception as e:
    acarshub_logging.acars_traceback(e, "database")

# Load Message Labels
try:
    acarshub_logging.log("Downloading message labels", "database")
    with open("./data/metadata.json", "r") as f:
        message_labels = json.load(f)
    acarshub_logging.log("Completed loading message labels", "database")
except Exception as e:
    # handle URL exception — fall back to an empty label map so lookups still work
    message_labels = {"labels": {}}
    acarshub_logging.acars_traceback(e, "database")

# DB PATH MUST BE FROM ROOT!
# default database
db_path = acarshub_configuration.ACARSHUB_DB