def status_check():
    backup_db_proxy()
    try:
        conn = sqlite3.connect(DB_FILE_BACKUP, timeout=60)
        conn.row_factory = sqlite3.Row
        html = ""
        # Most recent uplink packet
        response = conn.execute(
            "SELECT time FROM packet WHERE direction='up' ORDER BY time DESC LIMIT 1"
        ).fetchone()
        if response:
            html += "Last up packet %f seconds ago <br>" % (time.time() - response["time"])
        else:
            html += "No uplink packets <br>"
        # Most recent downlink packet
        response = conn.execute(
            "SELECT time FROM packet WHERE direction='down' ORDER BY time DESC LIMIT 1"
        ).fetchone()
        if response:
            html += "Last down packet %f seconds ago <br>" % (time.time() - response["time"])
        else:
            html += "No downlink packets <br>"
        conn.close()
        return html
    except Exception:
        return "error"
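
# Illustrative return value of status_check() when both directions have seen
# traffic (the numbers are made up; the "%f" formatting and "<br>" separators
# come from the function above):
#
#   "Last up packet 12.345678 seconds ago <br>Last down packet 3.210000 seconds ago <br>"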
def remove_cache():
    backup_db_proxy()
    backup_db_pm()
    shutil.rmtree(CACHE_FOLDER)
    os.mkdir(CACHE_FOLDER)
    conn = sqlite3.connect(DB_FILE_CONTROLLER)
    conn.execute("UPDATE schedule SET Passed = null, Ready = 0")
    conn.commit()
    conn.close()
    backup_db_tc()
    return "ok"
def open_packet_last(count, items):
    backup_db_proxy()
    conn = sqlite3.connect(DB_FILE_BACKUP, timeout=60)
    conn.row_factory = sqlite3.Row
    packets_conn = conn.execute(
        'SELECT rowid,* FROM packet ORDER BY time DESC LIMIT (?)',
        (count, )).fetchall()
    conn.close()
    packets = []
    for packet in packets_conn:
        pkt = dict(packet)
        if pkt["json"]:
            pkt["json"] = json.loads(pkt["json"])
        packets.append(pkt)
    html = plot_items(packets, items, None)
    return html
def run_backup():
    lib_db.backup_db_proxy()
    lib_db.backup_db_tc()
def backup():
    backup_db_proxy()
    backup_db_tc()
    backup_db_pm()
    return "ok"
def get_all_packets(response):
    cached = False
    if response["UpdateTime"]:
        fileName = (CACHE_FOLDER + "/" + response["DevEui"] + "_" +
                    str(response["UpdateTime"]) + "_all_packets.html_xz")
        if os.path.exists(fileName):
            cached = True
    if not cached:
        conn = sqlite3.connect(DB_FILE_BACKUP, timeout=60)
        conn.row_factory = sqlite3.Row
        if not response["FinishTime"]:
            response["FinishTime"] = time.time()
        # Refresh the backup databases if they do not yet cover the test run
        max_time = conn.execute(
            "SELECT MAX(time) FROM packet").fetchone()["MAX(time)"]
        if not max_time or max_time < response["FinishTime"]:
            backup_db_tc()
            backup_db_proxy()
        dev_addrs = conn.execute(
            'SELECT DevAddr FROM session WHERE TestInstID=(?) and DevAddr is not null',
            (response["TestInstID"], )).fetchall()
        dev_addrs = [item["DevAddr"] for item in dev_addrs]
        packets_conn = conn.execute(
            'SELECT * FROM packet WHERE TestInstID=(?) ORDER BY time',
            (response["TestInstID"], )).fetchall()
        conn.close()
        packets = []
        last_up_time = 0
        for packet in packets_conn:
            pkt = dict(packet)
            if pkt["json"]:
                pkt["json"] = json.loads(pkt["json"])
            if "error" in pkt["json"]:
                # Packets whose decoded JSON carries an "error" field are kept only
                # when they match this device: join requests (MType "000") by DevEui,
                # everything else by DevAddr
                if pkt["json"]["MType"] in ["000"]:
                    if pkt["json"]["DevEui"] == response["DevEui"]:
                        packets.append(pkt)
                else:
                    if "DevAddr" in pkt["json"]:
                        if pkt["json"]["DevAddr"] in dev_addrs:
                            packets.append(pkt)
            else:
                if pkt["direction"] == "up":
                    if "Cat" in response and response["Cat"].lower() == "rf":
                        # RF testbench needs test information returned for validation
                        if pkt["stat"] == 1:
                            # dedup happens already, single stat==1 packet can be generated
                            packets.append(pkt)
                    else:
                        if pkt["time"] - last_up_time > deduplication_threshold and pkt["stat"] == 0:
                            packets.append(pkt)
                            last_up_time = pkt["time"]
                if pkt["stat"] == 1 and pkt["direction"] == "down":
                    packets.append(pkt)
        packets.sort(key=operator.itemgetter("time"))
        if response["UpdateTime"]:
            # Cache the filtered packet list so later calls can skip the DB work
            with lzma.open(fileName, "w") as f:
                f.write(json.dumps(packets).encode())
    else:
        with lzma.open(fileName, "r") as f:
            packets = json.loads(f.read().decode())
    return packets
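
# Sketch of how get_all_packets() might be driven, based only on the keys this
# function reads from the `response` row; the concrete values below are
# hypothetical and shown purely for illustration:
#
#   response = {
#       "DevEui": "0004A30B001C0530",     # device under test (hypothetical EUI)
#       "TestInstID": 42,                 # test instance whose packets are collected
#       "UpdateTime": 1600000000,         # enables the lzma cache file when truthy
#       "FinishTime": None,               # defaults to time.time() when empty
#       "Cat": "rf",                      # "rf" keeps only stat==1 uplinks
#   }
#   packets = get_all_packets(response)   # list of packet dicts sorted by time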