def test_influx_connect(self, importlib_mock):
    """connect() builds an InfluxDBClient and switches to the right database."""
    backend = db.InfluxDBBackend()
    backend._backend = mock.MagicMock()

    # With no argument, connect() selects the default 'ait' database.
    backend.connect()
    assert backend._backend.InfluxDBClient.called
    backend._backend.InfluxDBClient.assert_called_with(
        'localhost', 8086, 'root', 'root')
    assert backend._conn.switch_database.called
    backend._conn.switch_database.assert_called_with('ait')

    backend._backend.reset_mock()

    # An explicit database name is passed through to switch_database().
    backend.connect(database='foo')
    assert backend._backend.InfluxDBClient.called
    backend._backend.InfluxDBClient.assert_called_with(
        'localhost', 8086, 'root', 'root')
    assert backend._conn.switch_database.called
    backend._conn.switch_database.assert_called_with('foo')
def test_influx_create(self, importlib_mock):
    """Smoke test that create() is callable on a mocked backend.

    NOTE(review): create() itself is replaced with a MagicMock before it
    is invoked, so this asserts only that the call occurred — it does not
    exercise the real create() logic. Confirm whether a deeper assertion
    (e.g. on the underlying client) was intended.
    """
    backend = db.InfluxDBBackend()
    backend._backend = mock.MagicMock()
    backend.create = mock.MagicMock()

    backend.create()
    assert backend.create.called
def test_query_return_types(self):
    """query() wraps both successes and client errors in an AITDBResult."""
    # Only meaningful when the real influxdb exception types can be
    # raised; skip otherwise. Tested with python-influxdb=5.3.0.
    try:
        backend = db.InfluxDBBackend()
    except cfg.AitConfigError:
        self.skipTest("Test requires database library to be installed")

    backend._conn = mock.MagicMock()
    backend._query = mock.MagicMock()

    query_string = "select * from table"

    # Success path: results are passed through verbatim.
    expected = [1, 2, 3]
    backend._query.return_value = expected
    result = backend.query(query_string)
    assert isinstance(result, db.AITDBResult)
    assert result.query == query_string
    assert result.results == expected

    # Failure path: the client error message lands in .errors.
    backend._query.side_effect = (
        backend._backend.exceptions.InfluxDBClientError("foo"))
    result = backend.query(query_string)
    assert result.query == query_string
    assert result.errors == ["foo"]
def test_query_packets_arg_handling(self, importlib_mock):
    """query_packets() defaults its arguments sensibly and rejects bad names.

    Covers defaulting of packets (every packet in the default dict),
    start_time (GPS epoch), end_time (approximately utcnow), and the
    ValueError raised for unknown packet names.

    Fixes vs. original: removed a dead `query` assignment in the
    end-time section (it was never compared, and used inconsistent
    quoting), a shadowed/unused `start` rebind, and a redundant second
    bare query_packets() call.
    """
    backend = db.InfluxDBBackend()
    backend._conn = mock.MagicMock()

    # All packets defined in the default telemetry dictionary, quoted
    # the way query_packets() renders them into the FROM clause.
    all_packets = ", ".join(f'"{i}"' for i in tlm.getDefaultDict().keys())

    # No packets given: query covers every defined packet. ##############
    start = dmc.GPS_Epoch
    end = dt.datetime.utcnow()
    backend.query_packets(start_time=start, end_time=end)
    start_str = start.strftime(dmc.RFC3339_Format)
    end_str = end.strftime(dmc.RFC3339_Format)
    query = (
        f"SELECT * FROM \"{all_packets}\" "
        f"WHERE time >= '{start_str}' AND time <= '{end_str}'"
    )
    assert backend._conn.query.call_args[0][0] == query
    backend._conn.reset_mock()

    # No start time: defaults to the GPS epoch. ########################
    end = dt.datetime.utcnow()
    backend.query_packets(end_time=end)
    start_str = dmc.GPS_Epoch.strftime(dmc.RFC3339_Format)
    end_str = end.strftime(dmc.RFC3339_Format)
    query = (
        f"SELECT * FROM \"{all_packets}\" "
        f"WHERE time >= '{start_str}' AND time <= '{end_str}'"
    )
    assert backend._conn.query.call_args[0][0] == query
    backend._conn.reset_mock()

    # No end time: defaults to "now", so parse the timestamp back out
    # of the generated query and bound the clock skew instead of
    # comparing the full query string.
    backend.query_packets()
    now = dt.datetime.utcnow()
    exec_end_time = dt.datetime.strptime(
        backend._conn.query.call_args[0][0].split("'")[-2],
        dmc.RFC3339_Format)
    assert (now - exec_end_time).seconds < 1

    # Unknown packet names are rejected. ###############################
    with pytest.raises(ValueError):
        backend.query_packets(packets=["not_a_valid_packet"])
def test_query_packet_time_inclusion(self, importlib_mock):
    """query_packets(yield_packet_time=True) yields (time, packet) tuples."""
    backend = db.InfluxDBBackend()
    backend._conn = mock.MagicMock()

    # Three 1553_HS_Packet rows; every numeric field equals the row index.
    rows = []
    for idx, stamp in enumerate([
        "2020-11-17T21:12:17.677316Z",
        "2020-11-17T21:12:18.675379Z",
        "2020-11-17T21:12:19.682312Z",
    ]):
        rows.append({
            "time": stamp,
            "Current_A": idx,
            "Voltage_A": idx,
            "Voltage_B": idx,
            "Voltage_C": idx,
            "Voltage_D": idx,
        })
    ret_data = [(("1553_HS_Packet", None), rows)]

    query_result = mock.MagicMock()
    query_result.items = mock.MagicMock(return_value=ret_data)
    backend._query = mock.MagicMock(return_value=query_result)

    res = backend.query_packets(yield_packet_time=True)
    assert isinstance(res, db.AITDBResult)
    assert res._packets is not None

    yielded = list(res.get_packets())
    assert len(yielded) == 3
    assert isinstance(yielded[0], tuple)

    # Each yielded tuple carries the parsed row timestamp and the packet.
    for i, row in enumerate(ret_data[0][1]):
        assert dmc.rfc3339_str_to_datetime(row["time"]) == yielded[i][0]
        assert yielded[i][1].Voltage_A == i
def test_query_success_handling(self, importlib_mock):
    """query_packets() turns query result rows into tlm.Packet objects."""
    backend = db.InfluxDBBackend()
    backend._conn = mock.MagicMock()

    # Three 1553_HS_Packet rows; every numeric field equals the row index.
    timestamps = [
        "2020-11-17T21:12:17.677316Z",
        "2020-11-17T21:12:18.675379Z",
        "2020-11-17T21:12:19.682312Z",
    ]
    rows = [
        {
            "time": stamp,
            "Current_A": idx,
            "Voltage_A": idx,
            "Voltage_B": idx,
            "Voltage_C": idx,
            "Voltage_D": idx,
        }
        for idx, stamp in enumerate(timestamps)
    ]

    query_result = mock.MagicMock()
    query_result.items = mock.MagicMock(
        return_value=[(("1553_HS_Packet", None), rows)])
    backend._query = mock.MagicMock(return_value=query_result)

    res = backend.query_packets()
    assert isinstance(res, db.AITDBResult)
    assert res._packets is not None

    pkts = list(res.get_packets())
    assert len(pkts) == 3
    assert isinstance(pkts[0], tlm.Packet)

    # Field values survive the round trip in row order.
    for i, pkt in enumerate(pkts):
        assert pkt.Voltage_A == i
def test_query_fail_handling(self):
    """A backend client error during query_packets() surfaces in .errors."""
    # Only meaningful when the real influxdb exception types can be
    # raised; skip otherwise. Tested with python-influxdb=5.3.0.
    try:
        backend = db.InfluxDBBackend()
    except cfg.AitConfigError:
        self.skipTest("Test requires database library to be installed")

    backend._conn = mock.MagicMock()
    backend._query = mock.MagicMock(
        side_effect=backend._backend.exceptions.InfluxDBClientError("foo"))

    assert backend.query_packets().errors == ["foo"]
def test_query_packets_calldown(self, importlib_mock):
    """query_packets() hands a fully formatted query to the connection."""
    backend = db.InfluxDBBackend()
    backend._conn = mock.MagicMock()

    start = dmc.GPS_Epoch
    end = dt.datetime.utcnow()
    pkt_names = [list(tlm.getDefaultDict().keys())[0]]

    backend.query_packets(packets=pkt_names, start_time=start, end_time=end)

    names = ", ".join(pkt_names)
    start_str = start.strftime(dmc.RFC3339_Format)
    end_str = end.strftime(dmc.RFC3339_Format)
    expected = (
        f"SELECT * FROM \"{names}\" "
        f"WHERE time >= '{start_str}' AND time <= '{end_str}'"
    )
    assert backend._conn.query.call_args[0][0] == expected
def test_influx_insert(self, importlib_mock):
    """insert() writes a packet's decoded fields via write_points()."""
    yaml_doc = """
- !Packet
  name: Packet1
  history:
    - col1
  fields:
    - !Field
      name: col1
      desc: test column 1
      type: MSB_U16
      enum:
        a: testa
    - !Field
      name: SampleTime
      type: TIME64
    - !Field
      name: SampleTime32
      type: TIME32
    - !Field
      name: SampleTime40
      type: TIME40
    - !Field
      name: SampleEvr16
      type: EVR16
    - !Field
      name: SampleCmd16
      type: CMD16
"""
    with open(self.test_yaml_file, 'wt') as out:
        out.write(yaml_doc)

    tlmdict = tlm.TlmDict(self.test_yaml_file)

    backend = db.InfluxDBBackend()
    backend._conn = mock.MagicMock()

    pkt_defn = tlmdict['Packet1']
    pkt = tlm.Packet(pkt_defn, bytearray(range(pkt_defn.nbytes)))

    # Decoded field values for a packet built from bytearray(range(nbytes));
    # shared by all three write_points() expectations below.
    expected_fields = {
        'col1': 1,
        'SampleTime': 33752069.10112411,
        'SampleTime32': 168496141,
        'SampleTime40': 235868177.0703125,
        'SampleCmd16': 5398,
        'SampleEvr16': 4884
    }

    # Insert with an explicit timestamp.
    now = dt.datetime.utcnow()
    backend.insert(pkt, time=now)
    backend._conn.write_points.assert_called_with([{
        'measurement': 'Packet1',
        'time': now.strftime('%Y-%m-%dT%H:%M:%S'),
        'tags': {},
        'fields': expected_fields
    }])
    backend._conn.reset_mock()

    # Insert without a timestamp: no 'time' key is emitted.
    backend.insert(pkt)
    backend._conn.write_points.assert_called_with([{
        'measurement': 'Packet1',
        'tags': {},
        'fields': expected_fields
    }])
    backend._conn.reset_mock()

    # Insert with additional tags.
    backend.insert(pkt, tags={'testNum': '3'})
    backend._conn.write_points.assert_called_with([{
        'measurement': 'Packet1',
        'tags': {'testNum': '3'},
        'fields': expected_fields
    }])
    backend._conn.reset_mock()

    os.remove(self.test_yaml_file)
def test_influx_backend_init(self, importlib_mock):
    """Constructing InfluxDBBackend imports the 'influxdb' library."""
    db.InfluxDBBackend()
    importlib_mock.assert_called_with('influxdb')
def test_influx_query(self, importlib_mock):
    """query() forwards the query string straight to the connection."""
    backend = db.InfluxDBBackend()
    backend._conn = mock.MagicMock()

    backend.query('SELECT * FROM table')
    backend._conn.query.assert_called_with('SELECT * FROM table')
def main():
    """Insert telemetry packets from PCAP file(s) into a database.

    Fixes vs. original: the KeyboardInterrupt log message contained a
    raw newline inside a single-quoted string (a syntax error);
    `tlmdict.keys()` was subscripted directly (dict_keys is not
    subscriptable in Python 3); an unused `nbytes` local was removed;
    and `dbconn.close()` is now guarded so a failure before the backend
    is constructed doesn't raise AttributeError from the finally block.
    """
    tlmdict = tlm.getDefaultDict()
    # list() so pnames[0] works under Python 3 (dict_keys is a view).
    pnames = list(tlmdict.keys())

    ap = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    arguments = {
        '--packet': {
            'type': str,
            'choices': pnames,
            'default': pnames[0] if len(pnames) > 0 else None,
            'help': 'Type of packets (!Packet name in tlm.yaml) in file',
            # Only force the user to choose when more than one packet exists.
            'required': len(pnames) > 1,
        },
        '--database': {
            'default': ait.config.get('database.name'),
            'help': ('Name of database in which to insert packets (may '
                     'also be specified in config.yaml database.name)'),
            'required': ait.config.get('database.name') is None
        },
        '--backend': {
            'default': 'sqlite',
            'choices': ['sqlite', 'influx'],
            'action': 'store',
            'help': ('Name of database in which to insert packets (may '
                     'also be specified in config.yaml database.name)')
        },
        '--use-current-time': {
            'action': 'store_true',
            'help': ('Use current time stamps when insert packets instead '
                     'of ground receipt time (or the time written in the '
                     'PCAP header).')
        },
        'file': {
            'nargs': '+',
            'help': 'File(s) containing telemetry packets'
        }
    }

    for name, params in arguments.items():
        ap.add_argument(name, **params)

    args = ap.parse_args()

    log.begin()

    try:
        npackets = 0
        dbconn = None
        defn = tlm.getDefaultDict()[args.packet]

        if args.backend == 'sqlite':
            dbconn = db.SQLiteBackend()
        elif args.backend == 'influx':
            dbconn = db.InfluxDBBackend()

        # SQLite needs the database file created on first use; otherwise
        # connect to the existing database.
        if args.backend == 'sqlite' and (args.database == ':memory:' or
                                         not os.path.exists(args.database)):
            dbconn.create(database=args.database)
        else:
            dbconn.connect(database=args.database)

        for filename in args.file:
            log.info('Processing %s' % filename)
            with pcap.open(filename) as stream:
                for header, pkt_data in stream:
                    try:
                        packet = tlm.Packet(defn, pkt_data)
                        # Default to the PCAP header (ground receipt) time;
                        # None lets the backend stamp the current time.
                        time = header.timestamp
                        if args.use_current_time:
                            time = None
                        dbconn.insert(packet, time=time)
                        npackets += 1
                    except struct.error:
                        log.error(
                            "Unable to unpack data into packet. Skipping ...")
    except KeyboardInterrupt:
        log.info('Received Ctrl-C.  Stopping database insert.')
    except IOError as e:
        log.error(str(e))
    finally:
        # dbconn may still be None if setup failed before construction.
        if dbconn is not None:
            dbconn.close()
        values = npackets, args.packet, args.database
        log.info('Inserted %d %s packets into database %s.' % values)

    log.end()
def test_influx_insert(self, importlib_mock):
    """insert() writes a packet's decoded fields via write_points()."""
    yaml_doc = """
- !Packet
  name: Packet1
  history:
    - col1
  fields:
    - !Field
      name: col1
      desc: test column 1
      type: MSB_U16
      enum:
        a: testa
    - !Field
      name: SampleTime
      type: TIME64
    - !Field
      name: SampleTime32
      type: TIME32
    - !Field
      name: SampleTime40
      type: TIME40
    - !Field
      name: SampleEvr16
      type: EVR16
    - !Field
      name: SampleCmd16
      type: CMD16
"""
    with open(self.test_yaml_file, "wt") as out:
        out.write(yaml_doc)

    tlmdict = tlm.TlmDict(self.test_yaml_file)

    backend = db.InfluxDBBackend()
    backend._conn = mock.MagicMock()

    pkt_defn = tlmdict["Packet1"]
    pkt = tlm.Packet(pkt_defn, bytearray(range(pkt_defn.nbytes)))

    # Decoded field values for a packet built from bytearray(range(nbytes));
    # shared by all three write_points() expectations below.
    expected_fields = {
        "col1": 1,
        "SampleTime": 33752069.10112411,
        "SampleTime32": 168496141,
        "SampleTime40": 235868177.0703125,
        "SampleCmd16": 5398,
        "SampleEvr16": 4884,
    }

    # Insert with an explicit timestamp (rendered in RFC 3339 form).
    now = dt.datetime.utcnow()
    backend.insert(pkt, time=now)
    backend._conn.write_points.assert_called_with([{
        "measurement": "Packet1",
        "time": now.strftime(dmc.RFC3339_Format),
        "tags": {},
        "fields": expected_fields,
    }])
    backend._conn.reset_mock()

    # Insert without a timestamp: no "time" key is emitted.
    backend.insert(pkt)
    backend._conn.write_points.assert_called_with([{
        "measurement": "Packet1",
        "tags": {},
        "fields": expected_fields,
    }])
    backend._conn.reset_mock()

    # Insert with additional tags.
    backend.insert(pkt, tags={"testNum": "3"})
    backend._conn.write_points.assert_called_with([{
        "measurement": "Packet1",
        "tags": {"testNum": "3"},
        "fields": expected_fields,
    }])
    backend._conn.reset_mock()

    os.remove(self.test_yaml_file)
def main():
    """Insert telemetry packets from PCAP file(s) into a database.

    Fixes vs. original: the KeyboardInterrupt log message contained a
    raw newline inside a double-quoted string (a syntax error), and
    `dbconn.close()` is now guarded so a failure before the backend is
    constructed doesn't raise AttributeError from the finally block.
    """
    tlmdict = tlm.getDefaultDict()
    pnames = list(tlmdict.keys())

    ap = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    arguments = {
        "--packet": {
            "type": str,
            "choices": pnames,
            "default": pnames[0] if len(pnames) > 0 else None,
            "help": "Type of packets (!Packet name in tlm.yaml) in file",
            # Only force the user to choose when more than one packet exists.
            "required": len(pnames) > 1,
        },
        "--database": {
            "default": ait.config.get("database.dbname"),
            "help": ("Name of database in which to insert packets (may "
                     "also be specified in config.yaml database.name)"),
            "required": ait.config.get("database.dbname") is None,
        },
        "--backend": {
            "default": "sqlite",
            "choices": ["sqlite", "influx"],
            "action": "store",
            "help": ("Name of database in which to insert packets (may "
                     "also be specified in config.yaml database.name)"),
        },
        "--use-current-time": {
            "action": "store_true",
            "help": ("Use current time stamps when insert packets instead "
                     "of ground receipt time (or the time written in the "
                     "PCAP header)."),
        },
        "file": {
            "nargs": "+",
            "help": "File(s) containing telemetry packets"
        },
    }

    for name, params in arguments.items():
        ap.add_argument(name, **params)

    args = ap.parse_args()

    log.begin()

    try:
        npackets = 0
        dbconn = None
        defn = tlm.getDefaultDict()[args.packet]

        if args.backend == "sqlite":
            dbconn = db.SQLiteBackend()
        elif args.backend == "influx":
            dbconn = db.InfluxDBBackend()

        # SQLite needs the database file created on first use; otherwise
        # connect to the existing database.
        if args.backend == "sqlite" and (args.database == ":memory:" or
                                         not os.path.exists(args.database)):
            dbconn.create(database=args.database)
        else:
            dbconn.connect(database=args.database)

        for filename in args.file:
            log.info("Processing %s" % filename)
            with pcap.open(filename) as stream:
                for header, pkt_data in stream:
                    try:
                        packet = tlm.Packet(defn, pkt_data)
                        # Default to the PCAP header (ground receipt) time;
                        # None lets the backend stamp the current time.
                        time = header.timestamp
                        if args.use_current_time:
                            time = None
                        dbconn.insert(packet, time=time)
                        npackets += 1
                    except struct.error:
                        log.error(
                            "Unable to unpack data into packet. Skipping ...")
    except KeyboardInterrupt:
        log.info("Received Ctrl-C.  Stopping database insert.")
    except IOError as e:
        log.error(str(e))
    finally:
        # dbconn may still be None if setup failed before construction.
        if dbconn is not None:
            dbconn.close()
        values = npackets, args.packet, args.database
        log.info("Inserted %d %s packets into database %s." % values)

    log.end()