def test_yaml_fld_includesx2(self):
    yaml_docs_main = (
        '- !Packet\n'
        '  name: Test_Packet\n'
        '  fields:\n'
        '    - !include /tmp/test_inc1.yaml\n'
        '    - !Field\n'
        '      name: field_1\n'
        '      type: MSB_U16\n'
        '    - !include /tmp/test_inc2.yaml\n'
    )

    # Write the YAML as text; writing a str to a binary-mode file fails.
    with open(self.test_yaml_main, 'wt') as out:
        out.write(yaml_docs_main)

    tlmdict = tlm.TlmDict(self.test_yaml_main)

    assert len(tlmdict['Test_Packet'].fields) == 5
    assert tlmdict['Test_Packet'].fields[4].name == 'field_Z'
    assert tlmdict['Test_Packet'].fields[4].bytes == 5

    try:
        os.remove(self.test_yaml_main)
        os.remove(self.test_pkl_main)
    except OSError:
        pass

def test_sqlite_create_table(self, importlib_mock):
    yaml_doc = """
    - !Packet
      name: Packet1
      history:
        - col1
      fields:
        - !Field
          name: col1
          desc: test column 1
          type: MSB_U16
          enum:
            a: testa
        - !Field
          name: SampleTime
          type: TIME64
    """
    with open(self.test_yaml_file, 'wt') as out:
        out.write(yaml_doc)

    tlmdict = tlm.TlmDict(self.test_yaml_file)

    sqlbackend = db.SQLiteBackend()
    sqlbackend._conn = mock.MagicMock()

    sqlbackend._create_table(tlmdict['Packet1'])
    sqlbackend._conn.execute.assert_called_with(
        'CREATE TABLE IF NOT EXISTS "Packet1" (time DATETIME DEFAULT(STRFTIME(\'%Y-%m-%dT%H:%M:%fZ\', \'NOW\')), col1 INTEGER, SampleTime REAL)'
    )

    os.remove(self.test_yaml_file)

def test_yaml_fld_includes_nested(self):
    test_yaml_inc3 = "/tmp/test_inc3.yaml"
    yaml_docs_inc3 = (
        "- !include /tmp/test_inc1.yaml\n"
        "- !include /tmp/test_inc2.yaml\n"
    )

    with open(test_yaml_inc3, "wt") as out:
        out.write(yaml_docs_inc3)

    yaml_docs_main = (
        "- !Packet\n"
        "  name: Test_Packet\n"
        "  fields:\n"
        "    - !Field\n"
        "      name: field_1\n"
        "      type: MSB_U16\n"
        "    - !include /tmp/test_inc3.yaml\n"
    )

    with open(self.test_yaml_main, "wt") as out:
        out.write(yaml_docs_main)

    tlmdict = tlm.TlmDict(self.test_yaml_main)

    assert len(tlmdict["Test_Packet"].fields) == 5
    assert tlmdict["Test_Packet"].fields[4].name == "field_Z"
    assert tlmdict["Test_Packet"].fields[4].bytes == 5

    try:
        os.remove(test_yaml_inc3)
        os.remove(self.test_yaml_main)
        os.remove(self.test_pkl_main)
    except OSError:
        pass

def test_sqlite_create_table(self, importlib_mock):
    yaml_doc = """
    - !Packet
      name: Packet1
      history:
        - col1
      fields:
        - !Field
          name: col1
          desc: test column 1
          type: MSB_U16
          enum:
            a: testa
        - !Field
          name: SampleTime
          type: TIME64
    """
    with open(self.test_yaml_file, "wt") as out:
        out.write(yaml_doc)

    tlmdict = tlm.TlmDict(self.test_yaml_file)

    sqlbackend = db.SQLiteBackend()
    sqlbackend._conn = mock.MagicMock()

    sqlbackend._create_table(tlmdict["Packet1"])
    sqlbackend._conn.execute.assert_called_with(
        "CREATE TABLE IF NOT EXISTS \"Packet1\" (time DATETIME DEFAULT(STRFTIME('%Y-%m-%dT%H:%M:%fZ', 'NOW')), PKTDATA BLOB NOT NULL)"
    )

    os.remove(self.test_yaml_file)

def test_sqlite_insert(self, importlib_mock):
    yaml_doc = """
    - !Packet
      name: Packet1
      history:
        - col1
      fields:
        - !Field
          name: col1
          desc: test column 1
          type: MSB_U16
          enum:
            a: testa
        - !Field
          name: SampleTime
          type: TIME64
    """
    with open(self.test_yaml_file, 'wt') as out:
        out.write(yaml_doc)

    tlmdict = tlm.TlmDict(self.test_yaml_file)

    sqlbackend = db.SQLiteBackend()
    sqlbackend._conn = mock.MagicMock()

    pkt_defn = tlmdict['Packet1']
    pkt = tlm.Packet(pkt_defn, bytearray(range(pkt_defn.nbytes)))

    sqlbackend.insert(pkt)
    sqlbackend._conn.execute.assert_called_with(
        'INSERT INTO "Packet1" (col1, SampleTime) VALUES (?, ?)',
        [1, 33752069.10112411]
    )

    os.remove(self.test_yaml_file)

def test_sqlite_create(self, importlib_mock):
    yaml_doc = """
    - !Packet
      name: Packet1
      fields:
        - !Field
          name: col1
          desc: test column 1
          type: MSB_U16
          enum:
            a: testa
        - !Field
          name: SampleTime64
          type: TIME64
    """
    with open(self.test_yaml_file, 'wt') as out:
        out.write(yaml_doc)

    tlmdict = tlm.TlmDict(self.test_yaml_file)

    sqlbackend = db.SQLiteBackend()
    sqlbackend.connect = mock.MagicMock()
    sqlbackend._create_table = mock.MagicMock()

    sqlbackend.create(tlmdict=tlmdict)
    sqlbackend._create_table.assert_called_with(tlmdict['Packet1'])

    os.remove(self.test_yaml_file)

def test_yaml_fld_includes_nested(self):
    test_yaml_inc3 = '/tmp/test_inc3.yaml'
    yaml_docs_inc3 = (
        '- !include /tmp/test_inc1.yaml\n'
        '- !include /tmp/test_inc2.yaml\n'
    )

    # Write the YAML as text; writing a str to a binary-mode file fails.
    with open(test_yaml_inc3, 'wt') as out:
        out.write(yaml_docs_inc3)

    yaml_docs_main = (
        '- !Packet\n'
        '  name: OCO3_1553_EHS\n'
        '  fields:\n'
        '    - !Field\n'
        '      name: field_1\n'
        '      type: MSB_U16\n'
        '    - !include /tmp/test_inc3.yaml\n'
    )

    with open(self.test_yaml_main, 'wt') as out:
        out.write(yaml_docs_main)

    tlmdict = tlm.TlmDict(self.test_yaml_main)

    assert len(tlmdict['OCO3_1553_EHS'].fields) == 5
    assert tlmdict['OCO3_1553_EHS'].fields[4].name == 'field_Z'
    assert tlmdict['OCO3_1553_EHS'].fields[4].bytes == 5

    try:
        os.remove(test_yaml_inc3)
        os.remove(self.test_yaml_main)
        os.remove(self.test_pkl_main)
    except OSError:
        pass

def testMask():
    """
    # This test will use the following TLM dictionary definitions.
    # The mask 0x0180 singles out the two bits on either MSB_U16
    # word:
    #
    #     0b00000001 0b10000000

    - !Packet
      name: P
      fields:
        - !Field
          name: M
          type: MSB_U16
          mask: 0x0180
    """
    defn = tlm.TlmDict(testMask.__doc__)["P"]
    packet = tlm.Packet(defn)

    assert packet.M == 0
    assert packet._data == bytearray([0x00, 0x00])

    packet.M = 1
    assert packet.M == 1
    assert packet._data == bytearray([0x00, 0x80])

    packet.M = 2
    assert packet.M == 2
    assert packet._data == bytearray([0x01, 0x00])

    packet.M = 3
    assert packet.M == 3
    assert packet._data == bytearray([0x01, 0x80])

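# The assertions in testMask above follow from plain shift-and-mask
# arithmetic: the field value is shifted left so it lines up with the bits
# selected by the mask, then masked into the raw big-endian word. The sketch
# below only illustrates that arithmetic; it is not the library's
# implementation, and the helper names (encode/decode) are hypothetical.
def testMaskArithmeticSketch():
    mask = 0x0180
    # Bit position of the mask's lowest set bit (7 for 0x0180).
    shift = (mask & -mask).bit_length() - 1

    def encode(value):
        # Shift the value into the masked bit positions of a big-endian U16.
        return bytearray(((value << shift) & mask).to_bytes(2, "big"))

    def decode(data):
        # Pull the masked bits back out and shift them down to a plain value.
        return (int.from_bytes(data, "big") & mask) >> shift

    assert encode(1) == bytearray([0x00, 0x80])
    assert encode(3) == bytearray([0x01, 0x80])
    assert decode(bytearray([0x01, 0x00])) == 2
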
def test_yaml_pkt_includes(self):
    yaml_docs_inc3 = (
        '- !Packet\n'
        '  name: Test_Packet_1\n'
        '  fields:\n'
        '    - !include /tmp/test_inc1.yaml\n'
    )
    test_yaml_inc3 = '/tmp/test_inc3.yaml'

    # Write the YAML as text; writing a str to a binary-mode file fails.
    with open(test_yaml_inc3, 'wt') as out:
        out.write(yaml_docs_inc3)

    yaml_docs_inc4 = (
        '- !Packet\n'
        '  name: Test_Packet_2\n'
        '  fields:\n'
        '    - !include /tmp/test_inc2.yaml\n'
    )
    test_yaml_inc4 = '/tmp/test_inc4.yaml'

    with open(test_yaml_inc4, 'wt') as out:
        out.write(yaml_docs_inc4)

    yaml_docs_main = (
        '- !Packet\n'
        '  name: Test_Packet\n'
        '  fields:\n'
        '    - !include /tmp/test_inc1.yaml\n'
        '    - !Field\n'
        '      name: field_1\n'
        '      type: MSB_U16\n'
        '    - !include /tmp/test_inc2.yaml\n'
        '- !include /tmp/test_inc3.yaml\n'
        '- !include /tmp/test_inc4.yaml\n'
    )

    with open(self.test_yaml_main, 'wt') as out:
        out.write(yaml_docs_main)

    tlmdict = tlm.TlmDict(self.test_yaml_main)

    assert len(tlmdict['Test_Packet'].fields) == 5
    assert tlmdict['Test_Packet'].fields[4].name == 'field_Z'
    assert tlmdict['Test_Packet'].fields[4].bytes == 5

    assert len(tlmdict['Test_Packet_1'].fields) == 2
    assert tlmdict['Test_Packet_1'].fields[1].name == 'field_B'
    assert tlmdict['Test_Packet_1'].fields[1].bytes == 1

    try:
        os.remove(test_yaml_inc3)
        os.remove(test_yaml_inc4)
        os.remove(self.test_yaml_main)
        os.remove(self.test_pkl_main)
    except OSError:
        pass

def test_derivation_definition(self):
    tlmdict = tlm.TlmDict(self.test_yaml_deriv1)
    pktdefn = tlmdict["Test_Packet"]

    deriv1 = pktdefn.derivations[0]
    assert deriv1.name == "deriv_1"
    assert type(deriv1.equation) == tlm.PacketExpression
    assert deriv1.equation.toJSON() == "field_1 + field_2"

    pkt = tlm.Packet(pktdefn)
    pkt.field_1 = 1
    pkt.field_2 = 2
    assert pkt.deriv_1 == 3

def test_writeToCSV(self):
    yaml_doc = """
    - !Packet
      name: Packet1
      fields:
        - !Field
          name: col1
          desc: test column 1
          bytes: 0
          type: MSB_U16
          mask: 0x10
          enum:
            a: testa
        - !Field
          name: SampleTime
          type: TIME64
          bytes: 1
    """

    csv_row1 = [
        "col1",
        "0",
        "2",
        "0x10",
        "MSB",
        "MSB_U16",
        "test column 1",
        "a: testa",
    ]

    with open(self.test_yaml_file, "wt") as out:
        out.write(yaml_doc)

    tlmdict = tlm.TlmDict(self.test_yaml_file)

    writer = tlm.TlmDictWriter(tlmdict=tlmdict)
    writer.write_to_csv(self.test_outpath)

    expected_csv = os.path.join(self.test_outpath, "Packet1.csv")
    assert os.path.isfile(expected_csv)

    with open(expected_csv, "rt") as csvfile:
        reader = csv.reader(csvfile)
        # skip header
        next(reader)
        actual_row = next(reader)
        assert actual_row[0] == csv_row1[0]
        assert actual_row[1] == csv_row1[1]
        assert actual_row[4] == csv_row1[4]

    os.remove(self.test_yaml_file)
    os.remove(expected_csv)

def test_packet_creation_from_result(self, importlib_mock):
    yaml_doc = """
    - !Packet
      name: TestPacket
      fields:
        - !Field
          name: SampleField
          type: MSB_U16
        - !Field
          name: SampleTime
          type: TIME64
        - !Field
          name: SampleTime8
          type: TIME8
        - !Field
          name: SampleTime32
          type: TIME32
        - !Field
          name: SampleTime40
          type: TIME40
        - !Field
          name: SampleEvr16
          type: EVR16
        - !Field
          name: SampleCmd16
          type: CMD16
    """
    with open(self.test_yaml_file, "wt") as out:
        out.write(yaml_doc)

    tlmdict = tlm.TlmDict(self.test_yaml_file)

    res = {
        "time": "2020-11-17T21:12:17.677316Z",
        "SampleField": 1,
        "SampleTime": 33752069.101124,
        "SampleTime8": 100,
        "SampleTime32": 168496141,
        "SampleTime40": 1113733097.03125,
        "SampleEvr16": 1,
        "SampleCmd16": 1,
    }

    pkt = db.InfluxDBBackend.create_packet_from_result(tlmdict["TestPacket"], res)

    for f in pkt._defn.fields:
        assert getattr(pkt.raw, f.name) == res[f.name]

    os.remove(self.test_yaml_file)

def test_deriv_defn_noeqn(self):
    test_yaml_deriv2 = "/tmp/test_deriv2.yaml"
    yaml_docs_deriv2 = (
        "- !Packet\n"
        "  name: Test_Packet\n"
        "  derivations:\n"
        "    - !Derivation\n"
        "      name: deriv_1\n"
    )

    with open(test_yaml_deriv2, "wb") as out:
        out.write(yaml_docs_deriv2.encode("utf-8"))

    with pytest.raises(Exception):
        tlm.TlmDict(test_yaml_deriv2)

    os.remove(test_yaml_deriv2)

def test_deriv_defn_noeqn(self):
    test_yaml_deriv2 = '/tmp/test_deriv2.yaml'
    yaml_docs_deriv2 = (
        '- !Packet\n'
        '  name: Test_Packet\n'
        '  derivations:\n'
        '    - !Derivation\n'
        '      name: deriv_1\n'
    )

    # Write as text; the original binary-mode write of a str fails in Python 3.
    with open(test_yaml_deriv2, 'wt') as out:
        out.write(yaml_docs_deriv2)

    tlmdict = tlm.TlmDict(test_yaml_deriv2)

    os.remove(test_yaml_deriv2)

def testArray():
    """
    # This test will use the following TLM dictionary definitions:

    - !Packet
      name: P
      fields:
        - !Field
          name: A
          type: MSB_U16[3]
    """
    defn = tlm.TlmDict(testArray.__doc__)["P"]
    packet = tlm.Packet(defn, struct.pack(">HHH", 1, 2, 3))

    assert packet.A == [1, 2, 3]

def test_sqlite_insert(self, importlib_mock):
    yaml_doc = """
    - !Packet
      name: Packet1
      history:
        - col1
      fields:
        - !Field
          name: col1
          desc: test column 1
          type: MSB_U16
          enum:
            a: testa
        - !Field
          name: SampleTime
          type: TIME64
    """
    with open(self.test_yaml_file, "wt") as out:
        out.write(yaml_doc)

    tlmdict = tlm.TlmDict(self.test_yaml_file)

    sqlbackend = db.SQLiteBackend()
    sqlbackend._conn = mock.MagicMock()

    pkt_defn = tlmdict["Packet1"]
    pkt = tlm.Packet(pkt_defn, bytearray(range(pkt_defn.nbytes)))

    # Note: we can't fully test this call given the modification to
    # the packet data on insert. Better than nothing I suppose.

    # Test without time
    sqlbackend.insert(pkt)
    assert (
        'INSERT INTO "Packet1" (PKTDATA) VALUES (?)'
        in sqlbackend._conn.execute.call_args[0]
    )
    sqlbackend._conn.reset_mock()

    # Test with time
    now = dt.datetime.utcnow()
    sqlbackend.insert(pkt, time=now)
    assert (
        'INSERT INTO "Packet1" (PKTDATA, time) VALUES (?, ?)'
        in sqlbackend._conn.execute.call_args[0]
    )
    assert (
        now.strftime(dmc.RFC3339_Format)
        == sqlbackend._conn.execute.call_args[0][1][1]
    )

    os.remove(self.test_yaml_file)

def test_yaml_fld_includes_nestedx2(self):
    test_yaml_inc3 = '/tmp/test_inc3.yaml'
    yaml_docs_inc3 = (
        '- !include /tmp/test_inc1.yaml\n'
        '- !include /tmp/test_inc2.yaml\n'
    )

    # Write the YAML as text; writing a str to a binary-mode file fails.
    with open(test_yaml_inc3, 'wt') as out:
        out.write(yaml_docs_inc3)

    test_yaml_inc4 = '/tmp/test_inc4.yaml'
    yaml_docs_inc4 = (
        '  - !include /tmp/test_inc3.yaml\n'
        '  - !Field\n'
        '    name: field_FOO\n'
        '    type: MSB_U16\n'
    )

    with open(test_yaml_inc4, 'wt') as out:
        out.write(yaml_docs_inc4)

    yaml_docs_main = (
        '- !Packet\n'
        '  name: Test_Packet\n'
        '  fields:\n'
        '    - !Field\n'
        '      name: field_1\n'
        '      type: MSB_U16\n'
        '    - !include /tmp/test_inc4.yaml\n'
    )

    with open(self.test_yaml_main, 'wt') as out:
        out.write(yaml_docs_main)

    tlmdict = tlm.TlmDict(self.test_yaml_main)

    assert len(tlmdict['Test_Packet'].fields) == 6
    assert tlmdict['Test_Packet'].fields[5].name == 'field_FOO'
    assert tlmdict['Test_Packet'].fields[5].bytes == [6, 7]

    try:
        os.remove(test_yaml_inc3)
        os.remove(test_yaml_inc4)
        os.remove(self.test_yaml_main)
        os.remove(self.test_pkl_main)
    except OSError:
        pass

def test_deriv_defn_notitle(self):
    test_yaml_deriv2 = "/tmp/test_deriv2.yaml"
    yaml_docs_deriv2 = (
        "- !Packet\n"
        "  name: Test_Packet\n"
        "  derivations:\n"
        "    - !Derivation\n"
        "      name: deriv_1\n"
        "      equation: field_1 + field_2\n"
    )

    with open(test_yaml_deriv2, "wt") as out:
        out.write(yaml_docs_deriv2)

    tlmdict = tlm.TlmDict(test_yaml_deriv2)
    assert tlmdict["Test_Packet"].derivations[0].title == "deriv_1"

    os.remove(test_yaml_deriv2)

def test_fld_defn_notitle(self):
    test_yaml_test2 = '/tmp/test_test2.yaml'
    yaml_docs_test2 = (
        '- !Packet\n'
        '  name: OCO3_1553_EHS\n'
        '  fields:\n'
        '    - !Field\n'
        '      name: field_1\n'
        '      type: MSB_U16\n'
    )

    # Write as text; writing a str to a binary-mode file fails in Python 3.
    with open(test_yaml_test2, 'wt') as out:
        out.write(yaml_docs_test2)

    tlmdict = tlm.TlmDict(test_yaml_test2)
    assert tlmdict['OCO3_1553_EHS'].fields[0].title == 'field_1'

    os.remove(test_yaml_test2)

def test_writeToCSV(self):
    yaml_doc = """
    - !Packet
      name: Packet1
      fields:
        - !Field
          name: col1
          desc: test column 1
          bytes: 0
          type: MSB_U16
          mask: 0x10
          enum:
            a: testa
        - !Field
          name: SampleTime
          type: TIME64
          bytes: 1
    """

    csv_row1 = [
        'col1',
        '0',
        '2',
        '0x10',
        'MSB',
        'MSB_U16',
        'test column 1',
        'a: testa'
    ]

    # Read and write as text; the binary modes and the Python 2
    # reader.next() idiom used originally fail under Python 3.
    with open(self.test_yaml_file, 'wt') as out:
        out.write(yaml_doc)

    tlmdict = tlm.TlmDict(self.test_yaml_file)

    writer = tlm.TlmDictWriter(tlmdict=tlmdict)
    writer.writeToCSV(self.test_outpath)

    expected_csv = os.path.join(self.test_outpath, 'Packet1.csv')
    assert os.path.isfile(expected_csv)

    with open(expected_csv, 'rt') as csvfile:
        reader = csv.reader(csvfile)
        # skip header
        next(reader)
        actual_row = next(reader)
        assert actual_row[0] == csv_row1[0]
        assert actual_row[1] == csv_row1[1]
        assert actual_row[4] == csv_row1[4]

    os.remove(self.test_yaml_file)
    os.remove(expected_csv)

def test_fld_defn_notitle(self):
    test_yaml_test2 = "/tmp/test_test2.yaml"
    yaml_docs_test2 = (
        "- !Packet\n"
        "  name: Test_Packet\n"
        "  fields:\n"
        "    - !Field\n"
        "      name: field_1\n"
        "      type: MSB_U16\n"
    )

    with open(test_yaml_test2, "wt") as out:
        out.write(yaml_docs_test2)

    tlmdict = tlm.TlmDict(test_yaml_test2)
    assert tlmdict["Test_Packet"].fields[0].title == "field_1"

    os.remove(test_yaml_test2)

def testAliases():
    """
    # This test will use the following TLM dictionary definitions:

    - !Packet
      name: P
      fields:
        - !Field
          name: A
          aliases:
            icd: ALIAS_A
            subsys: ALIAS_B
          type: MSB_U16[3]
    """
    defn = tlm.TlmDict(testAliases.__doc__)["P"]

    assert defn.fieldmap["A"].aliases["icd"] == "ALIAS_A"
    assert defn.fieldmap["A"].aliases["subsys"] == "ALIAS_B"
    assert len(defn.fieldmap["A"].aliases) == 2

def testAliases():
    """
    # This test will use the following TLM dictionary definitions:

    - !Packet
      name: P
      fields:
        - !Field
          name: A
          aliases:
            icd: ALIAS_A
            subsys: ALIAS_B
          type: MSB_U16[3]
    """
    defn = tlm.TlmDict(testAliases.__doc__)['P']

    assert defn.fieldmap['A'].aliases['icd'] == 'ALIAS_A'
    assert defn.fieldmap['A'].aliases['subsys'] == 'ALIAS_B'
    assert len(defn.fieldmap['A'].aliases) == 2

def test_deriv_defn_notitle(self):
    test_yaml_deriv2 = '/tmp/test_deriv2.yaml'
    yaml_docs_deriv2 = (
        '- !Packet\n'
        '  name: Test_Packet\n'
        '  derivations:\n'
        '    - !Derivation\n'
        '      name: deriv_1\n'
        '      equation: field_1 + field_2\n'
    )

    # Write as text; writing a str to a binary-mode file fails in Python 3.
    with open(test_yaml_deriv2, 'wt') as out:
        out.write(yaml_docs_deriv2)

    tlmdict = tlm.TlmDict(test_yaml_deriv2)
    assert tlmdict['Test_Packet'].derivations[0].title == 'deriv_1'

    os.remove(test_yaml_deriv2)

def testSingleItemList():
    """
    # this test will test 1-item lists

    - !Packet
      name: P
      fields:
        - !Field
          name: foo
          bytes: 0
          type: U8
        - !Field
          name: bar
          bytes: [1]
          type: U8
        - !Field
          name: baz
          bytes: [9, 10]
          type: MSB_U16
    """
    defn = tlm.TlmDict(testSingleItemList.__doc__)["P"]

    assert defn.fieldmap["foo"].nbytes == 1
    assert defn.fieldmap["bar"].bytes == 1
    assert defn.fieldmap["baz"].bytes == [9, 10]

def test_influx_insert(self, importlib_mock):
    yaml_doc = """
    - !Packet
      name: Packet1
      history:
        - col1
      fields:
        - !Field
          name: col1
          desc: test column 1
          type: MSB_U16
          enum:
            a: testa
        - !Field
          name: SampleTime
          type: TIME64
        - !Field
          name: SampleTime32
          type: TIME32
        - !Field
          name: SampleTime40
          type: TIME40
        - !Field
          name: SampleEvr16
          type: EVR16
        - !Field
          name: SampleCmd16
          type: CMD16
    """
    with open(self.test_yaml_file, 'wt') as out:
        out.write(yaml_doc)

    tlmdict = tlm.TlmDict(self.test_yaml_file)

    sqlbackend = db.InfluxDBBackend()
    sqlbackend._conn = mock.MagicMock()

    pkt_defn = tlmdict['Packet1']
    pkt = tlm.Packet(pkt_defn, bytearray(range(pkt_defn.nbytes)))

    now = dt.datetime.utcnow()
    sqlbackend.insert(pkt, time=now)
    sqlbackend._conn.write_points.assert_called_with([{
        'measurement': 'Packet1',
        'time': now.strftime('%Y-%m-%dT%H:%M:%S'),
        'tags': {},
        'fields': {
            'col1': 1,
            'SampleTime': 33752069.10112411,
            'SampleTime32': 168496141,
            'SampleTime40': 235868177.0703125,
            'SampleCmd16': 5398,
            'SampleEvr16': 4884
        }
    }])
    sqlbackend._conn.reset_mock()

    # Insert without a timestamp
    sqlbackend.insert(pkt)
    sqlbackend._conn.write_points.assert_called_with([{
        'measurement': 'Packet1',
        'tags': {},
        'fields': {
            'col1': 1,
            'SampleTime': 33752069.10112411,
            'SampleTime32': 168496141,
            'SampleTime40': 235868177.0703125,
            'SampleCmd16': 5398,
            'SampleEvr16': 4884
        }
    }])
    sqlbackend._conn.reset_mock()

    # Insert with additional tags
    sqlbackend.insert(pkt, tags={'testNum': '3'})
    sqlbackend._conn.write_points.assert_called_with([{
        'measurement': 'Packet1',
        'tags': {'testNum': '3'},
        'fields': {
            'col1': 1,
            'SampleTime': 33752069.10112411,
            'SampleTime32': 168496141,
            'SampleTime40': 235868177.0703125,
            'SampleCmd16': 5398,
            'SampleEvr16': 4884
        }
    }])
    sqlbackend._conn.reset_mock()

    os.remove(self.test_yaml_file)

def test_field_definition(self):
    tlmdict = tlm.TlmDict(self.test_yaml_test1)

    assert tlmdict['OCO3_1553_EHS'].fields[0].name == 'field_1'
    assert tlmdict['OCO3_1553_EHS'].fields[0].title == 'Field 1'

def test_influx_insert(self, importlib_mock):
    yaml_doc = """
    - !Packet
      name: Packet1
      history:
        - col1
      fields:
        - !Field
          name: col1
          desc: test column 1
          type: MSB_U16
          enum:
            a: testa
        - !Field
          name: SampleTime
          type: TIME64
        - !Field
          name: SampleTime32
          type: TIME32
        - !Field
          name: SampleTime40
          type: TIME40
        - !Field
          name: SampleEvr16
          type: EVR16
        - !Field
          name: SampleCmd16
          type: CMD16
    """
    with open(self.test_yaml_file, "wt") as out:
        out.write(yaml_doc)

    tlmdict = tlm.TlmDict(self.test_yaml_file)

    sqlbackend = db.InfluxDBBackend()
    sqlbackend._conn = mock.MagicMock()

    pkt_defn = tlmdict["Packet1"]
    pkt = tlm.Packet(pkt_defn, bytearray(range(pkt_defn.nbytes)))

    now = dt.datetime.utcnow()
    sqlbackend.insert(pkt, time=now)
    sqlbackend._conn.write_points.assert_called_with([{
        "measurement": "Packet1",
        "time": now.strftime(dmc.RFC3339_Format),
        "tags": {},
        "fields": {
            "col1": 1,
            "SampleTime": 33752069.10112411,
            "SampleTime32": 168496141,
            "SampleTime40": 235868177.0703125,
            "SampleCmd16": 5398,
            "SampleEvr16": 4884,
        },
    }])
    sqlbackend._conn.reset_mock()

    # Insert without a timestamp
    sqlbackend.insert(pkt)
    sqlbackend._conn.write_points.assert_called_with([{
        "measurement": "Packet1",
        "tags": {},
        "fields": {
            "col1": 1,
            "SampleTime": 33752069.10112411,
            "SampleTime32": 168496141,
            "SampleTime40": 235868177.0703125,
            "SampleCmd16": 5398,
            "SampleEvr16": 4884,
        },
    }])
    sqlbackend._conn.reset_mock()

    # Insert with additional tags
    sqlbackend.insert(pkt, tags={"testNum": "3"})
    sqlbackend._conn.write_points.assert_called_with([{
        "measurement": "Packet1",
        "tags": {"testNum": "3"},
        "fields": {
            "col1": 1,
            "SampleTime": 33752069.10112411,
            "SampleTime32": 168496141,
            "SampleTime40": 235868177.0703125,
            "SampleCmd16": 5398,
            "SampleEvr16": 4884,
        },
    }])
    sqlbackend._conn.reset_mock()

    os.remove(self.test_yaml_file)

def test_field_definition(self):
    tlmdict = tlm.TlmDict(self.test_yaml_test1)

    assert tlmdict["Test_Packet"].fields[0].name == "field_1"
    assert tlmdict["Test_Packet"].fields[0].title == "Field 1"

def content_val(self, ymldata=None, messages=None):
    """Validates the Telemetry Dictionary to ensure the contents for each
    of the fields meets specific criteria regarding the expected types,
    byte ranges, etc."""

    # Turn off the YAML Processor
    log.debug("BEGIN: Content-based validation of Telemetry dictionary")

    if ymldata is not None:
        tlmdict = ymldata
    else:
        tlmdict = tlm.TlmDict(self._ymlfile)

    try:
        # boolean to hold argument validity
        fldsvalid = True

        # list of rules to validate against
        rules = []

        # set the packet rules
        #
        # set uniqueness rule for packet names
        rules.append(UniquenessRule("name", "Duplicate packet name: %s", messages))

        # Loop through the keys and check each PacketDefinition
        for key in tlmdict.keys():
            pktdefn = tlmdict[key]

            # check the telemetry packet rules
            for rule in rules:
                rule.check(pktdefn)

            # list of field rules to validate against
            fldrules = []

            # set rules for telemetry fields
            #
            # set uniqueness rule for field name
            fldrules.append(
                UniquenessRule(
                    "name",
                    "Duplicate field name: " + pktdefn.name + ".%s",
                    messages,
                )
            )

            # set type rule for field.type
            fldrules.append(
                TypeRule(
                    "type",
                    "Invalid field type for field: " + pktdefn.name + ".%s",
                    messages,
                )
            )

            # set field size rule for field.type.nbytes
            fldrules.append(
                TypeSizeRule(
                    "nbytes",
                    "Invalid field size for field: " + pktdefn.name + ".%s",
                    messages,
                )
            )

            # set field enumerations rule to check no enumerations contain
            # un-quoted YAML special variables
            fldrules.append(
                EnumRule(
                    "enum",
                    "Invalid enum value for field: " + pktdefn.name + ".%s",
                    messages,
                )
            )

            flddefns = pktdefn.fields

            for fld in flddefns:
                # check field rules
                for rule in fldrules:
                    rule.check(fld)

            # check if a field rule failed; if so, set the validity to False
            if not all(r.valid is True for r in fldrules):
                fldsvalid = False

        log.debug("END: Content-based validation complete for '%s'", self._ymlfile)

        # check validity of all packet rules and field validity
        return all(rule.valid is True for rule in rules) and fldsvalid

    except util.YAMLValidationError as e:
        # Display the error message
        if messages is not None:
            if len(e.message) < 128:
                msg = (
                    "Validation Failed for YAML file '"
                    + self._ymlfile
                    + "': '"
                    + str(e.message)
                    + "'"
                )
            else:
                msg = "Validation Failed for YAML file '" + self._ymlfile + "'"

            log.error(msg)
            self.ehandler.process(self.ehandler.doclines, e, messages)

        return False
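
# content_val above aggregates validity the same way for packets and fields:
# every rule exposes check() and a `valid` flag, and the overall result is the
# AND of all flags. The sketch below only illustrates that aggregation pattern
# under that assumption; DummyRule is hypothetical and is not one of the
# library's rule classes (UniquenessRule, TypeRule, TypeSizeRule, EnumRule).
def _rule_aggregation_sketch():
    class DummyRule:
        def __init__(self, should_pass):
            self.valid = True
            self._should_pass = should_pass

        def check(self, defn):
            # A real rule would inspect `defn`; here we just record the outcome.
            if not self._should_pass:
                self.valid = False

    rules = [DummyRule(True), DummyRule(False)]
    for rule in rules:
        rule.check(None)

    # Mirrors the `all(rule.valid is True for rule in rules)` check above.
    assert all(rule.valid is True for rule in rules) is False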