Example #1
def testMask():
    """
    # This test will use the following TLM dictionary definitions.
    # The mask 0x0180 singles out two bits, one in each byte of the
    # MSB_U16 word:
    #
    #     0b00000001 0b10000000

    - !Packet
      name: P
      fields:
        - !Field
          name: M
          type: MSB_U16
          mask: 0x0180
    """
    defn = tlm.TlmDict(testMask.__doc__)["P"]
    packet = tlm.Packet(defn)

    assert packet.M == 0
    assert packet._data == bytearray([0x00, 0x00])

    packet.M = 1
    assert packet.M == 1
    assert packet._data == bytearray([0x00, 0x80])

    packet.M = 2
    assert packet.M == 2
    assert packet._data == bytearray([0x01, 0x00])

    packet.M = 3
    assert packet.M == 3
    assert packet._data == bytearray([0x01, 0x80])
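
A standalone sketch (plain Python, no AIT imports) of the mask arithmetic the test above exercises: the written value is shifted into the bit positions selected by 0x0180 before being stored in the two-byte word. The helper below is illustrative only.

def mask_encode(value, mask=0x0180, word=0x0000):
    # Shift amount = position of the mask's lowest set bit (7 for 0x0180).
    shift = (mask & -mask).bit_length() - 1
    # Clear the masked bits, then move the value into them.
    return (word & ~mask) | ((value << shift) & mask)

for value in range(4):
    print(value, mask_encode(value).to_bytes(2, "big").hex())
    # 0 -> 0000, 1 -> 0080, 2 -> 0100, 3 -> 0180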
Example #2
    def _run(self):
        try:
            while True:
                gevent.sleep(0)
                msg = self._sub.recv_multipart()
                topic, message = serv_utils.decode_message(msg)
                message = pickle.loads(message)

                if topic is None or message is None:
                    log.error(f"{self} received invalid topic or message. Skipping")
                    continue

                if not isinstance(message, tuple):
                    log.error(
                        "TlmMonitor received message that it is unable to process "
                        "Messages must be tagged packet data tuples (uid, data)."
                    )
                    continue

                if message[0] not in self._defns:
                    log.error(f"Skipping packet with id {message[0]}")
                    continue

                pkt = tlm.Packet(defn=self._defns[message[0]], data=message[1])

                pkt_name = pkt._defn.name
                if pkt_name in self._pktbufs:
                    self._pktbufs[pkt_name].appendleft(pkt)

        except Exception as e:
            log.error("Exception raised in TlmMonitor while receiving messages")
            log.error(f"API telemetry is no longer being received from server. {e}")
            raise e
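
A minimal sketch of the payload this monitor expects on the wire: a pickled (uid, data) tuple. The UID and packet bytes below are placeholders, and the ZeroMQ topic framing that serv_utils.decode_message strips off is not reproduced here.

import pickle

uid = 42                 # placeholder packet UID from the telemetry dictionary
pkt_bytes = bytes(8)     # placeholder raw packet data

payload = pickle.dumps((uid, pkt_bytes))
assert isinstance(pickle.loads(payload), tuple)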
Example #3
    def test_sqlite_insert(self, importlib_mock):
        yaml_doc = """
        - !Packet
          name: Packet1
          history:
            - col1
          fields:
            - !Field
              name:       col1
              desc:       test column 1
              type:       MSB_U16
              enum:
                a: testa
            - !Field
              name: SampleTime
              type: TIME64
        """
        with open(self.test_yaml_file, 'wt') as out:
            out.write(yaml_doc)

        tlmdict = tlm.TlmDict(self.test_yaml_file)

        sqlbackend = db.SQLiteBackend()
        sqlbackend._conn = mock.MagicMock()

        pkt_defn = tlmdict['Packet1']
        pkt = tlm.Packet(pkt_defn, bytearray(range(pkt_defn.nbytes)))

        sqlbackend.insert(pkt)
        sqlbackend._conn.execute.assert_called_with(
            'INSERT INTO "Packet1" (col1, SampleTime) VALUES (?, ?)',
            [1, 33752069.10112411])

        os.remove(self.test_yaml_file)
Example #4
    def process(self, input_data, topic=None):
        """
        Process received input message.

        This plugin should be configured to only receive telemetry.

        The received message is expected to be a tuple of the form produced
        by AITPacketHandler.
        """
        processed = False

        try:
            pkl_load = pickle.loads(input_data)
            pkt_id, pkt_data = int(pkl_load[0]), pkl_load[1]
            packet_def = self._get_tlm_packet_def(pkt_id)
            if packet_def:
                packet_def = self._uidToPktDefMap[pkt_id]
                tlm_packet = tlm.Packet(packet_def, data=bytearray(pkt_data))
                self._process_telem_msg(tlm_packet)
                processed = True
            else:
                log.error(
                    "OpenMCT Plugin received telemetry message with unknown "
                    f"packet id {pkt_id}.  Skipping input...")
        except Exception as e:
            log.error(f"OpenMCT Plugin: {e}")
            log.error(
                "OpenMCT Plugin received input_data that it is unable to "
                "process. Skipping input ...")

        return processed
Example #5
def get_packet_and_defn():
    first_stream = ait.config.get('gui.telemetry')[0]
    stream = cfg.AitConfig(config=first_stream).get('stream')
    name = stream.get('name', '<unnamed>')
    pkt_defn = tlm.getDefaultDict().get(name, None)
    pkt = tlm.Packet(pkt_defn)

    return pkt_defn, pkt
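
A hedged sketch of the config.yaml fragment get_packet_and_defn() appears to read: a gui.telemetry list whose first entry carries a stream whose name matches a !Packet in the telemetry dictionary. The key names are inferred from the lookups above, not from a documented schema.

# Hedged sketch; key names inferred from the lookups in get_packet_and_defn().
gui_telemetry_yaml = """
gui:
    telemetry:
        - stream:
            name: P        # must match a !Packet name in tlm.yaml
"""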
Example #6
    def create_packet_from_result(cls, packet_name, data):
        try:
            pkt_defn = tlm.getDefaultDict()[packet_name]
        except KeyError:
            log.error("Unknown packet name {}. Unable to unpack SQLite result".
                      format(packet_name))
            return None

        return tlm.Packet(pkt_defn, data=data)
Example #7
    def test_derivation_definition(self):
        tlmdict = tlm.TlmDict(self.test_yaml_deriv1)
        pktdefn = tlmdict["Test_Packet"]
        deriv1 = pktdefn.derivations[0]
        assert deriv1.name == "deriv_1"
        assert type(deriv1.equation) == tlm.PacketExpression
        assert deriv1.equation.toJSON() == "field_1 + field_2"

        pkt = tlm.Packet(pktdefn)
        pkt.field_1 = 1
        pkt.field_2 = 2
        assert pkt.deriv_1 == 3
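
A hedged tlm.yaml sketch that would line up with test_derivation_definition above. The !Derivation tag and the equation key are inferred from the attributes the test reads (pktdefn.derivations, deriv1.equation) and should be treated as assumptions.

# Hedged sketch of the dictionary this test appears to load.
yaml_doc = """
- !Packet
  name: Test_Packet
  fields:
    - !Field
      name: field_1
      type: MSB_U16
    - !Field
      name: field_2
      type: MSB_U16
  derivations:
    - !Derivation
      name: deriv_1
      equation: field_1 + field_2
"""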
Example #8
    def process(self, input_data, topic=None, **kwargs):
        try:
            load = pickle.loads(input_data)
            pkt_id, pkt_data = int(load[0]), load[1]
            packet = self.packet_dict[pkt_id]
            decoded = tlm.Packet(packet, data=bytearray(pkt_data))
        except Exception as e:
            log.error('TelemetryLimitMonitor: {}'.format(e))
            log.error(
                'TelemetryLimitMonitor received input_data that it is unable to process. Skipping input ...'
            )
            return

        if packet.name in self.limit_dict:
            for field, defn in self.limit_dict[packet.name].items():
                v = decoded._getattr(field)

                if packet.name not in self.limit_trip_repeats:
                    self.limit_trip_repeats[packet.name] = {}

                if field not in self.limit_trip_repeats[packet.name]:
                    self.limit_trip_repeats[packet.name][field] = 0

                if defn.error(v):
                    msg = 'Field {} error out of limit with value {}'.format(
                        field, v)
                    log.error(msg)

                    self.limit_trip_repeats[packet.name][field] += 1
                    repeats = self.limit_trip_repeats[packet.name][field]

                    if (repeats == self.notif_thrshld or
                        (repeats > self.notif_thrshld and
                         (repeats - self.notif_thrshld) % self.notif_freq
                         == 0)):
                        notify.trigger_notification('limit-error', msg)

                elif defn.warn(v):
                    msg = 'Field {} warning out of limit with value {}'.format(
                        field, v)
                    log.warn(msg)

                    self.limit_trip_repeats[packet.name][field] += 1
                    repeats = self.limit_trip_repeats[packet.name][field]

                    if (repeats == self.notif_thrshld or
                        (repeats > self.notif_thrshld and
                         (repeats - self.notif_thrshld) % self.notif_freq
                         == 0)):
                        notify.trigger_notification('limit-warn', msg)

                else:
                    self.limit_trip_repeats[packet.name][field] = 0
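
The repeat counter above drives a simple cadence: notify on the notif_thrshld-th consecutive out-of-limit sample and then every notif_freq samples after that. A standalone check of that condition with illustrative thresholds:

notif_thrshld, notif_freq = 3, 2    # illustrative values

def should_notify(repeats):
    return (repeats == notif_thrshld or
            (repeats > notif_thrshld and
             (repeats - notif_thrshld) % notif_freq == 0))

print([r for r in range(1, 10) if should_notify(r)])   # [3, 5, 7, 9]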
Example #9
def testArray():
    """
    # This test will use the following TLM dictionary definitions:

    - !Packet
      name: P
      fields:
        - !Field
          name: A
          type: MSB_U16[3]

    """
    defn = tlm.TlmDict(testArray.__doc__)["P"]
    packet = tlm.Packet(defn, struct.pack(">HHH", 1, 2, 3))

    assert packet.A == [1, 2, 3]
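
The MSB_U16[3] field occupies six big-endian bytes; building the same payload by hand shows the layout the packet parses (a sketch, independent of AIT).

import struct

payload = b"".join(n.to_bytes(2, "big") for n in (1, 2, 3))
assert payload == struct.pack(">HHH", 1, 2, 3)   # b'\x00\x01\x00\x02\x00\x03'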
Example #10
    def test_sqlite_insert(self, importlib_mock):
        yaml_doc = """
        - !Packet
          name: Packet1
          history:
            - col1
          fields:
            - !Field
              name:       col1
              desc:       test column 1
              type:       MSB_U16
              enum:
                a: testa
            - !Field
              name: SampleTime
              type: TIME64
        """
        with open(self.test_yaml_file, "wt") as out:
            out.write(yaml_doc)

        tlmdict = tlm.TlmDict(self.test_yaml_file)

        sqlbackend = db.SQLiteBackend()
        sqlbackend._conn = mock.MagicMock()

        pkt_defn = tlmdict["Packet1"]
        pkt = tlm.Packet(pkt_defn, bytearray(range(pkt_defn.nbytes)))

        # Note: we can't fully test this call given the modification to
        # the packet data on insert. Better than nothing I suppose.

        # Test without time
        sqlbackend.insert(pkt)
        assert ('INSERT INTO "Packet1" (PKTDATA) VALUES (?)'
                in sqlbackend._conn.execute.call_args[0])

        sqlbackend._conn.reset_mock()

        # Test with time
        now = dt.datetime.utcnow()
        sqlbackend.insert(pkt, time=now)
        assert ('INSERT INTO "Packet1" (PKTDATA, time) VALUES (?, ?)'
                in sqlbackend._conn.execute.call_args[0])
        assert (now.strftime(
            dmc.RFC3339_Format) == sqlbackend._conn.execute.call_args[0][1][1])

        os.remove(self.test_yaml_file)
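
For completeness, a PKTDATA blob fetched back out of SQLite can be rebuilt into a Packet the same way create_packet_from_result does in Example #6. A minimal sketch, continuing with pkt and pkt_defn from the test above and with row_blob standing in for the fetched column value:

row_blob = bytes(pkt._data)                        # stand-in for a fetched PKTDATA value
restored = tlm.Packet(pkt_defn, data=bytearray(row_blob))
assert restored._data == pkt._data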
Example #11
    def process(self, input_data, topic=None, **kwargs):
        """
        Splits tuple received from PacketHandler into packet UID and packet message.
        Decodes packet and inserts into database backend.
        Logs any exceptions raised.

        Params:
            input_data:  message received from inbound stream through PacketHandler
            topic:       name of inbound stream message received from
            **kwargs:    any args required for connecting to the backend
        """
        try:
            load = pickle.loads(input_data)
            uid, pkt = int(load[0]), load[1]
            defn = self.packet_dict[uid]
            decoded = tlm.Packet(defn, data=bytearray(pkt))
            self.dbconn.insert(decoded, **kwargs)
        except Exception as e:
            log.error('Data archival failed with error: {}.'.format(e))
Example #12
def main():
    log.begin()

    parser = argparse.ArgumentParser(description=__doc__)

    parser.add_argument("--all", action="store_true", help="output all fields/values")

    parser.add_argument(
        "--csv",
        default="output.csv",
        metavar="</path/to/output/csv>",
        help="Output as CSV with filename",
    )

    parser.add_argument(
        "--fields",
        metavar="</path/to/fields/file>",
        help="path to the file containing all fields to query, separated by newline.",
    )

    parser.add_argument(
        "--packet", required=True, help="Packet name from telemetry dictionary specified in config file."
    )

    parser.add_argument(
        "--time_field",
        help=(
            "Time field to use for time range comparisons. Ground receipt time "
            "will be used if nothing is specified."
        ),
    )

    parser.add_argument(
        "--stime",
        help=(
            "Datetime in file to start collecting the data values. Defaults to "
            "beginning of pcap. Expected format: YYYY-MM-DDThh:mm:ssZ"
        ),
    )

    parser.add_argument(
        "--etime",
        help=(
            "Datetime in file to end collecting the data values. Defaults to end "
            "of pcap. Expected format: YYYY-MM-DDThh:mm:ssZ"
        ),
    )

    parser.add_argument(
        "pcap", nargs="*", help=("PCAP file(s) containing telemetry packets")
    )

    args = parser.parse_args()

    args.ground_time = True
    if args.time_field is not None:
        args.ground_time = False

    tlmdict = tlm.getDefaultDict()
    defn = None

    try:
        if tlmdict is not None:
            defn = tlmdict[args.packet]
    except KeyError:
        log.error('Packet "%s" not defined in telemetry dictionary.' % args.packet)
        log.end()
        sys.exit(2)

    if not args.all and args.fields is None:
        log.error(
            "Must provide fields file with --fields or specify that all fields should be queried with --all"
        )
        log.end()
        sys.exit(2)

    if args.all:
        fields = [flddefn.name for flddefn in defn.fields]
    else:
        # Parse the fields file into a list
        with open(args.fields, "r") as stream:
            fields = [fldname.strip() for fldname in stream.readlines()]

    not_found = False

    # TODO Rework this into the CSV generation. Not here.
    # Duplicating effort
    for fldname in fields:
        raw = fldname.split(".")
        if fldname not in defn.fieldmap and (
            len(raw) == 2 and raw[0] != "raw" or raw[1] not in defn.fieldmap
        ):
            not_found = True
            log.error('No telemetry point named "%s"' % fldname)

    if not_found:
        log.end()
        sys.exit(2)

    if args.stime:
        start = datetime.strptime(args.stime, dmc.ISO_8601_Format)
    else:
        start = dmc.GPS_Epoch

    if args.etime:
        stop = datetime.strptime(args.etime, dmc.ISO_8601_Format)
    else:
        stop = datetime.utcnow()

    # Append time to beginning of each row
    if not args.ground_time:
        fields.insert(0, args.time_field)
    else:
        fields.insert(0, "Ground Receipt Time")

    csv_file = None
    csv_writer = None
    npackets = 0
    if args.csv:
        csv_file = open(args.csv, "w")
        csv_writer = csv.writer(csv_file)

    output(csv_writer, fields)

    # If we're comparing off ground receipt time we need to drop the header label to avoid
    # indexing errors when processing the fields.
    if args.ground_time:
        fields = fields[1:]

    rowcnt = 0

    for filename in args.pcap:
        log.debug("Processing %s" % filename)

        with pcap.open(filename, "rb") as stream:
            header, data = stream.read()

            while data:
                packet = tlm.Packet(defn, data)

                comp_time = (
                    header.timestamp
                    if args.ground_time
                    else getattr(packet, args.time_field)
                )
                if start < comp_time < stop:
                    row = []
                    for field in fields:
                        try:
                            # check if raw value requested
                            _raw = False
                            names = field.split(".")
                            if len(names) == 2 and names[0] == "raw":
                                field = names[1]
                                _raw = True

                            field_val = packet._getattr(field, raw=_raw)

                            if hasattr(field_val, "name"):
                                field_val = field_val.name
                            else:
                                field_val = str(field_val)

                        except KeyError:
                            log.debug("%s not found in Packet" % field)
                            field_val = None
                        except ValueError:
                            # enumeration not found. just get the raw value
                            field_val = packet._getattr(field, raw=True)

                        row.append(field_val)

                    if args.ground_time:
                        row = [comp_time] + row

                    rowcnt += 1
                    output(csv_writer, row)

                npackets += 1
                header, data = stream.read()

    log.debug("Parsed %s packets." % npackets)

    csv_file.close()

    if rowcnt == 0:
        os.remove(args.csv)

    log.end()
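
Both CSV scripts in this listing call an output() helper that is not shown. A minimal sketch of what it is assumed to do: write through the CSV writer when one is configured, otherwise fall back to stdout.

def output(csv_writer, row):
    # Hedged sketch of the helper assumed by the scripts above.
    if csv_writer is not None:
        csv_writer.writerow(row)
    else:
        print(", ".join(str(v) for v in row))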
Example #13
    def mimic_tlm(self, ait_tlm_pkt_name, ait_tlm_pkt_fill=None):
        """Used for debugging, creates an instance of a packet based on
        packet name, and fills it with zero data.
        Special case for '1553_HS_Packet' which will get random number
        data fills.
        If the HTTP request query includes a value for 'repeat', this
        will continue emitting telemetry.
        """

        # HTTP query option; if it is set to anything, consider it true
        self._debugMimicRepeat = len(str(bottle.request.query.repeat)) > 0

        # This is helpful for testing: simulate TLM via a REST call
        # instead of relying on actual telemetry
        ait_pkt_defn = None
        if ait_tlm_pkt_name:
            ait_pkt_defn = tlm.getDefaultDict()[ait_tlm_pkt_name]
        else:
            ait_pkt_defn = list(tlm.getDefaultDict().values())[0]

        # Create the expected message format
        pkt_size_bytes = ait_pkt_defn.nbytes

        repeat_str = " REPEATED " if self._debugMimicRepeat else " a single "
        info_msg = ("Received request to mimic" + repeat_str +
                    "telemetry packet for " + ait_pkt_defn.name)
        self.dbg_message(info_msg)

        # Create a binary array of size filled with 0
        dummy_data = bytearray(pkt_size_bytes)

        info_msg = ""

        while True:

            # Special handling for the simple integer-based packet; all
            # others remain zero-filled
            if ait_pkt_defn.name == "1553_HS_Packet":
                hs_packet = struct.Struct(">hhhhh")
                random_num = random.randint(1, 100)
                dummy_data = hs_packet.pack(random_num, random_num, random_num,
                                            random_num, random_num)

            tlm_pkt = tlm.Packet(ait_pkt_defn, data=bytearray(dummy_data))
            self._process_telem_msg(tlm_pkt)

            info_msg = (
                "AIT OpenMct Plugin submitted mimicked telemetry for " +
                ait_pkt_defn.name + " (" + str(datetime.datetime.now()) +
                ") to telem queue")
            self.dbg_message(info_msg)

            # sleep if mimic on
            if self._debugMimicRepeat:
                gsleep(5)

            # either it was immediate or we woke up, check break condition
            if not self._debugMimicRepeat:
                break

        # Return last status message as result to client
        return info_msg
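
The special-case fill above assumes 1553_HS_Packet is five big-endian 16-bit words. A quick standalone check of that packing:

import struct

hs_packet = struct.Struct(">hhhhh")
assert hs_packet.size == 10                       # 5 fields x 2 bytes each
assert struct.unpack(">hhhhh", hs_packet.pack(7, 7, 7, 7, 7)) == (7, 7, 7, 7, 7)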
Example #14
def main():
    dn_limits = {}
    eu_limits = {}
    eu_values = {}

    ld = limits.getDefaultDict()
    td = tlm.getDefaultDict()

    all_vals = [[
        "Telem Point",
        "EU lower.error",
        "EU lower.warn",
        "EU upper.warn",
        "EU upper.error",
        "DN lower.error",
        "DN lower.warn",
        "DN upper.warn",
        "DN upper.error",
        "DN to EU LE",
        "DN to EU LW",
        "DN to EU UW",
        "DN to EU UE",
    ]]

    for source in sorted(ld.keys(), key=lambda x: x.split(".")[1]):
        log.info(f"Processing {source}")
        pkt_name, name = source.split(".")

        # Don't support limits specifying individual values. This is usually
        # used to specify enumerations that aren't valid and we don't properly
        # handle those cases.
        if ld[source].value is not None:
            log.warn(f'Skipping unsupported "value" limit {source}')
            continue

        dn_limits.setdefault(name, [None, None, None, None])
        eu_limits.setdefault(name, [None, None, None, None])
        eu_values.setdefault(name, [None, None, None, None])

        if ld[source].lower is not None:
            try:
                eu_limits[name][0] = ld[source].lower.error
            except AttributeError:
                pass

            try:
                eu_limits[name][1] = ld[source].lower.warn
            except AttributeError:
                pass

        if ld[source].upper is not None:
            try:
                eu_limits[name][2] = ld[source].upper.warn
            except AttributeError:
                pass

            try:
                eu_limits[name][3] = ld[source].upper.error
            except AttributeError:
                pass

        values = []

        defn = td[pkt_name]
        data = bytearray(defn.nbytes)
        packet = tlm.Packet(defn, data)
        for dn in range(65536):
            setattr(packet, name, dn)
            eu = getattr(packet, name)

            if eu is not None:
                values.append((dn, eu))

        values.sort(key=lambda pair: pair[1])

        for dn, eu in values:
            for n in range(4):
                if (eu_limits[name][n] is not None
                        and dn_limits[name][n] is None
                        and eu > eu_limits[name][n]):
                    value = dn - 1 if dn > 0 else 0
                    dn_limits[name][n] = value

                    setattr(packet, name, value)
                    eu_values[name][n] = getattr(packet, name)

            if all(dn_limits[name][n] is not None for n in range(4)):
                break

        values = [source]
        values.extend(map(str, eu_limits[name]))
        values.extend(map(str, dn_limits[name]))
        values.extend(map(str, eu_values[name]))
        all_vals.append(values)

    s = [[str(e) for e in row] for row in all_vals]
    lens = [max(map(len, col)) for col in zip(*s)]
    fmt = "\t".join("{{:{}}}".format(x) for x in lens)
    table = [fmt.format(*row) for row in s]
    print("\n".join(table))
Example #15
    def handle(self, data, address):
        self._pktbuf.appendleft(tlm.Packet(self._defn, data))
Example #16
    def create_packets_from_results(self, packet_name, result_set):
        ''' Generate AIT Packets from an InfluxDB query ResultSet

        Extract Influx DB query results into one packet per result entry. This
        assumes that telemetry data was inserted in the format generated by
        :func:`InfluxDBBackend.insert`. Complex types such as CMD16 and EVR16 are
        evaluated if they can be properly encoded from the raw value in the
        query result. If there is no opcode / EVR-code for a particular raw
        value the value is skipped (and thus defaulted to 0).

        Arguments
            packet_name (string)
                The name of the AIT Packet to create from each result entry

            result_set (influxdb.resultset.ResultSet)
                The query ResultSet object to convert into packets

        Returns
            A list of packets extracted from the ResultSet object or None if
            an invalid packet name is supplied.
                
        '''
        try:
            pkt_defn = tlm.getDefaultDict()[packet_name]
        except KeyError:
            log.error('Unknown packet name {}. Unable to unpack ResultSet'.format(packet_name))
            return None

        pkt = tlm.Packet(pkt_defn)

        pkts = []
        for r in result_set.get_points():
            new_pkt = tlm.Packet(pkt_defn)

            for f, f_defn in pkt_defn.fieldmap.items():
                field_type_name = f_defn.type.name
                if field_type_name == 'CMD16':
                    if cmd.getDefaultDict().opcodes.get(r[f], None):
                        setattr(new_pkt, f, cmd_def.name)
                elif field_type_name == 'EVR16':
                    if evr.getDefaultDict().codes.get(r[f], None):
                        setattr(new_pkt, f, r[f])
                elif field_type_name == 'TIME8':
                    setattr(new_pkt, f, r[f] / 256.0)
                elif field_type_name == 'TIME32':
                    new_val = dmc.GPS_Epoch + dt.timedelta(seconds=r[f])
                    setattr(new_pkt, f, new_val)
                elif field_type_name == 'TIME40':
                    sec = int(r[f])
                    microsec = r[f] % 1 * 1e6
                    new_val = dmc.GPS_Epoch + dt.timedelta(seconds=sec, microseconds=microsec)
                    setattr(new_pkt, f, new_val)
                elif field_type_name == 'TIME64':
                    sec = int(r[f])
                    microsec = r[f] % 1 * 1e6
                    new_val = dmc.GPS_Epoch + dt.timedelta(seconds=sec, microseconds=microsec)
                    setattr(new_pkt, f, new_val)
                else:
                    try:
                        setattr(new_pkt, f, r[f])
                    except KeyError:
                        log.info('Field not found in query results: {}. Skipping ...'.format(f))

            pkts.append(new_pkt)
        return pkts
Example #17
def main():
    tlmdict = tlm.getDefaultDict()
    pnames = list(tlmdict.keys())
    ap = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    arguments = {
        '--packet': {
            'type': str,
            'choices': pnames,
            'default': pnames[0] if len(pnames) > 0 else None,
            'help': 'Type of packets (!Packet name in tlm.yaml) in file',
            'required': len(pnames) > 1,
        },
        '--database': {
            'default':
            ait.config.get('database.name'),
            'help': ('Name of database in which to insert packets (may '
                     'also be specified in config.yaml database.name)'),
            'required':
            ait.config.get('database.name') is None
        },
        '--backend': {
            'default':
            'sqlite',
            'choices': ['sqlite', 'influx'],
            'action':
            'store',
            'help': 'Database backend to use when inserting packets'
        },
        '--use-current-time': {
            'action':
            'store_true',
            'help': ('Use current time stamps when inserting packets instead '
                     'of ground receipt time (or the time written in the '
                     'PCAP header).')
        },
        'file': {
            'nargs': '+',
            'help': 'File(s) containing telemetry packets'
        }
    }

    for name, params in arguments.items():
        ap.add_argument(name, **params)

    args = ap.parse_args()

    log.begin()

    try:
        npackets = 0
        dbconn = None
        defn = tlm.getDefaultDict()[args.packet]
        nbytes = defn.nbytes

        if args.backend == 'sqlite':
            dbconn = db.SQLiteBackend()
        elif args.backend == 'influx':
            dbconn = db.InfluxDBBackend()

        if args.backend == 'sqlite' and (args.database == ':memory:'
                                         or not os.path.exists(args.database)):
            dbconn.create(database=args.database)
        else:
            dbconn.connect(database=args.database)

        for filename in args.file:
            log.info('Processing %s' % filename)
            with pcap.open(filename) as stream:
                for header, pkt_data in stream:
                    try:
                        packet = tlm.Packet(defn, pkt_data)

                        time = header.timestamp
                        if args.use_current_time:
                            time = None

                        dbconn.insert(packet, time=time)
                        npackets += 1
                    except struct.error:
                        log.error(
                            "Unable to unpack data into packet. Skipping ...")

    except KeyboardInterrupt:
        log.info('Received Ctrl-C.  Stopping database insert.')

    except IOError as e:
        log.error(str(e))

    finally:
        dbconn.close()

    values = npackets, args.packet, args.database
    log.info('Inserted %d %s packets into database %s.' % values)

    log.end()
Example #18
def main():
    log.begin()

    description = """Parses 1553 telemetry into CSV file."""

    arguments = {
        '--all': {
            'action': 'store_true',
            'help': 'output all fields/values',
        },
        '--csv': {
            'type': str,
            'default': 'output.csv',
            'metavar': '</path/to/output/csv>',
            'help': 'Output as CSV with filename'
        },
        '--fields': {
            'type': str,
            'metavar': '</path/to/fields/file>',
            'help':
            'file containing all fields to query, separated by newline.'
        },
        '--packet': {
            'type': str,
            'required': True,
            'help': 'Packet name from telemetry dictionary specified in config file.'
        },
        '--time_field': {
            'type':
            str,
            'help':
            'Time field to use for time range comparisons. Ground receipt time will be used if nothing is specified.'
        },
        '--stime': {
            'type':
            str,
            'help':
            'Datetime in file to start collecting the data values. Defaults to beginning of pcap. Expected format: YYYY-MM-DDThh:mm:ssZ'
        },
        '--etime': {
            'type':
            str,
            'help':
            'Datetime in file to end collecting the data values. Defaults to end of pcap. Expected format: YYYY-MM-DDThh:mm:ssZ'
        }
    }

    arguments['pcap'] = {
        'nargs': '*',
        'help': 'PCAP file(s) containing telemetry packets'
    }

    args = gds.arg_parse(arguments, description)

    args.ground_time = True
    if args.time_field is not None:
        args.ground_time = False

    tlmdict = tlm.getDefaultDict()
    defn = None

    try:
        if tlmdict is not None:
            defn = tlmdict[args.packet]
    except KeyError:
        log.error('Packet "%s" not defined in telemetry dictionary.' %
                  args.packet)
        gds.exit(2)

    if not args.all and args.fields is None:
        log.error(
            'Must provide fields file with --fields or specify that all fields should be queried with --all'
        )
        gds.exit(2)

    if args.all:
        fields = [flddefn.name for flddefn in defn.fields]
    else:
        # Parse the fields file into a list
        with open(args.fields, 'r') as stream:
            fields = [fldname.strip() for fldname in stream.readlines()]

    not_found = False

    # TODO Rework this into the CSV generation. Not here.
    # Duplicating effort
    for fldname in fields:
        raw = fldname.split('.')
        if fldname not in defn.fieldmap and (len(raw) == 2 and raw[0] != 'raw'
                                             or raw[1] not in defn.fieldmap):
            not_found = True
            log.error('No telemetry point named "%s"' % fldname)

    if not_found:
        gds.exit(2)

    if args.stime:
        start = datetime.strptime(args.stime, dmc.ISO_8601_Format)
    else:
        start = dmc.GPS_Epoch

    if args.etime:
        stop = datetime.strptime(args.etime, dmc.ISO_8601_Format)
    else:
        stop = datetime.utcnow()

    # Append time to beginning of each row
    if not args.ground_time:
        fields.insert(0, args.time_field)
    else:
        fields.insert(0, 'Ground Receipt Time')

    csv_file = None
    csv_writer = None
    npackets = 0
    if args.csv:
        csv_file = open(args.csv, 'w')
        csv_writer = csv.writer(csv_file)

    output(csv_writer, fields)

    # If we're comparing off ground receipt time we need to drop the header label to avoid
    # indexing errors when processing the fields.
    if args.ground_time:
        fields = fields[1:]

    rowcnt = 0

    for filename in args.pcap:
        log.debug('Processing %s' % filename)

        with pcap.open(filename, 'rb') as stream:
            header, data = stream.read()

            while data:
                packet = tlm.Packet(defn, data)

                comp_time = header.timestamp if args.ground_time else getattr(
                    packet, args.time_field)
                if start < comp_time < stop:
                    row = []
                    for field in fields:
                        try:
                            # check if raw value requested
                            _raw = False
                            names = field.split('.')
                            if len(names) == 2 and names[0] == 'raw':
                                field = names[1]
                                _raw = True

                            fieldVal = packet._getattr(field, raw=_raw)

                            if hasattr(fieldVal, 'name'):
                                fieldVal = fieldVal.name
                            else:
                                fieldVal = str(fieldVal)

                        except KeyError:
                            log.debug('%s not found in Packet' % field)
                            fieldVal = None
                        except ValueError:
                            # enumeration not found. just get the raw value
                            fieldVal = packet._getattr(field, raw=True)

                        row.append(fieldVal)

                    if args.ground_time:
                        row = [comp_time] + row

                    rowcnt += 1
                    output(csv_writer, row)

                npackets += 1
                header, data = stream.read()

    log.debug('Parsed %s packets.' % npackets)

    csv_file.close()

    if rowcnt == 0:
        os.remove(args.csv)

    log.end()
Example #19
def main():
    tlmdict = tlm.getDefaultDict()
    pnames = list(tlmdict.keys())
    ap = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    arguments = {
        "--packet": {
            "type": str,
            "choices": pnames,
            "default": pnames[0] if len(pnames) > 0 else None,
            "help": "Type of packets (!Packet name in tlm.yaml) in file",
            "required": len(pnames) > 1,
        },
        "--database": {
            "default":
            ait.config.get("database.dbname"),
            "help": ("Name of database in which to insert packets (may "
                     "also be specified in config.yaml database.name)"),
            "required":
            ait.config.get("database.dbname") is None,
        },
        "--backend": {
            "default":
            "sqlite",
            "choices": ["sqlite", "influx"],
            "action":
            "store",
            "help": ("Name of database in which to insert packets (may "
                     "also be specified in config.yaml database.name)"),
        },
        "--use-current-time": {
            "action":
            "store_true",
            "help": ("Use current time stamps when insert packets instead "
                     "of ground receipt time (or the time written in the "
                     "PCAP header)."),
        },
        "file": {
            "nargs": "+",
            "help": "File(s) containing telemetry packets"
        },
    }

    for name, params in arguments.items():
        ap.add_argument(name, **params)

    args = ap.parse_args()

    log.begin()

    try:
        npackets = 0
        dbconn = None
        defn = tlm.getDefaultDict()[args.packet]

        if args.backend == "sqlite":
            dbconn = db.SQLiteBackend()
        elif args.backend == "influx":
            dbconn = db.InfluxDBBackend()

        if args.backend == "sqlite" and (args.database == ":memory:"
                                         or not os.path.exists(args.database)):
            dbconn.create(database=args.database)
        else:
            dbconn.connect(database=args.database)

        for filename in args.file:
            log.info("Processing %s" % filename)
            with pcap.open(filename) as stream:
                for header, pkt_data in stream:

                    try:
                        packet = tlm.Packet(defn, pkt_data)

                        time = header.timestamp
                        if args.use_current_time:
                            time = None

                        dbconn.insert(packet, time=time)
                        npackets += 1
                    except struct.error:
                        log.error(
                            "Unable to unpack data into packet. Skipping ...")

    except KeyboardInterrupt:
        log.info("Received Ctrl-C.  Stopping database insert.")

    except IOError as e:
        log.error(str(e))

    finally:
        dbconn.close()

    values = npackets, args.packet, args.database
    log.info("Inserted %d %s packets into database %s." % values)

    log.end()
Example #20
    def test_influx_insert(self, importlib_mock):
        yaml_doc = """
        - !Packet
          name: Packet1
          history:
            - col1
          fields:
            - !Field
              name:       col1
              desc:       test column 1
              type:       MSB_U16
              enum:
                a: testa
            - !Field
              name: SampleTime
              type: TIME64
            - !Field
              name: SampleTime32
              type: TIME32
            - !Field
              name: SampleTime40
              type: TIME40
            - !Field
              name: SampleEvr16
              type: EVR16
            - !Field
              name: SampleCmd16
              type: CMD16
        """
        with open(self.test_yaml_file, "wt") as out:
            out.write(yaml_doc)

        tlmdict = tlm.TlmDict(self.test_yaml_file)

        sqlbackend = db.InfluxDBBackend()
        sqlbackend._conn = mock.MagicMock()

        pkt_defn = tlmdict["Packet1"]
        pkt = tlm.Packet(pkt_defn, bytearray(range(pkt_defn.nbytes)))

        now = dt.datetime.utcnow()
        sqlbackend.insert(pkt, time=now)
        sqlbackend._conn.write_points.assert_called_with([{
            "measurement":
            "Packet1",
            "time":
            now.strftime(dmc.RFC3339_Format),
            "tags": {},
            "fields": {
                "col1": 1,
                "SampleTime": 33752069.10112411,
                "SampleTime32": 168496141,
                "SampleTime40": 235868177.0703125,
                "SampleCmd16": 5398,
                "SampleEvr16": 4884,
            },
        }])
        sqlbackend._conn.reset_mock()

        # Insert without a timestamp
        sqlbackend.insert(pkt)
        sqlbackend._conn.write_points.assert_called_with([{
            "measurement": "Packet1",
            "tags": {},
            "fields": {
                "col1": 1,
                "SampleTime": 33752069.10112411,
                "SampleTime32": 168496141,
                "SampleTime40": 235868177.0703125,
                "SampleCmd16": 5398,
                "SampleEvr16": 4884,
            },
        }])
        sqlbackend._conn.reset_mock()

        # Insert with additional tags
        sqlbackend.insert(pkt, tags={"testNum": "3"})
        sqlbackend._conn.write_points.assert_called_with([{
            "measurement": "Packet1",
            "tags": {
                "testNum": "3"
            },
            "fields": {
                "col1": 1,
                "SampleTime": 33752069.10112411,
                "SampleTime32": 168496141,
                "SampleTime40": 235868177.0703125,
                "SampleCmd16": 5398,
                "SampleEvr16": 4884,
            },
        }])
        sqlbackend._conn.reset_mock()

        os.remove(self.test_yaml_file)
Example #21
    def test_influx_insert(self, importlib_mock):
        yaml_doc = """
        - !Packet
          name: Packet1
          history:
            - col1
          fields:
            - !Field
              name:       col1
              desc:       test column 1
              type:       MSB_U16
              enum:
                a: testa
            - !Field
              name: SampleTime
              type: TIME64
            - !Field
              name: SampleTime32
              type: TIME32
            - !Field
              name: SampleTime40
              type: TIME40
            - !Field
              name: SampleEvr16
              type: EVR16
            - !Field
              name: SampleCmd16
              type: CMD16
        """
        with open(self.test_yaml_file, 'wt') as out:
            out.write(yaml_doc)

        tlmdict = tlm.TlmDict(self.test_yaml_file)

        sqlbackend = db.InfluxDBBackend()
        sqlbackend._conn = mock.MagicMock()

        pkt_defn = tlmdict['Packet1']
        pkt = tlm.Packet(pkt_defn, bytearray(range(pkt_defn.nbytes)))

        now = dt.datetime.utcnow()
        sqlbackend.insert(pkt, time=now)
        sqlbackend._conn.write_points.assert_called_with([{
            'measurement':
            'Packet1',
            'time':
            now.strftime('%Y-%m-%dT%H:%M:%S'),
            'tags': {},
            'fields': {
                'col1': 1,
                'SampleTime': 33752069.10112411,
                'SampleTime32': 168496141,
                'SampleTime40': 235868177.0703125,
                'SampleCmd16': 5398,
                'SampleEvr16': 4884
            }
        }])
        sqlbackend._conn.reset_mock()

        # Insert without a timestamp
        sqlbackend.insert(pkt)
        sqlbackend._conn.write_points.assert_called_with([{
            'measurement': 'Packet1',
            'tags': {},
            'fields': {
                'col1': 1,
                'SampleTime': 33752069.10112411,
                'SampleTime32': 168496141,
                'SampleTime40': 235868177.0703125,
                'SampleCmd16': 5398,
                'SampleEvr16': 4884
            }
        }])
        sqlbackend._conn.reset_mock()

        # Insert with additional tags
        sqlbackend.insert(pkt, tags={'testNum': '3'})
        sqlbackend._conn.write_points.assert_called_with([{
            'measurement': 'Packet1',
            'tags': {
                'testNum': '3'
            },
            'fields': {
                'col1': 1,
                'SampleTime': 33752069.10112411,
                'SampleTime32': 168496141,
                'SampleTime40': 235868177.0703125,
                'SampleCmd16': 5398,
                'SampleEvr16': 4884
            }
        }])
        sqlbackend._conn.reset_mock()

        os.remove(self.test_yaml_file)
Example #22
    def create_packet_from_result(cls, packet_id, result):
        """Create an AIT Packet from an InfluxDB query ResultSet item

        Extract Influx DB query results entry into an AIT packet. This
        assumes that telemetry data was inserted in the format generated by
        :func:`InfluxDBBackend.insert`. Complex types such as CMD16 and EVR16 are
        evaluated if they can be properly encoded from the raw value in the
        query result. If there is no opcode / EVR-code for a particular raw
        value the value is skipped (and thus defaulted to 0).

        TODO: Reevaluate this assumption that missing opcodes / EVR-codes should be
        left as 0 if a match isn't found in the dictionary.

        Arguments
            packet_id (string or PacketDefinition)
                The "id" for the packet to create. If packet_id is a string it must
                name a valid PacketDefinition in the telemetry dictionary. Otherwise,
                it must be a PacketDefinition.

            result (dict)
                The :class:`influxdb.resultset.ResultSet` entry from which values
                should be extracted to form the AIT packet


        Returns
            A :class:`ait.core.tlm.Packet` with values initialized from the values in the
            ResultSet entry. If a field cannot be located in the result entry it will be
            left as the default value in the Packet or set to None if it's a CMD / EVR type.
        """
        if isinstance(packet_id, str):
            try:
                pkt_defn = tlm.getDefaultDict()[packet_id]
            except KeyError:
                log.error(
                    f"Unknown packet name {packet_id} Unable to unpack ResultSet"
                )
                return None
        elif isinstance(packet_id, tlm.PacketDefinition):
            pkt_defn = packet_id
        else:
            log.error(
                f"Unknown packet id type {packet_id}. Unable to unpack ResultSet"
            )
            return None

        new_pkt = tlm.Packet(pkt_defn)
        cmd_dict = cmd.getDefaultDict()
        evr_dict = evr.getDefaultDict()

        for f, f_defn in pkt_defn.fieldmap.items():
            field_type_name = f_defn.type.name
            if field_type_name == "CMD16":
                if cmd_dict.opcodes.get(result[f], None):
                    cmd_def = cmd_dict.opcodes.get(result[f])
                    setattr(new_pkt, f, cmd_def.name)
            elif field_type_name == "EVR16":
                if evr_dict.codes.get(result[f], None):
                    evr_def = evr_dict.codes.get(result[f])
                    setattr(new_pkt, f, evr_def.name)
            elif field_type_name == "TIME8":
                setattr(new_pkt, f, result[f] / 256.0)
            elif field_type_name == "TIME32":
                new_val = dmc.GPS_Epoch + dt.timedelta(seconds=result[f])
                setattr(new_pkt, f, new_val)
            elif field_type_name == "TIME40":
                sec = int(result[f])
                microsec = result[f] % 1 * 1e6
                new_val = dmc.GPS_Epoch + dt.timedelta(seconds=sec,
                                                       microseconds=microsec)
                setattr(new_pkt, f, new_val)
            elif field_type_name == "TIME64":
                sec = int(result[f])
                microsec = result[f] % 1 * 1e6
                new_val = dmc.GPS_Epoch + dt.timedelta(seconds=sec,
                                                       microseconds=microsec)
                setattr(new_pkt, f, new_val)
            else:
                try:
                    setattr(new_pkt, f, result[f])
                except KeyError:
                    log.info(
                        "Field not found in query results {} Skipping ...".
                        format(f))

        return new_pkt