Example #1
    def create_packets_from_results(cls, packet_name, result_set):
        """Generate AIT Packets from a InfluxDB query ResultSet

        Extract InfluxDB query results into one packet per result entry. This
        assumes that telemetry data was inserted in the format generated by
        :func:`InfluxDBBackend.insert`. Complex types such as CMD16 and EVR16 are
        evaluated if they can be properly encoded from the raw value in the
        query result. If there is no opcode / EVR-code for a particular raw
        value the value is skipped (and thus defaulted to 0).

        Arguments
            packet_name (string)
                The name of the AIT Packet to create from each result entry

            result_set (influxdb.resultset.ResultSet)
                The query ResultSet object to convert into packets

        Returns
            A list of packets extracted from the ResultSet object or None if
            an invalid packet name is supplied.

        """
        try:
            tlm.getDefaultDict()[packet_name]
        except KeyError:
            log.error(
                "Unknown packet name {}. Unable to unpack ResultSet".format(
                    packet_name))
            return None

        return [
            InfluxDBBackend.create_packet_from_result(packet_name, r)
            for r in result_set.get_points()
        ]
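
A minimal usage sketch of the classmethod above. The backend connection and the packet name "Example_Packet" are illustrative assumptions, not part of the source:

    # Hypothetical usage; connect() is assumed to read host/port from AIT config.
    backend = InfluxDBBackend()
    backend.connect()

    result_set = backend._conn.query('SELECT * FROM "Example_Packet"')
    packets = InfluxDBBackend.create_packets_from_results("Example_Packet", result_set)
    if packets is not None:
        for pkt in packets:
            print(pkt)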
Example #2
    def test_query_packets_arg_handling(self, importlib_mock):
        sqlbackend = db.InfluxDBBackend()
        sqlbackend._conn = mock.MagicMock()

        # Test no packet provided handling
        #######################################
        start = dmc.GPS_Epoch
        end = dt.datetime.utcnow()

        sqlbackend.query_packets(start_time=start, end_time=end)

        packets = ", ".join([f'"{i}"' for i in tlm.getDefaultDict().keys()])
        start = start.strftime(dmc.RFC3339_Format)
        end = end.strftime(dmc.RFC3339_Format)
        query = (
            f"SELECT * FROM \"{packets}\" WHERE time >= '{start}' AND time <= '{end}'"
        )

        assert sqlbackend._conn.query.call_args[0][0] == query
        sqlbackend._conn.reset_mock()

        # Test no start time handling
        #######################################
        end = dt.datetime.utcnow()

        sqlbackend.query_packets(end_time=end)

        packets = ", ".join([f'"{i}"' for i in tlm.getDefaultDict().keys()])
        start = dmc.GPS_Epoch.strftime(dmc.RFC3339_Format)
        end = end.strftime(dmc.RFC3339_Format)
        query = (
            f"SELECT * FROM \"{packets}\" WHERE time >= '{start}' AND time <= '{end}'"
        )

        assert sqlbackend._conn.query.call_args[0][0] == query
        sqlbackend._conn.reset_mock()

        # Test no end time handling
        #######################################
        sqlbackend.query_packets()

        packets = ", ".join([f'"{i}"' for i in tlm.getDefaultDict().keys()])
        start = dmc.GPS_Epoch.strftime(dmc.RFC3339_Format)
        end = dt.datetime.utcnow()
        query = f"SELECT * FROM {packets} WHERE time >= '{start}' AND time <= '{end}'"

        exec_end_time = dt.datetime.strptime(
            sqlbackend._conn.query.call_args[0][0].split("'")[-2],
            dmc.RFC3339_Format)

        assert (end - exec_end_time).seconds < 1
        sqlbackend.query_packets()

        # Test bad packet name exception
        #######################################
        with pytest.raises(ValueError):
            sqlbackend.query_packets(packets=["not_a_valid_packet"])
Example #3
    def __init__(self, input_type=None, output_type=None, **kwargs):
        """
        Params:
            input_type:   (optional) Specifies expected input type, used to
                                     validate handler workflow. Defaults to None.
            output_type:  (optional) Specifies expected output type, used to
                                     validate handler workflow. Defaults to None.
            **kwargs:
                packet:   (required) Name of packet, present in default tlm dict.
        Raises:
            ValueError:    If packet is not present in kwargs.
                           If packet is specified but not present in default tlm dict.
        """
        super(PacketHandler, self).__init__(input_type, output_type)
        self.packet = kwargs.get("packet", None)

        if not self.packet:
            msg = 'PacketHandler: No packet name provided in handler config as key "packet"'
            raise ValueError(msg)

        tlm_dict = tlm.getDefaultDict()
        if self.packet not in tlm_dict:
            msg = "PacketHandler: Packet name {} not present in telemetry dictionary".format(
                self.packet)
            msg += " Available packet types are {}".format(tlm_dict.keys())
            raise ValueError(msg)

        self._pkt_defn = tlm_dict[self.packet]
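
Construction is keyword-driven; a short sketch (the packet name is a placeholder that must exist in the default telemetry dictionary):

    # Hypothetical packet name.
    handler = PacketHandler(packet="Example_Packet")

    # Omitting the required "packet" kwarg raises ValueError, per the check above.
    try:
        PacketHandler()
    except ValueError as err:
        print(err)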
Example #4
def handle():
    """Return JSON Telemetry dictionary
    **Example Response**:
    .. sourcecode: json
       {
           ExamplePacket1: {
               uid: 1,
               fields: {
                   Voltage_B: {
                       type: "MSB_U16",
                       bytes: [2, 3],
                       name: "Voltage_B",
                       desc: "Voltage B as a 14-bit DN. Conversion to engineering units is TBD."
                   },
                   Voltage_C: {
                       type: "MSB_U16",
                       bytes: [4, 5],
                       name: "Voltage_C",
                       desc: "Voltage C as a 14-bit DN. Conversion to engineering units is TBD."
                   },
                   ...
               }
           },
           ExamplePacket2: {
               ...
           }
       }
    """
    return json.dumps(tlm.getDefaultDict().toJSON())
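
Because the route returns a JSON string, a caller can round-trip it. A small sketch invoking the handler directly (packet names depend on the loaded tlm.yaml):

    tlm_json = json.loads(handle())
    for packet_name, packet_def in tlm_json.items():
        print(packet_name, packet_def["uid"])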
Example #5
    def create(self, **kwargs):
        ''' Create and connect to a SQLite instance
        Since creation and connection are intertwined in SQLite,
        unlike InfluxDB, a pre-existing connection is not required.
        A skeleton database is built with the telemetry dictionary
        since tables must be pre-created in SQLite.

        **Configuration Parameters**

        database name
          The database name to create. Passed as either the config
          key **database.dbname** or the kwargs argument
          **database**. Defaults to **ait.db**.

        tlmdict
            The :class:`ait.core.tlm.TlmDict` instance to use. Defaults to
            the currently configured telemetry dictionary.
        '''

        tlmdict = kwargs.get('tlmdict', tlm.getDefaultDict())

        dbname = ait.config.get('database.dbname', kwargs.get('database', None))
        if dbname:
            self._conn = self._backend.connect(dbname)
        else:
            self._conn = self._backend.connect('ait.db')

        for name, defn in tlmdict.items():
            self._create_table(defn)
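
A usage sketch, assuming the enclosing class is ait.core.db.SQLiteBackend and that "history.db" is an illustrative filename:

    backend = SQLiteBackend()
    backend.create(database="history.db")  # creates the file and one table per packet defn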
Example #6
    def __init__(self, input_type=None, output_type=None, **kwargs):
        """
        Params:
            input_type:   (optional) Specifies expected input type, used to
                                     validate handler workflow. Defaults to None.
            output_type:  (optional) Specifies expected output type, used to
                                     validate handler workflow. Defaults to None.
            packet_types: (required) APID value (string) : packet name (string) pairs
                                     APID value can use 'X' to mask out a bit
                                     For example, 'XXXXX1011XXX' means only bits 6-9 represent the APID
            packet_secondary_header_length: (optional) Length of secondary header in octets.
                                                       Defaults to 0.
        Raises:
            ValueError:   If packet in config is not present in default tlm dict.
        """
        super(CCSDSPacketHandler, self).__init__(input_type, output_type)
        self.packet_types = kwargs['packet_types']
        self.packet_secondary_header_length = kwargs.get(
            'packet_secondary_header_length', 0)

        # Check if all packet names in config are in telemetry dictionary
        tlm_dict = tlm.getDefaultDict()
        for packet_name in self.packet_types.values():
            if packet_name not in tlm_dict.keys():
                msg = 'CCSDSPacketHandler: Packet name {} not present in telemetry dictionary.'.format(
                    packet_name)
                msg += ' Available packet types are {}'.format(tlm_dict.keys())
                raise ValueError(msg)
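
A construction sketch grounded in the docstring above; "Example_Packet" is a placeholder that must exist in the default telemetry dictionary:

    # 'X' bits in the APID key are ignored when matching incoming packets.
    handler = CCSDSPacketHandler(
        packet_types={"XXXXX1011XXX": "Example_Packet"},
        packet_secondary_header_length=0,
    )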
Example #7
def get_packet_and_defn():
    first_stream = ait.config.get('gui.telemetry')[0]
    stream = cfg.AitConfig(config=first_stream).get('stream')
    name = stream.get('name', '<unnamed>')
    pkt_defn = tlm.getDefaultDict().get(name, None)
    pkt = tlm.Packet(pkt_defn)

    return pkt_defn, pkt
Example #8
def main():
    try:
        log.begin()

        parser = argparse.ArgumentParser(
            description=__doc__,
            formatter_class=argparse.RawDescriptionHelpFormatter)

        # Add optional command line arguments
        parser.add_argument("--port", default=3076, type=int)
        parser.add_argument("--host", default="127.0.0.1", type=str)
        parser.add_argument("--packetName", default=None)
        parser.add_argument("--packetFill", default=None)

        # Get command line arguments
        args = vars(parser.parse_args())

        port = args["port"]
        host = args["host"]
        fill = args["packetFill"]
        name = args["packetName"]

        if name:
            defn = tlm.getDefaultDict()[name]
        else:
            defn = list(tlm.getDefaultDict().values())[0]

        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

        packet = defn.simulate(fill=fill)

        while True:
            sock.sendto(packet._data, (host, port))

            log.info("Sent telemetry (%d bytes) to %s:%d" %
                     (packet.nbytes, host, port))

            time.sleep(1)

    except KeyboardInterrupt:
        log.info("Received Ctrl-C. Stopping telemetry stream.")

    except Exception as e:
        log.error("TLM send error: %s" % str(e))

    log.end()
Example #9
    def test_ccsds_packet_uid(self):
        handler = CCSDSPacketHandler(
            packet_types={"01011100111": "CCSDS_HEADER"})
        data = bytearray(b"\x02\xE7\x40\x00\x00\x00\x01")

        tlm_dict = tlm.getDefaultDict()
        packet_uid = tlm_dict["CCSDS_HEADER"].uid
        result = handler.handle(data)
        self.assertEqual(packet_uid, pickle.loads(result)[0])
Example #10
    def create_packet_from_result(cls, packet_name, data):
        try:
            pkt_defn = tlm.getDefaultDict()[packet_name]
        except KeyError:
            log.error("Unknown packet name {}. Unable to unpack SQLite result".
                      format(packet_name))
            return None

        return tlm.Packet(pkt_defn, data=data)
Example #11
    def create(self, **kwargs):
        """Create packet tables in the connected database

        **Configuration Parameters**

        tlmdict
            The :class:`ait.core.tlm.TlmDict` instance to use. Defaults to
            the currently configured telemetry dictionary.
        """
        tlmdict = kwargs.get("tlmdict", tlm.getDefaultDict())
        for _name, defn in tlmdict.items():
            self._create_table(defn)
Example #12
def create(database, tlmdict=None):
    """Creates a new database for the given Telemetry Dictionary and
    returns a connection to it.
    """
    if tlmdict is None:
        tlmdict = tlm.getDefaultDict()
    
    dbconn = connect(database)

    for name, defn in tlmdict.items():
        createTable(dbconn, defn)

    return dbconn
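
Usage is a one-liner; a sketch assuming connect and createTable are defined alongside this function and that connect returns a standard DB-API connection:

    dbconn = create("history.db")  # "history.db" is an illustrative filename
    dbconn.close()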
Example #13
    def handle(self, input_data):
        """
        Params:
            input_data:    bytes making up a CCSDS packet
        Returns:
            tuple of packet UID and packet data field
        """

        # Check if packet length is at least 7 bytes
        primary_header_length = 6
        if len(input_data) < primary_header_length + 1:
            ait.core.log.info(
                'CCSDSPacketHandler: Received packet length is less than minimum of 7 bytes.'
            )
            return

        # Extract APID from packet
        packet_apid = str(
            bin(int(binascii.hexlify(input_data[0:2]), 16)
                & 0x07FF))[2:].zfill(11)

        # Check if packet_apid matches with an APID in the config
        config_apid = self.comp_apid(packet_apid)
        if not config_apid:
            msg = 'CCSDSPacketHandler: Packet APID {} not present in config.'.format(
                packet_apid)
            msg += ' Available packet APIDs are {}'.format(
                self.packet_types.keys())
            ait.core.log.info(msg)
            return

        # Map APID to packet name in config to get UID from telemetry dictionary
        packet_name = self.packet_types[config_apid]
        tlm_dict = tlm.getDefaultDict()
        packet_uid = tlm_dict[packet_name].uid

        # Extract user data field from packet
        packet_data_length = int(binascii.hexlify(input_data[4:6]), 16) + 1
        if len(input_data) < primary_header_length + packet_data_length:
            ait.core.log.info(
                'CCSDSPacketHandler: Packet data length is less than stated length in packet primary header.'
            )
            return
        udf_length = packet_data_length - self.packet_secondary_header_length
        udf_start = primary_header_length + self.packet_secondary_header_length
        # Slice exactly udf_length octets of user data
        user_data_field = input_data[udf_start:udf_start + udf_length]

        return pickle.dumps((packet_uid, user_data_field), 2)
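
A worked example tying this back to the test in Example #9: the first two header bytes there are 0x02E7, and 0x02E7 & 0x07FF == 0b01011100111, which matches the 11-bit APID key in the config:

    # "CCSDS_HEADER" must exist in the default telemetry dictionary.
    handler = CCSDSPacketHandler(packet_types={"01011100111": "CCSDS_HEADER"})
    result = handler.handle(bytearray(b"\x02\xE7\x40\x00\x00\x00\x01"))

    # handle() returns a pickled (uid, user_data_field) tuple.
    packet_uid, user_data = pickle.loads(result)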
Example #14
    def test_query_packets_calldown(self):
        sqlbackend = db.SQLiteBackend()
        sqlbackend._conn = mock.MagicMock()

        start = dmc.GPS_Epoch
        end = dt.datetime.utcnow()
        packets = [list(tlm.getDefaultDict().keys())[0]]

        sqlbackend.query_packets(packets=packets,
                                 start_time=start,
                                 end_time=end)

        start = start.strftime(dmc.RFC3339_Format)
        end = end.strftime(dmc.RFC3339_Format)
        for i, pkt in enumerate(packets):
            query = f'SELECT * FROM "{pkt}" WHERE time >= "{start}" AND time <= "{end}" ORDER BY time ASC'
            assert sqlbackend._conn.execute.call_args[i][0] == query
Example #15
    def __init__(self, inputs, outputs, **kwargs):
        super(TelemetryLimitMonitor, self).__init__(inputs, outputs, **kwargs)

        self.limit_dict = defaultdict(dict)
        for k, v in limits.getDefaultDict().items():
            packet, field = k.split('.')
            self.limit_dict[packet][field] = v

        self.packet_dict = defaultdict(dict)
        for k, v in tlm.getDefaultDict().items():
            self.packet_dict[v.uid] = v

        self.notif_thrshld = ait.config.get('notifications.options.threshold', 1)
        self.notif_freq = ait.config.get('notifications.options.frequency', float('inf'))

        self.limit_trip_repeats = {}
        log.info('Starting telemetry limit monitoring')
Example #16
    def __init__(self, inputs, outputs, zmq_args=None, **kwargs):
        """
        Params:
            inputs:     names of inbound streams plugin receives data from
            outputs:    names of outbound streams plugin sends its data to
            zmq_args:   dict containing the follow keys:
                            zmq_context
                            zmq_proxy_xsub_url
                            zmq_proxy_xpub_url
                        Defaults to empty dict. Default values
                        assigned during instantiation of parent class.
            **kwargs:   (optional) Dependent on requirements of child class.
        """

        super(AITOpenMctPlugin, self).__init__(inputs, outputs, zmq_args,
                                               **kwargs)

        log.info('Running AIT OpenMCT Plugin')

        # Initialize state fields
        # Debug state fields
        self._debugEnabled = AITOpenMctPlugin.DEFAULT_DEBUG
        self._debugMimicRepeat = False
        # Port value for the server
        self._servicePort = AITOpenMctPlugin.DEFAULT_PORT

        # Setup server state
        self._app = bottle.Bottle()
        self._servers = []

        # Queues for AIT events
        self._tlmQueue = api.GeventDeque(maxlen=100)
        self._logQueue = api.GeventDeque(maxlen=100)

        # Load AIT tlm dict and create OpenMCT format of it
        self._aitTlmDict = tlm.getDefaultDict()
        self._mctTlmDict = self.format_tlmdict_for_openmct(self._aitTlmDict)

        # Create lookup from packet-uid to packet def
        self._uidToPktDefMap = self.create_uid_pkt_map(self._aitTlmDict)

        # Check for AIT config overrides
        self._checkConfig()

        gevent.spawn(self.init)
Example #17
    def __init__(self,
                 inputs,
                 outputs,
                 datastore='ait.core.db.InfluxDBBackend',
                 **kwargs):
        """
        Attempts to connect to database backend. Plugin will not be created if
        connection fails.

        Creates base packet dictionary for decoding packets with packet UIDs as
        keys and packet definitions as values.

        Params:
            inputs:      list of names of input streams to plugin
            outputs:     list of names of plugin output streams
            datastore:   path to database backend to use
            **kwargs:    any args required for connecting to backend database
        Raises:
            ImportError:   raised if provided database backend does not exist or
                           cannot be imported
            Exception:     raised if the backend database cannot be connected to
                           for any reason
        """
        super(DataArchive, self).__init__(inputs, outputs, **kwargs)

        self.datastore = datastore
        self.packet_dict = defaultdict(dict)
        for k, v in tlm.getDefaultDict().items():
            self.packet_dict[v.uid] = v

        try:
            mod, cls = self.datastore.rsplit('.', 1)
            self.dbconn = getattr(importlib.import_module(mod), cls)()
            self.dbconn.connect(**kwargs)
            log.info('Starting telemetry data archiving')
        except ImportError as e:
            log.error("Could not import specified datastore {}".format(
                self.datastore))
            raise (e)
        except Exception as e:
            log.error(
                "Unable to connect to {} backend. Disabling data archive.".
                format(self.datastore))
            raise (e)
Example #18
    def __init__(self,
                 inputs=None,
                 outputs=None,
                 zmq_args=None,
                 routing_table=None,
                 default_topic=None):

        super().__init__(inputs, outputs, zmq_args)

        self.default_topic = default_topic

        if 'path' in routing_table:
            self.routing_table_object = self.load_table_yaml(
                routing_table['path'], tlm.getDefaultDict())
        else:
            self.routing_table_object = None
            log.error("no path specified for routing table")
        if self.routing_table_object is None:
            log.error("Unable to load routing table .yaml file")
Example #19
File: db.py  Project: vitork-l4b/AIT-Core
    def create(self, **kwargs):
        '''  Create a database for the current telemetry dictionary

        Connects to a SQLite instance via :func:`connect` and creates a
        skeleton database for future data inserts.

        **Configuration Parameters**

        tlmdict
            The :class:`ait.core.tlm.TlmDict` instance to use. Defaults to
            the currently configured telemetry dictionary.

        '''
        tlmdict = kwargs.get('tlmdict', tlm.getDefaultDict())

        self.connect(**kwargs)

        for name, defn in tlmdict.items():
            self._create_table(defn)
Example #20
def handle():
    """Set playback query with packet name, start time, and end time from form"""
    global playback

    if not playback.enabled:
        return HttpResponse(status=404,
                            body='Historic data playback is disabled')

    tlm_dict = tlm.getDefaultDict()

    # Get values from form
    packet = bottle.request.forms.get('packet')
    start_time = bottle.request.forms.get('startTime')
    end_time = bottle.request.forms.get('endTime')
    uid = tlm_dict[packet].uid

    # Query packet and time range from database
    point_query = 'SELECT * FROM "{}" WHERE time >= \'{}\' AND time <= \'{}\''.format(
        packet, start_time, end_time)
    points = list(playback.dbconn.query(point_query).get_points())

    pkt = tlm_dict[packet]
    fields = pkt.fields
    # Build field names list from tlm dictionary for sorting data query
    field_names = []
    # Build field types list from tlm dictionary for packing data
    field_formats = []
    for i in range(len(fields)):
        field_names.append(fields[i].name)
        field_type = str(fields[i].type).split("'")[1]
        field_formats.append(dtype.get(field_type).format)
    # Put query into a map of {timestamp: list of (uid, data)}
    for i in range(len(points)):
        # Round time down to nearest 0.1 second
        timestamp = str(points[i]['time'][:21] + 'Z')
        data = b''
        for j in range(len(field_names)):
            data += struct.pack(field_formats[j], points[i][field_names[j]])
        if timestamp in playback.query:
            playback.query[timestamp].append((uid, data))
        else:
            playback.query[timestamp] = [(uid, data)]
Example #21
    def __init__(self,
                 cmdport=ait.config.get('command.port', ait.DEFAULT_CMD_PORT),
                 tlmport=3076, defn=None):
        if defn is None:
            tlmdict = tlm.getDefaultDict()
            names   = sorted( tlmdict.keys() )

            if len(names) == 0:
                msg = 'No packets defined in default TLM dictionary.'
                raise TypeError(msg)

            defn = tlmdict[ names[0] ]

        self._packets = PacketBuffers()
        self._cmd     = CmdAPI(cmdport)

        self._packets.create(defn.name)
        pktbuf        = self._packets[defn.name]
        self._tlm     = UdpTelemetryServer(tlmport, pktbuf, defn)
        self._tlm.start()
Example #22
    def test_query_packets_calldown(self, importlib_mock):
        sqlbackend = db.InfluxDBBackend()
        sqlbackend._conn = mock.MagicMock()

        start = dmc.GPS_Epoch
        end = dt.datetime.utcnow()
        packets = [list(tlm.getDefaultDict().keys())[0]]

        sqlbackend.query_packets(packets=packets,
                                 start_time=start,
                                 end_time=end)

        packets = ", ".join(packets)
        start = start.strftime(dmc.RFC3339_Format)
        end = end.strftime(dmc.RFC3339_Format)
        query = (
            f"SELECT * FROM \"{packets}\" WHERE time >= '{start}' AND time <= '{end}'"
        )

        assert sqlbackend._conn.query.call_args[0][0] == query
Example #23
    def __init__(self, cmdport=None, packets=None):
        """"""
        tlmdict = tlm.getDefaultDict()
        if packets is None:
            packets = list(tlmdict.keys())
        else:
            if not isinstance(packets, collections.abc.Iterable):
                packets = [packets]

            cln_pkts = []
            for pkt in packets:
                if pkt not in tlmdict:
                    log.error(
                        f"Instrument passed invalid packet name {pkt}. Skipping ..."
                    )
                else:
                    cln_pkts.append(pkt)
            packets = cln_pkts

        defns = {tlmdict[k].uid: tlmdict[k] for k in packets}

        if len(defns.keys()) == 0:
            msg = (
                "No packets available to Instrument. Ensure your dictionary "
                "is valid and contains Packets. If you passed packet names to "
                "Instrument ensure at least one is valid."
            )
            raise TypeError(msg)

        self._pkt_buffs = PacketBuffers()
        for _, pkt_defn in defns.items():
            self._pkt_buffs.create(pkt_defn.name)

        self._cmd = CmdAPI(cmdport)
        self._monitor = TlmMonitor(self._pkt_buffs, defns)
        self._monitor.start()
        gevent.sleep(0)
Example #24
def main():
    log.begin()

    parser = argparse.ArgumentParser(description=__doc__)

    parser.add_argument("--all", action="store_true", help="output all fields/values")

    parser.add_argument(
        "--csv",
        default="output.csv",
        metavar="</path/to/output/csv>",
        help="Output as CSV with filename",
    )

    parser.add_argument(
        "--fields",
        metavar="</path/to/fields/file>",
        help="path to the file containing all fields to query, separated by newline.",
    )

    parser.add_argument(
        "--packet", required=True, help="Packet name from telemetry dictionary specified in config file."
    )

    parser.add_argument(
        "--time_field",
        help=(
            "Time field to use for time range comparisons. Ground receipt time "
            "will be used if nothing is specified."
        ),
    )

    parser.add_argument(
        "--stime",
        help=(
            "Datetime in file to start collecting the data values. Defaults to "
            "beginning of pcap. Expected format: YYYY-MM-DDThh:mm:ssZ"
        ),
    )

    parser.add_argument(
        "--etime",
        help=(
            "Datetime in file to end collecting the data values. Defaults to end "
            "of pcap. Expected format: YYYY-MM-DDThh:mm:ssZ"
        ),
    )

    parser.add_argument(
        "pcap", nargs="*", help=("PCAP file(s) containing telemetry packets")
    )

    args = parser.parse_args()

    args.ground_time = True
    if args.time_field is not None:
        args.ground_time = False

    tlmdict = tlm.getDefaultDict()
    defn = None

    try:
        if tlmdict is not None:
            defn = tlmdict[args.packet]
    except KeyError:
        log.error('Packet "%s" not defined in telemetry dictionary.' % args.packet)
        log.end()
        sys.exit(2)

    if not args.all and args.fields is None:
        log.error(
            "Must provide fields file with --fields or specify that all fields should be queried with --all"
        )
        log.end()
        sys.exit(2)

    if args.all:
        fields = [flddefn.name for flddefn in defn.fields]
    else:
        # Parse the fields file into a list
        with open(args.fields, "r") as stream:
            fields = [fldname.strip() for fldname in stream.readlines()]

    not_found = False

    # TODO Rework this into the CSV generation. Not here.
    # Duplicating effort
    for fldname in fields:
        raw = fldname.split(".")
        if fldname not in defn.fieldmap and (
            len(raw) != 2 or raw[0] != "raw" or raw[1] not in defn.fieldmap
        ):
            not_found = True
            log.error('No telemetry point named "%s"' % fldname)

    if not_found:
        log.end()
        sys.exit(2)

    if args.stime:
        start = datetime.strptime(args.stime, dmc.ISO_8601_Format)
    else:
        start = dmc.GPS_Epoch

    if args.etime:
        stop = datetime.strptime(args.etime, dmc.ISO_8601_Format)
    else:
        stop = datetime.utcnow()

    # Append time to beginning of each row
    if not args.ground_time:
        fields.insert(0, args.time_field)
    else:
        fields.insert(0, "Ground Receipt Time")

    csv_file = None
    csv_writer = None
    npackets = 0
    if args.csv:
        csv_file = open(args.csv, "w")
        csv_writer = csv.writer(csv_file)

    output(csv_writer, fields)

    # If we're comparing off ground receipt time we need to drop the header label to avoid
    # indexing errors when processing the fields.
    if args.ground_time:
        fields = fields[1:]

    rowcnt = 0

    for filename in args.pcap:
        log.debug("Processing %s" % filename)

        with pcap.open(filename, "rb") as stream:
            header, data = stream.read()

            while data:
                packet = tlm.Packet(defn, data)

                comp_time = (
                    header.timestamp
                    if args.ground_time
                    else getattr(packet, args.time_field)
                )
                if start < comp_time < stop:
                    row = []
                    for field in fields:
                        try:
                            # check if raw value requested
                            _raw = False
                            names = field.split(".")
                            if len(names) == 2 and names[0] == "raw":
                                field = names[1]
                                _raw = True

                            field_val = packet._getattr(field, raw=_raw)

                            if hasattr(field_val, "name"):
                                field_val = field_val.name
                            else:
                                field_val = str(field_val)

                        except KeyError:
                            log.debug("%s not found in Packet" % field)
                            field_val = None
                        except ValueError:
                            # enumeration not found. just get the raw value
                            field_val = packet._getattr(field, raw=True)

                        row.append(field_val)

                    if args.ground_time:
                        row = [comp_time] + row

                    rowcnt += 1
                    output(csv_writer, row)

                npackets += 1
                header, data = stream.read()

    log.debug("Parsed %s packets." % npackets)

    csv_file.close()

    if rowcnt == 0:
        os.remove(args.csv)

    log.end()
Example #25
    def mimic_tlm(self, ait_tlm_pkt_name, ait_tlm_pkt_fill=None):
        """Used for debugging, creates an instance of a packet based on
        packet name, and fills it with zero data.
        Special case for '1553_HS_Packet' which will get random number
        data fills.
        If HTTP Request query includes a value for 'repeat', then this
        will continue emitting telemetry.
        """

        # HTTP query option; if set to anything, treat it as true
        self._debugMimicRepeat = len(str(bottle.request.query.repeat)) > 0

        # Helpful for testing: simulates telemetry via a REST call
        # instead of an actual telemetry stream
        ait_pkt_defn = None
        if ait_tlm_pkt_name:
            ait_pkt_defn = tlm.getDefaultDict()[ait_tlm_pkt_name]
        else:
            ait_pkt_defn = list(tlm.getDefaultDict().values())[0]

        # Create the expected message format
        pkt_size_bytes = ait_pkt_defn.nbytes

        repeat_str = " REPEATED " if self._debugMimicRepeat else " a single "
        info_msg = ("Received request to mimic" + repeat_str +
                    "telemetry packet for " + ait_pkt_defn.name)
        self.dbg_message(info_msg)

        # Create a binary array of size filled with 0
        dummy_data = bytearray(pkt_size_bytes)

        info_msg = ""

        while True:

            # Special handling for the simple integer-based packet; all other
            # packets keep zero-filled data
            if ait_pkt_defn.name == "1553_HS_Packet":
                hs_packet = struct.Struct(">hhhhh")
                random_num = random.randint(1, 100)
                dummy_data = hs_packet.pack(random_num, random_num, random_num,
                                            random_num, random_num)

            tlm_pkt = tlm.Packet(ait_pkt_defn, data=bytearray(dummy_data))
            self._process_telem_msg(tlm_pkt)

            info_msg = (
                "AIT OpenMct Plugin submitted mimicked telemetry for " +
                ait_pkt_defn.name + " (" + str(datetime.datetime.now()) +
                ") to telem queue")
            self.dbg_message(info_msg)

            # sleep if mimic on
            if self._debugMimicRepeat:
                gsleep(5)

            # either it was immediate or we woke up, check break condition
            if not self._debugMimicRepeat:
                break

        # Return last status message as result to client
        return info_msg
Example #26
    def __init__(self,
                 inputs,
                 outputs,
                 zmq_args=None,
                 datastore="ait.core.db.InfluxDBBackend",
                 **kwargs):
        """
        Params:
            inputs:     names of inbound streams plugin receives data from
            outputs:    names of outbound streams plugin sends its data to
            zmq_args:   dict containing the follow keys:
                            zmq_context
                            zmq_proxy_xsub_url
                            zmq_proxy_xpub_url
                        Defaults to empty dict. Default values
                        assigned during instantiation of parent class.
            datastore:   path to database backend to use
            **kwargs:   (optional) Dependent on requirements of child class.
        """

        super(AITOpenMctPlugin, self).__init__(inputs, outputs, zmq_args,
                                               **kwargs)

        log.info("Running AIT OpenMCT Plugin")

        self._datastore = datastore

        # Initialize state fields
        # Debug state fields
        self._debugEnabled = AITOpenMctPlugin.DEFAULT_DEBUG
        self._debugMimicRepeat = False
        # Port value for the server
        self._servicePort = AITOpenMctPlugin.DEFAULT_PORT
        # Flag indicating if we should create a database connection for historical queries
        self._databaseEnabled = AITOpenMctPlugin.DEFAULT_DATABASE_ENABLED

        # Check for AIT config overrides
        self._check_config()

        # Setup server state
        self._app = bottle.Bottle()
        self._servers = []

        # Queues for AIT events
        self._tlmQueue = api.GeventDeque(maxlen=100)

        # Load AIT tlm dict and create OpenMCT format of it
        self._aitTlmDict = tlm.getDefaultDict()
        self._mctTlmDict = DictUtils.format_tlmdict_for_openmct(
            self._aitTlmDict)

        # Create lookup from packet-uid to packet def
        self._uidToPktDefMap = DictUtils.create_uid_pkt_map(self._aitTlmDict)

        # Attempt to initialize database, None if no DB
        self._database = self.load_database(**kwargs)

        # Maintains a set of active websocket structs
        self._socket_set = set()

        # Spawn greenlets to poll telemetry
        self.tlm_poll_greenlet = Greenlet.spawn(
            self.poll_telemetry_periodically)

        gevent.spawn(self.init)
Example #27
    def test_query_packets_arg_handling(self):
        sqlbackend = db.SQLiteBackend()
        sqlbackend._conn = mock.MagicMock()
        query_string = (
            'SELECT * FROM "{}" WHERE time >= "{}" AND time <= "{}" ORDER BY time ASC'
        )

        # Test no packet provided handling
        #######################################
        start = dmc.GPS_Epoch
        end = dt.datetime.utcnow()

        res = sqlbackend.query_packets(start_time=start, end_time=end)

        packets = list(tlm.getDefaultDict().keys())
        start = start.strftime(dmc.RFC3339_Format)
        end = end.strftime(dmc.RFC3339_Format)
        query = query_string.format(packets, start, end)

        for i, pkt in enumerate(packets):
            query = f'SELECT * FROM "{pkt}" WHERE time >= "{start}" AND time <= "{end}" ORDER BY time ASC'
            assert sqlbackend._conn.execute.call_args_list[i][0][0] == query

        sqlbackend._conn.reset_mock()

        # Test no start time handling
        #######################################
        end = dt.datetime.utcnow()

        packets = [list(tlm.getDefaultDict().keys())[0]]

        sqlbackend.query_packets(packets=packets, end_time=end)

        start = dmc.GPS_Epoch.strftime(dmc.RFC3339_Format)
        end = end.strftime(dmc.RFC3339_Format)
        query = query_string.format(packets[0], start, end)

        assert sqlbackend._conn.execute.call_args[0][0] == query
        sqlbackend._conn.reset_mock()

        # Test no end time handling
        #######################################
        packets = [list(tlm.getDefaultDict().keys())[0]]

        sqlbackend.query_packets(packets=packets)

        start = dmc.GPS_Epoch.strftime(dmc.RFC3339_Format)
        end = dt.datetime.utcnow()
        query = query_string.format(packets, start, end)

        exec_end_time = dt.datetime.strptime(
            sqlbackend._conn.execute.call_args[0][0].split('"')[-2],
            dmc.RFC3339_Format)

        assert (end - exec_end_time).seconds < 1
        sqlbackend.query_packets()

        # Test bad packet name exception
        #######################################
        with pytest.raises(ValueError):
            sqlbackend.query_packets(packets=["not_a_valid_packet"])
Example #28
    def create_packets_from_results(self, packet_name, result_set):
        ''' Generate AIT Packets from an InfluxDB query ResultSet

        Extract InfluxDB query results into one packet per result entry. This
        assumes that telemetry data was inserted in the format generated by
        :func:`InfluxDBBackend.insert`. Complex types such as CMD16 and EVR16 are
        evaluated if they can be properly encoded from the raw value in the
        query result. If there is no opcode / EVR-code for a particular raw
        value the value is skipped (and thus defaulted to 0).

        Arguments
            packet_name (string)
                The name of the AIT Packet to create from each result entry

            result_set (influxdb.resultset.ResultSet)
                The query ResultSet object to convert into packets

        Returns
            A list of packets extracted from the ResultSet object or None if
            an invalid packet name is supplied.
        '''
        try:
            pkt_defn = tlm.getDefaultDict()[packet_name]
        except KeyError:
            log.error('Unknown packet name {}. Unable to unpack ResultSet'.format(packet_name))
            return None

        pkts = []
        for r in result_set.get_points():
            new_pkt = tlm.Packet(pkt_defn)

            for f, f_defn in pkt_defn.fieldmap.items():
                field_type_name = f_defn.type.name
                if field_type_name == 'CMD16':
                    cmd_def = cmd.getDefaultDict().opcodes.get(r[f], None)
                    if cmd_def:
                        setattr(new_pkt, f, cmd_def.name)
                elif field_type_name == 'EVR16':
                    if evr.getDefaultDict().codes.get(r[f], None):
                        setattr(new_pkt, f, r[f])
                elif field_type_name == 'TIME8':
                    setattr(new_pkt, f, r[f] / 256.0)
                elif field_type_name == 'TIME32':
                    new_val = dmc.GPS_Epoch + dt.timedelta(seconds=r[f])
                    setattr(new_pkt, f, new_val)
                elif field_type_name == 'TIME40':
                    sec = int(r[f])
                    microsec = r[f] % 1 * 1e6
                    new_val = dmc.GPS_Epoch + dt.timedelta(seconds=sec, microseconds=microsec)
                    setattr(new_pkt, f, new_val)
                elif field_type_name == 'TIME64':
                    sec = int(r[f])
                    microsec = r[f] % 1 * 1e6
                    new_val = dmc.GPS_Epoch + dt.timedelta(seconds=sec, microseconds=microsec)
                    setattr(new_pkt, f, new_val)
                else:
                    try:
                        setattr(new_pkt, f, r[f])
                    except KeyError:
                        log.info('Field not found in query results {} Skipping ...'.format(f))

            pkts.append(new_pkt)
        return pkts
Example #29
def main():
    tlmdict = tlm.getDefaultDict()
    pnames = list(tlmdict.keys())
    ap = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    arguments = {
        '--packet': {
            'type': str,
            'choices': pnames,
            'default': pnames[0] if len(pnames) > 0 else None,
            'help': 'Type of packets (!Packet name in tlm.yaml) in file',
            'required': len(pnames) > 1,
        },
        '--database': {
            'default': ait.config.get('database.name'),
            'help': ('Name of database in which to insert packets (may '
                     'also be specified in config.yaml database.name)'),
            'required': ait.config.get('database.name') is None
        },
        '--backend': {
            'default': 'sqlite',
            'choices': ['sqlite', 'influx'],
            'action': 'store',
            'help': 'Database backend into which to insert packets'
        },
        '--use-current-time': {
            'action': 'store_true',
            'help': ('Use current time stamps when inserting packets instead '
                     'of ground receipt time (or the time written in the '
                     'PCAP header).')
        },
        'file': {
            'nargs': '+',
            'help': 'File(s) containing telemetry packets'
        }
    }

    for name, params in arguments.items():
        ap.add_argument(name, **params)

    args = ap.parse_args()

    log.begin()

    try:
        npackets = 0
        dbconn = None
        defn = tlm.getDefaultDict()[args.packet]
        nbytes = defn.nbytes

        if args.backend == 'sqlite':
            dbconn = db.SQLiteBackend()
        elif args.backend == 'influx':
            dbconn = db.InfluxDBBackend()

        if args.backend == 'sqlite' and (args.database == ':memory:'
                                         or not os.path.exists(args.database)):
            dbconn.create(database=args.database)
        else:
            dbconn.connect(database=args.database)

        for filename in args.file:
            log.info('Processing %s' % filename)
            with pcap.open(filename) as stream:
                for header, pkt_data in stream:
                    try:
                        packet = tlm.Packet(defn, pkt_data)

                        time = header.timestamp
                        if args.use_current_time:
                            time = None

                        dbconn.insert(packet, time=time)
                        npackets += 1
                    except struct.error:
                        log.error(
                            "Unable to unpack data into packet. Skipping ...")

    except KeyboardInterrupt:
        log.info('Received Ctrl-C.  Stopping database insert.')

    except IOError as e:
        log.error(str(e))

    finally:
        if dbconn:
            dbconn.close()

    values = npackets, args.packet, args.database
    log.info('Inserted %d %s packets into database %s.' % values)

    log.end()
Example #30
def main():
    log.begin()

    description = """Parses 1553 telemetry into CSV file."""

    arguments = {
        '--all': {
            'action': 'store_true',
            'help': 'output all fields/values',
        },
        '--csv': {
            'type': str,
            'default': 'output.csv',
            'metavar': '</path/to/output/csv>',
            'help': 'Output as CSV with filename'
        },
        '--fields': {
            'type': str,
            'metavar': '</path/to/fields/file>',
            'help':
            'file containing all fields to query, separated by newline.'
        },
        '--packet': {
            'type': str,
            'required': True,
            'help': 'Packet name from telemetry dictionary specified in config file.'
        },
        '--time_field': {
            'type': str,
            'help': ('Time field to use for time range comparisons. Ground '
                     'receipt time will be used if nothing is specified.')
        },
        '--stime': {
            'type': str,
            'help': ('Datetime in file to start collecting the data values. '
                     'Defaults to beginning of pcap. Expected format: '
                     'YYYY-MM-DDThh:mm:ssZ')
        },
        '--etime': {
            'type': str,
            'help': ('Datetime in file to end collecting the data values. '
                     'Defaults to end of pcap. Expected format: '
                     'YYYY-MM-DDThh:mm:ssZ')
        }
    }

    arguments['pcap'] = {
        'nargs': '*',
        'help': 'PCAP file(s) containing telemetry packets'
    }

    args = gds.arg_parse(arguments, description)

    args.ground_time = True
    if args.time_field is not None:
        args.ground_time = False

    tlmdict = tlm.getDefaultDict()
    defn = None

    try:
        if tlmdict is not None:
            defn = tlmdict[args.packet]
    except KeyError:
        log.error('Packet "%s" not defined in telemetry dictionary.' %
                  args.packet)
        gds.exit(2)

    if not args.all and args.fields is None:
        log.error(
            'Must provide fields file with --fields or specify that all fields should be queried with --all'
        )
        gds.exit(2)

    if args.all:
        fields = [flddefn.name for flddefn in defn.fields]
    else:
        # Parse the fields file into a list
        with open(args.fields, 'r') as stream:
            fields = [fldname.strip() for fldname in stream.readlines()]

    not_found = False

    # TODO Rework this into the CSV generation. Not here.
    # Duplicating effort
    for fldname in fields:
        raw = fldname.split('.')
        if fldname not in defn.fieldmap and (len(raw) != 2 or raw[0] != 'raw'
                                             or raw[1] not in defn.fieldmap):
            not_found = True
            log.error('No telemetry point named "%s"' % fldname)

    if not_found:
        gds.exit(2)

    if args.stime:
        start = datetime.strptime(args.stime, dmc.ISO_8601_Format)
    else:
        start = dmc.GPS_Epoch

    if args.etime:
        stop = datetime.strptime(args.etime, dmc.ISO_8601_Format)
    else:
        stop = datetime.utcnow()

    # Append time to beginning of each row
    if not args.ground_time:
        fields.insert(0, args.time_field)
    else:
        fields.insert(0, 'Ground Receipt Time')

    csv_file = None
    csv_writer = None
    npackets = 0
    if args.csv:
        csv_file = open(args.csv, 'w')
        csv_writer = csv.writer(csv_file)

    output(csv_writer, fields)

    # If we're comparing off ground receipt time we need to drop the header label to avoid
    # indexing errors when processing the fields.
    if args.ground_time:
        fields = fields[1:]

    rowcnt = 0

    for filename in args.pcap:
        log.debug('Processing %s' % filename)

        with pcap.open(filename, 'rb') as stream:
            header, data = stream.read()

            while data:
                packet = tlm.Packet(defn, data)

                comp_time = header.timestamp if args.ground_time else getattr(
                    packet, args.time_field)
                if start < comp_time < stop:
                    row = []
                    for field in fields:
                        try:
                            # check if raw value requested
                            _raw = False
                            names = field.split('.')
                            if len(names) == 2 and names[0] == 'raw':
                                field = names[1]
                                _raw = True

                            fieldVal = packet._getattr(field, raw=_raw)

                            if hasattr(fieldVal, 'name'):
                                fieldVal = fieldVal.name
                            else:
                                fieldVal = str(fieldVal)

                        except KeyError:
                            log.debug('%s not found in Packet' % field)
                            fieldVal = None
                        except ValueError:
                            # enumeration not found. just get the raw value
                            fieldVal = packet._getattr(field, raw=True)

                        row.append(fieldVal)

                    if args.ground_time:
                        row = [comp_time] + row

                    rowcnt += 1
                    output(csv_writer, row)

                npackets += 1
                header, data = stream.read()

    log.debug('Parsed %s packets.' % npackets)

    csv_file.close()

    if rowcnt == 0:
        os.remove(args.csv)

    log.end()