Ejemplo n.º 1
0
def main():
    """Validate sequence text file(s) and write their binary encodings.

    For each filename given on the command line: verify the file exists,
    warn when it lacks a '.txt' extension, extract the numeric sequence id
    from the final '_'-separated filename component, then build, validate,
    and write the binary form of the sequence.  Exits 0 on success, 1 on
    any error.
    """
    log.begin()

    # Renamed from 'exit' to avoid shadowing the builtin.
    status = 0

    try:
        parser = argparse.ArgumentParser(
            description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
        )

        # Add required command line arguments
        parser.add_argument(
            "filename",
            nargs="+",
            metavar="</path/to/seq>",
            help="File or collection of sequence file(s)",
        )

        # Add optional command line arguments
        args = parser.parse_args()

        for fname in args.filename:
            filename = os.path.abspath(fname)
            if not os.path.isfile(filename):
                # BUG FIX: interpolate the actual path (the message previously
                # contained no placeholder and never named the missing file).
                raise Exception(f"File not found: {filename}")

            extension = os.path.splitext(filename)[1]

            if extension.lower() != ".txt":
                # BUG FIX: interpolate the actual filename into the warning.
                log.warn(f"Filename '{filename}' does not have a '.txt' extension")

            # Parse the filename for the applicable information
            parts = os.path.basename(filename).split("_")
            seqid = os.path.splitext(parts[-1])[0]

            try:
                seqid = int(seqid)
            except ValueError:
                # BUG FIX: the message was missing the f-prefix, so the
                # placeholders were printed literally instead of expanded.
                raise Exception(
                    f'Invalid filename "{os.path.basename(filename)}": . {__doc__}'
                )

            sequence = seq.createSeq(filename, id=seqid)

            if not sequence.validate():
                for msg in sequence.log.messages:
                    log.error(msg)
            else:
                binpath = sequence.binpath
                seqid = sequence.seqid

                log.info(f"Writing {binpath} (seqid=0x{seqid:04X}).")
                sequence.writeBinary()
    except Exception as e:
        log.error(e)
        status = 1

    log.end()

    sys.exit(status)
Ejemplo n.º 2
0
def decode_sequence_time(time, prev_time=None):
    ''' Decode a MPS Seq time into a datetime object

    Decode an absolute or relative time MPS Seq command time string into
    an absolute time datetime object. If a relative command time is passed
    a previous time must be supplied from which the absolute time should
    be calculated.

    Args:
        time: (string) A MPS Seq command time string to convert into
            a datetime object.

        prev_time: (datetime) A datetime object from which a relative time
            command time will be calculated. Required if `time` is a
            relative command time.

    Returns:
        A datetime object representing the time string

    Raises:
        TypeError: If prev_time is not supplied or is not a datetime object
            and time is a relative command time.

        ValueError: If time has a time code other than `A` or `R`.
    '''
    time_code, time = time[0], time[1:]

    # Normalize to ':'-separated fields with an explicit milliseconds part.
    if '.' not in time:
        time += ':000'
    else:
        time = time.replace('.', ':')

    if time_code == 'A':
        converted_time = dt.datetime.strptime(time, '%Y-%jT%H:%M:%S:%f')
    elif time_code == 'R':
        # IDIOM FIX: isinstance() instead of comparing concrete types; the
        # original check also rejected valid datetime subclasses.
        if not prev_time or not isinstance(prev_time, dt.datetime):
            msg = ('Previous time not specified or incorrect format provided '
                   'when given a relative command')
            log.error(msg)
            raise TypeError(msg)

        # A 'dddThh:mm:ss:fff' stamp carries a day count; otherwise days=0.
        if 'T' in time:
            t_split = time.split('T')
            days, dur = int(t_split[0]), t_split[1]
            hours, mins, secs, msecs = [int(i) for i in dur.split(':')]
        else:
            days = 0
            hours, mins, secs, msecs = [int(i) for i in time.split(':')]

        converted_time = prev_time + dt.timedelta(days=days,
                                                  hours=hours,
                                                  minutes=mins,
                                                  seconds=secs,
                                                  milliseconds=msecs)
    else:
        msg = 'Invalid time code "{}" in sequence time'.format(time_code)
        log.error(msg)
        raise ValueError(msg)

    return converted_time
Ejemplo n.º 3
0
def parseArgs(argv, defaults):
    """parseArgs(argv, defaults) -> (dict, list)

  Parses command-line arguments according to the given defaults.  For
  every key in defaults, an argument of the form --key=value will be
  parsed.  Numeric arguments are converted from strings with errors
  reported via ait.core.log.error() and default values used instead.

  Returns a copy of defaults with parsed option values and a list of
  any non-flag arguments.
  """
    options = dict(defaults)
    numeric = \
      [ k for k, v in options.items() if type(v) is float or type(v) is int ]

    try:
        longopts = ["%s=" % key for key in options.keys()]
        opts, args = getopt.getopt(argv, "", longopts)

        for key, value in opts:
            if key.startswith("--"):
                key = key[2:]
            options[key] = value
    except getopt.GetoptError as err:  # BUG FIX: Python 2 'except E, err' syntax
        log.error(str(err))
        usage(exit=True)

    # BUG FIX: the docstring promised numeric conversion and a return value,
    # but the function previously did neither.  Convert overridden numeric
    # options back to their default's type; on a bad literal, report and
    # fall back to the default value.
    for key in numeric:
        value = options[key]
        if isinstance(value, str):
            try:
                parsed = float(value)
            except ValueError:
                log.error("Invalid numeric value '%s' for option --%s", value, key)
                options[key] = defaults[key]
            else:
                options[key] = int(parsed) if type(defaults[key]) is int else parsed

    return options, args
Ejemplo n.º 4
0
    def send(self, command, *args, **kwargs):
        """Creates, validates, and sends the given command as a UDP
        packet to the destination (host, port) specified when this
        CmdAPI was created.
        Returns True if the command was created, valid, and sent,
        False otherwise.
        """
        status = False
        cmdobj = CMD_API._cmddict.create(command, *args, **kwargs)
        messages = []

        # Validation failures are logged and the command is not sent.
        if not cmdobj.validate(messages):
            for msg in messages:
                log.error(msg)
        else:
            encoded = cmdobj.encode()

            if CMD_API._verbose:
                size = len(cmdobj.name)
                # NOTE(review): size == len(cmdobj.name), so pad is always a
                # single space -- possibly meant to align against a maximum
                # name width; confirm intent.
                pad = (size - len(cmdobj.name) + 1) * ' '
                gds.hexdump(encoded, preamble=cmdobj.name + ':' + pad)

            try:
                self.publish(encoded)
                status = True

                # Append the sent command to the command-history pcap.
                with pcap.open(CMD_API.CMD_HIST_FILE, 'a') as output:
                    output.write(str(cmdobj))
            except IOError as e:
                # BUG FIX: exceptions have no '.message' attribute in
                # Python 3; log the stringified exception instead.
                log.error(str(e))

        return status
Ejemplo n.º 5
0
def main():
    """Entry point: validate a binary sequence file and print its text form."""
    log.begin()

    ap = argparse.ArgumentParser(
        description = __doc__,
        formatter_class = argparse.RawDescriptionHelpFormatter)

    # Required positional argument
    ap.add_argument('filename')

    opts = vars(ap.parse_args())
    path = os.path.abspath(opts['filename'])

    # Warn (but proceed) on an unexpected extension.
    if os.path.splitext(path)[1].lower() != '.bin':
        log.warn("Filename '%s' does not have a '.bin' extension", path)

    sequence = seq.Seq(path)

    if not sequence.validate():
        for message in sequence.messages:
            log.error(message)

    sequence.printText()

    log.end()
Ejemplo n.º 6
0
def main():
    """Entry point: parse table-decode options and load the table dictionary.

    Requires --binfile, --fswtabdict, and --tabletype; loads a
    FSWTabDictCache for the supplied dictionary path.
    """
    log.begin()

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    # Add optional command line arguments
    parser.add_argument('--binfile', default=None, required=True)
    parser.add_argument('--fswtabdict', default=None, required=True)
    parser.add_argument('--tabletype', default=None, required=True)
    parser.add_argument('--verbose', action='store_true', default=False)
    parser.add_argument('--version', default=0, type=int)

    # Get command line arguments
    args = vars(parser.parse_args())
    binfile = args['binfile']
    dictpath = args['fswtabdict']
    tabletype = args['tabletype']
    verbose = args['verbose']
    version = args['version']

    # Grab default table dictionary
    if dictpath is not None:
        dictCache = table.FSWTabDictCache(filename=dictpath)

        try:
            filename = dictCache.filename
        except IOError as e:  # BUG FIX: Python 2 'except IOError, e' syntax
            # BUG FIX: log dictpath -- 'filename' is unbound here when the
            # attribute access above is what raised.
            msg = 'Could not load default table dictionary "%s": %s'
            log.error(msg, dictpath, str(e))
Ejemplo n.º 7
0
    def _db_connect(self):
        """Connect to the datastore configured for the DataArchive plugin.

        Looks up the DataArchive entry in the 'server.plugins' config,
        dynamically imports its datastore class, and connects using the
        plugin's remaining keyword arguments.  On failure or missing
        configuration, telemetry playback is disabled (warning logged).
        """
        # Get datastore from config
        plugins = ait.config.get('server.plugins', [])
        datastore = None
        other_args = {}
        # IDIOM FIX: iterate the list directly instead of range(len(...)).
        for entry in plugins:
            if entry['plugin']['name'] == 'ait.core.server.plugins.DataArchive':
                datastore = entry['plugin']['datastore']
                # Copy the plugin config and keep only datastore-specific
                # connection arguments.
                other_args = copy.deepcopy(entry['plugin'])
                other_args.pop('name')
                other_args.pop('inputs', None)
                other_args.pop('outputs', None)
                other_args.pop('datastore', None)
                break

        if datastore:
            try:
                mod, cls = datastore.rsplit('.', 1)

                # Connect to database
                self.dbconn = getattr(importlib.import_module(mod), cls)()
                self.dbconn.connect(**other_args)
            except Exception as e:
                log.error('Error connecting to datastore {}: {}'.format(
                    datastore, e))
                log.warn('Disabling telemetry playback.')
        else:
            # BUG FIX: the implicitly-concatenated message lacked a space
            # after '[GUI Playback Configuration]'.
            msg = ('[GUI Playback Configuration] '
                   'Unable to locate DataArchive plugin configuration for '
                   'historical data queries. Historical telemetry playback '
                   'will be disabled in monitoring UI and server endpoints.')
            log.warn(msg)
Ejemplo n.º 8
0
    def create_packets_from_results(cls, packet_name, result_set):
        """Generate AIT Packets from a InfluxDB query ResultSet

        Extract Influx DB query results into one packet per result entry. This
        assumes that telemetry data was inserted in the format generated by
        :func:`InfluxDBBackend.insert`. Complex types such as CMD16 and EVR16 are
        evaluated if they can be properly encoded from the raw value in the
        query result. If there is no opcode / EVR-code for a particular raw
        value the value is skipped (and thus defaulted to 0).

        Arguments
            packet_name (string)
                The name of the AIT Packet to create from each result entry

            result_set (influxdb.resultset.ResultSet)
                The query ResultSet object to convert into packets

        Returns
            A list of packets extracted from the ResultSet object or None if
            an invalid packet name is supplied.

        """
        try:
            tlm.getDefaultDict()[packet_name]
        except KeyError:
            # BUG FIX: missing '.' after the packet name (matches the SQLite
            # variant's message punctuation).
            log.error(
                "Unknown packet name {}. Unable to unpack ResultSet".format(
                    packet_name))
            return None

        return [
            InfluxDBBackend.create_packet_from_result(packet_name, r)
            for r in result_set.get_points()
        ]
Ejemplo n.º 9
0
    def __init__(self, filename=None, data=None, config=None, pathvars=None):
        """Create a new AitConfig object.

        Configuration comes from the given YAML file, the passed-in data
        string, or a pre-built config dictionary.  When neither filename
        nor data is supplied, ${AIT_CONFIG} names the file to load; a
        missing AIT_CONFIG raises ValueError.
        """
        self._filename = None
        self._data = data
        self._datetime = time.gmtime()
        self._pathvars = pathvars

        if filename is None and data is None:
            try:
                filename = os.path.abspath(os.environ['AIT_CONFIG'])
            except KeyError:
                msg = 'AIT_CONFIG is not set. Exiting ...'
                log.error(msg)
                raise ValueError(msg)

        if config is not None:
            # Pre-built configuration supplied: adopt it directly.
            self._config = config
            self._filename = filename
        else:
            self.reload(filename, data)
Ejemplo n.º 10
0
def process_pdu(raf_mngr):
    """Drain the RAF manager's data queue and forward telemetry frames over UDP.

    Busy-polls ``raf_mngr._data_queue`` (yielding between polls via
    ``gevent.sleep(0)``), decodes each queued SLE PDU, extracts the annotated
    frame data when present and initialized, and sends the first TM frame
    payload to localhost:3076 as a UDP datagram.  Runs forever; PDUs that
    cannot be decoded or carry no data are skipped.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    while True:
        # Yield to the gevent hub so other greenlets (e.g. whatever fills
        # the queue) get a chance to run between polls.
        gevent.sleep(0)
        if raf_mngr._data_queue.empty():
            continue

        # NOTE(review): the queue is known non-empty here, so this always
        # logs 'Empty False' -- looks like leftover debug output; confirm.
        log.info('Empty {}'.format(raf_mngr._data_queue.empty()))
        pdu = raf_mngr._data_queue.get()

        try:
            decoded_pdu, remainder = raf_mngr.decode(pdu)
        except pyasn1.error.PyAsn1Error as e:
            log.error('Unable to decode PDU. Skipping ...')
            continue
        except TypeError as e:
            log.error('Unable to decode PDU due to type error ...')
            continue

        # Only forward when the annotated frame has an initialized 'data'
        # component (pyasn1 .isValue distinguishes set vs. schema-only).
        if ('data' in decoded_pdu['rafTransferBuffer'][0]['annotatedFrame']
                and decoded_pdu['rafTransferBuffer'][0]['annotatedFrame']
            ['data'].isValue):
            # Data is present and initialized. Processing telemetry ...
            trans_data = decoded_pdu['rafTransferBuffer'][0]['annotatedFrame'][
                'data'].asOctets()
        else:
            # Object does not contain data or data is not initalized. Skipping ...
            continue

        tmf = ait.dsn.sle.frames.TMTransFrame(trans_data)
        log.info('Emitting {} bytes of telemetry to GUI'.format(
            len(tmf._data[0])))
        sock.sendto(tmf._data[0], ('localhost', 3076))
Ejemplo n.º 11
0
def getDefaultDict(modname,
                   config_key,
                   loader,
                   reload=False,
                   filename=None):  # noqa
    """
    Returns the default AIT dictionary for modname.

    Encapsulates the core logic needed to (re)load, cache (via
    util.ObjectCache), and return the default dictionary.  For example,
    in ait.core.cmd:

    def getDefaultDict(reload=False):
        return ait.util.getDefaultDict(__name__, 'cmddict', CmdDict, reload)
    """
    module = sys.modules[modname]
    cached = getattr(module, "DefaultDict", None)

    # Fall back to the configured filename when none was given explicitly.
    if filename is None:
        filename = ait.config.get(f"{config_key}.filename", None)

    must_load = filename is not None and (cached is None or reload is True)
    if must_load:
        try:
            cached = ObjectCache(filename, loader).load()
            # Cache on the module so subsequent calls skip the load.
            setattr(module, "DefaultDict", cached)  # noqa
        except IOError as e:
            msg = 'Could not load default %s "%s": %s'
            log.error(msg, config_key, filename, str(e))

    # An empty/None dictionary falls back to a freshly constructed one.
    return cached or loader()
Ejemplo n.º 12
0
def main():
    """Entry point: segment pcap file(s) by bytes, packets, and/or seconds."""
    # CONSISTENCY FIX: this entry point calls log.end() but never opened the
    # log session with log.begin().
    log.begin()

    ap = argparse.ArgumentParser(
        epilog=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)

    ap.add_argument(
        "-n",
        "--dry-run",
        action="store_true",
        help="Dry run; do not actually write files",
    )

    # TYPO FIX in help text: "evey" -> "every".
    ap.add_argument("-b",
                    "--bytes",
                    help="Segment every B bytes",
                    metavar="B",
                    type=int)

    ap.add_argument("-p",
                    "--packets",
                    help="Segment every P packets",
                    metavar="P",
                    type=int)

    ap.add_argument(
        "-s",
        "--seconds",
        help="Segment when first and last pcap timestamps span S seconds",
        metavar="S",
        type=int,
    )

    ap.add_argument(
        "format",
        help="Segment filename (should include strftime(3) time format)")

    ap.add_argument("file", nargs="+", help="Packet Capture (.pcap) file(s)")

    args = ap.parse_args()

    # At least one segmentation criterion is mandatory.
    if args.bytes is None and args.packets is None and args.seconds is None:
        msg = "At least one of -b, -p, or -s is required."
        ap.error(msg)

    try:
        pcap.segment(
            filenames=args.file,
            format=args.format,
            nbytes=args.bytes,
            npackets=args.packets,
            nseconds=args.seconds,
            dryrun=args.dry_run,
        )

    except KeyboardInterrupt:
        log.info("Received Ctrl-C.  Aborting pcap segmentation.")

    except IOError as e:
        log.error(str(e))

    log.end()
Ejemplo n.º 13
0
def main():
    """Entry point: validate a binary sequence file and write its text form."""
    log.begin()

    ap = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    # Required positional argument
    ap.add_argument('filename', default=None)

    opts = vars(ap.parse_args())

    path = os.path.abspath(opts['filename'])
    if os.path.splitext(path)[1].lower() != '.bin':
        log.warn("Filename '%s' does not have a '.bin' extension", path)

    sequence = seq.Seq(path)

    if sequence.validate():
        # Valid: report the destination and emit the text form.
        log.info("Writing %s (seqid=0x%04x, version=%u).",
                 sequence.txtpath, sequence.seqid, sequence.version)
        sequence.writeText()
    else:
        for message in sequence.messages:
            log.error(message)

    log.end()
Ejemplo n.º 14
0
def decode_sequence_time(time, prev_time=None):
    """Decode a MPS Seq time into a datetime object

    Translate an absolute ('A...') or relative ('R...') MPS Seq command
    time string into an absolute datetime.  Relative times require
    ``prev_time``, the datetime the offset is applied to.

    Args:
        time: (string) A MPS Seq command time string to convert into
            a datetime object.

        prev_time: (datetime) Base time from which a relative command
            time is calculated.  Required when `time` is relative.

    Returns:
        A datetime object representing the time string

    Raises:
        TypeError: relative time given without a valid ``prev_time``.

        ValueError: time code is neither `A` nor `R`.
    """
    time_code = time[0]
    stamp = time[1:]

    # Normalize the fractional-seconds separator; default to zero ms.
    stamp = stamp.replace(".", ":") if "." in stamp else stamp + ":000"

    if time_code == "A":
        return dt.datetime.strptime(stamp, "%Y-%jT%H:%M:%S:%f")

    if time_code == "R":
        if not prev_time or not isinstance(prev_time, dt.datetime):
            msg = ("Previous time not specified or incorrect format provided "
                   "when given a relative command")
            log.error(msg)
            raise TypeError(msg)

        # Optional leading 'dddT' day count.
        if "T" in stamp:
            pieces = stamp.split("T")
            days = int(pieces[0])
            dur = pieces[1]
        else:
            days = 0
            dur = stamp

        hours, mins, secs, msecs = (int(part) for part in dur.split(":"))

        return prev_time + dt.timedelta(days=days,
                                        hours=hours,
                                        minutes=mins,
                                        seconds=secs,
                                        milliseconds=msecs)

    msg = 'Invalid time code "{}" in sequence time'.format(time_code)
    log.error(msg)
    raise ValueError(msg)
Ejemplo n.º 15
0
    def _run(self):
        """Receive tagged packet tuples from the server and buffer them.

        Loops forever: unpickles each (uid, data) message, skips anything
        malformed or with an unknown packet id, and pushes the resulting
        Packet onto the matching per-packet-name buffer.
        """
        try:
            while True:
                gevent.sleep(0)
                topic, message = serv_utils.decode_message(
                    self._sub.recv_multipart())
                message = pickle.loads(message)

                # Guard: both parts must decode successfully.
                if topic is None or message is None:
                    log.error(f"{self} received invalid topic or message. Skipping")
                    continue

                # Guard: messages must be (uid, data) tuples.
                if not isinstance(message, tuple):
                    log.error(
                        "TlmMonitor received message that it is unable to process "
                        "Messages must be tagged packet data tuples (uid, data)."
                    )
                    continue

                uid = message[0]
                if uid not in self._defns:
                    log.error(f"Skipping packet with id {uid}")
                    continue

                pkt = tlm.Packet(defn=self._defns[uid], data=message[1])

                buf = self._pktbufs.get(pkt._defn.name)
                if buf is not None:
                    buf.appendleft(pkt)

        except Exception as e:
            log.error("Exception raised in TlmMonitor while receiving messages")
            log.error(f"API telemetry is no longer being received from server. {e}")
            raise e
Ejemplo n.º 16
0
def getDefaultFSWTabDict():
    """Return the default FSW table dictionary from the module cache.

    Returns None when the dictionary cannot be loaded (the failure is
    logged).
    """
    fswtabdict = None
    filename = None  # BUG FIX: bind up front so the handler cannot NameError
    try:
        filename = _DefaultFSWTabDictCache.filename
        fswtabdict = _DefaultFSWTabDictCache.load()
    except IOError as e:  # BUG FIX: Python 2 'except IOError, e' syntax
        msg = "Could not load default command dictionary '%s': %s'"
        log.error(msg, filename, str(e))

    # BUG FIX: the loaded dictionary was previously discarded (no return).
    return fswtabdict
Ejemplo n.º 17
0
 def add(self, defn):
     """Register *defn* under both its name and its numeric code.

     Raises util.YAMLError when either key is already present.
     """
     duplicate = defn.name in self or defn.code in self.codes
     if duplicate:
         msg = "EVRDict: Duplicate EVR name/code {}".format(defn)
         log.error(msg)
         raise util.YAMLError(msg)

     self[defn.name] = defn
     self.codes[defn.code] = defn
Ejemplo n.º 18
0
 def add(self, defn):
     """Adds the given Packet Definition to this Telemetry Dictionary."""
     # Reject duplicates up front; otherwise store under the packet name.
     if defn.name in self:
         msg = f"Duplicate packet name {defn.name}"
         log.error(msg)
         raise util.YAMLError(msg)

     self[defn.name] = defn
Ejemplo n.º 19
0
def main():
    """Validate sequence text file(s) and write their binary encodings.

    For each filename: check existence and '.txt' extension, parse the
    trailing '_'-separated component as the numeric sequence id, then
    build, validate, and write the binary sequence.
    """
    log.begin()

    try:
        parser = argparse.ArgumentParser(
            description=__doc__,
            formatter_class=argparse.RawDescriptionHelpFormatter)

        # Add required command line arguments
        parser.add_argument('filename',
                            nargs='+',
                            metavar='</path/to/seq>',
                            help='File or collection of sequence file(s)')

        # Add optional command line arguments
        args = parser.parse_args()

        for fname in args.filename:
            filename = os.path.abspath(fname)
            if not os.path.isfile(filename):
                raise Exception('File not found: %s ' % filename)

            extension = os.path.splitext(filename)[1]

            if extension.lower() != '.txt':
                log.warn("Filename '%s' does not have a '.txt' extension",
                         filename)

            # Parse the filename for the applicable information.
            # CLEANUP: removed the unused 'desc'/'subsys' extractions (they
            # also raised IndexError for filenames with fewer than three
            # '_'-separated parts) and the unused 'defaults' dict.
            parts = os.path.basename(filename).split('_')
            seqid = os.path.splitext(parts[-1])[0]

            try:
                int(seqid)
            except ValueError:
                raise Exception('Invalid filename "%s": . %s' %
                                (os.path.basename(filename), __doc__))

            sequence = seq.Seq(filename, id=seqid)

            if not sequence.validate():
                for msg in sequence.log.messages:
                    log.error(msg)
            else:
                binpath = sequence.binpath
                seqid = sequence.seqid

                log.info("Writing %s (seqid=0x%04x).", binpath, seqid)
                sequence.writeBinary()

            # NOTE: 'exit' shadows the builtin but is kept -- code past this
            # snippet may reference it.
            exit = 0
    except Exception as e:  # BUG FIX: Python 2 'except Exception, e' syntax
        log.error(e)
        exit = 1
Ejemplo n.º 20
0
def main():
    """Entry point: segment pcap file(s) by bytes, packets, and/or seconds."""
    # CONSISTENCY FIX: this entry point calls log.end() but never opened the
    # log session with log.begin().
    log.begin()

    ap = argparse.ArgumentParser(
        epilog=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)

    ap.add_argument(
        '-n',
        '--dry-run',
        action='store_true',
        help='Dry run; do not actually write files',
    )

    # TYPO FIX in help text: "evey" -> "every".
    ap.add_argument('-b',
                    '--bytes',
                    help='Segment every B bytes',
                    metavar='B',
                    type=int)

    ap.add_argument('-p',
                    '--packets',
                    help='Segment every P packets',
                    metavar='P',
                    type=int)

    ap.add_argument(
        '-s',
        '--seconds',
        help='Segment when first and last pcap timestamps span S seconds',
        metavar='S',
        type=int)

    ap.add_argument(
        'format',
        help='Segment filename (should include strftime(3) time format)')

    ap.add_argument('file', nargs='+', help='Packet Capture (.pcap) file(s)')

    args = ap.parse_args()

    # At least one segmentation criterion is mandatory.
    if args.bytes is None and args.packets is None and args.seconds is None:
        msg = 'At least one of -b, -p, or -s is required.'
        ap.error(msg)

    try:
        pcap.segment(filenames=args.file,
                     format=args.format,
                     nbytes=args.bytes,
                     npackets=args.packets,
                     nseconds=args.seconds,
                     dryrun=args.dry_run)

    except KeyboardInterrupt:
        log.info('Received Ctrl-C.  Aborting pcap segmentation.')

    except IOError as e:
        log.error(str(e))

    log.end()
Ejemplo n.º 21
0
    def create_packet_from_result(cls, packet_name, data):
        """Build a tlm.Packet named *packet_name* from a query result row.

        Returns None (after logging) when the packet name is not in the
        default telemetry dictionary.
        """
        pkt_defn = tlm.getDefaultDict().get(packet_name, None)
        if pkt_defn is None:
            log.error("Unknown packet name {}. Unable to unpack SQLite result".
                      format(packet_name))
            return None

        return tlm.Packet(pkt_defn, data=data)
Ejemplo n.º 22
0
def main():
    """Read a command sequence file and send each command via CmdAPI.

    Each non-blank, non-comment line is '<delay> <cmdname> [args...]';
    lines beginning with '%' are executed as shell meta-commands.
    """
    log.begin()

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    # Add required command line argument
    parser.add_argument('filename', default=None)

    # Add optional command line arguments
    parser.add_argument('--port',
                        default=ait.config.get('command.port',
                                               ait.DEFAULT_CMD_PORT),
                        type=int)
    parser.add_argument('--verbose', default=0, type=int)

    # Get command line arguments
    args = vars(parser.parse_args())

    port = args['port']
    verbose = args['verbose']
    # CLEANUP: removed the unused 'host' local and the unused
    # "data = ' '.join(args)" (which joined the option *names*).

    cmd = api.CmdAPI(port, verbose=verbose)
    filename = args['filename']

    try:
        with open(filename, 'r') as stream:
            for line in stream.readlines():
                line = line.strip()

                # Skip blank lines and comments
                if len(line) == 0 or line.startswith('#'):
                    continue

                # Meta-command
                elif line.startswith('%'):
                    command = line[1:].strip()
                    system(command)

                # Sequence command
                else:
                    # CLEANUP: renamed inner 'args' -> 'cmdargs' so it no
                    # longer shadows the parsed-options dict.
                    tokens = line.split()
                    delay = float(tokens[0])
                    name = tokens[1]
                    cmdargs = [util.toNumber(t, t) for t in tokens[2:]]
                    cmdargs = cmd.parseArgs(name, *cmdargs)
                    time.sleep(delay)
                    log.info(line)
                    cmd.send(name, *cmdargs)
    except IOError:
        log.error("Could not open '%s' for reading." % filename)

    log.end()
Ejemplo n.º 23
0
def getDefaultFSWTabDict():  # noqa: N802
    """Load and return the cached default FSW table dictionary.

    Returns None (after logging) when the cache cannot be loaded.
    """
    filename = None
    try:
        filename = _DefaultFSWTabDictCache.filename
        return _DefaultFSWTabDictCache.load()
    except IOError as e:
        log.error("Could not load default table dictionary '%s': %s'",
                  filename, str(e))
        return None
Ejemplo n.º 24
0
        def new_init(self, routing_table=None, default_topic=None):
            """Replacement initializer: stores the default topic and loads
            the routing table from routing_table['path'] when provided.
            """
            self.default_topic = default_topic

            # BUG FIX: guard against routing_table being None (its default) --
            # "'path' in None" raises TypeError.
            if routing_table is not None and 'path' in routing_table:
                self.routing_table_object = self.load_table_yaml(
                    routing_table['path'], create_test_dict(10))
            else:
                self.routing_table_object = None
                log.error("no path specified for routing table")
            if self.routing_table_object is None:
                log.error("Unable to load routing table .yaml file")
Ejemplo n.º 25
0
    def _update_leap_second_data(self):
        """Updates the systems leap second information

        Pulls the latest leap second information from
        https://www.ietf.org/timezones/data/leap-seconds.list
        and updates the leapsecond config file.

        Raises:
            ValueError: If the connection to IETF does not return 200
            IOError: If the path to the leap seconds file is not valid
        """

        log.info("Attempting to acquire latest leapsecond data")

        # Destination pickle file: explicit 'leapseconds.filename' config
        # entry, or _DEFAULT_FILE_NAME inside the config directory.
        ls_file = ait.config.get(
            "leapseconds.filename",
            os.path.join(ait.config._directory, _DEFAULT_FILE_NAME),
        )

        url = "https://www.ietf.org/timezones/data/leap-seconds.list"
        r = requests.get(url)

        if r.status_code != 200:
            msg = "Unable to locate latest timezone data. Connection to IETF failed"
            log.error(msg)
            raise ValueError(msg)

        # Keep the '#@' expiry line plus all data lines; drop other comments.
        text = r.text.split("\n")
        lines = [
            line for line in text
            if line.startswith("#@") or not line.startswith("#")
        ]

        # Timestamps in the file are seconds since 1900-01-01 (see the
        # datetime(1900, 1, 1) base used below).
        data = {"valid": None, "leapseconds": []}
        data["valid"] = datetime.datetime(1900, 1, 1) + datetime.timedelta(
            seconds=int(lines[0].split("\t")[1]))

        # Record entries at/after the GPS epoch only.  NOTE(review): 'leap'
        # counts retained entries starting at 1 -- presumably the cumulative
        # leap-second offset relative to GPS; confirm against consumers.
        leap = 1
        for line in lines[1:-1]:
            t = datetime.datetime(1900, 1, 1) + datetime.timedelta(
                seconds=int(line.split("\t")[0]))
            if t < GPS_Epoch:
                continue

            data["leapseconds"].append((t, leap))
            leap += 1

        log.info("Leapsecond data processed")

        self._data = data
        # Persist the processed table as a pickle for later fast loading.
        with open(ls_file, "wb") as outfile:
            pickle.dump(data, outfile)

        log.info("Successfully generated leapseconds config file")
Ejemplo n.º 26
0
    def encode(self, value):
        """encode(value) -> bytearray

        Encodes the given value to a bytearray according to this
        Complex Type definition.  Returns None (after logging) when the
        value is not a known EVR.
        """
        evr = self.evrs.get(value, None)
        if evr:
            return super(EVRType, self).encode(evr.code)

        log.error(str(value) + " not found as EVR. Cannot encode.")
        return None
Ejemplo n.º 27
0
    def _run(self):
        """Receive topic-prefixed messages forever and hand them to process().

        Any exception is logged and re-raised, terminating the loop.
        """
        try:
            while True:
                gevent.sleep(0)
                raw = self.sub.recv_string()
                topic, message = raw.split(' ', 1)
                log.debug('{} recieved message from {}'.format(self, topic))
                self.process(message, topic=topic)

        except Exception as e:
            msg = ('Exception raised in {} while receiving messages: {}'
                   .format(self, e))
            log.error(msg)
            raise e
Ejemplo n.º 28
0
def main():
    """Decode a binary FSW table into a delimited text file.

    The table definition is located by matching the upload type stored in
    byte 0 of the input against the default table dictionary.  Header
    values are written as '# name=value' comments so the output can be
    re-encoded.
    """
    log.begin()

    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
    )

    parser.add_argument("in_file", help="Input file path")
    parser.add_argument("--out_file", default=None, help="Output file path")
    parser.add_argument(
        "--raw",
        action="store_true",
        help="Decode columns into raw values without enumerations",
    )

    args = parser.parse_args()

    out_path = (
        args.out_file
        if args.out_file is not None
        else f"{os.path.splitext(args.in_file)[0]}_decoded.txt"
    )

    # BUG FIX: use context managers so both files are always closed
    # (the input file was previously never closed).
    with open(args.in_file, "rb") as file_in:
        # Extract the table upload type (byte 0) from the binary so we can
        # locate the table definition that we need.
        uptype = int.from_bytes(file_in.read(1), byteorder="big")
        file_in.seek(0)
        fswtabdict = table.getDefaultFSWTabDict()
        # BUG FIX: the comprehension variable was named 'map', shadowing the
        # builtin used below for the row output.
        pos_defn = [
            entry[0] for entry in fswtabdict.items() if entry[1].uptype == uptype
        ]

        if len(pos_defn) != 1:
            log.error(
                f"Table upload type {uptype} not found in table dictionary. Stopping ..."
            )
            sys.exit(1)

        tbldefn = fswtabdict[pos_defn[0]]
        decoded = tbldefn.decode(file_in=file_in, raw=args.raw)

    with open(out_path, "w") as out_file:
        # Output our header values in comments so the table can be re-encoded easily
        hdr_row = decoded[0]
        for defn, val in zip(tbldefn.fswheaderdefns, hdr_row):
            print(f"# {defn.name}={val}", file=out_file)

        for row in decoded[1:]:
            print(tbldefn.delimiter.join(map(str, row)), file=out_file)

    log.end()
Ejemplo n.º 29
0
    def decode(self, **kwargs):
        """Decode table data according to the current table definition.

        Table data is supplied either as an open file stream or a binary
        blob and is decoded per the current FSWTabDefn format.  The result
        is a list of row-value lists; the first row holds the header
        values when this definition declares header columns.

        Keyword Arguments:
            file_in (open file stream): Stream to read the table data from.

            bin_in (bytes-like object): Encoded binary table data.

            raw (boolean): When True, columns with enumerations return the
                raw value instead of the enumerated one. (default: False)

        Raises:
            TypeError: when neither file_in nor bin_in is supplied.
        """
        # Select the input source: an open stream takes precedence over a
        # binary blob, which is wrapped for stream-style reads.
        if "file_in" in kwargs:
            in_stream = kwargs["file_in"]
        elif "bin_in" in kwargs:
            in_stream = io.BytesIO(kwargs["bin_in"])
        else:
            in_stream = None

        if in_stream is None:
            msg = "No valid input source provided to table.decode."
            log.error(msg)
            raise TypeError(msg)

        raw = kwargs.get("raw", False)

        # Renamed from 'table' so the local no longer shadows the module name.
        rows = []

        # Header column values come first, when this definition has any.
        if self.fswheaderdefns:
            rows.append([
                col.decode(in_stream, raw=raw) for col in self.fswheaderdefns
            ])

        # Decode data rows until the stream is exhausted.
        row = self._decode_table_row(in_stream, raw=raw)
        while row is not None:
            rows.append(row)
            row = self._decode_table_row(in_stream, raw=raw)

        return rows
Ejemplo n.º 30
0
def main():
    """Validate encoded sequence file(s) and write their text forms."""
    log.begin()

    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
    )

    # Add required command line arguments
    parser.add_argument(
        "filename", nargs="+", metavar="</path/to/seq>", help="encoded sequence file(s)"
    )

    # Get command line arguments
    args = parser.parse_args()
    for fname in args.filename:
        filename = os.path.abspath(fname)
        if not os.path.isfile(filename):
            raise Exception("File not found: %s " % filename)

        extension = os.path.splitext(filename)[1]

        if extension.lower() != ".bin":
            log.warn("Filename '%s' does not have a '.bin' extension", filename)

        # Parse the filename for the applicable information
        parts = os.path.basename(filename).split("_")
        seqid = os.path.splitext(parts[-1])[0]

        try:
            int(seqid)
        except ValueError:
            raise Exception(
                'Invalid filename "%s": . %s' % (os.path.basename(filename), __doc__)
            )

        # BUG FIX: this decode/validate/write block sat outside the loop, so
        # only the last file given on the command line was ever processed.
        sequence = seq.createSeq(filename, id=seqid)

        if not sequence.validate():
            for msg in sequence.messages:
                log.error(msg)
        else:
            txtpath = sequence.txtpath
            seqid = sequence.seqid
            version = sequence.version

            msg = "Writing %s (seqid=0x%04x, version=%u)."
            log.info(msg, txtpath, seqid, version)

            sequence.writeText()

    log.end()