def main():
    """Segment one or more pcap files into new files by size, count, or time span.

    At least one of -b/--bytes, -p/--packets, or -s/--seconds must be given.
    """
    ap = argparse.ArgumentParser(
        epilog=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)

    ap.add_argument(
        "-n",
        "--dry-run",
        action="store_true",
        help="Dry run; do not actually write files",
    )

    ap.add_argument("-b",
                    "--bytes",
                    help="Segment every B bytes",  # fixed typo: "evey"
                    metavar="B",
                    type=int)

    ap.add_argument("-p",
                    "--packets",
                    help="Segment every P packets",  # fixed typo: "evey"
                    metavar="P",
                    type=int)

    ap.add_argument(
        "-s",
        "--seconds",
        help="Segment when first and last pcap timestamps span S seconds",
        metavar="S",
        type=int,
    )

    ap.add_argument(
        "format",
        help="Segment filename (should include strftime(3) time format)")

    ap.add_argument("file", nargs="+", help="Packet Capture (.pcap) file(s)")

    args = ap.parse_args()

    # At least one segmentation criterion is required.
    if args.bytes is None and args.packets is None and args.seconds is None:
        ap.error("At least one of -b, -p, or -s is required.")

    # Pair the log.end() below with a begin, as the other tools here do.
    log.begin()

    try:
        pcap.segment(
            filenames=args.file,
            format=args.format,
            nbytes=args.bytes,
            npackets=args.packets,
            nseconds=args.seconds,
            dryrun=args.dry_run,
        )

    except KeyboardInterrupt:
        log.info("Received Ctrl-C.  Aborting pcap segmentation.")

    except IOError as e:
        log.error(str(e))

    log.end()
Exemple #2
0
def main():
    """Validate a binary sequence file and write its text translation."""
    log.begin()

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    # The sequence file to translate is the only (required) argument.
    parser.add_argument('filename', default=None)

    args = vars(parser.parse_args())

    filename = os.path.abspath(args['filename'])
    extension = os.path.splitext(filename)[1]

    # Warn (but continue) when the input does not look like a binary sequence.
    if extension.lower() != '.bin':
        log.warn("Filename '%s' does not have a '.bin' extension", filename)

    sequence = seq.Seq(filename)

    if sequence.validate():
        # Validation passed: write the text translation alongside the input.
        log.info("Writing %s (seqid=0x%04x, version=%u).",
                 sequence.txtpath, sequence.seqid, sequence.version)
        sequence.writeText()
    else:
        # Validation failed: report every recorded problem.
        for msg in sequence.messages:
            log.error(msg)

    log.end()
Exemple #3
0
def main():
    """Validate a binary sequence file and print its text translation."""
    log.begin()

    parser = argparse.ArgumentParser(
        description = __doc__,
        formatter_class = argparse.RawDescriptionHelpFormatter)

    # The sequence file to print is the only (required) argument.
    parser.add_argument('filename')

    args = vars(parser.parse_args())

    path = os.path.abspath(args['filename'])
    ext = os.path.splitext(path)[1]

    # Warn (but continue) when the input does not look like a binary sequence.
    if ext.lower() != '.bin':
        log.warn("Filename '%s' does not have a '.bin' extension", path)

    sequence = seq.Seq(path)

    # Report validation problems, then print the text translation regardless
    # (unlike the "write" variant, printing is always attempted).
    if not sequence.validate():
        for msg in sequence.messages:
            log.error(msg)

    sequence.printText()

    log.end()
Exemple #4
0
def main():
    """Encode sequence text file(s) into binary sequence file(s).

    Exits with status 0 on success and 1 on any error.
    """
    log.begin()

    status = 0  # renamed from "exit" to avoid shadowing the builtin

    try:
        parser = argparse.ArgumentParser(
            description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
        )

        # Add required command line arguments
        parser.add_argument(
            "filename",
            nargs="+",
            metavar="</path/to/seq>",
            help="File or collection of sequence file(s)",
        )

        args = parser.parse_args()

        for fname in args.filename:
            filename = os.path.abspath(fname)
            if not os.path.isfile(filename):
                # Placeholder was missing from the original f-string.
                raise Exception(f"File not found: {filename}")

            extension = os.path.splitext(filename)[1]

            if extension.lower() != ".txt":
                # Placeholder was missing from the original f-string.
                log.warn(f"Filename '{filename}' does not have a '.txt' extension")

            # The sequence id is the last "_"-separated token of the
            # basename, e.g. my_sequence_123.txt -> 123.
            parts = os.path.basename(filename).split("_")
            seqid = os.path.splitext(parts[-1])[0]

            try:
                seqid = int(seqid)
            except ValueError:
                # The original string lacked the f prefix, so the braces
                # were emitted literally.
                raise Exception(
                    f'Invalid filename "{os.path.basename(filename)}": . {__doc__}'
                )

            sequence = seq.createSeq(filename, id=seqid)

            if not sequence.validate():
                for msg in sequence.log.messages:
                    log.error(msg)
            else:
                binpath = sequence.binpath
                seqid = sequence.seqid

                log.info(f"Writing {binpath} (seqid=0x{seqid:04X}).")
                sequence.writeBinary()
    except Exception as e:
        log.error(e)
        status = 1

    log.end()

    sys.exit(status)
Exemple #5
0
def main():
    """Segment one or more pcap files into new files by size, count, or time span.

    At least one of -b/--bytes, -p/--packets, or -s/--seconds must be given.
    """
    ap = argparse.ArgumentParser(
        epilog=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)

    ap.add_argument(
        '-n',
        '--dry-run',
        action='store_true',
        help='Dry run; do not actually write files',
    )

    ap.add_argument('-b',
                    '--bytes',
                    help='Segment every B bytes',  # fixed typo: "evey"
                    metavar='B',
                    type=int)

    ap.add_argument('-p',
                    '--packets',
                    help='Segment every P packets',  # fixed typo: "evey"
                    metavar='P',
                    type=int)

    ap.add_argument(
        '-s',
        '--seconds',
        help='Segment when first and last pcap timestamps span S seconds',
        metavar='S',
        type=int)

    ap.add_argument(
        'format',
        help='Segment filename (should include strftime(3) time format)')

    ap.add_argument('file', nargs='+', help='Packet Capture (.pcap) file(s)')

    args = ap.parse_args()

    # At least one segmentation criterion is required.
    if args.bytes is None and args.packets is None and args.seconds is None:
        ap.error('At least one of -b, -p, or -s is required.')

    # Pair the log.end() below with a begin, as the other tools here do.
    log.begin()

    try:
        pcap.segment(filenames=args.file,
                     format=args.format,
                     nbytes=args.bytes,
                     npackets=args.packets,
                     nseconds=args.seconds,
                     dryrun=args.dry_run)

    except KeyboardInterrupt:
        log.info('Received Ctrl-C.  Aborting pcap segmentation.')

    except IOError as e:
        log.error(str(e))

    log.end()
Exemple #6
0
def main():
    """Execute a command sequence file, sending each command to the command port.

    Each non-blank, non-comment line is either a meta-command
    ("% <shell command>", run in a subshell) or a sequence command
    ("<delay-seconds> <command-name> [args...]").
    """
    log.begin()

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    # Add required command line argument
    parser.add_argument('filename', default=None)

    # Add optional command line arguments
    parser.add_argument('--port',
                        default=ait.config.get('command.port',
                                               ait.DEFAULT_CMD_PORT),
                        type=int)
    parser.add_argument('--verbose', default=0, type=int)

    # Get command line arguments
    args = vars(parser.parse_args())

    # Removed dead locals from the original: "host" was never used and
    # "data" joined the argument dict's keys to no purpose.
    cmd = api.CmdAPI(args['port'], verbose=args['verbose'])
    filename = args['filename']

    try:
        with open(filename, 'r') as stream:
            for line in stream.readlines():
                line = line.strip()

                # Skip blank lines and comments
                if len(line) == 0 or line.startswith('#'):
                    continue

                # Meta-command
                elif line.startswith('%'):
                    command = line[1:].strip()
                    system(command)

                # Sequence command
                else:
                    tokens = line.split()
                    delay = float(tokens[0])
                    name = tokens[1]
                    # Use a distinct name so the parsed-args dict above
                    # is no longer shadowed.
                    cmdargs = [util.toNumber(t, t) for t in tokens[2:]]
                    cmdargs = cmd.parseArgs(name, *cmdargs)
                    time.sleep(delay)
                    log.info(line)
                    cmd.send(name, *cmdargs)
    except IOError:
        log.error("Could not open '%s' for reading." % filename)

    log.end()
def main():
    """Send a single command and its arguments to the ISS simulator via UDP."""
    log.begin()

    description = """

    Sends the given command and its arguments to the ISS simulator via UDP.

        Examples:
            $ ait-cmd-send OCO3_CMD_START_SEQUENCE_NOW 1

          """

    # Build the argument table one entry at a time so insertion order
    # (and therefore CLI help order) is explicit.
    arguments = OrderedDict()

    arguments['--port'] = {
        'type': int,
        'default': ait.config.get('command.port', ait.DEFAULT_CMD_PORT),
        'help': 'Port on which to send data'
    }

    arguments['--host'] = {
        'type': str,
        'default': "127.0.0.1",
        'help': 'Host to which to send data'
    }

    arguments['--verbose'] = {
        'action': 'store_true',
        'default': False,
        'help': 'Hexdump of the raw command being sent.'
    }

    arguments['command'] = {
        'type': str,
        'help': 'Name of the command to send.'
    }

    arguments['arguments'] = {
        'type': util.toNumberOrStr,
        'metavar': 'argument',
        'nargs': '*',
        'help': 'Command arguments.'
    }

    args = gds.arg_parse(arguments, description)

    # Encode and send the command through the command API.
    cmd_api = api.CmdAPI(args.port, verbose=args.verbose)
    cmd_args = cmd_api.parseArgs(args.command, *args.arguments)
    cmd_api.send(args.command, *cmd_args)

    log.end()
def main():
    """Decode a binary FSW table file into a delimited text file.

    The table definition is located by the upload type stored in byte 0 of
    the binary.  Header values are written as "# name=value" comment lines
    so the output can be re-encoded.
    """
    log.begin()

    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
    )

    parser.add_argument("in_file", help="Input file path")
    parser.add_argument("--out_file", default=None, help="Output file path")
    parser.add_argument(
        "--raw",
        action="store_true",
        help="Decode columns into raw values without enumerations",
    )

    args = parser.parse_args()

    # Default output path: input path with a "_decoded.txt" suffix.
    out_path = (
        args.out_file
        if args.out_file is not None
        else f"{os.path.splitext(args.in_file)[0]}_decoded.txt"
    )

    fswtabdict = table.getDefaultFSWTabDict()

    # Use context managers so the files are closed on every path
    # (the original leaked both handles).
    with open(args.in_file, "rb") as file_in:
        # Extract the table upload type (byte 0) from the binary so we can
        # locate the table definition that we need.
        uptype = int.from_bytes(file_in.read(1), byteorder="big")
        file_in.seek(0)

        # "matches" renamed from "map", which shadowed the builtin.
        matches = [name for name, defn in fswtabdict.items() if defn.uptype == uptype]

        if len(matches) != 1:
            log.error(
                f"Table upload type {uptype} not found in table dictionary. Stopping ..."
            )
            sys.exit(1)

        tbldefn = fswtabdict[matches[0]]
        decoded = tbldefn.decode(file_in=file_in, raw=args.raw)

    with open(out_path, "w") as out_file:
        # Output our header values in comments so the table can be re-encoded easily
        hdr_row = decoded[0]
        for defn, val in zip(tbldefn.fswheaderdefns, hdr_row):
            print(f"# {defn.name}={val}", file=out_file)

        for row in decoded[1:]:
            print(tbldefn.delimiter.join(map(str, row)), file=out_file)

    log.end()
Exemple #9
0
def exit(status=None):
    """Exit the interpreter by raising SystemExit(status), closing the log first.

    Calls ait.core.log.end() before exiting.  If *status* is omitted or
    None, it defaults to zero (i.e., success).  If *status* is numeric, it
    is used as the system exit status.  If it is any other kind of object,
    it is printed and the system exit status is one (i.e., failure) — this
    is the standard ``sys.exit`` behavior.

    NOTE: this deliberately shadows the ``exit`` builtin so that
    interactive exits also flush/close the AIT log.
    """
    log.end()
    sys.exit(status)
Exemple #10
0
def main():
    """Decode encoded (binary) sequence file(s) into text translations.

    Each input file is validated and, if valid, its text translation is
    written next to it.
    """
    log.begin()

    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
    )

    # Add required command line arguments
    parser.add_argument(
        "filename", nargs="+", metavar="</path/to/seq>", help="encoded sequence file(s)"
    )

    # Get command line arguments
    args = parser.parse_args()
    for fname in args.filename:
        filename = os.path.abspath(fname)
        if not os.path.isfile(filename):
            raise Exception("File not found: %s " % filename)

        extension = os.path.splitext(filename)[1]

        if extension.lower() != ".bin":
            log.warn("Filename '%s' does not have a '.bin' extension", filename)

        # The sequence id is the last "_"-separated token of the basename,
        # e.g. my_sequence_123.bin -> 123.
        parts = os.path.basename(filename).split("_")
        seqid = os.path.splitext(parts[-1])[0]

        try:
            # Keep the converted value (the original discarded int()'s result).
            seqid = int(seqid)
        except ValueError:
            raise Exception(
                'Invalid filename "%s": . %s' % (os.path.basename(filename), __doc__)
            )

        # Process every file inside the loop (the original dedented this
        # block out of the loop, so only the last file was processed).
        sequence = seq.createSeq(filename, id=seqid)

        if not sequence.validate():
            for msg in sequence.messages:
                log.error(msg)
        else:
            txtpath = sequence.txtpath
            seqid = sequence.seqid
            version = sequence.version

            msg = "Writing %s (seqid=0x%04x, version=%u)."
            log.info(msg, txtpath, seqid, version)

            sequence.writeText()

    log.end()
Exemple #11
0
def main():
    """Encode a text FSW table file into its fixed-size binary representation.

    The result is padded to the table definition's size; if it is larger
    than that size, the tool reports an error and exits with status 1.
    """
    log.begin()

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    parser.add_argument(
        "table_type",
        choices=list(table.getDefaultDict().keys()),
        help=
        (f"The type of table being encoded. One of {list(table.getDefaultDict().keys())}"
         ),
    )

    parser.add_argument("in_file", help="Input file path")
    parser.add_argument("--out_file", help="Output file path")

    args = parser.parse_args()

    fswtabdict = table.getDefaultFSWTabDict()
    tbldefn = fswtabdict[args.table_type]

    # Default output path: input path with a ".bin" extension.
    out_path = (args.out_file if args.out_file is not None else
                f"{os.path.splitext(args.in_file)[0]}.bin")

    with open(args.in_file, "r") as in_file:
        encoded = tbldefn.encode(file_in=in_file)

    # Verify that the encoded table is the proper size. If it's too small we need
    # to pad it out. If it's too big then the user needs to remove some of the
    # entries.
    enc_len = len(encoded)
    if enc_len < tbldefn.size:
        encoded += bytearray(tbldefn.size - enc_len)
    elif enc_len > tbldefn.size:
        # Fixed message: "entires" -> "entries" and the missing space
        # between the two concatenated sentences.
        log.error(
            f"Encoded {tbldefn.name} table is too large. "
            f"Expected size: {tbldefn.size} bytes. Encoded size: {enc_len} bytes. "
            "Please remove some entries from the table.")
        sys.exit(1)

    with open(out_path, "wb") as out_file:
        out_file.write(encoded)

    log.end()
def main():
    """Continuously send simulated telemetry packets over UDP, once per second."""
    try:
        log.begin()

        parser = argparse.ArgumentParser(
            description=__doc__,
            formatter_class=argparse.RawDescriptionHelpFormatter)

        # Add optional command line arguments
        parser.add_argument("--port", default=3076, type=int)
        parser.add_argument("--host", default="127.0.0.1", type=str)
        parser.add_argument("--packetName", default=None)
        parser.add_argument("--packetFill", default=None)

        opts = parser.parse_args()

        # Use the named packet definition, or default to the first one.
        tlmdict = tlm.getDefaultDict()
        if opts.packetName:
            defn = tlmdict[opts.packetName]
        else:
            defn = list(tlmdict.values())[0]

        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        packet = defn.simulate(fill=opts.packetFill)

        # Stream the same simulated packet until interrupted.
        while True:
            sock.sendto(packet._data, (opts.host, opts.port))

            log.info("Sent telemetry (%d bytes) to %s:%d" %
                     (packet.nbytes, opts.host, opts.port))

            time.sleep(1)

    except KeyboardInterrupt:
        log.info("Received Ctrl-C. Stopping telemetry stream.")

    except Exception as e:
        log.error("TLM send error: %s" % str(e))

    log.end()
Exemple #13
0
def main():
    """Print every command in a command-history pcap with its timestamp."""
    log.begin()

    ap = argparse.ArgumentParser(description=__doc__)

    # Single required positional argument: the pcap to dump.
    ap.add_argument(
        "filename",
        metavar="</path/to/cmdhist.pcap>",
        help="command history pcap",
    )

    args = ap.parse_args()

    # One line per packet: "<YYYY-mm-dd HH:MM:SS>\t<command text>".
    with pcap.open(args.filename) as stream:
        for header, data in stream:
            stamp = header.timestamp.strftime("%Y-%m-%d %H:%M:%S")
            print(stamp + "\t" + data.decode())

    log.end()
Exemple #14
0
def main():
    """Write a binary FSW table from a text table using a table dictionary."""
    log.begin()

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    # Add optional command line arguments
    parser.add_argument('--fswtabdict', default=None, required=True)
    parser.add_argument('--tabfile', default=None, required=True)
    parser.add_argument('--binfile', default=None, required=True)
    parser.add_argument('--tabletype', default=None, required=True)
    parser.add_argument('--verbose', action='store_true', default=False)

    # Get command line arguments
    args = parser.parse_args()
    dictpath = args.fswtabdict
    tabfile = args.tabfile
    tabletype = args.tabletype
    verbose = args.verbose

    # Load the table dictionary.  All --* options are required=True, so
    # dictpath is never None here; the fallback branch is kept for safety.
    if dictpath is not None:
        dictCache = table.FSWTabDictCache(filename=dictpath)

        try:
            filename = dictCache.filename
        except IOError as e:
            # Log the path we attempted to load.  The original referenced
            # "filename" here, which is unbound when the attribute access
            # itself raised (NameError in the error path).
            msg = 'Could not load default table dictionary "%s": %s'
            log.error(msg, dictpath, str(e))
        fswtabdict = table.FSWTabDict(filename)
    else:
        fswtabdict = table.getDefaultFSWTabDict()

    # Write out the table file using the command dictionary
    if fswtabdict is not None:
        table.writeToBinary(fswtabdict, tabletype, tabfile, verbose,
                            outbin=args.binfile)

    log.end()
Exemple #15
0
def main():
    """Print every command in a command-history pcap with its timestamp."""
    log.begin()

    description     = """

    Query all commands from a Command History PCAP

          """

    arguments = {}
    arguments['filename'] = {
        'type'    : str,
        'metavar' : '</path/to/cmdhist.pcap>',
        'help'    : 'command history pcap'
    }

    args = gds.arg_parse(arguments, description)

    with pcap.open(args.filename) as stream:
        for header, data in stream:
            # pcap yields bytes: decode before concatenating with str
            # (str + bytes raises TypeError on Python 3).
            print(header.timestamp.strftime('%Y-%m-%d %H:%M:%S') + '\t' + data.decode())

    log.end()
Exemple #16
0
def main():
    """Query or list time ranges of pcap file(s).

    With --query, extract packets in [--stime, --etime] into an output
    file; with --times, list the time ranges present in the input files.
    """
    ap = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    arguments = {
        '--query': {
            'action':
            'store_true',
            'help': ('Creates a new file containing the data from one or '
                     'more given pcap files in a given time range. If no '
                     'output file name is given, the new file name will '
                     'be the name of the first file with the time frame '
                     'appended to the name.')
        },
        '--times': {
            'action': 'store_true',
            'help': 'Lists time ranges available in pcap file(s)'
        },
        '--stime': {
            # The default must be a string: it is fed to strptime() below.
            # The original passed a datetime object, which raised TypeError
            # whenever the default was used.
            'default':
            dmc.GPS_Epoch.strftime(dmc.ISO_8601_Format),
            'help': ('Starting time for desired telemetry range in '
                     'ISO 8601 Format "YYYY-MM-DDThh:mm:ssZ" (default: '
                     '1980-01-06T00:00:00Z)')
        },
        '--etime': {
            # Same fix as --stime: format the datetime default as a string.
            'default':
            datetime.datetime.now().strftime(dmc.ISO_8601_Format),
            'help': ('Ending time for desired telemetry range in '
                     'ISO 8601 Format "YYYY-MM-DDThh:mm:ssZ" (default: '
                     'the current time; example: 2018-05-23T18:54:31Z)')
        },
        '--output': {
            'default': None,
            'help': 'The name of the output file to be generated'
        },
        '--tol': {
            'type': int,
            'default': 2,
            'help': 'Number of seconds allowed between time ranges'
        },
        'file': {
            'nargs': '+',
            'metavar': '</path/to/pcap>',
            'help': 'File or directory path containing .pcap file(s)',
        }
    }

    for name, params in arguments.items():
        ap.add_argument(name, **params)

    args = ap.parse_args()

    # Expand directory arguments into their contained .pcap files.
    pcapfiles = []
    for p in args.file:
        if os.path.isdir(p):
            pcapfiles.extend(util.listAllFiles(p, 'pcap', True))
        elif os.path.isfile(p):
            pcapfiles.append(p)
        else:
            ap.print_help()
            raise IOError("Invalid pcapfile. Check path and try again: %s" % p)

    log.begin()

    # if using pcap.query
    if args.query:
        stime = args.stime
        etime = args.etime
        output = args.output

        try:
            # Convert start time to datetime object
            starttime = datetime.datetime.strptime(stime, dmc.ISO_8601_Format)

            # Convert end time to datetime object
            endtime = datetime.datetime.strptime(etime, dmc.ISO_8601_Format)

        except ValueError:
            ap.print_help()
            print()
            print()
            raise ValueError(
                "Start and end time must be formatted as YYYY-MM-DDThh:mm:ssZ")

        pcap.query(starttime, endtime, output, *pcapfiles)

    # if using pcap.times
    elif args.times:
        times = pcap.times(pcapfiles, args.tol)

        if len(times) == 1:
            # dict.values() is not subscriptable on Python 3; wrap in list().
            for start, stop in list(times.values())[0]:
                print('%s - %s' % (start, stop))
        else:
            for filename in sorted(times.keys()):
                for start, stop in times[filename]:
                    print('%s: %s - %s' % (filename, start, stop))
    else:
        ap.print_help()

    log.end()
Exemple #17
0
def main():
    """Insert telemetry packets from pcap file(s) into a database."""
    tlmdict = tlm.getDefaultDict()
    pnames = list(tlmdict.keys())
    ap = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    arguments = {
        "--packet": {
            "type": str,
            "choices": pnames,
            "default": pnames[0] if len(pnames) > 0 else None,
            "help": "Type of packets (!Packet name in tlm.yaml) in file",
            "required": len(pnames) > 1,
        },
        "--database": {
            "default":
            ait.config.get("database.dbname"),
            "help": ("Name of database in which to insert packets (may "
                     "also be specified in config.yaml database.name)"),
            "required":
            ait.config.get("database.dbname") is None,
        },
        "--backend": {
            "default":
            "sqlite",
            "choices": ["sqlite", "influx"],
            "action":
            "store",
            "help": ("Name of database in which to insert packets (may "
                     "also be specified in config.yaml database.name)"),
        },
        "--use-current-time": {
            "action":
            "store_true",
            "help": ("Use current time stamps when insert packets instead "
                     "of ground receipt time (or the time written in the "
                     "PCAP header)."),
        },
        "file": {
            "nargs": "+",
            "help": "File(s) containing telemetry packets"
        },
    }

    for name, params in arguments.items():
        ap.add_argument(name, **params)

    args = ap.parse_args()

    log.begin()

    try:
        npackets = 0
        dbconn = None
        defn = tlm.getDefaultDict()[args.packet]

        if args.backend == "sqlite":
            dbconn = db.SQLiteBackend()
        elif args.backend == "influx":
            dbconn = db.InfluxDBBackend()

        # Create an sqlite database on first use; otherwise connect.
        if args.backend == "sqlite" and (args.database == ":memory:"
                                         or not os.path.exists(args.database)):
            dbconn.create(database=args.database)
        else:
            dbconn.connect(database=args.database)

        for filename in args.file:
            log.info("Processing %s" % filename)
            with pcap.open(filename) as stream:
                for header, pkt_data in stream:

                    try:
                        packet = tlm.Packet(defn, pkt_data)

                        # Prefer the pcap header timestamp unless told to
                        # use the current time (time=None).
                        time = header.timestamp
                        if args.use_current_time:
                            time = None

                        dbconn.insert(packet, time=time)
                        npackets += 1
                    except struct.error:
                        log.error(
                            "Unable to unpack data into packet. Skipping ...")

    except KeyboardInterrupt:
        log.info("Received Ctrl-C.  Stopping database insert.")

    except IOError as e:
        log.error(str(e))

    finally:
        # dbconn is None when an exception fired before a backend was
        # selected; the original called close() on None unconditionally.
        if dbconn:
            dbconn.close()

    values = npackets, args.packet, args.database
    log.info("Inserted %d %s packets into database %s." % values)

    log.end()
Exemple #18
0
            try:
                int(seqid)
            except ValueError:
                raise Exception('Invalid filename "%s": . %s' %
                                (os.path.basename(filename), __doc__))

            sequence = seq.Seq(filename, id=seqid)

            if not sequence.validate():
                for msg in sequence.log.messages:
                    log.error(msg)
            else:
                binpath = sequence.binpath
                seqid = sequence.seqid

                log.info("Writing %s (seqid=0x%04x).", binpath, seqid)
                sequence.writeBinary()

            exit = 0
    except Exception, e:
        log.error(e)
        exit = 1

    log.end()

    sys.exit(exit)


# Script entry point.
if __name__ == '__main__':
    main()
Exemple #19
0
def main():
    """Query or list time ranges of pcap file(s).

    With --query, extract packets in [--stime, --etime] into an output
    file; with --times, list the time ranges present in the input files.
    """
    ap = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    arguments = {
        "--query": {
            "action":
            "store_true",
            "help": ("Creates a new file containing the data from one or "
                     "more given pcap files in a given time range. If no "
                     "output file name is given, the new file name will "
                     "be the name of the first file with the time frame "
                     "appended to the name."),
        },
        "--times": {
            "action": "store_true",
            "help": "Lists time ranges available in pcap file(s)",
        },
        "--stime": {
            # Defaults are pre-formatted strings so strptime() below works
            # even when the user supplies no value.
            "default":
            dmc.GPS_Epoch.strftime(dmc.ISO_8601_Format),
            "help": ("Starting time for desired telemetry range in "
                     'ISO 8601 Format "YYYY-MM-DDThh:mm:ssZ" (default: '
                     "1980-01-06T00:00:00Z)"),
        },
        "--etime": {
            "default":
            datetime.datetime.now().strftime(dmc.ISO_8601_Format),
            "help": ("Ending time for desired telemetry range in "
                     'ISO 8601 Format "YYYY-MM-DDThh:mm:ssZ" (default: '
                     "the current time; example: 2018-05-23T18:54:31Z)"),
        },
        "--output": {
            "default": None,
            "help": "The name of the output file to be generated",
        },
        "--tol": {
            "type": int,
            "default": 2,
            "help": "Number of seconds allowed between time ranges",
        },
        "file": {
            "nargs": "+",
            "metavar": "</path/to/pcap>",
            "help": "File or directory path containing .pcap file(s)",
        },
    }

    for name, params in arguments.items():
        ap.add_argument(name, **params)

    args = ap.parse_args()

    # Expand directory arguments into their contained .pcap files.
    pcapfiles = []
    for p in args.file:
        if os.path.isdir(p):
            pcapfiles.extend(util.listAllFiles(p, "pcap", True))
        elif os.path.isfile(p):
            pcapfiles.append(p)
        else:
            ap.print_help()
            raise IOError("Invalid pcapfile. Check path and try again: %s" % p)

    log.begin()

    # if using pcap.query
    if args.query:
        stime = args.stime
        etime = args.etime
        output = args.output

        try:
            # Convert start time to datetime object
            starttime = datetime.datetime.strptime(stime, dmc.ISO_8601_Format)

            # Convert end time to datetime object
            endtime = datetime.datetime.strptime(etime, dmc.ISO_8601_Format)

        except ValueError:
            ap.print_help()
            print()
            print()
            raise ValueError(
                "Start and end time must be formatted as YYYY-MM-DDThh:mm:ssZ")

        pcap.query(starttime, endtime, output, *pcapfiles)

    # if using pcap.times
    elif args.times:
        times = pcap.times(pcapfiles, args.tol)

        # A single file's ranges are printed bare; multiple files are
        # prefixed with their filename.
        if len(times) == 1:
            for start, stop in list(times.values())[0]:
                print("%s - %s" % (start, stop))
        else:
            for filename in sorted(times.keys()):
                for start, stop in times[filename]:
                    print("%s: %s - %s" % (filename, start, stop))
    else:
        ap.print_help()

    log.end()
def main():
    """Insert telemetry packets from pcap file(s) into a database."""
    tlmdict = tlm.getDefaultDict()
    # dict.keys() is a view on Python 3 and does not support indexing;
    # materialize it so pnames[0] below works.
    pnames = list(tlmdict.keys())
    ap = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    arguments = {
        '--packet': {
            'type': str,
            'choices': pnames,
            'default': pnames[0] if len(pnames) > 0 else None,
            'help': 'Type of packets (!Packet name in tlm.yaml) in file',
            'required': len(pnames) > 1,
        },
        '--database': {
            'default':
            ait.config.get('database.name'),
            'help': ('Name of database in which to insert packets (may '
                     'also be specified in config.yaml database.name)'),
            'required':
            ait.config.get('database.name') is None
        },
        '--backend': {
            'default':
            'sqlite',
            'choices': ['sqlite', 'influx'],
            'action':
            'store',
            'help': ('Name of database in which to insert packets (may '
                     'also be specified in config.yaml database.name)')
        },
        '--use-current-time': {
            'action':
            'store_true',
            'help': ('Use current time stamps when insert packets instead '
                     'of ground receipt time (or the time written in the '
                     'PCAP header).')
        },
        'file': {
            'nargs': '+',
            'help': 'File(s) containing telemetry packets'
        }
    }

    for name, params in arguments.items():
        ap.add_argument(name, **params)

    args = ap.parse_args()

    log.begin()

    try:
        npackets = 0
        dbconn = None
        defn = tlm.getDefaultDict()[args.packet]
        # (removed unused local: nbytes = defn.nbytes)

        if args.backend == 'sqlite':
            dbconn = db.SQLiteBackend()
        elif args.backend == 'influx':
            dbconn = db.InfluxDBBackend()

        # Create an sqlite database on first use; otherwise connect.
        if args.backend == 'sqlite' and (args.database == ':memory:'
                                         or not os.path.exists(args.database)):
            dbconn.create(database=args.database)
        else:
            dbconn.connect(database=args.database)

        for filename in args.file:
            log.info('Processing %s' % filename)
            with pcap.open(filename) as stream:
                for header, pkt_data in stream:
                    try:
                        packet = tlm.Packet(defn, pkt_data)

                        # Prefer the pcap header timestamp unless told to
                        # use the current time (time=None).
                        time = header.timestamp
                        if args.use_current_time:
                            time = None

                        dbconn.insert(packet, time=time)
                        npackets += 1
                    except struct.error:
                        log.error(
                            "Unable to unpack data into packet. Skipping ...")

    except KeyboardInterrupt:
        log.info('Received Ctrl-C.  Stopping database insert.')

    except IOError as e:
        log.error(str(e))

    finally:
        # dbconn is None when an exception fired before a backend was
        # selected; the original called close() on None unconditionally.
        if dbconn:
            dbconn.close()

    values = npackets, args.packet, args.database
    log.info('Inserted %d %s packets into database %s.' % values)

    log.end()
Exemple #21
0
def main():
    """Validate AIT YAML files against JSON Schema / content validators.

    With both --yaml and --schema, validates that exact file/schema pair.
    Otherwise one of --cmd/--tlm/--evr/--limits selects the default
    dictionary, schema, and validator (an explicit --yaml overrides the
    dictionary file).  Returns the validator's return code.
    """
    argparser = argparse.ArgumentParser(
        description="""
Validate YAML files with applicable schema and/or advanced
content validation for CMD and TLM dictionaries.

YAML validation is done through a combination of JSON Schema
(http://json-schema.org/) and Python-coded content validation.  The
JSON Schema is used to validate general format of the YAML, i.e
dictionaries contain the expected keys, values are the expected type,
etc.

Why JSON Schema? All of the available YAML validators did not meet the
robustness expected for this tool. Since JSON and YAML are stored
similarly in memory, the JSON Schema became an option.  The only
difference between YAML and JSON is the use of multiple documents in
the same YAML file. The val.py module handles this implication. See
TBD wiki page for more details on developing JSON schema for an
applicable YAML file.
""",
        epilog="""
Examples:

    $ ait-yaml-validate.py --cmd
    $ ait-yaml-validate.py --tlm
    $ ait-yaml-validate.py --evr
    $ ait-yaml-validate.py --cmd --yaml /path/to/cmd.yaml
    $ ait-yaml-validate.py --tlm --yaml /path/to/tlm.yaml
    $ ait-yaml-validate.py --yaml /path/to/yaml --schema /path/to/schema
""",
        formatter_class=argparse.RawDescriptionHelpFormatter)

    argparser.add_argument('-y',
                           '--yaml',
                           metavar='</path/to/yaml>',
                           type=str,
                           help='Path to YAML file.')

    argparser.add_argument('-s',
                           '--schema',
                           metavar='</path/to/schema>',
                           type=str,
                           help='Path to JSON schema file.')

    argparser.add_argument(
        '-c',
        '--cmd',
        action='store_true',
        default=False,
        help="""Command dictionary flag. If a YAML file is not
        specified, the default command dictionary and schema will be used.
        """)

    argparser.add_argument(
        '-t',
        '--tlm',
        action='store_true',
        default=False,
        help="""Telemetry dictionary flag. If a YAML file is not
        specified, the default telemetry dictionary and schema will be used.
        """)

    argparser.add_argument(
        '-e',
        '--evr',
        action='store_true',
        default=False,
        help="""EVR dictionary flag. If a YAML file is not specified,
        the default EVR dictionary and schema will be used.
        """)

    argparser.add_argument(
        '-l',
        '--limits',
        action='store_true',
        default=False,
        help="""Limits dictionary flag. If a YAML file is not specified,
        the default limits dictionary and schema will be used.
        """)

    if len(sys.argv) < 2:
        argparser.print_usage()
        # NOTE: was a Python 2 `print` statement; print() works on both 2 and 3.
        print('Run with --help for detailed help.')
        sys.exit(2)

    options = argparser.parse_args()

    log.begin()

    # Validate specified yaml file with specified schema
    if options.yaml is not None and options.schema is not None:
        # Check YAML exists (os.error is an alias of OSError)
        if not os.path.exists(options.yaml):
            raise os.error(options.yaml + " does not exist.")

        # Check schema exists
        if not os.path.exists(options.schema):
            raise os.error(options.schema + " does not exist.")

        validator = val.Validator
        retcode = validate(validator, options.yaml, options.schema)

    else:
        if options.cmd:
            yml = ait.config.cmddict.filename
            schema = cmd.getDefaultSchema()
            validator = val.CmdValidator
        elif options.evr:
            yml = ait.config.evrdict.filename
            schema = evr.getDefaultSchema()
            validator = val.Validator
        elif options.tlm:
            yml = ait.config.tlmdict.filename
            schema = tlm.getDefaultSchema()
            validator = val.TlmValidator
        elif options.limits:
            yml = ait.config.limits.filename
            schema = limits.getDefaultSchema()
            validator = val.Validator
        else:
            # Previously fell through with `validator`/`yml`/`schema`
            # undefined and crashed with NameError; fail clearly instead.
            argparser.error(
                'Must specify one of --cmd, --tlm, --evr, or --limits '
                '(or both --yaml and --schema).')

        if options.yaml is not None:
            yml = options.yaml

        retcode = validate(validator, yml, schema)

    log.end()
    return retcode
Exemple #22
0
def main():
    """Send a relative-timed command sequence via the AIT server or UDP.

    Each non-blank, non-comment line of the sequence file is either a
    shell meta-command ('%...') or a '<delay> <command> [args...]' entry
    that is sent after sleeping <delay> seconds.
    """
    log.begin()

    descr = (
        "Sends the given relative timed sequence via the AIT server, or if the 'udp' "
        "flag is set then directly via UDP.")

    parser = argparse.ArgumentParser(
        description=descr,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    # Optional argument(s)
    parser.add_argument(
        "--topic",
        type=str,
        default=ait.config.get("command.topic", ait.DEFAULT_CMD_TOPIC),
        help="Name of topic from which to publish data",
    )
    parser.add_argument(
        "--verbose",
        action="store_true",
        default=False,
        help="Hexdump of the raw command being sent.",
    )
    parser.add_argument(
        "--udp",
        action="store_true",
        default=False,
        help="Send data to UDP socket.",
    )
    parser.add_argument(
        "--host",
        type=str,
        default=ait.DEFAULT_CMD_HOST,
        help="Host to which to send data",
    )
    parser.add_argument(
        "--port",
        type=int,
        default=ait.config.get("command.port", ait.DEFAULT_CMD_PORT),
        help="Port on which to send data",
    )

    # Required argument(s)
    parser.add_argument(
        "filename",
        type=str,
        default=None,
        help="Name of the sequence file.",
    )

    args = parser.parse_args()

    # Choose the transport: direct UDP when requested, otherwise the
    # default CmdAPI connection to the C&DH server 0MQ port.
    if args.udp:
        udp_dest = (args.host, args.port) if args.host is not None else args.port
        cmd_api = api.CmdAPI(udp_dest=udp_dest, verbose=args.verbose)
    else:
        cmd_api = api.CmdAPI(verbose=args.verbose, cmdtopic=args.topic)

    try:
        with open(args.filename, "r") as stream:
            for raw_line in stream.readlines():
                entry = raw_line.strip()

                # Skip blank lines and comments
                if not entry or entry.startswith("#"):
                    continue

                # Meta-command: run the rest of the line as a shell command
                if entry.startswith("%"):
                    system(entry[1:].strip())
                    continue

                # Sequence command: "<delay> <name> [args...]"
                tokens = entry.split()
                delay = float(tokens[0])
                cmd_name = tokens[1]
                cmd_args = [util.toNumber(t, t) for t in tokens[2:]]
                cmd_args = cmd_api.parse_args(cmd_name, *cmd_args)
                time.sleep(delay)
                log.info(entry)
                cmd_api.send(cmd_name, *cmd_args)
    except IOError:
        log.error("Could not open '%s' for reading." % args.filename)

    log.end()
Exemple #23
0
def main():
    """Extract telemetry fields from PCAP file(s) into a CSV file.

    Fields come from --fields (or all packet fields with --all).  Rows are
    limited to the --stime/--etime window, compared against ground receipt
    (pcap header) time or, when --time_field is given, that packet field.
    The CSV file is removed if no rows fell within the window.
    """
    log.begin()

    parser = argparse.ArgumentParser(description=__doc__)

    parser.add_argument("--all", action="store_true", help="output all fields/values")

    parser.add_argument(
        "--csv",
        default="output.csv",
        metavar="</path/to/output/csv>",
        help="Output as CSV with filename",
    )

    parser.add_argument(
        "--fields",
        metavar="</path/to/fields/file>",
        help="path to the file containing all fields to query, separated by newline.",
    )

    parser.add_argument(
        "--packet", required=True, help="Packet name from telemetry dictionary specified in config file."
    )

    parser.add_argument(
        "--time_field",
        help=(
            "Time field to use for time range comparisons. Ground receipt time "
            "will be used if nothing is specified."
        ),
    )

    parser.add_argument(
        "--stime",
        help=(
            "Datetime in file to start collecting the data values. Defaults to "
            "beginning of pcap. Expected format: YYYY-MM-DDThh:mm:ssZ"
        ),
    )

    parser.add_argument(
        "--etime",
        help=(
            "Datetime in file to end collecting the data values. Defaults to end "
            "of pcap. Expected format: YYYY-MM-DDThh:mm:ssZ"
        ),
    )

    parser.add_argument(
        "pcap", nargs="*", help=("PCAP file(s) containing telemetry packets")
    )

    args = parser.parse_args()

    # Use ground receipt (pcap header) time unless a packet field is given
    args.ground_time = True
    if args.time_field is not None:
        args.ground_time = False

    tlmdict = tlm.getDefaultDict()
    defn = None

    try:
        if tlmdict is not None:
            defn = tlmdict[args.packet]
    except KeyError:
        log.error('Packet "%s" not defined in telemetry dictionary.' % args.packet)
        log.end()
        sys.exit(2)

    if not args.all and args.fields is None:
        log.error(
            "Must provide fields file with --fields or specify that all fields should be queried with --all"
        )
        log.end()
        sys.exit(2)

    if args.all:
        fields = [flddefn.name for flddefn in defn.fields]
    else:
        # Parse the fields file into a list
        with open(args.fields, "r") as stream:
            fields = [fldname.strip() for fldname in stream.readlines()]

    not_found = False

    # TODO Rework this into the CSV generation. Not here.
    # Duplicating effort
    for fldname in fields:
        raw = fldname.split(".")
        # A field is valid when defined directly, or referenced as
        # "raw.<name>" with <name> defined.  (The original boolean
        # short-circuited into raw[1] and raised IndexError for a plain
        # unknown field name with no dot in it.)
        is_raw_ref = len(raw) == 2 and raw[0] == "raw"
        if fldname not in defn.fieldmap and not (
            is_raw_ref and raw[1] in defn.fieldmap
        ):
            not_found = True
            log.error('No telemetry point named "%s"' % fldname)

    if not_found:
        log.end()
        sys.exit(2)

    if args.stime:
        start = datetime.strptime(args.stime, dmc.ISO_8601_Format)
    else:
        start = dmc.GPS_Epoch

    if args.etime:
        stop = datetime.strptime(args.etime, dmc.ISO_8601_Format)
    else:
        stop = datetime.utcnow()

    # Append time to beginning of each row
    if not args.ground_time:
        fields.insert(0, args.time_field)
    else:
        fields.insert(0, "Ground Receipt Time")

    csv_file = None
    csv_writer = None
    npackets = 0
    if args.csv:
        csv_file = open(args.csv, "w")
        csv_writer = csv.writer(csv_file)

    output(csv_writer, fields)

    # If we're comparing off ground receipt time we need to drop the header label to avoid
    # indexing errors when processing the fields.
    if args.ground_time:
        fields = fields[1:]

    rowcnt = 0

    for filename in args.pcap:
        log.debug("Processing %s" % filename)

        with pcap.open(filename, "rb") as stream:
            header, data = stream.read()

            while data:
                packet = tlm.Packet(defn, data)

                comp_time = (
                    header.timestamp
                    if args.ground_time
                    else getattr(packet, args.time_field)
                )
                if start < comp_time < stop:
                    row = []
                    for field in fields:
                        try:
                            # check if raw value requested
                            _raw = False
                            names = field.split(".")
                            if len(names) == 2 and names[0] == "raw":
                                field = names[1]
                                _raw = True

                            field_val = packet._getattr(field, raw=_raw)

                            if hasattr(field_val, "name"):
                                field_val = field_val.name
                            else:
                                field_val = str(field_val)

                        except KeyError:
                            log.debug("%s not found in Packet" % field)
                            field_val = None
                        except ValueError:
                            # enumeration not found. just get the raw value
                            field_val = packet._getattr(field, raw=True)

                        row.append(field_val)

                    if args.ground_time:
                        row = [comp_time] + row

                    rowcnt += 1
                    output(csv_writer, row)

                npackets += 1
                header, data = stream.read()

    log.debug("Parsed %s packets." % npackets)

    csv_file.close()

    # Nothing matched the time window: don't leave an empty CSV behind
    if rowcnt == 0:
        os.remove(args.csv)

    log.end()
Exemple #24
0
def main():
    """Replay telemetry packets from a pcap file to localhost via UDP.

    Packets are sent to localhost:<--port> with inter-packet delays taken
    from the pcap capture timestamps; unless --verbose, progress is only
    reported every 10 packets and for delays of 2 seconds or more.
    """
    try:

        log.begin()

        parser = argparse.ArgumentParser(
            description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
        )

        # Add required command line arguments
        parser.add_argument("filename")

        # Add optional command line arguments
        parser.add_argument("--port", default=3076, type=int)
        parser.add_argument("--verbose", action="store_true", default=False)

        # Get command line arguments
        args = vars(parser.parse_args())

        filename = args["filename"]
        host = "localhost"
        port = args["port"]
        verbose = args["verbose"]

        if not verbose:
            log.info("Will only report every 10 telemetry packets")
            log.info("Will only report long telemetry send delays")

        # Context managers ensure the socket and pcap stream are closed on
        # every exit path (the original leaked the socket).
        with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock:
            with pcap.open(filename, "r") as stream:
                npackets = 0
                prev_ts = None

                for header, packet in stream:
                    if prev_ts is None:
                        prev_ts = header.ts

                    # Reproduce the original inter-packet spacing
                    delay = header.ts - prev_ts

                    if delay >= 2:
                        log.info("Next telemetry in %1.2f seconds" % delay)

                    time.sleep(delay)

                    nbytes = len(packet)

                    if npackets == 0:
                        log.info("Sent first telemetry packet (%d bytes)" % nbytes)
                    elif verbose:
                        log.info("Sent telemetry (%d bytes)" % nbytes)
                    elif npackets % 10 == 0:
                        log.info("Sent 10 telemetry packets")

                    sock.sendto(packet, (host, port))

                    npackets += 1
                    prev_ts = header.ts

    except KeyboardInterrupt:
        log.info("Received Ctrl-C.  Stopping telemetry stream.")

    except Exception as e:
        log.error("TLM send error: %s" % str(e))

    log.end()
Exemple #25
0
def main():
    """Create daily GDS directory structure from AIT_CONFIG data paths.

    Optionally overrides the effective date via --date or offsets it by
    --timedelta days, then substitutes year/doy path variables and
    creates the configured data directories.  Returns the status code
    from createDirStruct.
    """
    argparser = argparse.ArgumentParser(
        description="""
    AIT Create Directories Script

    Based on the data paths specified in the AIT_CONFIG, this software creates
    daily directories for the GDS based on the paths and any applicable variable
    substitution.
""",
        epilog="""
    Create directories based on some set of variables in a separate YAML config

        $ ait-create-dirs -c vars.yaml

    Create directories starting 3 days from now for 90 days

        $ ait-create-dirs -d 2016-01-01T00:00:00Z
""",
        formatter_class=argparse.RawDescriptionHelpFormatter)

    # Help text fixed: the original '+' concatenation was missing the
    # separating spaces and misnamed the ISO 8601 standard as "8610".
    argparser.add_argument('-d',
                           '--date',
                           metavar='<YYYY-MM-DDTHH:mm:ssZ>',
                           type=str,
                           help='Create directory structure using this '
                           'ISO 8601 datetime for strftime replacement '
                           'in directory path. Default: TODAY')

    argparser.add_argument(
        '-t',
        '--timedelta',
        metavar='<days>',
        type=int,
        help='Number of days in the future you would like '
        'to create a directory. Default: 0')

    options = argparser.parse_args()

    log.begin()

    retcode = 0

    try:
        pathvars = {}

        if options.date:
            ait.config._datetime = time.strptime(options.date,
                                                 dmc.ISO_8601_Format)

        if options.timedelta:
            ait.config._datetime = time.strptime(
                dmc.getUTCDatetimeDOY(days=options.timedelta), dmc.DOY_Format)

        pathvars['year'] = ait.config._datetime.tm_year
        pathvars['doy'] = '%03d' % ait.config._datetime.tm_yday

        # Add the updated path variables for the date
        ait.config.addPathVariables(pathvars)

        ait.config.reload()

        # Create the directory
        retcode = createDirStruct(ait.config._datapaths)

    except Exception as e:
        # print(e) works under both Python 2 and 3 (was a py2 print statement)
        print(e)
        log.error('AIT Create Directories error: %s' % traceback.format_exc())

    log.end()
    return retcode
Exemple #26
0
def main():
    """Parse 1553 telemetry from PCAP file(s) into a CSV file.

    Fields come from --fields (or all packet fields with --all).  Rows are
    limited to the --stime/--etime window, compared against ground receipt
    (pcap header) time or, when --time_field is given, that packet field.
    The CSV file is removed if no rows fell within the window.
    """
    log.begin()

    description = """Parses 1553 telemetry into CSV file."""

    arguments = {
        '--all': {
            'action': 'store_true',
            'help': 'output all fields/values',
        },
        '--csv': {
            'type': str,
            'default': 'output.csv',
            'metavar': '</path/to/output/csv>',
            'help': 'Output as CSV with filename'
        },
        '--fields': {
            'type': str,
            'metavar': '</path/to/fields/file>',
            'help':
            'file containing all fields to query, separated by newline.'
        },
        '--packet': {
            'type': str,
            'required': True,
            'help': 'field names to query, separated by space'
        },
        '--time_field': {
            'type':
            str,
            'help':
            'Time field to use for time range comparisons. Ground receipt time will be used if nothing is specified.'
        },
        '--stime': {
            'type':
            str,
            'help':
            'Datetime in file to start collecting the data values. Defaults to beginning of pcap. Expected format: YYYY-MM-DDThh:mm:ssZ'
        },
        '--etime': {
            'type':
            str,
            'help':
            'Datetime in file to end collecting the data values. Defaults to end of pcap. Expected format: YYYY-MM-DDThh:mm:ssZ'
        }
    }

    arguments['pcap'] = {
        'nargs': '*',
        'help': 'PCAP file(s) containing telemetry packets'
    }

    args = gds.arg_parse(arguments, description)

    # Use ground receipt (pcap header) time unless a packet field is given
    args.ground_time = True
    if args.time_field is not None:
        args.ground_time = False

    tlmdict = tlm.getDefaultDict()
    defn = None

    try:
        if tlmdict is not None:
            defn = tlmdict[args.packet]
    except KeyError:
        log.error('Packet "%s" not defined in telemetry dictionary.' %
                  args.packet)
        gds.exit(2)

    if not args.all and args.fields is None:
        log.error(
            'Must provide fields file with --fields or specify that all fields should be queried with --all'
        )
        gds.exit(2)

    if args.all:
        fields = [flddefn.name for flddefn in defn.fields]
    else:
        # Parse the fields file into a list
        with open(args.fields, 'rb') as stream:
            fields = [fldname.strip() for fldname in stream.readlines()]

    not_found = False

    # TODO Rework this into the CSV generation. Not here.
    # Duplicating effort
    for fldname in fields:
        raw = fldname.split('.')
        # A field is valid when defined directly, or referenced as
        # 'raw.<name>' with <name> defined.  (The original boolean
        # short-circuited into raw[1] and raised IndexError for a plain
        # unknown field name with no dot in it.)
        is_raw_ref = len(raw) == 2 and raw[0] == 'raw'
        if fldname not in defn.fieldmap and not (is_raw_ref
                                                 and raw[1] in defn.fieldmap):
            not_found = True
            log.error('No telemetry point named "%s"' % fldname)

    if not_found:
        gds.exit(2)

    if args.stime:
        start = datetime.strptime(args.stime, dmc.ISO_8601_Format)
    else:
        start = dmc.GPS_Epoch

    if args.etime:
        stop = datetime.strptime(args.etime, dmc.ISO_8601_Format)
    else:
        stop = datetime.utcnow()

    # Append time to beginning of each row
    if not args.ground_time:
        fields.insert(0, args.time_field)
    else:
        fields.insert(0, 'Ground Receipt Time')

    csv_file = None
    csv_writer = None
    npackets = 0
    if args.csv:
        csv_file = open(args.csv, 'wb')
        csv_writer = csv.writer(csv_file)

    output(csv_writer, fields)

    # If we're comparing off ground receipt time we need to drop the header label to avoid
    # indexing errors when processing the fields.
    if args.ground_time:
        fields = fields[1:]

    rowcnt = 0

    for filename in args.pcap:
        log.debug('Processing %s' % filename)

        with pcap.open(filename, 'rb') as stream:
            header, data = stream.read()

            while data:
                packet = tlm.Packet(defn, data)

                comp_time = header.timestamp if args.ground_time else getattr(
                    packet, args.time_field)
                if start < comp_time < stop:
                    row = []
                    for field in fields:
                        try:
                            # check if raw value requested
                            _raw = False
                            names = field.split('.')
                            if len(names) == 2 and names[0] == 'raw':
                                field = names[1]
                                _raw = True

                            fieldVal = packet._getattr(field, raw=_raw)

                            if hasattr(fieldVal, 'name'):
                                fieldVal = fieldVal.name
                            else:
                                fieldVal = str(fieldVal)

                        except KeyError:
                            log.debug('%s not found in Packet' % field)
                            fieldVal = None
                        except ValueError:
                            # enumeration not found. just get the raw value
                            fieldVal = packet._getattr(field, raw=True)

                        row.append(fieldVal)

                    if args.ground_time:
                        row = [comp_time] + row

                    rowcnt += 1
                    output(csv_writer, row)

                npackets += 1
                header, data = stream.read()

    log.debug('Parsed %s packets.' % npackets)

    csv_file.close()

    # Nothing matched the time window: don't leave an empty CSV behind
    if rowcnt == 0:
        os.remove(args.csv)

    log.end()
Exemple #27
0
def main():
    """Create daily GDS directory structure from AIT_CONFIG data paths.

    Optionally overrides the effective date via --date or offsets it by
    --timedelta days, then substitutes year/doy path variables and
    creates the configured data directories.  Returns the status code
    from create_dir_struct.
    """
    argparser = argparse.ArgumentParser(
        description="""
    AIT Create Directories Script

    Based on the data paths specified in the AIT_CONFIG, this software creates
    daily directories for the GDS based on the paths and any applicable variable
    substitution.
""",
        epilog="""
    Create directories based on some set of variables in a separate YAML config

        $ ait-create-dirs -c vars.yaml

    Create directories starting 3 days from now for 90 days

        $ ait-create-dirs -d 2016-01-01T00:00:00Z
""",
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )

    # Help text fixed: the original '+' concatenation was missing the
    # separating spaces and misnamed the ISO 8601 standard as "8610".
    argparser.add_argument(
        "-d",
        "--date",
        metavar="<YYYY-MM-DDTHH:mm:ssZ>",
        type=str,
        help="Create directory structure using this "
        "ISO 8601 datetime for strftime replacement "
        "in directory path. Default: TODAY",
    )

    argparser.add_argument(
        "-t",
        "--timedelta",
        metavar="<days>",
        type=int,
        help="Number of days in the future you would like "
        "to create a directory. Default: 0",
    )

    options = argparser.parse_args()

    log.begin()

    retcode = 0

    try:
        pathvars = {}

        if options.date:
            ait.config._datetime = time.strptime(options.date,
                                                 dmc.ISO_8601_Format)

        if options.timedelta:
            ait.config._datetime = time.strptime(
                dmc.getUTCDatetimeDOY(days=options.timedelta), dmc.DOY_Format)

        pathvars["year"] = ait.config._datetime.tm_year
        pathvars["doy"] = "%03d" % ait.config._datetime.tm_yday

        # Add the updated path variables for the date
        ait.config.add_path_variables(pathvars)

        ait.config.reload()

        # Create the directory
        retcode = create_dir_struct(ait.config._datapaths)

    except Exception as e:
        print(e)
        log.error("AIT Create Directories error: %s" % traceback.format_exc())

    log.end()
    return retcode
Exemple #28
0
def main():
    """Send a single command and its arguments via the AIT server or UDP.

    Parses the command name and arguments from the command line, builds a
    CmdAPI (UDP destination or 0MQ topic), and sends the command.
    """
    log.begin()

    descr = (
        "Sends the given command and its arguments to the ISS simulator via  "
        "the AIT server, or if the 'udp' flag is set then directly via UDP.")

    parser = argparse.ArgumentParser(
        description=descr,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    # Optional arguments
    parser.add_argument(
        "--topic",
        type=str,
        default=ait.config.get("command.topic", ait.DEFAULT_CMD_TOPIC),
        help="Name of topic from which to publish data",
    )
    parser.add_argument(
        "--verbose",
        action="store_true",
        default=False,
        help="Hexdump of the raw command being sent.",
    )
    parser.add_argument(
        "--udp",
        action="store_true",
        default=False,
        help="Send data to UDP socket.",
    )
    parser.add_argument(
        "--host",
        type=str,
        default=ait.DEFAULT_CMD_HOST,
        help="Host to which to send data",
    )
    parser.add_argument(
        "--port",
        type=int,
        default=ait.config.get("command.port", ait.DEFAULT_CMD_PORT),
        help="Port on which to send data",
    )

    # Required arguments
    parser.add_argument(
        "command",
        type=str,
        help="Name of the command to send.",
    )
    parser.add_argument(
        "arguments",
        type=util.toNumberOrStr,
        metavar="arguments",
        nargs="*",
        help="Command arguments.",
    )

    args = parser.parse_args()

    # Choose the transport: direct UDP when requested, otherwise the
    # default CmdAPI connection to the C&DH server 0MQ port.
    if args.udp:
        udp_dest = (args.host, args.port) if args.host is not None else args.port
        cmd_api = api.CmdAPI(udp_dest=udp_dest, verbose=args.verbose)
    else:
        cmd_api = api.CmdAPI(verbose=args.verbose, cmdtopic=args.topic)

    parsed_args = cmd_api.parse_args(args.command, *args.arguments)

    cmd_api.send(args.command, *parsed_args)

    log.end()