Example #1
def testWriteRead():
    packets = 'When a packet hits a pocket on a socket on a port.'.split()

    with pcap.open(TmpFilename, 'w') as stream:
        for p in packets:
            stream.write(p)

    with pcap.open(TmpFilename, 'r') as stream:
        index = 0
        prev_ts = 0

        for header, packet in stream:
            assert header.ts >= prev_ts
            assert header.incl_len == len(packets[index])
            assert header.orig_len == len(packets[index])
            assert packet == packets[index]

            index += 1
            prev_ts = header.ts

        assert index == len(packets)

        header, packet = stream.read()
        assert header.incomplete()
        assert packet is None

    os.unlink(TmpFilename)
Example #2
def testReadLittleEndian():
    bytes = 'Hello World!'
    ts = int(time.time())

    # Write pcap file
    with open(TmpFilename, 'wb') as stream:
        stream.write(
            struct.pack('<IHHiIII', 0xA1B2C3D4, 2, 4, 0, 0, 65535, 147))
        stream.write(struct.pack('<IIII', ts, 0, len(bytes), len(bytes)))
        stream.write(bytes)

    # Read pcap using API
    with pcap.open(TmpFilename, 'r') as stream:
        assert stream.header.magic_number == 0xA1B2C3D4
        assert stream.header.version_major == 2
        assert stream.header.version_minor == 4
        assert stream.header.thiszone == 0
        assert stream.header.sigfigs == 0
        assert stream.header.snaplen == 65535
        assert stream.header.network == 147

        header, packet = stream.read()
        assert header.ts == ts
        assert header.incl_len == len(bytes)
        assert header.orig_len == len(bytes)
        assert packet == bytes

        header, packet = stream.read()
        assert header.incomplete()
        assert packet is None

    os.unlink(TmpFilename)
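
The two struct formats above encode the standard libpcap layout. As a hedged companion sketch, this helper decodes the 24-byte global header by hand, assuming the same little-endian '<IHHiIII' layout the test writes; the field names mirror the header attributes asserted above.

import struct

def read_global_header(path):
    """Decode a pcap global header into a dict (little-endian assumed)."""
    with open(path, 'rb') as stream:
        fields = struct.unpack('<IHHiIII', stream.read(24))

    names = ('magic_number', 'version_major', 'version_minor',
             'thiszone', 'sigfigs', 'snaplen', 'network')
    return dict(zip(names, fields))
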
Example #3
    def send(self, command, *args, **kwargs):
        """Creates, validates, and sends the given command as a UDP
        packet to the destination (host, port) specified when this
        CmdAPI was created.
        Returns True if the command was created, valid, and sent,
        False otherwise.
        """
        status = False
        cmdobj = CMD_API._cmddict.create(command, *args, **kwargs)
        messages = []

        if not cmdobj.validate(messages):
            for msg in messages:
                log.error(msg)
        else:
            encoded = cmdobj.encode()

            if CMD_API._verbose:
                size = len(cmdobj.name)
                pad = (size - len(cmdobj.name) + 1) * ' '
                gds.hexdump(encoded, preamble=cmdobj.name + ':' + pad)

            try:
                self.publish(encoded)
                status = True

                with pcap.open(CMD_API.CMD_HIST_FILE, 'a') as output:
                    output.write(str(cmdobj))
            except IOError as e:
                log.error(str(e))

        return status
Example #4
    def _get_logger(self, handler):
        ''' Initialize a PCAP stream for logging data '''
        log_file = self._get_log_file(handler)

        if not os.path.isdir(os.path.dirname(log_file)):
            os.makedirs(os.path.dirname(log_file))

        handler['log_rot_time'] = time.gmtime()
        return pcap.open(log_file, mode='a')
Example #5
    def send(self, command, *args, **kwargs):
        """Creates, validates, and sends the given command as a UDP
        packet to the destination (host, port) specified when this
        CmdAPI was created.

        Returns True if the command was created, valid, and sent,
        False otherwise.
        """
        status = False
        cmdobj = self._cmddict.create(command, *args, **kwargs)
        messages = []

        if not cmdobj.validate(messages):
            for msg in messages:
                log.error(msg)
        else:
            encoded = cmdobj.encode()

            if self._verbose:
                size = len(cmdobj.name)
                pad = (size - len(cmdobj.name) + 1) * " "
                gds.hexdump(encoded, preamble=cmdobj.name + ":" + pad)

            try:
                # Send to either UDP socket or ZMQ publish socket

                if self._udp_socket:
                    values = (self._host, self._port, str(cmdobj))
                    log.command("Sending to %s:%d: %s" % values)
                    self._udp_socket.sendto(encoded, (self._host, self._port))
                    status = True
                elif self._pub_socket:
                    values = (self._pub_topic, str(cmdobj))
                    log.command("Sending via publisher: %s %s" % values)
                    msg = serv_utils.encode_message(self._pub_topic, encoded)

                    if msg is None:
                        log.error(
                            "CmdAPI unable to encode cmd message "
                            f"({self._pub_topic}, {encoded}) for send"
                        )
                        status = False
                    else:
                        self._pub_socket.send_multipart(msg)
                        status = True

                # Only add to history file if success status is True
                if status:
                    with pcap.open(self.CMD_HIST_FILE, "a") as output:
                        output.write(str(cmdobj))

            except socket.error as e:
                log.error(str(e))
            except IOError as e:
                log.error(str(e))

        return status
Example #6
def testQuery():
    TmpRes = "test_pcap_res.pcap"
    TmpFilename = "test_pcap_file.pcap"
    packets = "This is a nice little sentence".split()
    start = datetime.datetime.now()

    with pcap.open(TmpFilename, 'w') as stream:
        for p in packets:
            stream.write(p)
    end = datetime.datetime.max

    pcap.query(start, end, TmpRes, TmpFilename)

    with pcap.open(TmpFilename, 'r') as stream1:
        with pcap.open(TmpRes, 'r') as stream2:
            header1, packet1 = stream1.read()
            header2, packet2 = stream2.read()
            assert str(header1) == str(header2)
            assert packet1 == packet2

    os.remove(TmpRes)
    os.remove(TmpFilename)
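
Judging from the call above, pcap.query(start, end, output, filename) copies packets whose capture timestamps fall within [start, end] into the output file. A minimal usage sketch with illustrative datetimes and hypothetical filenames:

import datetime

start = datetime.datetime(2017, 8, 1)
end = datetime.datetime(2017, 8, 2)
pcap.query(start, end, 'day1.pcap', 'telemetry.pcap')
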
Example #7
def testTimes():
    packets = "This is a nice little sentence".split()
    with pcap.open(TmpFilename, 'w') as stream:
        for p in packets:
            stream.write(p)

    with pcap.open(TmpFilename, 'r') as stream:
        i = 0
        for header, packet in stream:
            if i == 0:
                exp_start = header.timestamp
            if i == 5:
                exp_end = header.timestamp
            i += 1

    times = pcap.times(TmpFilename)

    start = times[TmpFilename][0][0]
    stop = times[TmpFilename][0][1]

    assert len(times[TmpFilename]) == 1
    assert start == exp_start
    assert stop == exp_end

    # test when we have 2 separate time segments
    with pcap.open(TmpFilename, 'w') as stream:
        for p in packets:
            stream.write(p)

        time.sleep(3)

        for p in packets:
            stream.write(p)

    times = pcap.times(TmpFilename, 2)
    assert len(times[TmpFilename]) == 2

    os.remove(TmpFilename)
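
The assertions above imply that pcap.times() returns a dict mapping each filename to a list of (start, stop) timestamp pairs, one per contiguous time segment, with the second positional argument acting as the gap threshold in seconds. A hedged sketch of walking that result (the filename is hypothetical):

times = pcap.times('telemetry.pcap', 2)  # 2 = segment gap threshold, per the test

for filename, segments in times.items():
    for start, stop in segments:
        print('%s: %s -> %s' % (filename, start, stop))
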
Example #8
def testSegmentBytes(log_info):
    try:
        with pcap.open(TmpFilename, 'w') as output:
            for p in range(10):
                output.write(str(p))

        pcap.segment(TmpFilename, 'foo.pcap', nbytes=41, dryrun=True)
        expected = 'Would write 41 bytes, 1 packets, 1 seconds to foo.pcap.'

        assert len(log_info.call_args_list) == 10
        for call in log_info.call_args_list:
            assert call[0][0] == expected

    finally:
        os.unlink(TmpFilename)
Example #9
def testSegmentPackets(log_info):
    try:
        with pcap.open(TmpFilename, "w") as output:
            for p in range(10):
                output.write(str(p))

        pcap.segment(TmpFilename, "foo.pcap", npackets=5, dryrun=True)
        expected = "Would write 109 bytes, 5 packets, 1 seconds to foo.pcap."

        assert len(log_info.call_args_list) == 2
        for call in log_info.call_args_list:
            assert call[0][0] == expected

    finally:
        os.unlink(TmpFilename)
Example #10
def testSegmentSeconds(log_info):
    try:
        header = pcap.PCapPacketHeader(orig_len=1)
        with pcap.open(TmpFilename, 'w') as output:
            for p in range(10):
                header.ts_sec = p
                output.write(str(p), header)

        pcap.segment(TmpFilename, 'foo.pcap', nseconds=2, dryrun=True)
        expected = 'Would write 58 bytes, 2 packets, 2 seconds to foo.pcap.'

        assert len(log_info.call_args_list) == 5
        for call in log_info.call_args_list:
            assert call[0][0] == expected

    finally:
        os.unlink(TmpFilename)
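
Examples 8-10 drive pcap.segment() only in dry-run mode. A hedged sketch of a real split, grounded solely in the keyword arguments those tests use (nbytes, npackets, nseconds, dryrun); the filenames are illustrative and the output naming scheme is whatever the library applies:

from ait.core import pcap  # assumed import path

# Split telemetry.pcap into roughly 1 MB pieces derived from the foo.pcap name.
pcap.segment('telemetry.pcap', 'foo.pcap', nbytes=1024 * 1024, dryrun=False)
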
Example #11
def handle():
    """Return sent command history
    **Example Response**:
    .. sourcecode: json
       [
           "NO_OP",
           "SEQ_START 3423"
       ]
    If you set the **detailed** query string flag the JSON
    returned will include timestamp information.
    **Example Detailed Response**
    .. sourcecode: json
        [
            {
                "timestamp": "2017-08-01 15:41:13.117805",
                "command": "NO_OP"
            },
            {
                "timestamp": "2017-08-01 15:40:23.339886",
                "command": "NO_OP"
            }
        ]
    """
    cmds = []

    try:
        with pcap.open(CMD_API.CMD_HIST_FILE, 'r') as stream:
            if 'detailed' in bottle.request.query:
                cmds = [
                    {
                        'timestamp': str(header.timestamp),
                        'command': cmdname.decode('utf-8')
                    }
                    for (header, cmdname) in stream
                ]
                return json.dumps(list(reversed(cmds)))
            else:
                cmds = [cmdname.decode('utf-8') for (header, cmdname) in stream]
                return json.dumps(list(set(cmds)))
    except IOError:
        pass
Example #12
def main():
    log.begin()

    arguments = {
        "filename": {
            "metavar": "</path/to/cmdhist.pcap>",
            "help": "command history pcap",
        }
    }

    ap = argparse.ArgumentParser(description=__doc__)
    for name, params in arguments.items():
        ap.add_argument(name, **params)

    args = ap.parse_args()

    with pcap.open(args.filename) as stream:
        for header, data in stream:
            print(header.timestamp.strftime("%Y-%m-%d %H:%M:%S") + "\t" + data.decode())

    log.end()
Example #13
def main():
    log.begin()

    description     = """

    Query all commands from a Command History PCAP

          """

    arguments = {}
    arguments['filename'] = {
        'type'    : str,
        'metavar' : '</path/to/cmdhist.pcap>',
        'help'    : 'command history pcap'
    }

    args = gds.arg_parse(arguments, description)

    with pcap.open(args.filename) as stream:
        for header, data in stream:
            print(header.timestamp.strftime('%Y-%m-%d %H:%M:%S') + '\t' + data)

    log.end()
Example #14
def testWrite():
    bytes = 'Hello World!'
    ts = time.time()

    # Write pcap using API
    with pcap.open(TmpFilename, 'w') as stream:
        assert stream.write(bytes) == len(bytes)

    # Read pcap file
    with open(TmpFilename, 'rb') as stream:
        header = struct.unpack('IHHiIII', stream.read(24))
        assert header == (0xA1B2C3D4, 2, 4, 0, 0, 65535, 147)

        header = struct.unpack('IIII', stream.read(16))
        assert header[0] - ts <= 1  # write timestamp
        assert header[2] == len(
            bytes)  # number of octets of packet saved in file
        assert header[3] == len(bytes)  # actual length of packet

        assert stream.read(header[2]) == bytes
        assert len(stream.read()) == 0

    os.unlink(TmpFilename)
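
The 16-byte record header unpacked above ('IIII') carries ts_sec, ts_usec, incl_len, and orig_len, matching the header attributes used throughout these examples. A minimal sketch that walks a capture file using nothing but struct, assuming little-endian headers:

import struct

def iter_records(path):
    """Yield (ts_sec, ts_usec, data) for every record in a pcap file."""
    with open(path, 'rb') as stream:
        stream.read(24)  # skip the global header

        while True:
            header = stream.read(16)
            if len(header) < 16:
                break  # end of file (or a truncated record header)

            ts_sec, ts_usec, incl_len, orig_len = struct.unpack('<IIII', header)
            yield ts_sec, ts_usec, stream.read(incl_len)
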
Example #15
def main():
    try:

        log.begin()

        parser = argparse.ArgumentParser(
            description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
        )

        # Add required command line arguments
        parser.add_argument("filename")

        # Add optional command line arguments
        parser.add_argument("--port", default=3076, type=int)
        parser.add_argument("--verbose", action="store_true", default=False)

        # Get command line arguments
        args = vars(parser.parse_args())

        filename = args["filename"]
        host = "localhost"
        port = args["port"]
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        verbose = args["verbose"]

        if not verbose:
            log.info("Will only report every 10 telemetry packets")
            log.info("Will only report long telemetry send delays")

        with pcap.open(filename, "r") as stream:
            npackets = 0
            prev_ts = None

            for header, packet in stream:
                if prev_ts is None:
                    prev_ts = header.ts

                delay = header.ts - prev_ts

                if delay >= 2:
                    log.info("Next telemetry in %1.2f seconds" % delay)

                time.sleep(delay)

                nbytes = len(packet)

                if npackets == 0:
                    log.info("Sent first telemetry packet (%d bytes)" % nbytes)
                elif verbose:
                    log.info("Sent telemetry (%d bytes)" % nbytes)
                elif npackets % 10 == 0:
                    log.info("Sent 10 telemetry packets")

                sock.sendto(packet, (host, port))

                npackets += 1
                prev_ts = header.ts

    except KeyboardInterrupt:
        log.info("Received Ctrl-C.  Stopping telemetry stream.")

    except Exception as e:
        log.error("TLM send error: %s" % str(e))

    log.end()
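
The replay tool above only transmits. A minimal listener sketch for the receiving end of the loop; the host and port simply mirror the sender's defaults and are assumptions for any real setup:

import socket

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind(('localhost', 3076))  # matches the sender's default --port

while True:
    packet, addr = sock.recvfrom(65535)  # buffer sized to the snaplen used above
    print('Received %d bytes from %s' % (len(packet), addr))
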
Example #16
def main():
    tlmdict = tlm.getDefaultDict()
    pnames = list(tlmdict.keys())
    ap = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    arguments = {
        '--packet': {
            'type': str,
            'choices': pnames,
            'default': pnames[0] if len(pnames) > 0 else None,
            'help': 'Type of packets (!Packet name in tlm.yaml) in file',
            'required': len(pnames) > 1,
        },
        '--database': {
            'default': ait.config.get('database.name'),
            'help': ('Name of database in which to insert packets (may '
                     'also be specified in config.yaml database.name)'),
            'required': ait.config.get('database.name') is None
        },
        '--backend': {
            'default': 'sqlite',
            'choices': ['sqlite', 'influx'],
            'action': 'store',
            'help': 'Database backend to use when inserting packets'
        },
        '--use-current-time': {
            'action': 'store_true',
            'help': ('Use current timestamps when inserting packets instead '
                     'of ground receipt time (or the time written in the '
                     'PCAP header).')
        },
        'file': {
            'nargs': '+',
            'help': 'File(s) containing telemetry packets'
        }
    }

    for name, params in arguments.items():
        ap.add_argument(name, **params)

    args = ap.parse_args()

    log.begin()

    try:
        npackets = 0
        dbconn = None
        defn = tlm.getDefaultDict()[args.packet]
        nbytes = defn.nbytes

        if args.backend == 'sqlite':
            dbconn = db.SQLiteBackend()
        elif args.backend == 'influx':
            dbconn = db.InfluxDBBackend()

        if args.backend == 'sqlite' and (args.database == ':memory:'
                                         or not os.path.exists(args.database)):
            dbconn.create(database=args.database)
        else:
            dbconn.connect(database=args.database)

        for filename in args.file:
            log.info('Processing %s' % filename)
            with pcap.open(filename) as stream:
                for header, pkt_data in stream:
                    try:
                        packet = tlm.Packet(defn, pkt_data)

                        time = header.timestamp
                        if args.use_current_time:
                            time = None

                        dbconn.insert(packet, time=time)
                        npackets += 1
                    except struct.error:
                        log.error(
                            "Unable to unpack data into packet. Skipping ...")

    except KeyboardInterrupt:
        log.info('Received Ctrl-C.  Stopping database insert.')

    except IOError as e:
        log.error(str(e))

    finally:
        dbconn.close()

    values = npackets, args.packet, args.database
    log.info('Inserted %d %s packets into database %s.' % values)

    log.end()
Example #17
def main():
    log.begin()

    parser = argparse.ArgumentParser(description=__doc__)

    parser.add_argument("--all", action="store_true", help="output all fields/values")

    parser.add_argument(
        "--csv",
        default="output.csv",
        metavar="</path/to/output/csv>",
        help="Output as CSV with filename",
    )

    parser.add_argument(
        "--fields",
        metavar="</path/to/fields/file>",
        help="path to the file containing all fields to query, separated by newline.",
    )

    parser.add_argument(
        "--packet", required=True, help="Packet name from telemetry dictionary specified in config file."
    )

    parser.add_argument(
        "--time_field",
        help=(
            "Time field to use for time range comparisons. Ground receipt time "
            "will be used if nothing is specified."
        ),
    )

    parser.add_argument(
        "--stime",
        help=(
            "Datetime in file to start collecting the data values. Defaults to "
            "beginning of pcap. Expected format: YYYY-MM-DDThh:mm:ssZ"
        ),
    )

    parser.add_argument(
        "--etime",
        help=(
            "Datetime in file to end collecting the data values. Defaults to end "
            "of pcap. Expected format: YYYY-MM-DDThh:mm:ssZ"
        ),
    )

    parser.add_argument(
        "pcap", nargs="*", help=("PCAP file(s) containing telemetry packets")
    )

    args = parser.parse_args()

    args.ground_time = True
    if args.time_field is not None:
        args.ground_time = False

    tlmdict = tlm.getDefaultDict()
    defn = None

    try:
        if tlmdict is not None:
            defn = tlmdict[args.packet]
    except KeyError:
        log.error('Packet "%s" not defined in telemetry dictionary.' % args.packet)
        log.end()
        sys.exit(2)

    if not args.all and args.fields is None:
        log.error(
            "Must provide fields file with --fields or specify that all fields should be queried with --all"
        )
        log.end()
        sys.exit(2)

    if args.all:
        fields = [flddefn.name for flddefn in defn.fields]
    else:
        # Parse the fields file into a list
        with open(args.fields, "r") as stream:
            fields = [fldname.strip() for fldname in stream.readlines()]

    not_found = False

    # TODO Rework this into the CSV generation. Not here.
    # Duplicating effort
    for fldname in fields:
        raw = fldname.split(".")
        if fldname not in defn.fieldmap and (
            len(raw) == 2 and raw[0] != "raw" or raw[1] not in defn.fieldmap
        ):
            not_found = True
            log.error('No telemetry point named "%s"' % fldname)

    if not_found:
        log.end()
        sys.exit(2)

    if args.stime:
        start = datetime.strptime(args.stime, dmc.ISO_8601_Format)
    else:
        start = dmc.GPS_Epoch

    if args.etime:
        stop = datetime.strptime(args.etime, dmc.ISO_8601_Format)
    else:
        stop = datetime.utcnow()

    # Append time to beginning of each row
    if not args.ground_time:
        fields.insert(0, args.time_field)
    else:
        fields.insert(0, "Ground Receipt Time")

    csv_file = None
    csv_writer = None
    npackets = 0
    if args.csv:
        csv_file = open(args.csv, "w")
        csv_writer = csv.writer(csv_file)

    output(csv_writer, fields)

    # If we're comparing off ground receipt time we need to drop the header label to avoid
    # indexing errors when processing the fields.
    if args.ground_time:
        fields = fields[1:]

    rowcnt = 0

    for filename in args.pcap:
        log.debug("Processing %s" % filename)

        with pcap.open(filename, "rb") as stream:
            header, data = stream.read()

            while data:
                packet = tlm.Packet(defn, data)

                comp_time = (
                    header.timestamp
                    if args.ground_time
                    else getattr(packet, args.time_field)
                )
                if start < comp_time < stop:
                    row = []
                    for field in fields:
                        try:
                            # check if raw value requested
                            _raw = False
                            names = field.split(".")
                            if len(names) == 2 and names[0] == "raw":
                                field = names[1]
                                _raw = True

                            field_val = packet._getattr(field, raw=_raw)

                            if hasattr(field_val, "name"):
                                field_val = field_val.name
                            else:
                                field_val = str(field_val)

                        except KeyError:
                            log.debug("%s not found in Packet" % field)
                            field_val = None
                        except ValueError:
                            # enumeration not found. just get the raw value
                            field_val = packet._getattr(field, raw=True)

                        row.append(field_val)

                    if args.ground_time:
                        row = [comp_time] + row

                    rowcnt += 1
                    output(csv_writer, row)

                npackets += 1
                header, data = stream.read()

    log.debug("Parsed %s packets." % npackets)

    csv_file.close()

    if rowcnt == 0:
        os.remove(args.csv)

    log.end()
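
The --fields file consumed above is plain text with one telemetry point name per line; prefixing a name with "raw." requests the unconverted value, per the field-parsing loop. A hypothetical example file (point names are illustrative, not from a real tlm.yaml):

Voltage_A
raw.Voltage_A
Temperature_1
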
Example #18
def main():
    tlmdict = tlm.getDefaultDict()
    pnames = list(tlmdict.keys())
    ap = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    arguments = {
        "--packet": {
            "type": str,
            "choices": pnames,
            "default": pnames[0] if len(pnames) > 0 else None,
            "help": "Type of packets (!Packet name in tlm.yaml) in file",
            "required": len(pnames) > 1,
        },
        "--database": {
            "default":
            ait.config.get("database.dbname"),
            "help": ("Name of database in which to insert packets (may "
                     "also be specified in config.yaml database.name)"),
            "required":
            ait.config.get("database.dbname") is None,
        },
        "--backend": {
            "default":
            "sqlite",
            "choices": ["sqlite", "influx"],
            "action":
            "store",
            "help": ("Name of database in which to insert packets (may "
                     "also be specified in config.yaml database.name)"),
        },
        "--use-current-time": {
            "action":
            "store_true",
            "help": ("Use current time stamps when insert packets instead "
                     "of ground receipt time (or the time written in the "
                     "PCAP header)."),
        },
        "file": {
            "nargs": "+",
            "help": "File(s) containing telemetry packets"
        },
    }

    for name, params in arguments.items():
        ap.add_argument(name, **params)

    args = ap.parse_args()

    log.begin()

    try:
        npackets = 0
        dbconn = None
        defn = tlm.getDefaultDict()[args.packet]

        if args.backend == "sqlite":
            dbconn = db.SQLiteBackend()
        elif args.backend == "influx":
            dbconn = db.InfluxDBBackend()

        if args.backend == "sqlite" and (args.database == ":memory:"
                                         or not os.path.exists(args.database)):
            dbconn.create(database=args.database)
        else:
            dbconn.connect(database=args.database)

        for filename in args.file:
            log.info("Processing %s" % filename)
            with pcap.open(filename) as stream:
                for header, pkt_data in stream:

                    try:
                        packet = tlm.Packet(defn, pkt_data)

                        time = header.timestamp
                        if args.use_current_time:
                            time = None

                        dbconn.insert(packet, time=time)
                        npackets += 1
                    except struct.error:
                        log.error(
                            "Unable to unpack data into packet. Skipping ...")

    except KeyboardInterrupt:
        log.info("Received Ctrl-C.  Stopping database insert.")

    except IOError as e:
        log.error(str(e))

    finally:
        dbconn.close()

    values = npackets, args.packet, args.database
    log.info("Inserted %d %s packets into database %s." % values)

    log.end()
Example #19
def main():
    log.begin()

    description = """Parses 1553 telemetry into CSV file."""

    arguments = {
        '--all': {
            'action': 'store_true',
            'help': 'output all fields/values',
        },
        '--csv': {
            'type': str,
            'default': 'output.csv',
            'metavar': '</path/to/output/csv>',
            'help': 'Output as CSV with filename'
        },
        '--fields': {
            'type': str,
            'metavar': '</path/to/fields/file>',
            'help': 'file containing all fields to query, separated by newline.'
        },
        '--packet': {
            'type': str,
            'required': True,
            'help': 'Packet name from telemetry dictionary specified in config file.'
        },
        '--time_field': {
            'type': str,
            'help': ('Time field to use for time range comparisons. Ground '
                     'receipt time will be used if nothing is specified.')
        },
        '--stime': {
            'type': str,
            'help': ('Datetime in file to start collecting the data values. '
                     'Defaults to beginning of pcap. Expected format: '
                     'YYYY-MM-DDThh:mm:ssZ')
        },
        '--etime': {
            'type': str,
            'help': ('Datetime in file to end collecting the data values. '
                     'Defaults to end of pcap. Expected format: '
                     'YYYY-MM-DDThh:mm:ssZ')
        }
    }

    arguments['pcap'] = {
        'nargs': '*',
        'help': 'PCAP file(s) containing telemetry packets'
    }

    args = gds.arg_parse(arguments, description)

    args.ground_time = True
    if args.time_field is not None:
        args.ground_time = False

    tlmdict = tlm.getDefaultDict()
    defn = None

    try:
        if tlmdict is not None:
            defn = tlmdict[args.packet]
    except KeyError:
        log.error('Packet "%s" not defined in telemetry dictionary.' %
                  args.packet)
        gds.exit(2)

    if not args.all and args.fields is None:
        log.error(
            'Must provide fields file with --fields or specify that all fields should be queried with --all'
        )
        gds.exit(2)

    if args.all:
        fields = [flddefn.name for flddefn in defn.fields]
    else:
        # Parse the fields file into a list
        with open(args.fields, 'rb') as stream:
            fields = [fldname.strip() for fldname in stream.readlines()]

    not_found = False

    # TODO Rework this into the CSV generation. Not here.
    # Duplicating effort
    for fldname in fields:
        raw = fldname.split('.')
        if fldname not in defn.fieldmap and (len(raw) == 2 and raw[0] != 'raw'
                                             or raw[1] not in defn.fieldmap):
            not_found = True
            log.error('No telemetry point named "%s"' % fldname)

    if not_found:
        gds.exit(2)

    if args.stime:
        start = datetime.strptime(args.stime, dmc.ISO_8601_Format)
    else:
        start = dmc.GPS_Epoch

    if args.etime:
        stop = datetime.strptime(args.etime, dmc.ISO_8601_Format)
    else:
        stop = datetime.utcnow()

    # Append time to beginning of each row
    if not args.ground_time:
        fields.insert(0, args.time_field)
    else:
        fields.insert(0, 'Ground Receipt Time')

    csv_file = None
    csv_writer = None
    npackets = 0
    if args.csv:
        csv_file = open(args.csv, 'wb')
        csv_writer = csv.writer(csv_file)

    output(csv_writer, fields)

    # If we're comparing off ground receipt time we need to drop the header label to avoid
    # indexing errors when processing the fields.
    if args.ground_time:
        fields = fields[1:]

    rowcnt = 0

    for filename in args.pcap:
        log.debug('Processing %s' % filename)

        with pcap.open(filename, 'rb') as stream:
            header, data = stream.read()

            while data:
                packet = tlm.Packet(defn, data)

                comp_time = header.timestamp if args.ground_time else getattr(
                    packet, args.time_field)
                if start < comp_time < stop:
                    row = []
                    for field in fields:
                        try:
                            # check if raw value requested
                            _raw = False
                            names = field.split('.')
                            if len(names) == 2 and names[0] == 'raw':
                                field = names[1]
                                _raw = True

                            fieldVal = packet._getattr(field, raw=_raw)

                            if hasattr(fieldVal, 'name'):
                                fieldVal = fieldVal.name
                            else:
                                fieldVal = str(fieldVal)

                        except KeyError:
                            log.debug('%s not found in Packet' % field)
                            fieldVal = None
                        except ValueError:
                            # enumeration not found. just get the raw value
                            fieldVal = packet._getattr(field, raw=True)

                        row.append(fieldVal)

                    if args.ground_time:
                        row = [comp_time] + row

                    rowcnt += 1
                    output(csv_writer, row)

                npackets += 1
                header, data = stream.read()

    log.debug('Parsed %s packets.' % npackets)

    csv_file.close()

    if rowcnt == 0:
        os.remove(args.csv)

    log.end()