Example #1
def consumer(args):
    # connect to kafka
    brokers = args.kafka_brokers.split(",")
    kafka_consumer = kafka.KafkaConsumer(args.topic, bootstrap_servers=brokers)

    # if debug not set, write libpcap global header
    if args.debug == 0:
        sys.stdout.write(global_header())

    # start packet capture
    packet_count = 0
    for msg in kafka_consumer:

        # if debug not set, write the packet header and packet
        if args.debug == 0:
            sys.stdout.write(packet_header(msg.value, msg.key))
            sys.stdout.write(msg.value)

        elif packet_count % args.debug == 0:
            print 'Packet: count=%s dt=%s topic=%s' % (
                packet_count, to_date(unpack_ts(msg.key)), args.topic)
            print to_hex(msg.value)

        packet_count += 1
        if args.packet_count > 0 and packet_count >= args.packet_count:
            break
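This example (and Example #2, which is nearly identical) calls several helpers that are not shown: global_header, packet_header, unpack_ts, to_date and to_hex. A minimal sketch of what they might look like, assuming the output is a standard libpcap stream and that the Kafka message key carries an 8-byte, big-endian timestamp in epoch microseconds (both of these are assumptions, not taken from the examples):

import struct
import datetime

def global_header(snaplen=65535, network=1):
    # 24-byte libpcap file header: magic number, version 2.4, thiszone,
    # sigfigs, snapshot length, link type (1 = Ethernet)
    return struct.pack('<IHHiIII', 0xa1b2c3d4, 2, 4, 0, 0, snaplen, network)

def unpack_ts(key):
    # assumption: the message key is an 8-byte, big-endian timestamp
    # expressed in epoch microseconds
    return struct.unpack('>Q', key)[0]

def packet_header(pkt, key):
    # 16-byte libpcap record header: ts_sec, ts_usec, captured length, original length
    epoch_micros = unpack_ts(key)
    ts_sec, ts_usec = divmod(epoch_micros, 1000000)
    return struct.pack('<IIII', ts_sec, ts_usec, len(pkt), len(pkt))

def to_date(epoch_micros):
    # human-readable timestamp used by the pretty-print branch
    return datetime.datetime.fromtimestamp(epoch_micros / 1000000.0).strftime('%Y-%m-%d %H:%M:%S.%f')

def to_hex(pkt):
    # space-separated hex dump of the raw packet bytes
    return ' '.join('%02x' % b for b in bytearray(pkt))
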
Example #2
def consumer(args):
    # connect to kafka
    brokers = args.kafka_brokers.split(",")
    kafka_consumer = kafka.KafkaConsumer(args.topic, bootstrap_servers=brokers)

    # if debug not set, write libpcap global header
    if args.debug == 0:
        sys.stdout.write(global_header())

    # start packet capture
    packet_count = 0
    for msg in kafka_consumer:

        # if debug not set, write the packet header and packet
        if args.debug == 0:
            sys.stdout.write(packet_header(msg.value, msg.key))
            sys.stdout.write(msg.value)

        elif packet_count % args.debug == 0:
            print "Packet: count=%s dt=%s topic=%s" % (packet_count, to_date(unpack_ts(msg.key)), args.topic)
            print to_hex(msg.value)

        packet_count += 1
        if args.packet_count > 0 and packet_count >= args.packet_count:
            break
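The args object in Examples #1 and #2 only needs four attributes: kafka_brokers, topic, debug and packet_count. One possible command-line front end (the flag names below are illustrative, not taken from the examples):

import argparse

def make_parser():
    parser = argparse.ArgumentParser(
        description='consume packets from a Kafka topic and write a libpcap stream to stdout')
    parser.add_argument('-b', '--kafka-brokers', dest='kafka_brokers', default='localhost:9092',
                        help='comma-separated list of Kafka brokers')
    parser.add_argument('-t', '--topic', dest='topic', required=True,
                        help='Kafka topic to consume packets from')
    parser.add_argument('-d', '--debug', dest='debug', type=int, default=0,
                        help='0 writes raw libpcap to stdout; N > 0 pretty-prints every Nth packet instead')
    parser.add_argument('-c', '--packet-count', dest='packet_count', type=int, default=0,
                        help='stop after this many packets; 0 or less means run until interrupted')
    return parser

if __name__ == '__main__':
    consumer(make_parser().parse_args())
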
Example #3
def consumer(args, poll_timeout=3.0):
    """ Consumes packets from a Kafka topic. """

    # setup the signal handler
    signal.signal(signal.SIGINT, signal_handler)

    # connect to kafka
    logging.debug("Connecting to Kafka; %s", args.kafka_configs)
    kafka_consumer = Consumer(args.kafka_configs)
    kafka_consumer.subscribe([args.kafka_topic])

    # if 'pretty-print' not set, write libpcap global header
    if args.pretty_print == 0:
        sys.stdout.write(global_header(args))
        sys.stdout.flush()

    try:
        pkts_in = 0
        while not finished.is_set() and (args.max_packets <= 0 or pkts_in < args.max_packets):

            # consume a message from kafka
            msg = kafka_consumer.poll(timeout=poll_timeout)
            if msg is None:
                # no message received
                continue

            elif msg.error():

                if msg.error().code() == KafkaError._PARTITION_EOF:
                    logging.debug("reached end of topar: topic=%s, partition=%d, offset=%s", msg.topic(), msg.partition(), msg.offset())
                else:
                    raise KafkaException(msg.error())

            else:
                pkts_in += 1
                logging.debug("Packet received: pkts_in=%d", pkts_in)

                if args.pretty_print == 0:

                    # write the packet header and packet
                    sys.stdout.write(packet_header(msg))
                    sys.stdout.write(msg.value())
                    sys.stdout.flush()

                elif pkts_in % args.pretty_print == 0:

                    # pretty print
                    print 'Packet: count=%s date=%s topic=%s' % (
                        pkts_in, to_date(unpack_ts(msg.key())), args.kafka_topic)
                    print to_hex(msg.value())

    finally:
        sys.stdout.close()
        kafka_consumer.close()
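Example #3 (and #5 and #6 below) also depends on a module-level finished event and a signal_handler that are defined elsewhere; the SIGINT handler is what lets the poll loop exit cleanly. A sketch of that wiring, inferred only from how the names are used here:

import threading

# set by the SIGINT handler and checked at the top of the consume loop
finished = threading.Event()

def signal_handler(signum, frame):
    # ask the consumer loop to stop once the current poll() returns
    finished.set()
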
Example #4
def delivery_callback(err, msg):
    """ Callback executed when message delivery either succeeds or fails. """

    # initialize counter, if needed
    if not hasattr(delivery_callback, "pkts_out"):
        delivery_callback.pkts_out = 0

    if err:
        logging.error("message delivery failed: error=%s", err)

    elif msg is not None:
        delivery_callback.pkts_out += 1

        # 'producer_args' is expected to be set at module level before delivery
        pretty_print = producer_args.pretty_print

        if pretty_print > 0 and delivery_callback.pkts_out % pretty_print == 0:
            print 'Packet delivered[%s]: date=%s topic=%s partition=%s offset=%s len=%s' % (
                delivery_callback.pkts_out, to_date(unpack_ts(msg.key())), msg.topic(),
                msg.partition(), msg.offset(), len(msg.value()))
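The callback in Example #4 reads a module-level producer_args and is meant to be handed to confluent-kafka's Producer.produce(). A minimal producer loop that would drive it, with the packet source left as a generic iterable of (key, value) pairs (an assumption made for illustration):

from confluent_kafka import Producer

def producer(args, packets):
    """ Publishes packets to a Kafka topic; 'packets' yields (key, value) pairs. """
    global producer_args
    producer_args = args

    kafka_producer = Producer(args.kafka_configs)
    for key, value in packets:
        kafka_producer.produce(args.kafka_topic, value=value, key=key,
                               callback=delivery_callback)
        # serve delivery callbacks for previously produced messages
        kafka_producer.poll(0)

    # block until all outstanding messages have been delivered
    kafka_producer.flush()
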
Example #5
def consumer(args, poll_timeout=3.0):
    """ Consumes packets from a Kafka topic. """

    # setup the signal handler
    signal.signal(signal.SIGINT, signal_handler)

    # where to start consuming messages from
    kafka_offset_options = {
        "begin": seek_to_begin,
        "end": seek_to_end,
        "stored": seek_to_stored
    }
    on_assign_cb = kafka_offset_options[args.kafka_offset]

    # connect to kafka
    logging.debug("Connecting to Kafka; %s", args.kafka_configs)
    kafka_consumer = Consumer(args.kafka_configs)
    kafka_consumer.subscribe([args.kafka_topic], on_assign=on_assign_cb)

    # if 'pretty-print' not set, write libpcap global header
    if args.pretty_print == 0:
        sys.stdout.write(global_header(args))
        sys.stdout.flush()

    try:
        pkts_in = 0
        while not finished.is_set() and (args.max_packets <= 0 or pkts_in < args.max_packets):

            # consume a message from kafka
            msg = kafka_consumer.poll(timeout=poll_timeout)
            if msg is None:
                # no message received
                continue

            elif msg.error():

                if msg.error().code() == KafkaError._PARTITION_EOF:
                    if args.pretty_print > 0:
                        print "Reached end of topar: topic=%s, partition=%d, offset=%s" % (
                            msg.topic(), msg.partition(), msg.offset())
                else:
                    raise KafkaException(msg.error())

            else:
                pkts_in += 1
                logging.debug("Packet received: pkts_in=%d", pkts_in)

                if args.pretty_print == 0:

                    # write the packet header and packet
                    sys.stdout.write(packet_header(msg))
                    sys.stdout.write(msg.value())
                    sys.stdout.flush()

                elif pkts_in % args.pretty_print == 0:

                    # pretty print
                    print 'Packet[%s]: date=%s topic=%s partition=%s offset=%s len=%s' % (
                        pkts_in, to_date(unpack_ts(msg.key())), args.kafka_topic,
                        msg.partition(), msg.offset(), len(msg.value()))

    finally:
        sys.stdout.close()
        kafka_consumer.close()
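Example #5 looks up one of three on_assign callbacks, seek_to_begin, seek_to_end and seek_to_stored, that are not shown. They could be written with confluent-kafka's special offset constants roughly as follows (a sketch, assuming that is how the starting offsets are chosen):

from confluent_kafka import OFFSET_BEGINNING, OFFSET_END, OFFSET_STORED

def seek_to_begin(kafka_consumer, partitions):
    # start every assigned partition from its earliest available offset
    for p in partitions:
        p.offset = OFFSET_BEGINNING
    kafka_consumer.assign(partitions)

def seek_to_end(kafka_consumer, partitions):
    # start every assigned partition from its latest offset
    for p in partitions:
        p.offset = OFFSET_END
    kafka_consumer.assign(partitions)

def seek_to_stored(kafka_consumer, partitions):
    # resume from the offsets stored for the consumer group
    for p in partitions:
        p.offset = OFFSET_STORED
    kafka_consumer.assign(partitions)
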
Example #6
def consumer(args, poll_timeout=3.0):
    """ Consumes packets from a Kafka topic. """

    # setup the signal handler
    signal.signal(signal.SIGINT, signal_handler)

    # where to start consuming messages from
    kafka_offset_options = {
        "begin": seek_to_begin,
        "end": seek_to_end,
        "stored": seek_to_stored
    }
    on_assign_cb = kafka_offset_options[args.kafka_offset]

    # connect to kafka
    logging.debug("Connecting to Kafka; %s", args.kafka_configs)
    kafka_consumer = Consumer(args.kafka_configs)
    kafka_consumer.subscribe([args.kafka_topic], on_assign=on_assign_cb)

    # if 'pretty-print' not set, write libpcap global header
    if args.pretty_print == 0:
        sys.stdout.write(global_header(args))
        sys.stdout.flush()

    try:
        pkts_in = 0
        while not finished.is_set() and (args.max_packets <= 0 or pkts_in < args.max_packets):

            # consume a message from kafka
            msg = kafka_consumer.poll(timeout=poll_timeout)
            if msg is None:
                # no message received
                continue

            elif msg.error():

                if msg.error().code() == KafkaError._PARTITION_EOF:
                    if args.pretty_print > 0:
                        print "Reached end of topar: topic=%s, partition=%d, offset=%s" % (
                            msg.topic(), msg.partition(), msg.offset())
                else:
                    raise KafkaException(msg.error())

            else:
                pkts_in += 1
                logging.debug("Packet received: pkts_in=%d", pkts_in)

                if args.pretty_print == 0:

                    # write the packet header and packet

                    # AT: we are just sending over the results of the scan (a list of
                    # MACs/RSSIs), whereas this code originally dealt with network
                    # packet sniffers
                    sys.stdout.write(json.dumps(msg.value(), indent=2))
                    # sys.stdout.write(packet_header(msg))
                    # sys.stdout.write(msg.value())
                    sys.stdout.flush()

                elif pkts_in % args.pretty_print == 0:

                    # pretty print
                    print 'Packet[%s]: date=%s topic=%s partition=%s offset=%s len=%s' % (
                        pkts_in, to_date(unpack_ts(msg.key())), args.kafka_topic,
                        msg.partition(), msg.offset(), len(msg.value()))

    finally:
        sys.stdout.close()
        kafka_consumer.close()
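Note that in Example #6 msg.value() is the raw message payload; if the producer publishes the scan results as a JSON-encoded string, re-indenting them for display requires decoding the payload first. A small variant of that branch, assuming UTF-8 JSON values:

import json

def format_scan(msg):
    # decode the JSON payload (a list of MAC/RSSI entries, per the comment in
    # Example #6) and re-serialize it with indentation for display
    scan = json.loads(msg.value().decode('utf-8'))
    return json.dumps(scan, indent=2)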