Example #1
def producer(args):
    # connect to kafka
    producer = kafka.KafkaProducer(
        bootstrap_servers=args.kafka_brokers.split(","),
        partitioner=partitioner)

    # initialize packet capture
    capture = pcapy.open_live(args.interface, 65535, True, 3000)
    packet_count = 0

    # start packet capture
    while True:
        (pkt_hdr, pkt_raw) = capture.next()
        if pkt_hdr is not None:

            # send packet to kafka
            pkt_ts = timestamp(pkt_hdr)
            producer.send(args.topic, key=pack_ts(pkt_ts), value=pkt_raw)

            # debug messages, if needed
            packet_count += 1
            if args.debug > 0 and packet_count % args.debug == 0:
                print('Sent Packet: count=%s dt=%s topic=%s' % (
                    packet_count, to_date(pkt_ts), args.topic))
                print(to_hex(pkt_raw))
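Example #1 leans on several helpers defined elsewhere in the module (timestamp, pack_ts, to_date, to_hex) plus a partitioner callable handed to KafkaProducer. A minimal sketch of what they might look like, assuming pcapy's Pkthdr.getts() returns a (seconds, microseconds) pair and kafka-python's partitioner callable signature; the bodies below are illustrative, not the original implementations:

import binascii
import random
import struct
from datetime import datetime

def timestamp(pkt_hdr):
    # pcapy's Pkthdr.getts() returns (seconds, microseconds); fold into epoch microseconds
    (secs, micros) = pkt_hdr.getts()
    return int(secs) * 1000000 + int(micros)

def pack_ts(ts):
    # pack the timestamp as a big-endian unsigned 64-bit integer for use as the Kafka key
    return struct.pack(">Q", int(ts))

def to_date(epoch_micros):
    # render an epoch-microsecond timestamp as a readable local datetime string
    return datetime.fromtimestamp(epoch_micros / 1000000.0).strftime("%Y-%m-%d %H:%M:%S.%f")

def to_hex(pkt_raw):
    # hex-dump the raw packet bytes for debug output
    return binascii.hexlify(pkt_raw).decode("ascii")

def partitioner(key_bytes, all_partitions, available_partitions):
    # spread packets across whatever partitions are currently available
    return random.choice(available_partitions or all_partitions)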
Example #2
def producer(args, sniff_timeout_ms=500, sniff_promisc=True):
    """ Captures packets from a network interface and sends them to a Kafka topic. """

    # setup the signal handler
    signal.signal(signal.SIGINT, signal_handler)

    global producer_args
    producer_args = args

    # connect to kafka
    logging.info("Connecting to Kafka; %s", args.kafka_configs)
    kafka_producer = Producer(args.kafka_configs)

    # initialize packet capture
    logging.info("Starting packet capture")
    capture = pcapy.open_live(args.interface, args.snaplen, sniff_promisc, sniff_timeout_ms)
    pkts_in = 0

    try:
        while not finished.is_set() and (args.max_packets <= 0 or pkts_in < args.max_packets):

            # capture a packet
            (pkt_hdr, pkt_raw) = capture.next()
            if pkt_hdr is not None:
                logging.debug("Packet received: pkts_in=%d, pkt_len=%s", pkts_in, pkt_hdr.getlen())
                pkts_in += 1
                pkt_ts = timestamp(pkt_hdr)
                kafka_producer.produce(args.kafka_topic, key=pack_ts(pkt_ts), value=pkt_raw, callback=delivery_callback)

                # pretty print, if needed
                if args.pretty_print > 0 and pkts_in % args.pretty_print == 0:
                    print('Packet received[%s]' % pkts_in)

            # serve the callback queue
            kafka_producer.poll(0)

    finally:
        # flush all messages
        logging.info("Waiting for '%d' message(s) to flush", len(kafka_producer))
        kafka_producer.flush()

        # pkts_out may not be initialized if the callback was never executed
        pkts_out = 0
        if hasattr(delivery_callback, "pkts_out"):
            pkts_out = delivery_callback.pkts_out

        logging.info("'%d' packet(s) in, '%d' packet(s) out", pkts_in, pkts_out)
Example #3
def producer(args, sniff_timeout_ms=500, sniff_promisc=True):
    """ Captures packets from a network interface and sends them to a Kafka topic. """

    # setup the signal handler
    signal.signal(signal.SIGINT, signal_handler)

    global producer_args
    producer_args = args

    # connect to kafka
    logging.info("Connecting to Kafka; %s", args.kafka_configs)
    kafka_producer = Producer(args.kafka_configs)

    # initialize packet capture
    logging.info("Starting packet capture")

    # most of the old agent args aren't relevant for this POC
    if args.simulation:
        atwifi = AtWifiSimulation('atwifi', 0, 'AT Wifi Scanner Simulator')
    else:
        atwifi = AtWifi('atwifi', 0, 'AT Wifi Scanner')
    atwifi.scan_for_devices()
    atwifi.start()
    #capture = pcapy.open_live(args.interface, args.snaplen, sniff_promisc, sniff_timeout_ms)
    pkts_in = 0

    try:
        while not finished.is_set() and (args.max_packets <= 0
                                         or pkts_in < args.max_packets):

            # capture a packet
            pkt_raw = atwifi.get_next_packet()
            """
            dict(device=device_name, package=
                   [ dict(company=name_of_company, rssi=average_rssi, rssi_first=first_rssi_reading, 
                          rssi_last=last_rssi_reading, rssi_max=maximum_rssi_reading, rssi_min=minimum_rssi_reading,
                          scan_time=time_of_day_of_scan), ... ]) 

            """
            if pkt_raw is None:
                raise RuntimeError('AtWifi produced bogus packet')

            if len(pkt_raw.get('package')) == 0:
                logging.debug('Scan received - no nearby cell devices')
                continue

            logging.debug('Scan received package for device %s: %s',
                          pkt_raw['device'],
                          json.dumps(pkt_raw['package'], indent=2))

            pkts_in += 1
            # timestamp is in microseconds; pack_ts expects an integer value
            pkt_ts = int((pkt_raw.get('scan_time') -
                          datetime(1970, 1, 1)).total_seconds() * 1000000)
            #pkt_ts = timestamp(pkt_hdr)
            # confluent_kafka expects the message value as str or bytes, so serialize the package
            kafka_producer.produce(args.kafka_topic,
                                   key=pack_ts(pkt_ts),
                                   value=json.dumps(pkt_raw['package']),
                                   callback=delivery_callback)

            # pretty print, if needed
            if args.pretty_print > 0 and pkts_in % args.pretty_print == 0:
                print('Packet received[%s]' % pkts_in)

            # serve the callback queue
            kafka_producer.poll(0)

    finally:
        # flush all messages
        logging.info("Waiting for '%d' message(s) to flush",
                     len(kafka_producer))
        kafka_producer.flush()

        # pkts_out may not be initialized if the callback was never executed
        pkts_out = 0
        if hasattr(delivery_callback, "pkts_out"):
            pkts_out = delivery_callback.pkts_out

        logging.info("'%d' packet(s) in, '%d' packet(s) out", pkts_in,
                     pkts_out)
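None of the examples show how producer() is driven. A sketch of a possible entry point, assuming argparse-style arguments whose attribute names mirror what the functions above read from args; the flags and defaults are illustrative, not the projects' actual command lines:

import argparse

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="capture packets and forward them to Kafka")
    parser.add_argument("-i", "--interface", default="eth0", help="network interface to sniff")
    parser.add_argument("-b", "--kafka-brokers", dest="kafka_brokers", default="localhost:9092")
    parser.add_argument("-t", "--kafka-topic", dest="kafka_topic", default="pcap")
    parser.add_argument("-n", "--max-packets", dest="max_packets", type=int, default=0,
                        help="stop after this many packets; <= 0 runs until interrupted")
    parser.add_argument("-s", "--snaplen", type=int, default=65535)
    parser.add_argument("-p", "--pretty-print", dest="pretty_print", type=int, default=0,
                        help="print a status line every N packets; 0 disables")
    parser.add_argument("--simulation", action="store_true",
                        help="use the AtWifi simulator instead of real hardware")
    args = parser.parse_args()

    # confluent_kafka.Producer takes a configuration dict keyed by librdkafka option names
    args.kafka_configs = {"bootstrap.servers": args.kafka_brokers}

    producer(args)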