# Assumed imports for the snippets below; `consumer` (aliased `cr` in the
# tests) and `mk_write` are project-local modules not shown in this listing.
import json
import logging
import datetime
import configparser


def track_of_yes_no_number_rsvps(topic_name: str, server: str, timestamp: str):
    '''
    Consumes RSVP events from Kafka and logs the running counts of 'yes' and
    'no' responses, together with the given timestamp, whenever a count
    reaches a multiple of 10.

    :param topic_name: Kafka topic to consume from
    :param server: Kafka bootstrap server
    :param timestamp: timestamp string included in every log line
    :return: None; counts are emitted via logging
    '''
    try:
        event_yes = {}
        event_no = {}
        output_iterator_code = 10
        for msg in consumer.define_consumer(topic_name, server):
            events = msg.value.decode('utf8')
            json_events = json.loads(events)
            response = json_events["response"]
            if response in ('yes', 'no'):
                bucket = event_yes if response == 'yes' else event_no
                bucket[json_events["rsvp_id"]] = response
                # Log only when the count that just changed reaches a multiple
                # of 10; the original modulo check also fired while a count
                # was still 0, i.e. on almost every early message.
                if len(bucket) % output_iterator_code == 0:
                    logging.info(
                        f'Count of yes {len(event_yes)} and count of no {len(event_no)} time is {timestamp}'
                    )

    except Exception as ex:
        logging.error(f'Error occurred {ex}')
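# `consumer.define_consumer` / `cr.define_consumer` is a helper that does not
# appear in this listing. A minimal sketch of what the snippets seem to
# assume, using kafka-python (an assumption, not the project's actual code):
from kafka import KafkaConsumer

def define_consumer(topic_name: str, server: str) -> KafkaConsumer:
    # an iterable consumer starting from the earliest available offset
    return KafkaConsumer(
        topic_name,
        bootstrap_servers=server,
        auto_offset_reset='earliest',
    )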
def test_send_message_to_kafka(producer, topic, server):
    key = b'foo'
    value = b'bar3'
    producer.send(topic, key=key, value=value)
    producer.flush()

    consumer = cr.define_consumer(topic, server)
    # read a single message and stop; asserting inside a plain `for` loop
    # would check every message on the topic and block on an idle consumer
    msg = next(consumer)
    assert msg.key == key and msg.value == value
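# The tests take `producer`, `topic` and `server` as pytest fixtures that are
# not part of this listing. A hypothetical conftest.py sketch, assuming
# kafka-python and a locally running broker:
import pytest
from kafka import KafkaProducer

@pytest.fixture
def server():
    return 'localhost:9092'  # assumed broker address

@pytest.fixture
def topic():
    return 'meetup_test'  # assumed test topic name

@pytest.fixture
def producer(server):
    p = KafkaProducer(bootstrap_servers=server)
    yield p
    p.close()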
def test_consumer_write_data(tmp_path, producer, topic, server):
    producer_to_send(producer, topic)

    file = tmp_path / 'sub'
    file.mkdir()
    p = file / 'output.txt'

    consumer = cr.define_consumer(topic, server)
    msg = next(consumer)
    mk_write.write_data_to_file(p, msg)

    with open(p, 'r') as f:
        datastore = json.load(f)
    # compare parsed file contents with the parsed payload; `msg.value`
    # itself is raw bytes and would never equal a parsed JSON object
    assert datastore == json.loads(msg.value)
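# `producer_to_send` is another helper missing from this listing. A minimal
# hypothetical sketch, consistent with the test reading one message back:
def producer_to_send(producer, topic):
    # send one JSON payload so the consumer above has exactly one message
    payload = json.dumps({'response': 'yes', 'rsvp_id': 1}).encode('utf8')
    producer.send(topic, value=payload)
    producer.flush()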
def create_kafka_consumer_to_group_event(topic_name: str, server: str, timestamp: str):
    '''
    Consumes events from Kafka and, every 10 messages, logs the number of
    distinct venues seen so far together with the given timestamp.

    :param topic_name: Kafka topic to consume from
    :param server: Kafka bootstrap server
    :param timestamp: timestamp string included in every log line
    :return: None; counts are emitted via logging
    '''
    set_of_events = set()
    times = 0
    output_iterator_code = 10
    for msg in consumer.define_consumer(topic_name, server):
        events = msg.value.decode('utf8')
        json_events = json.loads(events)
        try:
            set_of_events.add(json_events["venue"]["venue_id"])
        except KeyError:
            # some events carry no venue information; skip them
            pass
        times += 1
        if times == output_iterator_code:
            logging.info(f'Count of distinct events {len(set_of_events)} and time {timestamp}')
            times = 0

def write_data_to_file(file_name, msg_from_kafka):
    '''Dumps the collected messages to a JSON file.'''
    try:
        with open(file_name, 'w') as fd:
            json.dump(msg_from_kafka, fd, indent=4, sort_keys=True)
            logging.info('Data is being written')
    except Exception as e:
        logging.error(f'Cannot write to the file: {e}')
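
# The `__main__` block below relies on a `chunks` helper that is not in this
# listing. A minimal sketch of the assumed behaviour: a generator yielding
# lists of `size` consecutive messages from the consumer.
from itertools import islice

def chunks(iterable, size):
    it = iter(iterable)
    while True:
        chunk = list(islice(it, size))
        if not chunk:
            return
        yield chunk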

if __name__ == '__main__':
    #configuration variables
    logging.basicConfig(format='%(asctime)s - %(message)s', level=logging.INFO)
    config = configparser.ConfigParser()
    path = 'configuration.ini'
    config.read(path)
    file_name = 'files/' + config['DEFAULT']['file_name']
    server = config['DEFAULT']['server']
    topic_name = config['DEFAULT']['topic_name']
    request_url = config['DEFAULT']['request_url']
    size = int(config['DEFAULT']['size'])

    time_stamp = datetime.datetime.now().timestamp()

    msg_from_kafka = consumer.define_consumer(topic_name, server)
    file_name = file_name + str(time_stamp)

    data = list()
    # take the first chunk of `size` messages from the consumer
    for val in next(chunks(msg_from_kafka, size)):
        events = val.value.decode('utf8')
        data.append(json.loads(events))
    # log once after collecting, instead of re-logging the growing list
    # on every iteration
    logging.info(data)
    write_data_to_file(file_name, data)
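
# An illustrative configuration.ini matching the keys read above (the values
# are placeholders, not the project's actual configuration):
#
# [DEFAULT]
# file_name = meetup_rsvps.json
# server = localhost:9092
# topic_name = meetup
# request_url = https://stream.meetup.com/2/rsvps
# size = 100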