Example #1
def main_loop():
    # pool_size = config.getint('gevent', 'worker.pool.size')
    # (crimi) - Setting pool_size to 1 to avoid deadlocks, until we can demonstrate
    #           that the deadlocks can be avoided.
    #           An improvement would be to do the DB updates on a single worker, allowing
    #           everything else to happen concurrently. But the expected load for 1.0 isn't
    #           great ... more than manageable with 1 worker.
    #
    pool_size = 1
    pool = gevent.pool.Pool(pool_size)
    logger.info('Started gevent pool with size %d', pool_size)

    consumer = kafkareader.create_consumer(config)

    while True:
        try:
            raw_event = kafkareader.read_message(consumer)
            logger.debug('READ MESSAGE %s', raw_event)
            event = MessageItem(json.loads(raw_event))

            if event.get_command() in known_commands:
                pool.spawn(topology_event_handler, event)
            else:
                logger.debug('Received unknown type or command %s', raw_event)

        except Exception as e:
            # e.message is gone in Python 3; logger.exception() adds the traceback itself
            logger.exception(e)
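
The comment at the top of this example sketches a possible improvement: keep DB updates on a single worker while everything else runs concurrently. Purely as an illustration of that idea (none of the names below come from the project), a minimal sketch could look like this:

import gevent.pool

# Hypothetical sketch of the improvement described in the comment above: one
# single-worker pool serialises DB updates, a second pool keeps other work concurrent.
db_pool = gevent.pool.Pool(1)     # DB writes run one at a time, sidestepping the deadlocks
work_pool = gevent.pool.Pool(10)  # everything else runs concurrently

def handle_db_update(event):
    # placeholder for the DB-updating part of topology_event_handler
    print('updating DB for', event)

def handle_other(event):
    # placeholder for every other kind of work
    print('handling', event)

def dispatch(event, is_db_update):
    # the caller decides which kind of event this is; real code would inspect the message
    pool, handler = (db_pool, handle_db_update) if is_db_update else (work_pool, handle_other)
    pool.spawn(handler, event)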
Example #2
def main_loop():
    pool_size = config.getint('gevent', 'worker.pool.size')
    pool = gevent.pool.Pool(pool_size)
    logger.info('Started gevent pool with size %d', pool_size)

    consumer = kafkareader.create_consumer(config)

    while True:
        try:
            raw_event = kafkareader.read_message(consumer)
            logger.debug('READ MESSAGE %s', raw_event)
            event = MessageItem(**json.loads(raw_event))

            if (event.get_message_type() in known_messages
                    or event.get_command() in known_commands):
                pool.spawn(topology_event_handler, event)
            else:
                logger.debug('Received unknown type or command %s', raw_event)

        except Exception as e:
            # e.message is gone in Python 3; logger.exception() adds the traceback itself
            logger.exception(e)
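
MessageItem is project-specific and its source is not shown on this page; the examples only reveal that it is built from the decoded JSON (this example unpacks it as keyword arguments) and exposes get_message_type(), get_command() and a payload attribute. A minimal stand-in consistent with that usage, with invented key names, might look like this:

import json

class MessageItem(object):
    # Minimal stand-in inferred from how the examples use the class; the key
    # names ('type', 'command', 'payload') are assumptions, not the real schema.

    def __init__(self, **kwargs):
        self.type = kwargs.get('type')
        self.command = kwargs.get('command')
        self.payload = kwargs.get('payload', {})

    def get_message_type(self):
        return self.type

    def get_command(self):
        return self.command

# usage mirroring the loop body above
raw_event = '{"type": "INFO", "command": "flow_create", "payload": {}}'
event = MessageItem(**json.loads(raw_event))
print(event.get_message_type(), event.get_command())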
Example #3
def main_loop():
    pool_size = config.getint('gevent', 'worker.pool.size')

    logger.info('\n\nWHAT WHAT v002\n\n')
    logger.info('Start gevent pool with size {}.'.format(pool_size))

    pool = gevent.pool.Pool(pool_size)

    consumer = kafkareader.create_consumer(config)

    while True:
        try:
            raw_event = kafkareader.read_message(consumer)
            logger.debug('READ MESSAGE %s', raw_event)
            event = MessageItem(**json.loads(raw_event))

            if (event.get_message_type() in known_messages
                    or event.get_command() in known_commands):
                logger.debug('Processing message payload %s', event.payload)
                pool.spawn(topology_event_handler, event)

        except Exception as e:
            # e.message is gone in Python 3; logger.exception() adds the traceback itself
            logger.exception(e)
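
All three examples also rely on a project-local kafkareader module whose code is not shown here. As a rough indication of what create_consumer and read_message might wrap, here is a sketch built on the kafka-python library; the topic and config keys are invented for this illustration and are not taken from the project:

from kafka import KafkaConsumer


def create_consumer(config):
    # config is assumed to be a ConfigParser-style object; section/option names are invented
    return KafkaConsumer(
        config.get('kafka', 'topic'),
        bootstrap_servers=config.get('kafka', 'bootstrap.servers'),
        group_id=config.get('kafka', 'group.id'))


def read_message(consumer):
    # block until the next record arrives and return its value as text
    record = next(consumer)
    return record.value.decode('utf-8')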