Example #1
0
def main():
    """Main kafka listener entrypoint."""
    start_http_server(int(PROMETHEUS_PORT))
    init_logging()
    init_db()
    LOGGER.info("Starting upload listener.")
    # get DB connection
    conn = DatabaseHandler.get_connection()

    loop = asyncio.get_event_loop()
    signals = (signal.SIGHUP, signal.SIGTERM, signal.SIGINT)
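    # for each shutdown signal, schedule terminate() on the event loop;
    # the sig=sig default makes the lambda capture the current signal value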
    for sig in signals:
        loop.add_signal_handler(
            sig, lambda sig=sig: loop.create_task(terminate(sig, loop)))
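    # worker pool whose pending-task queue is capped at MAX_QUEUE_SIZE,
    # so message handling cannot queue up work without limit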
    executor = BoundedExecutor(MAX_QUEUE_SIZE, max_workers=WORKER_THREADS)

    def process_message(msg):
        """Message processing logic"""
        PROCESS_MESSAGES.inc()
        LOGGER.info('Received message from topic %s: %s', msg.topic, msg.value)

        try:
            msg_dict = json.loads(msg.value.decode("utf8"))
        except json.decoder.JSONDecodeError:
            MESSAGE_PARSE_ERROR.inc()
            LOGGER.exception("Unable to parse message: ")
            return

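        # dispatch on topic: uploads go to process_upload, "delete" events to process_delete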
        if msg.topic == mqueue.UPLOAD_TOPIC:
            process_func = process_upload
        elif msg.topic == mqueue.EVENTS_TOPIC:
            if msg_dict['type'] == 'delete':
                process_func = process_delete
            else:
                UNKNOWN_EVENT_TYPE.inc()
                LOGGER.error("Received unknown event type: %s",
                             msg_dict['type'])
                return
        else:
            UNKNOWN_TOPIC.inc()
            LOGGER.error("Received message on unsupported topic: %s",
                         msg.topic)
            return

        if 'id' not in msg_dict or msg_dict["id"] is None:
            MISSING_ID.inc()
            LOGGER.warning(
                "Unable to process message, inventory ID is missing.")
            return

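        # run the handler on the worker pool; on_thread_done receives the finished future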
        future = executor.submit(process_func, msg_dict, conn, loop=loop)
        future.add_done_callback(on_thread_done)

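    # register process_message as the consumer callback and start listening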
    LISTENER_QUEUE.listen(process_message)

    # wait until loop is stopped from terminate callback
    loop.run_forever()

    LOGGER.info("Shutting down.")
    executor.shutdown()
Example #2
def main():
    """Main VMaaS listener entrypoint."""
    start_http_server(int(PROMETHEUS_PORT))
    init_logging()
    init_db()
    LOGGER.info("Starting VMaaS sync service.")
    app = ServerApplication()
    app.listen(8000)
    app.start()
Example #3
0
    def __init__(self, kafka_topics_in):
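        """Set up the Kafka consumer/producer, DB connection and HTTP session."""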
        # connect to the Messaging Service
        self.consumer = mqueue.MQReader(kafka_topics_in)  # [kafka_evaluator_topic]

        LOGGER.info("Using BOOTSTRAP_SERVERS: %s", mqueue.BOOTSTRAP_SERVERS)
        LOGGER.info("Using GROUP_ID: %s", mqueue.GROUP_ID)
        LOGGER.info("Using TOPICS: %s", ", ".join(kafka_topics_in))

        self.producer = mqueue.MQWriter(kafka_evaluator_topic)

        # get DB connection
        init_db()
        self.conn = DatabaseHandler.get_connection()
        self.session = requests.Session()
Example #4
0
def main():
    """Main kafka listener entrypoint."""
    start_http_server(int(PROMETHEUS_PORT))
    init_logging()
    init_db()
    LOGGER.info("Starting upload listener.")
    # get DB connection
    conn = DatabaseHandler.get_connection()

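    # single HTTP session shared with the worker threads for outbound requests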
    session = requests.Session()
    loop = asyncio.get_event_loop()
    signals = (signal.SIGHUP, signal.SIGTERM, signal.SIGINT)
    for sig in signals:
        loop.add_signal_handler(
            sig, lambda sig=sig: loop.create_task(terminate(sig, loop)))
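    # standard thread pool; unlike the BoundedExecutor in Example #1, its work queue is unbounded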
    executor = ThreadPoolExecutor(WORKER_THREADS)

    def process_message(msg):
        """Message processing logic"""
        PROCESS_UPLOAD.inc()
        LOGGER.info('Received message from topic %s: %s', msg.topic, msg.value)

        upload_data = json.loads(msg.value.decode("utf8"))

        # Inventory ID is missing
        if 'id' not in upload_data or upload_data["id"] is None:
            MISSING_ID.inc()
            LOGGER.warning("Unable to store system, inventory ID is missing.")
            return

        future = executor.submit(process_upload,
                                 upload_data,
                                 session,
                                 conn,
                                 loop=loop)
        future.add_done_callback(on_thread_done)

    UPLOAD_QUEUE.listen(process_message)

    # wait until loop is stopped from terminate callback
    loop.run_forever()

    LOGGER.info("Shutting down.")
    executor.shutdown()
    session.close()