def main():
    """Creates the main loop where messages are read from the consume queue.

    Also initializes logging and sets up prometheus if configured.
    """
    logger.info(f"Starting {config.APP_NAME} Service")
    config.log_config()

    # Expose the metrics endpoint unless explicitly disabled by config.
    if not config.DISABLE_PROMETHEUS:
        logger.info(f"Starting {config.APP_NAME} Prometheus Server")
        start_prometheus()

    consumer = consume.init_consumer()
    # The producer is shared with other functions in this module.
    global producer
    producer = produce.init_producer()

    # Consume forever; a failure on one message must not stop the service.
    while True:
        for record in consumer:
            try:
                handle_message(record.value)
            except Exception:
                logger.exception("An error occurred during message processing")
            producer.flush()
def main():
    """Entry point for the legacy host deletion service.

    Initializes logging, validates that the required legacy credentials are
    configured, then consumes messages forever, passing each one to
    ``handle_message``.

    Raises:
        ValueError: if LEGACY_USERNAME or LEGACY_PASSWORD is not configured.
    """
    logger = host_delete_logging.initialize_logging()
    logger.info("Starting legacy host deletion service")
    config.log_config()

    # Fail fast at startup if credentials are missing, rather than on first use.
    if not (config.LEGACY_USERNAME and config.LEGACY_PASSWORD):
        raise ValueError("Legacy Username and Password Required")

    consumer = kafka_consumer.init_consumer()
    for data in consumer:
        # Guard each message so one malformed payload cannot kill the whole
        # consumer loop; log the traceback and continue with the next message.
        try:
            handle_message(data.value)
        except Exception:
            logger.exception("An error occurred during message processing")
def main():
    """Run the Puptoo service: consume uploads, extract facts, forward to inventory."""
    logger.info("Starting Puptoo Service")
    config.log_config()
    if not config.DISABLE_PROMETHEUS:
        logger.info("Starting Puptoo Prometheus Server")
        start_prometheus()

    consumer = consume.init_consumer()
    producer = produce.init_producer()

    while True:
        for record in consumer:
            payload = record.value
            # Stamp arrival time so total pipeline latency can be reported later.
            payload["elapsed_time"] = time()
            ctx = get_extra(payload.get("account"), payload.get("request_id"))
            logger.info("received request_id: %s", ctx["request_id"])
            producer.send(config.TRACKER_TOPIC, value=tracker.tracker_msg(ctx, "received", "Received message"))
            metrics.msg_count.inc()

            facts = process.extraction(payload, ctx)
            if facts.get("error"):
                # Extraction failed: record it on the payload tracker and move on.
                metrics.extract_failure.inc()
                producer.send(config.TRACKER_TOPIC, value=tracker.tracker_msg(ctx, "failure", "Unable to extract facts"))
                continue
            logger.debug("extracted facts from message for %s", ctx["request_id"])

            inventory_msg = {"data": facts, "platform_metadata": payload}
            try:
                inventory_msg["data"]["elapsed_time"] = time() - payload["elapsed_time"]
                logger.debug("Message traversed pup in %s seconds", inventory_msg["data"]["elapsed_time"])
                logger.debug("Message sent to Inventory: %s", ctx["request_id"])
                producer.send(config.INVENTORY_TOPIC, value=inventory_msg)
            except KafkaError:
                logger.exception("Failed to produce message to inventory: %s", ctx["request_id"])
                metrics.msg_send_failure.inc()
                continue
            metrics.msg_processed.inc()

            # Best-effort tracker update: a tracker failure does not undo the
            # inventory send above, so only the failure counter is bumped.
            try:
                producer.send(config.TRACKER_TOPIC, value=tracker.tracker_msg(ctx, "success", "Sent to inventory"))
            except KafkaError:
                logger.exception("Failed to send payload tracker message for request %s", ctx["request_id"])
                metrics.msg_send_failure.inc()
            metrics.msg_produced.inc()
            producer.flush()
def main():
    """Storage broker entry point: consume forever, routing messages by topic."""
    logger.info("Starting Storage Broker")
    config.log_config()

    consumer = consume.init_consumer()
    # The producer is shared with other functions in this module.
    global producer
    producer = produce.init_producer()

    while True:
        for record in consumer:
            try:
                # Messages on the consume topic are validation results;
                # anything else is announced as a newly-available payload.
                if record.topic == config.CONSUME_TOPIC:
                    check_validation(record.value)
                else:
                    produce_available(record.value)
            except Exception:
                logger.exception("An error occurred during message processing")
            producer.flush()
def main():
    """Run the Puptoo service loop, dispatching each consumed message to handle_message."""
    logger.info("Starting Puptoo Service")
    config.log_config()

    if not config.DISABLE_PROMETHEUS:
        logger.info("Starting Puptoo Prometheus Server")
        start_prometheus()

    consumer = consume.init_consumer()
    # The producer is shared with other functions in this module.
    global producer
    producer = produce.init_producer()

    # Consume forever; one bad message must not take down the service.
    while True:
        for record in consumer:
            try:
                handle_message(record.value)
            except Exception:
                logger.exception("An error occurred during message processing")
            producer.flush()