""" The deserialization process is easy with parquet as it can be read with Parquet and Apache Arrow. Also, this file format won't change any model parameters. Similarly, for JSON it is the same. We need to assign the saved parameters to a new instance of the same class. Author : Guru Prasad Venkata Raghavan """ from confluent_kafka import Consumer from exercise.utils import logger log = logger.get_logger(__name__) class KafkaConsumer: """ Initializes the Kafka Consumer that reads the messages from the broker. If there is an error message in rare cases, it will decode the message. """ def __init__(self, hosts): """ It initializes the Kafka Hosts and also the consumer configs. hosts: String """ self.hosts = hosts self.c = Consumer({
import json

from flask import Flask

from MLEng_Exercise.exercise.mongo.mongodb import MongoDB
from exercise.utils import logger

log = logger.get_logger("main")

app = Flask(__name__)


@app.route("/hydrated_model_predictions", methods=["GET"])
def get_predicted_labels():
    """
    :return: JSON string with the "Job Seniority" predictions of the hydrated model.
    """
    mongo_db = MongoDB(database_name="MLEng_Exercise_DB",
                       collection_name="predicted_labels")
    predictions = [js["Job Seniority"] for js in list(mongo_db.retrieve_all())]
    request_in_json = json.dumps({"Predictions": predictions})
    return request_in_json


@app.route("/latest_online_predictions", methods=["GET"])
def get_online_predicted_labels():
    """
    :return: JSON string with the latest online predictions.
    """
    mongo_db = MongoDB(database_name="MLEng_Exercise_DB",
                       collection_name="online_predicted_labels")
    # The original file is truncated at this point; the lines below assume the
    # same response shape as get_predicted_labels().
    predictions = [js["Job Seniority"] for js in list(mongo_db.retrieve_all())]
    return json.dumps({"Predictions": predictions})
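# ---------------------------------------------------------------------------
# Runner sketch (not part of the original file): starts the service locally so
# the two GET endpoints above can be queried, e.g. with
#   curl http://127.0.0.1:5000/hydrated_model_predictions
# Host and port are placeholders.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    app.run(host="0.0.0.0", port=5000)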