def __init__(self):
    """Build the inference chatbot: resolve paths, load vocabulary and weights.

    NOTE(review): relies on module-level names (basedir, Hparams, Vocabulary,
    ChatbotModel, ChatSettings, chat_command_handler, path) — confirm they are
    in scope where this class is defined.
    """
    # Resolve the checkpoint location and derive the model directory from it.
    ckpt_path = os.path.join(
        basedir, 'models/best_weights_training.ckpt')
    ckpt_name = os.path.basename(ckpt_path)
    weights_dir = os.path.dirname(ckpt_path)
    hp = Hparams()

    # Load the vocabulary; input and output sides share one vocabulary here.
    print()
    print("Loading vocabulary...")
    vocab_in = Vocabulary.load()
    vocab_out = vocab_in

    # Build the model in inference mode.
    print("Initializing model...")
    print()
    chatbot = ChatbotModel(mode="infer",
                           model_hparams=hp.model_hparams,
                           input_vocabulary=vocab_in,
                           output_vocabulary=vocab_out,
                           model_dir=weights_dir)

    # Restore the trained weights.
    print()
    print("Loading model weights...")
    chatbot.load(ckpt_name)
    self.model = chatbot

    # Chat session setup: timestamped log file plus inference-time settings.
    self.chatlog_filepath = path.join(
        weights_dir, "chat_logs",
        "chatlog_{0}.txt".format(
            datetime.datetime.now().strftime("%Y%m%d_%H%M%S")))
    self.chat_settings = ChatSettings(hp.inference_hparams)
    chat_command_handler.print_commands()
def chat_fun():
    """Run the interactive console chat loop.

    Repeatedly builds the chatbot model, loads its weights, and answers user
    questions until a terminate command is issued; a reload command tears the
    model down and re-initializes it on the next pass of the outer loop.

    NOTE(review): reads chat_settings, input_vocabulary, output_vocabulary,
    model_dir, checkpoint and chatlog_filepath from the enclosing scope —
    confirm they are defined where this function is created.
    """
    terminate_chat = False
    reload_model = False
    while not terminate_chat:
        # Create the model (a fresh instance on every load or reload).
        print()
        print("Initializing model..." if not reload_model else "Re-initializing model...")
        print()
        with ChatbotModel(mode="infer",
                          model_hparams=chat_settings.model_hparams,
                          input_vocabulary=input_vocabulary,
                          output_vocabulary=output_vocabulary,
                          model_dir=model_dir) as model:
            # Load the weights
            print()
            print("Loading model weights...")
            print()
            model.load(checkpoint)

            # Show the commands on first initialization only.
            if not reload_model:
                chat_command_handler.print_commands()

            while True:
                # Get the input and check if it is a question or a command,
                # and execute if it is a command.
                question = input("You: ")
                is_command, terminate_chat, reload_model = chat_command_handler.handle_command(
                    question, model, chat_settings)
                if terminate_chat or reload_model:
                    break
                elif is_command:
                    continue
                else:
                    # Not a command (it is a question): pass it on to the
                    # chatbot model to get the answer.
                    question_with_history, answer = model.chat(
                        question, chat_settings)

                    # Print the answer or answer beams and log to chat log.
                    # Fix: dropped the leftover debug prints ("\n1st if",
                    # "\n2nd if", "\n else") that polluted the chat output.
                    if chat_settings.show_question_context:
                        print("Question with history (context): {0}".format(
                            question_with_history))
                    if chat_settings.show_all_beams:
                        for i in range(len(answer)):
                            print("ChatBot (Beam {0}): {1}".format(
                                i, answer[i]))
                    else:
                        print("ChatBot: {0}".format(answer))
                    print()
                    if chat_settings.inference_hparams.log_chat:
                        chat_command_handler.append_to_chatlog(
                            chatlog_filepath, question, answer)
# NOTE(review): orphaned fragment — this continues a `with ChatbotModel(...)`
# call whose opening (and enclosing def) is not visible in this chunk, and it
# is truncated at the end (`if chat_settings.show_question_context:` has no
# body). Left byte-identical pending recovery of the surrounding definition.
# Also note: handle_command here unpacks TWO values, unlike the three-value
# versions elsewhere in this file — presumably an older revision; verify.
model_hparams=hparams.model_hparams, input_vocabulary=input_vocabulary, output_vocabulary=output_vocabulary, model_dir=model_dir) as model: #Load the weights print() print("Loading model weights...") model.load(checkpoint) # Setting up the chat chatlog_filepath = path.join( model_dir, "chat_logs", "chatlog_{0}.txt".format( datetime.datetime.now().strftime("%Y%m%d_%H%M%S"))) chat_settings = ChatSettings(hparams.inference_hparams) chat_command_handler.print_commands() while (True): #Get the input and check if it is a question or a command, and execute if it is a command question = input("You: ") is_command, terminate_chat = chat_command_handler.handle_command( question, model, chat_settings) if terminate_chat: break elif is_command: continue else: #If it is not a command (it is a question), pass it on to the chatbot model to get the answer question_with_history, answer = model.chat(question, chat_settings) #Print the answer or answer beams and log to chat log if chat_settings.show_question_context:
def serve_chat(checkpointfile, port):
    """Serve the chatbot over HTTP via Flask-RESTful.

    Loads the vocabulary and model weights for the given checkpoint, then
    registers two endpoints — GET /chat/<question> (answers) and /chat_ui/
    (static page) — and blocks in app.run() until the server is stopped.
    """
    api = Api(app)

    # Read the hyperparameters and configure paths
    model_dir, hparams, checkpoint = general_utils.initialize_session_server(checkpointfile)

    # Load the vocabulary
    print()
    print("Loading vocabulary...")
    if hparams.model_hparams.share_embedding:
        # One shared vocabulary serves both the input and output sides.
        input_vocabulary = Vocabulary.load(path.join(model_dir, Vocabulary.SHARED_VOCAB_FILENAME))
        output_vocabulary = input_vocabulary
    else:
        # Separate vocabularies for the input and output sides.
        input_vocabulary = Vocabulary.load(path.join(model_dir, Vocabulary.INPUT_VOCAB_FILENAME))
        output_vocabulary = Vocabulary.load(path.join(model_dir, Vocabulary.OUTPUT_VOCAB_FILENAME))

    # Create the model
    print("Initializing model...")
    print()
    with ChatbotModel(mode="infer",
                      model_hparams=hparams.model_hparams,
                      input_vocabulary=input_vocabulary,
                      output_vocabulary=output_vocabulary,
                      model_dir=model_dir) as model:
        # Load the weights
        print()
        print("Loading model weights...")
        model.load(checkpoint)

        # Set up a timestamped chat log and the runtime chat settings.
        log_name = "web_chatlog_{0}.txt".format(datetime.datetime.now().strftime("%Y%m%d_%H%M%S"))
        chatlog_filepath = path.join(model_dir, "chat_logs", log_name)
        chat_settings = ChatSettings(hparams.model_hparams, hparams.inference_hparams)
        chat_command_handler.print_commands()

        class Answer(Resource):
            """GET handler mapping a question path segment to an answer string."""

            def get(self, question):
                is_command, terminate_chat, _ = chat_command_handler.handle_command(question, model, chat_settings)
                if terminate_chat:
                    answer = "[Can't terminate from http request]"
                elif is_command:
                    answer = "[Command processed]"
                else:
                    # A real question: pass it to the chatbot model for an answer.
                    _, answer = model.chat(question, chat_settings)
                if chat_settings.inference_hparams.log_chat:
                    chat_command_handler.append_to_chatlog(chatlog_filepath, question, answer)
                return answer

        class UI(Resource):
            """GET handler serving the static chat UI page."""

            def get(self):
                return send_from_directory(".", "chat_ui.html")

        api.add_resource(Answer, "/chat/<string:question>")
        api.add_resource(UI, "/chat_ui/")
        app.run(debug=False, port=port)