Example no. 1
0
def chat_fun_urdu(n_query, model, chat_settings, chatlog_filepath):
    """Answer a single Urdu chat query.

    Args:
        n_query: raw user query (native-script text).
        model: chatbot model exposing `chat(question, chat_settings)`.
        chat_settings: ChatSettings controlling beams/context/logging.
        chatlog_filepath: path of the chat log file to append to.

    Returns:
        The canned response from `wt.query_check` when it matches, the
        model's translated answer otherwise, or None when the input was
        a command (the handler already executed it).
    """
    chk, response = wt.query_check(n_query)
    terminate_chat = False
    # Check whether the input is a command, and execute it if so.
    question = n_query
    is_command, terminate_chat, reload_model = chat_command_handler.handle_command(
        question, model, chat_settings)
    if is_command:
        pass  # handler already executed the command; nothing to answer
    elif chk:
        # query_check produced a canned response; skip the model entirely
        return response
    else:
        question = ChatSettings.To_query(n_query)
        # Not a command: pass it on to the chatbot model to get the answer
        question_with_history, answer = model.chat(question, chat_settings)

        # Print the answer or answer beams
        if chat_settings.show_question_context:
            print("Question with history (context): {0}".format(
                question_with_history))

        if chat_settings.show_all_beams:
            for i in range(len(answer)):
                print("ChatBot (Beam {0}): {1}".format(i, answer[i]))
        # BUG FIX: n_answer was only bound in the non-beam branch, so the
        # final `return n_answer` raised NameError when show_all_beams was on.
        n_answer = ChatSettings.To_answer(answer)
        if not chat_settings.show_all_beams:
            print("ChatBot: {0}".format(n_answer))
        print()

        # BUG FIX: logging sat after `return n_answer` and never executed.
        if chat_settings.inference_hparams.log_chat:
            chat_command_handler.append_to_chatlog(chatlog_filepath, question,
                                                   answer)
        return n_answer
Example no. 2
0
    def __init__(self):
        """Set up an inference chat session: resolve the checkpoint path,
        load the vocabulary, build and restore the model, and prepare the
        time-stamped chat log."""
        # Resolve the checkpoint and derive the model directory from it.
        ckpt_path = os.path.join(
            basedir, 'models/best_weights_training.ckpt')
        ckpt_name = os.path.basename(ckpt_path)
        mdl_dir = os.path.dirname(ckpt_path)
        hyper = Hparams()

        # One vocabulary serves as both encoder input and decoder output.
        print()
        print("Loading vocabulary...")
        vocab = Vocabulary.load()

        # Build the model graph in inference mode.
        print("Initializing model...")
        print()
        chatbot = ChatbotModel(mode="infer",
                               model_hparams=hyper.model_hparams,
                               input_vocabulary=vocab,
                               output_vocabulary=vocab,
                               model_dir=mdl_dir)

        # Restore the trained weights into the graph.
        print()
        print("Loading model weights...")
        chatbot.load(ckpt_name)
        self.model = chatbot

        # Time-stamped chat log under <model_dir>/chat_logs/.
        stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
        self.chatlog_filepath = path.join(
            mdl_dir, "chat_logs", "chatlog_{0}.txt".format(stamp))
        self.chat_settings = ChatSettings(hyper.inference_hparams)
        chat_command_handler.print_commands()
Example no. 3
0
def model_loading():
    """Load vocabularies and chat settings, then build and restore the model.

    Returns:
        Tuple of (model, chatlog_filepath, chat_settings).
    """
    _, model_dir, hparams, checkpoint, _, _ = general_utils.initialize_session(
        "chat")
    # Load the vocabulary: one shared file, or separate input/output files.
    print()
    print("Loading vocabulary...")
    if hparams.model_hparams.share_embedding:
        shared_vocab_filepath = path.join(model_dir,
                                          Vocabulary.SHARED_VOCAB_FILENAME)
        input_vocabulary = Vocabulary.load(shared_vocab_filepath)
        output_vocabulary = input_vocabulary
    else:
        input_vocab_filepath = path.join(model_dir,
                                         Vocabulary.INPUT_VOCAB_FILENAME)
        input_vocabulary = Vocabulary.load(input_vocab_filepath)
        output_vocab_filepath = path.join(model_dir,
                                          Vocabulary.OUTPUT_VOCAB_FILENAME)
        output_vocabulary = Vocabulary.load(output_vocab_filepath)

    # Setting up the chat: time-stamped log file under <model_dir>/chat_logs/.
    chatlog_filepath = path.join(
        model_dir, "chat_logs", "chatlog_{0}.txt".format(
            datetime.datetime.now().strftime("%Y%m%d_%H%M%S")))
    chat_settings = ChatSettings(hparams.model_hparams,
                                 hparams.inference_hparams)

    ############# Loading Model #############

    # DEAD-CODE FIX: `reload_model` was hard-coded to False, so the
    # "Re-initializing model..." ternary and the `if not reload_model`
    # guard could never vary. The constant and its dead branches are
    # removed; the console output is exactly what the original produced.
    print()
    print("Initializing model...")
    print()
    model = ChatbotModel(mode="infer",
                         model_hparams=chat_settings.model_hparams,
                         input_vocabulary=input_vocabulary,
                         output_vocabulary=output_vocabulary,
                         model_dir=model_dir)

    # Load the weights
    print()
    print("Loading model weights...")
    print()
    model.load(checkpoint)

    # NOTE(review): this prints on a fresh load, not a reload — the wording
    # looks misleading but is kept to preserve the observable output.
    print('Model Reload!')
    return model, chatlog_filepath, chat_settings
Example no. 4
0
    def __init__(self, training):
        """Start either a training worker or an inference server thread, then
        bind an HTTP server on port 4321.

        Args:
            training: if True, run the training loop in a background thread;
                otherwise serve chat answers from a ChatSession, reading
                requests from `waiting_queue` and writing to `result_queue`.
        """
        if training:
            checkpointfile = r'models\best_weights_training.ckpt'
            # Make sure checkpoint file & hparams file exists
            checkpoint_filepath = os.path.relpath(checkpointfile)
            model_dir = os.path.dirname(checkpoint_filepath)
            hparams = Hparams()
            global chat_setting
            # Setting up the chat: time-stamped log under <model_dir>/chat_logs/
            self.chatlog_filepath = path.join(
                model_dir, "chat_logs", "chatlog_{0}.txt".format(
                    datetime.datetime.now().strftime("%Y%m%d_%H%M%S")))
            chat_setting = self.chat_settings = ChatSettings(
                hparams.inference_hparams)
            # chat_command_handler.print_commands()
            self.train_thred = threading.Thread(target=train.train,
                                                args=(waiting_queue,
                                                      chat_setting,
                                                      result_queue))
            self.train_thred.start()
        else:

            def server_thread_function():
                # Answer queued requests forever with a single ChatSession.
                # NOTE(review): busy-wait polling — burns CPU while the queue
                # is empty; a blocking waiting_queue.get() would be cheaper.
                sess = ChatSession()
                while True:
                    if not waiting_queue.empty():
                        q = waiting_queue.get()
                        if q.data == 'version':
                            # Report the checkpoint's last-modified timestamp.
                            t = os.path.getmtime(
                                'models/best_weights_training.ckpt.data-00000-of-00001'
                            )
                            result_queue[q.id] = time.strftime(
                                '%Y-%m-%d %H:%M:%S', time.localtime(t))

                        else:
                            result_queue[q.id] = sess.chat(q.data)
                        print(result_queue[q.id])

            threading.Thread(target=server_thread_function).start()

        # BUG FIX: a bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; narrowed so only real failures of the IP
        # lookup fall back to localhost.
        try:
            myip = requests.get(
                'http://fun.alphamj.cn/wx/registered').content.decode()
        except Exception:
            myip = '127.0.0.1'
        print('listen {}:4321'.format(myip))
        self.server = http.server.HTTPServer((myip, 4321), ServerClass)
        print('server init finish')
Example no. 5
0
# NOTE(review): this top-level script chunk appears truncated by the scrape —
# the final else branch computes an answer, but the printing/logging promised
# by the trailing comment is not visible here.
with ChatbotModel(mode="infer",
                  model_hparams=hparams.model_hparams,
                  input_vocabulary=input_vocabulary,
                  output_vocabulary=output_vocabulary,
                  model_dir=model_dir) as model:

    # Restore the trained weights into the model
    print()
    print("Loading model weights...")
    model.load(checkpoint)

    # Setting up the chat: time-stamped log file under <model_dir>/chat_logs/
    chatlog_filepath = path.join(
        model_dir, "chat_logs", "chatlog_{0}.txt".format(
            datetime.datetime.now().strftime("%Y%m%d_%H%M%S")))
    chat_settings = ChatSettings(hparams.inference_hparams)
    chat_command_handler.print_commands()
    while (True):
        # Get the input and check if it is a question or a command, and execute if it is a command
        question = input("You: ")
        is_command, terminate_chat = chat_command_handler.handle_command(
            question, model, chat_settings)
        if terminate_chat:
            break
        elif is_command:
            continue
        else:
            # If it is not a command (it is a question), pass it on to the chatbot model to get the answer
            question_with_history, answer = model.chat(question, chat_settings)

            # Print the answer or answer beams and log to chat log
Example no. 6
0
def chat_fun(n_query):
    """Answer a single chat query, creating the chatbot model on demand.

    Relies on module-level `chat_settings`, `input_vocabulary`,
    `output_vocabulary`, `model_dir`, `checkpoint` and `chatlog_filepath`
    being initialized elsewhere in this module.

    Returns:
        The canned response from `wt.query_check` when it matches, or the
        model's translated answer; returns None if the chat terminates via
        a command before an answer is produced.
    """
    terminate_chat = False
    reload_model = False
    chk, response = wt.query_check(n_query)

    while not terminate_chat:
        # Create the model (rebuilt when a command requested a reload)
        print()
        print("Initializing model..."
              if not reload_model else "Re-initializing model...")
        print()
        with ChatbotModel(mode="infer",
                          model_hparams=chat_settings.model_hparams,
                          input_vocabulary=input_vocabulary,
                          output_vocabulary=output_vocabulary,
                          model_dir=model_dir) as model:

            # Load the weights
            print()
            print("Loading model weights...")
            print()
            model.load(checkpoint)

            # Show the commands
            if not reload_model:
                # Uncomment the following line if you want to print commands.
                # chat_command_handler.print_commands()
                print('Model Reload!')

            while True:
                # Check if the input is a command, and execute if it is
                question = n_query
                is_command, terminate_chat, reload_model = chat_command_handler.handle_command(
                    question, model, chat_settings)
                if terminate_chat or reload_model:
                    break
                elif is_command:
                    continue
                elif chk:
                    # query_check produced a canned response; skip the model
                    return response
                else:
                    question = ChatSettings.To_query(n_query)
                    # Not a command: pass it to the chatbot model for an answer
                    question_with_history, answer = model.chat(
                        question, chat_settings)

                    # Print the answer or answer beams
                    if chat_settings.show_question_context:
                        print("Question with history (context): {0}".format(
                            question_with_history))

                    if chat_settings.show_all_beams:
                        for i in range(len(answer)):
                            print("ChatBot (Beam {0}): {1}".format(
                                i, answer[i]))
                    # BUG FIX: n_answer was only bound in the non-beam branch,
                    # so the final return raised NameError with beams enabled.
                    n_answer = ChatSettings.To_answer(answer)
                    if not chat_settings.show_all_beams:
                        print("ChatBot: {0}".format(n_answer))

                    print()

                    # BUG FIX: chat logging sat after `return` and never ran.
                    if chat_settings.inference_hparams.log_chat:
                        chat_command_handler.append_to_chatlog(
                            chatlog_filepath, question, answer)
                    return n_answer
Example no. 7
0
def serve_chat(checkpointfile, port):
    """Serve the chatbot over HTTP.

    Routes:
        GET /chat/<question>  — answer a question (or process a command).
        GET /chat_ui/         — serve the static chat page.
    """
    rest_api = Api(app)

    # Read the hyperparameters and configure paths
    model_dir, hparams, checkpoint = general_utils.initialize_session_server(checkpointfile)

    # Load the vocabulary: one shared file, or separate input/output files
    print()
    print("Loading vocabulary...")
    if hparams.model_hparams.share_embedding:
        shared_path = path.join(model_dir, Vocabulary.SHARED_VOCAB_FILENAME)
        input_vocabulary = Vocabulary.load(shared_path)
        output_vocabulary = input_vocabulary
    else:
        input_vocabulary = Vocabulary.load(
            path.join(model_dir, Vocabulary.INPUT_VOCAB_FILENAME))
        output_vocabulary = Vocabulary.load(
            path.join(model_dir, Vocabulary.OUTPUT_VOCAB_FILENAME))

    # Create the model in inference mode
    print("Initializing model...")
    print()
    with ChatbotModel(mode="infer",
                      model_hparams=hparams.model_hparams,
                      input_vocabulary=input_vocabulary,
                      output_vocabulary=output_vocabulary,
                      model_dir=model_dir) as model:

        # Restore the trained weights
        print()
        print("Loading model weights...")
        model.load(checkpoint)

        # Time-stamped chat log for web sessions
        chatlog_filepath = path.join(
            model_dir, "chat_logs", "web_chatlog_{0}.txt".format(
                datetime.datetime.now().strftime("%Y%m%d_%H%M%S")))
        chat_settings = ChatSettings(hparams.model_hparams,
                                     hparams.inference_hparams)
        chat_command_handler.print_commands()

        class Answer(Resource):
            """GET handler: run a question (or command) through the model."""

            def get(self, question):
                is_command, terminate_chat, _ = chat_command_handler.handle_command(
                    question, model, chat_settings)
                if terminate_chat:
                    answer = "[Can't terminate from http request]"
                elif is_command:
                    answer = "[Command processed]"
                else:
                    # Not a command: ask the chatbot model for the answer
                    _, answer = model.chat(question, chat_settings)

                    if chat_settings.inference_hparams.log_chat:
                        chat_command_handler.append_to_chatlog(
                            chatlog_filepath, question, answer)

                return answer

        class UI(Resource):
            """GET handler: serve the static chat page."""

            def get(self):
                return send_from_directory(".", "chat_ui.html")

        rest_api.add_resource(Answer, "/chat/<string:question>")
        rest_api.add_resource(UI, "/chat_ui/")
        app.run(debug=False, port=port)