Example no. 1
0
def bot_ui():
    """Run an interactive command-line chat loop against the bot.

    Reads one question per line from stdin and prints the bot's answer
    until the user types 'exit' or stdin reaches EOF.
    """
    corpus_dir = os.path.join(PROJECT_ROOT, 'Data', 'Corpus')
    knbase_dir = os.path.join(PROJECT_ROOT, 'Data', 'KnowledgeBase')
    result_dir = os.path.join(PROJECT_ROOT, 'Data', 'Result')
    aiml_dir = os.path.join(PROJECT_ROOT, 'Data', 'Rules')

    with tf.Session() as sess:
        predictor = BotPredictor(sess,
                                 corpus_dir=corpus_dir,
                                 knbase_dir=knbase_dir,
                                 result_dir=result_dir,
                                 aiml_dir=aiml_dir,
                                 result_file='basic')
        # This command-line UI keeps a single chat session for its lifetime.
        session_id = predictor.session_data.add_session()

        # Prompt, then block waiting for the first question on stdin.
        sys.stdout.write("> ")
        sys.stdout.flush()
        question = sys.stdin.readline()
        while question:
            if question.strip() == 'exit':
                print("Bye Bye ~")
                break

            answer = predictor.predict(session_id, question)
            # The model emits '_nl_'/'_np_' placeholders for line breaks;
            # render them as real newlines before printing.
            print(re.sub(r'_nl_|_np_', '\n', answer).strip())
            print("> ", end="")
            sys.stdout.flush()
            question = sys.stdin.readline()
Example no. 2
0
def bot_ui():
    """Answer a single question supplied on the command line.

    All argv tokens after the script name are joined into one question;
    the bot's reply is printed to stdout on a single line.
    """
    corpus_dir = os.path.join(PROJECT_ROOT, 'Data', 'Corpus')
    knbase_dir = os.path.join(PROJECT_ROOT, 'Data', 'KnowledgeBase')
    result_dir = os.path.join(PROJECT_ROOT, 'Data', 'Result')

    with tf.Session() as sess:
        predictor = BotPredictor(sess,
                                 corpus_dir=corpus_dir,
                                 knbase_dir=knbase_dir,
                                 result_dir=result_dir,
                                 result_file='basic')
        # One-shot usage: a single chat session for this single question.
        session_id = predictor.session_data.add_session()
        question = ''.join(sys.argv[1:])
        answer = predictor.predict(session_id, question)
        # Replace the model's '_nl_'/'_np_' break markers with spaces so
        # the reply stays on one line.
        print(re.sub(r'_nl_|_np_', ' ', answer).strip())
Example no. 3
0
def test_demo():
    """Batch-predict answers for every sample sentence in Data/Test.

    Reads questions from samples.txt, appends each question/answer pair
    to responses.txt, and reports total prediction time on stdout.
    """
    print("# Creating TF session ...")

    corpus_dir = os.path.join(PROJECT_ROOT, 'Data', 'Corpus')
    knbase_dir = os.path.join(PROJECT_ROOT, 'Data', 'KnowledgeBase')
    result_dir = os.path.join(PROJECT_ROOT, 'Data', 'Result')

    test_dir = os.path.join(PROJECT_ROOT, 'Data', 'Test')
    in_file = os.path.join(test_dir, 'samples.txt')
    out_file = os.path.join(test_dir, 'responses.txt')

    with tf.Session() as sess:
        predictor = BotPredictor(sess,
                                 corpus_dir=corpus_dir,
                                 knbase_dir=knbase_dir,
                                 result_dir=result_dir,
                                 result_file='basic')
        session_id = predictor.session_data.add_session()

        print("# Prediction started ...")
        start = time.time()
        # Output is opened in append mode, so repeated runs accumulate.
        with open(in_file, 'r') as f_in, open(out_file, 'a') as f_out:
            f_out.write(get_header())
            for line in f_in:
                sentence = line.strip()
                # Skip blank lines and '#==' section separators.
                if not sentence or sentence.startswith("#=="):
                    continue
                f_out.write("> {}\n".format(sentence))
                reply = predictor.predict(session_id, sentence)
                # Turn '_nl_'/'_np_' break markers into real newlines.
                answer = re.sub(r'_nl_|_np_', '\n', reply).strip()
                f_out.write("{}\n\n".format(answer))

        elapsed = time.time() - start
        print(
            "# Prediction completed. Time spent on prediction: {:4.2f} seconds"
            .format(elapsed))
Example no. 4
0
def main():
    """Serve bot answers over a simple blocking TCP socket on 127.0.0.1:2000.

    Accepts one connection at a time, reads a single UTF-8 question,
    replies with the bot's answer, and closes the connection.  A client
    sending 'exit' shuts the server down.
    """
    corp_dir = os.path.join(PROJECT_ROOT, 'Data', 'Corpus')
    knbs_dir = os.path.join(PROJECT_ROOT, 'Data', 'KnowledgeBase')
    res_dir = os.path.join(PROJECT_ROOT, 'Data', 'Result')

    with tf.Session() as sess:
        predictor = BotPredictor(sess,
                                 corpus_dir=corp_dir,
                                 knbase_dir=knbs_dir,
                                 result_dir=res_dir,
                                 result_file='basic-32334')

        # Context manager guarantees the listening socket is closed even if
        # an exception escapes the accept loop (the original leaked it).
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
            host = '127.0.0.1'
            port = 2000
            sock.bind((host, port))
            sock.listen(1)
            print("chatServer Start...\n")
            while True:
                connection, client_addr = sock.accept()
                try:
                    # NOTE(review): a single recv assumes the whole question
                    # fits in 1024 bytes and arrives in one segment.
                    data = connection.recv(1024)
                    data = data.decode("utf-8")
                    print("data > " + data)

                    # Each incoming connection gets its own chat session.
                    session_id = predictor.session_data.add_session()
                    question = data
                    if question.strip() == 'exit':
                        print("Thank you for using HeroBot. Goodbye.")
                        break
                    answer = predictor.predict(session_id, question)
                    print("answ > " + answer)
                    connection.sendall(answer.encode("utf-8"))
                finally:
                    # Always close the per-client connection; the original
                    # leaked it on the 'exit' break path and on any error.
                    connection.close()
Example no. 5
0
 def __init__(self, config_file='config.cfg', host='http://localhost', port=9000):
     """Initialize the chat engine: config, AIML kernel, TF predictor,
     CoreNLP client, and grammar checker.

     Args:
         config_file: INI file with a [resource] section of file paths.
         host: Base URL of a running StanfordCoreNLP server.
         port: Port of the StanfordCoreNLP server.
     """
     config = configparser.ConfigParser()
     config.read(config_file)
     self.filter_file = config.get('resource', 'filter_file')
     self.load_file = config.get('resource', 'load_file')
     self.save_file = config.get('resource', 'save_file')
     self.shelve_file = config.get('resource', 'shelve_file')

     corp_dir = os.path.join(PROJECT_ROOT, 'Data', 'Corpus')
     knbs_dir = os.path.join(PROJECT_ROOT, 'Data', 'KnowledgeBase')
     res_dir = os.path.join(PROJECT_ROOT, 'Data', 'Result')

     # Initialize the AIML kernel and the TF-based predictor.
     # NOTE(review): the tf.Session is kept open for the object's lifetime
     # and is never explicitly closed.
     self.mybot = aiml.Kernel()
     sess = tf.Session()
     self.predictor = BotPredictor(sess, corpus_dir=corp_dir, knbase_dir=knbs_dir, result_dir=res_dir, result_file='basic')
     self.session_id = self.predictor.session_data.add_session()

     # Create the AI engine.  Raw strings fix the invalid '\A' escape
     # sequence in the original Windows-style path literals (same bytes,
     # no DeprecationWarning/SyntaxWarning on Python 3).
     if os.path.isfile(r"model\AIChatEngine.brn"):
         self.mybot.bootstrap(brainFile=r"model\AIChatEngine.brn")
     else:
         self.mybot.bootstrap(learnFiles=self.load_file, commands='load aiml b')
         self.mybot.saveBrain(r"model\AIChatEngine.brn")

     # Templates used when dynamically learning new AIML rules.
     self.template = '<aiml version="1.0" encoding="UTF-8">\n{rule}\n</aiml>'
     self.category_template = '<category><pattern>{pattern}</pattern><template>{answer}</template></category>'

     # Sensitive-word filter (currently disabled).
     #self.gfw = filter.DFAFilter()
     #self.gfw.parse(self.filter_file)

     # Use an existing StanfordCoreNLP server.
     self.nlp = StanfordCoreNLP(host, port=port, timeout=30000)
     self.props = {
         'annotators': 'tokenize,ssplit,pos,lemma,ner,parse,depparse,dcoref,relation',
         'pipelineLanguage': 'en',
         'outputFormat': 'json'
     }

     # Initialize the LanguageTool grammar checker for GEC.
     self.tool = language_check.LanguageTool('en-US')
Example no. 6
0
    def __init__(self,
                 config_file='config.cfg',
                 host='http://localhost',
                 port=9000):
        """Initialize the chat engine: config, AIML kernel, TF predictor,
        and CoreNLP client.

        Args:
            config_file: INI file with a [resource] section of file paths.
            host: Base URL of a running StanfordCoreNLP server.
            port: Port of the StanfordCoreNLP server.
        """
        config = configparser.ConfigParser()
        config.read(config_file)
        self.load_file = config.get('resource', 'load_file')
        self.save_file = config.get('resource', 'save_file')
        self.shelve_file = config.get('resource', 'shelve_file')
        self.filter_file = config.get('resource', 'filter_file')

        corp_dir = os.path.join(PROJECT_ROOT, 'Data', 'Corpus')
        knbs_dir = os.path.join(PROJECT_ROOT, 'Data', 'KnowledgeBase')
        res_dir = os.path.join(PROJECT_ROOT, 'Data', 'Result')

        # Initialize the AIML kernel and the TF-based predictor.
        # NOTE(review): the tf.Session is kept open for the object's
        # lifetime and is never explicitly closed.
        self.mybot = aiml.Kernel()
        sess = tf.Session()
        self.predictor = BotPredictor(sess,
                                      corpus_dir=corp_dir,
                                      knbase_dir=knbs_dir,
                                      result_dir=res_dir,
                                      result_file='basic')
        self.session_id = self.predictor.session_data.add_session()

        # Create the AI engine.  Raw strings fix the invalid '\A' escape
        # sequence in the original Windows-style path literals (same bytes,
        # no DeprecationWarning/SyntaxWarning on Python 3).
        if os.path.isfile(r"model\AIChatEngine.brn"):
            self.mybot.bootstrap(brainFile=r"model\AIChatEngine.brn")
        else:
            self.mybot.bootstrap(learnFiles=self.load_file,
                                 commands='load aiml b')
            self.mybot.saveBrain(r"model\AIChatEngine.brn")

        # Use an existing StanfordCoreNLP server.
        self.nlp = StanfordCoreNLP(host, port=port, timeout=30000)
        self.props = {
            'annotators':
            'tokenize,ssplit,pos,lemma,ner,parse,depparse,dcoref,relation',
            'pipelineLanguage': 'en',
            'outputFormat': 'json'
        }
Example no. 7
0
            outputSentence: The sessionId is the same as in the input for validation purpose. 
            The answer is the response from the ChatLearner.
        """
        if sessionId not in predictor.session_data.session_dict:  # Including the case of 0
            sessionId = self.predictor.session_data.add_session()

        answer = self.predictor.predict(sessionId, question)

        outputSentence = SessionSentence()
        outputSentence.sessionId = sessionId
        outputSentence.sentence = answer
        return outputSentence

if __name__ == "__main__":
    # Resolve the data directories relative to the project root.
    # NOTE(review): knbs_dir points at 'Data/Variety' here, unlike the
    # 'Data/KnowledgeBase' used elsewhere -- confirm this is intentional.
    corp_dir = os.path.join(PROJECT_ROOT, 'Data', 'Corpus')
    knbs_dir = os.path.join(PROJECT_ROOT, 'Data', 'Variety')
    res_dir = os.path.join(PROJECT_ROOT, 'Data', 'Result')
    rules_dir = os.path.join(PROJECT_ROOT, 'Data', 'Rules')

    with tf.Session() as sess:
        # One shared predictor instance, handed to every ChatService.
        predictor = BotPredictor(sess, corpus_dir=corp_dir, knbase_dir=knbs_dir,
                                 result_dir=res_dir, aiml_dir=rules_dir,
                                 result_file='basic')

        # Expose ChatService via Tornado's HTTP server on port 8080 and
        # block on the IOLoop (the session stays open while serving).
        service = [('ChatService', ChatService, {'predictor': predictor})]
        app = webservices.WebService(service)
        ws = tornado.httpserver.HTTPServer(app)
        ws.listen(8080)
        print("Web service started.")
        tornado.ioloop.IOLoop.instance().start()
Example no. 8
0
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
DIR_PATH = os.path.dirname(os.path.realpath(__file__))

app = Flask(__name__)
CORS(app)

# Load every AIML rule file into the kernel.
k = aiml.Kernel()
for f in glob.glob(DIR_PATH + '/xml/*.xml'):
    k.learn(f)

# BUGFIX: the original wrapped this in `with tf.Session() as sess:`, which
# closes the session as soon as the module finishes importing -- any later
# predictor.predict() call from a Flask request handler would then fail
# because the underlying session is already closed.  Keep the session open
# for the lifetime of the app instead.
sess = tf.Session()
predictor = BotPredictor(
    sess,
    corpus_dir=os.path.join(PROJECT_ROOT, 'Data', 'Corpus'),
    knbase_dir=os.path.join(PROJECT_ROOT, 'Data', 'KnowledgeBase'),
    result_dir=os.path.join(PROJECT_ROOT, 'Data', 'Result'),
    result_file='basic'
)
@app.route('/ping', methods=['GET'])
def ping():
    # Health-check endpoint: always answers the literal string 'pong'.
    return 'pong'
   
@app.route('/chat', methods=['POST'])
def chat():
    session_id = predictor.session_data.add_session()
    question = str(request.get_json()['body'])

    aiml_reply = k.respond(question)