def generateBrain():
    """Compile the AIML rule set into a binary brain file for fast startup.

    Relies on module-level `aimlPath` and `import aiml` defined elsewhere
    in this file.
    """
    brain = aiml.Kernel()
    brain.bootstrap(learnFiles=aimlPath + "std-startup.xml",
                    commands="LOAD AIML B")
    brain.saveBrain(aimlPath + "bot_brain.brn")
"""PyAIML stress test.

Two bots are wired into a feedback loop: each bot's reply becomes the
other's next input. Output is unbounded — pipe stdout to a log file.
"""
from __future__ import print_function

import aiml

# Build both kernels quietly.
kern1 = aiml.Kernel()
kern2 = aiml.Kernel()
for _k in (kern1, kern2):
    _k.verbose(False)

# Kernel #1 learns from the AIML sources and saves a brain snapshot;
# kernel #2 then boots straight from that snapshot.
print("Initializing Kernel #1")
kern1.bootstrap(learnFiles="std-startup.xml", commands="load aiml b")
kern1.saveBrain("standard.brn")

print("\nInitializing Kernel #2")
kern2.bootstrap(brainFile="standard.brn")

# Seed the conversation, then let the bots talk forever.
response = "askquestion"
while True:
    for _tag, _bot in (("1:", kern1), ("2:", kern2)):
        response = _bot.respond(response).strip()
        print(_tag, response, "\n")
import os

import aiml

# Build/load the brain from inside the aiml corpus directory,
# remembering where we started so we can return afterwards.
main_path = os.getcwd()
os.chdir(os.getcwd() + '/aiml_files')

bot = aiml.Kernel()

# Prefer the precompiled brain; otherwise parse the AIML sources
# and cache the result for next launch.
if os.path.isfile("bot_brain.brn"):
    bot.bootstrap(brainFile="bot_brain.brn")
else:
    bot.bootstrap(learnFiles="std-startup.xml", commands="LOAD AIML B")
    bot.saveBrain("bot_brain.brn")

# Back to the main project directory.
os.chdir(main_path)

# Static identity predicates for the bot.
for _key, _value in (
    ("botmaster", "Botmaster"),
    ("master", "Nikhil"),
    ("name", "NEWSBOT"),
    ("genus", "robot"),
    ("location", "Delhi,India"),
    ("gender", "Male"),
    ("species", "chat robot"),
    ("size", "129 MB"),
    ("birthday", ""),
    ("order", "artificial intelligence"),
):
    bot.setBotPredicate(_key, _value)
# -*- coding: utf-8 -*-
# Chatbot experiment -- Jarvis --
# Test 1 using the aiml.py library - 25/06/2019
import telepot  # Telegram connectivity
import aiml     # conversation engine
import os
import sys

# Boot the bot and load the main AIML file (which references the others).
kernel = aiml.Kernel()
kernel.learn("simple.aiml")

# Telegram token issued by BotFather.
telegram = telepot.Bot('Token do Chatbot Telegram')


def recebendoMSg(msg):
    """Handle one incoming Telegram message and reply via the AIML kernel."""
    fala = (msg['text'])
    # Echo what the user typed to the console.
    print(fala)
    resp = kernel.respond(fala)
    tipoMsg, tipoChat, chatID = telepot.glance(msg)
    # Send the bot's answer back to the originating chat.
    mensagem = telegram.sendMessage(chatID, resp)
    # Echo the bot's answer to the console.
    print(resp)
1/25/2012 Put some AIML up in this """
# NOTE(review): the line above closes a module docstring whose opening
# triple-quote is outside this chunk.
import logging
import subprocess, select
import irclib, random
import sys
import time, urllib, urllib2, simplejson
import time  # NOTE(review): duplicate of the `time` import above
from datetime import datetime
import re
import aiml

# Build the AIML kernel and load the rule set.
k = aiml.Kernel()
k.setBotPredicate("name", "madbot")   # the bot's own name
k.setPredicate("name", "dude")        # default name for the user
k.learn("std-startup.xml")
k.respond("load aiml b")

ACCESS_LOG_FILE = 'access_log.txt'

# File + console logging at INFO level.
logger = logging.getLogger('mad_logger')
logger.setLevel(logging.INFO)
fh = logging.FileHandler('mad.log')
fh.setLevel(logging.INFO)
formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# NOTE(review): `formatter` is never attached to a handler in this chunk,
# and `ch` is not added to the logger here — presumably done just past
# the visible lines; confirm.
logger.addHandler(fh)
ch = logging.StreamHandler()
ch.setLevel(logging.INFO)
import face_recognition
import numpy as np
import os
import aiml
import os  # NOTE(review): duplicate import, kept as-is (file-level)
import record
import speach_recognize
import tts_Player
import time
import awake_recognize

mybot_path = '../lab1/mybot/'
# Work from the directory that holds the AIML corpus.
os.chdir(mybot_path)

# Build the AIML kernel; std-startup.xml is the main entry point for
# registering AIML files, and can declare further patterns/corpora.
mybot = aiml.Kernel()
mybot.learn("std-startup.xml")
mybot.respond('load aiml c')

# Voice input replaces typed input: record roughly 5 seconds of audio.
myrecorder = record.recorder(record_seconds=5)

# iFlytek open-platform APPID and the matching speech-recognition API_KEY.
sr = speach_recognize.speachRecognizer(accountList=[{
    'APPID': '5cad4c88',
    'API_KEY': '55dba8b5606fac7572450e79a2f03bcc'
}])
# The iFlytek speech-synthesis API is used further below.
# Python 2/3 compatibility shims for URL parsing and the HTTP server.
from cgi import parse_header, parse_multipart

# NOTE(review): `python_version`, `importlib`, `sys`, `os` and `aiml`
# come from earlier in the file (not visible in this chunk).
if python_version.startswith('3'):
    from urllib.parse import parse_qs, unquote
    from http.server import BaseHTTPRequestHandler, HTTPServer
    importlib.reload(sys)
else:
    from urlparse import parse_qs
    from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
    from urllib import unquote
    reload(sys)
    sys.setdefaultencoding('utf8')

# Precompiled AIML brain cache on disk.
BRAIN_FILE = "brain.dump"

# Load the cached brain when present; otherwise parse the AIML sources
# and save the brain for faster startup next time.
siri = aiml.Kernel()
if os.path.exists(BRAIN_FILE):
    print("Load brain file: " + BRAIN_FILE)
    siri.loadBrain(BRAIN_FILE)
else:
    print("Learn AIML files")
    siri.bootstrap(learnFiles="std-startup.aiml", commands="load aiml b")
    print("Saving brain file: " + BRAIN_FILE)
    siri.saveBrain(BRAIN_FILE)


def get_answer(handler, ctype='application/json'):
    # Answer a request: read the payload and start a 200 response.
    # NOTE(review): this function continues beyond the visible chunk.
    data = handler.get_payload()
    handler.send_response(200)
    handler.send_header('Content-Type', ctype)
def loadAIML(self, f="nlgAIML.xml"):
    """Attach a fresh AIML kernel to this object and teach it the rules in *f*."""
    kernel = self.aimlKernel = aiml.Kernel()
    kernel.learn(f)
import aiml
import os

chatBot = aiml.Kernel()
# Brain-cache fast path kept for reference (currently disabled):
#if os.path.isfile("bot_brain.brn"):
#    chatBot.bootstrap(brainFile = "bot_brain.brn")
#else:
chatBot.bootstrap(learnFiles="std-startup.xml", commands="load aiml b")
chatBot.saveBrain("bot_brain.brn")
# NOTE(review): `n` and `campaignName` are not defined anywhere in this
# chunk — presumably assigned elsewhere in the file; confirm before running.
print(n)
# Python 2 print statement; this file mixes py2/py3 print styles.
print chatBot.respond("Init")

goalList = ["awareness", "conversion", "sale"]
typeList = ["display", "search"]
bot_response = ""

# chatBot now ready for use: simple command REPL.
while True:
    message = raw_input("\nYour message: ")
    if message == "quit":
        exit()
    elif message == "save":
        # Persist the current brain on demand.
        chatBot.saveBrain("bot_brain.brn")
    elif message == "test":
        print campaignName
    else:
        bot_response = chatBot.respond(message)
# NOTE(review): this chunk begins inside a function/branch whose header is
# not visible; the indentation below is reconstructed and must be checked
# against the surrounding file.
    else:
        # Ask the specialized bot a question appropriate for the user's age.
        question = specialized_bot.respond(
            map_age(kernel.getPredicate('age', session_id)))
        if question == ' ':
            print "I don't know what should i tell you know."
        else:
            # Record the question so it is not asked twice, and persist.
            questions_asked[username].add(question)
            questions_asked[username].remove(' ')
            write_dictionary(questions_asked, questions_asked_filename)
            print question
else:
    print(bot_response)

# Four persona kernels, each booted from its own startup file.
kernel = aiml.Kernel()
kernel.learn("std-startup.xml")
kernel.respond("load aiml b")

annoyed_bot = aiml.Kernel()
annoyed_bot.learn("std-startup_annoyed.xml")
annoyed_bot.respond("load aiml b")

specialized_bot = aiml.Kernel()
specialized_bot.learn("std-startup_specialized.xml")
specialized_bot.respond("load aiml b")

new_person = aiml.Kernel()
new_person.learn("std-startup_new_person.xml")
new_person.respond("load aiml b")
def Floki(message):
    """Answer *message* via AIML, retrying with text segmentation, spelling
    correction, and embedding-similarity rewrites until a reply is found.

    Returns the first non-empty kernel response, or "No Answer".
    Helpers TextSegmentation / AutoCorrect / Embedding_similarity are
    defined elsewhere in the file.
    """
    simitest = ''  # NOTE(review): never used in this function
    # Create a Kernel object.
    mybot = aiml.Kernel()
    # When loading an AIML set, you have two options: load the original
    # AIML files, or load a precompiled "brain" that was created from a
    # previous run. If no brain file is available, we force a reload of
    # the AIML files.
    brainLoaded = False
    forceReload = False
    while not brainLoaded:
        if forceReload or (len(sys.argv) >= 2 and sys.argv[1] == "reload"):
            # Use the Kernel's bootstrap() method to initialize the Kernel. The
            # optional learnFiles argument is a file (or list of files) to load.
            # The optional commands argument is a command (or list of commands)
            # to run after the files are loaded.
            mybot.bootstrap(
                learnFiles=os.path.abspath(os.path.curdir) + '/acc/AIML/std-startup.xml',
                commands="load aiml b")
            brainLoaded = True
            # Now that we've loaded the brain, save it to speed things up for
            # next time.
            mybot.saveBrain(os.path.abspath(os.path.curdir) + '/acc/AIML/standard.brn')
        else:
            # Attempt to load the brain file. If it fails, fall back on the
            # reload path above.
            try:
                # The optional brainFile argument specifies a brain file to load.
                mybot.bootstrap(brainFile=os.path.abspath(os.path.curdir) + '/acc/AIML/standard.brn')
                brainLoaded = True
            except:
                forceReload = True
    # Each stage only runs when the previous stage produced an empty reply;
    # otherwise the previous stage's reply is returned immediately.
    message_respond = mybot.respond(message)
    # Segmentation, e.g. "whatareyou" -> "what are you"
    if message_respond == '':
        Smessage = TextSegmentation(message)
        Smessage_respond = mybot.respond(Smessage)
    else:
        return message_respond
    # Correction, e.g. "wht are you"
    if Smessage_respond == '':
        Cmessage = AutoCorrect(message)
        Cmessage_respond = mybot.respond(Cmessage)
    else:
        return Smessage_respond
    # Correction and segmentation, e.g. "wht areyou"
    if Cmessage_respond == '':
        CSmessage = AutoCorrect(message)
        CSmessage = TextSegmentation(CSmessage)
        CSmessage_respond = mybot.respond(CSmessage)
    else:
        return Cmessage_respond
    # Similarity rewrite of the raw message.
    if CSmessage_respond == '':
        Simimessage = Embedding_similarity(message)
        Simimessage_respond = ''
        if Simimessage != None:
            Simimessage_respond = mybot.respond(Simimessage)
    else:
        return CSmessage_respond
    # Segmentation then similarity.
    if Simimessage_respond == '':
        SSimimessage = TextSegmentation(message)
        SSimimessage = Embedding_similarity(SSimimessage)
        SSimimessage_respond = ''
        if SSimimessage != None:
            SSimimessage_respond = mybot.respond(SSimimessage)
    else:
        return Simimessage_respond
    # Auto-correction then similarity.
    if SSimimessage_respond == '':
        CSimimessage = AutoCorrect(message)
        CSimimessage = Embedding_similarity(CSimimessage)
        CSimimessage_respond = ''
        if CSimimessage != None:
            CSimimessage_respond = mybot.respond(CSimimessage)
    else:
        return SSimimessage_respond
    # Last stage: either its reply or the explicit failure marker.
    if CSimimessage_respond != '':
        return CSimimessage_respond
    else:
        return "No Answer"
def __init__(self):
    """Create the AIML kernel and boot it from the standard startup script."""
    self._bot = aiml.Kernel()
    self._bot.bootstrap(learnFiles="std-startup.xml",
                        commands="load aiml b")
def __init__(self):
    """Set up an empty AIML kernel for later training."""
    self._kernel = aiml.Kernel()
# -*- coding: utf-8 -*- # # snippet from my Artificially Intelligent Remote Assistant Robot Project, Ethiopia # created by Jamie Amdework # May 2015 #import libraries import aiml import sys import time import os #prepare the brain (using aiml interpreter) brain = aiml.Kernel() #put conversation log on a test file f = open("user_log.txt", "a") print f #identify user input and bot response human = "you: " humanstring = str(human) bot = "bot: " botstring = str(bot) #load bunch of knowledge on brain brain.learn("F:\python work\Standard\std-startup.aiml") brain.learn("F:\python work\Standard\new_howmany.aiml") brain.learn("F:\python work\Standard\jokes.aiml") brain.learn("F:\python work\Standard\calendar.aiml") brain.learn("F:\python work\Standard\warnings.aiml") brain.learn("F:\python work\Standard\ai.aiml") brain.learn("F:\python work\Standard\personality.aiml") brain.learn("F:\python work\Standard\AIML\aiml-en-us-foundation-alice.v1-0/*") brain.learn("F:\python work\std-startup.aiml")
#!/usr/bin/env python3 import aiml import os import sys import serial import time # Aiml kury = aiml.Kernel() running = True class main(): def init(self): #Initalize voice print("Main: Stating Services") print("Main: Starting Voice") print("Main: Starting Web Gui") #os.system("cd Services/Server && python -m SimpleHTTPServer 8000") print("Main: Starting AIML") kury.learn("Services/Brain/Chatbot/bot.aiml") print("Main: Connecting to arduino") usbport = "COM3" try: right_arduino = serial.Serial(usbport, 9600) except:
def aichat():
    """Flask endpoint: answer the posted 'Query' through the basic AIML set."""
    bot = aiml.Kernel()
    bot.learn("basic_chat.xml")
    payload = json.loads(request.get_json())
    query = str(payload['Query'])
    # Patterns in basic_chat.xml are uppercase, so normalise the query.
    answer = bot.respond(query.upper())
    return jsonify({"Answer": answer, "status": 200})
def init():
    global kernel
    "Initialize the front-ends and back-ends."
    # Fetch the configuration info
    config = configFile.get()
    # Initialize the AIML interpreter
    #print "Initializing AIML interpreter (please be patient)..."
    kernel = aiml.Kernel()
    # Extract config options, falling back to defaults when missing.
    # NOTE(review): the bare excepts below swallow every error, not just
    # missing keys — KeyError would be the intended catch.
    try:
        verbose = config["general.verbose"] == "yes" or config[
            "cla.verboseMode"] == "yes"
    except:
        verbose = False
    try:
        botName = config["general.botname"]
    except:
        botName = "Nameless"
    try:
        botMaster = config["general.botmaster"]
    except:
        botMaster = "The Master"
    try:
        sessionsPersist = config["general.sessionspersist"].lower() in [
            "yes", "y", "true"
        ]
    except:
        sessionsPersist = False
    try:
        sessionsDir = config["general.sessionsdir"]
    except:
        sessionsDir = "sessions"
    # set up the kernel
    kernel.verbose(verbose)
    kernel.setPredicate("secure", "yes")  # secure the global session
    kernel.bootstrap(learnFiles="std-startup.xml", commands="bootstrap")
    kernel.setPredicate("secure", "no")  # and unsecure it.
    # Initialize bot predicates from every "botinfo.*" config entry.
    for k, v in config.items():
        if k[:8] != "botinfo.":
            continue
        kernel.setBotPredicate(k[8:], v)
    # Load persistent session data, if necessary
    if sessionsPersist:
        try:
            for session in os.listdir(sessionsDir):
                # Session files are named "*****@*****.**", where
                # user@protocol is also the internal name of the session.
                root, ext = os.path.splitext(session)
                if ext != ".ses":
                    # This isn't a session file.
                    continue
                # Load the contents of the session file (a single dictionary
                # containing all the predicates for this session).
                if verbose:
                    print "Loading session:", root
                f = file("%s/%s" % (sessionsDir, session), "rb")
                d = marshal.load(f)
                f.close()
                # update the predicate values in the Kernel.
                for k, v in d.items():
                    kernel.setPredicate(k, v, root)
        except OSError:
            print "WARNING: Error loading session data from", sessionsDir
    # Handle local mode: only start the tty frontend
    if config['cla.localMode'].lower() in ["yes", "y", "true"]:
        try:
            _addFrontEnd("tty", "FrontEndTTY")
        except:
            print "ERROR initializing frontend class frontends.tty.FrontEndTTY"
            traceback.print_tb(sys.exc_info()[2])
    else:
        # Initialize the front-ends. Pythonic black magic ensues...
        # First we iterate over all frontend modules.
        for fe in frontends.__all__:
            # If this frontend isn't activated in the configuration file,
            # ignore it.
            try:
                isActive = (config["%s.active" % fe].lower() in ["yes", "y", "true"])
            except KeyError:
                print "WARNING: no 'active' entry found for module %s in configuration file." % fe
                isActive = False
            if not isActive:
                if config['cla.verboseMode'] == 'yes':
                    print "Skipping inactive frontend: %s" % fe
                continue
            # Attempt to extract the name of the front-end class defined in this module.
            # If no such class is defined, or if the class is not a subclass of IFrontEnd,
            # skip this module.
            try:
                cls = eval("frontends.%s.frontEndClass" % fe)
                if not issubclass(eval("frontends.%s.%s" % (fe, cls)),
                                  frontends.frontend.IFrontEnd):
                    continue
            except AttributeError:
                # no valid front-end class defined in this file.
                print "WARNING: could not find valid front-end class in module %s" % fe
                continue
            # Create an instance of this class in the _frontends dictionary
            try:
                _addFrontEnd(fe, cls)
            except:
                # raise  # uncomment for details on error
                print "ERROR initializing frontend class frontends.%s.%s" % (
                    fe, cls)
                traceback.print_tb(sys.exc_info()[2])
                continue
#importing the aiml module import aiml kernel= aiml.Kernel() #initiating the Kernel kernel.bootstrap(brainFile = "bot_brain.brn") #since I have already optimised the files for you so now you need to only include the brain file '''set predicates for your bot name, hometown, gender and so on............. alo you can set the user predicted by using kernel.setBotPredicate() And you can set the sessionID for custom chat behaviour''' kernel.setBotPredicate("hometown", "127.0.0.1") kernel.setBotPredicate("name", "Tony") kernel.setBotPredicate("master", "Sharmaji") kernel.setBotPredicate("gender", "Male") while True: print kernel.respond(raw_input("Enter your message >> "))
def qa(question):
    """Interactive QA turn: AIML first, then encyclopedia / web-summary
    fallbacks for '#'-prefixed template hits.

    NOTE(review): the `question` parameter is never used — the function
    reads its input from raw_input() instead; confirm intent.
    """
    # Initialise the jieba word segmenter.
    T.jieba_initialize()
    # Switch to the directory holding the corpus (disabled).
    mybot_path = './'
    # os.chdir(mybot_path)
    mybot = aiml.Kernel()
    mybot.learn(
        os.path.split(os.path.realpath(__file__))[0] +
        "/resources/std-startup.xml")
    mybot.learn(
        os.path.split(os.path.realpath(__file__))[0] +
        "/resources/Common conversation.aiml")
    mybot.learn(
        os.path.split(os.path.realpath(__file__))[0] + "/resources/bye.aiml")
    mybot.learn(
        os.path.split(os.path.realpath(__file__))[0] + "/resources/tools.aiml")
    mybot.learn(
        os.path.split(os.path.realpath(__file__))[0] + "/resources/bad.aiml")
    mybot.learn(
        os.path.split(os.path.realpath(__file__))[0] + "/resources/funny.aiml")
    mybot.learn(
        os.path.split(os.path.realpath(__file__))[0] +
        "/resources/OrdinaryQuestion.aiml")
    # mybot.respond('Load Doc Snake')
    # Load the encyclopedia attribute list; the banner below is cosmetic.
    print ''' .----------------. .-----------------. .----------------. .----------------. .----------------. | .--------------. || .--------------. || .--------------. || .--------------. || .--------------. | | | _______ | || | ____ _____ | || | __ | || | ___ ____ | || | _________ | | | | / ___ | | || ||_ \|_ _| | || | / \ | || | |_ ||_ _| | || | |_ ___ | | | | | | (__ \_| | || | | \ | | | || | / /\ \ | || | | |_/ / | || | | |_ \_| | | | | '.___`-. | || | | |\ \| | | || | / /__\ \ | || | | __'. | || | | _| _ | | | | |`\____) | | || | _| |_\ |_ | || | _/ / \ \_ | || | _| | \ \_ | || | _| |___/ | | | | | |_______.' | || ||_____|\____| | || ||____| |____|| || | |____||____| | || | |_________| | | | | | || | | || | | || | | || | | | | '--------------' || '--------------' || '--------------' || '--------------' || '--------------' | '----------------' '----------------' '----------------' '----------------' '----------------' Eric:你好,我是Eric。╭(╯^╰)╮ '''
    input_message = raw_input("Enter your message >> ")
    # Guard rails: overly long or empty input gets a canned reply.
    if len(input_message) > 60:
        print mybot.respond("句子长度过长")
    elif input_message.strip() == '':
        print mybot.respond("无")
    print input_message
    message = T.wordSegment(input_message)  # strip punctuation / segment
    print 'word Seg:' + message
    print '词性:'
    words = T.postag(input_message)
    if message == 'q':
        exit()
    else:
        response = mybot.respond(message)
        print "======="
        print response
        print "======="
        if response == "":
            # No AIML match at all.
            ans = mybot.respond('找不到答案')
            print 'Eric:' + ans
        # Encyclopedia search: '#'-prefixed responses are directives.
        elif response[0] == '#':
            # Matched an encyclopedia-lookup template.
            if response.__contains__("searchbaike"):
                print "searchbaike"
                print response
                res = response.split(':')
                # entity to look up
                entity = str(res[1]).replace(" ", "")
                # attribute of that entity
                attr = str(res[2]).replace(" ", "")
                print entity + '<---->' + attr
                ans = baike.query(entity, attr)
                # If the lookup hit an answer it returns a list.
                if type(ans) == list:
                    print 'Eric:' + QAT.ptranswer(ans, False)
                elif ans.decode('utf-8').__contains__(u'::找不到'):
                    # Fall back to Baidu + Bing web summaries.
                    print "通用搜索"
                    ans = search_summary.kwquery(input_message)
            # No template matched: generic web query.
            elif response.__contains__("NoMatchingTemplate"):
                print "NoMatchingTemplate"
                ans = search_summary.kwquery(input_message)
            if len(ans) == 0:
                ans = mybot.respond('找不到答案')
                print 'Eric:' + ans
            elif len(ans) > 1:
                # Several candidate answers; print them all.
                print "不确定候选答案"
                print 'Eric: '
                for a in ans:
                    print a.encode("utf8")
            else:
                print 'Eric:' + ans[0].encode("utf8")
        # Plain template match: the response is the answer.
        else:
            print 'Eric:' + response
def response(message):
    """Return the AIML kernel's reply to *message* using the `greet` rules."""
    bot = aiml.Kernel()
    bot.learn(greet)
    return bot.respond(message)
def handle(self):
    """Serve one request: parse a JSON line, answer via script lookup
    (lines starting with '>'), else AIML, else Dialogflow, and write the
    UTF-8 reply back to the client."""
    # message parsing
    message = self.rfile.readline().strip()
    message = message.decode("utf-8")
    print("msg:"+message)
    message = json.loads(message)
    #print((message['contents']))
    # message processing...
    data = {}
    data['contents'] = message
    # ========== intercept by input data prefix'>'
    if len(data['contents'])>0 and data['contents'][0]=='>':
        # Script-word lookup: the text after '>' is a key into the pickle.
        answer = ""
        print(data['contents'][1:])
        dict_data = data['contents'][1:]
        # NOTE(review): `fr` is never closed — a `with` block would be safer.
        fr = open('./script/script_word.pickle','rb')
        loaded = pickle.load(fr)
        if dict_data in loaded:
            answer = answer + '>'
            for ts in loaded[str(dict_data)]:
                answer = answer+str(ts)+" "
            print(answer)
        else:
            answer = "key: " + str(dict_data)+" is not existed"
            print(answer)
    else:
        # Build an AIML kernel per request: precompiled brain if present,
        # otherwise (or on "reload") parse the AIML sources.
        kern = aiml.Kernel()
        brainLoaded = False
        forceReload = False
        while not brainLoaded:
            if forceReload or (len(sys.argv) >= 2 and sys.argv[1] == "reload"):
                kern.bootstrap(learnFiles="std-startup.xml", commands="load aiml b")
                brainLoaded = True
                # kern.saveBrain("standard.brn")
            else:
                try:
                    kern.bootstrap(brainFile = "standard.brn")
                    brainLoaded = True
                except:
                    forceReload = True
        # Enter the main input/output loop.
        # print("\nINTERACTIVE MODE (ctrl-c to exit)")
        # ============ AIML ===============
        # Re-tokenise the Korean text, padding particles ("Josa") with
        # spaces so they match the AIML patterns.
        s = data["contents"]
        res = okt.pos(s)
        content = ""
        for word in res:
            if (word[1] == "Josa"):
                content = content + " "+word[0]+" "
            else:
                content = content+ word[0]
        response = kern.respond(content)
        print("tokenized : ",response)
        # If the tokenised form missed, retry with the raw text.
        if response == "해당되는 내용이 없습니다.":
            response = kern.respond(data["contents"])
            print("row or tokenized : ",response)
        # ============ DIALOGFLOW ===============
        # Final fallback: hand the message to Dialogflow.
        if response == "해당되는 내용이 없습니다.":
            language = "ko"
            msg = data["contents"]
            response = detect_intent_texts(msg,language)
        answer = response
    # creating response message...
    self.wfile.write(answer.encode("utf-8"))
import pickle
import aiml
import json
import nltk
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Activation, Dropout
from keras.optimizers import SGD
import random

words = []
classes = []
documents = []
ignore_words = ['?', '!']

# FIX: the previous code assigned `intents = aiml.Kernel()` and then
# iterated `intents['intents']` — a Kernel is not subscriptable, so this
# crashed with a TypeError. The intent definitions come from intents.json
# (as the original commented-out lines show); keep the AIML kernel as a
# separate object. `nltk` and `json` imports were also missing.
kernel = aiml.Kernel()
kernel.bootstrap(learnFiles="std-startup.aiml", commands="load aiml b")

data_file = open('intents.json').read()
intents = json.loads(data_file)

for intent in intents['intents']:
    for pattern in intent['patterns']:
        # tokenize each word
        w = nltk.word_tokenize(pattern)
        words.extend(w)
        # add documents in the corpus
        documents.append((w, intent['tag']))
        # add to our classes list
        if intent['tag'] not in classes:
            classes.append(intent['tag'])
from time import sleep, time
from decimal import *
import g
import datetime
import traceback
import math
import socket
import threading
import sys
import random
import aiml

# Boot the ALICE-style kernel once at import time.
alice = aiml.Kernel()
alice.learn("std-startup.xml")
alice.respond('load aiml b')

# Random 4-digit login id plus the chat server endpoint.
LOGINID = str(random.randint(1000, 9999))
HOST = "localhost"
PORT = 9013


def funQuit():
    """Menu hook: quit (intentionally a no-op)."""
    pass


def StartProgram():
    """Entry point: skip the old text menu and connect immediately."""
    #strMenuTitle = 'Socket Client'
    #listMenu = ['Connect Server','Quit']
    #listFunction = ['funConnectServer','funQuit']
    #g.rb.ShowMenu(strMenuTitle, listMenu, listFunction)
    funConnectServer()
def __init__(self):
    """Prepare the AIML kernel and the brain/test-file configuration."""
    self.__kernel = aiml.Kernel()
    # Set True once a brain has been loaded or saved.
    self.__hasBrain = False
    # On-disk brain cache, learn command, and the test AIML file.
    self.__brain = '.brain/bot_brain.brn'
    self.__command = 'LOAD TESTS FILES'
    self.__aiml = 'src/aiml/tests.xml'
def set_kernel(self):
    """(Re)build this wrapper's AIML kernel and learn self.aiml_filename."""
    self.kernel = aiml.Kernel()
    # None disables byte re-encoding so the kernel works in unicode
    # (per the PyAIML setTextEncoding API).
    self.kernel.setTextEncoding(None)
    self.kernel.bootstrap(learnFiles=self.aiml_filename)
def connect(self): k = aiml.Kernel() print k.learn('std-startup.xml') print k.respond('LOAD AIML B') return k
#!/usr/bin/env python import roslib roslib.load_manifest('howie_ros') import rospy from festival.srv import * from std_msgs.msg import String import aiml import aiml import sys # Create a Kernel object. kern = aiml.Kernel() # #rospy.wait_for_service('speak_text') speak_text_service = rospy.ServiceProxy('speak_text', FestivalSpeech) speak_text_service("Hello my name is say teen the robot. ") # When loading an AIML set, you have two options: load the original # AIML files, or load a precompiled "brain" that was created from a # previous run. If no brain file is available, we force a reload of # the AIML files. brainLoaded = False forceReload = False name = "Robbie" def speak_text(text): #therapist = aiml.Kernel() #print "I AM SAYING : " + text rospy.loginfo(text.data) #print ">>>> CALLING FESTIVAL >>>>>"
def FinalAnswer(query, keyword, token):
    """Answer *query*: AIML first, then FAQ lookup / description mining,
    with random fallback replies when nothing matches.

    NOTE(review): the access token is hard-coded below — move it to
    configuration/secrets. The bare `except:` swallows all errors and
    returns a fallback. `final_answer != '' or len(final_answer) > 10`
    looks like it was meant to be `and` — as written the length test is
    dead; confirm intent before changing.
    """
    if token == 'CDyPJxneSxHWwCySZYruxynh5j2m6fAf':
        print('QueryProcessstart', str(datetime.now()))
        final_query, indx, text_data = QueryProcess(query, keyword)
        print('QueryProcessend', str(datetime.now()))
        # Concatenate the matched passages for later description mining.
        desc_answer = [text_data[i] for i in indx]
        desc_answer = ' '.join(desc_answer)
        try:
            # First try the AIML brain directly.
            print('aimlstart', str(datetime.now()))
            kern = aiml.Kernel()
            kern.bootstrap(brainFile=brain_file)
            kernel_reply = kern.respond(query)
            print('aimlend', str(datetime.now()))
            if not "Sorry, I didn't get you.." in kernel_reply:
                return kernel_reply
            elif len(indx) == 0:
                # No passage matched: run a fresh search and mine sentences
                # from the resulting descriptions.
                indices = searchindex(query, keyword)
                description = [text_data[i] for i in indices]
                description_answer = ' '.join(description)
                if description_answer != '':
                    descri = re.split('[.]', description_answer)
                    # Drop navigation/boilerplate sentences.
                    descri = [
                        i for i in descri
                        if not ('?' in i or 'Get an overview' in i
                                or 'Questions to Ask' in i or 'See' in i
                                or 'Learn about treatment' in i
                                or 'Last Medical Review' in i
                                or 'Last Revised' in i or 'Chapter' in i
                                or 'For more information' in i
                                or 'To learn more' in i)
                    ]
                    # Keep only substantive sentences (> 5 words).
                    descri = [
                        i for i in descri if i != '' and len(i.split()) > 5
                    ]
                    query_list = preprocess(query).split()
                    print('final_indexstart', str(datetime.now()))
                    final_index = FinalIndex(final_query, descri, query_list)
                    print('final_indexend', str(datetime.now()))
                    final_answer = '\n'.join(descri[i] + '.' for i in final_index)
                    if final_answer != '' or len(final_answer) > 10:
                        return final_answer
                    else:
                        return random.choice(fallback)
                else:
                    return random.choice(fallback)
            elif len(indx) > 0:
                # A passage matched: prefer the FAQ answer for it, then
                # fall back to mining the passage text.
                ind = indx[0]
                question = text_data[ind]
                answer_data = list(
                    db_client.local.FAQ.find({'Question': question}))
                if len(answer_data) > 0:
                    final_answer = answer_data[0]['Answer']
                    return final_answer
                elif desc_answer != '':
                    desc = re.split('[.]', desc_answer)
                    # Same boilerplate filter as above.
                    desc = [
                        i for i in desc
                        if not ('?' in i or 'Get an overview' in i
                                or 'Questions to Ask' in i or 'See' in i
                                or 'Learn about treatment' in i
                                or 'Last Medical Review' in i
                                or 'Last Revised' in i or 'Chapter' in i
                                or 'For more information' in i
                                or 'To learn more' in i)
                    ]
                    desc = [i for i in desc if i != '' and len(i.split()) > 5]
                    query_list = preprocess(query).split()
                    print('final_indexstart', str(datetime.now()))
                    final_index = FinalIndex(final_query, desc, query_list)
                    # NOTE(review): label below repeats "start" — likely a
                    # copy-paste slip for "final_indexend".
                    print('final_indexstart', str(datetime.now()))
                    final_answer = '\n'.join(desc[i] + '.' for i in final_index)
                    if final_answer != '' or len(final_answer) > 10:
                        return final_answer
                    else:
                        return random.choice(fallback)
                else:
                    return random.choice(fallback)
        except:
            return random.choice(fallback)
    else:
        return 'Unauthorized'
#!/usr/bin/python3 import os import aiml from flask import Flask from flask import render_template BRAIN_FILE="brain.dump" kernel = aiml.Kernel() # To increase the startup speed of the bot it is # possible to save the parsed aiml files as a # dump. This code checks if a dump exists and # otherwise loads the aiml from the xml files # and saves the brain dump. if os.path.exists(BRAIN_FILE): print("Loading from brain file: " + BRAIN_FILE) kernel.loadBrain(BRAIN_FILE) else: print("Parsing aiml files") kernel.bootstrap(learnFiles="std-startup.aiml", commands="load aiml b") print("Saving brain file: " + BRAIN_FILE) kernel.saveBrain(BRAIN_FILE) # Endless loop which passes the input to the bot and prints # its response app = Flask(__name__) @app.route("/") def index():
def __init__(self, ip):
    """Create a per-client session: load the shared brain, record the
    client IP, and stamp the last-used time."""
    self.kernel = aiml.Kernel()
    # `aimlPath` is a module-level constant defined elsewhere in the file.
    self.kernel.bootstrap(brainFile=aimlPath + "bot_brain.brn")
    self.lastUsed = time.time()
    self.ip = ip