from rasa_core import utils
from rasa_core.agent import Agent
from rasa_core.channels.console import ConsoleInputChannel
from rasa_core.interpreter import RasaNLUInterpreter
from rasa_core.policies.keras_policy import KerasPolicy
from rasa_core.policies.memoization import MemoizationPolicy
from config import getData


def run_weather_bot(serve_forever=True):
    # Load the trained NLU interpreter and dialogue model, then chat on the console.
    interpreter = RasaNLUInterpreter(getData()["model_directory"] + '/default/' + getData()["model_name"])
    agent = Agent.load(getData()["dialogue"], interpreter=interpreter)
    if serve_forever:
        agent.handle_channel(ConsoleInputChannel())
    return agent


def train_dialogue():  # formerly took domain_file, model_path, training_data_file
    # Train the dialogue model from the stories and persist it to the dialogue path.
    utils.configure_colored_logging(loglevel='INFO')
    agent = Agent(getData()["domain"],
                  policies=[MemoizationPolicy(max_history=2), KerasPolicy()])
    training_data = agent.load_data(getData()["stories"])
    agent.train(training_data,
                epochs=400,
                batch_size=100,
                validation_split=0.2)
    agent.persist(getData()["dialogue"])
    return agent
from config import getData

print(getData()["data"])
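# Every script in this repo reads its paths and credentials through
# config.getData(), but config.py itself is not included. A minimal,
# hypothetical sketch, assuming only the keys the scripts actually use --
# all values below are placeholders to adapt (the NLU paths mirror the
# commented-out example in the run_nlu script further down):
def getData():
    return {
        "data": "./data/data.json",              # NLU training examples
        "config_spacy": "./config_spacy.json",   # spaCy pipeline config for rasa_nlu
        "model_directory": "./models/nlu",       # where trained NLU models are persisted
        "model_name": "weathernlu",              # fixed_model_name used when persisting
        "domain": "./weather_domain.yml",        # rasa_core domain file
        "stories": "./data/stories.md",          # rasa_core training stories
        "dialogue": "./models/dialogue",         # where the dialogue model is persisted
        "slack": {
            "oauth_access_token": "xoxb-...",
            "user_oauth_access_token": "xoxp-...",
            "verification_token": "...",
        },
    }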
from rasa_core import utils
from rasa_core.agent import Agent
from rasa_core.channels.console import ConsoleInputChannel
from rasa_core.interpreter import RasaNLUInterpreter
from rasa_core.policies.keras_policy import KerasPolicy
from rasa_core.policies.memoization import MemoizationPolicy
from config import getData


def run_weather_online(input_channel, interpreter, domain_file, training_data_file):
    # Interactive (online) training: new stories are collected from the input channel.
    agent = Agent(domain_file,
                  policies=[MemoizationPolicy(max_history=3), KerasPolicy()],
                  interpreter=interpreter)
    training_data = agent.load_data(training_data_file)
    agent.train_online(training_data,
                       input_channel=input_channel,
                       epochs=400,
                       batch_size=100,
                       validation_split=0.2)


if __name__ == '__main__':
    utils.configure_colored_logging(loglevel='INFO')
    nlu_interpreter = RasaNLUInterpreter(getData()["model_directory"] + '/default/' + getData()["model_name"])
    run_weather_online(ConsoleInputChannel(), nlu_interpreter,
                       getData()["domain"], getData()["stories"])
from rasa_nlu import config
from rasa_nlu.model import Interpreter
from config import getData
import json
import sys


def run_nlu(model, spacy_config):
    interpreter = Interpreter.load(model)  # , config.load(spacy_config))
    result = interpreter.parse(sys.argv[1])
    print(json.dumps(result, indent=4, sort_keys=True))


if __name__ == '__main__':
    # train_nlu('./data/data.json', './config_spacy.json', './models/nlu')
    run_nlu(getData()["model_directory"] + '/default/' + getData()["model_name"],
            getData()["config_spacy"])
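# Example usage (the script's file name is an assumption -- adapt it to however
# this file is saved): `python nlu_run.py "What's the weather in London?"`
# prints the parsed intent and entities as formatted JSON.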
import json

import requests

import config


def doit(reqid, reqno, times, score, guess):
    # Submit a bet for the given draw; cookies and headers come from config.
    post_url = 'http://qx8888cp.com/Servers/Game/pcdd/pcddsubmit.ashx?CaiID=15'
    params = config.getData(reqid, reqno, times, score, guess)
    print('Draw number: %s, bet multiplier: %d, bet amount: %d' % (reqno, times, score))
    post = requests.post(post_url, data=json.dumps(params),
                         cookies=config.c_dict, headers=config.headers)
    print(post.text)
from rasa_core.channels import HttpInputChannel
from rasa_core.agent import Agent
from rasa_core.interpreter import RasaNLUInterpreter
from rasa_slack_connector import SlackInput
from config import getData

# Load the trained NLU interpreter and dialogue model.
nlu_interpreter = RasaNLUInterpreter(getData()["model_directory"] + '/default/' + getData()["model_name"])
agent = Agent.load(getData()["dialogue"], interpreter=nlu_interpreter)

# Wire the agent to Slack using the tokens from config.py.
input_channel = SlackInput(getData()["slack"]["oauth_access_token"],
                           getData()["slack"]["user_oauth_access_token"],
                           getData()["slack"]["verification_token"],
                           True)

agent.handle_channel(HttpInputChannel(5004, '/', input_channel))
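# This serves an HTTP input channel on port 5004 at path '/'. For Slack messages
# to reach the bot, the Slack app's event request URL has to point at a publicly
# reachable address for this endpoint (e.g. via a tunnel such as ngrok).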
from __future__ import absolute_import, division, unicode_literals

from rasa_core import utils
from rasa_core.agent import Agent
from rasa_core.policies.keras_policy import KerasPolicy
from rasa_core.policies.memoization import MemoizationPolicy
from config import getData

if __name__ == '__main__':
    utils.configure_colored_logging(loglevel='INFO')

    domain = getData()["domain"]
    stories = getData()["stories"]
    dialogue = getData()["dialogue"]

    agent = Agent(domain, policies=[MemoizationPolicy(max_history=2), KerasPolicy()])
    training_data = agent.load_data(stories)
    agent.train(training_data,
                epochs=400,
                batch_size=100,
                validation_split=0.2)
    agent.persist(dialogue)
from rasa_nlu.training_data import load_data
from rasa_nlu import config
from rasa_nlu.model import Trainer, Interpreter, Metadata
from config import getData
import json


def train_nlu(data, spacy_config, model_dir):
    training_data = load_data(data)
    trainer = Trainer(config.load(spacy_config))
    trainer.train(training_data)
    model_directory = trainer.persist(model_dir, fixed_model_name=getData()["model_name"])


if __name__ == '__main__':
    train_nlu(getData()["data"], getData()["config_spacy"], getData()["model_directory"])
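# Typical order of operations, assuming the config.py sketch near the top:
#   1. Train the NLU model with this script.
#   2. Train the dialogue model with the MemoizationPolicy/KerasPolicy training
#      script above (it persists to getData()["dialogue"]).
#   3. Talk to the bot on the console via run_weather_bot(), refine it
#      interactively with run_weather_online(), or serve it to Slack with the
#      connector script.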