Example #1
0
 def __init__(self):
     # The episode starts in the "not finished" state.
     self.episode_over = False
     # Natural-language understanding / generation front ends.
     self.nlu = NLU()
     self.nlg = NLG()
     # Domain-specific dialog managers: weather and clock.
     self.dm_w = DM_WEATHER()
     self.dm_c = DM_CLOCK()
     # Tracks dialog state across the whole conversation.
     self.global_state_tracker = StateTracker()
Example #2
0
    def test_class_deduction(self):
        """Bp is a city and cities are places, so Bp must be a place."""
        engine = NLU()
        for statement in ("C(Bp,city)", "C(city,place)"):
            engine.integrate(statement)

        q, a, c = engine.ask("C(Bp, place)")
        logger.info("Answer to question %s: %s" % (q, engine.create_answer(q, a)))
        self.assertIsNotNone(a)
Example #3
0
class Weather_or_Clock():
    """Dialog agent that routes each user utterance to a weather or clock
    dialog manager based on the intent detected by the NLU component."""

    def __init__(self):
        # Initialize the pipeline components.
        self.nlu = NLU()
        self.nlg = NLG()
        self.dm_w = DM_WEATHER()
        self.dm_c = DM_CLOCK()
        self.global_state_tracker = StateTracker()
        self.episode_over = False

    def response(self, user_sentence):
        """Run one dialog turn: NLU -> state tracking -> DM -> NLG.

        Returns the system's natural-language reply, or None once the
        episode is over (previously this raised UnboundLocalError).
        """
        sys_nl = None
        if not self.episode_over:
            user_action = self.nlu.get_diaact(
                user_sentence
            )  # user_action = {'diaact':'request+weather_action', 'request_slots':{ 'weather_action':['打伞', '洗车']}, 'inform_slots':{}, 'intent':'weather\clock\other'}
            # Utterances carrying only time/place slots are classified as
            # 'other', e.g. "what about tomorrow?".
            print('user_action:', user_action)
            self.global_state_tracker.update_history_state(
                user_action=user_action)
            user_state = self.global_state_tracker.get_state()
            print('user_state:', user_state)
            if user_state['intent'] == 'weather':
                sys_action = self.dm_w.response(user_state)
            elif user_state['intent'] == 'clock':
                sys_action = self.dm_c.response(user_state)
            else:
                # BUG FIX: was `raw_sentence`, an undefined name.
                sys_action = self.other_response(user_sentence)
            self.global_state_tracker.update_history_state(
                sys_action=sys_action)
            print('sys_action:', sys_action)
            sys_nl = self.nlg.get_sentence(sys_action)
        return sys_nl
Example #4
0
    def test_integrate_defines_probability(self):
        """An implication alone yields p=0.5; asserting the premise raises it to 1."""
        engine = NLU()
        engine.integrate("IM(F(Joe,tired),F(Joe,slow))")
        q, a, c = engine.ask("F(Joe,tired)")
        self.assertEqual(c.probability, .5)

        # Once the fact itself is stated, it becomes certain.
        engine.integrate("F(Joe,tired)")
        q, a, c = engine.ask("F(Joe,tired)")
        self.assertIsNotNone(c)
        self.assertEqual(c.probability, 1.0)
Example #5
0
    def test_class_other_relations(self):
        """Function facts on a class should be answerable for its members."""
        engine = NLU()
        for fact in ("F(bird,fly)", "C(penguin,bird)", "F(penguin,swim)"):
            engine.integrate(fact)

        q, a, c = engine.ask("F(penguin, fly)")
        logger.info("State:\n%s\n" % graph(engine.question_store))
        logger.info("Answer to question %s: %s" % (q, engine.create_answer(q, a)))
        self.assertIsNotNone(a)
Example #6
0
    def test_implication_works_after_parent_statement(self):
        """Chained implications fire even when the premise arrives first."""
        engine = NLU()
        statements = (
            "F(Joe,tired)",
            "IM(F(Joe,tired),F(Joe,slow))",
            "IM(F(Joe,slow),F(Joe,angry))",
        )
        for statement in statements:
            engine.integrate(statement)

        q, a, c = engine.ask("F(Joe,angry)")
        self.assertIsNotNone(c)
        self.assertEqual(c.probability, 1.0)
Example #7
0
    def test_implication_transfers_probability(self):
        """Certainty propagates through an implication chain to the conclusion."""
        engine = NLU()
        statements = (
            "IM(F(Joe,tired),F(Joe,slow))",
            "IM(F(Joe,slow),F(Joe,angry))",
            "F(Joe,tired)",
        )
        for statement in statements:
            engine.integrate(statement)

        q, a, c = engine.ask("F(Joe,angry)")
        self.assertIsNotNone(c)
        self.assertEqual(c.probability, 1.0)
Example #8
0
    def test_class_relationship_is_one_way_2(self):
        """The generator derives grandparent classes once, then reaches a fixpoint."""
        engine = NLU()
        statements = (
            "C(Mary, person)",
            "C(Joe, person)",
            "C(kid, person)",
            "C(person, entity)",
            "C(firm, entity)",
        )
        for statement in statements:
            engine.integrate(statement)

        logger.info("State: \n%s\n" % graph(engine.working_memory))

        gen = InheritFromParentClass()
        concepts = list(gen.gen(engine.question_store))
        logger.info("Step 1: Generated concepts: %r" % concepts)
        # should generate Class for Mary, Joe, kid -> entity
        for generated in concepts:
            self.assertEqual(generated.parents[1].name, 'entity')

        concepts = list(gen.gen(engine.question_store))
        logger.info("Step 2: Generated concepts: %r" % concepts)
        # nothing else should be generated
        self.assertEqual(len(concepts), 0)
Example #9
0
    def test_trivial_match(self):
        """A wildcard question matches the directly integrated concept."""
        engine = NLU()
        engine.integrate("C(Budapest, city)")
        q, a, c = engine.ask("C(Budapest, ?)")
        logger.info("Answer to question %s: %s" % (q, engine.create_answer(q, a)))

        self.assertIsNotNone(a)
        answer_names = {value.name for value in a.values()}
        self.assertEqual(answer_names, {'city'})
Example #10
0
def rerank_jokes(joke_indices,
                 state,
                 nlu_engine: NLU,
                 max_count=30,
                 noise=0.5):
    """Re-rank candidate jokes by blending retrieval scores with NLU relevance.

    Args:
        joke_indices: Counter mapping joke id -> base retrieval score.
        state: per-user dialog state (source of the local preference vector).
        nlu_engine: scorer exposing ``score_text(text, vector)``.
        max_count: maximum number of joke ids to return.
        noise: stddev of Gaussian exploration noise; 0/None disables it.

    Returns:
        List of up to ``max_count`` joke ids, best first.
    """
    vector = normalize_vec(GLOBAL_COEF + get_coef(state))
    new_scores = Counter()
    # Score twice as many candidates as requested so that noise can promote
    # lower-ranked jokes into the final top-``max_count``.
    for key, score in joke_indices.most_common(max_count * 2):
        new_scores[key] = score + nlu_engine.score_text(JOKES[key], vector)
        if noise:
            new_scores[key] += random.gauss(mu=0, sigma=noise)
    # Idiom: build the result directly instead of appending in a loop
    # (the score component of each pair was unused).
    return [joke_id for joke_id, _ in new_scores.most_common(max_count)]
Example #11
0
    def __init__(self):
        """Configure logging, parse CLI flags, and wire up the assistant core."""
        log_format = '%(asctime)-15s %(levelname)-5s (PID %(process)d) %(message)s'
        logging.basicConfig(
            filename='info.log',
            level=logging.INFO,
            format=log_format,
        )

        # Command line: only the base port is configurable.
        parser = argparse.ArgumentParser(
            description='Start the Virtual Assistant Core.')
        parser.add_argument('--port', type=int, default=55801)
        args = parser.parse_args()

        self.loop = asyncio.get_event_loop()

        self.nlu = NLU()
        # Clients connect on --port; modules on the next port up.
        self.client_handler = VAClientHandler(self, args.port)
        self.module_handler = VAModuleHandler(self, args.port + 1)
Example #12
0
    def test_class_relationship_is_one_way(self):
        """Sharing a parent class must not generate any sibling-level facts."""
        engine = NLU()
        for statement in ("C(Mary, person)", "C(Joe, person)", "C(kid, person)"):
            engine.integrate(statement)

        logger.info("State: \n%s\n" % graph(engine.working_memory))

        gen = InheritFromParentClass()
        concepts = list(gen.gen(engine.question_store))

        logger.info("Generated concepts: %r" % concepts)

        self.assertEqual(len(concepts), 0)
Example #13
0
    def __init__(self, config, dataset):
        """Build the dual-VAE classifier: one sentence encoder per source,
        (optionally shared) Gaussian latent layers, and per-source decoders.

        Args:
            config: hyper-parameters (embed/hidden/latent sizes, dropout,
                `share_z` flag, decoder settings, ...).
            dataset: provides per-source vocabularies via `vocab` and
                `idx2word` — presumably keyed by 'query'/'parse'; verify
                against the dataset class.
        """
        super(DualVAE_classify, self).__init__()
        self.config = config
        self.dataset = dataset

        # Short aliases for the sizes used throughout the constructor.
        E, H = config.embed_size, config.hidden_size
        Z = config.latent_size
        D = config.dropout
        V = {}  # vocab_size

        # model components
        self.encode = nn.ModuleDict({})

        # If the latent variable z is shared between sources, use a single
        # network mapping to the latent space; otherwise define one per source.
        if config.share_z:
            self.enc2lat = Hidden2Gaussian(2 * H, Z, config)
            self.z_emb = nn.Linear(Z, H)
        else:
            self.enc2lat = nn.ModuleDict({})
            self.z_emb = nn.ModuleDict({})
        self.decode = nn.ModuleDict({})

        # One encoder (and, without share_z, one latent head) per source.
        for src in ['query', 'parse']:
            V[src] = len(dataset.vocab[src])
            self.encode[src] = SentEncoder(V[src], E, H, dropout=D)
            if not config.share_z:
                self.enc2lat[src] = Hidden2Gaussian(2 * H, Z, config)
                self.z_emb[src] = nn.Linear(Z, H)
            # Queries are reconstructed by a seq2seq decoder; the 'parse'
            # side is handled by the NLU classification head instead.
            if src == 'query':
                self.decode[src] = Decoder(config, V[src], E, H, dataset.vocab[src], dataset.idx2word[src], \
                     num_layers=config.num_layers, dropout=D, use_attn=config.attn, dec_len=config.dec_len)
            else:
                self.decode[src] = NLU(dataset, config, config.latent_size)

        # baseline estimator
#		self.bse = nn.Linear(H, 1)

        # KL divergence between the two Gaussian posteriors/priors.
        self.gauss_kl = NormKLLoss(unit_average=False)
        self.set_optimizer()
        self._step = 0
Example #14
0
 def watson(self, txt_usr):
     """Translate the user's Portuguese text to English, trying several
     translation backends in order, then run NLU on the translation.

     Returns [True, emotion-analysis result] on success, or [False] when
     every translator (or the NLU call) fails.
     NOTE(review): the NLU(traducao) result looks like an emotion/sentiment
     object — confirm against the nlu module.
     """
     traducao = ""
     try:
         # Backend 1: googletrans.
         from nlu import NLU
         from googletrans import Translator
         t = Translator()
         traducao = (t.translate(txt_usr, src='pt', dest='en')).text
         print(traducao)
     except Exception as e:
         print(e)
         try:
             # Backend 2: translators via Bing.
             import translators as ts
             traducao = ts.bing(txt_usr, 'pt', 'en')
             print(traducao)
         except Exception as e:
             print(e)
             try:
                 import translators as ts
                 # TODO:
                 # Write an async wrapper that awaits this library's result
                 # to guard against the infinite loop it sometimes enters.
                 traducao = ts.google(txt_usr, 'pt', 'en')
                 print(traducao)
             except Exception as e:
                 print(e)
                 try:
                     from translate import Translator  # poor translation quality
                     translator = Translator(from_lang="pt", to_lang="en")
                     traducao = translator.translate(
                         txt_usr)  # poor translation quality
                     print(traducao)
                 except Exception as e:
                     print(e)
                     # All backends failed.
                     return [False]
     try:
         emocao = NLU(traducao)
         return [True, emocao]
     except Exception as e:
         return [False]
Example #15
0
    def __init__(self):
        """Parse CLI flags, configure logging at the requested level, and
        wire up the assistant core."""
        parser = argparse.ArgumentParser(
            description='Start the Virtual Assistant Core.')
        parser.add_argument('--port', type=int, default=55801)
        parser.add_argument('--log-level', type=str.upper, default='INFO')
        args = parser.parse_args()

        self.log_level = args.log_level.lower()

        # Log file is named after the level, e.g. info.log / debug.log.
        log_format = '%(asctime)-15s %(levelname)-5s (PID %(process)d) %(message)s'
        logging.basicConfig(
            filename='{}.log'.format(self.log_level.lower()),
            level=getattr(logging, self.log_level.upper()),
            format=log_format,
        )

        self.loop = asyncio.get_event_loop()

        self.nlu = NLU()
        # Clients connect on --port; modules on the next port up.
        self.client_handler = VAClientHandler(self, args.port)
        self.module_handler = VAModuleHandler(self, args.port + 1)
Example #16
0
 def __init__(self, voc_fn, w2v_fn, w2v_dim, entity_types, entity_dict,
         action_mask_dict, obs_size, act_size, templates):
     """Assemble the bot: NLU front end, dialog state tracker, HCN policy."""
     self.nlu = NLU(voc_fn, w2v_fn, w2v_dim, entity_dict)
     self.dst = DST(entity_types)
     self.model = HCN(action_mask_dict, obs_size, act_size)
     # Response templates, indexed by the policy's predicted action.
     self.templates = templates
Example #17
0
class BotHCN(object):
    """Hybrid Code Network bot: NLU features + dialog state feed an action
    policy whose predictions index into response templates.

    Ported from Python 2 (print statements, xrange, raw_input) to Python 3
    for consistency with the rest of the codebase; behavior is unchanged.
    """

    def __init__(self, voc_fn, w2v_fn, w2v_dim, entity_types, entity_dict,
            action_mask_dict, obs_size, act_size, templates):
        self.nlu = NLU(voc_fn,w2v_fn,w2v_dim,entity_dict)
        self.dst = DST(entity_types)
        self.model = HCN(action_mask_dict,obs_size,act_size)
        self.templates = templates

    def _featurize(self, text):
        """Update the state tracker with entities from ``text`` and return
        (feature vector, action mask) for the policy network.

        Extracted helper: this sequence was duplicated verbatim in
        train/eval/test.
        """
        feat_bow = self.nlu.get_bow_vector(text)
        feat_emb = self.nlu.get_utter_emb(text)
        entities = self.nlu.extract_entities(text)
        self.dst.update(entities)
        feat_ctx = self.dst.get_feat()
        feats = np.concatenate((feat_bow, feat_emb, feat_ctx), axis=0)
        action_mask = self.model.get_action_mask(feat_ctx)
        return feats, action_mask

    def train(self, data_fn, epochs=5):
        """Train the policy on dialogs from ``data_fn`` (90/10 train/valid split)."""
        def train_dialog(dialog):
            # Accumulated loss over one dialog, with fresh state.
            loss = 0
            self.dst.clear()
            self.model.reset_state()
            for text, action in dialog:
                feats, action_mask = self._featurize(text)
                loss += self.model.train_step(feats, action, action_mask)[0]
            return loss
        data = list(get_data(data_fn))
        data = convert_train_data(data, self.templates)
        split = int(.9 * len(data))
        data_train = data[:split]
        data_valid = data[split:]
        for epoch in range(epochs):
            loss = sum(train_dialog(dialog) for dialog in data_train)
            accu = self.eval(data_valid)
            print('[{0}/{1}] {2:.4f} {3:.4f}'.format(epoch, epochs, loss, accu))
        self.model.save()

    def eval(self, dialogs):
        """Return the mean per-dialog action-prediction accuracy."""
        def eval_dialog(dialog):
            correct = 0
            self.dst.clear()
            self.model.reset_state()
            for text, real in dialog:
                feats, action_mask = self._featurize(text)
                pred = self.model.predict_action(feats, action_mask)
                correct += int(pred == real)
            return 1. * correct / len(dialog)
        return 1. * sum(eval_dialog(dialog) for dialog in dialogs) / len(dialogs)

    def test(self):
        """Interactive console loop: type text, get the bot's templated reply."""
        self.dst.clear()
        self.model.load()
        self.model.reset_state()
        while True:
            text = input(':: ')
            if text in ('clear', 'reset', 'restart'):
                # Start a fresh dialog.
                self.dst.clear()
                self.model.reset_state()
                print('')
            elif text in ('exit', 'quit', 'stop'):
                break
            else:
                # Empty input stands in for a silent user turn.
                text = text or '<SILENCE>'
                feats, action_mask = self._featurize(text)
                pred = self.model.predict_action(feats, action_mask)
                print('>>', self.templates[pred].format(**self.dst.entities))
Example #18
0
    def test_implication_with_class(self):
        """Implications stated on a class apply to individual members of it."""
        engine = NLU()
        statements = (
            "IM(F(person,tired),F(person,slow))",
            "IM(F(person,slow),F(person,angry))",
            "C(Joe,person)",
        )
        for statement in statements:
            engine.integrate(statement)

        q, a, c = engine.ask("F(Joe,angry)")
        logger.info("Answer to question %s: %s p=%r" %
                    (q, engine.create_answer(q, a), c.probability))

        # Asserting the premise for Joe should drive the conclusion to certainty.
        engine.integrate("F(Joe,tired)")

        logger.info("State:\n%s\n" % graph(engine.working_memory))
        q, a, c = engine.ask("F(Joe,angry)")

        logger.info("State:\n%s\n" % graph(engine.question_store))
        logger.info("Answer to question %s: %s p=%r" %
                    (q, engine.create_answer(q, a), c.probability))
        self.assertIsNotNone(c)
        self.assertEqual(c.probability, 1.0)
Example #19
0
File: cli.py Project: szroland/nlu
import logging
from nlu import NLU
from graph import graph

if __name__ == '__main__':
    logging.basicConfig(level=logging.WARN)

    print("Welcome to the NLU command line")
    print(
        "Start typing statements and questions in Mentalase or English or help for available commands"
    )

    print("")

    nlu = NLU()
    while True:

        command = input("> ")  # type: str
        if command == 'help':
            print(
                "Input Mentalase statements or questions one line per expression, or"
            )
            print(
                "input free text statements or questions (multiple statements per line, questions separately), or"
            )
            print("one of these commands:")
            print(
                "  working_memory (wm) : graph description of working memory state"
            )
            print(
                "  question_store (qs) : graph description of last question store"
Example #20
0
def respond(text, state, nlu_engine: NLU):
    """Handle one user message and return ``(response, state)``.

    Args:
        text: raw user message; empty/None is treated as a greeting.
        state: mutable per-user dialog state (joke history, preferences).
        nlu_engine: intent/slot extractor with scoring utilities.

    Returns:
        Tuple of the reply text and the (mutated) state dict.
    """
    if text:
        intent, form = nlu_engine.process_text(text)
    else:
        # Empty input (e.g. a /start command) is treated as a greeting.
        intent = 'hello'
        form = {}

    def random_joke():
        # Sample up to 10 random candidate ids, then re-rank them by user
        # preference without exploration noise.
        random_jokes = Counter(
            {random.randint(0,
                            len(JOKES) - 1): 1
             for i in range(10)})
        joke_id = rerank_jokes(random_jokes, state, nlu_engine, noise=0)[0]
        state['joke_id'] = joke_id
        state['jokes'] = None
        return JOKES[joke_id]

    def choose_joke(item_id):
        # Serve the item_id-th joke from the current candidate list.
        joke_id = state['jokes'][item_id]
        state['joke_id'] = joke_id
        state['item_id'] = item_id
        return JOKES[joke_id]

    if intent == 'hello':
        response = 'Привет! Я бот @a_nek_bot, рассказываю анекдоты. ' \
                   '\nСкажите, например, "расскажи анекдот про щуку".' \
                   '\nСкажите "лайк" или "дизлайк", чтобы выразить отношение к шутке.' \
                   '\nСкажите "ещё", чтобы получить следующий анекдот.'
    elif intent == 'find_joke':
        if form.get('topic'):
            jokes = rerank_jokes(
                find_jokes(form['topic'], index=INVERSE_INDEX), state,
                nlu_engine)
            if not jokes:
                response = 'Простите, ничего не нашла. ' \
                           f'Зато вот какая шутка есть: \n{random_joke()}'
            else:
                state['jokes'] = jokes
                response = choose_joke(0)
        else:
            response = random_joke()
    elif intent == 'next':
        if 'item_id' in state and state.get(
                'jokes') and state['item_id'] + 1 < len(state['jokes']):
            response = choose_joke(state['item_id'] + 1)
        else:
            response = 'Я забыла, что там дальше. ' \
                f'Зато вот какая шутка есть: \n{random_joke()}'
    elif intent in {'like', 'dislike'}:
        # BUG FIX: joke id 0 is a valid id but falsy, so a plain truthiness
        # check silently dropped feedback for it; compare against None.
        if state.get('joke_id') is not None:
            vec = nlu_engine.text2vec(JOKES[state['joke_id']])
            if intent == 'like':
                sign = +1
                response = 'Запомню, что вам такое нравится!'
            else:
                response = 'Запомню, что вам такое не нравится!'
                sign = -1
            # Update both the global preference vector and the per-user one.
            nlu_engine.update_coef(vec,
                                   GLOBAL_COEF,
                                   lr=GLOBAL_LEARNING_RATE,
                                   sign=sign)
            state['coef'] = nlu_engine.update_coef(vec,
                                                   get_coef(state),
                                                   lr=LOCAL_LEARNING_RATE,
                                                   sign=sign)
        else:
            response = 'Я забыла, о чём мы говорили. ' \
                f'Зато вот какая шутка есть: \n{random_joke()}'
    else:
        response = 'Скажите мне "расскажи анекдот", и я смешно пошучу'
    return response, state
from flask import Flask, request
import credentials
from nlu import NLU
import warnings
import json

# Flask app serving the Facebook Messenger webhook.
app = Flask(__name__)

FB_API_URL = 'https://graph.facebook.com/v2.6/me/messages'
VERIFY_TOKEN = credentials.fb_verify_token  # <paste your verify token here>
PAGE_ACCESS_TOKEN = credentials.fb_access_token  # paste your page access token here>"

warnings.filterwarnings("ignore")
# Train the NLU model once at import time; get_response() reuses it.
rasa_nlu = NLU()
rasa_nlu.training()


def convertResponse(response):
    """Extract the 'respond' field from a JSON-encoded response string."""
    return json.loads(response)["respond"]


def get_response(message):
    """Ask the trained NLU model for a reply and unwrap it to plain text."""
    raw = rasa_nlu.get_response(message)
    unwrapped = convertResponse(raw)
    return '{}'.format(unwrapped)


def get_bot_response(message):
    """This is just a dummy function, returning a variation of what
    the user said. Replace this function with one connected to chatbot."""
Example #22
0
import argparse
import os
import telebot
from collections import defaultdict
from flask import Flask, request

from logic import respond
from nlu import NLU

# Telegram credentials and the public base URL come from the environment.
TOKEN = os.getenv('TOKEN')
BASE_URL = os.getenv('BASE_URL')
WEBHOOK_URL = '/telebot_webhook/{}'.format(TOKEN)
bot = telebot.TeleBot(TOKEN)
# One shared NLU engine; per-chat dialog state is keyed by chat id.
nlu_engine = NLU()
states = defaultdict(dict)  # todo: keep them in a database if possible

server = Flask(__name__)


# basic responder for Telegram bot
@bot.message_handler()
def respond_in_telegram(message):
    """Route an incoming Telegram message through the dialog logic and reply."""
    uid = message.chat.id
    # Fetch (or initialize) this chat's state and stamp it with the chat id.
    state = states.get(uid, {})
    state['uid'] = uid
    response, state = respond(message.text, state, nlu_engine=nlu_engine)
    states[uid] = state
    bot.send_message(chat_id=message.chat.id, text=response)


@server.route(WEBHOOK_URL, methods=['POST'])