def test_all_classifier_are_unique_for_incremental_builds(home_assistant_app_path):
    """Verifies that every entity and role classifier cached during an
    incremental build gets its own unique hash."""
    nlp = NaturalLanguageProcessor(home_assistant_app_path)
    nlp.build(incremental=True)

    example_cache = os.listdir(
        MODEL_CACHE_PATH.format(app_path=home_assistant_app_path)
    )[0]
    unique_hashes = set()

    for domain in nlp.domains:
        for intent in nlp.domains[domain].intents:
            _, cached_path = get_entity_model_paths(
                home_assistant_app_path, domain, intent, timestamp=example_cache
            )
            with open(cached_path + ".hash", "r") as hash_file:
                hash_val = hash_file.read()

            assert hash_val not in unique_hashes
            unique_hashes.add(hash_val)

            for entity in (
                nlp.domains[domain].intents[intent].entity_recognizer.entity_types
            ):
                _, cached_path = get_role_model_paths(
                    home_assistant_app_path,
                    domain,
                    intent,
                    entity,
                    timestamp=example_cache,
                )
                with open(cached_path + ".hash", "r") as hash_file:
                    hash_val = hash_file.read()

                assert hash_val not in unique_hashes
                unique_hashes.add(hash_val)
def test_model_accuracies_are_similar_before_and_after_caching(home_assistant_app_path):
    # clear model cache
    model_cache_path = MODEL_CACHE_PATH.format(app_path=home_assistant_app_path)
    try:
        shutil.rmtree(model_cache_path)
    except FileNotFoundError:
        pass

    # Make sure no cache exists
    assert os.path.exists(model_cache_path) is False

    nlp = NaturalLanguageProcessor(home_assistant_app_path)
    nlp.build(incremental=True)
    nlp.dump()

    entity_eval = (
        nlp.domains["times_and_dates"]
        .intents["change_alarm"]
        .entity_recognizer.evaluate()
    )
    role_eval = (
        nlp.domains["times_and_dates"]
        .intents["change_alarm"]
        .entities["sys_time"]
        .role_classifier.evaluate()
    )
    entity_accuracy_no_cache = entity_eval.get_accuracy()
    role_accuracy_no_cache = role_eval.get_accuracy()

    example_cache = os.listdir(model_cache_path)[0]
    nlp = NaturalLanguageProcessor(home_assistant_app_path)
    nlp.load(example_cache)

    # make sure cache exists
    assert os.path.exists(model_cache_path) is True

    entity_eval = (
        nlp.domains["times_and_dates"]
        .intents["change_alarm"]
        .entity_recognizer.evaluate()
    )
    role_eval = (
        nlp.domains["times_and_dates"]
        .intents["change_alarm"]
        .entities["sys_time"]
        .role_classifier.evaluate()
    )
    entity_accuracy_cached = entity_eval.get_accuracy()
    role_accuracy_cached = role_eval.get_accuracy()

    assert role_accuracy_no_cache == role_accuracy_cached
    assert entity_accuracy_no_cache == entity_accuracy_cached
def test_model_cache_files_present_after_second_incremental_build(
    kwik_e_mart_app_path,
):
    # clear model cache
    model_cache_path = MODEL_CACHE_PATH.format(app_path=kwik_e_mart_app_path)
    try:
        shutil.rmtree(model_cache_path)
    except FileNotFoundError:
        pass

    # Make sure no cache exists
    assert os.path.exists(model_cache_path) is False

    nlp = NaturalLanguageProcessor(kwik_e_mart_app_path)
    nlp.build(incremental=True)
    nlp.dump()
    initial_timestamp = nlp.incremental_timestamp

    nlp = NaturalLanguageProcessor(kwik_e_mart_app_path)
    nlp.build(incremental=True)
    nlp.dump()
    new_timestamp = nlp.incremental_timestamp

    # loading from both timestamps verifies that the first build's cache
    # files are still present after the second incremental build
    nlp.load(initial_timestamp)
    nlp.load(new_timestamp)
def test_model_accuracies_are_similar_before_and_after_caching(
    kwik_e_mart_app_path,
):
    # clear model cache
    model_cache_path = MODEL_CACHE_PATH.format(app_path=kwik_e_mart_app_path)
    try:
        shutil.rmtree(model_cache_path)
    except FileNotFoundError:
        pass

    # Make sure no cache exists
    assert os.path.exists(model_cache_path) is False

    nlp = NaturalLanguageProcessor(kwik_e_mart_app_path)
    nlp.build(incremental=True)
    nlp.dump()

    intent_eval = nlp.domains["store_info"].intent_classifier.evaluate()
    entity_eval = (
        nlp.domains["store_info"]
        .intents["get_store_hours"]
        .entity_recognizer.evaluate()
    )
    intent_accuracy_no_cache = intent_eval.get_accuracy()
    entity_accuracy_no_cache = entity_eval.get_accuracy()

    example_cache = os.listdir(model_cache_path)[0]
    nlp = NaturalLanguageProcessor(kwik_e_mart_app_path)
    nlp.load(example_cache)

    # make sure cache exists
    assert os.path.exists(model_cache_path) is True

    intent_eval = nlp.domains["store_info"].intent_classifier.evaluate()
    entity_eval = (
        nlp.domains["store_info"]
        .intents["get_store_hours"]
        .entity_recognizer.evaluate()
    )
    intent_accuracy_cached = intent_eval.get_accuracy()
    entity_accuracy_cached = entity_eval.get_accuracy()

    assert intent_accuracy_no_cache == intent_accuracy_cached
    assert entity_accuracy_no_cache == entity_accuracy_cached
def test_df_converter():
    df_project_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "dialogflow_sample_project"
    )

    # This is the dialogflow app converted to mindmeld app
    mm_df_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "mm_df_converted_project"
    )

    df_init = DialogflowConverter(df_project_path, mm_df_path)
    df_init.convert_project()

    mm_df_nlp = NaturalLanguageProcessor(app_path=mm_df_path)
    mm_df_nlp.build()

    # check to make sure the NLP object contains the correct hierarchy
    assert set(mm_df_nlp.domains.keys()) == {"app_specific", "unrelated"}

    assert set(mm_df_nlp.domains["app_specific"].intents.keys()) == {
        "accountopen_en",
        "accountbalancecheck_en",
        "accountearningcheck_context__earning_date_en",
        "transfermoney_no_en",
        "accountbalancecheck_context__account_en",
        "transfermoney_yes_en",
        "transfermoney_en",
        "transferamountcheck_en",
        "paymentdue_date_en",
        "accountspendingcheck_context__spending_date_en",
        "transferdatecheck_en",
        "accountspendingcheck_en",
        "transfersendercheck_en",
        "accountbalancecheck_context__balance_en",
        "accountearningcheck_en",
    }

    assert set(mm_df_nlp.domains["unrelated"].intents.keys()) == {
        "default_welcome_intent_en",
        "default_fallback_intent_en",
    }

    entities = set()
    for domain in mm_df_nlp.domains:
        for intent in mm_df_nlp.domains[domain].intents:
            for entity in mm_df_nlp.domains[domain].intents[intent].entities:
                entities.add(entity)

    for expected_entity in {
        "category_en",
        "transfer_type_en",
        "merchant_en",
        "account_en",
    }:
        assert expected_entity in entities

    mm_df_app = importlib.import_module("mm_df_converted_project").app
    mm_df_app.lazy_init(mm_df_nlp)

    conv = TestConversation(app=mm_df_app)
    conv.process("what is my balance")
    conv.assert_text("Here's your latest balance:")
    conv.assert_domain("app_specific")
    conv.assert_intent("accountbalancecheck_en")
    conv.assert_frame({})

    conv.process("when is the due date")
    conv.assert_text("The due date is:")
    conv.assert_domain("app_specific")
    conv.assert_intent("paymentdue_date_en")
    conv.assert_frame({})

    conv.process("transfer money")
    conv.assert_text("Sure. Transfer from which account?")

    conv.process("checking account")
    conv.assert_text("To which account?")

    conv.process("transfer to savings account")
    conv.assert_text("And, how much do you want to transfer?")

    conv.process("transfer $200")
    conv.assert_text(
        "All right. So, you're transferring $200 from your checking to a savings. Is that right?"
    )

    conv.process("hello!")
    conv.assert_text(
        [
            "Hello, thanks for choosing ACME Bank.",
            "Hello. Welcome to ACME Bank.",
        ]
    )

    conv.process("I dont know what the laptop")
    conv.assert_text(
        [
            "Sorry, I didn’t get that.",
            "I'm afraid I don't understand.",
            "Sorry, say that again?",
            "Sorry, can you say that again?",
            "I didn't get that. Can you say it again?",
            "Sorry, could you say that again?",
            "Sorry, can you tell me again?",
            "Sorry, tell me one more time?",
            "Sorry, can you say that again?",
        ]
    )

    # delete generated files
    shutil.rmtree(mm_df_path)
# -*- coding: utf-8 -*-
import os

from flask import Flask

from mindmeld.components import NaturalLanguageProcessor
from mindmeld.components.dialogue import Conversation
from mindmeld.bot import WebexBotServer
from mindmeld import configure_logs

if __name__ == '__main__':
    app = Flask(__name__)

    # Create web hook here: https://developer.webex.com/docs/api/v1/webhooks/create-a-webhook
    WEBHOOK_ID = os.environ.get('WEBHOOK_ID')

    # Create bot access token here: https://developer.webex.com/my-apps/new
    ACCESS_TOKEN = os.environ.get('BOT_ACCESS_TOKEN')

    configure_logs()
    nlp = NaturalLanguageProcessor('.')
    nlp.build()
    conv = Conversation(nlp=nlp, app_path='.')

    server = WebexBotServer(app, WEBHOOK_ID, ACCESS_TOKEN, conv)

    port_number = 8080
    print('Running server on port {}...'.format(port_number))

    server.run(host='localhost', port=port_number)
def home_assistant_nlp(home_assistant_app_path):
    """Provides a built processor instance"""
    nlp = NaturalLanguageProcessor(app_path=home_assistant_app_path)
    nlp.build()
    nlp.dump()
    return nlp
def kwik_e_mart_nlp(kwik_e_mart_app_path):
    """Provides a built processor instance"""
    nlp = NaturalLanguageProcessor(app_path=kwik_e_mart_app_path)
    nlp.build()
    nlp.dump()
    return nlp
def food_ordering_nlp(food_ordering_app_path):
    """Provides a built processor instance"""
    nlp = NaturalLanguageProcessor(app_path=food_ordering_app_path)
    nlp.build()
    nlp.dump()
    return nlp