Example #1
import logging

from flask import Flask, request
from twilio.twiml.messaging_response import MessagingResponse

from mindmeld.components import NaturalLanguageProcessor
from mindmeld.components.dialogue import Conversation


class WhatsappBotServer:
    """
    A sample server class for Whatsapp integration with any MindMeld application
    """

    def __init__(self, name, app_path, nlp=None):
        """
        Args:
            name (str): The name of the server.
            app_path (str): The path of the MindMeld application.
            nlp (NaturalLanguageProcessor): MindMeld NLP component; if None, one is
              loaded from the app path.
        """
        self.app = Flask(name)
        if not nlp:
            self.nlp = NaturalLanguageProcessor(app_path)
            self.nlp.load()
        else:
            self.nlp = nlp
        self.conv = Conversation(nlp=self.nlp, app_path=app_path)
        self.logger = logging.getLogger(__name__)

        @self.app.route("/", methods=["POST"])
        def handle_message():  # pylint: disable=unused-variable
            # Read the incoming WhatsApp message from the Twilio webhook and reply
            # with the MindMeld conversation's first response as TwiML
            incoming_msg = request.values.get('Body', '').lower()
            resp = MessagingResponse()
            msg = resp.message()
            response_text = self.conv.say(incoming_msg)[0]
            msg.body(response_text)
            return str(resp)

    def run(self, host="localhost", port=7150):
        self.app.run(host=host, port=port)
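
# A minimal usage sketch; the server name, app path, and port below are
# illustrative placeholders, not part of the original snippet.
if __name__ == "__main__":
    server = WhatsappBotServer(name="whatsapp-bot", app_path=".")
    server.run(host="localhost", port=7150)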
Example #2
def test_model_accuracies_are_similar_before_and_after_caching(home_assistant_app_path):
    # clear model cache
    model_cache_path = MODEL_CACHE_PATH.format(app_path=home_assistant_app_path)
    try:
        shutil.rmtree(model_cache_path)
    except FileNotFoundError:
        pass

    # Make sure no cache exists
    assert os.path.exists(model_cache_path) is False
    nlp = NaturalLanguageProcessor(home_assistant_app_path)
    nlp.build(incremental=True)
    nlp.dump()

    entity_eval = (
        nlp.domains["times_and_dates"]
        .intents["change_alarm"]
        .entity_recognizer.evaluate()
    )

    role_eval = (
        nlp.domains["times_and_dates"]
        .intents["change_alarm"]
        .entities["sys_time"]
        .role_classifier.evaluate()
    )

    entity_accuracy_no_cache = entity_eval.get_accuracy()
    role_accuracy_no_cache = role_eval.get_accuracy()

    # Load the models from one of the cached timestamp directories
    example_cache = os.listdir(model_cache_path)[0]
    nlp = NaturalLanguageProcessor(home_assistant_app_path)
    nlp.load(example_cache)

    # make sure cache exists
    assert os.path.exists(model_cache_path) is True

    entity_eval = (
        nlp.domains["times_and_dates"]
        .intents["change_alarm"]
        .entity_recognizer.evaluate()
    )

    role_eval = (
        nlp.domains["times_and_dates"]
        .intents["change_alarm"]
        .entities["sys_time"]
        .role_classifier.evaluate()
    )

    entity_accuracy_cached = entity_eval.get_accuracy()
    role_accuracy_cached = role_eval.get_accuracy()

    assert role_accuracy_no_cache == role_accuracy_cached
    assert entity_accuracy_no_cache == entity_accuracy_cached
Example #3
def test_model_cache_files_present_after_second_incremental_build(
        kwik_e_mart_app_path):
    # clear model cache
    model_cache_path = MODEL_CACHE_PATH.format(app_path=kwik_e_mart_app_path)
    try:
        shutil.rmtree(model_cache_path)
    except FileNotFoundError:
        pass

    # Make sure no cache exists
    assert os.path.exists(model_cache_path) is False
    nlp = NaturalLanguageProcessor(kwik_e_mart_app_path)
    nlp.build(incremental=True)
    nlp.dump()
    initial_timestamp = nlp.incremental_timestamp

    nlp = NaturalLanguageProcessor(kwik_e_mart_app_path)
    nlp.build(incremental=True)
    nlp.dump()
    new_timestamp = nlp.incremental_timestamp

    # Both cached model versions should still be loadable
    nlp.load(initial_timestamp)
    nlp.load(new_timestamp)
Example #4
def test_model_accuracies_are_similar_before_and_after_caching(
        kwik_e_mart_app_path):
    # clear model cache
    model_cache_path = MODEL_CACHE_PATH.format(app_path=kwik_e_mart_app_path)
    try:
        shutil.rmtree(model_cache_path)
    except FileNotFoundError:
        pass

    # Make sure no cache exists
    assert os.path.exists(model_cache_path) is False
    nlp = NaturalLanguageProcessor(kwik_e_mart_app_path)
    nlp.build(incremental=True)
    nlp.dump()

    intent_eval = nlp.domains["store_info"].intent_classifier.evaluate()
    entity_eval = (nlp.domains["store_info"].intents["get_store_hours"].
                   entity_recognizer.evaluate())
    intent_accuracy_no_cache = intent_eval.get_accuracy()
    entity_accuracy_no_cache = entity_eval.get_accuracy()

    # Load the models from one of the cached timestamp directories
    example_cache = os.listdir(model_cache_path)[0]
    nlp = NaturalLanguageProcessor(kwik_e_mart_app_path)
    nlp.load(example_cache)

    # make sure cache exists
    assert os.path.exists(model_cache_path) is True

    intent_eval = nlp.domains["store_info"].intent_classifier.evaluate()
    entity_eval = (nlp.domains["store_info"].intents["get_store_hours"].
                   entity_recognizer.evaluate())
    intent_accuracy_cached = intent_eval.get_accuracy()
    entity_accuracy_cached = entity_eval.get_accuracy()

    assert intent_accuracy_no_cache == intent_accuracy_cached
    assert entity_accuracy_no_cache == entity_accuracy_cached
Example #5
class WhatsappBotServer:

    def __init__(self, name, app_path, nlp=None):
        """
        Args:
            name (str): The name of the server.
            app_path (str): The path of the MindMeld application.
            nlp (NaturalLanguageProcessor): MindMeld NLP component; if None, one is
              loaded from the app path.
        """
        self.firebase = firebaseHelper()
        self.app = Flask(name)
        if not nlp:
            self.nlp = NaturalLanguageProcessor(app_path)
            self.nlp.load()
        else:
            self.nlp = nlp
        self.conv = Conversation(nlp=self.nlp, app_path=app_path)
        self.logger = logging.getLogger(__name__)
        self.url = None

        @self.app.route("/", methods=["POST"])
        def handle_message():  # pylint: disable=unused-variable
            # Get the sender's number from the incoming webhook payload
            # (the 'From' value looks like "whatsapp:+<number>")
            id = request.values.get('From', '')
            id = id.split('+')[1]
            # print(request.values)  # uncomment this to dig deeper
            # Register the user in Firebase on first contact
            if not self.firebase.existID(id):
                self.firebase.createID(id)

            incoming_msg = request.values.get('Body', '').lower()
            location = {
                'Latitude': request.values.get('Latitude', ''),
                'Longitude': request.values.get('Longitude', '')
            }
            # Coordinates present means the user shared a location
            if location['Latitude'] and location['Longitude']:
                intent = l_t.getIntent()
                print(intent)
                self.firebase.setCurrLocation(location, id)
                resp = MessagingResponse()
                msg = resp.message()
                params = dict(dynamic_resource=dict(id=id))
                if intent == 'loc_for_source':
                    incoming_msg = "source for location"
                elif intent == 'loc_for_hotel':
                    incoming_msg = "location for hotel"
                elif intent == 'loc_for_food':
                    incoming_msg = "location for food"
                else:
                    incoming_msg = "general location"
                try:
                    response_text = self.conv.say(incoming_msg, params=params)[0]
                    # A response may bundle several messages separated by '~'
                    for part in response_text.split("~"):
                        if part:
                            sendMessage(part, id)
                except IndexError:
                    msg.body("Sorry, I didn't understand that.")
            else:
                resp = MessagingResponse()
                msg = resp.message()
                # Pass the id of the user making the query as a dynamic resource;
                # if a target dialogue state is pending, pin the conversation to it
                target = nth.getTarget()
                if target is None:
                    params = dict(dynamic_resource=dict(id=id))
                else:
                    params = dict(dynamic_resource=dict(id=id), target_dialogue_state=target)
                try:
                    response_text = self.conv.say(incoming_msg, params=params)[0]
                    for part in response_text.split("~"):
                        if part:
                            sendMessage(part, id)
                except IndexError:
                    msg.body("Sorry, I didn't understand that.")
            return str(resp)

        def sendMessage(msg, number):
            # A bare URL is held back and attached as media to the next text message.
            # Replace the from_ number below with your own Twilio WhatsApp number.
            valid = validators.url(msg)
            if valid:
                self.url = msg
            else:
                if self.url:
                    client.messages.create(
                        body=msg,
                        from_="whatsapp:+14155238886",
                        to="whatsapp:+" + str(number),
                        media_url=[self.url],
                    )
                    self.url = None
                else:
                    client.messages.create(
                        body=msg,
                        from_="whatsapp:+14155238886",
                        to="whatsapp:+" + str(number),
                    )

    def run(self, host="localhost", port=7150):
        self.app.run(host=host, port=port)

    def start_remainder(self):
        remainder_service = remainderHelper(self.firebase)
        remainder_service.start(self.firebase.getReminders())
Example #6
"""
Pre-annotates a file of queries with entity predictions from a trained MindMeld model.

*Assumes models have already been built (loads stored model)
"""

import argparse
from mindmeld.components import NaturalLanguageProcessor

parser = argparse.ArgumentParser()
parser.add_argument("input_file",
                    type=str,
                    help="path to a file of queries, one per line")
args = parser.parse_args()
input_file = args.input_file

nlp = NaturalLanguageProcessor(
    '../mindmeld-blueprints/blueprints/video_discovery')
nlp.load()

with open(input_file, 'r') as queries:
    with open('pre_annotate.txt', 'w+') as predictions:
        for query in queries:
            query = query.rstrip()
            prediction = nlp.domains["video_content"].intents["browse"]\
                .entity_recognizer.predict(query)
            new_string = ''
            counter = 0
            for entity in prediction:
                entity_text = entity.text
                entity_start = entity.span.start
                entity_end = entity.span.end
                entity_type = entity.entity.type
                new_string += query[counter:entity_start]
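                # A plausible completion of the truncated loop above, assuming the
                # goal is MindMeld's inline "{entity text|entity_type}" annotation format
                new_string += "{" + entity_text + "|" + entity_type + "}"
                counter = entity_end + 1
            # Append any text after the last entity and write out the annotated query
            new_string += query[counter:]
            predictions.write(new_string + "\n")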