Example #1
    def evaluate_for_localized(self, runner, run, context, text, localized_test):
        localized_test, errors = runner.substitute_variables(localized_test, context)

        # tokenize our test
        tests = tokenize(localized_test.lower())

        # tokenize our input
        words = tokenize(text.lower())
        raw_words = tokenize(text)

        # run through each of our tests
        matches = set()
        matched_tests = 0
        for test in tests:
            matched = self.find_matches(matches, test, words, raw_words)
            if matched:
                matched_tests += 1

        # we are a match only if every test matches
        if matched_tests == len(tests):
            matches = sorted(list(matches))
            matched_words = " ".join([raw_words[idx] for idx in matches])
            return Test.Result.match(matched_words)
        else:
            return Test.Result.NO_MATCH
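This version of the test only matches when every token of the (lower-cased) test text is found among the input words. Below is a minimal, framework-free sketch of that all-tokens-must-match rule, assuming a tokenize() that simply splits on runs of non-word characters (contains_all is a hypothetical helper, not the project's API):

import re


def tokenize(text):
    # illustrative stand-in: split on runs of non-word characters
    return [w for w in re.split(r"\W+", text) if w]


def contains_all(test, text):
    # hypothetical helper mirroring the all-tokens-must-match rule above
    tests = tokenize(test.lower())
    words = tokenize(text.lower())
    return all(t in words for t in tests)


print(contains_all("red apple", "I bought a RED apple today"))    # True
print(contains_all("red apple", "I bought a green apple today"))  # False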
Example #2
    @classmethod
    def find_and_handle(cls, msg):
        words = tokenize(msg.text)

        # skip if message doesn't have any words
        if not words:
            return False

        # skip if message contact is currently active in a flow
        active_run_qs = FlowRun.objects.filter(is_active=True,
                                               contact=msg.contact,
                                               flow__is_active=True,
                                               flow__is_archived=False)
        active_run = active_run_qs.prefetch_related('steps').order_by(
            "-created_on", "-pk").first()

        if active_run and active_run.flow.ignore_triggers and not active_run.is_completed():
            return False

        # find a matching keyword trigger with an active flow
        trigger = Trigger.objects.filter(org=msg.org,
                                         is_archived=False,
                                         is_active=True,
                                         trigger_type=cls.TYPE_KEYWORD,
                                         flow__is_archived=False,
                                         flow__is_active=True)

        # if message text is only one word, then we can match 'only-word' triggers too
        match_types = (cls.MATCH_FIRST_WORD,
                       cls.MATCH_ONLY_WORD) if len(words) == 1 else (
                           cls.MATCH_FIRST_WORD, )
        trigger = trigger.filter(keyword__iexact=words[0],
                                 match_type__in=match_types)

        # trigger needs to match the contact's groups or be non-group specific
        trigger = trigger.filter(
            Q(groups__in=msg.contact.user_groups.all()) | Q(groups=None))

        trigger = trigger.prefetch_related(
            'groups', 'groups__contacts').order_by('groups__name').first()

        # no matching trigger was found, either for the contact's groups or group-less
        if not trigger:
            return False

        contact = msg.contact

        if not contact.is_test:
            trigger.last_triggered = msg.created_on
            trigger.trigger_count += 1
            trigger.save()

        contact.ensure_unstopped()

        # if we have an associated flow, start this contact in it
        trigger.flow.start([], [contact],
                           start_msg=msg,
                           restart_participants=True)

        return True
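The queryset logic above boils down to: compare the first word of the message against trigger keywords, and allow 'only word' triggers only when the message consists of exactly one word. A rough sketch of that selection logic without the Django ORM (the constants and the matching_triggers helper are illustrative placeholders, not the project's API):

import re

MATCH_FIRST_WORD = "first_word"  # placeholder values, not the real constants
MATCH_ONLY_WORD = "only_word"


def tokenize(text):
    # illustrative stand-in for the project's tokenize()
    return [w for w in re.split(r"\W+", text) if w]


def matching_triggers(msg_text, triggers):
    # triggers: iterable of (keyword, match_type) pairs
    words = tokenize(msg_text)
    if not words:
        return []

    # single-word messages may also satisfy 'only word' triggers
    match_types = (MATCH_FIRST_WORD, MATCH_ONLY_WORD) if len(words) == 1 else (MATCH_FIRST_WORD,)

    return [(kw, mt) for kw, mt in triggers
            if kw.lower() == words[0].lower() and mt in match_types]


triggers = [("join", MATCH_FIRST_WORD), ("stop", MATCH_ONLY_WORD)]
print(matching_triggers("Join the group", triggers))  # [('join', 'first_word')]
print(matching_triggers("stop", triggers))            # [('stop', 'only_word')]
print(matching_triggers("stop sending", triggers))    # []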
Example #3
def __get_words(text, by_spaces):
    """
    Helper function which splits the given text string into words. If by_spaces is false, then text like
    '01-02-2014' will be split into 3 separate words. For backwards compatibility, this is the default for all
    expression functions.
    :param text: the text to split
    :param by_spaces: whether words should be split only by spaces or by punctuation like '-', '.' etc
    """
    if by_spaces:
        splits = regex.split(r"\s+", text, flags=regex.MULTILINE | regex.UNICODE | regex.V0)
        return [split for split in splits if split]  # return only non-empty
    else:
        return tokenize(text)
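The behaviour the docstring describes can be demonstrated directly: with by_spaces a hyphenated date stays in one piece, otherwise punctuation also splits it. A self-contained sketch using the stdlib re module in place of regex, with tokenize() reduced to a simple non-word-character split (the real implementation may differ):

import re


def tokenize(text):
    # simplified stand-in: split on runs of non-word characters
    return [w for w in re.split(r"\W+", text) if w]


def get_words(text, by_spaces):
    # mirrors __get_words above
    if by_spaces:
        return [s for s in re.split(r"\s+", text) if s]
    return tokenize(text)


print(get_words("Born 01-02-2014", by_spaces=True))   # ['Born', '01-02-2014']
print(get_words("Born 01-02-2014", by_spaces=False))  # ['Born', '01', '02', '2014']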
Example #4
    def evaluate_for_localized(self, runner, run, context, text, localized_test):
        localized_test, errors = runner.substitute_variables(localized_test, context)

        # tokenize our test
        tests = tokenize(localized_test.lower())

        # tokenize our input
        words = tokenize(text.lower())
        raw_words = tokenize(text)

        # run through each of our tests
        matches = []
        for test in tests:
            match = self.test_in_words(test, words, raw_words)
            if match:
                matches.append(match)

        # we are a match if at least one test matches
        if len(matches) > 0:
            return Test.Result.match(" ".join(matches))
        else:
            return Test.Result.NO_MATCH
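In contrast to Example #1, this variant succeeds as soon as any single test token is found in the input. Under the same simplified tokenize() assumption, a minimal sketch of that any-token rule (contains_any is a hypothetical name):

import re


def tokenize(text):
    # illustrative stand-in: split on runs of non-word characters
    return [w for w in re.split(r"\W+", text) if w]


def contains_any(test, text):
    # hypothetical helper: True if at least one test token appears in the input
    tests = tokenize(test.lower())
    words = tokenize(text.lower())
    return any(t in words for t in tests)


print(contains_any("red blue", "the sky is blue today"))  # True
print(contains_any("red blue", "the grass is green"))     # False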
Example #5
def __get_words(text, by_spaces):
    """
    Helper function which splits the given text string into words. If by_spaces is false, then text like
    '01-02-2014' will be split into 3 separate words. For backwards compatibility, this is the default for all
    expression functions.
    :param text: the text to split
    :param by_spaces: whether words should be split only by spaces or by punctuation like '-', '.' etc
    """
    if by_spaces:
        splits = regex.split(r'\s+',
                             text,
                             flags=regex.MULTILINE | regex.UNICODE | regex.V0)
        return [split for split in splits if split]  # return only non-empty
    else:
        return tokenize(text)
Example #6
    @classmethod
    def find_and_handle(cls, msg):
        words = tokenize(msg.text)

        # skip if message doesn't have any words
        if not words:
            return False

        # skip if message contact is currently active in a flow
        active_run_qs = FlowRun.objects.filter(
            is_active=True, contact=msg.contact, flow__is_active=True, flow__is_archived=False
        )
        active_run = active_run_qs.order_by("-created_on", "-pk").first()

        if active_run and active_run.flow.ignore_triggers and not active_run.is_completed():
            return False

        # find a matching keyword trigger with an active flow
        trigger = Trigger.objects.filter(
            org=msg.org,
            is_archived=False,
            is_active=True,
            trigger_type=cls.TYPE_KEYWORD,
            flow__is_archived=False,
            flow__is_active=True,
        )

        # if message text is only one word, then we can match 'only-word' triggers too
        match_types = (cls.MATCH_FIRST_WORD, cls.MATCH_ONLY_WORD) if len(words) == 1 else (cls.MATCH_FIRST_WORD,)
        trigger = trigger.filter(keyword__iexact=words[0], match_type__in=match_types)

        # trigger needs to match the contact's groups or be non-group specific
        trigger = trigger.filter(Q(groups__in=msg.contact.user_groups.all()) | Q(groups=None))

        trigger = trigger.prefetch_related("groups", "groups__contacts").order_by("groups__name").first()

        # no matching trigger was found, either for the contact's groups or group-less
        if not trigger:
            return False

        contact = msg.contact
        contact.ensure_unstopped()

        # if we have an associated flow, start this contact in it
        trigger.flow.start([], [contact], start_msg=msg, restart_participants=True)

        return True