def test_other_special_in_sequence(self):
    output = check_parse_tree(
        " test <an_extra> [op] {test_special=4}",
        Sequence([Literal(u"test"), extras["an_extra"],
                  Optional(Literal(u"op"))]),
    )
    assert output.test_special == 4
    assert all(getattr(child, 'test_special', None) is None
               for child in output.children)
class MyBasicRule(BasicRule):
    element = Repetition(
        Alternative((
            Literal("test one", value=Function(lambda: func(1))),
            Literal("test two", value=Function(lambda: func(2))),
            Literal("test three", value=Function(lambda: func(3))),
        )),
        1, 5
    )
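# Hedged usage sketch (not part of the original snippet): loading a BasicRule
# subclass such as MyBasicRule above into a grammar.  The grammar name and the
# func() callback are assumptions; the class body only creates lambdas, so
# func must exist before a recognition fires, not before the class is defined.
from dragonfly import Grammar

func = print                         # stand-in for the real callback
grammar = Grammar("basic_rule_example")
grammar.add_rule(MyBasicRule())
grammar.load()                       # requires a speech engine to be available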
def test_alternative_parens(self):
    check_parse_tree(
        "( test |[op] <an_extra>)",
        Alternative([
            Literal(u"test"),
            Sequence([Optional(Literal(u"op")), extras["an_extra"]]),
        ]),
    )
def test_bool_special_in_alternative(self):
    output = check_parse_tree(
        "foo | bar {test_special} | baz",
        Alternative([
            Literal(u"foo"),
            Literal(u"bar"),
            Literal(u"baz"),
        ]),
    )
    assert getattr(output.children[0], 'test_special', None) is None
    assert output.children[1].test_special is True
    assert getattr(output.children[2], 'test_special', None) is None
def test_get_engine_sapi5_is_usable(self):
    """ Verify that the sapi5 engine is usable. """
    engine = get_engine()
    self.assertTrue(isinstance(engine, EngineBase))
    self.assertEqual("sapi5", engine.name)

    engine.speak("testing WSR")

    from dragonfly import Literal, Sequence
    from dragonfly.test import ElementTester
    seq = Sequence([Literal("hello"), Literal("world")])
    tester = ElementTester(seq, engine=engine)
    results = tester.recognize("hello world")
    self.assertEqual([u"hello", u"world"], results)
def test_dictation(self):
    # Test dictation separately for SAPI5 because test_dictation.py
    # won't work with it.
    from dragonfly import Dictation, Literal, Sequence
    from dragonfly.test import ElementTester, RecognitionFailure
    seq = Sequence([Literal("hello"), Dictation("text")])
    tester = ElementTester(seq)

    # Test one word.
    results = tester.recognize("hello world")
    assert results[0] == "hello"

    # Verify that recognition returned a dictation result.
    dictation = results[1]
    if not isinstance(dictation, DictationContainerBase):
        message = (u"Expected recognition result to be a dictation"
                   u" container, but received %r"
                   % (repr(dictation).decode("windows-1252"),))
        self.fail(message.encode("windows-1252"))

    # Verify that the dictation container converts and encodes successfully.
    self.assertEqual(str(dictation), "world")
    self.assertEqual(text_type(dictation), "world")
    self.assertTrue(isinstance(repr(dictation), string_types))

    # Test an incomplete utterance.
    results = tester.recognize("hello")
    assert results is RecognitionFailure
def __init__(self, entities):
    """TODO: to be defined. """
    self._entities = DictList('bring_me_base')
    if isinstance(entities, dict):
        self._entities.set(entities)

    self.mapping = {
        "bring me <entity>": Function(self.bring_me),
        "<entity_type> to bring me as <entity_name>":
            Function(self.bring_me_as),
        "remove <entity_name> from bring me":
            Function(self.bring_me_remove),
    }
    self.extras = [
        DictListRef("entity", self._entities),
        Literal(self.type, "entity_type"),
        Dictation("entity_name").apply(
            lambda key: re.sub(r'[^A-Za-z\'\s]+', '', key).lower()),
    ]
    self._subscribers = []
    super().__init__()
def test_literal(self):
    """ Verify that the text engine is usable. """
    self.engine.speak("testing text")
    tester = ElementTester(Literal("hello world"))
    results = tester.recognize("hello world")
    assert results == "hello world"

    # Check that recognition failure is possible.
    results = tester.recognize("goodbye")
    assert results is RecognitionFailure
def test_get_engine_natlink_is_usable(self):
    """ Verify that the natlink engine is usable. """
    engine = get_engine("natlink")
    assert isinstance(engine, EngineBase)
    assert engine.name == "natlink"

    engine.speak("testing natlink")

    from dragonfly import Literal
    from dragonfly.test import ElementTester
    tester = ElementTester(Literal("hello world"))
    results = tester.recognize("hello world")
    assert results == "hello world"
def test_get_engine_automatic_is_usable(self):
    """ Verify that the automatically selected engine is usable. """
    engine = get_engine()
    engine.connect()
    try:
        engine.speak("testing automatic")

        from dragonfly import Literal
        from dragonfly.test import ElementTester
        tester = ElementTester(Literal("hello world"))
        results = tester.recognize("hello world")
        assert results == "hello world"
    finally:
        engine.disconnect()
def test_unicode_literals(self):
    """ Verify that the text engine can mimic literals using
        non-ascii characters. """
    tester = ElementTester(Literal(u"Привет, как дела?"))

    # Test that strings and Unicode objects can be used.
    results = tester.recognize("Привет, как дела?")
    assert results == u"Привет, как дела?"
    results = tester.recognize(u"Привет, как дела?")
    assert results == u"Привет, как дела?"

    # Check that recognition failure is possible.
    results = tester.recognize(u"до свидания")
    assert results is RecognitionFailure
def test_list_grammars(self):
    """ Verify that the 'list_grammars' RPC method works correctly. """
    # Load a grammar with three rules and check that the RPC returns the
    # correct data for them.
    g = Grammar("list_grammars_test")
    g.add_rule(CompoundRule(name="compound", spec="testing",
                            exported=True))
    g.add_rule(MappingRule(name="mapping", mapping={
        "command a": ActionBase(),
        "command b": ActionBase()
    }))
    g.add_rule(Rule(name="base", element=Literal("hello world"),
                    exported=False))
    g.load()

    response = self.send_request("list_grammars", [])
    expected_grammar_data = {
        "name": g.name, "enabled": True, "active": True, "rules": [
            {"name": "compound", "specs": ["testing"],
             "exported": True, "active": True},
            {"name": "mapping", "specs": ["command a", "command b"],
             "exported": True, "active": True},
            {"name": "base", "specs": ["hello world"],
             "exported": False, "active": True}
        ]
    }
    # Check that the loaded grammar appears in the result. It might not
    # be the only grammar and that is acceptable because dragonfly's
    # tests can be run while user grammars are loaded.
    try:
        self.assertIn("result", response)
        self.assertIn(expected_grammar_data, response["result"])
    finally:
        g.unload()
def _get_dragonfly_rule_element(target, parser, depth=0):
    global RULES
    if target not in parser.rules:
        raise Exception("Target {} not in parser rules".format(target))

    # If already present in RULES, return it.
    if target in RULES:
        return RULES[target]

    # Get the rule.
    rule = parser.rules[target]

    # Iterate over all options.
    option_alternative_list = []
    for opt in rule.options:
        # Iterate over all conjunctions.
        conjunctions_list = []
        for conj in opt.conjuncts:
            # If the conjunction is already present, reuse it.
            if conj.name in RULES:
                conjunctions_list.append(RULES[conj.name])
                continue
            # If it is a variable, go one level deeper.
            if conj.is_variable:
                result = _get_dragonfly_rule_element(conj.name, parser,
                                                     depth + 1)
                if result:
                    conjunctions_list.append(result)
            else:
                # Add a new literal to the list.
                RULES[conj.name] = Literal(conj.name)
                conjunctions_list.append(RULES[conj.name])
                logger.debug("Adding literal rule: %s", conj.name)

        # ToDo: apply caching?
        if len(conjunctions_list) == 1:
            option_alternative_list.append(conjunctions_list[0])
        else:
            option_alternative_list.append(Sequence(conjunctions_list))

    if len(option_alternative_list) == 1:
        RULES[target] = option_alternative_list[0]
    else:
        RULES[target] = Alternative(option_alternative_list)
        logger.debug("Adding alternative rule: %s", target)

    return RULES[target]
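# Hedged usage sketch (not part of the original module): build a dragonfly
# element from a tiny hand-made parser structure and wrap it in a rule.  The
# attribute names (.rules, .options, .conjuncts, .name, .is_variable) follow
# _get_dragonfly_rule_element() above; the stub classes and grammar name are
# assumptions, and the module-level logger is assumed to exist as in the
# original file.
from collections import namedtuple
from dragonfly import BasicRule, Grammar

Conj = namedtuple("Conj", "name is_variable")
Option = namedtuple("Option", "conjuncts")
ParserRule = namedtuple("ParserRule", "options")
StubParser = namedtuple("StubParser", "rules")

RULES = {}  # reset the element cache before building
parser = StubParser(rules={
    "greeting": ParserRule(options=[
        Option(conjuncts=[Conj("hello", False), Conj("world", False)]),
        Option(conjuncts=[Conj("goodbye", False)]),
    ]),
})

element = _get_dragonfly_rule_element("greeting", parser)
grammar = Grammar("parsed_grammar_example")
grammar.add_rule(BasicRule(name="greeting_rule", element=element))
# grammar.load() would additionally require a speech engine to be running.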
def test_basic_rule(self):
    """ Verify that BasicRules can be loaded and recognized correctly. """
    test = []
    func = lambda x: test.append(x)

    # Test using BasicRule directly.
    rule = BasicRule(element=Repetition(
        Alternative((
            Literal("test one", value=Function(lambda: func(1))),
            Literal("test two", value=Function(lambda: func(2))),
            Literal("test three", value=Function(lambda: func(3))),
        )),
        1, 5
    ))
    self.add_rule(rule)
    self.recognize("test one test two test three".split())
    assert test == [1, 2, 3], "BasicRule was not processed correctly"

    # Remove the rule and clear the test list.
    self.grammar.remove_rule(rule)
    del test[:]

    # Test using a sub-class of BasicRule.
    class MyBasicRule(BasicRule):
        element = Repetition(
            Alternative((
                Literal("test one", value=Function(lambda: func(1))),
                Literal("test two", value=Function(lambda: func(2))),
                Literal("test three", value=Function(lambda: func(3))),
            )),
            1, 5
        )

    self.add_rule(MyBasicRule())
    self.recognize("test one test two test three".split())
    assert test == [1, 2, 3], "BasicRule was not processed correctly"
def test_get_engine_sapi5_is_usable(self):
    """ Verify that the sapi5 engine is usable. """
    engine = get_engine("sapi5")
    assert isinstance(engine, EngineBase)
    assert engine.name == "sapi5"

    engine.connect()
    try:
        engine.speak("testing WSR")

        from dragonfly import Literal
        from dragonfly.test import ElementTester
        tester = ElementTester(Literal("hello world"), engine=engine)
        results = tester.recognize("hello world")
        assert results == "hello world", "%r != %r" % (results,
                                                       "hello world")
    finally:
        engine.disconnect()
def test_unicode_literals(self):
    """ Verify that the text engine can mimic literals using
        non-ascii characters. """
    tester = ElementTester(Literal(u"touché"))

    # Test that strings and Unicode objects can be used.
    string = "touché"
    if isinstance(string, six.binary_type):
        encoding = locale.getpreferredencoding()
        string = string.decode("windows-1252").encode(encoding)
    results = tester.recognize(string)
    assert results == u"touché"
    results = tester.recognize(u"touché")
    assert results == u"touché"

    # Check that recognition failure is possible.
    results = tester.recognize(u"jalapeño")
    assert results is RecognitionFailure
def test_multiple_literals(self):
    check_parse_tree("test hello world ", Literal(u"test hello world"))
def test_recognition_observers(self):
    # RecognitionObservers are a bit quirky for the sapi5 engines,
    # so the tests for them are repeated here to handle that.
    from dragonfly import (Integer, Literal, RecognitionHistory,
                           RecognitionObserver)
    from dragonfly.test import ElementTester, RecognitionFailure

    class RecognitionObserverTester(RecognitionObserver):
        """ RecognitionObserver class from the recobs doctests. """

        def __init__(self):
            RecognitionObserver.__init__(self)
            self.waiting = False
            self.words = None

        def on_begin(self):
            self.waiting = True

        def on_recognition(self, words):
            self.waiting = False
            self.words = words

        def on_failure(self):
            self.waiting = False
            self.words = False

    test_recobs = RecognitionObserverTester()
    test_recobs.register()
    results = test_recobs.waiting, test_recobs.words
    assert results == (False, None)

    # Test simple literal element recognitions.
    test_lit = ElementTester(Literal("hello world"))
    assert test_lit.recognize("hello world") == "hello world"
    results = test_recobs.waiting, test_recobs.words
    assert results == (False, (u'hello', u'world'))
    assert test_lit.recognize("hello universe") is RecognitionFailure
    results = test_recobs.waiting, test_recobs.words
    assert results == (False, False)

    # Test Integer element recognitions.
    test_int = ElementTester(Integer(min=1, max=100))
    assert test_int.recognize("seven") == 7
    results = test_recobs.waiting, test_recobs.words
    assert results == (False, (u'seven',))
    assert test_int.recognize("forty seven") == 47
    results = test_recobs.waiting, test_recobs.words
    assert results == (False, (u'forty', u'seven'))
    assert test_int.recognize("one hundred") is RecognitionFailure
    results = test_recobs.waiting, test_recobs.words
    assert results == (False, False)
    assert test_lit.recognize("hello world") == u'hello world'

    # Now test RecognitionHistory.
    history = RecognitionHistory()
    assert test_lit.recognize("hello world") == u'hello world'

    # Not yet registered, so didn't receive previous recognition.
    assert history == []
    history.register()
    assert test_lit.recognize("hello world") == u'hello world'

    # Now registered, so should have received previous recognition.
    assert history == [(u'hello', u'world')]
    assert test_lit.recognize("hello universe") is RecognitionFailure

    # Failed recognitions are ignored, so history is unchanged.
    assert history == [(u'hello', u'world')]
    assert test_int.recognize("eighty six") == 86
    assert history == [(u'hello', u'world'), (u'eighty', u'six')]

    # The RecognitionHistory class allows its maximum length to be set.
    history = RecognitionHistory(3)
    history.register()
    assert history == []
    for i, word in enumerate(["one", "two", "three", "four", "five"]):
        assert test_int.recognize(word) == i + 1
    assert history == [(u'three',), (u'four',), (u'five',)]

    history = RecognitionHistory(1)
    history.register()
    assert history == []
    for i, word in enumerate(["one", "two", "three", "four", "five"]):
        assert test_int.recognize(word) == i + 1
    assert history == [(u'five',)]
def test_parens(self):
    check_parse_tree("(test ) ", Literal(u"test"))
O = "odd" P = "poke" Q = "Quinn" R = "route" R2 = "root" S = "suit" T = "Tang" U = "urge" V = "vote" W = "weed" X = "decks" Y = "yak" Z = "zip" letter_number_alternatives = ( Literal(A), Literal(B), Literal(C), Literal(D), Literal(E), Literal(F), Literal(G), Literal(H), Literal(I), Literal(J), Literal(K), Literal(L), Literal(M), Literal(N), Literal(O), Literal(P),
def test_digit_in_word(self):
    check_parse_tree("F2", Literal(u"F2"))
def test_literal(self):
    check_parse_tree("test ", Literal(u"test"))
def test_sequence(self):
    check_parse_tree(
        " test <an_extra> [op]",
        Sequence([Literal(u"test"), extras["an_extra"],
                  Optional(Literal(u"op"))]),
    )
def test_optional_alternative(self):
    check_parse_tree("[test|test's]",
                     Optional(Alternative([Literal(u"test"),
                                           Literal(u"test's")])))
def test_punctuation(self):
    check_parse_tree(",", Literal(u","))
    check_parse_tree("test's ", Literal(u"test's"))
    check_parse_tree("cul-de-sac ", Literal(u"cul-de-sac"))
def test_unicode(self):
    check_parse_tree(u"touché", Literal(u"touché"))
class AccessibilityRule(MergeRule):
    pronunciation = "accessibility"

    mapping = {
        # Accessibility API mappings.
        "go before <text_position_query>":
            Function(lambda text_position_query: accessibility.move_cursor(
                text_position_query, CursorPosition.BEFORE)),
        "go after <text_position_query>":
            Function(lambda text_position_query: accessibility.move_cursor(
                text_position_query, CursorPosition.AFTER)),
        "words <text_query>": Function(accessibility.select_text),
        "words <text_query> delete": Function(
            lambda text_query: accessibility.replace_text(text_query, "")),
        "replace <text_query> with <replacement>":
            Function(accessibility.replace_text),
    }

    extras = [
        Dictation("replacement"),
        Compound(
            name="text_query",
            spec=("[[([<start_phrase>] <start_relative_position> <start_relative_phrase>|<start_phrase>)] <through>] "
                  "([<end_phrase>] <end_relative_position> <end_relative_phrase>|<end_phrase>)"),
            extras=[
                Dictation("start_phrase", default=""),
                Alternative([Literal("before"), Literal("after")],
                            name="start_relative_position"),
                Dictation("start_relative_phrase", default=""),
                Literal("through", "through", value=True, default=False),
                Dictation("end_phrase", default=""),
                Alternative([Literal("before"), Literal("after")],
                            name="end_relative_position"),
                Dictation("end_relative_phrase", default=""),
            ],
            value_func=lambda node, extras: TextQuery(
                start_phrase=str(extras["start_phrase"]),
                start_relative_position=(
                    CursorPosition[extras["start_relative_position"].upper()]
                    if "start_relative_position" in extras else None),
                start_relative_phrase=str(extras["start_relative_phrase"]),
                through=extras["through"],
                end_phrase=str(extras["end_phrase"]),
                end_relative_position=(
                    CursorPosition[extras["end_relative_position"].upper()]
                    if "end_relative_position" in extras else None),
                end_relative_phrase=str(extras["end_relative_phrase"]))),
        Compound(
            name="text_position_query",
            spec="<phrase> [<relative_position> <relative_phrase>]",
            extras=[
                Dictation("phrase", default=""),
                Alternative([Literal("before"), Literal("after")],
                            name="relative_position"),
                Dictation("relative_phrase", default=""),
            ],
            value_func=lambda node, extras: TextQuery(
                end_phrase=str(extras["phrase"]),
                end_relative_position=(
                    CursorPosition[extras["relative_position"].upper()]
                    if "relative_position" in extras else None),
                end_relative_phrase=str(extras["relative_phrase"]))),
    ]
# coding=utf-8

import unittest
import string

from dragonfly.parsing.parse import spec_parser, CompoundTransformer
from dragonfly import (Compound, Literal, Sequence, Optional, Empty,
                       Alternative)

# ===========================================================================

extras = {"an_extra": Alternative([Literal(u"1"), Literal(u"2")])}


def check_parse_tree(spec, expected):
    tree = spec_parser.parse(spec)
    output = CompoundTransformer(extras).transform(tree)
    assert output.element_tree_string() == expected.element_tree_string()
    return output


class TestLarkParser(unittest.TestCase):

    def test_literal(self):
        check_parse_tree("test ", Literal(u"test"))

    def test_multiple_literals(self):
        check_parse_tree("test hello world ", Literal(u"test hello world"))

    def test_parens(self):
        check_parse_tree("(test ) ", Literal(u"test"))

    def test_punctuation(self):