Example #1
    def createGrammar():
        sommatoriaExpansion = Literal("sommatoria")
        sommatoriaExpansion.tag = '\\sum_{}^{}'
        sommatoriaRule = PublicRule("sommatoria_main", sommatoriaExpansion)
        indexExpansion = Literal("sommatoria da")
        indexExpansion.tag = None
        indexRule = PublicRule("summation_index", indexExpansion)
        intervalExpansion = Literal("sommatoria da a")
        intervalExpansion.tag = None
        intervalRule = PublicRule("summation_interval", intervalExpansion)
        #setattr section
        setattr(sommatoriaRule, 'node_type', NODE_TYPE.INTERNO)
        setattr(sommatoriaRule, 'request_new_layer', False)
        setattr(sommatoriaRule, 'next_rules_trigger_words', ['da'])
        setattr(sommatoriaRule, 'is_entry_rule', True)
        #-------------------------
        setattr(indexRule, 'node_type', NODE_TYPE.INTERNO)
        setattr(indexRule, 'request_new_layer', True)
        setattr(indexRule, 'next_rules_trigger_words', ['a'])
        setattr(indexRule, 'is_entry_rule', False)
        setattr(indexRule, 'go_to_begin',
                len('\\sum_{}^{}'))  # attribute that specifies whether to do carry-home
        #-------------------------------
        setattr(intervalRule, 'node_type', NODE_TYPE.INTERNO)
        setattr(intervalRule, 'request_new_layer', True)
        setattr(intervalRule, 'next_rules_trigger_words',
                [])  # do not use None: it is not iterable and everything would break
        setattr(intervalRule, 'is_entry_rule', False)

        #grammar creation section
        g = Grammar()
        g.add_rules(sommatoriaRule, indexRule, intervalRule)
        return g
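Note on these createGrammar examples: the extra attributes set via setattr (node_type, request_new_layer, next_rules_trigger_words, is_entry_rule, go_to_begin, leaf_end_cursor_movement) are not part of pyjsgf; they are read back by the surrounding application. NODE_TYPE itself is never defined in these snippets; a minimal sketch of what it might be, assuming a plain enum with Italian member names for internal and leaf nodes:

from enum import Enum

class NODE_TYPE(Enum):
    # Hypothetical reconstruction: the snippets only show that INTERNO marks an
    # internal node (to be completed by further rules) and FOGLIA marks a leaf.
    INTERNO = "interno"
    FOGLIA = "foglia"

Matching against these grammars still goes through the ordinary pyjsgf calls shown in the later examples, e.g. Grammar.find_matching_rules.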
Example #2
    def createGrammar():
        short_rule = Literal("per")  # this defines an expansion, not a rule!
        short_rule.tag = "\\cdot"  # this rule's contribution to the final LaTeX
        long_rule = Literal("moltiplicato per")  #expansion
        long_rule.tag = "\\cdot"
        multiplication_rule = PublicRule("multiplication",
                                         AlternativeSet(short_rule,
                                                        long_rule))  #rule
        #setattr section (4 setattr calls per rule)
        # add the attributes directly on the PublicRule object to mark it as a
        # node (internal or leaf) and describe its place in the grammar graph
        setattr(multiplication_rule, 'node_type', NODE_TYPE.FOGLIA)
        # whether a match of this rule may require creating a new layer
        setattr(multiplication_rule, 'request_new_layer', False)
        # encodes the grammar graph; do not use None, it is not iterable and breaks everything
        setattr(multiplication_rule, 'next_rules_trigger_words', [])
        # whether this rule is an entry point of a graph
        setattr(multiplication_rule, 'is_entry_rule', True)
        # a leaf may set this attribute to say that, after it, the cursor must
        # move by a given amount (typically for a space)
        setattr(multiplication_rule, 'leaf_end_cursor_movement', 1)
        #grammar creation section
        g = Grammar()
        g.add_rule(multiplication_rule)

        return g
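As a quick sanity check, the multiplication grammar above can be exercised with the standard pyjsgf matching API; a minimal sketch, assuming createGrammar is in scope:

g = createGrammar()
# Both spoken forms resolve to the same rule and contribute the same "\cdot" tag.
print(g.find_matching_rules("per"))
print(g.find_matching_rules("moltiplicato per"))

rule = g.get_rule("multiplication")
rule.matches("moltiplicato per")
print(rule.matched_tags)  # expected to include "\cdot"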
Example #3
    def createGrammar():
        integraleDefinitoExpansion = Literal("integrale da")
        integraleDefinitoExpansion.tag = "\\int\\limits_{}^{}"
        definedIntegralRule = PublicRule("defined_integral_main",
                                         integraleDefinitoExpansion)
        integraleDefinitoUpLimitExpansion = Literal("integrale da a")
        integraleDefinitoUpLimitExpansion.tag = None
        upLimitRule = PublicRule("defined_integral_limit",
                                 integraleDefinitoUpLimitExpansion)
        #setattr section
        setattr(definedIntegralRule, 'node_type', NODE_TYPE.INTERNO)
        setattr(definedIntegralRule, 'request_new_layer', True)
        setattr(definedIntegralRule, 'next_rules_trigger_words',
                ['a'])  # do not use None: it is not iterable and everything would break
        setattr(definedIntegralRule, 'is_entry_rule', True)
        setattr(definedIntegralRule, 'go_to_begin',
                len('\\int\\limits_{}^{}'))  # attribute that specifies whether to do carry-home
        #------------------------------
        setattr(upLimitRule, 'node_type', NODE_TYPE.INTERNO)
        setattr(upLimitRule, 'request_new_layer', True)
        setattr(upLimitRule, 'next_rules_trigger_words',
                [])  # do not use None: it is not iterable and everything would break
        setattr(upLimitRule, 'is_entry_rule', False)

        #grammar creation section
        g = Grammar()
        g.add_rules(definedIntegralRule, upLimitRule)
        return g
Example #4
def main():
    # Create a grammar and add some rules to it
    grammar = Grammar()
    name = HiddenRule("name", AlternativeSet("john", "bob", "anna"))

    # greeting is either: 'hey', 'hey there' or 'hello'
    greeting = HiddenRule(
        "greeting",
        AlternativeSet(Sequence("hey", OptionalGrouping("there")), "hello"))

    # parting_phrase is either: 'good bye' or 'see you'
    parting_phrase = HiddenRule("parting_phrase",
                                AlternativeSet("good bye", "see you"))

    # greet is a greeting followed by a name
    greet = PublicRule("greet", Sequence(RuleRef(greeting), RuleRef(name)))

    # goodbye is a parting phrase followed by a name
    goodbye = PublicRule("goodbye",
                         Sequence(RuleRef(parting_phrase), RuleRef(name)))

    grammar.add_rules(name, greeting, parting_phrase, greet, goodbye)

    print("Grammar compiles to the following:")
    print(grammar.compile())

    # Try matching some speech strings
    print_matching(grammar, "hey john")
    print_matching(grammar, "hey there john")
    print_matching(grammar, "see you john")

    # Try matching some hidden rules
    print_matching(grammar, "bob")
    print_matching(grammar, "hey there")
    print_matching(grammar, "good bye")
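print_matching is used throughout these examples but never defined in this collection; it is presumably a small helper along these lines (the exact message format is an assumption):

def print_matching(grammar, speech):
    # Assumed helper: look up and print the rules matching a speech string.
    matching = grammar.find_matching_rules(speech)
    print("Rules matching '%s': %s" % (speech, matching))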
Example #5
    def _init_jsgf_only_grammar(self):
        """
        Method that initialises the grammar to use for rules not containing
        Dictation expansions.

        Override this to use a different grammar class.
        """
        self._jsgf_only_grammar = Grammar(name=self.name)
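As the docstring suggests, a subclass can override this hook to use a different grammar class; a minimal sketch, assuming pyjsgf's RootGrammar is the desired replacement and that DictationGrammar is importable from jsgf.ext:

from jsgf import RootGrammar
from jsgf.ext import DictationGrammar

class RootDictationGrammar(DictationGrammar):
    def _init_jsgf_only_grammar(self):
        # Use a RootGrammar instead of a plain Grammar for the JSGF-only rules.
        self._jsgf_only_grammar = RootGrammar(name=self.name)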
Example #6
    def createGrammar():
        limiteMainExpansion = Literal("limite per")
        limiteMainExpansion.tag = "\\lim{ \\to }"
        limiteMainRule = PublicRule("limite_main", limiteMainExpansion)
        limiteUpP1Expansion = Literal("limite per che")
        limiteUpP1Expansion.tag = None
        limiteUpP1Rule = PublicRule("limite_up_p1", limiteUpP1Expansion)
        limiteUpP2Expansion = Literal("limite per che tende")
        limiteUpP2Expansion.tag = None
        limiteUpP2Rule = PublicRule("limite_up_p2", limiteUpP2Expansion)
        limiteUpP3Expansion = Literal("limite per che tende a")
        limiteUpP3Expansion.tag = None
        limiteUpP3Rule = PublicRule("limite_up_p3", limiteUpP3Expansion)

        #setattr section
        setattr(limiteMainRule, 'node_type', NODE_TYPE.INTERNO)
        setattr(limiteMainRule, 'request_new_layer', True)
        setattr(limiteMainRule, 'next_rules_trigger_words',
                ['che'])  # do not use None: it is not iterable and everything would break
        setattr(limiteMainRule, 'is_entry_rule', True)
        setattr(limiteMainRule, 'go_to_begin',
                len('\\lim{ \\to }'))  # attribute that specifies whether to do carry-home
        #------------------------------
        setattr(limiteUpP1Rule, 'node_type', NODE_TYPE.INTERNO)
        setattr(limiteUpP1Rule, 'request_new_layer', True)
        setattr(limiteUpP1Rule, 'next_rules_trigger_words',
                ['tende'])  # do not use None: it is not iterable and everything would break
        setattr(limiteUpP1Rule, 'is_entry_rule', False)
        #--------------------------------
        setattr(limiteUpP2Rule, 'node_type', NODE_TYPE.INTERNO)
        setattr(limiteUpP2Rule, 'request_new_layer', True)
        setattr(limiteUpP2Rule, 'next_rules_trigger_words',
                ['a'])  # do not use None: it is not iterable and everything would break
        setattr(limiteUpP2Rule, 'is_entry_rule', False)
        #------------------------------------
        setattr(limiteUpP3Rule, 'node_type', NODE_TYPE.INTERNO)
        setattr(limiteUpP3Rule, 'request_new_layer', True)
        setattr(limiteUpP3Rule, 'next_rules_trigger_words',
                [])  # do not use None: it is not iterable and everything would break
        setattr(limiteUpP3Rule, 'is_entry_rule', False)

        #grammar creation section
        g = Grammar()
        g.add_rules(limiteMainRule, limiteUpP1Rule, limiteUpP2Rule,
                    limiteUpP3Rule)
        return g
Example #7
 def createGrammar():
     rule = Literal("valore assoluto di")
     rule.tag = '||'
     absoluteValueRule = PublicRule("absolute_value", rule)
     #setattr section
     setattr(absoluteValueRule, 'node_type',
             NODE_TYPE.INTERNO)  # this means it must eventually be closed by a leaf
     setattr(absoluteValueRule, 'request_new_layer', True)
     setattr(absoluteValueRule, 'next_rules_trigger_words',
             [])  # do not use None: it is not iterable and everything would break
     setattr(absoluteValueRule, 'is_entry_rule', True)
     #grammar creation section
     g = Grammar()
     g.add_rule(absoluteValueRule)
     return g
Example #8
    def createGrammar():
        rule = Literal("gamma")
        rule.tag = "\\gamma"
        gammaRule = PublicRule("gamma", rule)
        #setattr section
        setattr(gammaRule, 'node_type', NODE_TYPE.FOGLIA)
        setattr(gammaRule, 'request_new_layer', False)
        setattr(gammaRule, 'next_rules_trigger_words',
                [])  # do not use None: it is not iterable and everything would break
        setattr(gammaRule, 'is_entry_rule', True)
        #grammar creation section
        g = Grammar()
        g.add_rule(gammaRule)

        return g
Example #9
 def createGrammar():
     rule = Literal("alla")
     rule.tag = '^{}'
     powerRule = PublicRule(
         "power", rule
     )  # deliberately has no tag of its own: writing just the '^' would make no sense
     #setattr section
     setattr(powerRule, 'node_type', NODE_TYPE.INTERNO)
     setattr(powerRule, 'request_new_layer', True)
     setattr(powerRule, 'next_rules_trigger_words',
             [])  # do not use None: it is not iterable and everything would break
     setattr(powerRule, 'is_entry_rule', True)
     #grammar creation section
     g = Grammar()
     g.add_rule(powerRule)
     return g
Example #10
 def test_same_local_and_imported_rule_names(self):
     """ Local rules with the same name have precedence over imported rules. """
     grammar = Grammar("test")
     local_z = Rule("Z", True, "z")
     grammar.add_rule(local_z)
     grammar.add_import(Import("grammars.test1.Z"))
     self.assertEqual(grammar.get_rule("Z"), local_z)
Example #11
    def register_grammar(self, name: str, grammar: Grammar) -> None:
        if name in self._registered_grammars:
            raise KeyError(f'Grammar with name "{name}" is already registered')
        grammar_file = self._tmp_dir / _make_grammar_filename(name)
        with open(grammar_file, 'w') as f:
            f.write(grammar.compile())

        self.decoder.set_jsgf_file(_make_grammar_name(name), str(grammar_file))
        self._registered_grammars.add(name)
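The filename helpers used above are not shown in this snippet; a minimal sketch of what they might look like (the bodies and naming scheme are assumptions, not the original implementation), plus how the registered grammar could then be activated on a pocketsphinx decoder:

def _make_grammar_name(name: str) -> str:
    # Hypothetical: the search name under which the decoder knows this grammar.
    return 'grammar_' + name

def _make_grammar_filename(name: str) -> str:
    # Hypothetical: the on-disk filename for the compiled JSGF grammar.
    return name + '.jsgf'

# After register_grammar(), the grammar search could be activated with
# something like: self.decoder.set_search(_make_grammar_name(name))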
Example #12
    def createGrammar():
        short_expansion = Literal("integrale")
        short_expansion.tag = "\\int"
        integralRule = PublicRule("integral", short_expansion)
        #setattr section
        setattr(integralRule, 'node_type', NODE_TYPE.FOGLIA)
        setattr(integralRule, 'request_new_layer', False)
        setattr(integralRule, 'next_rules_trigger_words',
                [])  # do not use None: it is not iterable and everything would break
        setattr(integralRule, 'is_entry_rule', True)
        # a leaf may set this attribute to say that, after it, the cursor must
        # move by a given amount (typically for a space)
        setattr(integralRule, 'leaf_end_cursor_movement', 1)
        #grammar creation section
        g = Grammar()
        g.add_rule(integralRule)

        return g
Example #13
 def createGrammar():
     rule = Literal("pedice")
     rule.tag = '_{}'
     alternative_rule = Literal("sub")
     alternative_rule.tag = '_{}'
     subscriptRule = PublicRule(
         "subscript", AlternativeSet(rule, alternative_rule)
     )  # deliberately has no tag of its own: writing just the '_' would make no sense
     #setattr section
     setattr(subscriptRule, 'node_type', NODE_TYPE.INTERNO)
     setattr(subscriptRule, 'request_new_layer', True)
     setattr(subscriptRule, 'next_rules_trigger_words',
             [])  # do not use None: it is not iterable and everything would break
     setattr(subscriptRule, 'is_entry_rule', True)
     #grammar creation section
     g = Grammar()
     g.add_rule(subscriptRule)
     return g
Example #14
def main():
    # The Sequence expansion requires all of its children expansions to be spoken
    # in sequence. The OptionalGrouping expansion optionally requires its child
    # expansion to be spoken.

    # Create a public rule using an optional expansion
    rule = PublicRule("greet", Sequence("hey", OptionalGrouping("there")))

    # Create a grammar and add the new rule to it
    grammar = Grammar("g")
    grammar.add_rule(rule)

    # Compile the grammar using compile()
    print("Grammar '%s' compiles to:" % grammar.name)
    print(grammar.compile())

    # Use or do not use the optional word 'there'
    print_matching(grammar, "hey")
    print_matching(grammar, "hey there")
Example #15
def main():
    # Define an open/close file rule.
    open, close = Literal("open"), Literal("close")
    open.tag, close.tag = "OPEN", "CLOSE"
    cmd = PublicRule("command",
                     Sequence(AlternativeSet(open, close), "the file"))

    # Print the tags of the 'command' rule.
    print("Tags: %s\n" % cmd.tags)

    # Initialise a new grammar and add the rule to it.
    g = Grammar()
    g.add_rule(cmd)

    # Print the compiled grammar
    print("Compiled grammar is:\n%s" % g.compile())

    # Find and print rules tagged with "OPEN"
    print("Tagged rules are:\n%s\n" % g.find_tagged_rules("OPEN"))

    # Matching tags can be retrieved using r.get_tags_matching
    # The Rule.matched_tags property can also be used if Rule.matches or
    # Grammar.find_matching_rules has been called first.
    speech = "open the file"
    print("Tags matching '%s' are: %s" %
          (speech, cmd.get_tags_matching(speech)))
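A possible follow-up to the example above, using the Rule.matched_tags property mentioned in the comments (g and cmd as defined in main; matched_tags is only populated after Rule.matches or Grammar.find_matching_rules has been called):

g.find_matching_rules("close the file")
print("Matched tags: %s" % cmd.matched_tags)  # expected to contain "CLOSE"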
Example #16
 def test_resolve_imports_cycles(self):
     """ Grammars that import from one another are resolved correctly.
     """
     memo = {}
     grammar = Grammar("test")
     grammar.add_import(Import("grammars.cycle-test1.rule1"))
     grammar.add_import(Import("grammars.cycle-test2.rule2"))
     grammar.resolve_imports(memo)
     self.assertIn("grammars.cycle-test1", memo)
     self.assertIn("grammars.cycle-test2", memo)
     cycle_test1 = memo["grammars.cycle-test1"]
     cycle_test2 = memo["grammars.cycle-test2"]
     cycle_test1.resolve_imports()
     cycle_test2.resolve_imports()
     expected_environment = {
         "test": grammar,
         "grammars.cycle-test1": cycle_test1,
         "grammars.cycle-test1.rule1": cycle_test1.get_rule("rule1"),
         "grammars.cycle-test1.Y": cycle_test1.get_rule("Y"),
         "grammars.cycle-test2": cycle_test2,
         "grammars.cycle-test2.rule2": cycle_test2.get_rule("rule2"),
         "grammars.cycle-test2.X": cycle_test2.get_rule("X"),
     }
     self.assertIs(cycle_test1.import_environment["grammars.cycle-test2"],
                   cycle_test2)
     self.assertIs(cycle_test2.import_environment["grammars.cycle-test1"],
                   cycle_test1)
     for grammar_ in (cycle_test1, cycle_test2):
         self.assertDictEqual(grammar_.import_environment,
                              expected_environment)
Example #17
    def createGrammar():
        short_expansion = Literal("aperta tonda")
        short_expansion.tag = "("
        long_expansion = Literal("apri parentesi tonda")
        long_expansion.tag = "("
        long_expansion_2 = Literal("aperta parentesi tonda")
        long_expansion_2.tag = "("
        openParentesisRule = PublicRule(
            "open_parenthesis",
            AlternativeSet(short_expansion, long_expansion, long_expansion_2))
        #setattr section
        setattr(openParentesisRule, 'node_type', NODE_TYPE.FOGLIA)
        setattr(openParentesisRule, 'request_new_layer', False)
        setattr(openParentesisRule, 'next_rules_trigger_words',
                [])  # do not use None: it is not iterable and everything would break
        setattr(openParentesisRule, 'is_entry_rule', True)
        #grammar creation section
        g = Grammar()
        g.add_rule(openParentesisRule)

        return g
Example #18
    def createGrammar():
        short_expansion = Literal("chiusa graffa")
        short_expansion.tag = "}"
        long_expansion = Literal("chiudi parentesi graffa")
        long_expansion.tag = "}"
        long_expansion_2 = Literal("chiusa parentesi graffa")
        long_expansion_2.tag = "}"
        openSquareRule = PublicRule(
            "close_brace",
            AlternativeSet(short_expansion, long_expansion, long_expansion_2))
        #setattr section
        setattr(openSquareRule, 'node_type', NODE_TYPE.FOGLIA)
        setattr(openSquareRule, 'request_new_layer', False)
        setattr(openSquareRule, 'next_rules_trigger_words',
                [])  # do not use None: it is not iterable and everything would break
        setattr(openSquareRule, 'is_entry_rule', True)
        #grammar creation section
        g = Grammar()
        g.add_rule(openSquareRule)

        return g
Example #19
    def createGrammar():
        rule = Literal("pi greco")
        rule.tag = "\\pi"
        alternative_rule = Literal("pigreco")
        alternative_rule.tag = "\\pi"
        alternative_rule2 = Literal("p greco")
        alternative_rule2.tag = "\\pi"
        piGrecoRule = PublicRule(
            "pi-greco",
            AlternativeSet(rule, alternative_rule, alternative_rule2))
        #setattr section
        setattr(piGrecoRule, 'node_type', NODE_TYPE.FOGLIA)
        setattr(piGrecoRule, 'request_new_layer', False)
        setattr(piGrecoRule, 'next_rules_trigger_words',
                [])  # do not use None: it is not iterable and everything would break
        setattr(piGrecoRule, 'is_entry_rule', True)
        #grammar creation section
        g = Grammar()
        g.add_rule(piGrecoRule)

        return g
Example #20
 def createGrammar():
     fracExpansion = Literal("frazione")
     fracExpansion.tag = '\\frac{}{}'
     fracRule = PublicRule("fraction_main", fracExpansion)
     numeratorExpansion = Literal("frazione numeratore")
     numeratorExpansion.tag = None
     numeratorRule = PublicRule("fraction_numerator", numeratorExpansion)
     denominatorExpansion = Literal("frazione numeratore denominatore")
     denominatorExpansion.tag = None
     denominatorRule = PublicRule("fraction_denominator",
                                  denominatorExpansion)
     #setattr section
     setattr(fracRule, 'node_type', NODE_TYPE.INTERNO)
     setattr(fracRule, 'request_new_layer', False)
     setattr(fracRule, 'next_rules_trigger_words',
             ['numeratore'])  # do not use None: it is not iterable and everything would break
     setattr(fracRule, 'is_entry_rule', True)
     #-------------------
     setattr(numeratorRule, 'node_type', NODE_TYPE.INTERNO)
     setattr(numeratorRule, 'request_new_layer', True)
     setattr(numeratorRule, 'next_rules_trigger_words',
             ['denominatore'])  # do not use None: it is not iterable and everything would break
     setattr(numeratorRule, 'is_entry_rule', False)
     setattr(numeratorRule, 'go_to_begin',
             len('\\frac{}{}'))  # attribute that specifies whether to do carry-home
     #------------------------
     setattr(denominatorRule, 'node_type', NODE_TYPE.INTERNO)
     setattr(denominatorRule, 'request_new_layer', True)
     setattr(denominatorRule, 'next_rules_trigger_words',
             [])  # do not use None: it is not iterable and everything would break
     setattr(denominatorRule, 'is_entry_rule', False)
     #grammar creation section
     g = Grammar()
     g.add_rules(fracRule, numeratorRule, denominatorRule)
     return g
Example #21
def main():
    # Create a new public rule using speech alternatives
    # Note that the Sequence expansion requires all of its children expansions
    # to be spoken in sequence
    rule = PublicRule("greet", Sequence(AlternativeSet("hello", "hey"),
                                        "there"))

    # Create a grammar and add the new rule to it
    grammar = Grammar("g")
    grammar.add_rule(rule)

    # Compile the grammar using compile()
    print("Grammar '%s' compiles to:" % grammar.name)
    print(grammar.compile())

    # Find rules in the grammar that match some speech strings
    print_matching(grammar, "hello there")
    print_matching(grammar, "hey there")

    # 'hello hey there' will not match because only one alternative in an
    # AlternativeSet expansion can be matched
    print_matching(grammar, "hello hey there")
Example #22
def main():
    # The Repeat expansion requires one or more matches for its child expansion.
    # The KleeneStar requires zero or more matches. The Sequence expansion
    # requires all of its children expansions to be spoken in sequence.

    # Create a public rule using a Repeat expansion and another using the
    # KleeneStar expansion.
    rule1 = PublicRule("repeat", Sequence(Repeat("please"), "don't crash"))
    rule2 = PublicRule("kleene", Sequence(KleeneStar("please"), "don't crash"))

    # Create a grammar and add the new rules to it
    grammar = Grammar("g")
    grammar.add_rules(rule1, rule2)

    # Compile the grammar using compile()
    print("Grammar '%s' compiles to:" % grammar.name)
    print(grammar.compile())

    # Find rules in the grammar that match some speech strings
    print_matching(grammar, "don't crash")  # only kleene will match
    print_matching(grammar, "please don't crash")  # both will match
    print_matching(grammar, "please please don't crash")  # both again
Example #23
    def test_fully_qualified_rule_reference(self):
        """ Fully-qualified rule references do not require import statements. """
        grammar = Grammar("test")
        fully_qualified_ref = "grammars.test1.W"
        named_ref = NamedRuleRef(fully_qualified_ref)
        rule = Rule("rule", True, named_ref)
        grammar.add_rule(rule)
        expected_rule = self.grammars.test1.get_rule("W")
        for x in range(2):
            self.assertEqual(named_ref.referenced_rule, expected_rule)
            self.assertEqual(grammar.get_rule(fully_qualified_ref),
                             expected_rule)
            self.assertEqual(grammar.find_matching_rules("w"), [rule])

            # Check that the import statement is allowed.
            grammar.add_import(Import(fully_qualified_ref))
Example #24
 def test_resolve_imports_single(self):
     """ Grammar.resolve_imports() correctly handles importing a single rule. """
     memo = {}
     grammar = Grammar("test")
     grammar.add_import(Import("grammars.test1.Z"))
     grammar.resolve_imports(memo)
     expected_grammar = self.grammars.test1
     Z, = expected_grammar.get_rules("Z")
     self.assertDictEqual(
         memo, {
             "test": grammar,
             "grammars.test1": expected_grammar,
             "grammars.test1.Z": Z
         })
Example #25
    def test_ambiguous_qualified_names(self):
        """ Imported rules with the same qualified name must be fully referenced. """
        grammar = Grammar("test")
        grammar.add_import(Import("grammars.test6.rule"))
        grammar.add_import(Import("grammars2.test6.rule"))
        self.assertRaises(GrammarError, grammar.get_rule, "rule")
        self.assertRaises(GrammarError, grammar.get_rule, "test6.rule")

        expected1 = parse_grammar_string("""
            #JSGF V1.0;
            grammar grammars.test6;
            public <rule> = test grammars one;
        """)
        expected2 = parse_grammar_string("""
            #JSGF V1.0;
            grammar grammars2.test6;
            public <rule> = test grammars two;
        """)
        self.assertEqual(grammar.get_rule("grammars.test6.rule"),
                         expected1.get_rule("rule"))
        self.assertEqual(grammar.get_rule("grammars2.test6.rule"),
                         expected2.get_rule("rule"))
Example #26
    def test_resolve_imports_multiple_grammars(self):
        """ Similar import statements in multiple grammars are handled efficiently.
        """
        memo = {}
        grammar = Grammar("test")
        grammar.add_import(Import("grammars.test4.BackspaceRule"))
        grammar.add_import(Import("grammars.test5.DeleteRule"))
        grammar.resolve_imports(memo)
        self.assertIn("grammars.test4", memo)
        self.assertIn("grammars.test5", memo)
        test4 = memo["grammars.test4"]
        test5 = memo["grammars.test5"]

        # The import environments of 'test', 'test4', 'test5' and 'numbers' should be
        # the same. resolve_imports() should update the import environment of every
        # grammar present in the memo dictionary.
        test4.resolve_imports(memo)
        test5.resolve_imports(memo)
        self.assertIn("grammars.numbers", memo)
        numbers = memo["grammars.numbers"]
        expected_environment = {
            "test": grammar,
            "grammars.test4": test4,
            "grammars.test4.BackspaceRule": test4.get_rule("BackspaceRule"),
            "grammars.test5": test5,
            "grammars.test5.DeleteRule": test5.get_rule("DeleteRule"),
            "grammars.numbers": numbers,
            "grammars.numbers.1to39": numbers.get_rule("1to39"),
        }
        for grammar_ in (grammar, test4, test5, numbers):
            self.assertDictEqual(grammar_.import_environment,
                                 expected_environment)

        # The 'numbers' grammar should only have been parsed once, even though it
        # is used by two separate grammars.
        for grammar_ in (test4, test5):
            self.assertIs(grammar_.import_environment["grammars.numbers"],
                          numbers)
Example #27
 def test_resolve_imports_wildcard(self):
     """ Grammar.resolve_imports() correctly handles wildcard import statements.
     """
     memo = {}
     grammar = Grammar("test")
     grammar.add_import(Import("grammars.test1.*"))
     grammar.resolve_imports(memo)
     expected_grammar = self.grammars.test1
     Z, W = expected_grammar.get_rules("Z", "W")
     self.assertDictEqual(
         memo, {
             "test": grammar,
             "grammars.test1": expected_grammar,
             "grammars.test1.Z": Z,
             "grammars.test1.W": W,
             "grammars.test1.*": [Z, W]
         })
Example #28
def main():
    # Create a hidden (private) rule
    rule1 = HiddenRule("hello", "hello")

    # Create a public rule referencing rule1
    rule2 = PublicRule("greet", RuleRef(rule1))

    # Create a grammar and add the new rules to it
    grammar = Grammar("g")
    grammar.add_rules(rule1, rule2)

    # Compile the grammar using compile()
    print("Grammar '%s' compiles to:" % grammar.name)
    print(grammar.compile())

    # Find rules matching 'hello'
    # rule2 will be found, but not rule1 because it is hidden
    print("Matching rule: %s" % grammar.find_matching_rules("hello")[0])
Example #29
def main():
    # Create a public rule with the name 'hello' and a Literal expansion
    # 'hello world'.
    rule = PublicRule("hello", Literal("hello world"))

    # Note that the following creates the same rule:
    rule = PublicRule("hello", "hello world")

    # Create a grammar and add the new rule to it
    grammar = Grammar()
    grammar.add_rule(rule)

    # Compile the grammar using compile()
    # compile_to_file(file_path) may be used to write a compiled grammar to
    # a file instead.
    # Compilation is not required for finding matching rules.
    print(grammar.compile())

    # Find rules in the grammar that match 'hello world'.
    matching = grammar.find_matching_rules("hello world")
    print("Matching: %s" % matching[0])
Example #30
class DictationGrammar(Grammar):
    """
    Grammar subclass that processes rules that use Dictation expansions so they can
    be compiled, matched and used with normal JSGF rules.
    """
    def __init__(self, rules=None, name="default"):
        """
        :type rules: list
        :type name: str
        """
        super(DictationGrammar, self).__init__(name)
        self._dictation_rules = []
        self._original_rule_map = {}
        self._init_jsgf_only_grammar()

        if rules:
            self.add_rules(*rules)

    def _init_jsgf_only_grammar(self):
        """
        Method that initialises the grammar to use for rules not containing
        Dictation expansions.

        Override this to use a different grammar class.
        """
        self._jsgf_only_grammar = Grammar(name=self.name)

    @property
    def rules(self):
        """
        The rules in this grammar.
        This includes internally generated rules as well as original rules.
        :rtype: list
        """
        return list(
            set(self._dictation_rules + self._jsgf_only_grammar.match_rules +
                list(self._original_rule_map.values())))

    @property
    def match_rules(self):
        """
        The rules that the find_matching_rules method will match against.
        :return: iterable
        """
        result = []
        result.extend([x for x in self._dictation_rules if x.visible])
        result.extend(self._jsgf_only_grammar.match_rules)
        return result

    def add_rule(self, rule):
        if not isinstance(rule, Rule):
            raise TypeError("object '%s' was not a JSGF Rule object" % rule)

        # Check if the same rule is already in the grammar.
        if rule.name in self.rule_names:
            if rule in self.rules:
                # Silently return if the rule is comparable to another in the
                # grammar.
                return
            else:
                # This is not strictly true for DictationGrammar, but still holds
                # for match_rules and output from the compile methods.
                raise GrammarError(
                    "JSGF grammars cannot have multiple rules with "
                    "the same name")

        # If the rule is not a dictation rule, add it to the JSGF only grammar and
        # the original rule map.
        if not dictation_in_expansion(rule.expansion):
            self._jsgf_only_grammar.add_rule(rule)
            self._original_rule_map[rule] = rule
            return

        # Check if the rule is a SequenceRule already and do a few things with it.
        if isinstance(rule, SequenceRule):
            if not rule.current_is_dictation_only:
                # The sequence starts with a JSGF only rule and can be
                # spoken like a normal rule
                self._jsgf_only_grammar.add_rule(rule)
            else:
                self._dictation_rules.append(rule)
            self._original_rule_map[rule] = rule
            return

        # Expand the rule's expansion into a list of 1 or more expansions.
        expanded = expand_dictation_expansion(rule.expansion)

        # Create new rules from the resulting expansions and add each to either
        # _dictation_rules or _jsgf_only_grammar.
        for i, x in enumerate(expanded):
            if len(expanded) == 1:
                # No need to use different names in this case
                new_name = rule.name
            else:
                new_name = "%s_%d" % (rule.name, i)
            if not dictation_in_expansion(x):
                r = Rule(new_name, rule.visible, x)

                # Add this rule to the JSGF only grammar
                self._jsgf_only_grammar.add_rule(r)

                # Keep track of the relationship between the original rule and its
                # expanded rules
                self._original_rule_map[r] = rule
            else:
                seq_rule = SequenceRule(new_name, rule.visible, x)
                self._original_rule_map[seq_rule] = rule

                if not seq_rule.current_is_dictation_only:
                    # The sequence starts with a JSGF only rule and can be
                    # spoken like a normal rule
                    self._jsgf_only_grammar.add_rule(seq_rule)
                else:
                    self._dictation_rules.append(seq_rule)

    def get_original_rule(self, rule):
        """
        Get the original rule used to generate a rule from find_matching_rules.
        :type rule: Rule
        :return: Rule
        """
        return self._original_rule_map[rule]

    def get_generated_rules(self, rule):
        """
        Get a generator yielding the rules generated from a rule added to this
        grammar.
        :type rule: Rule
        """
        for k, v in list(self._original_rule_map.items()):
            if v is rule:
                yield k

    def remove_rule(self, rule, ignore_dependent=False):
        # Find the rules generated from this rule and remove them wherever they are
        # as well as the original rule
        if isinstance(rule, string_types):
            rule_name = rule
        else:
            rule_name = rule.name

        for k, v in list(self._original_rule_map.items()):
            if v.name == rule_name:
                self._original_rule_map.pop(k)

                if k in self._dictation_rules:
                    self._dictation_rules.remove(k)

                elif k in self._jsgf_only_grammar.match_rules:
                    self._jsgf_only_grammar.remove_rule(k, ignore_dependent)

    def _compile(self, compile_as_root_grammar):
        """
        Internal method to compile the grammar.
        :type compile_as_root_grammar: bool
        :return: str
        """
        self.rearrange_rules()

        try:
            # Compile the grammar
            if compile_as_root_grammar:
                result = self._jsgf_only_grammar.compile_as_root_grammar()
            else:
                result = self._jsgf_only_grammar.compile()

            # Check for compiled rules
            rule_pattern = re.compile("(public )?<.+> = .+;")

            # If there are none, set result to "".
            if not rule_pattern.search(result):
                result = ""
        except GrammarError as e:
            if len(self._dictation_rules) > 0:
                return ""
            else:
                raise GrammarError("no Dictation rules and JSGF only grammar "
                                   "failed to compile with error: '%s'" % e)
        return result

    def compile(self):
        return self._compile(False)

    def compile_as_root_grammar(self):
        return self._compile(True)

    def rearrange_rules(self):
        """
        Move SequenceRules in this grammar between the dictation rules list and
        the internal grammar used for JSGF only rules depending on whether a
        SequenceRule's current expansion is dictation only or not.
        """
        for rule in tuple(self._jsgf_only_grammar.match_rules):
            if not isinstance(rule, SequenceRule):
                continue
            if rule.current_is_dictation_only:
                self._jsgf_only_grammar.remove_rule(rule)
                self._dictation_rules.append(rule)

        for rule in tuple(self._dictation_rules):
            if not rule.current_is_dictation_only:
                self._jsgf_only_grammar.add_rule(rule)
                self._dictation_rules.remove(rule)

    def reset_sequence_rules(self):
        """
        Reset each SequenceRule in this grammar so that they can accept matches
        again.
        """
        for r in self._jsgf_only_grammar.match_rules + self._dictation_rules:
            if isinstance(r, SequenceRule):
                r.restart_sequence()

        self.rearrange_rules()

    def find_matching_rules(self, speech, advance_sequence_rules=True):
        """
        Find each visible rule passed to the grammar that matches the 'speech'
        string. Also set matches for the original rule.
        :type speech: str
        :param advance_sequence_rules: whether to call set_next() for successful
        sequence rule matches.
        :return: iterable
        """
        # Match against each match rule and remove any rules that didn't match
        result = self.match_rules
        for rule in tuple(result):
            if not rule.matches(speech):
                result.remove(rule)

        # Get the original rule for each rule in the result and ensure that their
        # current_match values reflect the generated rules' values.
        for rule in result:
            original = self.get_original_rule(rule)
            if isinstance(rule, SequenceRule):
                SequenceRule.graft_sequence_matches(rule, original.expansion)

                # Progress to the next expansion if required
                if rule.has_next_expansion and advance_sequence_rules:
                    rule.set_next()

            else:
                original.matches(rule.expansion.current_match)

        # Move SequenceRules between _dictation_rules and _jsgf_only_grammar as
        # required
        self.rearrange_rules()

        return result
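A short usage sketch for DictationGrammar, assuming the Dictation expansion and this class are importable from jsgf.ext as in pyjsgf:

from jsgf import PublicRule, Sequence
from jsgf.ext import Dictation, DictationGrammar

grammar = DictationGrammar()
grammar.add_rule(PublicRule("note", Sequence("take a note", Dictation())))

# Only the JSGF-only part of the split rule shows up in the compiled output.
print(grammar.compile())

# Matching proceeds in sequence: first the JSGF part, then free dictation.
print(grammar.find_matching_rules("take a note"))
print(grammar.find_matching_rules("buy more coffee"))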