def createGrammar():
    """Build the summation grammar: 'sommatoria' emits \\sum_{}^{} with
    follow-up rules for the lower index and the interval."""
    sum_literal = Literal("sommatoria")
    sum_literal.tag = '\\sum_{}^{}'
    sum_rule = PublicRule("sommatoria_main", sum_literal)

    index_literal = Literal("sommatoria da")
    index_literal.tag = None
    index_rule = PublicRule("summation_index", index_literal)

    interval_literal = Literal("sommatoria da a")
    interval_literal.tag = None
    interval_rule = PublicRule("summation_interval", interval_literal)

    # Framework metadata for the main rule.
    sum_rule.node_type = NODE_TYPE.INTERNO
    sum_rule.request_new_layer = False
    sum_rule.next_rules_trigger_words = ['da']
    sum_rule.is_entry_rule = True

    # Metadata for the index rule.
    index_rule.node_type = NODE_TYPE.INTERNO
    index_rule.request_new_layer = True
    index_rule.next_rules_trigger_words = ['a']
    index_rule.is_entry_rule = False
    index_rule.go_to_begin = len('\\sum_{}^{}')  # carry-home cursor offset

    # Metadata for the interval rule.
    interval_rule.node_type = NODE_TYPE.INTERNO
    interval_rule.request_new_layer = True
    interval_rule.next_rules_trigger_words = []  # keep an empty list: None is not iterable
    interval_rule.is_entry_rule = False

    g = Grammar()
    g.add_rules(sum_rule, index_rule, interval_rule)
    return g
def main():
    """Demonstrate a RootGrammar mixing hidden and public rules."""
    root = RootGrammar()

    # Hidden rules are only usable through references from other rules.
    name = HiddenRule("name", AlternativeSet("john", "bob", "anna"))
    # greeting is either: 'hey', 'hey there' or 'hello'
    greeting = HiddenRule("greeting", AlternativeSet(
        Sequence("hey", OptionalGrouping("there")), "hello"))
    # parting_phrase is either: 'good bye' or 'see you'
    parting_phrase = HiddenRule("parting_phrase", AlternativeSet(
        "good bye", "see you"))

    # Public rules: <greeting> <name> and <parting_phrase> <name>.
    greet = PublicRule("greet", Sequence(RuleRef(greeting), RuleRef(name)))
    goodbye = PublicRule("goodbye",
                         Sequence(RuleRef(parting_phrase), RuleRef(name)))

    root.add_rules(name, greeting, parting_phrase, greet, goodbye)

    print("Root grammar compiles to the following:")
    print(root.compile())

    # Speech strings that should match the public rules.
    print_matching(root, "hey john")
    print_matching(root, "hey there john")
    print_matching(root, "see you john")

    # Speech matching only the hidden rules.
    print_matching(root, "bob")
    print_matching(root, "hey there")
    print_matching(root, "good bye")
def createGrammar():
    """Build the defined-integral grammar: 'integrale da' emits
    \\int\\limits_{}^{} with a follow-up rule for the upper limit."""
    integral_literal = Literal("integrale da")
    integral_literal.tag = "\\int\\limits_{}^{}"
    integral_rule = PublicRule("defined_integral_main", integral_literal)

    upper_limit_literal = Literal("integrale da a")
    upper_limit_literal.tag = None
    upper_limit_rule = PublicRule("defined_integral_limit", upper_limit_literal)

    # Main rule metadata.
    integral_rule.node_type = NODE_TYPE.INTERNO
    integral_rule.request_new_layer = True
    integral_rule.next_rules_trigger_words = ['a']
    integral_rule.is_entry_rule = True
    integral_rule.go_to_begin = len('\\int\\limits_{}^{}')  # carry-home cursor offset

    # Upper-limit rule metadata.
    upper_limit_rule.node_type = NODE_TYPE.INTERNO
    upper_limit_rule.request_new_layer = True
    upper_limit_rule.next_rules_trigger_words = []  # keep iterable: None breaks consumers
    upper_limit_rule.is_entry_rule = False

    g = Grammar()
    g.add_rules(integral_rule, upper_limit_rule)
    return g
def test_with_rule_ref(self):
    """Sequences containing RuleRef children split into the expected parts."""
    plain_rule = PublicRule("test", "test")
    seq_with_ref = Seq(RuleRef(plain_rule), Dict())
    self.assert_expansion_sequence_equal(
        [Seq(RuleRef(plain_rule)), Seq(Dict())], seq_with_ref)

    dictation_rule = PublicRule("test", Dict())
    seq_with_dict_ref = Seq(RuleRef(dictation_rule), Dict())
    self.assert_expansion_sequence_equal(
        [Dict(), Seq(Dict())], seq_with_dict_ref)
def createGrammar():
    """Build the limit grammar: 'limite per' emits \\lim{ \\to } with a
    chain of follow-up rules ('che' -> 'tende' -> 'a')."""
    main_literal = Literal("limite per")
    main_literal.tag = "\\lim{ \\to }"
    main_rule = PublicRule("limite_main", main_literal)

    p1_literal = Literal("limite per che")
    p1_literal.tag = None
    p1_rule = PublicRule("limite_up_p1", p1_literal)

    p2_literal = Literal("limite per che tende")
    p2_literal.tag = None
    p2_rule = PublicRule("limite_up_p2", p2_literal)

    p3_literal = Literal("limite per che tende a")
    p3_literal.tag = None
    p3_rule = PublicRule("limite_up_p3", p3_literal)

    # Main rule metadata.
    main_rule.node_type = NODE_TYPE.INTERNO
    main_rule.request_new_layer = True
    main_rule.next_rules_trigger_words = ['che']
    main_rule.is_entry_rule = True
    main_rule.go_to_begin = len('\\lim{ \\to }')  # carry-home cursor offset

    # Follow-up chain metadata.
    p1_rule.node_type = NODE_TYPE.INTERNO
    p1_rule.request_new_layer = True
    p1_rule.next_rules_trigger_words = ['tende']
    p1_rule.is_entry_rule = False

    p2_rule.node_type = NODE_TYPE.INTERNO
    p2_rule.request_new_layer = True
    p2_rule.next_rules_trigger_words = ['a']
    p2_rule.is_entry_rule = False

    p3_rule.node_type = NODE_TYPE.INTERNO
    p3_rule.request_new_layer = True
    p3_rule.next_rules_trigger_words = []  # keep iterable: None breaks consumers
    p3_rule.is_entry_rule = False

    g = Grammar()
    g.add_rules(main_rule, p1_rule, p2_rule, p3_rule)
    return g
def main():
    """Walk through matching a rule that mixes a literal with Dictation."""
    # A public rule: the literal "hello" followed by free dictation.
    hello_rule = PublicRule("Hello_X", Sequence("hello", Dictation()))

    # Wrap it in a DictationGrammar and show the compiled form.
    dictation_grammar = DictationGrammar([hello_rule])
    print(dictation_grammar.compile())

    # Match the literal part only; the optional second argument (False)
    # stops find_matching_rules from advancing to the next rule part.
    matches = dictation_grammar.find_matching_rules("hello", False)
    print("Matching rule: %s" % matches[0])  # first part of rule

    # Advance to the dictation part of the rule.
    matches[0].set_next()

    # The dictation part matches anything.
    matches = dictation_grammar.find_matching_rules("world")
    print("Matching rule: %s" % matches[0])

    # Both the entire match and the original rule's current_match value
    # are 'hello world'.
    print(matches[0].entire_match)
    print(hello_rule.expansion.current_match)
def createGrammar():
    """Build the multiplication leaf grammar: 'per' or 'moltiplicato per'
    both emit the LaTeX \\cdot symbol."""
    # These are expansions, not rules. The tags use an escaped backslash:
    # the original "\cdot" only worked because \c is an unrecognized escape
    # that Python passes through unchanged, which raises a SyntaxWarning on
    # modern interpreters. The string value is identical.
    short_rule = Literal("per")
    short_rule.tag = "\\cdot"  # this rule's contribution to the final LaTeX
    long_rule = Literal("moltiplicato per")
    long_rule.tag = "\\cdot"
    multiplication_rule = PublicRule("multiplication",
                                     AlternativeSet(short_rule, long_rule))

    # Framework metadata (four core attributes per rule):
    # node_type tags the rule as a leaf or internal node of the grammar graph.
    multiplication_rule.node_type = NODE_TYPE.FOGLIA
    # Whether matching this rule requires creating a new layer.
    multiplication_rule.request_new_layer = False
    # Outgoing edges of the grammar graph; keep an empty list (None is not iterable).
    multiplication_rule.next_rules_trigger_words = []
    # Whether this rule is an entry point of a graph.
    multiplication_rule.is_entry_rule = True
    # A leaf may ask the cursor to move by this much afterwards
    # (typically to skip a space).
    multiplication_rule.leaf_end_cursor_movement = 1

    g = Grammar()
    g.add_rule(multiplication_rule)
    return g
def main():
    """Demonstrate expansion tags and tag-based rule lookup."""
    # Tagged open/close literals combined into one command rule.
    open_lit = Literal("open")
    close_lit = Literal("close")
    open_lit.tag = "OPEN"
    close_lit.tag = "CLOSE"
    cmd = PublicRule("command",
                     Sequence(AlternativeSet(open_lit, close_lit), "the file"))

    # Show every tag attached to the 'command' rule.
    print("Tags: %s\n" % cmd.tags)

    g = Grammar()
    g.add_rule(cmd)

    print("Compiled grammar is:\n%s" % g.compile())

    # Rules carrying the "OPEN" tag.
    print("Tagged rules are:\n%s\n" % g.find_tagged_rules("OPEN"))

    # Tags matched by a concrete speech string. Rule.matched_tags is an
    # alternative once Rule.matches or Grammar.find_matching_rules has run.
    speech = "open the file"
    print("Tags matching '%s' are: %s" %
          (speech, cmd.get_tags_matching(speech)))
def test_matches_as_optional(self):
    """An optional Dictation may be absent from the matched speech."""
    seq = Seq("hello", Opt(Dict()))
    rule = PublicRule("test", seq)
    self.assertTrue(rule.matches("hello"))
    self.assertEqual(seq.current_match, "hello")
    self.assertEqual(seq.children[0].current_match, "hello")
    self.assertEqual(seq.children[1].current_match, "")

    # A failed match leaves every expansion's current_match as None or "".
    self.assertFalse(rule.matches("test testing"))
    map_expansion(
        seq, lambda x: self.assertTrue(x.current_match in [None, ""]))
def main():
    """Minimal example: build one public rule, compile, then match."""
    # These two constructions are equivalent: a plain string argument is
    # promoted to a Literal expansion automatically.
    rule = PublicRule("hello", Literal("hello world"))
    rule = PublicRule("hello", "hello world")

    grammar = Grammar()
    grammar.add_rule(rule)

    # compile() returns the grammar text; compile_to_file(file_path) writes
    # it to disk instead. Compilation is not required for matching.
    print(grammar.compile())

    # Rules in the grammar matching 'hello world'.
    matching = grammar.find_matching_rules("hello world")
    print("Matching: %s" % matching[0])
def createGrammar():
    """Build the fraction grammar: 'frazione' emits \\frac{}{} with
    follow-up rules for numerator and denominator."""
    frac_literal = Literal("frazione")
    frac_literal.tag = '\\frac{}{}'
    frac_rule = PublicRule("fraction_main", frac_literal)

    numerator_literal = Literal("frazione numeratore")
    numerator_literal.tag = None
    numerator_rule = PublicRule("fraction_numerator", numerator_literal)

    denominator_literal = Literal("frazione numeratore denominatore")
    denominator_literal.tag = None
    denominator_rule = PublicRule("fraction_denominator", denominator_literal)

    # Main rule metadata.
    frac_rule.node_type = NODE_TYPE.INTERNO
    frac_rule.request_new_layer = False
    frac_rule.next_rules_trigger_words = ['numeratore']
    frac_rule.is_entry_rule = True

    # Numerator rule metadata.
    numerator_rule.node_type = NODE_TYPE.INTERNO
    numerator_rule.request_new_layer = True
    numerator_rule.next_rules_trigger_words = ['denominatore']
    numerator_rule.is_entry_rule = False
    numerator_rule.go_to_begin = len('\\frac{}{}')  # carry-home cursor offset

    # Denominator rule metadata.
    denominator_rule.node_type = NODE_TYPE.INTERNO
    denominator_rule.request_new_layer = True
    denominator_rule.next_rules_trigger_words = []  # keep iterable: None breaks consumers
    denominator_rule.is_entry_rule = False

    g = Grammar()
    g.add_rules(frac_rule, numerator_rule, denominator_rule)
    return g
def main():
    """Contrast Repeat (one or more) with KleeneStar (zero or more)."""
    # Repeat needs at least one 'please' before "don't crash";
    # KleeneStar accepts zero. Sequence requires its children in order.
    rule1 = PublicRule("repeat", Sequence(Repeat("please"), "don't crash"))
    rule2 = PublicRule("kleene", Sequence(KleeneStar("please"), "don't crash"))

    grammar = Grammar("g")
    grammar.add_rules(rule1, rule2)

    print("Grammar '%s' compiles to:" % grammar.name)
    print(grammar.compile())

    # Match some speech strings against both rules.
    print_matching(grammar, "don't crash")                # only kleene will match
    print_matching(grammar, "please don't crash")         # both will match
    print_matching(grammar, "please please don't crash")  # both again
def test_matches_with_literals(self):
    """Dictation between literals captures exactly the unmatched words."""
    two_part = Seq("hello", Dict())
    rule_a = PublicRule("test", two_part)
    # Matching twice in a row must be stable.
    self.assertTrue(rule_a.matches("hello world"))
    self.assertTrue(rule_a.matches("hello world"))
    self.assertEqual(two_part.current_match, "hello world")
    self.assertEqual(two_part.children[0].current_match, "hello")
    self.assertEqual(two_part.children[1].current_match, "world")

    three_part = Seq("a", Dict(), "c")
    rule_b = PublicRule("test", three_part)
    self.assertTrue(rule_b.matches("a b c"))
    self.assertEqual(three_part.children[0].current_match, "a")
    self.assertEqual(three_part.children[1].current_match, "b")
    self.assertEqual(three_part.children[2].current_match, "c")
    self.assertEqual(three_part.current_match, "a b c")

    # Non-matching speech clears current_match on every expansion.
    self.assertFalse(rule_a.matches("test testing"))
    map_expansion(two_part, lambda x: self.assertIsNone(x.current_match))
    self.assertFalse(rule_b.matches("test testing"))
    map_expansion(three_part, lambda x: self.assertIsNone(x.current_match))
def add_to_grammar(grammar_path, file_path, gram_name):
    """Load the ``Grammar`` at *grammar_path*, merge in rules built from the
    words in *file_path*, and compile the combined grammar back to
    *grammar_path*.

    :param grammar_path: path of the existing compiled grammar file
    :param file_path: text file with one candidate rule per line
    :param gram_name: name for the rebuilt ``RootGrammar``
    """
    old_gram = parser.parse_grammar_file(grammar_path)
    with open(file_path, 'rt') as f:
        # Normalize once so the membership tests below compare like with
        # like. (The original compared upper-cased rule text against raw
        # readlines() output that still carried trailing newlines and the
        # original case, so duplicate detection never matched.)
        word_list = [line.upper().strip() for line in f]

    # Remove the root rule; RootGrammar regenerates it.
    old_gram.remove_rule(old_gram.get_rule_from_name("root"))
    old_rules = old_gram.rules

    new_gram = RootGrammar(name=gram_name, case_sensitive=True)

    # Carry over existing rules that are not re-supplied by the word file.
    i = 0
    old_rules_text = list()
    for rule in old_rules:
        exp = rule.expansion.text.upper()
        old_rules_text.append(exp)
        if exp not in word_list:
            new_gram.add_rule(
                PublicRule("rule" + str(i), exp, case_sensitive=True))
            i += 1

    # Add genuinely new, non-junk rules from the word file.
    for exp in word_list:
        print("upp is ", exp)
        if exp not in old_rules_text and exp not in ("", "{", "}", "."):
            new_gram.add_rule(
                PublicRule("rule" + str(i), exp, case_sensitive=True))
            i += 1

    # Compile the merged grammar back to the original file.
    new_gram.compile_to_file(grammar_path, compile_as_root_grammar=True)
def createGrammar():
    """Build the gamma leaf grammar ('gamma' emits \\gamma)."""
    gamma_literal = Literal("gamma")
    gamma_literal.tag = "\\gamma"
    gamma_rule = PublicRule("gamma", gamma_literal)

    # Framework metadata: a leaf entry rule with no outgoing edges.
    gamma_rule.node_type = NODE_TYPE.FOGLIA
    gamma_rule.request_new_layer = False
    gamma_rule.next_rules_trigger_words = []  # keep iterable: None breaks consumers
    gamma_rule.is_entry_rule = True

    g = Grammar()
    g.add_rule(gamma_rule)
    return g
def createGrammar():
    """Build the absolute-value grammar ('valore assoluto di' emits ||)."""
    abs_literal = Literal("valore assoluto di")
    abs_literal.tag = '||'
    abs_rule = PublicRule("absolute_value", abs_literal)

    # INTERNO: the construct must eventually be closed by a leaf rule.
    abs_rule.node_type = NODE_TYPE.INTERNO
    abs_rule.request_new_layer = True
    abs_rule.next_rules_trigger_words = []  # keep iterable: None breaks consumers
    abs_rule.is_entry_rule = True

    g = Grammar()
    g.add_rule(abs_rule)
    return g
def createGrammar():
    """Build the power grammar ('alla' emits ^{}); the exponent itself is
    supplied by later rules, which is why the tag is just the template."""
    power_literal = Literal("alla")
    power_literal.tag = '^{}'
    power_rule = PublicRule("power", power_literal)

    power_rule.node_type = NODE_TYPE.INTERNO
    power_rule.request_new_layer = True
    power_rule.next_rules_trigger_words = []  # keep iterable: None breaks consumers
    power_rule.is_entry_rule = True

    g = Grammar()
    g.add_rule(power_rule)
    return g
def _get_best_hypothesis(self, hypotheses): """ Take a list of speech hypotheses and return the most likely one. :type hypotheses: iterable :return: str | None """ # Get all distinct, non-null hypotheses. distinct = tuple([h for h in set(hypotheses) if bool(h)]) if not distinct: return None elif len(distinct) == 1: return distinct[0] # only one choice # Decide between non-null hypotheses using a Pocket Sphinx search with # each hypothesis as a grammar rule. grammar = RootGrammar() grammar.language_name = self.language for i, hypothesis in enumerate(distinct): grammar.add_rule(PublicRule("rule%d" % i, Literal(hypothesis))) compiled = grammar.compile_grammar() name = "_temp" # Store the current search name. original = self._decoder.active_search # Note that there is no need to validate words in this case because # each literal in the _temp grammar came from a Pocket Sphinx # hypothesis. self._decoder.end_utterance() self._decoder.set_jsgf_string(name, _map_to_str(compiled)) self._decoder.active_search = name # Do the processing. hyp = self._decoder.batch_process( self._audio_buffers, use_callbacks=False ) result = hyp.hypstr if hyp else None # Switch back to the previous search. self._decoder.end_utterance() # just in case self._decoder.active_search = original self._decoder.unset_search("_temp") return result
def main():
    """Show tagging a Dictation expansion and inspecting match state."""
    dictation = Dictation()
    dictation.tag = "dictation"  # add a tag to the expansion
    rule = PublicRule("dictation", Sequence("hello", dictation))

    # Show the compiled form of the rule.
    print("Compiled rule: %s" % rule.compile())

    # Match a speech string against the rule.
    speech = "hello world"
    print("Rule matches '%s': %s." % (speech, rule.matches(speech)))

    # Walk the expansion tree and print each node's current_match value.
    def print_match(x):
        print("Match for %s: %s" % (x, x.current_match))

    map_expansion(rule.expansion, print_match)
def __init__(self, module_name: str, module: Module,
             commands: List[ExpressionCommandInfo], keywords: KeywordList,
             on_no_keywords: Callable, on_not_recognized: Callable):
    """Build both a JSGF grammar and a pyparsing grammar for *commands*.

    :param commands: command definitions; each contributes one JSGF rule
        and one pyparsing alternative.
    :param on_not_recognized: callback stored for use when no command
        parses (invoked elsewhere).
    Remaining parameters are forwarded to the superclass unchanged.
    """
    super().__init__(module_name, module, keywords, on_no_keywords)
    # Current assistant state; None until assigned elsewhere. The closures
    # below read it lazily via self, so they see the latest value.
    self._state: Optional[Assistant] = None

    def make_expression(cmd: ExpressionCommandInfo):
        # Bind cmd per call; the parse action forwards the parsed tokens
        # together with the module and the current state to the handler.
        def func(ts):
            return cmd.func(module, self._state, ts)
        return cmd.expression.to_parser().addParseAction(func)

    # One public JSGF rule per command, named cmd-0, cmd-1, ...
    self.jsgf_grammar = RootGrammar(
        PublicRule(f'cmd-{i}', cmd.expression.to_jsgf())
        for i, cmd in enumerate(commands))
    # A single pyparsing alternative covering every command parser.
    self.parsing_grammar = pyparsing.Or(map(make_expression, commands))
    self.on_not_recognized = on_not_recognized
def main():
    """Show that hidden rules match only via references from public rules."""
    # A hidden (private) rule and a public rule that references it.
    hidden = HiddenRule("hello", "hello")
    public = PublicRule("greet", RuleRef(hidden))

    grammar = Grammar("g")
    grammar.add_rules(hidden, public)

    print("Grammar '%s' compiles to:" % grammar.name)
    print(grammar.compile())

    # Find rules matching 'hello': the public rule is found, but the
    # hidden one is not because it is hidden.
    print("Matching rule: %s" % grammar.find_matching_rules("hello")[0])
def createGrammar():
    """Build the subscript grammar ('pedice' or 'sub' emits _{}); the
    subscript content comes from later rules."""
    pedice_literal = Literal("pedice")
    pedice_literal.tag = '_{}'
    sub_literal = Literal("sub")
    sub_literal.tag = '_{}'
    subscript_rule = PublicRule(
        "subscript", AlternativeSet(pedice_literal, sub_literal))

    subscript_rule.node_type = NODE_TYPE.INTERNO
    subscript_rule.request_new_layer = True
    subscript_rule.next_rules_trigger_words = []  # keep iterable: None breaks consumers
    subscript_rule.is_entry_rule = True

    g = Grammar()
    g.add_rule(subscript_rule)
    return g
def createGrammar():
    """Build the indefinite-integral leaf grammar ('integrale' emits \\int)."""
    short_expansion = Literal("integrale")
    # Escaped backslash: the original "\int" relied on the invalid escape
    # sequence \i passing through unchanged, which raises a SyntaxWarning
    # on modern Python. The string value is identical.
    short_expansion.tag = "\\int"
    integral_rule = PublicRule("integral", short_expansion)

    # Framework metadata: a leaf entry rule with no outgoing edges.
    integral_rule.node_type = NODE_TYPE.FOGLIA
    integral_rule.request_new_layer = False
    integral_rule.next_rules_trigger_words = []  # keep iterable: None breaks consumers
    integral_rule.is_entry_rule = True
    # After this leaf the cursor moves one position (typically past a space).
    integral_rule.leaf_end_cursor_movement = 1

    g = Grammar()
    g.add_rule(integral_rule)
    return g
def main():
    """Demonstrate Sequence combined with an OptionalGrouping."""
    # 'hey' must be spoken; 'there' may or may not be.
    rule = PublicRule("greet", Sequence("hey", OptionalGrouping("there")))

    grammar = Grammar("g")
    grammar.add_rule(rule)

    print("Grammar '%s' compiles to:" % grammar.name)
    print(grammar.compile())

    # With and without the optional word 'there'.
    print_matching(grammar, "hey")
    print_matching(grammar, "hey there")
def createGrammar():
    """Build the close-brace leaf grammar: three spoken forms emit }."""
    short_literal = Literal("chiusa graffa")
    short_literal.tag = "}"
    long_literal = Literal("chiudi parentesi graffa")
    long_literal.tag = "}"
    long_literal_2 = Literal("chiusa parentesi graffa")
    long_literal_2.tag = "}"
    close_brace_rule = PublicRule(
        "close_brace",
        AlternativeSet(short_literal, long_literal, long_literal_2))

    # Framework metadata: a leaf entry rule with no outgoing edges.
    close_brace_rule.node_type = NODE_TYPE.FOGLIA
    close_brace_rule.request_new_layer = False
    close_brace_rule.next_rules_trigger_words = []  # keep iterable: None breaks consumers
    close_brace_rule.is_entry_rule = True

    g = Grammar()
    g.add_rule(close_brace_rule)
    return g
def createGrammar():
    """Build the pi leaf grammar: three spoken forms emit \\pi."""
    standard_literal = Literal("pi greco")
    standard_literal.tag = "\\pi"
    joined_literal = Literal("pigreco")
    joined_literal.tag = "\\pi"
    short_literal = Literal("p greco")
    short_literal.tag = "\\pi"
    pi_rule = PublicRule(
        "pi-greco",
        AlternativeSet(standard_literal, joined_literal, short_literal))

    # Framework metadata: a leaf entry rule with no outgoing edges.
    pi_rule.node_type = NODE_TYPE.FOGLIA
    pi_rule.request_new_layer = False
    pi_rule.next_rules_trigger_words = []  # keep iterable: None breaks consumers
    pi_rule.is_entry_rule = True

    g = Grammar()
    g.add_rule(pi_rule)
    return g
def create_grammar(word_list, name, gram_file):
    """Create a grammar named ``name`` from the lines in ``word_list`` and
    write its compiled form to ``gram_file``, so that speech can match any
    one of the list's cleaned entries.
    """
    upp_list = list()
    grammar = RootGrammar(name=name, case_sensitive=True)
    i = 0
    for raw_line in word_list:
        upp = raw_line.upper().strip()
        #print("upp is",upp)
        # Skip blanks, stray braces/dots and "_"-prefixed entries.
        if upp != "" and upp != "{" and upp != "}" and upp != "." and upp[0] != "_":
            grammar.add_rule(
                PublicRule("rule" + str(i), upp, case_sensitive=True))
            upp_list.append(upp)
            i = i + 1
    with open(gram_file, 'wt') as g:
        print(grammar.compile(), file=g)
def main():
    """Demonstrate AlternativeSet: exactly one alternative can match."""
    # Sequence requires its children in order; the first child is a choice
    # between 'hello' and 'hey'.
    greet_rule = PublicRule(
        "greet", Sequence(AlternativeSet("hello", "hey"), "there"))

    grammar = Grammar("g")
    grammar.add_rule(greet_rule)

    print("Grammar '%s' compiles to:" % grammar.name)
    print(grammar.compile())

    # Match some speech strings.
    print_matching(grammar, "hello there")
    print_matching(grammar, "hey there")
    # 'hello hey there' will not match because only one alternative in an
    # AlternativeSet expansion can be matched.
    print_matching(grammar, "hello hey there")
def createGrammar():
    """Build the open-parenthesis leaf grammar: three spoken forms emit (."""
    short_literal = Literal("aperta tonda")
    short_literal.tag = "("
    long_literal = Literal("apri parentesi tonda")
    long_literal.tag = "("
    long_literal_2 = Literal("aperta parentesi tonda")
    long_literal_2.tag = "("
    open_parenthesis_rule = PublicRule(
        "open_parenthesis",
        AlternativeSet(short_literal, long_literal, long_literal_2))

    # Framework metadata: a leaf entry rule with no outgoing edges.
    open_parenthesis_rule.node_type = NODE_TYPE.FOGLIA
    open_parenthesis_rule.request_new_layer = False
    open_parenthesis_rule.next_rules_trigger_words = []  # keep iterable: None breaks consumers
    open_parenthesis_rule.is_entry_rule = True

    g = Grammar()
    g.add_rule(open_parenthesis_rule)
    return g
# NOTE(review): this fragment continues a script — `list` (the word list,
# shadowing the builtin) and `list2` are defined before this visible span.
# It builds two JSGF grammars for forced alignment of the utterance
# "why should one halt on the way": one over phonemes, one over words.
list3 = []
list2.append('sil')
list3.append('sil')
for word in list:
    word = word.lower()
    # Wrap each word in <...> so it becomes a rule reference in the
    # phoneme-alternatives grammar appended below.
    text3 = '<' + word + '>'
    list3.append(text3)
    list2.append(word)
# Trailing optional silence markers for both sequences.
list2.append('[ sil ] ')
list3.append('[ sil ] ;')
phoneme = " ".join(word for word in list2)
# print(phoneme)
text = "why should one halt on the way"
from jsgf import PublicRule, Literal, Grammar
# Alignment grammar: a single rule over the joined phoneme sequence.
rule = PublicRule("why", Literal(phoneme))
grammar = Grammar("forcing")
grammar.add_rule(rule)
align_file = 'why-align.jsgf'
grammar.compile_to_file(align_file)
# Word grammar: the plain utterance bracketed by silence.
text2 = 'sil ' + text + ' [ sil ] '
rule2 = PublicRule("wholeutt", Literal(text2))
grammar2 = Grammar("word")
grammar2.add_rule(rule2)
words_file = "why-words.jsgf"
grammar2.compile_to_file(words_file)
# Phoneme-confusion table: each <phoneme> rule expands to the phoneme
# itself plus acoustically similar alternatives.
phoneme_final = " ".join(word for word in list3)
phoneme_final = phoneme_final + "\n\n<aa> = aa | ah | er | ao;\n<ae> = ae | eh | er | ah;\n<ah> = ah | ae | er | aa;\n<ao> = ao | aa | er | uh;\n<aw> = aw | aa | uh | ow;\n<ay> = ay | aa | iy | oy | ey;\n<b> = b | p | d;\n<ch> = ch | sh | jh | t;\n<dh> = dh | th | z | v;\n<d> = d | t | jh | g | b;\n<eh> = eh | ih | er | ae;\n<er> = er | eh | ah | ao;\n<ey> = ey | eh | iy | ay;\n<f> = f | hh | th | v;\n<g> = g | k | d;\n<hh> = hh | th | f | p | t | k;\n<ih> = ih | iy | eh;\n<iy> = iy | ih;\n<jh> = jh | ch | zh | d;\n<k> = k | g | t | hh;\n<l> = l | r | w;\n<m> = m | n;\n<ng> = ng | n;\n<n> = n | m | ng;\n<ow> = ow | ao | uh | aw;\n<oy> = oy | ao | iy | ay;\n<p> = p | t | b | hh;\n<r> = r | y | l;\n<ss> = sh | s | z | th;\n<sh> = sh | s | zh | ch;\n<t> = t | ch | k | d | p | hh;\n<th> = th | s | dh | f | hh;\n<uh> = uh | ao | uw | uw;\n<uw> = uw | uh | uw;\n<v> = v | f | dh;\n<w> = w | l | y;\n<y> = y | w | r;\n<z> = z | s | dh | z;\n<zh> = zh | sh | z | jh"