def results_callback(self, words, results):
    """
    Handle a recognition result delivered for this grammar.

    :param words: either the string ``"other"`` (the recognition was
        handled elsewhere), the string ``"reject"`` (the recognition
        failed), or a sequence of (word, rule_id) 2-tuples.
    :param results: the engine's results object for this recognition.
    """
    NatlinkEngine._log.debug("Grammar %s: received recognition %r."
                             % (self.grammar._name, words))
    if words == "other":
        # Pass the recognized words from the results object to the
        # grammar's optional process_recognition_other() method.
        func = getattr(self.grammar, "process_recognition_other", None)
        self._process_grammar_callback(
            func,
            words=tuple(map_word(w) for w in results.getWords(0)),
            results=results
        )
        return
    elif words == "reject":
        func = getattr(self.grammar, "process_recognition_failure", None)
        self._process_grammar_callback(func, results=results)
        return

    # If the words argument was not "other" or "reject", then
    # it is a sequence of (word, rule_id) 2-tuples.  Convert this
    # into a tuple of unicode objects.
    words_rules = tuple((map_word(w), r) for w, r in words)
    words = tuple(w for w, r in words_rules)

    # Call the grammar's general process_recognition method, if present.
    func = getattr(self.grammar, "process_recognition", None)
    if func:
        if not self._process_grammar_callback(func, words=words,
                                              results=results):
            # Return early if the method didn't return True or equiv.
            return

    # Iterates through this grammar's rules, attempting
    # to decode each.  If successful, call that rule's
    # method for processing the recognition and return.
    s = state_.State(words_rules, self.grammar._rule_names, self.engine)
    for r in self.grammar._rules:
        # Only consider active, exported (top-level) rules.
        if not (r.active and r.exported):
            continue
        s.initialize_decoding()
        for result in r.decode(s):
            if s.finished():
                self._retain_audio(words, results, r.name)
                root = s.build_parse_tree()

                # Notify observers using the manager *before*
                # processing.
                notify_args = (words, r, root, results)
                self.recobs_manager.notify_recognition(*notify_args)

                r.process_recognition(root)

                # Notify observers using the manager *after*
                # processing.
                self.recobs_manager.notify_post_recognition(
                    *notify_args
                )
                return
    NatlinkEngine._log.warning("Grammar %s: failed to decode"
                               " recognition %r."
                               % (self.grammar._name, words))
def process_words(self, words):
    """
    Process a recognition delivered to this grammar.

    *words* is either the string ``"other"``, the string ``"reject"``,
    or a sequence of (word, rule_id) 2-tuples.
    """
    grammar = self.grammar

    # Do nothing if the grammar is disabled or has no active rules.
    if not (grammar.enabled and grammar.active_rules):
        return

    self._log.debug("Grammar %s: received recognition %r."
                    % (grammar.name, words))

    # Handle the two special recognition values via the grammar's
    # optional callback methods.
    if words == "other":
        callback = getattr(grammar, "process_recognition_other", None)
        if callback:
            callback(words)
        return
    if words == "reject":
        callback = getattr(grammar, "process_recognition_failure", None)
        if callback:
            callback()
        return

    # Otherwise *words* is a sequence of (word, rule_id) 2-tuples.
    # Give the grammar's general process_recognition method, if
    # present, a chance to stop further processing.
    callback = getattr(grammar, "process_recognition", None)
    if callback and not callback(words):
        return

    # Attempt to decode the recognition with each active, exported
    # rule.  The first rule that decodes successfully processes it.
    state = state_.State(words, grammar.rule_names, self.engine)
    for rule in grammar.rules:
        if not (rule.active and rule.exported):
            continue
        state.initialize_decoding()
        for _ in rule.decode(state):
            if not state.finished():
                continue
            try:
                parse_root = state.build_parse_tree()
                word_seq = tuple([word for word, _ in words])

                # Notify observers before and after processing.
                self._observer_manager.notify_recognition(
                    word_seq, rule, parse_root)
                rule.process_recognition(parse_root)
                self._observer_manager.notify_post_recognition(
                    word_seq, rule, parse_root)
            except Exception as e:
                self._log.exception("Failed to process rule "
                                    "'%s': %s" % (rule.name, e))
            return True

    self._log.debug("Grammar %s: failed to decode recognition %r."
                    % (grammar.name, words))
    return False
def _process_rules(self, words, words_rules, results, manual_rule_ids):
    """
    Attempt to decode *words_rules* with each active, exported rule of
    this grammar.  The first rule that decodes successfully has its
    process_recognition() method called, with observers notified
    before and after processing.

    Returns True if a rule processed the recognition, False otherwise.
    """
    state = state_.State(words_rules, self.grammar._rule_names,
                         self.engine)
    for rule in self.grammar._rules:
        if not (rule.active and rule.exported):
            continue

        # Set dictation words manually if DNS didn't report a
        # difference between command and dictation words.  A word is
        # set as dictation (rule id 1000000) if it isn't a reported
        # DNS dictation word and isn't a word in the current top-level
        # rule or any referenced rules.
        if manual_rule_ids:
            rule_words = self.get_rule_words(rule)
            adjusted = []
            for word, rule_id in words_rules:
                if rule_id < 1000000 and word not in rule_words:
                    adjusted.append((word, 1000000))
                else:
                    adjusted.append((word, rule_id))
            state = state_.State(tuple(adjusted),
                                 self.grammar._rule_names, self.engine)

        state.initialize_decoding()
        for _ in rule.decode(state):
            if not state.finished():
                continue
            self._retain_audio(words, results, rule.name)
            root = state.build_parse_tree()

            # Notify observers using the manager *before* processing.
            notify_args = (words, rule, root, results)
            self.recobs_manager.notify_recognition(*notify_args)

            rule.process_recognition(root)

            # Notify observers using the manager *after* processing.
            self.recobs_manager.notify_post_recognition(*notify_args)
            return True
    return False
def results_callback(self, words, results):
    """
    Handle a recognition result delivered by natlink for this grammar.

    *words* is either the string ``"other"``, the string ``"reject"``,
    or a sequence of (word, rule_id) 2-tuples; *results* is the
    natlink results object.
    """
    NatlinkEngine._log.debug("Grammar %s: received recognition %r."
                             % (self.grammar._name, words))

    # Handle the two special recognition values via the grammar's
    # optional callback methods.
    if words == "other":
        callback = getattr(self.grammar, "process_recognition_other",
                           None)
        if callback:
            recognized = tuple(
                text_type(w).encode("windows-1252")
                for w in results.getWords(0))
            callback(recognized)
        return
    if words == "reject":
        callback = getattr(self.grammar, "process_recognition_failure",
                           None)
        if callback:
            callback()
        return

    # If the words argument was not "other" or "reject", then it is a
    # sequence of (word, rule_id) 2-tuples.  Convert each word into a
    # unicode object.
    def to_text(word):
        if isinstance(word, text_type):
            return word
        return word.decode("windows-1252")

    words_rules = tuple((to_text(w), r) for w, r in words)
    words = tuple(pair[0] for pair in words_rules)

    # Give the grammar's general process_recognition method, if
    # present, a chance to stop further processing.
    callback = getattr(self.grammar, "process_recognition", None)
    if callback and not callback(words):
        return

    # Attempt to decode the recognition with each active rule; the
    # first rule that decodes successfully processes it.
    state = state_.State(words_rules, self.grammar._rule_names,
                         self.engine)
    for rule in self.grammar._rules:
        if not rule.active:
            continue
        state.initialize_decoding()
        for _ in rule.decode(state):
            if state.finished():
                rule.process_recognition(state.build_parse_tree())
                return

    NatlinkEngine._log.warning("Grammar %s: failed to decode"
                               " recognition %r."
                               % (self.grammar._name, words))
def value(self, node):
    """
    Return the value obtained by decoding the original words with the
    original root element, or None if decoding fails.
    """
    import dragonfly.grammar.state as state_

    # Pair each original word with rule id 0 and build a decoding
    # state for the original root element.
    state = state_.State(tuple((w, 0) for w in self._words), [],
                         node.engine)
    state.initialize_decoding()
    for _ in self._orig_root_element.decode(state):
        if state.finished():
            return state.build_parse_tree().value()

    self._log_decode.error("CompoundWord %s: failed to decode original"
                           " words %r.", self, " ".join(self._words))
    return None
def process_words(self, words):
    """
    Process a recognition delivered to this grammar.

    *words* is either the string ``"other"``, the string ``"reject"``,
    or a sequence of (word, rule_id) 2-tuples.
    """
    grammar = self.grammar

    # Do nothing if the grammar is disabled or has no active rules.
    if not (grammar.enabled and grammar.active_rules):
        return

    TextInputEngine._log.debug("Grammar %s: received recognition %r."
                               % (grammar.name, words))

    # Handle the two special recognition values via the grammar's
    # optional callback methods.
    if words == "other":
        callback = getattr(grammar, "process_recognition_other", None)
        if callback:
            callback(words)
        return
    if words == "reject":
        callback = getattr(grammar, "process_recognition_failure", None)
        if callback:
            callback()
        return

    # Otherwise *words* is a sequence of (word, rule_id) 2-tuples.
    # Give the grammar's general process_recognition method, if
    # present, a chance to stop further processing.
    callback = getattr(grammar, "process_recognition", None)
    if callback and not callback(words):
        return

    # Attempt to decode the recognition with each active rule; the
    # first rule that decodes successfully processes it.
    state = state_.State(words, grammar.rule_names, self.engine)
    for rule in grammar.rules:
        if not rule.active:
            continue
        state.initialize_decoding()
        for _ in rule.decode(state):
            if state.finished():
                rule.process_recognition(state.build_parse_tree())
                return True

    TextInputEngine._log.debug("Grammar %s: failed to decode "
                               "recognition %r."
                               % (grammar.name, words))
    return False
def process_results(self, words):
    """
    Start the dragonfly processing of the speech hypothesis.

    :param words: a sequence of (word, rule_id) 2-tuples (pairs)
    """
    SphinxEngine._log.debug("Grammar %s: received recognition %r." %
                            (self.grammar.name, words))

    # Convert each word to a text object.  ``text_type`` is used here
    # (as elsewhere in this file) instead of the Python 2-only
    # ``unicode`` built-in, so this also works on Python 3.
    words_rules = tuple((text_type(w), r) for w, r in words)
    rule_ids = tuple(r for _, r in words_rules)
    words = tuple(w for w, r in words_rules)

    # Call the grammar's general process_recognition method, if present.
    func = getattr(self.grammar, "process_recognition", None)
    if func:
        if not func(words):
            # Return early if the method didn't return True or equiv.
            return

    # Iterates through this grammar's rules, attempting to decode each.
    # If successful, call that rule's method for processing the
    # recognition and return.
    s = state_.State(words_rules, rule_ids, self.engine)
    for r in self.grammar.rules:
        # TODO Remove the if windows condition when contexts are working
        if not r.active and sys.platform.startswith("win"):
            continue
        s.initialize_decoding()

        # Iterate each result from decoding state 's' with grammar
        # rule 'r'.
        for _ in r.decode(s):
            if s.finished():
                root = s.build_parse_tree()
                r.process_recognition(root)
                return

    SphinxEngine._log.warning(
        "Grammar %s: failed to decode recognition %r." %
        (self.grammar.name, words))
def process_words(self, words):
    """
    Process a recognition delivered to this grammar.

    :param words: either the string ``"other"``, the string
        ``"reject"``, or a sequence of (word, rule_id) 2-tuples.
    :returns: True if a rule decoded the recognition, False if no rule
        could, or None if processing ended early.
    """
    # Return early if the grammar is disabled or if there are no active
    # rules.
    if not (self.grammar.enabled and self.grammar.active_rules):
        return

    self._log.debug("Grammar %s: received recognition %r."
                    % (self.grammar.name, words))

    results_obj = None  # TODO Use PS results object once implemented

    # TODO Make special grammar callbacks work properly.
    # These special methods are never called for this engine.
    if words == "other":
        func = getattr(self.grammar, "process_recognition_other", None)
        self._process_grammar_callback(func, words=words,
                                       results=results_obj)
        return
    elif words == "reject":
        func = getattr(self.grammar, "process_recognition_failure", None)
        self._process_grammar_callback(func, results=results_obj)
        return

    # If the words argument was not "other" or "reject", then it is a
    # sequence of (word, rule_id) 2-tuples.
    words_rules = tuple(words)
    words = tuple(word for word, _ in words)

    # Call the grammar's general process_recognition method, if present.
    func = getattr(self.grammar, "process_recognition", None)
    if func:
        if not self._process_grammar_callback(func, words=words,
                                              results=results_obj):
            # Return early if the method didn't return True or equiv.
            return

    # Iterate through this grammar's rules, attempting to decode each.
    # If successful, call that rule's method for processing the
    # recognition and return.
    s = state_.State(words_rules, self.grammar.rule_names, self.engine)
    for r in self.grammar.rules:
        # Only consider active, exported (top-level) rules.
        if not (r.active and r.exported):
            continue
        s.initialize_decoding()
        for _ in r.decode(s):
            if s.finished():
                # Build the parse tree used to process this rule.
                root = s.build_parse_tree()

                # Notify observers using the manager *before*
                # processing.
                notify_args = (words, r, root, results_obj)
                self.recobs_manager.notify_recognition(
                    *notify_args
                )

                # Process the rule if not in training mode.
                if not self.engine.training_session_active:
                    try:
                        r.process_recognition(root)
                        # Notify observers *after* successful
                        # processing.
                        self.recobs_manager.notify_post_recognition(
                            *notify_args
                        )
                    except Exception as e:
                        self._log.exception("Failed to process rule "
                                            "'%s': %s" % (r.name, e))
                return True

    self._log.debug("Grammar %s: failed to decode recognition %r."
                    % (self.grammar.name, words))
    return False