def __init__(self, element, modifier=None, modify_default=False):
    """Wrap *element* as a single-child alternative, optionally passing
    its value through *modifier*.

    modify_default -- when True, the modifier is applied even if the
    element produced its default value.
    """
    self._element = element
    self._modifier = modifier
    self._modify_default = modify_default
    # Forward the wrapped element's name and default so this wrapper is
    # transparent to the surrounding grammar.
    AlternativeBase.__init__(
        self,
        children=(element,),
        name=element.name,
        default=element.default,
    )
def __init__(self, name=None, repeatables=None, finishers=None, max=10, exported=True):
    """Build a mapping rule: 1..max repeatable elements, then one finisher.

    When *repeatables* or *finishers* is omitted, the class-level
    attributes of the same name are used.
    """
    repeatables = self.repeatables if repeatables is None else repeatables
    finishers = self.finishers if finishers is None else finishers
    # At least one repeatable per utterance, capped at ``max``.
    repetition_element = Repetition(
        Alternative(repeatables), min=1, max=max, name='repeatables')
    finisher_element = Alternative(finishers, name='finishers')
    MappingRule.__init__(
        self,
        name=name,
        extras=[repetition_element, finisher_element],
        exported=exported,
    )
class PrimitiveInsertion(CompoundRule):
    """Matches one primitive insertion and forwards the matched value."""

    spec = '<insertion>'
    extras = [Alternative(primitive_insertions, name='insertion')]

    def value(self, node):
        # Unwrap the parse tree down to the matched alternative child
        # and delegate to its value.
        matched = node.children[0].children[0]
        return matched.children[0].value()
class CountedMotion(NumericDelegateRule):
    """An optional count followed by a primitive or parameterized motion."""

    spec = '[<count>] <motion>'
    extras = [
        ruleDigitalInteger[3],
        Alternative([rulePrimitiveMotion, ruleParameterizedMotion],
                    name='motion'),
    ]
class Command(CompoundRule):
    """A counted, register-qualified Vim command.

    value() assembles the key prefix (optional count, optional
    register/macro qualifier) in front of the delegate command's value.
    """

    spec = '[<count>] [reg <LetterMapping>] <command>'
    extras = [
        Alternative([
            ruleOperatorApplication,
            rulePrimitiveCommand,
        ], name='command'),
        ruleDigitalInteger[3],
        ruleLetterMapping,
    ]

    def value(self, node):
        delegates = node.children[0].children[0].children
        value = delegates[-1].value()
        prefix = ''
        if delegates[0].value() is not None:
            # Optional count, e.g. the "3" in "3dd".
            prefix += str(delegates[0].value())
        if delegates[1].value() is not None:
            # Hack for macros
            reg = delegates[1].value()[1]
            if value == 'macro':
                prefix += '@' + reg
                value = None
            else:
                # NOTE(review): Vim's register prefix is the double-quote
                # character; a single quote jumps to a mark instead.
                # Confirm whether "'" here is intentional.
                prefix += "'" + reg
        if prefix:
            if value is not None:
                value = Text(prefix) + value
            else:
                value = Text(prefix)
        # TODO: ugly hack; should fix the grammar or generalize.
        # Fix: zip(*node.results)[0] was Python-2-only (zip() returns an
        # iterator on Python 3 and cannot be indexed); collect the spoken
        # words portably instead.
        spoken_words = [result[0] for result in node.results]
        if 'chaos' in spoken_words:
            return [('c', value), ('i', (NoAction(),) * 2)]
        else:
            return [('c', value)]
def value(self, node):
    """Return the wrapped element's value, passed through the modifier.

    The modifier is skipped when unset and -- unless _modify_default is
    set -- when the element yielded its default value.
    """
    result = AlternativeBase.value(self, node)
    if not self._modifier:
        return result
    if result == self.default and not self._modify_default:
        return result
    return self._modifier(result)
def __init__(self, name, command, terminal_command, context):
    """Assemble the spoken form and elements for one command grammar."""
    # Here we define this rule's spoken-form and special elements. Note that
    # nested_repetitions is the only one that contains Repetitions, and it
    # is not itself repeated. This is for performance purposes.
    spoken_form = ("[<sequence>] "
                   "[<nested_repetitions>] "
                   "[<terminal_command>] "
                   "[<final_command>]")
    elements = [
        Repetition(command, min=1, max=5, name="sequence"),
        Alternative([RuleRef(rule=character_rule)],
                    name="nested_repetitions"),
        ElementWrapper("terminal_command", terminal_command),
        RuleRef(rule=final_rule, name="final_command"),
    ]
    # Every part of the spec is optional, so each element defaults to
    # an "absent" value.
    element_defaults = {
        "sequence": [],
        "nested_repetitions": None,
        "terminal_command": None,
        "final_command": None,
    }
    CompoundRule.__init__(self,
                          name=name,
                          spec=spoken_form,
                          extras=elements,
                          defaults=element_defaults,
                          exported=True,
                          context=context)
class VimCommand(CompoundRule):
    """Top-level rule: an optional command sequence plus optional literal."""

    spec = ('[<app>] [<literal>]')
    extras = [
        Repetition(Alternative([ruleCommand, RuleRef(Insertion())]),
                   max=10, name='app'),
        RuleRef(LiteralIdentifierInsertion(), name='literal'),
    ]

    def _process_recognition(self, node, extras):
        # Flatten all recognized chunks into one (mode, action) list.
        commands = []
        if 'app' in extras:
            for chunk in extras['app']:
                commands.extend(chunk)
        if 'literal' in extras:
            commands.extend(extras['literal'])

        # Insertion-mode actions ('i') are buffered and flushed as a
        # batch before any non-insertion command runs, and once more at
        # the end of the utterance.
        insertion_buffer = []
        for mode, action in commands:
            if mode == 'i':
                insertion_buffer.append(action)
            else:
                execute_insertion_buffer(insertion_buffer)
                insertion_buffer = []
                action.execute(extras)
        execute_insertion_buffer(insertion_buffer)
class Motion(CompoundRule):
    """Unifies counted and uncounted motions under one <motion> element."""

    spec = '<motion>'
    extras = [
        Alternative([ruleCountedMotion, ruleUncountedMotion], name='motion')
    ]

    def value(self, node):
        # Drill down to the single matched alternative and delegate.
        matched = node.children[0].children[0].children[0]
        return matched.value()
def add_commands(
        self,
        context=None,
        mapping=None,
        extras=None,
        defaults=None,
        ccr=True,
        top_level=False,
        weight=None,
):
    """Add a set of commands which can be recognised continuously.

    Keyword Arguments:
        context (Context) -- Context in which these commands will be active, if None, commands will be global (default: None)
        mapping (dict) -- Dictionary of rule specs to dragonfly Actions (default: None)
        extras (list) -- Extras which will be available for these commands (default: None)
        defaults (dict) -- Defaults for the extras, if necessary (default: None)
        ccr (bool) -- Whether these commands should be recognised continuously (default: True)
        top_level (bool) -- Whether these commands our top level, referencing sequences of normal commands (default: False)
        weight (float) -- Kaldi only. The recognition weight assigned to a group of commands (default None (kaldi default is 1.0))
    """
    # Reject anything that is neither None nor a dragonfly Context.
    if not (context is None or isinstance(context, Context)):
        self._log.error(
            "Context must be None or dragonfly Context subclass, not '%s'",
            str(context))
        return
    full_extras = construct_extras(extras, defaults, self.global_extras,
                                   top_level)
    children = construct_commands(mapping, full_extras)
    if not children:
        # Nothing parsed from the mapping; nothing to register.
        return
    if context is not None:
        context = check_for_manuals(context, self.command_context_dictlist)
    if weight is not None:
        # Kaldi-only recognition weight, applied per command rule.
        for c in children:
            c.weight = float(weight)
    if not top_level:
        if not ccr:
            # Non-CCR commands get their own stand-alone grammar,
            # loaded immediately.
            rule = SimpleRule(element=Alternative(children),
                              context=context)
            grammar = Grammar("NonCCR" + self.counter())
            grammar.add_rule(rule)
            grammar.load()
            self.non_ccr_grammars.append(grammar)
        elif context is None:
            # Global CCR commands join the always-active core set.
            self.core_commands.extend(children)
        else:
            # Context-specific CCR commands are stored alongside their
            # context so matches can be recomputed later.
            self.context_commands.append(children)
            self.contexts.append(context)
            self._pad_matches()
    else:
        # Top-level commands always need a context; default to always-on.
        if context is None:
            context = TrueContext()
        self.top_level_commands.append(children)
        self.top_level_contexts.append(context)
class MyBasicRule(BasicRule):
    """Recognizes between one and five of the three test phrases in a row."""

    # Each literal's value appends its number via func() when spoken.
    element = Repetition(
        Alternative((
            Literal("test one", value=Function(lambda: func(1))),
            Literal("test two", value=Function(lambda: func(2))),
            Literal("test three", value=Function(lambda: func(3))),
        )),
        1,
        5,
    )
def test_alternative_parens(self):
    # A parenthesized alternative: a bare literal, or an optional "op"
    # prefix followed by an extra reference.
    expected = Alternative(
        [
            Literal(u"test"),
            Sequence([Optional(Literal(u"op")), extras["an_extra"]]),
        ]
    )
    check_parse_tree("( test |[op] <an_extra>)", expected)
def create(*from_rules, max_repetitions=50):
    """Build a CCRRule chaining the given rules, each utterance allowing
    1..max_repetitions commands in sequence."""
    refs = [RuleRef(rule=rule) for rule in from_rules]
    # Name encodes the participating rules, e.g. "CCR_foo_bar".
    name = "_".join(["CCR"] + [rule.name for rule in from_rules])
    sequence = Repetition(Alternative(refs), min=1, max=max_repetitions,
                          name="sequence")
    return CCRRule(name=name, spec=CCRRule.spec, extras=[sequence])
def test_bool_special_in_alternative(self):
    """The {test_special} flag must attach only to the marked child."""
    output = check_parse_tree(
        "foo | bar {test_special} | baz",
        Alternative([
            Literal(u"foo"),
            Literal(u"bar"),
            Literal(u"baz"),
        ]),
    )
    # Fix: use identity comparisons for None/True (PEP 8 / E711, E712)
    # instead of ==; the attribute is either absent or literally True.
    assert getattr(output.children[0], 'test_special', None) is None
    assert output.children[1].test_special is True
    assert getattr(output.children[2], 'test_special', None) is None
def __init__(self, choices, name=None, extras=None, default=None):
    """Build an Alternative whose children are Compound specs taken from
    the *choices* mapping.

    Keys are spoken-form specs; a callable value becomes the child's
    value_func, any other value becomes its fixed value.
    """
    # Argument type checking.
    assert name is None or isinstance(name, basestring)
    assert isinstance(choices, dict)
    for key in choices:
        assert isinstance(key, basestring)

    self._choices = choices
    self._extras = extras

    # One Compound child per spoken-form key.
    children = []
    for spec, outcome in choices.iteritems():
        if callable(outcome):
            children.append(
                Compound(spec=spec, value_func=outcome, extras=extras))
        else:
            children.append(
                Compound(spec=spec, value=outcome, extras=extras))

    # Initialize super class.
    Alternative.__init__(self, children=children, name=name,
                         default=default)
def __init__(self, exported):
    """A rule matching one word: an abbreviation, a special word, or
    free dictation."""
    word_element = Alternative(name="word", children=[
        RuleRef(AbbreviationRule(False)),
        RuleRef(SpecialWordRule(False)),
        Dictation(),
    ])
    CompoundRule.__init__(self,
                          name=get_unique_rule_name(),
                          extras=[word_element],
                          exported=exported)
def __init__(self, name, context, rules):
    """Set up a repeat rule: a sequence of 1-6 of the given rules.

    NOTE(review): ``self.extras.append`` runs before the superclass
    __init__; if ``extras`` is a class-level list, every instantiation
    appends another "sequence" element to the shared list — confirm
    that ``extras`` is per-instance.
    """
    self._name = name
    rule_refs = list()
    for rule_ in rules:
        rule_refs.append(RuleRef(rule=rule_))
    # max should not be greater than 7
    self.extras.append(
        Repetition(Alternative(rule_refs), min=1, max=6, name="sequence"))
    super(RepeatRule, self).__init__(context=context)
def _add_repeater(self, matches, top_level_matches):
    """
    Takes a tuple of bools, corresponding to which contexts were matched,
    and loads a SubGrammar containing a RepeatRule with all relevant
    commands in.
    """
    # Collect commands from every matched context plus the always-active
    # core commands.
    matched_commands = []
    for command_list in [
            l for (l, b) in zip(self.context_commands, matches) if b
    ]:
        matched_commands.extend(command_list)
    matched_commands.extend(self.core_commands)
    if not matched_commands:
        return
    alts = Alternative(matched_commands)
    # The repeater lets any matched command be chained in one utterance,
    # up to MAX_REPETITIONS times.
    repeater = SimpleRule(
        name="Repeater%s" % self.counter(),
        element=Repetition(alts, min=1, max=self.MAX_REPETITIONS),
        context=None,
    )
    subgrammar = SubGrammar("SG%s" % self.counter())
    subgrammar.add_rule(repeater)
    if top_level_matches:
        # Top-level commands reference the same alternative of matched
        # commands (``alts``) for their embedded sequences.
        command_lists = [
            l for (l, b) in zip(self.top_level_commands, top_level_matches)
            if b
        ]
        matched_top_level_commands = process_top_level_commands(
            command_lists, alts)
        top_level_rules = SimpleRule(
            name="CommandsRef%s" % self.counter(),
            element=Alternative(matched_top_level_commands),
        )
        subgrammar.add_rule(top_level_rules)
    subgrammar.load()
    # Cache the loaded subgrammar keyed by the context-match tuple.
    self.grammar_map[matches] = subgrammar
def _get_dragonfly_rule_element(target, parser, depth=0):
    """Recursively convert parser rule *target* into a dragonfly element,
    memoizing every constructed element in the module-level RULES cache.

    Raises ValueError when *target* is not a known parser rule.
    """
    # Fix: dropped the needless ``global RULES`` statement — only item
    # assignment is performed, which never rebinds the module name.
    if target not in parser.rules:
        # Fix: ValueError is more precise than a bare Exception and is
        # still caught by any existing ``except Exception`` handlers.
        raise ValueError("Target {} not in parser rules".format(target))

    # If already present in RULES, return the cached element.
    if target in RULES:
        return RULES[target]

    # Get the rule
    rule = parser.rules[target]

    # Each option becomes one alternative; each option's conjuncts form
    # a sequence (or a single element when there is only one conjunct).
    option_alternative_list = []
    for opt in rule.options:
        conjunctions_list = []
        for conj in opt.conjuncts:
            # If the conjunction is already present, reuse it.
            if conj.name in RULES:
                conjunctions_list.append(RULES[conj.name])
                continue
            if conj.is_variable:
                # Variable: refers to another rule, go one level deeper.
                result = _get_dragonfly_rule_element(conj.name, parser,
                                                     depth + 1)
                if result:
                    conjunctions_list.append(result)
            else:
                # Plain word: cache a new literal for it.
                RULES[conj.name] = Literal(conj.name)
                conjunctions_list.append(RULES[conj.name])
                logger.debug("Adding literal rule: %s", conj.name)
        if len(conjunctions_list) == 1:
            option_alternative_list.append(conjunctions_list[0])
        else:
            option_alternative_list.append(Sequence(conjunctions_list))

    if len(option_alternative_list) == 1:
        RULES[target] = option_alternative_list[0]
    else:
        RULES[target] = Alternative(option_alternative_list)
        logger.debug("Adding alternative rule: %s", target)
    return RULES[target]
def makePrefixedCompoundRule(prefix, mappingRule):
    """Factory: build a CompoundRule class recognizing *prefix* followed
    by a repeated sequence (1-16) of the given mapping rule."""
    sequence_element = Repetition(
        Alternative([RuleRef(rule=mappingRule())]),
        min=1, max=16, name="mySequence")

    class PrefixCompoundRule(CompoundRule):
        spec = prefix + " <mySequence>"
        extras = [sequence_element]
        defaults = {}

        def _process_recognition(self, node, extras):
            # Execute each recognized action in order, then release any
            # held modifier keys.
            for action in extras["mySequence"]:
                action.execute()
            release.execute()

    # Give the generated class a distinguishable name for debugging.
    dynamicName = "Prefix" + prefix + "Rule"
    PrefixCompoundRule.__name__ = dynamicName
    PrefixCompoundRule.__qualname__ = dynamicName
    return PrefixCompoundRule
def __init__(self, parameter):
    """Build the spoken form for one configurable parameter.

    Supported parameter types: "dictation" (free-form value),
    "alternative" (choose from a dict or list of values) and
    "switch" (boolean enable).
    """
    self.__parameter = parameter
    spec = None
    extras = []
    kind = parameter["type"]
    if kind == "dictation":
        spec = "set " + parameter["name"] + " <value>"
        extras = [Dictation("value")]
    elif kind == "alternative":
        values = parameter["values"]
        if isinstance(values, dict):
            # Dict maps spoken forms to values directly.
            extras = [Choice("value", values)]
        elif isinstance(values, list):
            # List items are both the spoken form and the value.
            extras = [Alternative(
                name="value",
                children=[Compound(spec=word, value=word)
                          for word in values])]
        spec = "set " + parameter["name"] + " <value>"
    elif kind == "switch":
        spec = "enable " + parameter["name"]
    CompoundRule.__init__(self, name=get_unique_rule_name(), spec=spec,
                          extras=extras)
def __init__(self):
    """Assemble the git rule: optional cancel, a git command with
    options, optional enter, optional trailing cancel."""
    command_children = git_commands.all_commands(GitCommandRuleBuilder)
    cancel_rule = MappingRule(
        name='cancel',
        mapping={'cancel [last]': Key('c-c')},
    )
    enter_rule = MappingRule(
        name='enter',
        mapping={'enter': Key('enter')},
    )
    super(GitRule, self).__init__(
        spec='[<cancel>] git [<command_with_options>] [<enter>] [<cancel>]',
        extras=[
            RuleRef(name='cancel', rule=cancel_rule),
            Alternative(
                name='command_with_options',
                children=command_children,
            ),
            RuleRef(name='enter', rule=enter_rule),
        ],
    )
def test_basic_rule(self):
    """ Verify that BasicRules can be loaded and recognized correctly. """
    # Each literal's value appends its number to ``test`` when spoken.
    test = []
    func = lambda x: test.append(x)

    # Test using BasicRule directly.
    rule = BasicRule(element=Repetition(
        Alternative((
            Literal("test one", value=Function(lambda: func(1))),
            Literal("test two", value=Function(lambda: func(2))),
            Literal("test three", value=Function(lambda: func(3))),
        )), 1, 5
    ))
    self.add_rule(rule)
    self.recognize("test one test two test three".split())
    assert test == [1, 2, 3], "BasicRule was not processed correctly"

    # Remove the rule and clear the test list.
    self.grammar.remove_rule(rule)
    del test[:]

    # Test using a sub-class of BasicRule.
    class MyBasicRule(BasicRule):
        element = Repetition(
            Alternative((
                Literal("test one", value=Function(lambda: func(1))),
                Literal("test two", value=Function(lambda: func(2))),
                Literal("test three", value=Function(lambda: func(3))),
            )), 1, 5
        )
    self.add_rule(MyBasicRule())
    self.recognize("test one test two test three".split())
    assert test == [1, 2, 3], "BasicRule was not processed correctly"
import keyboard_justin as keyboard #import words #import programs release = Key("shift:up, ctrl:up, alt:up") alternatives = [] alternatives.append(RuleRef(rule=keyboard.KeystrokeRule())) """ alternatives.append(RuleRef(rule=words.FormatRule())) alternatives.append(RuleRef(rule=words.ReFormatRule())) alternatives.append(RuleRef(rule=words.NopeFormatRule())) alternatives.append(RuleRef(rule=programs.ProgramsRule())) """ root_action = Alternative(alternatives) sequence = Repetition(root_action, min=1, max=16, name="sequence") class RepeatRule(CompoundRule): # Here we define this rule's spoken-form and special elements. spec = "<sequence> [[[and] repeat [that]] <n> times]" extras = [ sequence, # Sequence of actions defined above. IntegerRef("n", 1, 100), # Times to repeat the sequence. ] defaults = { "n": 1, # Default repeat count. }
# ugly hack to get around tComment's not allowing ranges with gcc. value = node.children[0].children[0].children[0].children[1].value( ) if value in (1, '1', None): return Text('gcc') else: return Text('gc%dj' % (int(value) - 1)) else: return value ruleOperatorSelfApplication = RuleRef(OperatorSelfApplication(), name='OperatorSelfApplication') ruleOperatorApplication = Alternative( [ruleOperatorApplicationMotion, ruleOperatorSelfApplication], name='OperatorApplication') # **************************************************************************** # COMMANDS # **************************************************************************** class PrimitiveCommand(MappingRule): mapping = { 'flax': Key('X'), 'nix': Key('x'), 'undo': Key('u'), 'pesto': Key('P'), 'post': Key('p'), 'ditto': Text('.'),
"<format_type> <dictation>": Function(format_text), "control <letter>": Key("c-%(letter)s"), "equals": Text(" = "), } extras = [ letter_choice("letter"), formatting_choice("format_type"), IntegerRef("n", 1, 100), Dictation("dictation"), ] letter = RuleRef(rule=LetterRule(), name='letter') letter_sequence = Repetition(Alternative([letter]), min=1, max=12, name="letter_sequence") class LetterSequenceRule(CompoundRule): spec = "<letter_sequence>" extras = [letter_sequence] def _process_recognition(self, node, extras): letter_sequence = extras["letter_sequence"] for letter in letter_sequence: letter.execute() Key("shift:up, ctrl:up").execute()
class AccessibilityRule(MergeRule):
    """Voice commands backed by the accessibility API: cursor movement
    and text selection/replacement driven by spoken text queries."""

    pronunciation = "accessibility"

    mapping = {
        # Accessibility API Mappings
        "go before <text_position_query>":
            Function(lambda text_position_query: accessibility.move_cursor(
                text_position_query, CursorPosition.BEFORE)),
        "go after <text_position_query>":
            Function(lambda text_position_query: accessibility.move_cursor(
                text_position_query, CursorPosition.AFTER)),
        "words <text_query>": Function(accessibility.select_text),
        "words <text_query> delete": Function(
            lambda text_query: accessibility.replace_text(text_query, "")),
        "replace <text_query> with <replacement>":
            Function(accessibility.replace_text),
    }

    extras = [
        Dictation("replacement"),
        # A full text-range query: optional start anchor, optional
        # "through" connector, and a required end anchor.
        Compound(
            name="text_query",
            spec=
            ("[[([<start_phrase>] <start_relative_position> <start_relative_phrase>|<start_phrase>)] <through>] "
             "([<end_phrase>] <end_relative_position> <end_relative_phrase>|<end_phrase>)"
             ),
            extras=[
                Dictation("start_phrase", default=""),
                Alternative(
                    [Literal("before"), Literal("after")],
                    name="start_relative_position"),
                Dictation("start_relative_phrase", default=""),
                Literal("through", "through", value=True, default=False),
                Dictation("end_phrase", default=""),
                Alternative(
                    [Literal("before"), Literal("after")],
                    name="end_relative_position"),
                Dictation("end_relative_phrase", default="")
            ],
            # Assemble a TextQuery from whichever optional pieces were
            # actually spoken; relative positions map onto the
            # CursorPosition enum by upper-cased name.
            value_func=lambda node, extras: TextQuery(
                start_phrase=str(extras["start_phrase"]),
                start_relative_position=(
                    CursorPosition[extras["start_relative_position"].upper()]
                    if "start_relative_position" in extras else None),
                start_relative_phrase=str(extras["start_relative_phrase"]),
                through=extras["through"],
                end_phrase=str(extras["end_phrase"]),
                end_relative_position=(
                    CursorPosition[extras["end_relative_position"].upper()]
                    if "end_relative_position" in extras else None),
                end_relative_phrase=str(extras["end_relative_phrase"]))),
        # A single position query: a phrase plus an optional
        # before/after anchor phrase.
        Compound(name="text_position_query",
                 spec="<phrase> [<relative_position> <relative_phrase>]",
                 extras=[
                     Dictation("phrase", default=""),
                     Alternative([Literal("before"), Literal("after")],
                                 name="relative_position"),
                     Dictation("relative_phrase", default="")
                 ],
                 value_func=lambda node, extras: TextQuery(
                     end_phrase=str(extras["phrase"]),
                     end_relative_position=(
                         CursorPosition[extras["relative_position"].upper()]
                         if "relative_position" in extras else None),
                     end_relative_phrase=str(extras["relative_phrase"])))
    ]
return float(value) / self.sections fraction_rule = FractionRule() # --------------------------------------------------------------------------- horz_left = Compound(config.lang.left, name="horz", value=0.0) horz_right = Compound(config.lang.right, name="horz", value=1.0) vert_top = Compound(config.lang.top, name="vert", value=0.0) vert_bottom = Compound(config.lang.bottom, name="vert", value=1.0) horz_frac = RuleRef(fraction_rule, name="horz") vert_frac = RuleRef(fraction_rule, name="vert") horz_expl = Alternative([horz_left, horz_right], name="horz_expl") horz_all = Alternative([horz_expl, horz_frac], name="horz_all") vert_expl = Alternative([vert_top, vert_bottom], name="vert_expl") vert_all = Alternative([vert_expl, vert_frac], name="vert_all") # --------------------------------------------------------------------------- position_element = Compound( spec=" <horz_expl>" # 1D, horizontal " | <vert_expl>" # 1D, vertical " | <horz_all> <vert_all>" # 2D, horizontal-vertical " | <vert_expl> <horz_all>" # 2D, vertical-horizontal " | <vert_all> <horz_expl>", # 2D, vertical-horizontal extras=[horz_expl, horz_all, vert_expl, vert_all], )
RuleRef(rule=window_control.FocusTitleRule()), RuleRef(rule=window_control.TranslateRule()), RuleRef(rule=window_control.NudgeRule()), RuleRef(rule=window_control.ResizeRule()), RuleRef(rule=window_control.StretchRule()), ] try: # putstringcommands is not included in the pushed source, because it contains personal data. from ccr import putstringcommands if putstringcommands.PutStringCommandsRule: alternatives.append(RuleRef(rule=putstringcommands.PutStringCommandsRule())) except (ImportError, NameError) as e: pass single_action = Alternative(alternatives) sequence = Repetition(single_action, min=1, max=16, name="sequence") class ChainRule(CompoundRule): spec = "<sequence>" extras = [ sequence, # Sequence of actions defined above. ] # - node -- root node of the recognition parse tree. # - extras -- dict of the "extras" special elements: # . extras["sequence"] gives the sequence of actions. def _process_recognition(self, node, extras): # print "extras: " + str(extras) # print "sequence: " + str(sequence)
command_action_map = utils.combine_maps( utils.text_map_to_action_map(symbol_map), key_action_map) #------------------------------------------------------------------------------- # Lists which will be populated later via RPC. context_word_list = List("context_word_list", []) prefix_list = List("prefix_list", prefixes) suffix_list = List("suffix_list", suffixes) # Simple element map corresponding to keystroke action maps from earlier. keystroke_element_map = { "n": (IntegerRef(None, 1, 10), 1), "text": RuleWrap(None, Alternative([ Dictation(), DictListRef(None, char_dict_list), ])), "char": DictListRef(None, char_dict_list), "custom_text": RuleWrap( None, Alternative([ Dictation(), DictListRef(None, char_dict_list), ListRef(None, prefix_list), ListRef(None, suffix_list), ])), } #-------------------------------------------------------------------------------