def refresh(self, *args):
    '''args: spec, list of lists of strings'''
    # get mapping
    recorded_macros = utilities.load_toml_file(
        settings.SETTINGS["paths"]["RECORDED_MACROS_PATH"])
    if len(args) > 0:
        recorded_macros[args[0]] = args[1]
        utilities.save_toml_file(
            recorded_macros, settings.SETTINGS["paths"]["RECORDED_MACROS_PATH"])
    mapping = {}
    for spec in recorded_macros:
        sequences = recorded_macros[spec]
        delay = settings.SETTINGS["miscellaneous"][
            "history_playback_delay_secs"]
        play = Playback([(sequence, delay) for sequence in sequences])
        command = play * Repeat(
            extra="n") if spec.endswith("[times <n>]") else play
        mapping[spec] = R(command, rdescript="Recorded Macro: " + spec)
    mapping["record from history"] = R(Function(self.record_from_history),
                                       rdescript="Record From History")
    mapping["delete recorded macros"] = R(
        Function(self.delete_recorded_macros),
        rdescript="Delete Recorded Macros")
    # reload with new mapping
    self.reset(mapping)
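# Hedged sketch (illustrative values, not taken from the rule above): the TOML
# file behind RECORDED_MACROS_PATH is assumed to map each spoken spec to a list
# of recorded utterances, each utterance being a list of spoken words, e.g.
#   "insert header [times <n>]": [["insert", "comment"], ["caps", "title"]]
# Passing such a pair to refresh(spec, sequences) stores the new macro and
# rebuilds the mapping; only specs ending in "[times <n>]" get the
# Repeat(extra="n") multiplier shown above.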
class EclipseCCR(MergeRule):
    pronunciation = "eclipse jump"
    mwith = [Navigation().get_pronunciation()]

    mapping = {
        # Line Ops
        "configure":
            R(Paste(ec_con.analysis_chars) +
              Key("left:2/5, c-f/20, backslash, rbracket, enter") +
              Function(ec_con.analyze_for_configure),
              rdescript="Eclipse: Configure"),
        "jump in [<n>]":
            R(Key("c-f, a-o") + Paste(r"[\(\[\{\<]") + Function(ec_con.regex_on) +
              Key("enter:%(n)d/5, escape, right"),
              rdescript="Eclipse: Jump In"),
        "jump out [<n>]":
            R(Key("c-f, a-o") + Paste(r"[\)\]\}\>]") + Function(ec_con.regex_on) +
              Key("enter:%(n)d/5, escape, right"),
              rdescript="Eclipse: Jump Out"),
        "jump back [<n>]":
            R(Key("c-f/5, a-b") + Paste(r"[\)\]\}\>]") + Function(ec_con.regex_on) +
              Key("enter:%(n)d/5, escape, left"),
              rdescript="Eclipse: Jump Back"),
        "[go to] line <n>":
            R(Key("c-l") + Pause("50") + Text("%(n)d") + Key("enter") + Pause("50"),
              rdescript="Eclipse: Go To Line"),
        "shackle <n> [<back>]":
            R(Key("c-l") + Key("right, cs-left") +
              Function(ec_con.lines_relative),
              rdescript="Eclipse: Select Relative Lines"),
    }
    extras = [
        Dictation("text"),
        IntegerRefST("n", 1, 1000),
        Boolean("back"),
    ]
    defaults = {"n": 1, "back": False}
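# Hedged note: the "%(n)d" inside the Key specs above is substituted at
# execution time from the "n" extra (IntegerRefST("n", 1, 1000), default 1),
# so for example "jump in three" presses enter three times before escaping.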
def refresh(self, *args):
    '''args: spec, list of lists of strings'''
    # get mapping
    recorded_macros = utilities.load_toml_file(
        settings.SETTINGS["paths"]["RECORDED_MACROS_PATH"])
    if len(args) > 0:
        recorded_macros[args[0]] = args[1]
        utilities.save_toml_file(
            recorded_macros, settings.SETTINGS["paths"]["RECORDED_MACROS_PATH"])
    mapping = {}
    for spec in recorded_macros:
        # Create a copy of the string without Unicode characters.
        ascii_str = str(spec)
        sequences = recorded_macros[spec]
        delay = settings.SETTINGS["miscellaneous"]["history_playback_delay_secs"]
        # It appears that the associative string (ascii_str) must be ascii,
        # but the sequences within Playback must be Unicode.
        mapping[ascii_str] = R(
            Playback([(sequence, delay) for sequence in sequences]),
            rdescript="Recorded Macro: " + ascii_str)
    mapping["record from history"] = R(
        Function(self.record_from_history), rdescript="Record From History")
    mapping["delete recorded macros"] = R(
        Function(self.delete_recorded_macros),
        rdescript="Delete Recorded Macros")
    # reload with new mapping
    self.reset(mapping)
def refresh(self, *args):
    '''args: spec, text'''
    aliases = utilities.load_toml_file(
        settings.SETTINGS["paths"]["ALIAS_PATH"])
    if Alias.toml_path not in aliases:
        aliases[Alias.toml_path] = {}
    if len(args) > 0:
        aliases[Alias.toml_path][args[0]] = args[1]
        utilities.save_toml_file(aliases,
                                 settings.SETTINGS["paths"]["ALIAS_PATH"])
    mapping = {}
    for spec in aliases[Alias.toml_path]:
        mapping[spec] = R(
            Function(context.paste_string_without_altering_clipboard,
                     content=str(aliases[Alias.toml_path][spec])))
        # R(
        #     Text(str(aliases[Alias.toml_path][spec])),
        #     rdescript="Alias: " + spec)
    mapping["alias <s>"] = R(Function(lambda s: self.alias(s)),
                             rdescript="Create Alias")
    mapping["delete aliases"] = R(
        Function(lambda: delete_all(self, Alias.toml_path)),
        rdescript="Delete Aliases")
    self.reset(mapping)
def test_seeker_defaulting_and_chaining(self):
    '''this action makes the first seeker default'''
    action = NullAction(rspec="clean")
    action.set_nexus(self.nexus)
    alt = MockAlternative(u"my", u"spoken", u"words")
    sira = StackItemRegisteredAction(action, {"_node": alt})
    self.nexus.state.add(sira)
    #
    mutable_integer = {"value": 0}

    def increment():
        mutable_integer["value"] += 1

    '''make backward seekers'''
    back_seeker = ContextSeeker(
        back=[L(S(["def"], Function(lambda: None)),
                S(["abc"], Function(increment)))],
        rspec="abc")
    back_seeker.set_nexus(self.nexus)
    '''create backward seeker stack items'''
    stack_seeker1 = StackItemSeeker(back_seeker, {"_node": alt})
    stack_seeker2 = StackItemSeeker(back_seeker, {"_node": alt})
    '''add one'''
    self.nexus.state.add(stack_seeker1)
    '''at this point, the first seeker should have defaulted and done nothing'''
    self.assertEqual(mutable_integer["value"], 0)
    self.nexus.state.add(stack_seeker2)
    '''the second context seeker should have been triggered by the first,
    incrementing the value'''
    self.assertEqual(mutable_integer["value"], 1)
def _deserialize(self):
    mapping = {}
    recorded_macros = self._config.get_copy()
    for spec in recorded_macros:
        sequences = recorded_macros[spec]
        delay = settings.settings(
            ["miscellaneous", "history_playback_delay_secs"])
        # The spec used as the mapping key must be ascii, but the sequences
        # within Playback must be Unicode.
        mapping[spec] = R(
            Playback([(sequence, delay) for sequence in sequences]),
            rdescript="Recorded Macro: " + spec) * Repeat(extra="n")
    mapping["record from history"] = R(
        Function(lambda: self._record_from_history()),
        rdescript="Record From History")
    mapping["delete recorded macros"] = R(
        Function(lambda: self._delete_recorded_macros()),
        rdescript="Delete Recorded Macros")
    self._smr_mapping = mapping
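# Hedged note: every entry above is multiplied by Repeat(extra="n"), so the
# rule that ultimately receives _smr_mapping is assumed to declare an integer
# extra named "n" with a default of 1 (e.g. IntegerRefST("n", 1, 50),
# defaults={"n": 1}, as other rules in this section do); without "n" in the
# recognition data, Repeat cannot resolve its repeat factor.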
def test_asynchronous_finisher(self):
    '''make termination action'''
    termination = NullAction(rspec="kill")
    termination.set_nexus(self.nexus)
    alt = MockAlternative(u"my", u"spoken", u"words")
    sira = StackItemRegisteredAction(termination, {"_node": alt})
    '''setup function for asynchronous finisher'''
    mutable_integer = {"value": 0}

    def increment():
        mutable_integer["value"] += 1

    #
    '''make asynchronous action'''
    asynchronous = AsynchronousAction([L(S(["kill"], lambda: None))],
                                      blocking=False,
                                      finisher=Function(increment))
    asynchronous.set_nexus(self.nexus)
    '''make StackItemAsynchronous'''
    sia1 = StackItemAsynchronous(asynchronous, {"_node": alt})
    '''add it'''
    self.nexus.state.add(sia1)
    #
    self.nexus.state.add(sira)
    '''finisher should be executed when asynchronous finishes'''
    self.assertEqual(mutable_integer["value"], 1)
def refresh(self, *args):
    '''args: spec, list of lists of strings'''
    # get mapping
    recorded_macros = utilities.load_json_file(
        settings.SETTINGS["paths"]["RECORDED_MACROS_PATH"])
    if len(args) > 0:
        recorded_macros[args[0]] = args[1]
        utilities.save_json_file(
            recorded_macros, settings.SETTINGS["paths"]["RECORDED_MACROS_PATH"])
    mapping = {}
    for spec in recorded_macros:
        sequences = recorded_macros[spec]
        mapping[spec] = R(
            Playback([(sequence, 0.0) for sequence in sequences]) * Repeat(extra="n"),
            rdescript="Recorded Macro: " + spec)
    mapping["record from history"] = R(Function(self.record_from_history),
                                       rdescript="Record From History")
    mapping["delete recorded macros"] = R(Function(self.delete_recorded_macros),
                                          rdescript="Delete Recorded Macros")
    # reload with new mapping
    self.reset(mapping)
def __init__(self, rspec="default", rdescript="unnamed command (RA)", show=False):
    RegisteredAction.__init__(
        self,
        Function(lambda: None),
        rspec=rspec,
        rdescript=rdescript,
        rundo=None,
        show=show)
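# NullAction wraps a no-op Function so it can be registered purely for its
# rspec: the tests in this section use it as a bare trigger word ("kill",
# "cancel", "clean", ...) for seekers and asynchronous actions without
# producing any side effect of its own.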
def refresh(self, *args):
    aliases = utilities.load_json_file(
        settings.SETTINGS["paths"]["ALIAS_PATH"])
    if ChainAlias.json_path not in aliases:
        aliases[ChainAlias.json_path] = {}
    if len(args) > 0 and args[0] != "":
        aliases[ChainAlias.json_path][args[0]] = args[1]
        utilities.save_json_file(aliases,
                                 settings.SETTINGS["paths"]["ALIAS_PATH"])
    mapping = {}
    for spec in aliases[ChainAlias.json_path]:
        mapping[spec] = R(
            Text(str(aliases[ChainAlias.json_path][spec])),
            rdescript="Chain Alias: " + spec)
    mapping["chain alias"] = R(
        Function(self.chain_alias), rdescript="Create Chainable Alias")
    mapping["delete chain aliases"] = R(
        Function(lambda: delete_all(self, ChainAlias.json_path)),
        rdescript="Delete Aliases")
    self.reset(mapping)
def test_blocking(self):
    '''
    Tests:
    1 - successful termination (queued actions execute immediately)
    2 - unsuccessful termination (queued actions are dropped)
    3 - cancellation (queued actions are dropped)
    '''
    for i in range(0, 3):
        '''make fake AsynchronousAction'''
        context_set = S(["cancel", "words"], NullAction())
        context_level = L(context_set)
        aa1 = AsynchronousAction([context_level], blocking=True)  # turn blocking on
        aa1.set_nexus(self.nexus)
        '''make fake StackItemAsynchronous'''
        alt = MockAlternative(u"run", u"blocker")
        sia1 = StackItemAsynchronous(
            aa1, {"_node": alt})  # the dictionary is fake Dragonfly data
        '''add it'''
        self.nexus.state.add(sia1)

        '''blocked function'''
        mutable_integer = {"value": 0}

        def increment():
            mutable_integer["value"] += 1

        '''make fake incrementing RegisteredAction'''
        inc = R(Function(increment), rspec="inc")
        inc.set_nexus(self.nexus)
        '''make fake StackItemRegisteredAction'''
        alt2 = MockAlternative(u"my", u"spoken", u"words")
        sira1 = StackItemRegisteredAction(inc, {"_node": alt2})
        '''add it'''
        self.nexus.state.add(sira1)
        '''incrementing should be blocked at this point'''
        self.assertEqual(mutable_integer["value"], 0)

        if i == 0:
            '''incrementing should happen at the moment of unblocking'''
            self.nexus.state.terminate_asynchronous(True)
            self.assertEqual(mutable_integer["value"], 1)
        elif i == 1:
            '''incrementing gets dropped'''
            self.nexus.state.terminate_asynchronous(False)
            self.assertEqual(mutable_integer["value"], 0)
        elif i == 2:
            '''make fake canceling RegisteredAction'''
            cancel = NullAction(rspec="cancel")
            cancel.set_nexus(self.nexus)
            '''make fake StackItemRegisteredAction'''
            alt3 = MockAlternative(u"my", u"cancel", u"words")
            sira2 = StackItemRegisteredAction(cancel, {"_node": alt3})
            '''add it'''
            self.nexus.state.add(sira2)
            '''incrementing gets dropped'''
            self.assertEqual(mutable_integer["value"], 0)
def refresh(self, *args):
    '''args: spec, text'''
    aliases = utilities.load_json_file(
        settings.SETTINGS["paths"]["ALIAS_PATH"])
    if Alias.json_path not in aliases:
        aliases[Alias.json_path] = {}
    if len(args) > 0:
        aliases[Alias.json_path][args[0]] = args[1]
        utilities.save_json_file(aliases,
                                 settings.SETTINGS["paths"]["ALIAS_PATH"])
    mapping = {}
    for spec in aliases[Alias.json_path]:
        mapping[spec] = R(
            Text(str(aliases[Alias.json_path][spec])),
            rdescript="Alias: " + spec)
    mapping["alias <s>"] = R(
        Function(lambda s: self.alias(s)), rdescript="Create Alias")
    mapping["delete aliases"] = R(
        Function(lambda: delete_all(self, Alias.json_path)),
        rdescript="Delete Aliases")
    self.reset(mapping)
def __init__(self, nexus):
    self.nexus = nexus
    MergeRule.__init__(
        self,
        name="repeat that",
        extras=[IntegerRefST("n", 1, 50)],
        defaults={"n": 1},
        mapping={
            "again (<n> [(times|time)] | do)":
                R(Function(lambda n: self._create_asynchronous(n)), show=False)
        })
class EclipseRule(MergeRule):
    pronunciation = "eclipse"

    mapping = {
        "prior tab [<n>]":
            R(Key("cs-f6"), rdescript="Eclipse: Previous Tab") * Repeat(extra="n"),
        # these two must be set up in the eclipse preferences
        "next tab [<n>]":
            R(Key("c-f6"), rdescript="Eclipse: Next Tab") * Repeat(extra="n"),
        "open resource":
            R(Key("cs-r"), rdescript="Eclipse: Open Resource"),
        "open type":
            R(Key("cs-t"), rdescript="Eclipse: Open Type"),
        "jump to source":
            R(Key("f3"), rdescript="Eclipse: Jump To Source"),
        "editor select":
            R(Key("c-e"), rdescript="Eclipse: Editor Select"),
        "step over [<n>]":
            R(Key("f6/50") * Repeat(extra="n"), rdescript="Eclipse: Step Over"),
        "step into":
            R(Key("f5"), rdescript="Eclipse: Step Into"),
        "step out [of]":
            R(Key("f7"), rdescript="Eclipse: Step Out"),
        "resume":
            R(Key("f8"), rdescript="Eclipse: Resume"),
        "(debug | run) last":
            R(Key("f11"), rdescript="Eclipse: Run Last"),
        "mark occurrences":
            R(Key("as-o"), rdescript="Eclipse: Mark Occurrences"),
        # "terminate" changes to the settings for this hotkey: (when: in dialogs and windows)
        "terminate":
            R(Key("c-f2"), rdescript="Eclipse: Terminate Running Program"),
        "refractor symbol":
            R(Key("sa-r"), rdescript="Eclipse: Re-Factor Symbol"),
        "symbol next [<n>]":
            R(Key("c-k"), rdescript="Eclipse: Symbol Next") * Repeat(extra="n"),
        "symbol prior [<n>]":
            R(Key("cs-k"), rdescript="Eclipse: Symbol Prior") * Repeat(extra="n"),
        "format code":
            R(Key("cs-f"), rdescript="Eclipse: Format Code"),
        "do imports":
            R(Key("cs-o"), rdescript="Eclipse: Do Imports"),
        "comment line":
            R(Key("c-slash"), rdescript="Eclipse: Comment Line"),
        "build it":
            R(Key("c-b"), rdescript="Eclipse: Build"),
        "split view horizontal":
            R(Key("cs-underscore"), rdescript="Eclipse: Split View (H)"),
        "split view vertical":
            R(Key("cs-lbrace"), rdescript="Eclipse: Split View (V)"),

        # Line Ops
        "find everywhere":
            R(Key("ca-g"), rdescript="Eclipse: Search Project"),
        "find word <text> [<back>] [<go>]":
            R(Key("c-f") + Function(ec_con.regex_off) + Function(ec_con.find),
              rdescript="Eclipse: Find Word"),
        "find regex <text> [<back>] [<go>]":
            R(Key("c-f") + Function(ec_con.regex_on) + Function(ec_con.find),
              rdescript="Eclipse: Find Regex"),
        "find <a> [<b> [<c>]] [<back>] [<go>]":
            R(Key("c-f") + Function(ec_con.find),
              rdescript="Eclipse: Find Alpha"),
        "find <punctuation> [<back>] [<go>]":
            R(Key("c-f") + Function(ec_con.find),
              rdescript="Eclipse: Find Character(s)"),
    }

    extras = [
        Dictation("text"),
        Dictation("mim"),
        IntegerRefST("n", 1, 3000),
        alphanumeric.get_alphabet_choice("a"),
        alphanumeric.get_alphabet_choice("b"),
        alphanumeric.get_alphabet_choice("c"),
        Choice("punctuation", {"hash tag": "#"}),
        Boolean("back"),
        Boolean("go"),
    ]
    defaults = {
        "n": 1,
        "mim": "",
        "a": None,
        "b": None,
        "c": None,
        "punctuation": None,
        "back": False,
        "go": False
    }
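# Hedged note on the two repeat styles above: "prior tab" repeats the whole
# registered action (R(...) * Repeat(extra="n")), while "step over" repeats
# only the inner keystroke (Key("f6/50") * Repeat(extra="n")); both take their
# count from the "n" extra, which defaults to 1.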
def test_seeker_consume(self):
    '''seeker actions have the option to not/consume their triggers;
    that is, the trigger actions do not execute and only act as triggers'''
    mutable_string = {"value": ""}

    def append_a():
        mutable_string["value"] += "a"

    def append_b():
        mutable_string["value"] += "b"

    def append_c():
        mutable_string["value"] += "c"

    def append_d():
        mutable_string["value"] += "d"

    def append_e():
        mutable_string["value"] += "e"

    def append_f():
        mutable_string["value"] += "f"

    '''create context levels'''
    set_1_1 = S(["arch"], append_a)
    set_1_2 = S(["bell"], append_b)
    set_1_2.consume = False
    level_1 = L(set_1_1, set_1_2)
    set_2_1 = S(["cellar"], append_c)
    set_2_2 = S(["door"], append_d)
    level_2 = L(set_2_1, set_2_2)
    set_3_1 = S(["echo"], append_e)
    set_3_2 = S(["frame"], append_f)
    set_3_2.consume = False
    level_3 = L(set_3_1, set_3_2)
    '''create context seeker'''
    levels = [level_1, level_2, level_3]
    seeker = ContextSeeker(forward=levels)
    seeker.set_nexus(self.nexus)
    '''create context seeker stack item'''
    alt = MockAlternative(u"my", u"spoken", u"words")
    stack_seeker = StackItemSeeker(seeker, {"_node": alt})
    '''add it'''
    self.nexus.state.add(stack_seeker)
    '''make 3 fake triggering RegisteredActions;
    the first and third do not consume their triggers'''
    trigger1 = RegisteredAction(Function(append_a), rspec="bell")
    trigger2 = RegisteredAction(Function(append_c), rspec="door")
    trigger3 = RegisteredAction(Function(append_e), rspec="frame")
    trigger1.set_nexus(self.nexus)
    trigger2.set_nexus(self.nexus)
    trigger3.set_nexus(self.nexus)
    '''make fake StackItemRegisteredActions'''
    alt2 = MockAlternative(u"my", u"spoken", u"words")
    sira1 = StackItemRegisteredAction(trigger1, {"_node": alt2})
    sira2 = StackItemRegisteredAction(trigger2, {"_node": alt2})
    sira3 = StackItemRegisteredAction(trigger3, {"_node": alt2})
    '''add them'''
    self.nexus.state.add(sira1)
    self.nexus.state.add(sira2)
    self.nexus.state.add(sira3)

    self.assertEqual(mutable_string["value"], "aebdf")
def test_non_ascii_function(self):
    """ Test handling of non-ASCII characters in Function action. """
    action = Function(lambda: u"é")
    expected = u"\\xe9" if PY2 else u"é"
    self.assertIn(expected, str(action))
def test_actions_cleaned(self):
    '''these test functions should stay in sync with
    the clean methods for each stack action'''

    def registered_is_clean(r):
        return r.dragonfly_data is None and r.base is None

    def seeker_is_clean(s):
        result = True
        levels = []
        if s.back is not None:
            levels += s.back
        if s.forward is not None:
            levels += s.forward
        for context_level in levels:
            result &= context_level.dragonfly_data is None
        return result

    def asynchronous_is_clean(a):
        return a.closure is None

    '''mock words being the same doesn't matter for this test, or most tests'''
    alt = MockAlternative(u"my", u"spoken", u"words")
    '''make fake NullActions'''
    action1 = NullAction(rspec="barkley")
    action2 = NullAction(rspec="gaiden")
    action3 = NullAction(rspec="is")
    action4 = NullAction(rspec="awesome")
    action1.set_nexus(self.nexus)
    action2.set_nexus(self.nexus)
    action3.set_nexus(self.nexus)
    action4.set_nexus(self.nexus)
    '''make fake StackItemRegisteredActions'''
    sira1 = StackItemRegisteredAction(action1, {"_node": alt})
    sira2 = StackItemRegisteredAction(action2, {"_node": alt})
    sira3 = StackItemRegisteredAction(action3, {"_node": alt})
    sira4 = StackItemRegisteredAction(action4, {"_node": alt})
    '''should not be clean before it's executed'''
    self.assertFalse(registered_is_clean(sira1))
    '''add first one for backward seeker'''
    self.nexus.state.add(sira1)
    '''should be clean as soon as it's executed'''
    self.assertTrue(registered_is_clean(sira1))

    '''make backward seeker'''
    back_seeker = ContextSeeker(
        back=[L(S(["minecraft"], Function(lambda: None)))])
    back_seeker.set_nexus(self.nexus)
    '''create backward seeker stack item'''
    stack_seeker = StackItemSeeker(back_seeker, {"_node": alt})
    '''add it'''
    self.nexus.state.add(stack_seeker)
    '''levels should be clean as soon as it's executed'''
    self.assertTrue(
        registered_is_clean(stack_seeker) and seeker_is_clean(stack_seeker))

    #
    '''make forward seeker'''
    forward_seeker = ContextSeeker(forward=[
        L(S(["cave"], Function(lambda: None))),
        L(S(["story"], Function(lambda: None)))
    ])
    forward_seeker.set_nexus(self.nexus)
    '''create context seeker stack item'''
    stack_seeker2 = StackItemSeeker(forward_seeker, {"_node": alt})
    '''add it'''
    self.nexus.state.add(stack_seeker2)
    self.nexus.state.add(sira2)
    '''levels should not be clean before seeker is executed'''
    self.assertFalse(
        registered_is_clean(stack_seeker2) or seeker_is_clean(stack_seeker2))
    self.nexus.state.add(sira3)
    '''levels should be clean as soon as it's executed'''
    self.assertTrue(
        registered_is_clean(stack_seeker2) and seeker_is_clean(stack_seeker2))

    #
    '''make asynchronous action'''
    asynchronous = AsynchronousAction(
        [L(S(["eternal", "daughter", "awesome"], lambda: None))],
        blocking=False)
    asynchronous.set_nexus(self.nexus)
    '''make StackItemAsynchronous'''
    sia1 = StackItemAsynchronous(asynchronous, {"_node": alt})
    '''add it'''
    self.nexus.state.add(sia1)
    '''closure should not be clean before asynchronous is executed'''
    self.assertFalse(
        registered_is_clean(sia1) or seeker_is_clean(sia1)
        or asynchronous_is_clean(sia1))
    self.nexus.state.add(sira4)
    '''closure should be clean after asynchronous is executed'''
    self.assertTrue(
        registered_is_clean(sia1) and seeker_is_clean(sia1)
        and asynchronous_is_clean(sia1))