def test_list_grammars(self):
    """ Verify that the 'list_grammars' RPC method works correctly. """
    # Build a grammar containing one rule of each relevant type so the
    # RPC result can be checked against known data.
    grammar = Grammar("list_grammars_test")
    grammar.add_rule(CompoundRule(name="compound", spec="testing",
                                  exported=True))
    grammar.add_rule(MappingRule(name="mapping", mapping={
        "command a": ActionBase(),
        "command b": ActionBase()
    }))
    grammar.add_rule(Rule(name="base", element=Literal("hello world"),
                          exported=False))
    grammar.load()

    response = self.send_request("list_grammars", [])
    expected_grammar_data = {
        "name": grammar.name, "enabled": True, "active": True, "rules": [
            {"name": "compound", "specs": ["testing"],
             "exported": True, "active": True},
            {"name": "mapping", "specs": ["command a", "command b"],
             "exported": True, "active": True},
            {"name": "base", "specs": ["hello world"],
             "exported": False, "active": True}
        ]
    }
    # The loaded grammar need not be the only one in the result; user
    # grammars may be loaded while dragonfly's tests run, so only check
    # membership.
    try:
        self.assertIn("result", response)
        self.assertIn(expected_grammar_data, response["result"])
    finally:
        grammar.unload()
def recognize(spec, choices_values, timeout): global RESULT RESULT = None grammar = Grammar("grammar") extras = [] for name, choices in choices_values.iteritems(): extras.append(Choice(name, dict((c,c) for c in choices))) Rule = type("Rule", (GrammarRule,),{"spec": spec, "extras": extras}) grammar.add_rule(Rule()) grammar.load() future = time.time() + timeout while time.time() < future: if RESULT is not None: break pythoncom.PumpWaitingMessages() time.sleep(.1) grammar.unload() print "RESULT:", RESULT return RESULT
def refresh(_NEXUS):
    """Tear down and rebuild the si/kuli grammar so newly added scripts
    become available on the fly, then make sure the server is running."""
    unload()
    global grammar
    grammar = Grammar("si/kuli")

    def refresh_sick_command():
        # Terminate the remote server first, then rebuild everything.
        server_proxy.terminate()
        refresh(_NEXUS)

    mapping = {
        "launch sick IDE": Function(launch_IDE),
        "launch sick server": Function(launch_server),
        "refresh sick you Lee": Function(refresh_sick_command),
        "sick shot": Key("cs-2"),
    }
    rule = MergeRule(name="sik", mapping=mapping)
    gfilter.run_on(rule)
    grammar.add_rule(rule)
    grammar.load()

    # Start server: if one is already running, the proxy connects without
    # a hitch; otherwise launch a fresh instance.
    try:
        start_server_proxy()
    except Exception:
        launch_server()
    seconds5 = 5
    control.nexus().timer.add_callback(server_proxy_timer_fn, seconds5)
def reload_grammars(): unload() global grammar grammar = Grammar("to rule them all") now = datetime.datetime.now() print "begun reloading at %s:%s" % (now.hour, now.minute) # reload module and re-add the rules imported from that module global GRAMMAR_IMPORTS for import_name in GRAMMAR_IMPORTS: try: reloader.reload(sys.modules[import_name]) import_rule = getattr(__import__(import_name, fromlist=["rules"]), "rules") grammar.add_rule(import_rule) print "Loaded module %s successfully" % import_name except RuntimeError as runtime_error: "There was an error in file %s" % import_name print runtime_error, '\n', '\n' except NameError as nameerror: "Forgot something in file %s?" % import_name print nameerror, '\n', '\n' grammar.add_rule(get_reloader_rules()) # for the "reload grammar module" code in get_reloader_rules grammar.load() print "reloaded all modules"
def test_no_hypothesis(self):
    """ Check that if something that doesn't match any rule is mimicked,
    nothing gets recognised. """
    func1 = self.get_test_function()
    func2 = self.get_test_function()

    class TestRule(MappingRule):
        mapping = {
            "testing": Function(func1),
            "<dictation>": Function(func2)
        }
        extras = [Dictation("dictation")]

    grammar = Grammar("test")
    grammar.add_rule(TestRule())
    grammar.load()
    self.assertTrue(grammar.loaded)

    # Neither None nor the empty string matches a rule, so both mimics
    # must fail without invoking either callback.
    for bad_input in (None, ""):
        self.assert_mimic_failure(bad_input)
        self.assert_test_function_called(func1, 0)
        self.assert_test_function_called(func2, 0)

    # A real utterance falls through to the dictation rule only.
    self.assert_mimic_success("hello")
    self.assert_test_function_called(func1, 0)
    self.assert_test_function_called(func2, 1)
def generate_commands(list_of_functions):
    """Replace the sikuli grammar with one voice command per function name;
    underscores in the name become spaces in the spoken form."""
    global server_proxy
    global grammar
    mapping = {}
    for fname in list_of_functions:
        spoken = fname.replace("_", " ")
        mapping[spoken] = Function(execute, fname=fname)
    # Swap the old grammar out for a freshly built one.
    grammar.unload()
    grammar = Grammar("sikuli")
    grammar.add_rule(MappingRule(mapping=mapping, name="sikuli server"))
    grammar.load()
def load():
    """Create and load the git grammar, active in terminal-like windows
    (via the aenea proxy context) or windows whose title mentions git."""
    global git_grammar
    proxy_context = ProxyAppContext(
        match='regex',
        app_id='(?i)(?:(?:DOS|CMD).*)|(?:.*(?:TERM|SHELL).*)',
    )
    context = aenea.wrappers.AeneaContext(proxy_context,
                                          AppContext(title='git'))
    git_grammar = Grammar('git', context=context)
    git_grammar.add_rule(GitRule())
    git_grammar.load()
def start(self, queue):
    """Load the passthrough grammar and pump COM messages forever,
    forwarding every text recognition to ``queue``."""
    grammar = Grammar("passthrough")
    rule = self.Passthrough()
    # Each recognised utterance is pushed onto the caller's queue.
    rule.textrecognition += queue.put
    grammar.add_rule(rule)
    grammar.load()
    # Message pump: never returns; recognition callbacks fire from here.
    while True:
        pythoncom.PumpWaitingMessages()
        time.sleep(.1)
def test_mimic(self):
    """ Verify that the 'mimic' RPC method works correctly. """
    grammar = Grammar("mimic_test")
    grammar.add_rule(CompoundRule(name="compound", spec="testing mimicry",
                                  exported=True))
    grammar.load()

    # Make the grammar exclusive. The sapi5shared engine apparently
    # requires this for mimic() to work, making the method kind of
    # useless. This does not apply to sapi5inproc.
    grammar.set_exclusiveness(True)
    response = self.send_request("mimic", ["testing mimicry"])
    try:
        self.assertIn("result", response)
        self.assertEqual(response["result"], True)
    finally:
        grammar.set_exclusiveness(False)
        grammar.unload()
def test_single_dictation(self):
    """ Test that the engine can handle a dragonfly rule using a Dictation
    element. """
    callback = self.get_test_function()

    class TestRule(MappingRule):
        mapping = {"<dictation>": Function(callback)}
        extras = [Dictation("dictation")]

    grammar = Grammar("test")
    grammar.add_rule(TestRule())
    grammar.load()
    self.assertTrue(grammar.loaded)

    # Single-word dictation is recognised and counted.
    self.assert_mimic_success("hello")
    self.assert_test_function_called(callback, 1)

    # The rule keeps working on a second recognition...
    self.assert_mimic_success("hello")
    self.assert_test_function_called(callback, 2)

    # ...and with multi-word dictation.
    self.assert_mimic_success("hello world")
    self.assert_test_function_called(callback, 3)
# Fragment: tail of the HTML tag mapping (the rule/Choice opens earlier in
# the file). Each spoken form maps to the tag text it should produce.
        "title": "title",
        # "table row" and "TR" mean the same thing: both produce a <tr>.
        "table row": "tr",
        "TR": "tr",
        "track": "track",
        "unordered list": "ul",
        "variable": "var",
        "video": "video",
        "label": "label",
    }
    )
]

# Code for initial setup of the HTML grammar.
# The bootstrap grammar holds only the enabler command.
htmlBootstrap = Grammar("html bootstrap")  # Create a grammar to contain the command rule.
htmlBootstrap.add_rule(HTMLEnabler())
htmlBootstrap.load()

htmlGrammar = Grammar("html grammar")
htmlGrammar.add_rule(HTMLTestRule())
htmlGrammar.add_rule(HTMLDisabler())
htmlGrammar.add_rule(HTMLTags())
htmlGrammar.load()
# Start disabled; HTMLEnabler (in the bootstrap grammar) enables it on demand.
htmlGrammar.disable()


# Unload function which will be called by natlink at unload time.
def unload():
    global htmlGrammar
    if htmlGrammar:
        htmlGrammar.unload()
    htmlGrammar = None
    # NOTE(review): htmlBootstrap is never unloaded here -- confirm whether
    # it should also be unloaded when this module is unloaded.
# Fragment: tail of a MappingRule(...) call that opens earlier in the file
# (presumably the normal-mode rule). Each mode gets its own one-command rule.
    name="normal_vim",
    mapping = {
        "happy": Function(normal)
    }
)
i_rules = MappingRule(
    name="insert_vim",
    mapping = {
        "happy": Function(insert)
    }
)
v_rules = MappingRule(
    name="visual_vim",
    mapping = {
        "happy": Function(visual)
    }
)

# One grammar per vim mode, each loaded independently.
normal_grammar.add_rule(n_rules)
normal_grammar.load()
insert_grammar.add_rule(i_rules)
insert_grammar.load()
visual_grammar.add_rule(v_rules)
visual_grammar.load()


# Unload function called by natlink at module unload time.
def unload():
    global normal_grammar, insert_grammar, visual_grammar
    if normal_grammar:
        normal_grammar.unload()
    if insert_grammar:
        insert_grammar.unload()
    if visual_grammar:
        visual_grammar.unload()
    normal_grammar = None
    insert_grammar = None
    visual_grammar = None
class PrintWindowsRule(CompoundRule): spec = "print [all] Windows" # Spoken form of command. def _process_recognition(self, node, extras): # Callback when command is spoken. windows = Window.get_all_windows() #windows.sort(key=lambda x: x.executable) for window in windows: if utils.windowIsValid(window): print "{:7} : {:75} : {}".format( window.handle, window.executable.encode("utf-8"), window.title.encode("utf-8")) # window.executable.lower() # window.title.lower() # window.is_visible # window.name # window.classname print_windows_grammar = Grammar("print windows according to Python") print_windows_grammar.add_rule(PrintWindowsRule()) print_windows_grammar.load() def unload(): global print_windows_grammar print_windows_grammar = utils.unloadHelper(print_windows_grammar, __name__)
# Fragment: tail of VimCommand's recognition handler (the method and the
# ``commands`` / ``insertion_buffer`` locals are defined earlier in the file).
        if 'literal' in extras:
            commands.extend(extras['literal'])
        for command in commands:
            mode, command = command
            if mode == 'i':
                # Insert-mode text is buffered so a run of insertions can
                # be flushed as one action.
                insertion_buffer.append(command)
            else:
                # Non-insert command: flush pending insertions first.
                execute_insertion_buffer(insertion_buffer)
                insertion_buffer = []
                command.execute(extras)
        # Flush whatever insertions remain at the end of the utterance.
        execute_insertion_buffer(insertion_buffer)


grammar.add_rule(VimCommand())
grammar.load()


def unload():
    # Release dynamic vocabularies and both grammars when natlink unloads
    # this module.
    aenea.vocabulary.uninhibit_global_dynamic_vocabulary('vim', VIM_TAGS)
    for tag in VIM_TAGS:
        aenea.vocabulary.unregister_dynamic_vocabulary(tag)
    global grammar
    if grammar:
        grammar.unload()
    global ExModeGrammar
    if ExModeGrammar:
        ExModeGrammar.unload()
    ExModeGrammar = None
    grammar = None
# Fragment: closing brace of a mapping that opens earlier in the file.
    }


class CSEscapeSequences(MappingRule):
    # Commands for typing C# escape sequences.
    # NOTE(review): Text("\ ") emits backslash-space and Key("left") steps
    # back over the space -- verify the composed output is as intended.
    mapping = {
        "escape quotes": Text("\ ")+ Key("left") + Text("\"") + Text("\ ")+ Key("left") + Text("\""),
        "escape single quotes": Text("\ ")+ Key("left") + Text("\'") + Text("\ ")+ Key("left") + Text("\'"),
        "escape line": Text("\ ")+ Key("left") + Text("n"),
        "escape tab": Text("\ ")+ Key("left") + Text("t"),
        "escape carriage return": Text("\ ")+ Key("left") + Text("r"),
    }


# The main C# grammar rules are activated here
csBootstrap = Grammar("C sharp bootstrap")
csBootstrap.add_rule(CSEnabler())
csBootstrap.load()

csGrammar = Grammar("C sharp grammar")
csGrammar.add_rule(CSTestRule())
csGrammar.add_rule(CSCommentsSyntax())
csGrammar.add_rule(CSDataTypes())
csGrammar.add_rule(CSComparisonOperators())
csGrammar.add_rule(CSBooleanOperators())
csGrammar.add_rule(CSControlStructures())
csGrammar.add_rule(CSUsefulMethods())
csGrammar.add_rule(CSArithmeticOperators())
csGrammar.add_rule(CSAssignmentOperators())
csGrammar.add_rule(CSMiscellaneousStuff())
csGrammar.add_rule(CSAccessModifiers())
csGrammar.add_rule(CSEscapeSequences())
csGrammar.add_rule(CSDisabler())
# NOTE(review): csGrammar.load() does not appear in this fragment; it is
# presumably called further down in the file.
# Fragment: tail of MailCommands' mapping (the class opens earlier in file).
        "expand thread": Key("Right"),
        "collapse thread": Key("Left"),
    }


class ComposeCommands(MappingRule):
    # Shortcuts available in the message-compose window.
    mapping = {
        "sign": Key("cs-s"),
        "encrypt": Key("cs-e"),
        # Order of signing/encrypting does not matter; both forms send the
        # same key sequence.
        "sign and encrypt": Key("cs-s, cs-e"),
        "encrypt and sign": Key("cs-s, cs-e"),
        "send message": Key("c-Return"),
    }


email_grammar.add_rule(MailCommands())
compose_grammar.add_rule(ComposeCommands())
email_grammar.load()
compose_grammar.load()


def unload():
    # Called by natlink at module unload time.
    global email_grammar
    global compose_grammar
    if email_grammar:
        email_grammar.unload()
    if compose_grammar:
        compose_grammar.unload()
    email_grammar = compose_grammar = None
# Fragment: tail of MultiMoreKeyMap's mapping (opens earlier in the file).
        Text("str"),
        "jason": Text("json"),

        ### Dragonfly Commands
        # Insert empty Text()/Key() map entries and step the caret back
        # inside the first quote pair.
        "add text map": Text("\"\": Text(\"\"),") + Key("left:12"),
        "add key map": Text("\"\": Key(\"\"),") + Key("left:11"),
    }
    extras = [
        Integer("n", 1, 50),
        Integer("tab", 1, 8),
        Integer("number", 1, 9999),
        Dictation("text"),
    ]
    defaults = {
        "n": 1,
    }


multiedit_more_grammar = Grammar("Multiedit More")
multiedit_more_grammar.add_rule(MultiMoreKeyMap())
multiedit_more_grammar.load()


def unload():
    # Called by natlink at module unload time.
    global multiedit_more_grammar
    multiedit_more_grammar = utils.unloadHelper(multiedit_more_grammar, __name__)
# handles Python control structures
class PythonControlStructures(MappingRule):
    """Spoken templates for common Python statements; each types a stub
    followed by enter."""
    mapping = {
        "if": Text("if cond:") + Key("enter"),
        "for loop": Text("for i in iter:") + Key("enter"),
        "function": Text("def function():") + Key("enter"),
        "class": Text("class cls(object):") + Key("enter"),
        "init": Text("def __init__(self):") + Key("enter"),
        "main": Text("if __name__ == '__main__':") + Key("enter"),
    }


# The main Python grammar rules are activated here. The bootstrap grammar
# holds only the enabler command; the full grammar starts disabled until
# the enabler switches it on.
pythonBootstrap = Grammar("python bootstrap")
pythonBootstrap.add_rule(PythonEnabler())
pythonBootstrap.load()

pythonGrammar = Grammar("python grammar")
pythonGrammar.add_rule(PythonTestRule())
pythonGrammar.add_rule(PythonControlStructures())
pythonGrammar.add_rule(PythonDisabler())
pythonGrammar.load()
pythonGrammar.disable()


# Unload function which will be called by natlink at unload time.
def unload():
    global pythonGrammar
    if pythonGrammar:
        pythonGrammar.unload()
    pythonGrammar = None
# Fragment: tail of CommandRule's mapping (the class opens earlier in file).
        "note find <text>": Key("c-q/25") + Text("%(text)s"),

        # Window handling.
        # "new tab": Key("c-n"),
        # "next tab [<t>]": Key("c-tab:%(t)d"),
        # "preev tab [<t>]": Key("cs-tab:%(t)d"),
        # "close tab": Key("c-w"),
        # "(full-screen | full screen)": Key("f11"),
    }
    extras = [
        Integer("t", 1, 50),
        Dictation("text"),
        IntegerRef("n", 1, 50000),
    ]
    defaults = {
        "t": 1,
    }


# Grammar active only while Evernote is the foreground application.
context = AppContext(executable='evernote')
evernote_grammar = Grammar('Evernote Grammar', context=context)
evernote_grammar.add_rule(CommandRule())
evernote_grammar.load()


# Unload function which will be called by natlink at unload time.
def unload():
    global evernote_grammar
    evernote_grammar = utils.unloadHelper(evernote_grammar, __name__)
# Cursor grouping. "group all": Key("c-g"), "ungroup all": Key("cs-g"), "group tab": Key("c-t"), "ungroup tab": Key("cs-t"), # Copy & paste. "select all": Key("c-a"), "clear selection": Key("c-del"), #"copy (selection|that)": Key("c-insert"), #"paste (that)": Key("s-insert"), "dump screen buffer": Key("cs-f1"), }, extras=[ IntegerRef("n", 1, 100), ], ) context = AppContext(executable="console") terminator_grammar = Grammar("ConsoleZ", context=context) terminator_grammar.add_rule(rules) terminator_grammar.load() def unload(): """Unload function which will be called at unload time.""" global terminator_grammar if grammar: grammar.unload() grammar = None
# Fragment: tail of TaskRule's recognition handler (begins earlier in file).
        count = extras['n'] - 1
        direction = 'right'
        # Focus the taskbar (win-b, shift-tab), step to the n-th button,
        # then run the mapped action.
        action = Key("w-b/10, s-tab/10, " + direction + ":%d/10" % count) + value
        action.execute()


class IconRule(MappingRule):
    # Voice access to notification-area icons by position.
    mapping = {
        "[open] icon <n>": Key("enter"),
        "(menu | pop up) icon <n>": Key("apps"),
    }
    extras = [IntegerRef("n", 1, 12)]

    def _process_recognition(self, value, extras):
        # Focus the tray (win-b), step right to the n-th icon, then run
        # the mapped action (enter / apps).
        count = extras["n"] - 1
        action = Key("w-b/10, right:%d/10" % count) + value
        action.execute()


taskbar_grammar = Grammar("taskbar")
taskbar_grammar.add_rule(TaskRule())
taskbar_grammar.add_rule(IconRule())
taskbar_grammar.load()


def unload():
    # Called by natlink at module unload time.
    global taskbar_grammar
    if taskbar_grammar:
        taskbar_grammar.unload()
    taskbar_grammar = None
# Fragment: tail of the git subcommand alternative (the element opens
# earlier in the file).
    status_rule,
    log_rule,
    branch_rule,
    pull_rule,
])


class GitRule(CompoundRule):
    spec = 'git <command>'
    extras = [git_command]

    # NOTE(review): this overrides Rule.process_recognition (no leading
    # underscore) rather than CompoundRule._process_recognition -- confirm
    # that is intentional.
    def process_recognition(self, node):
        self.value(node).execute()

    def value(self, node):
        # Walk the parse tree down to the matched subcommand element and
        # turn it into a typed "git <subcommand>" action.
        cmd = node.children[0].children[0].children[1].children[0].children[0]
        value = Text('git ' + cmd.value())
        return value


git_grammar = Grammar('git')
git_grammar.add_rule(GitRule())
git_grammar.load()


def unload():
    # Called at module unload time.
    global git_grammar
    if git_grammar:
        git_grammar.unload()
    git_grammar = None
from dragonfly import MappingRule, Text, Grammar

from supporting import utils


class Aliases(MappingRule):
    """Spoken aliases that expand to longer text snippets."""
    mapping = {
        "Rumpelstiltskin": Text("placeholder alias"),
    }


aliases_grammar = Grammar("The actual aliases grammar")
aliases_grammar.add_rule(Aliases())
aliases_grammar.load()


def unload():
    # Called by natlink at module unload time.
    global aliases_grammar
    aliases_grammar = utils.unloadHelper(aliases_grammar, __name__)
def create_grammar():
    """Build and load the "programs" grammar.

    Returns a ``(grammar, True)`` tuple; the boolean is always True.
    """
    prog_grammar = Grammar("programs")
    prog_grammar.add_rule(ProgramRule())
    prog_grammar.load()
    return prog_grammar, True
# Pause("20").execute() # Key("3,9").execute() # elif (words[0] == "preev") | (words[0] == "previous"): # print "preev" # Mimic("open").execute() # Pause("20").execute() # Key("8,8").execute() # else: # print "Commmand had incorrect word: " + words[0] # else: # print "Wrong number of words in command: " + words # #gmail_context = AppContext(executable="chrome", title="Gmail") context = AppContext(executable="chrome") chrome_grammar = Grammar("Google Chrome", context=context) chrome_grammar.add_rule(GlobalChromeMappings()) chrome_grammar.add_rule(GmailMappings()) chrome_grammar.add_rule(OpenGmailLineRule()) # chrome_grammar.add_rule(NavigateCalendarWeeks()) chrome_grammar.load() def unload(): global chrome_grammar if chrome_grammar: print "unloading " + __name__ + "..." chrome_grammar.unload() chrome_grammar = None
# Fragment: tail of BarneyUtilities' mapping and extras (opens earlier in
# the file).
        Function(output_value),
        "<type_name> [<value_name>]":
            Function(output_type_annotation),
        "<type_name> [<value_name>] is constant":
            Function(output_type_annotation, is_constant=True),
    }
    extras = [
        Dictation("value_name", default=""),
        type_name_choice("type_name")
    ]


# The main Barney grammar rules are activated here. The bootstrap grammar
# holds only the enabler command.
barney_bootstrap = Grammar("barney bootstrap")
barney_bootstrap.add_rule(BarneyEnabler())
barney_bootstrap.load()

barney_grammar = Grammar("barney grammar")
barney_grammar.add_rule(BarneyUtilities())
barney_grammar.add_rule(BarneyDisabler())
barney_grammar.load()
# Starts disabled; BarneyEnabler turns it on.
barney_grammar.disable()


def unload():
    # Called at module unload time.
    # NOTE(review): barney_bootstrap is not unloaded here -- confirm
    # whether it should be.
    global barney_grammar
    if barney_grammar:
        barney_grammar.unload()
    barney_grammar = None
# Fragment: closing brace of a mapping that opens earlier in the file.
    }


class JavaEscapeSequences(MappingRule):
    # Commands for typing Java escape sequences.
    # NOTE(review): Text("\ ") emits backslash-space and Key("left") steps
    # back over the space -- verify the composed output is as intended.
    mapping = {
        "escape quotes": Text("\ ")+ Key("left") + Text("\"") + Text("\ ")+ Key("left") + Text("\""),
        "escape single quotes": Text("\ ")+ Key("left") + Text("\'") + Text("\ ")+ Key("left") + Text("\'"),
        "escape line": Text("\ ")+ Key("left") + Text("n"),
        "escape tab": Text("\ ")+ Key("left") + Text("t"),
        "escape carriage return": Text("\ ")+ Key("left") + Text("r"),
    }


# The main Java grammar rules are activated here
javaBootstrap = Grammar("java bootstrap")
javaBootstrap.add_rule(JavaEnabler())
javaBootstrap.load()

javaGrammar = Grammar("java grammar")
javaGrammar.add_rule(JavaTestRule())
javaGrammar.add_rule(JavaCommentsSyntax())
javaGrammar.add_rule(JavaDataTypes())
javaGrammar.add_rule(JavaComparisonOperators())
javaGrammar.add_rule(JavaBooleanOperators())
javaGrammar.add_rule(JavaControlStructures())
javaGrammar.add_rule(JavaUsefulMethods())
javaGrammar.add_rule(JavaArithmeticOperators())
javaGrammar.add_rule(JavaAssignmentOperators())
javaGrammar.add_rule(JavaMiscellaneousStuff())
javaGrammar.add_rule(JavaAccessModifiers())
javaGrammar.add_rule(JavaEscapeSequences())
javaGrammar.add_rule(JavaDisabler())
# NOTE(review): javaGrammar.load() does not appear in this fragment; it is
# presumably called further down in the file.
def hmc_confirm(value, nexus):
    # Forward a confirm/deny choice to the helper-window component.
    nexus.comm.get_com("hmc").do_action(value)


def hmc_settings_complete(nexus):
    # Signal the helper window that settings input is finished.
    nexus.comm.get_com("hmc").complete()


class HMCRule(MappingRule):
    # Commands available whenever the helper window is focused.
    mapping = {
        "kill homunculus": R(Function(kill, nexus=_NEXUS),
                             rdescript="Kill Helper Window"),
        "complete": R(Function(complete, nexus=_NEXUS),
                      rdescript="Complete Input")
    }


grammar = Grammar("hmc", context=AppContext(title=settings.HOMUNCULUS_VERSION))
grammar.add_rule(HMCRule())
grammar.load()


class HMCHistoryRule(MappingRule):
    # Commands for the macro-recorder view of the helper window.
    mapping = {
        # specific to macro recorder
        "check <n>": R(Function(hmc_checkbox, nexus=_NEXUS),
                       rdescript="Check Checkbox"),
        "check from <n> to <n2>": R(Function(hmc_recording_check_range, nexus=_NEXUS),
                                    rdescript="Check Range"),
        "exclude <n>": R(Function(hmc_recording_exclude, nexus=_NEXUS),
                         rdescript="Uncheck Checkbox"),
        "[make] repeatable": R(Function(hmc_recording_repeatable, nexus=_NEXUS),
                               rdescript="Make Macro Repeatable")
    }
    extras = [
        IntegerRefST("n", 1, 25),
        IntegerRefST("n2", 1, 25),
    ]


grammar_history = Grammar("hmc history",
                          context=AppContext(title=settings.HMC_TITLE_RECORDING))
grammar_history.add_rule(HMCHistoryRule())
# NOTE(review): grammar_history.load() does not appear in this fragment;
# presumably it is called further down in the file.
from dragonfly import AppContext, Grammar, CompoundRule, Key


# Voice command rule combining spoken form and recognition processing.
class ExampleRule(CompoundRule):
    """Presses "k" when the user says "do something computer"."""
    spec = "do something computer"  # Spoken form of command.

    def _process_recognition(self, node, extras):
        # Callback when command is spoken.
        Key("k").execute()


# Create a grammar which contains and loads the command rule; active only
# while Notepad is the foreground application.
grammar = Grammar("example grammar", context = AppContext(executable='Notepad'))
grammar.add_rule(ExampleRule())  # Add the command rule to the grammar.
grammar.load()                   # Load the grammar.
class AnkiMapping(MappingRule):
    """Voice commands for the Anki flash-card application."""
    mapping = {
        "add card": Key("c-enter"),
    }
    extras = [
        Integer("t", 1, 50),
        Dictation("text"),
        Dictation("text2"),
        IntegerRef("n", 1, 50000),
        Integer("w", 0, 10),
        Integer("x", 0, 10),
        Integer("y", 0, 10),
        Integer("z", 0, 10),
    ]
    defaults = {"t": 1, "text": "", "text2": ""}


# Grammar active only while Anki is the foreground application.
context = AppContext(executable='anki')
anki_grammar = Grammar('Anki Grammar', context=context)
anki_grammar.add_rule(AnkiMapping())
anki_grammar.load()


def unload():
    # Called by natlink at module unload time.
    global anki_grammar
    anki_grammar = utils.unloadHelper(anki_grammar, __name__)
extras = [ IntegerRef("n", 1, 10), Dictation("room"), ] defaults = { "n": 1, } class ChatRule(MappingRule): mapping = { "at <user>": Text("@%(user)s "), "send": Key("enter"), } extras = [ Choice("user", config.usernames.map), ] context = AppContext(executable="hipchat") terminator_grammar = Grammar("hipchat_general", context=context) terminator_grammar.add_rule(NavigationRule()) terminator_grammar.add_rule(ChatRule()) terminator_grammar.load() # Unload function which will be called by natlink at unload time. def unload(): global terminator_grammar if grammar: grammar.unload() grammar = None
# Fragment: tail of the text-edit mapping rule (the call opens earlier in
# the file).
        "double (equal|equals)": Text("=="),
        "(strict|double) not equals": Text("!=="),
        "(triple|strict) (equal|equals)": Text("==="),
    },
    extras=[
        # Special elements in the specs of the mapping.
        Dictation("text"),
        Dictation("inner_text"),
        Dictation("text_left"),
        Dictation("text_right")
    ],
    defaults={
        "text": "",
        "inner_text": "",
        "text_left": "",
        "text_right": ""
    })

# Wrap the mapping in a series rule so several commands can be chained in
# one utterance, then load it.
text_edit_rule = SeriesMappingRule(text_edit_mapping_rule)
text_edit_grammar.add_rule(text_edit_rule)
text_edit_grammar.load()


def unload():
    # Called at module unload time.
    global text_edit_grammar
    if text_edit_grammar:
        text_edit_grammar.unload()
    text_edit_grammar = None
# Fragment: tail of ElixirUtilities' extras list (opens earlier in file).
        Dictation("module_name", default=""),
        Dictation("alias_name", default=""),
        Dictation("name", default=""),
        Dictation("binding", default=""),
        Dictation("comment", default=""),
        gen_server_command_choice("gen_server_command"),
        log_level_choice("log_level"),
        interactive_command_choice("interactive_command"),
        mix_command_choice("mix_command"),
        comment_choice("comment_type"),
    ]


# The main Elixir grammar rules are activated here. The bootstrap grammar
# holds only the enabler command.
elixirBootstrap = Grammar("elixir bootstrap")
elixirBootstrap.add_rule(ElixirEnabler())
elixirBootstrap.load()

elixirGrammar = Grammar("elixir grammar")
elixirGrammar.add_rule(ElixirUtilities())
elixirGrammar.add_rule(ElixirDisabler())
elixirGrammar.load()
# Starts disabled; ElixirEnabler turns it on.
elixirGrammar.disable()


def unload():
    # Called at module unload time.
    # NOTE(review): elixirBootstrap is not unloaded here -- confirm
    # whether it should be.
    global elixirGrammar
    if elixirGrammar:
        elixirGrammar.unload()
    elixirGrammar = None
class MasterGrammar(object):
    """A MasterGrammar is built up from a specific set of active rules. They
    synthesize the different rule types into one dragonfly grammar. There is
    only ever one master grammar active at a time."""

    def __init__(self, baseRuleSet, client, ruleCache):
        # client: recipient of ReportingAction notifications.
        # ruleCache: shared hash -> HashedRule/NeedDependency store.
        self.client = client
        self.ruleCache = ruleCache

        # Hashes that are directly part of this grammar
        self.baseRuleSet = set(baseRuleSet)

        # Hashes of rules that we discover are dependencies
        # of the base rule set
        self.dependencyRuleSet = set()

        # hash -> dragonfly rule
        self.concreteRules = {}
        # one hash per merge group, hash is of hashes of rules that were merged
        self.seriesRules = set()
        # one hash, hash is of hashes of rules that were merged
        self.terminatorRule = ""
        # one hash per rule, hash is the rule's actual hash
        self.independentRules = set()

        # Rule references are stored as hashes, so rules that
        # contain rule refs already effectively include those
        # rules in their hash, so just hashing the base set is
        # all we need.
        x = hashlib.sha256()
        x.update("".join(sorted([r for r in self.baseRuleSet])))
        self.hash = x.hexdigest()[:32]

        # Hashes of rules we depend on but haven't arrived yet.
        # These will be discovered during the dfly grammar building
        # process.
        self.missing = set()
        self.checkDeps(self.fullRullSet)  # build self.missing

        self.finalDflyRule = None
        self.dflyGrammar = None

        # word lists are *not* hashed. they are global state the
        # client can update at any time, and the change has to be
        # propogated into the currently active grammar. the client
        # can choose to make them rule specific by making the name
        # be the hash of the rule the word list applies to, but this
        # is only convention and not enforced
        self.concreteWordLists = {}

    @property
    def fullRullSet(self):
        # Base rules plus every discovered dependency.
        # NOTE(review): "Rull" is a long-standing typo kept for
        # compatibility with existing callers.
        return self.baseRuleSet | self.dependencyRuleSet

    def satisfyDependency(self, r):
        """Marks dependency on hash r as satisfied, and tries to build if
        no more known deps are missing. During the build process new
        indirect dependencies may still be discovered however."""
        assert r in self.missing
        self.missing.remove(r)
        if not self.missing:
            self.build()

    def checkDep(self, r):
        "Checks if dep r is present. Not recursive."
        if r not in self.ruleCache:
            self.ruleCache[r] = NeedDependency()
        if isinstance(self.ruleCache[r], NeedDependency):
            # Record that this grammar is waiting on hash r.
            self.ruleCache[r].add(self.hash)
            self.missing.add(r)
            return False
        return True

    def checkMissing(self):
        # Raise if any known dependency has not arrived yet.
        if self.missing:
            raise MissingDependency(copy(self.missing))

    def checkDeps(self, ruleSet):
        "Recursively check if all deps in ruleSet are satisfied."
        if not ruleSet:
            return True

        newDeps = set()
        for r in ruleSet:
            if self.checkDep(r):
                rule = self.ruleCache[r]  # HashedRule
                rule = rule.rule
                log.info("rule [%s]" % (rule, ))
                # Any extra with a rule_ref pulls in another rule by hash.
                for e in rule.extras:
                    if hasattr(e, "rule_ref"):
                        newDeps.add(e.rule_ref)

        self.dependencyRuleSet.update(newDeps)
        self.checkDeps(newDeps)

    def ready(self):
        # True when every known dependency has arrived.
        return len(self.missing) == 0

    def build(self):
        """Merge all rules into prototypes, build concrete dragonfly rules
        from them, then construct and register the final grammar."""
        if self.dflyGrammar:
            # already built
            return

        buildStartTime = time.time()

        self.checkMissing()
        self.checkDeps(self.fullRullSet)
        self.checkMissing()

        # from here on we assume all deps are present all the way down
        seriesGroups = {}
        terminal = {}

        allRules = []

        mergeStartTime = time.time()

        # Merge series and terminal rules, set independent rules aside
        self.fullName = []
        for r in self.fullRullSet:
            rule = self.ruleCache[r].rule
            hash = self.ruleCache[r].hash

            # Pick the prototype dict this rule merges into: one per
            # series merge group, one shared terminal, or a fresh dict
            # for each independent rule.
            if rule.ruleType == RuleType.SERIES:
                if rule.seriesMergeGroup not in seriesGroups:
                    seriesGroups[rule.seriesMergeGroup] = {}
                x = seriesGroups[rule.seriesMergeGroup]
            elif rule.ruleType == RuleType.TERMINAL:
                x = terminal
            elif rule.ruleType == RuleType.INDEPENDENT:
                x = {}

            if "mapping" not in x:
                x["mapping"] = {}
            if "extras" not in x:
                x["extras"] = {}
            if "defaults" not in x:
                x["defaults"] = {}
            if "name" not in x:
                x["name"] = ""
            if "hash" not in x:
                x["hash"] = set()

            x["ruleType"] = rule.ruleType
            x["seriesMergeGroup"] = rule.seriesMergeGroup
            x["name"] = x["name"] + ("," if x["name"] else "") + rule.name
            x["mapping"].update(rule.mapping.items())
            for e in rule.extras:
                x["extras"][e.name] = e
            x["defaults"].update(rule.defaults.items())
            log.info("Adding hash [%s] to name [%s]" % (hash, x["name"]))
            x["hash"].add(hash)

            x["built"] = False
            # Only independent rules are exported (directly recognizable).
            x["exported"] = (rule.ruleType == RuleType.INDEPENDENT)

            # allRules will contain all the rules we have left
            # *after* merging. So only one series rule per merge
            # group and only one terminal rule.
            allRules.append(x)

        mergeEndTime = time.time()
        log.info("Grammar merge time: %ss" % (mergeEndTime - mergeStartTime))

        # We really should be doing a topological sort, but this
        # isn't a frequent operation so this inefficiency should
        # be OK. Keep trying to link deps until they're all good.
        uniqueRules = []
        for r in allRules:
            if r not in uniqueRules:
                uniqueRules.append(r)
                self.fullName.append(r["name"])
        self.fullName = ",".join(self.fullName)
        allRules = uniqueRules

        # collapse the hashes
        for r in allRules:
            assert type(r["hash"]) == set
            assert len(r["hash"]) >= 1
            if r["ruleType"] in (RuleType.SERIES, RuleType.TERMINAL):
                # We generate a composite hash for our new composite rules
                log.info("Multi-hash: [%s]" % r["hash"])
                hashes = sorted(list(r["hash"]))
                x = hashlib.sha256()
                x.update("".join(sorted([h for h in hashes])))
                hash = x.hexdigest()[:32]
                log.info("Composite: [%s]" % hash)
            else:
                # We just use the exising hash for a rule if it's not composite
                [hash] = r["hash"]
                log.info("Single hash: [%s]" % r["hash"])
            r["hash"] = hash

        allPrototypes = {i["hash"]: i for i in allRules}

        self.concreteTime = 0
        cleanupTime = 0
        for k, v in allPrototypes.items():
            if not v["built"]:
                cleanupStart = time.time()
                self.cleanupProtoRule(v, allPrototypes)
                cleanupEnd = time.time()
                cleanupTime += (cleanupEnd - cleanupStart)

        log.info("Total Cleanup time: %ss" % cleanupTime)
        log.info("Total Concrete time: %ss" % (self.concreteTime))

        #log.info("made it out of loop")
        self.buildFinalMergedRule()

        buildEndTime = time.time()
        log.info("Grammar build time: %ss" % (buildEndTime - buildStartTime))

        self.setupFinalDflyGrammar()

    def buildFinalMergedRule(self):
        """Construct the single master MappingRule that references every
        merged series rule and the terminator rule."""
        #log.info("Building final merged rule.")
        if not self.seriesRules and not self.terminatorRule:
            return

        extras = []
        seriesRefNames = []
        for i, r in enumerate(self.seriesRules):
            name = "s" + str(i)
            seriesRefNames.append(name)
            ref = dfly.RuleRef(self.concreteRules[r], name)
            extras.append(ref)
        # Optional alternative over all series rules, e.g. "[<s0> | <s1>]".
        seriesPart = "[" + " | ".join([("<" + r + ">")
                                       for r in seriesRefNames]) + "]"

        terminatorPart = ""
        if self.terminatorRule:
            extras.append(
                dfly.RuleRef(self.concreteRules[self.terminatorRule],
                             "terminator"))
            terminatorPart = " [<terminator>]"

        masterPhrase = seriesPart + terminatorPart
        mapping = {
            masterPhrase: ReportingAction(masterPhrase, self.client, self.hash)
        }

        log.info(
            "Building master grammar rule with name [%s] mapping [%s] extras [%s] defaults [%s]"
            % (self.fullName, mapping, extras, {}))
        masterTimeStart = time.time()
        self.finalDflyRule = MappingRule(name=self.hash, mapping=mapping,
                                         extras=extras, defaults={},
                                         exported=True)
        masterTimeEnd = time.time()
        log.info("Master rule construction time: %ss" %
                 (masterTimeEnd - masterTimeStart))

    def setupFinalDflyGrammar(self):
        """Create the dragonfly Grammar, add the master and independent
        rules, load it, and disable the pieces that must not be recognized
        on their own."""
        log.info("Setting up final grammar.")

        assert not self.dflyGrammar
        self.dflyGrammar = Grammar(self.fullName + "Grammar")
        if self.finalDflyRule:
            self.dflyGrammar.add_rule(self.finalDflyRule)
        for r in self.independentRules:
            self.dflyGrammar.add_rule(self.concreteRules[r])
        loadStart = time.time()
        self.dflyGrammar.load()
        loadEnd = time.time()
        log.info("Grammar load time: %ss" % (loadEnd - loadStart))
        get_engine().set_exclusiveness(self.dflyGrammar, 1)

        # These should never be recognized on their own, only as part of the
        # master rule, quirk of dragonfly that you have to do this even though
        # they're only pulled in by ruleref.
        for r in self.seriesRules:
            self.concreteRules[r].disable()
        if self.terminatorRule:
            self.concreteRules[self.terminatorRule].disable()

        # independent rules only enabled via being a dependency need to have
        # disable called on their dragonfly version so that they don't get
        # recognized by themselves, same quirk.
        notEnabledRules = self.dependencyRuleSet - self.baseRuleSet
        for r in notEnabledRules:
            self.concreteRules[r].disable()

        # they're enabled by default, don't activate until explicitly made to
        self.dflyGrammar.disable()

    def active(self):
        """True when the grammar is built, loaded and enabled."""
        #log.info("active check [%s %s %s]" % (self.dflyGrammar is None, self.dflyGrammar and self.dflyGrammar.loaded, self.dflyGrammar and self.dflyGrammar.enabled))
        return self.dflyGrammar and self.dflyGrammar.loaded and self.dflyGrammar.enabled

    def activate(self):
        # Build on demand, then enable recognition.
        self.build()
        self.dflyGrammar.enable()
        log.info("Grammar activated: [%s]" % self.hash)

    def deactivate(self):
        # it's possible we never built successfully
        if self.dflyGrammar:
            self.dflyGrammar.disable()
        log.info("Grammar deactivated: [%s]" % self.hash)

    def unload(self):
        """Deactivate and unload the dragonfly grammar, if it was built."""
        self.deactivate()
        if self.dflyGrammar:
            self.dflyGrammar.unload()

    def buildConcreteRule(self, r):
        """Construct the dragonfly rule object for prototype ``r`` and file
        its hash under the matching category set."""
        # for independent rules we could use the plain
        # name, but it turns out Dragon crashes if your
        # names get too long, so for combined rules we
        # just use the hash as the name... hopefully
        # that's under the limit
        name = r["hash"]
        if r["ruleType"] == RuleType.SERIES:
            t = SeriesMappingRule
        elif r["ruleType"] == RuleType.TERMINAL:
            t = MappingRule
        else:
            t = MappingRule

        constructionStartTime = time.time()
        log.info(
            "Building rule [%s] with size [%s] num extras [%s] num defaults [%s]"
            % (r["name"], len(r["mapping"]), len(r["extras"]),
               len(r["defaults"])))
        rule = t(name=name, mapping=r["mapping"], extras=r["extras"],
                 defaults=r["defaults"], exported=r["exported"])
        constructionEndTime = time.time()
        log.info("Rule construction time: %ss" %
                 (constructionEndTime - constructionStartTime))

        self.concreteRules[r["hash"]] = rule

        if r["ruleType"] == RuleType.SERIES:
            self.seriesRules.add(r["hash"])
        elif r["ruleType"] == RuleType.TERMINAL:
            self.terminatorRule = r["hash"]
        elif r["ruleType"] == RuleType.INDEPENDENT:
            self.independentRules.add(r["hash"])
        else:
            assert False

        log.info("done building")

    def cleanupProtoRule(self, r, allPrototypes):
        """Convert protocol extras in prototype ``r`` to concrete dragonfly
        extras (recursing into referenced prototypes first), then build the
        concrete rule."""
        # have to uniquify in this round about way because lists
        # aren't hashable and we need them for ListRef.
        if type(r["extras"]) == dict:
            r["extras"] = r["extras"].values()

        newExtras = []
        for e in r["extras"]:
            if isinstance(e, protocol.Integer):
                newExtras.append(dfly.Integer(e.name, e.min, e.max))
            elif isinstance(e, protocol.Dictation):
                newExtras.append(dfly.Dictation(e.name))
            elif isinstance(e, protocol.Repetition):
                if e.rule_ref not in self.concreteRules:
                    self.cleanupProtoRule(allPrototypes[e.rule_ref],
                                          allPrototypes)

                # Dragonfly wants RuleRef to take a RuleRef rather than an actual
                # Rule, so we just make one rather than forcing the server to
                # handle this, see protocol.py comments.
                concrete = self.concreteRules[e.rule_ref]
                log.info("concrete type: [%s]" % type(concrete))
                newExtras.append(
                    dfly.Repetition(dfly.RuleRef(rule=concrete), e.min,
                                    e.max, e.name))
            elif isinstance(e, protocol.RuleRef):
                if e.rule_ref not in self.concreteRules:
                    self.cleanupProtoRule(allPrototypes[e.rule_ref],
                                          allPrototypes)

                newExtras.append(
                    dfly.RuleRef(self.concreteRules[e.rule_ref], e.name))
            elif isinstance(e, protocol.ListRef):
                self.concreteWordLists[e.name] = List(e.name + "ConcreteList")
                # self.concreteWordLists[e.name].set(e.words)
                newExtras.append(
                    dfly.ListRef(e.ref_name, self.concreteWordLists[e.name]))
            else:
                raise Exception("Unknown extra type: [%s]" % e)

        r["extras"] = newExtras

        self.concreteStartTime = time.time()
        self.buildConcreteRule(r)
        self.concreteEndTime = time.time()
        self.concreteTime += (self.concreteEndTime - self.concreteStartTime)

        r["built"] = True
        return True

    def updateWordList(self, name, words):
        """Replace the contents of word list ``name`` if it exists in this
        grammar and the contents actually changed."""
        if name not in self.concreteWordLists:
            # log.info("Word list [%s] not in grammar [%s], ignoring" % (name, self.hash))
            return

        # We want to check if the value has actually changed because List's
        # set method will blindly tell Dragon to delete its old list and replace
        # it with this one and we don't want to disturb Dragon unless we have to
        # because Dragon is slow.
        if sorted(words) != sorted(self.concreteWordLists[name]):
            log.info(
                "Updating word list [%s] on grammar [%s] with contents [%s]"
                % (name, self.hash, len(words)))
            log.info("old list: %s" % self.concreteWordLists[name])
            # TODO: need to check existing load state, then send a loading message here, then restore
            # old state. This way we can see when word lists are taking a long time to load...
            updateStart = time.time()
            self.concreteWordLists[name].set(words)
            updateEnd = time.time()
            log.info("Word list update time: %ss" % (updateEnd - updateStart))
class JavaScriptAssignmentOperators(MappingRule): mapping = { "plus equals": Text("+="), "minus equals": Text("-="), "multiply equals": Text("*="), "divide equals": Text("/="), "modulus equals": Text("%="), } JavaScriptBootstrap = Grammar("JavaScript bootstrap") # Create a grammar to contain the command rule. JavaScriptBootstrap.add_rule(JavaScriptEnabler()) JavaScriptBootstrap.load() JavaScriptGrammar = Grammar("JavaScript grammar") JavaScriptGrammar.add_rule(JavaScriptTestRule()) JavaScriptGrammar.add_rule(JavaScriptControlStructures()) JavaScriptGrammar.add_rule(JavaScriptCommentsSyntax()) JavaScriptGrammar.add_rule(JavaScriptMiscellaneousStuff()) JavaScriptGrammar.add_rule(JavaScriptComparisonOperators()) JavaScriptGrammar.add_rule(JavaScriptArithmeticOperators()) JavaScriptGrammar.add_rule(JavaScriptAssignmentOperators()) JavaScriptGrammar.add_rule(JavaScriptDisabler()) JavaScriptGrammar.load() JavaScriptGrammar.disable() # Unload function which will be called by natlink at unload time. def unload():
class HMCRule(MergeRule): mapping = { "kill homunculus": R(Function(kill, nexus=_NEXUS), rdescript="Kill Helper Window"), "complete": R(Function(complete, nexus=_NEXUS), rdescript="Complete Input") } grammar = Grammar("hmc", context=AppContext(title=settings.HOMUNCULUS_VERSION)) r1 = HMCRule() gfilter.run_on(r1) grammar.add_rule(r1) if settings.SETTINGS["feature_rules"]["hmc"]: grammar.load() class HMCHistoryRule(MergeRule): mapping = { # specific to macro recorder "check <n>": R(Function(hmc_checkbox, nexus=_NEXUS), rdescript="Check Checkbox"), "check from <n> to <n2>": R(Function(hmc_recording_check_range, nexus=_NEXUS), rdescript="Check Range"), "exclude <n>": R(Function(hmc_recording_exclude, nexus=_NEXUS), rdescript="Uncheck Checkbox"), "[make] repeatable": R(Function(hmc_recording_repeatable, nexus=_NEXUS),
from dragonfly import CompoundRule from dragonfly import Grammar from dragonfly import Key class AnnoyingRule(CompoundRule): spec = "torch annoying bug" # Spoken form of command. def _process_recognition(self, node, extras): # Callback when command is spoken. Key("alt:up,s-down").execute() annoying_grammar = Grammar("annoying bug") annoying_grammar.add_rule(AnnoyingRule()) annoying_grammar.load() def unload(): global annoying_grammar if annoying_grammar: print "unloading " + __name__ + "..." annoying_grammar.unload() annoying_grammar = None
# Useful commands for encapsulation of quotes, etc. class UsefulStuff(MappingRule): mapping = { "in quotes": Text("\"\"") + Key("left"), "in single quotes": Text("\'\'") + Key("left"), "dirty bird": Text("()") + Key("left"), "in brackets": Text("[]") + Key("left"), "in braces": Text("{}") + Key("left"), "in angle brackets": Text("<>") + Key("left"), "in parameters": Text("()"), "arrow": Text("->"), "double arrow": Text("=>"), "fat arrow": Text("=>"), } class GitCommands(MappingRule): mapping = { "commit all": Text("git add ."), "commit message": Text("git commit -m \"\"") + Key("left"), "repo push": Text("git push"), "repo pull": Text("git pull") } globalStuff = Grammar("useful custom global commands" ) # Create a grammar to contain the command rule. globalStuff.add_rule(UsefulStuff()) globalStuff.add_rule(GitCommands()) globalStuff.load()
### programming "short object": Text("obj"), "short string": Text("str"), "jason": Text("json"), ### Dragonfly Commands "add text map": Text("\"\": Text(\"\"),") + Key("left:12"), "add key map": Text("\"\": Key(\"\"),") + Key("left:11"), } extras=[ Integer("n", 1, 50), Integer("tab", 1, 8), Integer("number", 1, 9999), Dictation("text"), ] defaults = { "n": 1, } multiedit_alternate_grammar = Grammar("Multiedit Alternate") multiedit_alternate_grammar.add_rule(AlternateKeyMap()) multiedit_alternate_grammar.load() def unload(): global multiedit_alternate_grammar if multiedit_alternate_grammar: print "unloading " + __name__ + "..." multiedit_alternate_grammar.unload() multiedit_alternate_grammar = None
from dragonfly import MappingRule, Key, Grammar rules = MappingRule( name="logiCapture", mapping={ '(start|stop|toggle) recording': Key('a-r'), '(pause|unpause) recording': Key('a-p'), }, ) context: None = None # AppContext(executable="logiCapture") logi_capture_grammar = Grammar("logiCapture", context=context) logi_capture_grammar.add_rule(rules) logi_capture_grammar.load() EXPORT_GRAMMARS = [logi_capture_grammar] def unload(): global logi_capture_grammar if logi_capture_grammar: logi_capture_grammar.unload() logi_capture_grammar = None
IntegerRef("pos6", 1, 10), IntegerRef("pos7", 1, 10), IntegerRef("pos8", 1, 10), IntegerRef("pos9", 1, 10), Dictation("text"), Choice("action", actions), ], defaults={ "pos1": 1 } ) # Use global context, and activate/deactivate grammar dynamically. grammarNavigation = Grammar("Grid navigation", context=GlobalDynamicContext()) grammarNavigation.add_rule(navigate_rule) # Add the top-level rule. grammarNavigation.load() # Load the grammar. grammarNavigation.disable() def mouse_grid_start(pos1=None, pos2=None, pos3=None, pos4=None, pos5=None, pos6=None, pos7=None, pos8=None, pos9=None, action=None): if should_send_to_aenea(): lib.grid_base_x.set_grammar_reference(grammarNavigation) grammarNavigation.enable() lib.grid_base_x.mouse_grid(pos1, pos2, pos3, pos4, pos5, pos6, pos7, pos8, pos9, action) else: lib.grid_base_win.set_grammar_reference(grammarNavigation) grammarNavigation.enable() lib.grid_base_win.mouse_grid(pos1, pos2, pos3, pos4, pos5, pos6, pos7, pos8, pos9, action)
"create [a] snippet": Key("cs-enter"), "view [all] downloads": Key("cs-j"), "edit [message]": Key("e"), "delete [message]": Key("delete"), "(add reaction|react)": Key("r"), "open thread": Key("t"), "toggle pin": Key("p"), "share [message]": Key("s"), "toggle (star|favorite)": Key(""), "Mark as unread from (here|this message)": Key("u"), "remind me about this [message]": Key("m"), }, extras=[ LetterSequenceRef('letter_sequence'), Dictation("text"), ShortIntegerRef('n', 1, 101) ], defaults={"n": 1}) context = AppContext(executable="slack") slack_grammar = Grammar("slack", context=context) slack_grammar.add_rule(rules) slack_grammar.load() EXPORT_GRAMMARS = [slack_grammar] def unload(): global slack_grammar if slack_grammar: slack_grammar.unload() slack_grammar = None
"step over": Key("f10"), "step into": Key("f11"), "Debug start": Key("ca-slash/60") + Text("Launch V") + Key("enter/50") # "Attach start": Key("ca-slash/60") + Text("Attach to V") + Key("enter/50") # Custom key mappings. # "(run SSH session|run SSH console|run remote terminal|run remote console)": Key("a-f11/25, enter"), } extras = [ Integer("t", 1, 50), Dictation("text"), IntegerRef("n", 1, 50000), Integer("w", 0, 10), Integer("x", 0, 10), Integer("y", 0, 10), Integer("z", 0, 10), ] defaults = { "t": 1, } context = AppContext(executable="code") idea_grammar = Grammar("code", context=context) idea_grammar.add_rule(CommandRule()) idea_grammar.load() def unload(): global idea_grammar idea_grammar = utils.unloadHelper(idea_grammar, __name__)
mapping = { "close [<n>] ( frame | frames )": Key("c-w:%(n)d"), "open frame": Key("c-t"), "open window": Key("c-n"), "reopen [<n>] ( frame | frames )": Key("cs-t:%(n)d"), "[ go to ] frame [<n>]": Key("c-%(n)d"), "frame left [<n>]": Key("cs-tab:%(n)d"), "frame right [<n>]": Key("c-tab:%(n)d"), "search [<text>]": Key("c-k") + Text("%(text)s"), "find [<text>]": Key("c-f") + Text("%(text)s"), "history": Key("c-h"), "reload": Key("c-r"), "next [<n>]": Key("c-g:%(n)d"), "previous [<n>]": Key("cs-g:%(n)d"), "back [<n>]": Key("a-left:%(n)d"), "forward [<n>]": Key("a-right:%(n)d"), } extras = [IntegerRef("n", 1, 10), Dictation("text")] defaults = {"n":1, "text":""} chromium_grammar.add_rule(ChromiumRule()) chromium_grammar.load() def unload(): global chromium_grammar if chromium_grammar: chromium_grammar.unload() chromium_grammar = None
some macro file needs to either directly or indirectly import log.py and run log.py's setup_log() function, which initializes Dragonfly's logging system. """ class LoggingRule(CompoundRule): spec = "is logging enabled" # Spoken form of command. def _process_recognition(self, node, extras): # Callback when command is spoken. print 'Yes, looging should be enabled.' testlog = logging.getLogger("dfly.test") testlog.debug("Test the dragonfly test log.") engine_log = logging.getLogger("engine") engine_log.info("The file log should see this, but not stdout.") engine_log.warning("Both file and stdout logs should see this.") #Create a grammar which contains and loads the command rule. logging_rule = LoggingRule() logging_grammar = Grammar("Is logging enabled?") logging_grammar.add_rule(logging_rule) logging_grammar.load() def unload(): global logging_grammar logging_grammar = utils.unloadHelper(logging_grammar, __name__)
class CPPEscapeSequences(MappingRule): mapping = { "escape quotes": Text("\ ")+ Key("left") + Text("\"") + Text("\ ")+ Key("left") + Text("\""), "escape single quotes": Text("\ ")+ Key("left") + Text("\'") + Text("\ ")+ Key("left") + Text("\'"), "escape line": Text("\ ")+ Key("left") + Text("n"), "escape tab": Text("\ ")+ Key("left") + Text("t"), "escape carriage return": Text("\ ")+ Key("left") + Text("r"), } CPPBootstrap = Grammar("C++ bootstrap") # Create a grammar to contain the command rule. CPPBootstrap.add_rule(CPPEnabler()) CPPBootstrap.load() CPPGrammar = Grammar("C++ grammar") CPPGrammar.add_rule(CPPTestRule()) CPPGrammar.add_rule(CPPControlStructures()) CPPGrammar.add_rule(CPPCommentsSyntax()) CPPGrammar.add_rule(CPPUsefulFunctions()) CPPGrammar.add_rule(CPPPreprocessorDirectives()) CPPGrammar.add_rule(CPPOperators()) CPPGrammar.add_rule(CPPEscapeSequences()) CPPGrammar.add_rule(CPPFunctionsAndClassesSyntax()) CPPGrammar.add_rule(CPPDataTypes()) CPPGrammar.add_rule(CPPDisabler()) CPPGrammar.load() CPPGrammar.disable()
Text("cap rubber:monit:stop RUBBER_ENV="), "cap rubber (postgres|PostgreSQL) start": Text("cap rubber:postgresql:start RUBBER_ENV="), "cap rubber (postgres|PostgreSQL) stop": Text("cap rubber:postgresql:stop RUBBER_ENV="), }) class MyCommandsRule(MappingRule): mapping = config.cmd.map extras = [ Dictation("text"), ] global_context = None # Context is None, so grammar will be globally active. terminator_grammar = Grammar( "Capistrano commands", context=global_context) # Create this module's grammar. terminator_grammar.add_rule(MyCommandsRule()) # Add the top-level rule. terminator_grammar.load() # Load the grammar. def unload(): """Unload function which will be called at unload time.""" global terminator_grammar if grammar: grammar.unload() grammar = None
Key("f5/30, s-tab, up:2, down:2, tab") + Function(printNumber) + Key("enter, escape"), "(shoreline | show | toggle) line numbers": Key("cas-l"), "hide line numbers": Key("cas-k"), } extras = [ Integer("t", 1, 50), Integer("w", 0, 10), Integer("x", 0, 10), Integer("y", 0, 10), Integer("z", 0, 10), ] defaults = { "t": 1, } winword_context = AppContext(executable="winword") winword_grammar = Grammar("Microsoft Word", context=winword_context) winword_grammar.add_rule(CommandRule()) winword_grammar.load() # Unload function which will be called by natlink at unload time. def unload(): global winword_grammar winword_grammar = utils.unloadHelper(winword_grammar, __name__)
mapping = { "property <command>": Function(middle_slash_format) + Text(":;") + Key("left"), "comment": Text("/* */"), } extras = [ Dictation("command"), ] # Code for initial setup of the HTML grammar cssBootstrap = Grammar( "css bootstrap") # Create a grammar to contain the command rule. cssBootstrap.add_rule(CSSEnabler()) cssBootstrap.load() cssGrammar = Grammar("css grammar") cssGrammar.add_rule(CSSTestRule()) cssGrammar.add_rule(CSSDisabler()) cssGrammar.add_rule(CSSValues()) cssGrammar.add_rule(CSSSelectors()) cssGrammar.add_rule(CSSTags()) cssGrammar.load() cssGrammar.disable() # Unload function which will be called by natlink at unload time. def unload(): global cssGrammar if cssGrammar: cssGrammar.unload()
import sys sys.path.append(r"C:\Users\archangel\Desktop\我的项目") from speechAssistant.openModule import openRule as oR from dragonfly import Grammar #问题1 不支持英文文件名(致命) open_R=oR.openRule() gr=Grammar("n") gr.add_rule(open_R) gr.load() print("end")
"touch two": Mouse("left:2"), 'touch are': Mouse('right'), "touch mid": Mouse("middle"), "[<n>] scroll down": (Mouse("wheeldown") + Pause('5')) * Repeat(extra='n') * 2, "[<n>] scroll up": (Mouse("wheelup") + Pause('5')) * Repeat(extra='n') * 2, "[<n>] scroll right": (Mouse("wheelright") + Pause('5')) * Repeat(extra='n') * 2, "[<n>] scroll left": (Mouse("wheelleft") + Pause('5')) * Repeat(extra='n') * 2, "drag": Mouse("left:down"), "drop": Mouse("left:up"), "[<n>] alt tab": Key("alt:down,tab/50:%(n)d/50,alt:up"), "alt tab show": Key("alt:down,tab/10,s-tab"), 'reload natlink': Function(reload_natlink), } extras = [ IntegerRef('n', 1, 101, default=1), Dictation('text'), ] global_grammar = Grammar('global grammar') global_grammar.add_rule(GlobalRule()) global_grammar.add_rule(FormatRule()) global_grammar.add_rule(SpellLetterSequenceRule()) global_grammar.load() def unload(): global global_grammar if global_grammar: global_grammar.unload() global_grammar = None
from dragonfly import Grammar, CompoundRule # Voice command rule combining spoken form and recognition processing. class ExampleRule(CompoundRule): spec = "do something computer" # Spoken form of command. def _process_recognition(self, node, extras): # Callback when command is spoken. print "Voice command spoken." # Create a grammar which contains and loads the command rule. grammar = Grammar("example grammar") # Create a grammar to contain the command rule. grammar.add_rule(ExampleRule()) # Add the command rule to the grammar. grammar.load() # Load the grammar.
#--------------------------------------------------------------------------- # Lock screen rule. class LockRule(CompoundRule): spec = config.lang.lock_screen def _process_recognition(self, node, extras): self._log.debug("%s: locking screen." % self) # Put the microphone to sleep. natlink.setMicState("sleeping") # Lock screen. success = ctypes.windll.user32.LockWorkStation() if not success: self._log.error("%s: failed to lock screen." % self) #--------------------------------------------------------------------------- # Create and manage this module's grammar. grammar = Grammar("lock screen") grammar.add_rule(LockRule()) grammar.load() def unload(): global grammar if grammar: grammar.unload() grammar = None
"cap rubber set up remote aliases <text>": Text("RUBBER_ENV=") + Function(lib.format.lowercase_text) + Text(" cap rubber:setup_remote_aliases"), "cap rubber set up D N S aliases <text>": Text("RUBBER_ENV=") + Function(lib.format.lowercase_text) + Text(" cap rubber:setup_dns_aliases"), "cap rubber add role to <text>": Text("RUBBER_ENV=") + Function(lib.format.lowercase_text) + Text(" cap rubber:roles:add ROLES= ALIAS=") + Function(lib.format.lowercase_text), "cap rubber create staging <text>": Text("RUBBER_ENV=") + Function(lib.format.lowercase_text) + Text(" cap rubber:create_staging"), "cap rubber destroy staging <text>": Text("RUBBER_ENV=") + Function(lib.format.lowercase_text) + Text(" cap rubber:destroy_staging"), "cap rubber monit start": Text("cap rubber:monit:start RUBBER_ENV="), "cap rubber monit stop": Text("cap rubber:monit:stop RUBBER_ENV="), "cap rubber (postgres|PostgreSQL) start": Text("cap rubber:postgresql:start RUBBER_ENV="), "cap rubber (postgres|PostgreSQL) stop": Text("cap rubber:postgresql:stop RUBBER_ENV="), } ) class MyCommandsRule(MappingRule): mapping = config.cmd.map extras = [ Dictation("text"), ] global_context = None # Context is None, so grammar will be globally active. terminator_grammar = Grammar("Capistrano commands", context=global_context) # Create this module's grammar. terminator_grammar.add_rule(MyCommandsRule()) # Add the top-level rule. terminator_grammar.load() # Load the grammar. def unload(): """Unload function which will be called at unload time.""" global terminator_grammar if grammar: grammar.unload() grammar = None
mapping = { "if": Text("if condition:") + Key("enter"), "while loop": Text("while condition:") + Key("enter"), "for loop": Text("for something in something:") + Key("enter"), "function": Text("def functionName():") + Key("enter"), "class": Text("class className(inheritance):") + Key("enter"), } # The main Python grammar rules are activated here pythonBootstrap = Grammar("python bootstrap") pythonBootstrap.add_rule(PythonEnabler()) pythonBootstrap.load() pythonGrammar = Grammar("python grammar") pythonGrammar.add_rule(PythonTestRule()) pythonGrammar.add_rule(PythonCommentsSyntax()) pythonGrammar.add_rule(PythonControlStructures()) pythonGrammar.add_rule(PythonDisabler()) pythonGrammar.load() pythonGrammar.disable() # Unload function which will be called by natlink at unload time. def unload(): global pythonGrammar if pythonGrammar: pythonGrammar.unload() pythonGrammar = None
# if words[0] == "next": # print "next" # Mimic("open").execute() # Pause("20").execute() # Key("3,9").execute() # elif (words[0] == "preev") | (words[0] == "previous"): # print "preev" # Mimic("open").execute() # Pause("20").execute() # Key("8,8").execute() # else: # print "Commmand had incorrect word: " + words[0] # else: # print "Wrong number of words in command: " + words # #gmail_context = AppContext(executable="chrome", title="Gmail") context = AppContext(executable="chrome") chrome_grammar = Grammar("Google Chrome", context=context) chrome_grammar.add_rule(GlobalChromeMappings()) chrome_grammar.add_rule(GmailMappings()) chrome_grammar.add_rule(OpenGmailLineRule()) # chrome_grammar.add_rule(NavigateCalendarWeeks()) chrome_grammar.load() def unload(): global chrome_grammar chrome_grammar = utils.unloadHelper(chrome_grammar, __name__)
"view right": R(Key('s-f3'), rdescript="Sync Dirs: view right"), "remove selection": R(Key('c-m'), rdescript="Sync Dirs: remove selection"), "synchronize": R(Key('a-c'), rdescript="Total Commander: synchronize button"), } context = AppContext(executable="totalcmd") | AppContext(executable="totalcmd64") grammar = Grammar("Total Commander", context=context) syncdir_context = AppContext(executable="totalcmd", title='Synchronize directories') syncdir_context |= AppContext(executable="totalcmd64", title='Synchronize directories') syncdir_grammar = Grammar("Total Commander Sync Dirs", context=syncdir_context) if settings.SETTINGS["apps"]["totalcmd"]: if settings.SETTINGS["miscellaneous"]["rdp_mode"]: control.nexus().merger.add_global_rule(SyncDirsRule()) control.nexus().merger.add_global_rule(TotalCommanderRule()) else: syncdir_rule = SyncDirsRule(name="totalcmd sync dirs") gfilter.run_on(syncdir_rule) syncdir_grammar.add_rule(syncdir_rule) syncdir_grammar.load() rule = TotalCommanderRule(name="totalcmd") gfilter.run_on(rule) grammar.add_rule(rule) grammar.load()
#In order to make this work, you need Windows Speech Recognition and dragonfly. #Dragonfly has a bunch of pre-requisites as well that you can find on their Github page. from dragonfly import Grammar, MappingRule, Text, Dictation import pythoncom import time test_com = MappingRule(\ name="test",\ mapping = {"write <text>": Text("%(text)s")},\ extras=[Dictation("text"),],) grammar = Grammar("test grammar") grammar.add_rule(test_com) grammar.load() #Keeps the program running to execute the commands while True: pythoncom.PumpWaitingMessages() time.sleep(0.1)
"table row": "tr", "TR": "tr", "track": "track", "unordered list": "ul", "variable": "var", "video": "video", "label": "label", }) ] # Code for initial setup of the HTML grammar htmlBootstrap = Grammar( "html bootstrap") # Create a grammar to contain the command rule. htmlBootstrap.add_rule(HTMLEnabler()) htmlBootstrap.load() htmlGrammar = Grammar("html grammar") htmlGrammar.add_rule(HTMLTestRule()) htmlGrammar.add_rule(HTMLDisabler()) htmlGrammar.add_rule(HTMLTags()) htmlGrammar.load() htmlGrammar.disable() # Unload function which will be called by natlink at unload time. def unload(): global htmlGrammar if htmlGrammar: htmlGrammar.unload() htmlGrammar = None