Example #1
from dragonfly import AppContext, Grammar

# The additional *_context objects combined below and the rule classes
# (VimEnabler, VimDisabler, LetterSequenceRule, VimUtilities) are defined
# elsewhere in the source module.
pycharm_context = AppContext(executable="pycharm64")
webstorm_context = AppContext(executable="webstorm64")
idea_context = AppContext(executable="idea64")
total_commander_context = AppContext(executable="TOTALCMD64")
vim_context = (code_context | firefox_context | terminal_preview_context
               | discord_context | powerbash_context | graphical_neovim_context
               | skype_context | ripcord_context | edge_context | slack_context
               | hexchat_context | clion_context | goland_context
               | pycharm_context | webstorm_context | total_commander_context
               | idea_context)

# The bootstrap grammar holds only the enabler rule; it stays loaded so
# Vim mode can be switched back on while the main grammar is disabled.
VimBootstrap = Grammar("vim bootstrap", context=vim_context)
VimBootstrap.add_rule(VimEnabler())
VimBootstrap.load()

VimGrammar = Grammar("vim grammar", context=vim_context)
VimGrammar.add_rule(LetterSequenceRule())
VimGrammar.add_rule(VimUtilities())
VimGrammar.add_rule(VimDisabler())
VimGrammar.add_rule(VimEnabler())
VimGrammar.load()
VimGrammar.enable()


def unload():
    global VimGrammar
    if VimGrammar:
        VimGrammar.unload()
    VimGrammar = None
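
The enabler and disabler rules referenced above are defined elsewhere in the source module. As a rough sketch of the pattern, assuming dragonfly's CompoundRule and made-up trigger phrases, they might look like this:

from dragonfly import CompoundRule

class VimEnabler(CompoundRule):
    spec = "enable vim"  # assumed trigger phrase

    def _process_recognition(self, node, extras):
        VimGrammar.enable()


class VimDisabler(CompoundRule):
    spec = "disable vim"  # assumed trigger phrase

    def _process_recognition(self, node, extras):
        VimGrammar.disable()

Keeping VimEnabler in the always-loaded bootstrap grammar means the enabling phrase stays recognizable even while VimGrammar itself is disabled.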
Example #2

from dragonfly import Function, Grammar, IntegerRef, MappingRule


class DynamicConfigurationUtilities(MappingRule):
    mapping = {
        "set sleep to <sleep_value>": Function(set_sleep),
        "show sleep value": Function(show_sleep),
    }

    extras = [
        IntegerRef("sleep_value", 0, 251),
    ]


# The main DynamicConfiguration grammar rules are activated here
dynamic_configuration_bootstrap = Grammar("dynamic_configuration bootstrap")
dynamic_configuration_bootstrap.add_rule(DynamicConfigurationEnabler())
dynamic_configuration_bootstrap.load()

dynamic_configuration_grammar = Grammar("dynamic_configuration grammar")
dynamic_configuration_grammar.add_rule(DynamicConfigurationUtilities())
dynamic_configuration_grammar.add_rule(DynamicConfigurationDisabler())
dynamic_configuration_grammar.load()
dynamic_configuration_grammar.enable()


def unload():
    global dynamic_configuration_grammar
    if dynamic_configuration_grammar:
        dynamic_configuration_grammar.unload()
    dynamic_configuration_grammar = None
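
set_sleep and show_sleep are defined elsewhere in the source module; dragonfly's Function action invokes them with each matched extra passed as a keyword argument named after the extra. A minimal sketch under that assumption (the bodies here are hypothetical):

_sleep = 0  # hypothetical module-level state


def set_sleep(sleep_value):
    # the "sleep_value" extra arrives as a keyword argument
    global _sleep
    _sleep = sleep_value


def show_sleep():
    print("current sleep value: %s" % _sleep)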
Example #3
import hashlib
import time
from copy import copy

from dragonfly import Grammar, List, MappingRule, get_engine

# Project-specific modules and classes (dfly, protocol, log, RuleType,
# SeriesMappingRule, ReportingAction, NeedDependency, MissingDependency)
# are defined elsewhere in the source tree.


class MasterGrammar(object):
    """A MasterGrammar is built up from a specific set of active rules. They
    synthesize the different rule types into one dragonfly grammar. There is
    only ever one master grammar active at a time."""
    def __init__(self, baseRuleSet, client, ruleCache):
        self.client = client
        self.ruleCache = ruleCache

        # Hashes that are directly part of this grammar
        self.baseRuleSet = set(baseRuleSet)
        # Hashes of rules that we discover are dependencies
        # of the base rule set
        self.dependencyRuleSet = set()

        # hash -> dragonfly rule
        self.concreteRules = {}
        # one hash per merge group, hash is of hashes of rules that were merged
        self.seriesRules = set()
        # one hash, hash is of hashes of rules that were merged
        self.terminatorRule = ""
        # one hash per rule, hash is the rule's actual hash
        self.independentRules = set()

        # Rule references are stored as hashes, so rules that
        # contain rule refs already effectively include those
        # rules in their hash, so just hashing the base set is
        # all we need.
        x = hashlib.sha256()
        x.update("".join(sorted(self.baseRuleSet)).encode("utf-8"))
        self.hash = x.hexdigest()[:32]

        # Hashes of rules we depend on but haven't arrived yet.
        # These will be discovered during the dfly grammar building
        # process.
        self.missing = set()
        self.checkDeps(self.fullRuleSet)  # build self.missing
        self.finalDflyRule = None
        self.dflyGrammar = None

        # word lists are *not* hashed. they are global state the
        # client can update at any time, and the change has to be
        # propagated into the currently active grammar. the client
        # can choose to make them rule specific by making the name
        # be the hash of the rule the word list applies to, but this
        # is only convention and not enforced
        self.concreteWordLists = {}

    @property
    def fullRuleSet(self):
        return self.baseRuleSet | self.dependencyRuleSet

    def satisfyDependency(self, r):
        """Marks dependency on hash r as satisfied, and tries to build if no more known
        deps are missing. During the build process new indirect dependencies may still
        be discovered however."""
        assert r in self.missing
        self.missing.remove(r)
        if not self.missing:
            self.build()

    def checkDep(self, r):
        "Checks if dep r is present. Not recursive."
        if r not in self.ruleCache:
            self.ruleCache[r] = NeedDependency()
        if isinstance(self.ruleCache[r], NeedDependency):
            self.ruleCache[r].add(self.hash)
            self.missing.add(r)
            return False
        return True

    def checkMissing(self):
        if self.missing:
            raise MissingDependency(copy(self.missing))

    def checkDeps(self, ruleSet):
        "Recursively check if all deps in ruleSet are satisfied."
        if not ruleSet:
            return True

        newDeps = set()
        for r in ruleSet:
            if self.checkDep(r):
                rule = self.ruleCache[r]  # HashedRule

                rule = rule.rule
                log.info("rule [%s]" % (rule, ))
                for e in rule.extras:
                    if hasattr(e, "rule_ref"):
                        newDeps.add(e.rule_ref)

        self.dependencyRuleSet.update(newDeps)
        self.checkDeps(newDeps)

    def ready(self):
        return len(self.missing) == 0

    def build(self):
        if self.dflyGrammar:
            # already built
            return

        buildStartTime = time.time()

        self.checkMissing()
        self.checkDeps(self.fullRuleSet)
        self.checkMissing()

        # from here on we assume all deps are present all the way down
        seriesGroups = {}
        terminal = {}

        allRules = []

        mergeStartTime = time.time()

        # Merge series and terminal rules, set independent rules aside
        self.fullName = []
        for r in self.fullRuleSet:
            rule = self.ruleCache[r].rule
            hash = self.ruleCache[r].hash
            if rule.ruleType == RuleType.SERIES:
                if rule.seriesMergeGroup not in seriesGroups:
                    seriesGroups[rule.seriesMergeGroup] = {}
                x = seriesGroups[rule.seriesMergeGroup]
            elif rule.ruleType == RuleType.TERMINAL:
                x = terminal
            elif rule.ruleType == RuleType.INDEPENDENT:
                x = {}

            if "mapping" not in x:
                x["mapping"] = {}
            if "extras" not in x:
                x["extras"] = {}
            if "defaults" not in x:
                x["defaults"] = {}
            if "name" not in x:
                x["name"] = ""
            if "hash" not in x:
                x["hash"] = set()

            x["ruleType"] = rule.ruleType
            x["seriesMergeGroup"] = rule.seriesMergeGroup
            x["name"] = x["name"] + ("," if x["name"] else "") + rule.name
            x["mapping"].update(rule.mapping.items())
            for e in rule.extras:
                x["extras"][e.name] = e
            x["defaults"].update(rule.defaults.items())
            log.info("Adding hash [%s] to name [%s]" % (hash, x["name"]))
            x["hash"].add(hash)
            x["built"] = False
            x["exported"] = (rule.ruleType == RuleType.INDEPENDENT)

            # allRules will contain all the rules we have left
            # *after* merging. So only one series rule per merge
            # group and only one terminal rule.
            allRules.append(x)

        mergeEndTime = time.time()
        log.info("Grammar merge time: %ss" % (mergeEndTime - mergeStartTime))

        # We really should be doing a topological sort, but this
        # isn't a frequent operation so this inefficiency should
        # be OK. Keep trying to link deps until they're all good.
        uniqueRules = []
        for r in allRules:
            if r not in uniqueRules:
                uniqueRules.append(r)
                self.fullName.append(r["name"])
        self.fullName = ",".join(self.fullName)
        allRules = uniqueRules

        # collapse the hashes
        for r in allRules:
            assert type(r["hash"]) == set
            assert len(r["hash"]) >= 1
            if r["ruleType"] in (RuleType.SERIES, RuleType.TERMINAL):
                # We generate a composite hash for our new composite rules
                log.info("Multi-hash: [%s]" % r["hash"])
                hashes = sorted(r["hash"])
                x = hashlib.sha256()
                x.update("".join(hashes).encode("utf-8"))
                hash = x.hexdigest()[:32]
                log.info("Composite: [%s]" % hash)
            else:
                # We just use the existing hash for a rule if it's not composite
                [hash] = r["hash"]
                log.info("Single hash: [%s]" % r["hash"])
            r["hash"] = hash

        allPrototypes = {i["hash"]: i for i in allRules}

        self.concreteTime = 0
        cleanupTime = 0
        for k, v in allPrototypes.items():
            if not v["built"]:
                cleanupStart = time.time()
                self.cleanupProtoRule(v, allPrototypes)
                cleanupEnd = time.time()
                cleanupTime += (cleanupEnd - cleanupStart)

        log.info("Total Cleanup time: %ss" % cleanupTime)
        log.info("Total Concrete time: %ss" % (self.concreteTime))

        #log.info("made it out of loop")
        self.buildFinalMergedRule()

        buildEndTime = time.time()
        log.info("Grammar build time: %ss" % (buildEndTime - buildStartTime))

        self.setupFinalDflyGrammar()

    def buildFinalMergedRule(self):
        #log.info("Building final merged rule.")
        if not self.seriesRules and not self.terminatorRule:
            return

        extras = []
        seriesRefNames = []
        for i, r in enumerate(self.seriesRules):
            name = "s" + str(i)
            seriesRefNames.append(name)
            ref = dfly.RuleRef(self.concreteRules[r], name)
            extras.append(ref)
        seriesPart = "[" + " | ".join([("<" + r + ">")
                                       for r in seriesRefNames]) + "]"

        terminatorPart = ""
        if self.terminatorRule:
            extras.append(
                dfly.RuleRef(self.concreteRules[self.terminatorRule],
                             "terminator"))
            terminatorPart = " [<terminator>]"

        masterPhrase = seriesPart + terminatorPart
        mapping = {
            masterPhrase: ReportingAction(masterPhrase, self.client, self.hash)
        }

        log.info(
            "Building master grammar rule with name [%s] mapping [%s] extras [%s] defaults [%s]"
            % (self.fullName, mapping, extras, {}))
        masterTimeStart = time.time()
        self.finalDflyRule = MappingRule(name=self.hash,
                                         mapping=mapping,
                                         extras=extras,
                                         defaults={},
                                         exported=True)
        masterTimeEnd = time.time()
        log.info("Master rule construction time: %ss" %
                 (masterTimeEnd - masterTimeStart))

    def setupFinalDflyGrammar(self):
        log.info("Setting up final grammar.")

        assert not self.dflyGrammar
        self.dflyGrammar = Grammar(self.fullName + "Grammar")
        if self.finalDflyRule:
            self.dflyGrammar.add_rule(self.finalDflyRule)
        for r in self.independentRules:
            self.dflyGrammar.add_rule(self.concreteRules[r])
        loadStart = time.time()
        self.dflyGrammar.load()
        loadEnd = time.time()
        log.info("Grammar load time: %ss" % (loadEnd - loadStart))
        get_engine().set_exclusiveness(self.dflyGrammar, 1)

        # These should never be recognized on their own, only as part of the
        # master rule, quirk of dragonfly that you have to do this even though
        # they're only pulled in by ruleref.
        for r in self.seriesRules:
            self.concreteRules[r].disable()
        if self.terminatorRule:
            self.concreteRules[self.terminatorRule].disable()

        # independent rules only enabled via being a dependency need to have disable
        # called on their dragonfly version so that they don't get recognized by
        # themselves, same quirk.
        notEnabledRules = self.dependencyRuleSet - self.baseRuleSet
        for r in notEnabledRules:
            self.concreteRules[r].disable()

        # they're enabled by default, don't activate until explicitly made to
        self.dflyGrammar.disable()

    def active(self):
        #log.info("active check [%s %s %s]" % (self.dflyGrammar is None, self.dflyGrammar and self.dflyGrammar.loaded, self.dflyGrammar and self.dflyGrammar.enabled))
        return self.dflyGrammar and self.dflyGrammar.loaded and self.dflyGrammar.enabled

    def activate(self):
        self.build()
        self.dflyGrammar.enable()
        log.info("Grammar activated: [%s]" % self.hash)

    def deactivate(self):
        # it's possible we never built successfully
        if self.dflyGrammar:
            self.dflyGrammar.disable()
            log.info("Grammar deactivated: [%s]" % self.hash)

    def unload(self):
        self.deactivate()
        if self.dflyGrammar:
            self.dflyGrammar.unload()

    def buildConcreteRule(self, r):
        # for independent rules we could use the plain
        # name, but it turns out Dragon crashes if your
        # names get too long, so for combined rules we
        # just use the hash as the name... hopefully
        # that's under the limit
        name = r["hash"]
        if r["ruleType"] == RuleType.SERIES:
            t = SeriesMappingRule
        elif r["ruleType"] == RuleType.TERMINAL:
            t = MappingRule
        else:
            t = MappingRule

        constructionStartTime = time.time()

        log.info(
            "Building rule [%s] with size [%s] num extras [%s] num defaults [%s]"
            % (r["name"], len(r["mapping"]), len(
                r["extras"]), len(r["defaults"])))

        rule = t(name=name,
                 mapping=r["mapping"],
                 extras=r["extras"],
                 defaults=r["defaults"],
                 exported=r["exported"])
        constructionEndTime = time.time()

        log.info("Rule construction time: %ss" %
                 (constructionEndTime - constructionStartTime))

        self.concreteRules[r["hash"]] = rule

        if r["ruleType"] == RuleType.SERIES:
            self.seriesRules.add(r["hash"])
        elif r["ruleType"] == RuleType.TERMINAL:
            self.terminatorRule = r["hash"]
        elif r["ruleType"] == RuleType.INDEPENDENT:
            self.independentRules.add(r["hash"])
        else:
            assert False

        log.info("done building")

    def cleanupProtoRule(self, r, allPrototypes):
        # have to uniquify in this roundabout way because lists
        # aren't hashable and we need them for ListRef.
        if type(r["extras"]) == dict:
            r["extras"] = r["extras"].values()

        newExtras = []
        for e in r["extras"]:
            if isinstance(e, protocol.Integer):
                newExtras.append(dfly.Integer(e.name, e.min, e.max))
            elif isinstance(e, protocol.Dictation):
                newExtras.append(dfly.Dictation(e.name))
            elif isinstance(e, protocol.Repetition):
                if e.rule_ref not in self.concreteRules:
                    self.cleanupProtoRule(allPrototypes[e.rule_ref],
                                          allPrototypes)

                # Dragonfly wants RuleRef to take a RuleRef rather than an actual
                # Rule, so we just make one rather than forcing the server to
                # handle this, see protocol.py comments.
                concrete = self.concreteRules[e.rule_ref]
                log.info("concrete type: [%s]" % type(concrete))
                newExtras.append(
                    dfly.Repetition(dfly.RuleRef(rule=concrete), e.min, e.max,
                                    e.name))
            elif isinstance(e, protocol.RuleRef):
                if e.rule_ref not in self.concreteRules:
                    self.cleanupProtoRule(allPrototypes[e.rule_ref],
                                          allPrototypes)

                newExtras.append(
                    dfly.RuleRef(self.concreteRules[e.rule_ref], e.name))
            elif isinstance(e, protocol.ListRef):
                self.concreteWordLists[e.name] = List(e.name + "ConcreteList")
                # self.concreteWordLists[e.name].set(e.words)
                newExtras.append(
                    dfly.ListRef(e.ref_name, self.concreteWordLists[e.name]))
            else:
                raise Exception("Unknown extra type: [%s]" % e)

        r["extras"] = newExtras

        self.concreteStartTime = time.time()
        self.buildConcreteRule(r)
        self.concreteEndTime = time.time()
        self.concreteTime += (self.concreteEndTime - self.concreteStartTime)

        r["built"] = True
        return True

    def updateWordList(self, name, words):
        if name not in self.concreteWordLists:
            # log.info("Word list [%s] not in grammar [%s], ignoring" % (name, self.hash))
            return

        # We want to check if the value has actually changed because List's
        # set method will blindly tell Dragon to delete its old list and replace
        # it with this one and we don't want to disturb Dragon unless we have to
        # because Dragon is slow.
        if sorted(words) != sorted(self.concreteWordLists[name]):
            log.info(
                "Updating word list [%s] on grammar [%s] with [%s] words" %
                (name, self.hash, len(words)))
            log.info("old list: %s" % self.concreteWordLists[name])
            # TODO: need to check existing load state, then send a loading message here, then restore
            # old state. This way we can see when word lists are taking a long time to load...
            updateStart = time.time()
            self.concreteWordLists[name].set(words)
            updateEnd = time.time()
            log.info("Word list update time: %ss" % (updateEnd - updateStart))
Example #4
from dragonfly import Dictation, Grammar, MappingRule

# The choice helpers used in the extras below (git_command_choice,
# formatting_choice, directory_command_choice, git_clipboard_command_choice)
# and the other rule classes are defined elsewhere in the source module.


class TerminalUtilities(MappingRule):
    mapping = {
        # ... (mapping entries omitted in this excerpt)
    }

    extras = [
        Dictation("branch_name", default=""),
        Dictation("repository_name", default=""),
        Dictation("directory_name", default=""),
        git_command_choice("git_command"),
        formatting_choice("format_type"),
        directory_command_choice("directory_command"),
        git_clipboard_command_choice("git_clipboard_command"),
    ]


# The main Terminal grammar rules are activated here
terminal_bootstrap = Grammar("terminal bootstrap")
terminal_bootstrap.add_rule(TerminalEnabler())
terminal_bootstrap.load()

terminal_grammar = Grammar("terminal grammar")
terminal_grammar.add_rule(TerminalUtilities())
terminal_grammar.add_rule(TerminalDisabler())
terminal_grammar.load()
terminal_grammar.enable()


def unload():
    global terminal_grammar
    if terminal_grammar:
        terminal_grammar.unload()
    terminal_grammar = None
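
The *_choice helpers used in the extras list above are defined elsewhere in the source module. A plausible sketch, assuming they wrap dragonfly's Choice extra (the spoken forms and values here are made up for illustration):

from dragonfly import Choice


def git_command_choice(name):
    # hypothetical spoken-form -> value mapping
    return Choice(name, {
        "status": "status",
        "commit": "commit",
        "push": "push",
        "pull": "pull",
    })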