Example #1
    def check_refinement(self, parser_rules):
        # Split the incoming rules: plain rules are tokenized directly,
        # while RuleTemplates contribute their subgrammar rule and are
        # kept around for the refinement check below.
        rules = []
        templates = []
        for name, R in parser_rules:
            if isinstance(R, RuleTemplate):
                rules.append(ls_grammar.tokenize(R.get_subgrammar_rule()))
                templates.append(R)
            else:
                rules.append(ls_grammar.tokenize(R))
        # Build a derived grammar object that shares the host langlet's
        # lexer NFAs and token sets, then generate its parse tables.
        go = GrammarObject(rules)
        go.set_langlet_id(self.langlet.langlet_id)
        go.langlet.lex_nfa = self.langlet.lex_nfa
        go.langlet.token = go.langlet.parse_token = self.langlet.token
        go.create_grammar(expansion=False)
        go.langlet._load_unparser()
        nfas = go.get_nfas()
        # Check that each template is a true refinement: every span trace
        # through the specialized NFA must also be accepted by the trace
        # checker of the original grammar.
        for rt in templates:
            nid = rt.get_nid()
            nfa = nfas[nid]
            traces = compute_span_traces(nfa)
            for tr in traces:
                tro = TraceObject(tr[:-1])
                if not self.trchecker.checktrace(tro, start_symbol=nid):
                    raise GrammarError("Not a refinement: '%s'" % rt.rule_name)
        return go
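
The check above leans on Langscape's NFA machinery, so it is hard to run in isolation. Below is a minimal, self-contained sketch of the same idea, assuming toy dict-based NFAs; traces, accepts and is_refinement are hypothetical stand-ins for compute_span_traces and trchecker.checktrace, not Langscape API. A rule refines another if every symbol trace of the specialized NFA is also accepted by the base rule's NFA:

    # Toy NFA: maps state -> list of (symbol, next_state); a transition
    # whose symbol is None marks an accepting point.

    def traces(nfa, state, seen=(0,)):
        """Enumerate acyclic symbol traces from `state` to acceptance."""
        for symbol, nxt in nfa[state]:
            if symbol is None:
                yield ()
            elif nxt not in seen:
                for rest in traces(nfa, nxt, seen + (nxt,)):
                    yield (symbol,) + rest

    def accepts(nfa, trace):
        """True if the NFA can consume the whole trace and then accept."""
        states = {0}
        for symbol in trace:
            states = {nxt for s in states
                      for sym, nxt in nfa[s] if sym == symbol}
            if not states:
                return False
        return any(sym is None for s in states for sym, _ in nfa[s])

    def is_refinement(base_nfa, specialized_nfa):
        return all(accepts(base_nfa, tr) for tr in traces(specialized_nfa, 0))

    # Base rule:        expr: NAME ('+' NAME)*
    base = {0: [('NAME', 1)],
            1: [(None, 1), ('+', 2)],
            2: [('NAME', 1)]}
    # Specialized rule: expr: NAME '+' NAME  -- a strict subset of the base.
    special = {0: [('NAME', 1)],
               1: [('+', 2)],
               2: [('NAME', 3)],
               3: [(None, 3)]}

    assert is_refinement(base, special)

If the specialized rule admitted a trace the base rule cannot parse, say NAME '+' '+' NAME, is_refinement would return False; that situation corresponds to the GrammarError raised above.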
Example #2
    def create_refined_grammar(self):
        parser_rules = self.get_parse_gen()
        variables = {}
        n = 0
        # Scan the object for methods marked as `refined`; their docstrings
        # carry the refined grammar rule, which replaces the parser rule of
        # the same name.
        for name in dir(self):
            m = getattr(self, name)
            if hasattr(m, "refined"):
                doc = (m.__doc__ or "").strip()
                if doc.startswith(name) and doc[len(name):len(name) + 1] in " :":
                    rt = RuleTemplate(self.langlet, doc)
                    for i, (nm, _) in enumerate(parser_rules):
                        if name == nm:
                            parser_rules[i] = (name, rt)
                            n += 1
                # variables[name] = m() or []
        go = self.check_refinement(parser_rules)
        go.variables = variables
        if n == 0:
            # No rule was refined -- the checked grammar can be used as is.
            return go
        else:
            # Rebuild the grammar from the full rule set, adding one
            # placeholder rule per template variable.
            rules = []
            for name, R in parser_rules:
                if isinstance(R, RuleTemplate):
                    g_rule = R.get_grammar_rule()
                    rules.append(ls_grammar.tokenize(g_rule))
                    for varname in R.variables:
                        rules.append(
                            ls_grammar.tokenize(varname + ": '$%s$'" % varname))
                else:
                    rules.append(ls_grammar.tokenize(R))
            go = GrammarObject(rules)
            go.set_langlet_id(self.langlet.langlet_id)
            go.langlet.lex_nfa = self.langlet.lex_nfa
            go.langlet.token = go.langlet.parse_token = self.langlet.token
            go.create_grammar(expansion=False)
            go.langlet._load_unparser()
            return go
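
The scan over dir(self) depends on a convention that is easy to miss: a method marked with a refined attribute carries its grammar rule in its docstring, and the docstring must begin with the method's own name followed by a space or a colon. The following is a minimal sketch of that convention, with a hypothetical refined decorator and Refiner class standing in for the langlet machinery (the raw rule string replaces RuleTemplate for illustration):

    def refined(method):
        # Hypothetical marker; in the code above, presence of the
        # attribute is all that matters.
        method.refined = True
        return method

    class Refiner:
        def __init__(self, parser_rules):
            # parser_rules: list of (rule_name, rule_source) pairs
            self.parser_rules = parser_rules

        @refined
        def expr(self):
            "expr: NAME '+' NAME"

        def collect_refinements(self):
            n = 0
            for name in dir(self):
                m = getattr(self, name)
                if hasattr(m, "refined"):
                    doc = (m.__doc__ or "").strip()
                    # The rule must start with the method's own name,
                    # followed by a space or a colon.
                    if doc.startswith(name) and doc[len(name):len(name) + 1] in " :":
                        for i, (nm, _) in enumerate(self.parser_rules):
                            if nm == name:
                                self.parser_rules[i] = (name, doc)
                                n += 1
            return n

    rules = [("expr", "expr: NAME ('+' NAME)*"), ("atom", "atom: NAME")]
    r = Refiner(rules)
    assert r.collect_refinements() == 1
    assert r.parser_rules[0] == ("expr", "expr: NAME '+' NAME")

Note that the guard also passes when the docstring is exactly the method name, since the empty string is a substring of " :"; the real code inherits the same behavior.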