def expr_call(self, method, args, fallible):
    """Build a CallMethod action invoking *method* on the current trait.

    NOTE: Currently "AstBuilder" functions are made fallible using the
    fallible_methods taken from some Rust code which extract this
    information to produce a JSON file, so the caller-supplied `fallible`
    flag is ignored for that trait.
    """
    is_fallible = self.method_trait != "AstBuilder" and fallible is not None
    return grammar.CallMethod(method,
                              args or (),
                              types.Type(self.method_trait),
                              is_fallible)
def finish_grammar(nt_defs, goals, variable_terminals, synthetic_terminals,
                   single_grammar=True, extensions=None):
    """Assemble parsed nonterminal definitions into a grammar.Grammar.

    Args:
        nt_defs: iterable of (nt_name, eq, rhs_list_or_lambda) triples.
            `eq` identifies which sub-grammar the definition belongs to
            (":" for syntactic, "::" for lexical).
        goals: goal nonterminal names; must be non-empty.
        variable_terminals: passed through to grammar.Grammar.
        synthetic_terminals: passed through to grammar.Grammar.
        single_grammar: if True, all goals must belong to the same
            sub-grammar, and definitions from other sub-grammars are dropped.
        extensions: optional list of extensions applied via Grammar.patch.
            Defaults to an empty list.

    Returns:
        The finished grammar.Grammar, with exec modes attached and
        extensions patched in.

    Raises:
        ValueError: on duplicate or multiple nonterminal definitions, no
            goal nonterminals, goals spanning multiple sub-grammars,
            malformed backtick-quoted tokens, or a name used as both a
            terminal and a nonterminal.
    """
    # Fix: the default used to be a shared mutable list (`extensions=[]`);
    # use the None-sentinel idiom instead so calls never share state.
    if extensions is None:
        extensions = []

    nt_grammars = {}
    for nt_name, eq, _ in nt_defs:
        if nt_name in nt_grammars:
            raise ValueError(
                "duplicate definitions for nonterminal {!r}".format(nt_name))
        nt_grammars[nt_name] = eq

    # Figure out which grammar we were trying to get (":" for syntactic,
    # "::" for lexical) based on the goal symbols.
    goals = list(goals)
    if len(goals) == 0:
        raise ValueError("no goal nonterminals specified")
    if single_grammar:
        selected_grammars = set(nt_grammars[goal] for goal in goals)
        assert len(selected_grammars) != 0
        if len(selected_grammars) > 1:
            raise ValueError(
                "all goal nonterminals must be part of the same grammar; "
                "got {!r} (matching these grammars: {!r})".format(
                    set(goals), set(selected_grammars)))
        [selected_grammar] = selected_grammars

    terminal_set = set()

    def hack_production(p):
        # Rewrite backtick-quoted symbols (`tok`) in a production body into
        # bare terminal tokens, recording each token in terminal_set.
        for i, e in enumerate(p.body):
            if isinstance(e, str) and e[:1] == "`":
                if len(e) < 3 or e[-1:] != "`":
                    raise ValueError(
                        "Unrecognized grammar symbol: {!r} (in {!r})".format(
                            e, p))
                # NOTE(review): iterates p.body but assigns p[i] — this
                # relies on Production supporting item assignment; confirm
                # it isn't meant to be p.body[i].
                p[i] = token = e[1:-1]
                terminal_set.add(token)

    nonterminals = {}
    for nt_name, eq, rhs_list_or_lambda in nt_defs:
        # When restricted to a single grammar, skip definitions that belong
        # to a sub-grammar other than the selected one.
        if single_grammar and eq != selected_grammar:
            continue

        if isinstance(rhs_list_or_lambda, grammar.NtDef):
            nonterminals[nt_name] = rhs_list_or_lambda
        else:
            rhs_list = rhs_list_or_lambda
            for p in rhs_list:
                if not isinstance(p, grammar.Production):
                    raise ValueError(
                        "invalid grammar: ifdef in non-function-call context")
                hack_production(p)
            if nt_name in nonterminals:
                raise ValueError(
                    "unsupported: multiple definitions for nt " + nt_name)
            nonterminals[nt_name] = rhs_list

    # A name must not be both a backtick terminal and a nonterminal.
    for t in terminal_set:
        if t in nonterminals:
            raise ValueError(
                "grammar contains both a terminal `{}` and nonterminal {}".
                format(t, t))

    # Add execution modes to generate the various functions needed to handle
    # syntax parsing and full parsing execution modes.
    exec_modes = collections.defaultdict(OrderedSet)
    noop_parser = types.Type("ParserTrait", (
        types.Lifetime("alloc"), types.UnitType))
    token_parser = types.Type("ParserTrait", (
        types.Lifetime("alloc"),
        types.Type("StackValue", (types.Lifetime("alloc"), ))))
    ast_builder = types.Type("AstBuilderDelegate", (types.Lifetime("alloc"), ))

    # Full parsing takes token as input and build an AST.
    exec_modes["full_actions"].extend([token_parser, ast_builder])

    # Syntax parsing takes token as input but skip building the AST.
    # TODO: The syntax parser is commented out for now, as we need something to
    # be produced when we cannot call the AstBuilder for producing the values.

    # No-op parsing is used for the simulator, which is so far used for
    # querying whether we can end the incremental input and lookup if a state
    # can accept some kind of tokens.
    exec_modes["noop_actions"].add(noop_parser)

    # Extensions are using an equivalent of Rust types to define the kind of
    # parsers to be used, this map is used to convert these type names to the
    # various execution modes.
    full_parser = types.Type("FullParser")
    syntax_parser = types.Type("SyntaxParser")
    noop_parser = types.Type("NoopParser")
    type_to_modes = {
        noop_parser: ["noop_actions", "full_actions"],
        syntax_parser: ["full_actions"],
        full_parser: ["full_actions"],
    }

    result = grammar.Grammar(
        nonterminals,
        goal_nts=goals,
        variable_terminals=variable_terminals,
        synthetic_terminals=synthetic_terminals,
        exec_modes=exec_modes,
        type_to_modes=type_to_modes)
    result.patch(extensions)
    return result
def parameterized_type(self, name, args):
    """Build a parameterized type named *name* with type arguments *args*."""
    ty = types.Type(name, args)
    return ty
def simple_type(self, name):
    """Build a plain (non-parameterized) type for *name*."""
    ty = types.Type(name)
    return ty