def unpack_typed_params(ctx, typed_params):
    """unpack_typed_params(typed_params) -> [(var1, ty1), ...]

    Flatten a lark ``typed_params`` node into (Var, type) pairs.
    An ``untyped_param`` child gets a fresh Hole as its type; every
    name in a ``param_group`` shares the group's declared type (the
    group's last child).
    """
    pairs = []
    for node in typed_params.children:
        if node.data == "untyped_param":
            # Exactly one child: the bare variable name token.
            token, = node.children
            pairs.append((easy.Var(str(token)), easy.Hole()))
        elif node.data == "param_group":
            # Last child is the type; all preceding children are names.
            group_ty = unpack_term_ast(ctx, node.children[-1])
            pairs.extend(
                (easy.Var(str(token)), group_ty)
                for token in node.children[:-1]
            )
        else:
            assert False
    return pairs
def parse(toks, i):
    """Recursive-descent parser over a flat token list.

    Parses the term beginning at ``toks[i]`` and returns ``(term, j)``
    where ``j`` is the index of the first token AFTER the term.  Forms
    handled (everything else falls through to a Var):

      ( fun v : T . E )      -> Abstraction
      ( forall v : T . E )   -> DependentProduct
      ( F X )                -> Application
      TypeN                  -> SortType(N)
      Prop                   -> SortProp
      @Ind . Con             -> InductiveConstructor
      match ... end          -> Match
    """
    if toks[i] == "(" and toks[i + 1] in ["fun", "forall"]:
        # Binder form: "(" kw var ":" type "." body ")"
        var = easy.Var(toks[i + 2])
        assert toks[i + 3] == ":"
        type_term, j = parse(toks, i + 4)
        assert toks[j] == "."
        expr_term, k = parse(toks, j + 1)
        assert toks[k] == ")"
        # Pick the node constructor from the keyword token.
        kind = {
            "fun": easy.Abstraction,
            "forall": easy.DependentProduct
        }[toks[i + 1]]
        return kind(var, type_term, expr_term), k + 1
    elif toks[i] == "(":
        # Application: "(" applicand applicee ")"
        applicand, j = parse(toks, i + 1)
        applicee, k = parse(toks, j)
        assert toks[k] == ")"
        return easy.Application(applicand, applicee), k + 1
    elif toks[i].startswith("Type"):
        # Sort token spelled "TypeN"; the suffix is the universe index.
        # NOTE(review): any identifier starting with "Type" whose suffix
        # is not an integer would raise ValueError here — confirm the
        # tokenizer never produces such names.
        universe_index = int(toks[i][4:])
        return easy.SortType(universe_index), i + 1
    elif toks[i] == "Prop":
        return easy.SortProp(), i + 1
    elif toks[i].startswith("@"):
        # Constructor reference consumes three tokens: "@Ind" "." "Con".
        ind_name = toks[i][1:]
        assert toks[i + 1] == "."
        con_name = toks[i + 2]
        return easy.InductiveConstructor(ind_name, con_name), i + 3
    elif toks[i] == "match":
        # match M as A in I return R with | pat => res ... end
        # The as/in/return annotations are mandatory and must appear in
        # exactly this order.
        matchand, i = parse(toks, i + 1)
        assert toks[i] == "as"
        as_term, i = parse(toks, i + 1)
        assert toks[i] == "in"
        in_term, i = parse(toks, i + 1)
        assert toks[i] == "return"
        return_term, i = parse(toks, i + 1)
        assert toks[i] == "with"
        i += 1
        arms = []
        # Each arm: "|" pattern "=>" result
        while toks[i] == "|":
            pattern, i = parse(toks, i + 1)
            assert toks[i] == "=>"
            result, i = parse(toks, i + 1)
            arms.append(easy.Match.Arm(pattern, result))
        assert toks[i] == "end", "Expected end: %r" % (toks[i], )
        match_term = easy.Match(matchand, as_term, in_term, return_term, arms)
        return match_term, i + 1
    # Fallback: any other single token is treated as a variable reference.
    return easy.Var(toks[i]), i + 1
def vernac_inductive(context, vernac):
    """Handle an ``Inductive`` vernacular command.

    Builds an ``easy.Inductive`` from the parsed AST node ``vernac``,
    adds each declared constructor (desugaring its typed params into
    dependent products wrapped around the constructor's type), and then
    binds the inductive's name in ``context``.
    """
    name, typed_params, arity, constructors = vernac.children
    # Unpack the typed_params into the inductive's parameters.
    parameters = parsing.unpack_typed_params_to_Parameters(context, typed_params)
    arity = parsing.unpack_term_ast(context, arity)
    ind = easy.Inductive(
        context,
        str(name),
        parameters,
        arity,
    )
    # Add the constructors.
    for constructor in constructors.children:
        con_name, con_typed_params, con_type = constructor.children
        con_type = parsing.unpack_term_ast(context, con_type)
        # In this case the typed params are just sugar for extending the type,
        # so apply the con_typed_params as additional products around the type.
        con_type = parsing.wrap_with_typed_params(
            context, con_typed_params, con_type, "dependent_product")
        ind.add_constructor(context, str(con_name), con_type)
    # Add the inductive in globally for use by later definitions.
    context.extend_def(easy.Var(str(name)), easy.InductiveRef(str(name)),
                       in_place=True)
    # Fix: use the parenthesized print form — with a single argument it
    # behaves identically under Python 2 and is valid Python 3, whereas
    # the bare `print "Added:"` statement is a SyntaxError on Python 3.
    print("Added:")
    ind.pprint()
def vernac_definition(context, vernac):
    """Handle a ``Definition`` vernacular command.

    Unpacks the body term, wraps the typed params around it as
    abstractions, and binds the resulting term to ``name`` in
    ``context``.
    """
    name, typed_params, type_annotation, body = vernac.children
    # TODO: Properly unfold the typed_params as abstractions around the body.
    # TODO: Properly check the type annotation.
    unpacked_body = parsing.unpack_term_ast(context, body)
    wrapped_body = parsing.wrap_with_typed_params(
        context, typed_params, unpacked_body, "abstraction")
    context.extend_def(easy.Var(str(name)), wrapped_body, in_place=True)
def unpack_term_ast(ctx, ast):
    """Convert a lark AST node (or lexer Token) into an ``easy`` term.

    Dispatches on the node's ``.data`` tag and recurses into children;
    raises NotImplementedError for tags this translator does not yet
    handle.
    """
    # This is a binding of some sort.
    if isinstance(ast, lark.lexer.Token):
        name = str(ast)
        # Tokens spelled "TypeN" / "Prop" denote sorts, not variables.
        if name.startswith("Type"):
            universe_index = int(name[4:])
            return easy.SortType(universe_index)
        if name == "Prop":
            return easy.SortProp()
        # XXX: Make sure the name is bound!
        # if not ctx.contains_def(easy.Var(name)) and not ctx.contains_ty(easy.Var(name)):
        #     raise ValueError("Unbound name: %s" % (name,))
        return easy.Var(name)
    if ast.data in ("dependent_product", "abstraction"):
        # Both binder forms share a shape: typed params wrapped around a
        # body; wrap_with_typed_params builds the nested binders.
        typed_params, result_ty = ast.children
        result = unpack_term_ast(ctx, result_ty)
        return wrap_with_typed_params(ctx, typed_params, result, ast.data)
    if ast.data == "arrow":
        # A -> B desugars to a non-dependent product; "!" is the dummy binder.
        A, B = [unpack_term_ast(ctx, child) for child in ast.children]
        return easy.DependentProduct(easy.Var("!"), A, B)
    if ast.data == "application":
        fn, arg = [unpack_term_ast(ctx, child) for child in ast.children]
        return easy.Application(fn, arg)
    if ast.data == "annotation":
        x, ty = [unpack_term_ast(ctx, child) for child in ast.children]
        return easy.Annotation(x, ty)
    if ast.data == "constructor":
        # XXX: Check name presence.
        ind_name, con_name = map(str, ast.children)
        return easy.ConstructorRef(ind_name, con_name)
    if ast.data == "match":
        ast_matchand, ast_extensions, ast_arms = ast.children
        match_term = unpack_term_ast(ctx, ast_matchand)
        # as/in/return extensions default to Holes when omitted.
        extensions = {
            "as": easy.Hole(),
            "in": easy.Hole(),
            "return": easy.Hole(),
        }
        # Process extensions.
        for child in ast_extensions.children:
            extension_term, = child.children
            extension_term = unpack_term_ast(ctx, extension_term)
            # Map the AST tag (e.g. "as_term") onto the extensions key.
            extensions[{
                "as_term": "as",
                "in_term": "in",
                "return_term": "return",
            }[child.data]] = extension_term
        # Process the arms.
        arms = []
        for arm in ast_arms.children:
            arm_pattern, arm_result = [unpack_term_ast(ctx, child) for child in arm.children]
            arms.append(easy.Match.Arm(arm_pattern, arm_result))
        return easy.Match(
            match_term,
            extensions["as"],
            extensions["in"],
            extensions["return"],
            arms,
        )
    if ast.data == "fix":
        name, typed_params, optional_type_annotation, body = ast.children
        parameters = unpack_typed_params_to_Parameters(ctx, typed_params)
        return_ty = unpack_optional_type_annotation(ctx, optional_type_annotation)
        body = unpack_term_ast(ctx, body)
        return easy.Fix(
            str(name),
            parameters,
            return_ty,
            body,
        )
    raise NotImplementedError("Unhandled AST node: %r" % (ast.data,))
def vernac_axiom(context, vernac):
    """Handle an ``Axiom`` vernacular command.

    Binds ``name`` in ``context`` to an Axiom term of the declared type.
    """
    axiom_name, axiom_ty = vernac.children
    axiom_term = easy.Axiom(parsing.unpack_term_ast(context, axiom_ty))
    context.extend_def(easy.Var(str(axiom_name)), axiom_term, in_place=True)