import ast
import json
import textwrap

import pyasdl

# BaseSchemaGenerator, ESDLSchemaGenerator and INDENT are defined
# elsewhere in this module.


def generate_schema(input_file, output_file, schema_file):
    with open(input_file) as stream:
        source = stream.read()

    # Collect schema metadata from "tag: value" comments in the
    # ASDL source.
    schema = {}
    for comment in pyasdl.fetch_comments(source):
        tag, _, value = comment.strip().partition(": ")
        if tag in BaseSchemaGenerator.SCHEMA_FIELDS:
            schema[tag] = ast.literal_eval(value)

    tree = pyasdl.parse(source)
    schema_generator = ESDLSchemaGenerator(schema)
    declarations = "\n".join(
        definition.construct() for definition in schema_generator.visit(tree)
    )

    with open(output_file, "w") as stream:
        stream.write("START MIGRATION TO {\n")
        stream.write(f"{INDENT}module ast {{\n")
        stream.write(textwrap.indent(declarations, INDENT * 2))
        stream.write(f"\n{INDENT}}}")
        stream.write("\n};\n")

    schema["enum_types"] = schema_generator.enum_types
    # Check whether all fields are satisfied for the schema_file,
    # and raise a SchemaError if there are missing fields.
    with open(schema_file, "w") as stream:
        json.dump(schema, stream)
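
# A minimal usage sketch for generate_schema (the file names below are
# hypothetical; any Python ASDL grammar would do). It writes an ESDL
# migration block to the output file and the collected metadata
# (including enum_types) to the schema file:
#
#     generate_schema("Python.asdl", "ast.esdl", "schema.json")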

import pyasdl
from argparse import ArgumentParser
from pathlib import Path

# GraphQLGenerator is defined elsewhere in this module.


def main():
    parser = ArgumentParser()
    parser.add_argument("file", type=Path)
    options = parser.parse_args()

    with open(options.file) as source:
        tree = pyasdl.parse(source.read())

    visitor = GraphQLGenerator()
    for ql_type in visitor.visit(tree):
        print(str(ql_type))

import json

import pyasdl
from argparse import ArgumentParser, FileType

# FieldDBGenerator is defined elsewhere in this module.


def main():
    parser = ArgumentParser()
    parser.add_argument("file", type=FileType())
    options = parser.parse_args()

    with options.file as source:
        tree = pyasdl.parse(source.read())

    visitor = FieldDBGenerator()
    layout = visitor.visit(tree)
    print(json.dumps(layout, indent=4))
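
# Example invocation (script and grammar names are hypothetical). Since
# the argument is declared with FileType(), argparse opens it for reading:
#
#     $ python field_db.py Python.asdl > field_db.json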

import ast
import textwrap

import pyasdl
from argparse import ArgumentParser
from pathlib import Path

# retrive_version() and generate_stubs() are defined elsewhere in
# this module.


def main():
    parser = ArgumentParser()
    parser.add_argument("files", type=Path, nargs="+")
    options = parser.parse_args()

    asdls = []
    for file in options.files:
        with open(file) as stream:
            source = stream.read()
        version = retrive_version(source)
        asdls.append((version, pyasdl.parse(source)))

    stub = generate_stubs(asdls)
    print("from __future__ import annotations")
    print("import sys")
    print("import typing")
    print(
        textwrap.dedent(
            """\
            class AST:
                _attributes: typing.ClassVar[typing.Tuple[str, ...]]
                _fields: typing.ClassVar[typing.Tuple[str, ...]]
                def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: ...
                # Allow any attribute access (taken from types.SimpleNamespace)
                def __getattribute__(self, name: str) -> typing.Any: ...
                def __setattr__(self, name: str, value: typing.Any) -> None: ...
                def __delattr__(self, name: str) -> None: ...
            """
        )
    )
    print("identifier = str")
    print("string = typing.AnyStr")
    print(
        textwrap.dedent(
            """\
            constant = typing.Union[
                str, bytes,           # strings
                int, float, complex,  # numbers
                bool,                 # other
                tuple, frozenset,     # sequences
                None, type(Ellipsis)  # singletons
            ]
            """
        )
    )
    print(ast.unparse(ast.fix_missing_locations(stub)))
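
# Example invocation (script and grammar paths are hypothetical). Several
# grammars can be passed at once, since the stubs are generated from the
# (version, tree) pairs collected above:
#
#     $ python typing_stubs.py 3.8/Python.asdl 3.9/Python.asdl > ast.pyi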

import ast

import pyasdl

# NODE_DB is a mapping of mappings defined elsewhere in this module.


def load_asdl_map(source):
    tree = pyasdl.parse(source)
    for node_type in tree.body:
        ast_node_type = getattr(ast, node_type.name)
        if isinstance(node_type.value, pyasdl.Sum):
            for constructor in node_type.value.types:
                # Skip constructors that don't exist on the host
                # Python's ast module.
                if not hasattr(ast, constructor.name):
                    continue
                ast_constructor_type = getattr(ast, constructor.name)
                NODE_DB[ast_node_type][ast_constructor_type] = (
                    constructor.fields or []
                )
        else:
            ...  # FIX-ME(medium): awaiting support for position-less nodes.
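
# A sketch of consuming the populated map (the grammar path is
# hypothetical, and NODE_DB is assumed to support nested assignment,
# e.g. a defaultdict(dict)):
#
#     with open("Python.asdl") as stream:
#         load_asdl_map(stream.read())
#     print(NODE_DB[ast.expr][ast.BinOp])  # the BinOp constructor's fields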

import textwrap

import pyasdl
from argparse import ArgumentParser
from pathlib import Path

# GraphQLGenerator and DEFAULT_INDENT are defined elsewhere in this module.


def main():
    parser = ArgumentParser()
    parser.add_argument("file", type=Path)
    options = parser.parse_args()

    with open(options.file) as source:
        tree = pyasdl.parse(source.read())

    visitor = GraphQLGenerator()
    print("START MIGRATION TO {")
    print(DEFAULT_INDENT + "module ast {")
    for ql_type in visitor.visit(tree):
        print(textwrap.indent(str(ql_type), DEFAULT_INDENT * 2))
    print(DEFAULT_INDENT + "}")
    print("};")

import ast
import textwrap

import pyasdl
from argparse import ArgumentParser
from pathlib import Path

# PythonGenerator is defined elsewhere in this module.


def main():
    parser = ArgumentParser()
    parser.add_argument("file", type=Path)
    parser.add_argument("--with-defaults", action="store_true")
    # The default of 1 is stdout's file descriptor, which open() accepts
    # directly, so the output goes to stdout unless -o is given.
    parser.add_argument("-o", "--out", default=1)
    options = parser.parse_args()

    with open(options.file) as stream:
        tree = pyasdl.parse(stream.read())

    generator = PythonGenerator(with_defaults=options.with_defaults)
    stub = generator.generate(tree)

    with open(options.out, "w") as stream:
        stream.write("from __future__ import annotations\n\n")
        stream.write("import typing\n")
        stream.write("from enum import Enum as _Enum, auto as _auto\n")
        stream.write(
            "from dataclasses import dataclass as _dataclass, field as _field\n"
        )
        stream.write("identifier = str\n")
        stream.write("string = typing.AnyStr\n")
        stream.write(
            textwrap.dedent(
                """\
                constant = typing.Union[
                    str, bytes,           # strings
                    int, float, complex,  # numbers
                    bool,                 # other
                    tuple, frozenset,     # sequences
                    None, type(Ellipsis)  # singletons
                ]\n
                """
            )
        )
        stream.write("class AST: ...\n")
        stream.write(ast.unparse(ast.fix_missing_locations(stub)))
        stream.write("\n")
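
# Example invocation (script and grammar names are hypothetical); without
# -o, the generated module is written to stdout:
#
#     $ python python_ast.py Python.asdl --with-defaults -o ast.py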