def generate(options):
    parser = AnokyParser()
    try:
        if "filename" not in options:
            print("No filename specified.")
            return
        filename = options.filename
        stream = FileStream(filename)
        file_node = parser.parse(stream)
        parsed = indented_lisp_printer(file_node)

        expander = DefaultExpander()
        ec = expander.expand_unit(file_node)
        expanded = indented_lisp_printer(file_node)

        generator = DefaultGenerator()
        # provide expansion context to generation context
        py_module = generator.generate_unit(file_node, EC=ec)

        if options.verbose:
            print(parsed)
            print("\n〰〰〰〰〰〰 After macro expansion 〰〰〰〰〰〰")
            print(expanded)
            print("\n〰〰〰〰〰〰 Generated Python code 〰〰〰〰〰〰\n")
            astpp.parseprint(py_module)
            print("\n〰〰〰〰〰〰 Python retrosource 〰〰〰〰〰〰\n")
            print(ASTFormatter().format(py_module))

        return py_module
    except CompilerError as e:
        print(e.trace)
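# Hypothetical invocation sketch (assumption, not part of the original source):
# generate() expects an argparse-style options object exposing .filename and .verbose;
# "example.aky" is a placeholder file name.
if __name__ == "__main__":
    import argparse
    opts = argparse.Namespace(filename="example.aky", verbose=True)
    module_ast = generate(opts)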
def colorize(message: list) -> list:
    if not 3 <= len(message) <= 4:
        return error(
            "Colorization request format is:\n"
            " input: ['colorize', file_name:str, file_contents:str, binary=False]\n"
            " output: ['colorize', token_ranges:list(list(color_code, first_index, index_after))]")
    file_name = message[1]
    file_contents = message[2]
    if not isinstance(file_name, str):
        return error('Colorization request: "file_name" arg must be a string.')
    if not isinstance(file_contents, str):
        return error('Colorization request: "file_contents" arg must be a string.')
    if VERBOSE:
        print("\tfile-name: " + file_name)
        print("\tfile-contents: " + (repr(file_contents) if len(file_contents) < 80
                                     else repr(file_contents[0:80]) + " ..."))
    if len(message) == 4:
        binary = message[3]
        if not isinstance(binary, bool):
            return error('Colorization request: "binary" arg must be a boolean.')
    else:
        binary = False

    stream = StringStream(file_contents, name=file_name)
    parser = AnokyParser()
    code_expander = DefaultExpander()
    code_generator = DefaultGenerator()
    try:
        node = parser.parse(stream)
        code_expander.expand_unit(node)
        code_generator.generate_unit(node)

        colorized_tokens = []

        def extract_colorized_tokens(element):
            nonlocal colorized_tokens
            if (element.color is not None
                    and is_not_none(element, ".range.first_position.index")
                    and is_not_none(element, ".range.position_after.index")):
                token_color = element.color
                token_first = element.range.first_position.index
                token_after = element.range.position_after.index
                if not isinstance(token_color, int):
                    return error('Colorization request: color of token "%s" was not int!' % element.text)
                colorized_tokens.append([token_color, token_first, token_after])
            if isinstance(element.code, Node):
                for subelement in element.code:
                    extract_colorized_tokens(subelement)

        for element in node:
            extract_colorized_tokens(element)
    except CompilerError as e:
        return error(e)

    return pack(['colorize', colorized_tokens])
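# Hypothetical request/reply sketch (assumption, not part of the original source):
# a colorization request is a list of the form ['colorize', file_name, file_contents];
# the packed reply lists [color_code, first_index, index_after] for each colored token.
# The file name and source text below are placeholders.
if __name__ == "__main__":
    request = ['colorize', 'scratch.aky', 'x = 1\n']
    reply = colorize(request)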
def _compile_to_ast(filepath):
    parser = AnokyParser()
    expander = DefaultExpander()
    generator = DefaultGenerator()

    stream = FileStream(filepath)
    file_node = parser.parse(stream)
    expander.expand_unit(file_node)
    py_module = generator.generate_unit(file_node)
    ast.fix_missing_locations(py_module)
    return py_module
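# Hypothetical usage sketch (assumption, not part of the original source): the AST
# module returned by _compile_to_ast can be compiled and executed with Python's
# built-in compile()/exec(), as expand() does below when options.execute is set.
# "example.aky" is a placeholder file name.
if __name__ == "__main__":
    module_ast = _compile_to_ast("example.aky")
    code_object = compile(module_ast, filename="<ast>", mode="exec")
    exec(code_object)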
def expand(options):
    parser = AnokyParser()
    try:
        if "filename" not in options:
            print("No filename specified.")
            return
        filename = options.filename
        stream = FileStream(filename)
        file_node = parser.parse(stream)
        # print(indented_lisp_printer(file_node))

        expander = DefaultExpander()
        ec = expander.expand_unit(file_node)
        # print("\n〰〰〰〰〰〰 After macro expansion 〰〰〰〰〰〰")
        # print(indented_lisp_printer(file_node))

        generator = DefaultGenerator()
        # provide expansion context to generation context
        py_module = generator.generate_unit(file_node, EC=ec)
        # print("\n〰〰〰〰〰〰 Generated Python code 〰〰〰〰〰〰\n")
        # astpp.parseprint(py_module)
        # print("\n〰〰〰〰〰〰 Python retrosource 〰〰〰〰〰〰\n")
        print(ASTFormatter().format(py_module))

        if options.execute:
            ast.fix_missing_locations(py_module)
            compiled_module = compile(py_module, filename="<ast>", mode="exec")
            exec(compiled_module)
    except CompilerError as e:
        print(e.trace)
from anoky.syntax.token import is_token
from prompt_toolkit.history import InMemoryHistory
from prompt_toolkit import prompt

import argparse
import ast
import astpp
import sys
import traceback
import os

import anoky.syntax.tokens as Tokens

__parser__ = AnokyParser()
__macros__ = default_macro_table()
__id_macros__ = default_id_macro_table()
__special_forms__ = default_special_forms_table()
code_expander = DefaultExpander()
code_generator = DefaultGenerator()


def anoky_tokenize(stream, options):
    tokenized_node = __parser__.tokenize_into_node(stream, emmit_restart_tokens=False)

    if options.print_tokens:
        print('\n——›– Tokenized source –‹——')
        for token in tokenized_node:
            print(str(token))

    errors = []
    for token in tokenized_node:
        if is_token(token, Tokens.ERROR):
            errors.append(token)

    if len(errors) > 0:
        message = ''