def selftest() -> bool:
    """Run a simple self-test of DHParser.

    Stage I compiles DHParser's own EBNF grammar (extracted from the
    docstring of the built-in EBNF grammar object).  Stage II is a
    self-hosting test: the freshly generated parser is used to compile
    the same EBNF grammar once more.

    Returns:
        True if stage I succeeds (stage II raises on failure),
        False if stage I reports compilation errors.
    """
    print("DHParser selftest...")
    print("\nSTAGE I: Trying to compile EBNF-Grammar:\n")
    builtin_ebnf_parser = get_ebnf_grammar()
    docstring = str(builtin_ebnf_parser.__doc__)  # type: str
    # the grammar's docstring contains its own EBNF source; the source
    # starts at the first '@'-directive
    ebnf_src = docstring[docstring.find('@'):]
    ebnf_transformer = get_ebnf_transformer()
    ebnf_compiler = get_ebnf_compiler('EBNF')
    result, errors, _ = compile_source(
        ebnf_src, None,
        builtin_ebnf_parser, ebnf_transformer, ebnf_compiler)
    generated_ebnf_parser = cast(str, result)

    if errors:
        print("Selftest FAILED :-(")
        print("\n\n".join(str(err) for err in errors))
        return False
    print(generated_ebnf_parser)
    # FIX: stage label was "STAGE 2", inconsistent with "STAGE I" above
    print("\n\nSTAGE II: Selfhosting-test: "
          "Trying to compile EBNF-Grammar with generated parser...\n")
    selfhosted_ebnf_parser = compileDSL(ebnf_src, None, generated_ebnf_parser,
                                        ebnf_transformer, ebnf_compiler)
    # ebnf_compiler.gen_transformer_skeleton()
    print(selfhosted_ebnf_parser)
    return True
def compile_EBNF(self, text: str):
    """Run the EBNF compiler over *text* for analysis purposes.

    The compilation result is currently discarded (see TODO below);
    for now the method always returns None.
    """
    from DHParser.compile import compile_source
    from DHParser.ebnf import get_ebnf_preprocessor, get_ebnf_grammar, get_ebnf_transformer, \
        get_ebnf_compiler

    ebnf_compiler = get_ebnf_compiler("EBNFServerAnalyse", text)
    result, messages, _ = compile_source(text,
                                         get_ebnf_preprocessor(),
                                         get_ebnf_grammar(),
                                         get_ebnf_transformer(),
                                         ebnf_compiler)
    # TODO: return errors as well as (distilled) information about symbols for code propositions
    return None
def compile_EBNF(text: str, diagnostics_signature: bytes) -> (str, bytes):
    """Compile the EBNF grammar *text* and report diagnostics.

    *diagnostics_signature* is the signature of the diagnostics reported
    on the previous run; if the current diagnostics carry the very same
    signature, an empty JSON list is returned instead of repeating them.

    Returns a pair (diagnostics as JSON string, signature bytes).
    """
    from DHParser.compile import compile_source
    from DHParser.ebnf import get_ebnf_preprocessor, get_ebnf_grammar, get_ebnf_transformer, \
        get_ebnf_compiler
    from DHParser.toolkit import json_dumps

    ebnf_compiler = get_ebnf_compiler("EBNFServerAnalyse", text)
    result, messages, _ = compile_source(text,
                                         get_ebnf_preprocessor(),
                                         get_ebnf_grammar(),
                                         get_ebnf_transformer(),
                                         ebnf_compiler)
    # TODO: return errors as well as (distilled) information about symbols for code propositions
    new_signature = b''.join(msg.signature() for msg in messages)
    if new_signature == diagnostics_signature:
        # the very same diagnostics have already been reported earlier:
        # do not publish them again
        return '[]', new_signature
    diagnostics = [msg.diagnosticObj() for msg in messages]
    return json_dumps(diagnostics), new_signature  # TODO: bytes is not a JSON-type proper!
def load_compiler_suite(compiler_suite: str) -> \
        Tuple[PreprocessorFactoryFunc, ParserFactoryFunc,
              TransformerFactoryFunc, CompilerFactoryFunc]:
    """
    Extracts a compiler suite from file or string `compiler_suite`
    and returns it as a tuple (preprocessor, parser, ast, compiler).

    Returns:
        4-tuple (preprocessor function, parser class,
        ast transformer function, compiler class)

    Raises:
        DefinitionError: if `compiler_suite` is an EBNF grammar that
            does not compile.
        ValueError: if any of the four factories could not be extracted.
    """
    global RX_SECTION_MARKER
    assert isinstance(compiler_suite, str)
    source = load_if_file(compiler_suite)
    dhpath = relative_path(os.path.dirname('.'), DHPARSER_PARENTDIR)
    imports = DHPARSER_IMPORTS.format(dhparser_parentdir=dhpath)
    if is_python_code(compiler_suite):
        # a full compiler suite (Python script with section markers) was given
        sections = split_source(compiler_suite, source)
        _, imports, preprocessor_py, parser_py, ast_py, compiler_py, _ = sections
        # TODO: Compile in one step and pick parts from namespace later ?
        preprocessor = compile_python_object(imports + preprocessor_py,
                                             r'get_(?:\w+_)?preprocessor$')
        parser = compile_python_object(imports + parser_py, r'get_(?:\w+_)?grammar$')
        ast = compile_python_object(imports + ast_py, r'get_(?:\w+_)?transformer$')
    else:
        # Assume source is an ebnf grammar.
        # Is there really any reasonable application case for this?
        lg_dir = suspend_logging()
        compiler_py, messages, _ = compile_source(source, None, get_ebnf_grammar(),
                                                  get_ebnf_transformer(),
                                                  get_ebnf_compiler(compiler_suite, source))
        resume_logging(lg_dir)
        if has_errors(messages):
            raise DefinitionError(only_errors(messages), source)
        preprocessor = get_ebnf_preprocessor
        parser = get_ebnf_grammar
        ast = get_ebnf_transformer
    compiler = compile_python_object(imports + compiler_py, r'get_(?:\w+_)?compiler$')
    # BUG FIX: the original checked `callable(Callable)` - i.e. the typing
    # class itself, which is trivially callable - instead of the extracted
    # `ast` factory, so a broken AST section slipped through unnoticed.
    if callable(preprocessor) and callable(parser) and callable(ast) and callable(compiler):
        return preprocessor, parser, ast, compiler
    raise ValueError('Could not generate compiler suite from source code!')
def grammar_instance(grammar_representation) -> Tuple[Grammar, str]:
    """
    Returns a grammar object and the source code of the grammar, from
    the given `grammar`-data which can be either a file name, ebnf-code,
    python-code, a Grammar-derived grammar class or an instance of
    such a class (i.e. a grammar object already).

    Raises:
        DefinitionError: if an EBNF grammar was passed and does not compile.
        ValueError: if the Python code does not yield a Grammar subclass.
    """
    if isinstance(grammar_representation, str):
        # read grammar
        grammar_src = load_if_file(grammar_representation)
        if is_python_code(grammar_src):
            parser_py = grammar_src  # type: str
            messages = []            # type: List[Error]
        else:
            # assume EBNF source: compile it to a Python parser first
            lg_dir = suspend_logging()
            result, messages, _ = compile_source(
                grammar_src, None,
                get_ebnf_grammar(), get_ebnf_transformer(), get_ebnf_compiler())
            parser_py = cast(str, result)
            resume_logging(lg_dir)
        if has_errors(messages):
            raise DefinitionError(only_errors(messages), grammar_src)
        imports = DHPARSER_IMPORTS.format(
            dhparser_parentdir=relative_path('.', DHPARSER_PARENTDIR))
        grammar_class = compile_python_object(imports + parser_py, r'\w+Grammar$')
        if inspect.isclass(grammar_class) and issubclass(grammar_class, Grammar):
            parser_root = grammar_class()
        else:
            # FIX: error message was garbled ('Could not compile or Grammar class!')
            raise ValueError('Could not compile or instantiate Grammar class!')
    else:
        # assume that dsl_grammar is a ParserHQ-object or Grammar class
        grammar_src = ''
        if isinstance(grammar_representation, Grammar):
            parser_root = grammar_representation
        else:
            # assume ``grammar_representation`` is a grammar class and get the root object
            parser_root = grammar_representation()
    return parser_root, grammar_src
def compileDSL(text_or_file: str,
               preprocessor: Optional[PreprocessorFunc],
               dsl_grammar: Union[str, Grammar],
               ast_transformation: TransformationFunc,
               compiler: Compiler) -> Any:
    """
    Compiles a text in a domain specific language (DSL) with an
    EBNF-specified grammar.  Returns the compiled text or raises a
    compilation error.

    Raises:
        CompilationError if any errors occurred during compilation
    """
    assert isinstance(text_or_file, str)
    assert isinstance(compiler, Compiler)

    parser, grammar_src = grammar_instance(dsl_grammar)
    result, messages, syntax_tree = compile_source(text_or_file, preprocessor, parser,
                                                   ast_transformation, compiler)
    if not has_errors(messages):
        return result
    # compilation failed: bundle errors, source and AST into the exception
    src = load_if_file(text_or_file)
    raise CompilationError(only_errors(messages), src, grammar_src, syntax_tree, result)
def compile_on_disk(source_file: str, compiler_suite="", extension=".xml") -> Iterable[Error]:
    """
    Compiles the a source file with a given compiler and writes the
    result to a file.

    If no ``compiler_suite`` is given it is assumed that the source
    file is an EBNF grammar. In this case the result will be a Python
    script containing a parser for that grammar as well as the
    skeletons for a preprocessor, AST transformation table, and compiler.
    If the Python script already exists only the parser name in the
    script will be updated. (For this to work, the different names
    need to be delimited section marker blocks.). `compile_on_disk()`
    returns a list of error messages or an empty list if no errors
    occurred.

    Parameters:
        source_file(str):  The file name of the source text to be compiled.
        compiler_suite(str):  The file name of the parser/compiler-suite
            (usually ending with 'Parser.py'), with which the source
            file shall be compiled. If this is left empty, the source
            file is assumed to be an EBNF-Grammar that will be compiled
            with the internal EBNF-Compiler.
        extension(str):  The result of the compilation (if successful)
            is written to a file with the same name but a different
            extension than the source file. This parameter sets the
            extension.

    Returns:
        A (potentially empty) list of error or warning messages.
    """
    filepath = os.path.normpath(source_file)
    f = None  # Optional[TextIO]
    with open(source_file, encoding="utf-8") as f:
        source = f.read()
    # rootname: source file path without its extension; basis for all output names
    rootname = os.path.splitext(filepath)[0]
    dhpath = relative_path(os.path.dirname(rootname), DHPARSER_PARENTDIR)
    compiler_name = as_identifier(os.path.basename(rootname))
    if compiler_suite:
        # use the preprocessor/parser/transformer/compiler factories from the suite
        sfactory, pfactory, tfactory, cfactory = load_compiler_suite(compiler_suite)
        compiler1 = cfactory()
    else:
        # no suite given: fall back to the built-in EBNF compiler chain
        sfactory = get_ebnf_preprocessor  # PreprocessorFactoryFunc
        pfactory = get_ebnf_grammar      # ParserFactoryFunc
        tfactory = get_ebnf_transformer  # TransformerFactoryFunc
        cfactory = get_ebnf_compiler     # CompilerFactoryFunc
        compiler1 = cfactory()           # Compiler
    is_ebnf_compiler = False  # type: bool
    if isinstance(compiler1, EBNFCompiler):
        is_ebnf_compiler = True
        compiler1.set_grammar_name(compiler_name, source_file)
    result, messages, _ = compile_source(source, sfactory(), pfactory(), tfactory(), compiler1)
    if has_errors(messages):
        return messages
    elif is_ebnf_compiler:
        # trans == get_ebnf_transformer or trans == EBNFTransformer:
        # either an EBNF- or no compiler suite given
        ebnf_compiler = cast(EBNFCompiler, compiler1)  # type: EBNFCompiler
        global SECTION_MARKER, RX_SECTION_MARKER, PREPROCESSOR_SECTION, PARSER_SECTION, \
            AST_SECTION, COMPILER_SECTION, END_SECTIONS_MARKER, RX_WHITESPACE
        f = None
        try:
            # try to read an existing ...Parser.py so that hand-written
            # sections (preprocessor, AST, compiler) survive regeneration
            parser_name = rootname + 'Parser.py'
            f = open(parser_name, 'r', encoding="utf-8")
            source = f.read()
            sections = split_source(parser_name, source)
            intro, imports, preprocessor, _, ast, compiler, outro = sections
            ast_trans_python_src = imports + ast  # DHPARSER_IMPORTS.format(dhparser_parentdir=dhpath)
            ast_trans_table = dict()  # type: TransformationDict
            try:
                # compile the existing AST section to verify its transformation table
                ast_trans_table = compile_python_object(ast_trans_python_src,
                                                        r'(?:\w+_)?AST_transformation_table$')
            except Exception as e:
                if isinstance(e, NameError):
                    err_str = 'NameError "{}" while compiling AST-Transformation. ' \
                              'Possibly due to a forgotten import at the beginning ' \
                              'of the AST-Block (!)'.format(str(e))
                else:
                    err_str = 'Exception {} while compiling AST-Transformation: {}' \
                              .format(str(type(e)), str(e))
                # failure to verify the table is only a warning, not an error
                messages.append(Error(err_str, 0, CANNOT_VERIFY_TRANSTABLE_WARNING))
                if is_logging():
                    # dump the offending AST source for inspection
                    with open(os.path.join(log_dir(), rootname + '_AST_src.py'), 'w',
                              encoding='utf-8') as f:
                        f.write(ast_trans_python_src)
            messages.extend(ebnf_compiler.verify_transformation_table(ast_trans_table))
            # TODO: Verify compiler
        except (PermissionError, FileNotFoundError, IOError):
            # no readable parser script yet: all sections start out empty
            intro, imports, preprocessor, _, ast, compiler, outro = '', '', '', '', '', '', ''
        finally:
            if f:
                f.close()
                f = None
        # fill any section that is still empty (only whitespace) with a skeleton
        if RX_WHITESPACE.fullmatch(intro):
            intro = '#!/usr/bin/env python3'
        if RX_WHITESPACE.fullmatch(outro):
            outro = read_template('DSLParser.pyi').format(NAME=compiler_name)
        if RX_WHITESPACE.fullmatch(imports):
            imports = DHParser.ebnf.DHPARSER_IMPORTS.format(dhparser_parentdir=dhpath)
        if RX_WHITESPACE.fullmatch(preprocessor):
            preprocessor = ebnf_compiler.gen_preprocessor_skeleton()
        if RX_WHITESPACE.fullmatch(ast):
            ast = ebnf_compiler.gen_transformer_skeleton()
        if RX_WHITESPACE.fullmatch(compiler):
            compiler = ebnf_compiler.gen_compiler_skeleton()
        compilerscript = rootname + 'Parser.py'
        try:
            # (re-)write the parser script, section by section; only the
            # parser section is replaced with the freshly compiled result
            f = open(compilerscript, 'w', encoding="utf-8")
            f.write(intro)
            f.write(SECTION_MARKER.format(marker=SYMBOLS_SECTION))
            f.write(imports)
            f.write(SECTION_MARKER.format(marker=PREPROCESSOR_SECTION))
            f.write(preprocessor)
            f.write(SECTION_MARKER.format(marker=PARSER_SECTION))
            f.write(cast(str, result))
            f.write(SECTION_MARKER.format(marker=AST_SECTION))
            f.write(ast)
            f.write(SECTION_MARKER.format(marker=COMPILER_SECTION))
            f.write(compiler)
            f.write(SECTION_MARKER.format(marker=END_SECTIONS_MARKER))
            f.write(outro)
        except (PermissionError, FileNotFoundError, IOError) as error:
            # best effort: report the problem and dump the result to stdout
            print('# Could not write file "' + compilerscript + '" because of: '
                  + "\n# ".join(str(error).split('\n)')))
            print(result)
        finally:
            if f:
                f.close()
        if platform.system() != "Windows":
            # set file permissions so that the compilerscript can be executed
            st = os.stat(compilerscript)
            os.chmod(compilerscript, st.st_mode | stat.S_IEXEC)
    else:
        # a non-EBNF compiler was used: write its result (tree or text)
        # to <rootname><extension>
        f = None
        try:
            f = open(rootname + extension, 'w', encoding="utf-8")
            if isinstance(result, Node):
                if extension.lower() == '.xml':
                    f.write(result.as_xml())
                else:
                    f.write(result.as_sxpr())
            elif isinstance(result, str):
                f.write(result)
            else:
                raise AssertionError('Illegal result type: ' + str(type(result)))
        except (PermissionError, FileNotFoundError, IOError) as error:
            print('# Could not write file "' + rootname + '.py" because of: '
                  + "\n# ".join(str(error).split('\n)')))
            print(result)
        finally:
            if f:
                f.close()
    return messages