def build_extension(self, grammar_source):
    """Generate and compile a C parser extension for *grammar_source*."""
    parsed = parse_string(grammar_source, GrammarParser)
    # setUp() has already chdir'd into the temporary directory, so a
    # relative output path keeps compiler command lines short.
    generate_parser_c_extension(parsed, Path('.'), library_dir=self.library_dir)
def verify_ast_generation(source: str, stmt: str, tmp_path: PurePath) -> None:
    """Assert the C parser built from *source* parses *stmt* like ast.parse."""
    extension = generate_parser_c_extension(
        parse_string(source, GrammarParser), tmp_path
    )
    expected = ast.dump(ast.parse(stmt))
    actual = ast.dump(extension.parse_string(stmt))
    assert expected == actual
def verify_ast_generation(self, source: str, stmt: str, tmp_path: PurePath) -> None:
    """Assert the C parser built from *source* parses *stmt* like ast.parse."""
    grammar = parse_string(source, GrammarParser)
    parser_ext = generate_parser_c_extension(grammar, Path(tmp_path))
    reference = ast.parse(stmt)
    produced = parser_ext.parse_string(stmt, mode=1)
    self.assertEqual(ast_dump(reference), ast_dump(produced))
def test_with_stmt_with_paren(self) -> None:
    """A parenthesized, multi-item `with` statement must produce the expected AST."""
    # Minimal grammar supporting only `with (... as ..., ...): pass`.
    grammar_source = """
    start[mod_ty]: a=[statements] ENDMARKER { Module(a, NULL, p->arena) }
    statements[asdl_seq*]: a=statement+ { _PyPegen_seq_flatten(p, a) }
    statement[asdl_seq*]: a=compound_stmt { _PyPegen_singleton_seq(p, a) }
    compound_stmt[stmt_ty]: with_stmt
    with_stmt[stmt_ty]: (
        a='with' '(' b=','.with_item+ ')' ':' c=block {
            _Py_With(b, _PyPegen_singleton_seq(p, c), NULL, EXTRA) }
    )
    with_item[withitem_ty]: (
        e=NAME o=['as' t=NAME { t }] {
            _Py_withitem(e, _PyPegen_set_expr_context(p, o, Store), p->arena) }
    )
    block[stmt_ty]: a=pass_stmt NEWLINE { a } | NEWLINE INDENT a=pass_stmt DEDENT { a }
    pass_stmt[stmt_ty]: a='pass' { _Py_Pass(EXTRA) }
    """
    stmt = "with (\n a as b,\n c as d\n): pass"
    grammar = parse_string(grammar_source, GrammarParser)
    extension = generate_parser_c_extension(grammar, Path(self.tmp_path))
    the_ast = extension.parse_string(stmt, mode=1)
    # Only the prefix of the dump is checked: the trailing location
    # attributes are irrelevant to this test.
    self.assertTrue(
        ast_dump(the_ast).startswith(
            "Module(body=[With(items=[withitem(context_expr=Name(id='a', ctx=Load()), optional_vars=Name(id='b', ctx=Store())), "
            "withitem(context_expr=Name(id='c', ctx=Load()), optional_vars=Name(id='d', ctx=Store()))]"
        ))
def main() -> None:
    """Probe how deeply parentheses can nest before each parser fails.

    Builds both the Python and the C parser from GRAMMAR_FILE, then keeps
    increasing the nesting depth (by NESTED_INCR_AMT each round) until
    both parsers have failed.  Once a parser fails it is not retried at
    greater depths.
    """
    print(
        f"Testing {GRAMMAR_FILE} starting at nesting depth of {INITIAL_NESTING_DEPTH}..."
    )

    with TemporaryDirectory() as tmp_dir:
        nesting_depth = INITIAL_NESTING_DEPTH
        rules, parser, tokenizer = build_parser(GRAMMAR_FILE)
        python_parser = generate_parser(rules)
        c_parser = generate_parser_c_extension(rules, Path(tmp_dir))

        c_succeeded = True
        python_succeeded = True
        while c_succeeded or python_succeeded:
            # Fix: the original built an `expr` string here every iteration
            # and never used it; check_nested_expr() only needs the depth.
            if c_succeeded:
                c_succeeded = check_nested_expr(nesting_depth, c_parser, "C")
            if python_succeeded:
                python_succeeded = check_nested_expr(
                    nesting_depth, python_parser, "Python"
                )
            nesting_depth += NESTED_INCR_AMT

    # NOTE(review): always exits non-zero — presumably this script is meant
    # for manual investigation rather than CI success/failure; confirm.
    sys.exit(1)
def test_error_in_rules(self) -> None:
    """A C-level failure inside a rule action must surface as a Python exception."""
    grammar_source = """
    start: expr+ NEWLINE? ENDMARKER
    expr: NAME {PyTuple_New(-1)}
    """
    parser_ext = generate_parser_c_extension(
        parse_string(grammar_source, GrammarParser), Path(self.tmp_path)
    )
    # PyTuple_New raises SystemError if an invalid argument was passed.
    with self.assertRaises(SystemError):
        parser_ext.parse_string("a", mode=0)
def test_syntax_error_for_string(text: str, tmp_path: PurePath) -> None:
    """Invalid input must raise SyntaxError pointing at '<string>', line 1.

    Fix: the original asserted only inside the ``except`` block, so the
    test silently passed when parse_string() raised nothing at all; the
    ``else`` branch now fails explicitly in that case.
    """
    grammar_source = """
    start: expr+ NEWLINE? ENDMARKER
    expr: NAME
    """
    grammar = parse_string(grammar_source, GrammarParser)
    extension = generate_parser_c_extension(grammar, tmp_path)
    try:
        extension.parse_string(text, mode=1)
    except SyntaxError:
        # The formatted traceback must name the pseudo-file "<string>".
        tb = traceback.format_exc()
        assert 'File "<string>", line 1' in tb
    else:
        raise AssertionError("parse_string() did not raise SyntaxError")
def test_syntax_error_for_string(self) -> None:
    """Invalid input (ASCII and non-ASCII) must raise SyntaxError at line 1.

    Fixes: the original asserted only inside ``except``, so a missing
    SyntaxError made the test pass silently — an ``else: self.fail(...)``
    now catches that; ``assertTrue(x in y)`` is replaced by ``assertIn``
    for better failure messages; a spurious ``f`` prefix on a literal
    with no placeholders is dropped.
    """
    grammar_source = """
    start: expr+ NEWLINE? ENDMARKER
    expr: NAME
    """
    grammar = parse_string(grammar_source, GrammarParser)
    extension = generate_parser_c_extension(grammar, Path(self.tmp_path))
    for text in ("a b 42 b a", "名 名 42 名 名"):
        try:
            extension.parse_string(text, mode=0)
        except SyntaxError:
            tb = traceback.format_exc()
            self.assertIn('File "<string>", line 1', tb)
            self.assertIn("SyntaxError: invalid syntax", tb)
        else:
            self.fail(f"parse_string() did not raise SyntaxError for {text!r}")
def check_input_strings_for_grammar(grammar, tmp_path, valid_cases=None, invalid_cases=None):
    """Build a C parser for *grammar*; accept valid_cases, reject invalid_cases."""
    rules = parse_string(grammar, GrammarParser).rules
    extension = generate_parser_c_extension(rules, tmp_path)
    # `or ()` keeps the default None from being iterated.
    for case in valid_cases or ():
        extension.parse_string(case)
    for case in invalid_cases or ():
        with pytest.raises(SyntaxError):
            extension.parse_string(case)
def test_syntax_error_for_file(text: str, tmp_path: PurePath) -> None:
    """Invalid file input must raise SyntaxError naming the file, line 1.

    Fix: the original asserted only inside the ``except`` block, so the
    test silently passed when parse_file() raised nothing; the ``else``
    branch now fails explicitly in that case.
    """
    grammar_source = """
    start: expr+ NEWLINE? ENDMARKER
    expr: NAME
    """
    grammar = parse_string(grammar_source, GrammarParser)
    extension = generate_parser_c_extension(grammar, tmp_path)
    the_file = tmp_path / "some_file.py"
    with open(the_file, "w") as fd:
        fd.write(text)
    try:
        extension.parse_file(str(the_file), mode=1)
    except SyntaxError:
        # The traceback must reference the real file, not "<string>".
        tb = traceback.format_exc()
        assert 'some_file.py", line 1' in tb
    else:
        raise AssertionError("parse_file() did not raise SyntaxError")
def check_input_strings_for_grammar(
    source: str,
    tmp_path: PurePath,
    valid_cases: Sequence[str] = (),
    invalid_cases: Sequence[str] = (),
) -> None:
    """Build a C parser from *source*; accept valid_cases, reject invalid_cases.

    Fix: the defaults are empty tuples, so the ``if valid_cases:`` /
    ``if invalid_cases:`` truthiness guards were redundant — iterating an
    empty sequence is already a no-op.
    """
    grammar = parse_string(source, GrammarParser)
    extension = generate_parser_c_extension(grammar, tmp_path)
    for case in valid_cases:
        extension.parse_string(case)
    for case in invalid_cases:
        with pytest.raises(SyntaxError):
            extension.parse_string(case)
def test_same_name_different_types(self) -> None:
    """Rules may reuse variable names with different C result types; the
    generated parser must still build the same AST as ast.parse for both
    `from x import y as z` forms."""
    grammar_source = """
    start[mod_ty]: a=import_from+ NEWLINE ENDMARKER { Module(a, NULL, p->arena)}
    import_from[stmt_ty]: (
        a='from' !'import' c=simple_name 'import' d=import_as_names_from {
            _Py_ImportFrom(c->v.Name.id, d, 0, EXTRA) }
        | a='from' '.' 'import' c=import_as_names_from {
            _Py_ImportFrom(NULL, c, 1, EXTRA) }
    )
    simple_name[expr_ty]: NAME
    import_as_names_from[asdl_seq*]: a=','.import_as_name_from+ { a }
    import_as_name_from[alias_ty]: a=NAME 'as' b=NAME {
        _Py_alias(((expr_ty) a)->v.Name.id, ((expr_ty) b)->v.Name.id, p->arena) }
    """
    grammar = parse_string(grammar_source, GrammarParser)
    extension = generate_parser_c_extension(grammar, Path(self.tmp_path))
    # Both the named-module and relative-dot import forms are covered.
    for stmt in ("from a import b as c", "from . import a as b"):
        expected_ast = ast.parse(stmt)
        actual_ast = extension.parse_string(stmt, mode=1)
        self.assertEqual(ast_dump(expected_ast), ast_dump(actual_ast))
def test_c_parser(tmp_path: PurePath) -> None:
    """End-to-end check of a small arithmetic grammar: the generated C
    parser must produce the same AST as ast.parse for a range of
    expressions (left-recursive +, -, *, / and parenthesized groups)."""
    grammar_source = """
    start[mod_ty]: a=stmt* $ { Module(a, NULL, p->arena) }
    stmt[stmt_ty]: a=expr_stmt { a }
    expr_stmt[stmt_ty]: a=expression NEWLINE { _Py_Expr(a, EXTRA) }
    expression[expr_ty]: (
        l=expression '+' r=term { _Py_BinOp(l, Add, r, EXTRA) }
        | l=expression '-' r=term { _Py_BinOp(l, Sub, r, EXTRA) }
        | t=term { t }
    )
    term[expr_ty]: (
        l=term '*' r=factor { _Py_BinOp(l, Mult, r, EXTRA) }
        | l=term '/' r=factor { _Py_BinOp(l, Div, r, EXTRA) }
        | f=factor { f }
    )
    factor[expr_ty]: (
        '(' e=expression ')' { e }
        | a=atom { a }
    )
    atom[expr_ty]: (
        n=NAME { n }
        | n=NUMBER { n }
        | s=STRING { s }
    )
    """
    grammar = parse_string(grammar_source, GrammarParser)
    extension = generate_parser_c_extension(grammar, tmp_path)
    expressions = [
        "4+5",
        "4-5",
        "4*5",
        "1+4*5",
        "1+4/5",
        "(1+1) + (1+1)",
        "(1+1) - (1+1)",
        "(1+1) * (1+1)",
        "(1+1) / (1+1)",
    ]
    for expr in expressions:
        the_ast = extension.parse_string(expr)
        expected_ast = ast.parse(expr)
        assert ast.dump(the_ast) == ast.dump(expected_ast)
def build_extension(self, grammar_source):
    """Generate and compile a C parser extension for *grammar_source*."""
    generate_parser_c_extension(
        parse_string(grammar_source, GrammarParser), Path(self.tmp_path)
    )
def create_tmp_extension(tmp_path: PurePath) -> Any:
    """Build and return the C parser extension for the bundled simpy grammar."""
    with open(os.path.join("data", "simpy.gram"), "r") as grammar_file:
        grammar_source = grammar_file.read()
    parsed = parse_string(grammar_source, GrammarParser)
    return generate_parser_c_extension(parsed, tmp_path)