    def __init__(self,
                 grammar,
                 source,
                 start_symbol='file_input',
                 tokenizer=None,
                 start_parsing=True):
        # TODO: Remove start_parsing (and the start_parsing=False path).

        self._used_names = {}
        self._scope_names_stack = [{}]
        self._last_failed_start_pos = (0, 0)
        self._global_names = []

        # For the fast parser.
        self.position_modifier = pt.PositionModifier()

        self._added_newline = False
        # The Python grammar needs a newline at the end of each statement.
        if not source.endswith('\n') and start_symbol == 'file_input':
            source += '\n'
            self._added_newline = True

        self.source = source
        self._start_symbol = start_symbol
        self._grammar = grammar

        self._parsed = None

        if start_parsing:
            if tokenizer is None:
                tokenizer = tokenize.source_tokens(source,
                                                   use_exact_op_types=True)
            self.parse(tokenizer)
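
A minimal usage sketch for the constructor above. The Parser class name and the pre-loaded grammar object are assumptions (only the __init__ body is shown here); tokenize.source_tokens with use_exact_op_types=True is taken from the snippet itself.

    # Hypothetical usage; assumes this __init__ belongs to a Parser class and
    # that a suitable pgen grammar object has already been loaded elsewhere.
    parser = Parser(grammar, 'x = 1')          # a trailing newline is appended automatically
    lazy = Parser(grammar, 'x = 1\n', start_parsing=False)
    tokens = tokenize.source_tokens('x = 1\n', use_exact_op_types=True)
    lazy.parse(tokens)                         # parse later with an explicit tokenizer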
Example #2
    def __init__(self, grammar, source, module_path=None, tokenizer=None):
        self._ast_mapping = {
            'expr_stmt': pt.ExprStmt,
            'classdef': pt.Class,
            'funcdef': pt.Function,
            'file_input': pt.Module,
            'import_name': pt.ImportName,
            'import_from': pt.ImportFrom,
            'break_stmt': pt.KeywordStatement,
            'continue_stmt': pt.KeywordStatement,
            'return_stmt': pt.ReturnStmt,
            'raise_stmt': pt.KeywordStatement,
            'yield_expr': pt.YieldExpr,
            'del_stmt': pt.KeywordStatement,
            'pass_stmt': pt.KeywordStatement,
            'global_stmt': pt.GlobalStmt,
            'nonlocal_stmt': pt.KeywordStatement,
            'assert_stmt': pt.AssertStmt,
            'if_stmt': pt.IfStmt,
            'with_stmt': pt.WithStmt,
            'for_stmt': pt.ForStmt,
            'while_stmt': pt.WhileStmt,
            'try_stmt': pt.TryStmt,
            'comp_for': pt.CompFor,
            'decorator': pt.Decorator,
            'lambdef': pt.Lambda,
            'old_lambdef': pt.Lambda,
            'lambdef_nocond': pt.Lambda,
        }
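
        # Presumably, this mapping is consumed by convert_node: when the pgen
        # parser reduces one of the grammar symbols listed here, the matching
        # parser-tree class from `pt` is instantiated, and unlisted symbols
        # fall back to a generic node type. This is an assumption; convert_node
        # itself is not part of this snippet.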

        self.syntax_errors = []

        self._global_names = []
        self._omit_dedent_list = []
        self._indent_counter = 0
        self._last_failed_start_pos = (0, 0)

        # TODO: do print / absolute import detection here.
        #try:
        #    del python_grammar_no_print_statement.keywords["print"]
        #except KeyError:
        #    pass  # Doesn't exist in the Python 3 grammar.

        #if self.options["print_function"]:
        #    python_grammar = pygram.python_grammar_no_print_statement
        #else:
        self._used_names = {}
        self._scope_names_stack = [{}]
        self._error_statement_stacks = []

        added_newline = False
        # The Python grammar needs a newline at the end of each statement.
        if not source.endswith('\n'):
            source += '\n'
            added_newline = True

        # For the fast parser.
        self.position_modifier = pt.PositionModifier()
        p = PgenParser(grammar, self.convert_node, self.convert_leaf,
                       self.error_recovery)
        tokenizer = tokenizer or tokenize.source_tokens(source)
        self.module = p.parse(self._tokenize(tokenizer))
        if self.module.type != 'file_input':
            # If there's only one statement, we get back a non-module node.
            # That's not what we want; we want a module, so we wrap it here:
            self.module = self.convert_node(
                grammar, grammar.symbol2number['file_input'], [self.module])

        if added_newline:
            self.remove_last_newline()
        self.module.used_names = self._used_names
        self.module.path = module_path
        self.module.global_names = self._global_names
        self.module.error_statement_stacks = self._error_statement_stacks
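
A hedged end-to-end sketch of driving the constructor in Example #2. The load_grammar helper and the Parser class name are assumptions; the module_path parameter and the attributes set on self.module come from the snippet above.

    # Hypothetical usage of the older-style constructor shown in Example #2.
    grammar = load_grammar()                   # assumed helper returning a pgen grammar
    parser = Parser(grammar, source, module_path='example.py')
    module = parser.module                     # a pt.Module with used_names, path,
                                               # global_names and error_statement_stacks set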