class PythonGrammar(Grammar):
    """Grammar for parsing Python source code.

    Wires the generic :class:`Grammar` machinery to the Python-specific
    tokenizer (``tokenize_lines``/``tokenize``), parser (``PythonParser``)
    and diff parser (``DiffParser``).

    NOTE(review): a second ``class PythonGrammar`` definition appears later
    in this file and shadows this one at import time — confirm this earlier
    definition is still needed.
    """

    # Normalizer configuration used for error finding on parsed trees.
    _error_normalizer_config = ErrorFinderConfig()
    # Namespace the parser uses to resolve token types.
    _token_namespace = token
    # Nonterminal the parse starts from: a whole module ("file_input").
    _start_symbol = 'file_input'

    def __init__(self, version_info, bnf_text):
        """Build the grammar for *version_info* from the BNF text *bnf_text*.

        :param version_info: the Python version the tokenizer should target
            (forwarded to ``tokenize_lines``/``tokenize``).
        :param bnf_text: the grammar definition handed to the base class.
        """
        # Zero-argument super() — py3 idiom, consistent with the second
        # definition of this class in this file.
        super().__init__(
            bnf_text,
            tokenizer=self._tokenize_lines,
            parser=PythonParser,
            diff_parser=DiffParser,
        )
        self.version_info = version_info

    def _tokenize_lines(self, lines, start_pos):
        """Tokenize an iterable of source lines starting at *start_pos*."""
        return tokenize_lines(lines, self.version_info, start_pos=start_pos)

    def _tokenize(self, code):
        """Tokenize a whole code string."""
        # Used by Jedi.
        return tokenize(code, self.version_info)
class PythonGrammar(Grammar):
    """A :class:`Grammar` specialized for Python source code.

    Connects the generic grammar machinery to the Python tokenizer,
    parser and diff parser for one specific Python version.
    """

    # Configuration driving error reporting on parsed trees.
    _error_normalizer_config = ErrorFinderConfig()
    # Token types the parser resolves against.
    _token_namespace = PythonTokenTypes
    # Parsing starts at a whole module ("file_input").
    _start_nonterminal = 'file_input'

    def __init__(self, version_info: PythonVersionInfo, bnf_text: str):
        """Create the grammar for *version_info* from the BNF text *bnf_text*."""
        super().__init__(
            bnf_text,
            tokenizer=self._tokenize_lines,
            parser=PythonParser,
            diff_parser=DiffParser,
        )
        self.version_info = version_info

    def _tokenize_lines(self, lines, **kwargs):
        """Tokenize an iterable of source lines for this grammar's version."""
        # Pin the version; forward everything else untouched.
        return tokenize_lines(lines, version_info=self.version_info, **kwargs)

    def _tokenize(self, code):
        """Tokenize a whole code string."""
        # Used by Jedi.
        return tokenize(code, version_info=self.version_info)