def __init__(
    self,
    tree: ast.AST,
    file_tokens: Sequence[tokenize.TokenInfo],
    filename: str = constants.STDIN,
) -> None:
    """
    Creates new checker instance.

    These parameter names should not be changed.
    ``flake8`` has special API that passes concrete parameters to
    the plugins that ask for them.

    ``flake8`` also decides how to execute this plugin
    based on its parameters. This one is executed once per module.

    Parameters:
        tree: ``ast`` parsed by ``flake8``.
            Differs from ``ast.parse`` since it is mutated by multiple
            ``flake8`` plugins. Why mutated? Since it is really expensive
            to copy all ``ast`` information in terms of memory.
        file_tokens: ``tokenize.tokenize`` parsed file tokens.
        filename: module file name, might be empty if piping is used.

    """
    # Assignments are independent; order is not significant.
    self.filename = filename
    self.file_tokens = file_tokens
    self.tree = transform(tree)
def __init__(
    self,
    tree: ast.AST,
    file_tokens: Sequence[tokenize.TokenInfo],
    filename: str = constants.STDIN,
) -> None:
    """
    Creates new checker instance.

    These parameter names should not be changed.
    ``flake8`` has special API that passes concrete parameters to
    the plugins that ask for them.

    ``flake8`` also decides how to execute this plugin
    based on its parameters. This one is executed once per module.

    Arguments:
        tree: ``ast`` tree parsed by ``flake8``.
        file_tokens: ``tokenize.tokenize`` parsed file tokens.
        filename: module file name, might be empty if piping is used.

    """
    # Store the raw inputs first, then the transformed tree.
    self.filename = filename
    self.file_tokens = file_tokens
    self.tree = transform(tree)
def factory(code: str, do_compile: bool = True) -> ast.AST:
    """
    Parses the given source ``code`` into a transformed ``ast`` tree.

    Arguments:
        code: source text to parse; it is dedented first, so indented
            triple-quoted snippets are accepted.
        do_compile: when ``True``, the source is also byte-compiled to
            surface errors ``ast.parse`` alone does not report.

    Returns:
        The ``ast`` tree produced by ``ast.parse`` after ``transform``.
    """
    normalized_source = dedent(code)
    if do_compile:
        # We need to compile to check some syntax features
        # that are validated after the `ast` is processed:
        # like double arguments or `break` outside of loops.
        compile(normalized_source, '<filename>', 'exec')  # noqa: Z421
    return transform(ast.parse(normalized_source))
def factory(code: str, do_compile: bool = True) -> ast.AST:
    """
    Parses the given source ``code`` into a transformed ``ast`` tree.

    Arguments:
        code: source text to parse; it is dedented first, so indented
            triple-quoted snippets are accepted.
        do_compile: when ``True``, the source is additionally checked
            via ``_compile_code`` before parsing.

    Returns:
        The ``ast`` tree produced by ``ast.parse`` after ``transform``.
    """
    normalized_source = dedent(code)
    if do_compile:
        _compile_code(normalized_source)
    return transform(ast.parse(normalized_source))