def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
          pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
          custom_typing_module: str = None) -> MypyFile:
    """Parse a source file, without doing any semantic analysis.

    Return the parse tree.  If errors is not provided, raise ParseError
    on failure.  Otherwise, use the errors object to report parse errors.

    The pyversion (major, minor) argument determines the Python syntax variant.
    """
    is_stub_file = bool(fnam) and fnam.endswith('.pyi')
    try:
        # This converter handles Python 2 source only (and never stubs).
        assert pyversion[0] < 3 and not is_stub_file
        converter = ASTConverter(pyversion=pyversion,
                                 is_stub=is_stub_file,
                                 custom_typing_module=custom_typing_module)
        tree = converter.visit(ast27.parse(source, fnam, 'exec'))
        assert isinstance(tree, MypyFile)
        tree.path = fnam
        tree.is_stub = is_stub_file
        return tree
    except (SyntaxError, TypeCommentParseError) as e:
        if not errors:
            raise
        errors.set_file('<input>' if fnam is None else fnam)
        errors.report(e.lineno, e.offset, e.msg)
    # Reached only when the error was reported instead of re-raised.
    return MypyFile([], [], False, set())
def __init__(self, data_dir: str,
             lib_path: List[str],
             target: int,
             output_dir: str,
             pyversion: int,
             flags: List[str],
             ignore_prefix: str,
             custom_typing_module: str,
             html_report_dir: str) -> None:
    """Initialize the build manager.

    Sets up the shared Errors reporter, the analysis passes (semantic
    analyzer passes 2/3 and the type checker) and the bookkeeping
    containers for build states, module files and the dependency cache.
    """
    self.data_dir = data_dir
    self.errors = Errors()
    self.errors.set_ignore_prefix(ignore_prefix)
    self.lib_path = lib_path
    self.target = target
    self.output_dir = output_dir
    self.pyversion = pyversion
    self.flags = flags
    self.custom_typing_module = custom_typing_module
    self.html_report_dir = html_report_dir
    self.semantic_analyzer = SemanticAnalyzer(lib_path, self.errors,
                                              pyversion=pyversion)
    self.semantic_analyzer_pass3 = ThirdPass(self.errors)
    self.type_checker = TypeChecker(self.errors,
                                    self.semantic_analyzer.modules,
                                    self.pyversion)
    # Use plain literals with type comments: calling a subscripted typing
    # generic such as List[State]() raises TypeError at runtime
    # ("Type List cannot be instantiated").  This matches the style used
    # by the other BuildManager.__init__ variant in this file.
    self.states = []  # type: List[State]
    self.module_files = {}  # type: Dict[str, str]
    self.module_deps = {}  # type: Dict[Tuple[str, str], bool]
    self.missing_modules = set()  # type: Set[str]
def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
          pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
          custom_typing_module: str = None) -> MypyFile:
    """Parse a source file, without doing any semantic analysis.

    Return the parse tree.  If errors is not provided, raise ParseError
    on failure.  Otherwise, use the errors object to report parse errors.

    The pyversion (major, minor) argument determines the Python syntax variant.
    """
    is_stub_file = bool(fnam) and fnam.endswith('.pyi')
    try:
        # Python 2 sources only; stubs always use Python 3 grammar.
        assert pyversion[0] < 3 and not is_stub_file
        parsed = ast27.parse(source, fnam, 'exec')
        result = ASTConverter(
            pyversion=pyversion,
            is_stub=is_stub_file,
            custom_typing_module=custom_typing_module,
        ).visit(parsed)
        assert isinstance(result, MypyFile)
        result.path = fnam
        result.is_stub = is_stub_file
        return result
    except (SyntaxError, TypeCommentParseError) as exc:
        if errors:
            errors.set_file('<input>' if fnam is None else fnam)
            errors.report(exc.lineno, exc.offset, exc.msg)
        else:
            raise
    # Error was reported through `errors`: return an empty module.
    return MypyFile([], [], False, set())
def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
          pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
          custom_typing_module: str = None,
          implicit_any: bool = False) -> MypyFile:
    """Parse a source file, without doing any semantic analysis.

    Return the parse tree.  If errors is not provided, raise ParseError
    on failure.  Otherwise, use the errors object to report parse errors.

    The pyversion (major, minor) argument determines the Python syntax variant.
    """
    is_stub_file = bool(fnam) and fnam.endswith('.pyi')
    try:
        ast = typed_ast.parse(source, fnam, 'exec')
    except SyntaxError as e:
        if not errors:
            raise
        errors.set_file('<input>' if fnam is None else fnam)
        errors.report(e.lineno, e.msg)  # type: ignore
    else:
        converted = ASTConverter().visit(ast)
        converted.path = fnam
        converted.is_stub = is_stub_file
        return converted
    # A syntax error was reported through `errors`; return an empty tree.
    return MypyFile([], [], False, set(), weak_opts=set())
def __init__(self, data_dir: str,
             lib_path: List[str],
             target: int,
             pyversion: Tuple[int, int],
             flags: List[str],
             ignore_prefix: str,
             custom_typing_module: str,
             reports: Reports) -> None:
    """Initialize the build manager.

    Creates the shared Errors reporter, wires up the analysis passes
    (semantic analyzer passes 2 and 3, type checker) around a single
    shared module map, and initializes the build bookkeeping containers.
    """
    self.data_dir = data_dir
    # Single Errors object shared by every pass.
    self.errors = Errors()
    self.errors.set_ignore_prefix(ignore_prefix)
    self.lib_path = lib_path
    self.target = target
    self.pyversion = pyversion
    self.flags = flags
    self.custom_typing_module = custom_typing_module
    self.reports = reports
    self.semantic_analyzer = SemanticAnalyzer(lib_path, self.errors,
                                              pyversion=pyversion)
    # All passes operate on the semantic analyzer's module map.
    modules = self.semantic_analyzer.modules
    self.semantic_analyzer_pass3 = ThirdPass(modules, self.errors)
    self.type_checker = TypeChecker(self.errors, modules, self.pyversion)
    # Bookkeeping; populated as files are processed.
    self.states = []  # type: List[State]
    self.module_files = {}  # type: Dict[str, str]
    self.module_deps = {}  # type: Dict[Tuple[str, str], bool]
    self.missing_modules = set()  # type: Set[str]
def __init__(self, data_dir: str,
             lib_path: List[str],
             target: int,
             output_dir: str,
             pyversion: int,
             flags: List[str],
             ignore_prefix: str) -> None:
    """Initialize the build manager.

    Sets up the shared Errors reporter, the analysis passes and the
    bookkeeping containers (states, module files, icode cache,
    dependency cache).
    """
    self.data_dir = data_dir
    self.errors = Errors()
    self.errors.set_ignore_prefix(ignore_prefix)
    self.lib_path = lib_path
    self.target = target
    self.output_dir = output_dir
    self.pyversion = pyversion
    self.flags = flags
    self.semantic_analyzer = SemanticAnalyzer(lib_path, self.errors)
    self.semantic_analyzer_pass3 = ThirdPass(self.errors)
    self.type_checker = TypeChecker(self.errors,
                                    self.semantic_analyzer.modules,
                                    self.pyversion)
    # Use plain literals with type comments: calling a subscripted typing
    # generic such as List[State]() raises TypeError at runtime
    # ("Type List cannot be instantiated").
    self.states = []  # type: List[State]
    self.module_files = {}  # type: Dict[str, str]
    self.icode = {}  # type: Dict[str, FuncIcode]
    # Path of the generated binary, set only by compilation targets.
    self.binary_path = None  # type: str
    self.module_deps = {}  # type: Dict[Tuple[str, str], bool]
def parse_type_comment(type_comment: str, line: int, errors: Errors) -> Optional[Type]:
    """Parse a '# type: ...' comment into a Type.

    Report a syntax error through `errors` and return None on failure.
    """
    try:
        parsed = ast35.parse(type_comment, '<type_comment>', 'eval')
    except SyntaxError as err:
        errors.report(line, err.offset, TYPE_COMMENT_SYNTAX_ERROR)
        return None
    assert isinstance(parsed, ast35.Expression)
    converter = TypeConverter(errors, line=line)
    return converter.visit(parsed.body)
def parse_type_comment(type_comment: str, line: int, errors: Errors) -> Optional[Type]:
    """Convert a '# type: ...' comment to a Type, or None on a syntax error."""
    try:
        expr = ast3.parse(type_comment, '<type_comment>', 'eval')
    except SyntaxError as syntax_err:
        errors.report(line, syntax_err.offset, TYPE_COMMENT_SYNTAX_ERROR)
        return None
    else:
        assert isinstance(expr, ast3.Expression)
        return TypeConverter(errors, line=line).visit(expr.body)
def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
          options: Options = None) -> MypyFile:
    """Parse a source file, without doing any semantic analysis.

    Return the parse tree. If errors is not provided, raise ParseError
    on failure. Otherwise, use the errors object to report parse errors.
    """
    raise_on_error = False
    if errors is None:
        errors = Errors()
        raise_on_error = True
    # Avoid a mutable default argument: a `options: Options = Options()`
    # default is evaluated once at function definition time and shared by
    # every call (and by any caller that mutates it).  Build a fresh
    # Options per call instead, matching the ast3-based parse() variant.
    if options is None:
        options = Options()
    errors.set_file('<input>' if fnam is None else fnam, None)
    is_stub_file = bool(fnam) and fnam.endswith('.pyi')
    try:
        # This converter handles Python 2 source only (never stubs).
        assert options.python_version[0] < 3 and not is_stub_file
        ast = ast27.parse(source, fnam, 'exec')
        tree = ASTConverter(options=options,
                            is_stub=is_stub_file,
                            errors=errors,
                            ).visit(ast)
        assert isinstance(tree, MypyFile)
        tree.path = fnam
        tree.is_stub = is_stub_file
    except SyntaxError as e:
        errors.report(e.lineno, e.offset, e.msg)
        tree = MypyFile([], [], False, set())

    if raise_on_error and errors.is_errors():
        errors.raise_error()

    return tree
def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
          pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
          custom_typing_module: str = None) -> MypyFile:
    """Parse a source file, without doing any semantic analysis.

    Return the parse tree.  If errors is not provided, raise ParseError
    on failure.  Otherwise, use the errors object to report parse errors.

    The pyversion (major, minor) argument determines the Python syntax variant.
    """
    # When no Errors object is supplied, collect errors locally and
    # raise at the end instead of reporting.
    raise_on_error = errors is None
    if errors is None:
        errors = Errors()
    errors.set_file('<input>' if fnam is None else fnam)
    is_stub_file = bool(fnam) and fnam.endswith('.pyi')
    try:
        # Python 3 grammar; stub files always use it regardless of pyversion.
        assert pyversion[0] >= 3 or is_stub_file
        parsed = ast35.parse(source, fnam, 'exec')
        converter = ASTConverter(pyversion=pyversion,
                                 is_stub=is_stub_file,
                                 errors=errors,
                                 custom_typing_module=custom_typing_module)
        tree = converter.visit(parsed)
        tree.path = fnam
        tree.is_stub = is_stub_file
    except SyntaxError as exc:
        errors.report(exc.lineno, exc.offset, exc.msg)
        tree = MypyFile([], [], False, set())

    if raise_on_error and errors.is_errors():
        errors.raise_error()

    return tree
def check_type_arguments(graph: 'Graph', scc: List[str], errors: Errors) -> None:
    """Validate type-variable arguments for every module in the SCC."""
    for module_id in scc:
        module_state = graph[module_id]
        assert module_state.tree
        in_typeshed = errors.is_typeshed_file(module_state.path or '')
        checker = TypeArgumentAnalyzer(errors, module_state.options, in_typeshed)
        with module_state.wrap_context(), \
                strict_optional_set(module_state.options.strict_optional):
            module_state.tree.accept(checker)
def check_type_arguments(graph: 'Graph', scc: List[str], errors: Errors) -> None:
    """Run the TypeArgumentAnalyzer over each module of the given SCC."""
    for name in scc:
        st = graph[name]
        assert st.tree
        visitor = TypeArgumentAnalyzer(
            errors,
            st.options,
            errors.is_typeshed_file(st.path or ''),
        )
        with st.wrap_context():
            with strict_optional_set(st.options.strict_optional):
                st.tree.accept(visitor)
def _make_manager(self) -> BuildManager:
    """Construct a bare-bones BuildManager (empty paths, default options)."""
    options = Options()
    return BuildManager(
        data_dir='',
        lib_path=[],
        ignore_prefix='',
        source_set=BuildSourceSet([]),
        reports=Reports('', {}),
        options=options,
        version_id=__version__,
        plugin=Plugin(options),
        errors=Errors(),
    )
def check_type_arguments_in_targets(targets: List[FineGrainedDeferredNode], state: 'State',
                                    errors: Errors) -> None:
    """Check type arguments against type variable bounds and restrictions.

    This mirrors the logic in check_type_arguments() except that we process only
    some targets. This is used in fine grained incremental mode.
    """
    in_typeshed = errors.is_typeshed_file(state.path or '')
    analyzer = TypeArgumentAnalyzer(errors, state.options, in_typeshed)
    with state.wrap_context(), strict_optional_set(state.options.strict_optional):
        for deferred in targets:
            # Recurse into function bodies unless the target is a whole module.
            analyzer.recurse_into_functions = not isinstance(deferred.node, MypyFile)
            deferred.node.accept(analyzer)
def check_type_arguments_in_targets(targets: List[FineGrainedDeferredNode], state: 'State',
                                    errors: Errors) -> None:
    """Check type arguments against type variable bounds and restrictions.

    This mirrors the logic in check_type_arguments() except that we process only
    some targets. This is used in fine grained incremental mode.
    """
    analyzer = TypeArgumentAnalyzer(
        errors,
        state.options,
        errors.is_typeshed_file(state.path or ''),
    )
    with state.wrap_context():
        with strict_optional_set(state.options.strict_optional):
            for target in targets:
                whole_module = isinstance(target.node, MypyFile)
                analyzer.recurse_into_functions = not whole_module
                target.node.accept(analyzer)
def __init__(self, mypy_base_dir, lib_path, target, output_dir, flags):
    """Initialize the build manager (legacy untyped signature).

    Args (types inferred from the typed sibling constructors — confirm):
        mypy_base_dir: presumably the mypy installation base directory (str)
        lib_path: library search path for module lookup (list of str)
        target: build target constant selecting which passes to run (int)
        output_dir: directory for generated output files (str)
        flags: build option flags (list of str)
    """
    self.mypy_base_dir = mypy_base_dir
    # Shared error reporter used by all passes.
    self.errors = Errors()
    self.lib_path = lib_path
    self.target = target
    self.output_dir = output_dir
    self.flags = flags
    self.semantic_analyzer = SemanticAnalyzer(lib_path, self.errors)
    self.type_checker = TypeChecker(self.errors, self.semantic_analyzer.modules)
    # Build bookkeeping; populated during processing.
    self.states = []
    self.module_files = {}
    # Outputs of later build phases; None/empty until produced.
    self.icode = None
    self.binary_path = None
    self.module_deps = {}
def _make_manager(self) -> BuildManager:
    """Construct a bare-bones BuildManager (empty paths, default options)."""
    options = Options()
    return BuildManager(
        data_dir='',
        lib_path=[],
        ignore_prefix='',
        source_set=BuildSourceSet([]),
        reports=Reports('', {}),
        options=options,
        version_id=__version__,
        plugin=Plugin(options),
        errors=Errors(),
        flush_errors=lambda msgs, serious: None,
        fscache=FileSystemCache(),
    )
def parse(source: Union[str, bytes],
          fnam: str,
          module: Optional[str],
          errors: Optional[Errors] = None,
          options: Optional[Options] = None) -> MypyFile:
    """Parse a source file, without doing any semantic analysis.

    Return the parse tree.  If errors is not provided, raise ParseError
    on failure.  Otherwise, use the errors object to report parse errors.
    """
    raise_on_error = False
    if errors is None:
        errors = Errors()
        raise_on_error = True
    if options is None:
        options = Options()
    errors.set_file(fnam, module)
    is_stub_file = fnam.endswith('.pyi')
    try:
        if is_stub_file:
            # Stubs are parsed with the default Python 3 feature version.
            feature_version = defaults.PYTHON3_VERSION[1]
        else:
            assert options.python_version[0] >= 3
            feature_version = options.python_version[1]
        parsed = ast3.parse(source, fnam, 'exec', feature_version=feature_version)
        converter = ASTConverter(options=options, is_stub=is_stub_file, errors=errors)
        tree = converter.visit(parsed)
        tree.path = fnam
        tree.is_stub = is_stub_file
    except SyntaxError as e:
        errors.reportErrorCode(errorcode.SYNTAX_ERROR(e.msg), e.lineno, e.offset,
                               blocker=True)
        tree = MypyFile([], [], False, set())
    if raise_on_error and errors.is_errors():
        errors.raise_error()
    return tree
def parse(source: Union[str, bytes],
          fnam: str,
          module: Optional[str],
          errors: Optional[Errors] = None,
          options: Optional[Options] = None) -> MypyFile:
    """Parse a source file, without doing any semantic analysis.

    Return the parse tree.  If errors is not provided, raise ParseError
    on failure.  Otherwise, use the errors object to report parse errors.
    """
    raise_on_error = False
    if errors is None:
        errors = Errors()
        raise_on_error = True
    if options is None:
        options = Options()
    errors.set_file(fnam, module)
    is_stub_file = fnam.endswith('.pyi')
    try:
        # This converter handles Python 2 source only (never stubs).
        assert options.python_version[0] < 3 and not is_stub_file
        # Disable deprecation warnings about <>.
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", category=DeprecationWarning)
            parsed = ast27.parse(source, fnam, 'exec')
        tree = ASTConverter(options=options, errors=errors).visit(parsed)
        assert isinstance(tree, MypyFile)
        tree.path = fnam
        tree.is_stub = is_stub_file
    except SyntaxError as e:
        line = e.lineno if e.lineno is not None else -1
        errors.report(line, e.offset, e.msg, blocker=True, code=codes.SYNTAX)
        tree = MypyFile([], [], False, {})
    if raise_on_error and errors.is_errors():
        errors.raise_error()
    return tree
def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
          pyversion: Tuple[int, int] = defaults.PYTHON3_VERSION,
          custom_typing_module: str = None) -> MypyFile:
    """Parse a source file, without doing any semantic analysis.

    Return the parse tree.  If errors is not provided, raise ParseError
    on failure.  Otherwise, use the errors object to report parse errors.

    The pyversion (major, minor) argument determines the Python syntax variant.
    """
    # Collect errors locally and raise at the end when no reporter was given.
    raise_on_error = errors is None
    if errors is None:
        errors = Errors()
    errors.set_file('<input>' if fnam is None else fnam)
    is_stub_file = bool(fnam) and fnam.endswith('.pyi')
    try:
        # Python 3 grammar; stub files always use it regardless of pyversion.
        assert pyversion[0] >= 3 or is_stub_file
        if is_stub_file:
            feature_version = defaults.PYTHON3_VERSION[1]
        else:
            feature_version = pyversion[1]
        parsed = ast3.parse(source, fnam, 'exec', feature_version=feature_version)
        converter = ASTConverter(
            pyversion=pyversion,
            is_stub=is_stub_file,
            errors=errors,
            custom_typing_module=custom_typing_module,
        )
        tree = converter.visit(parsed)
        tree.path = fnam
        tree.is_stub = is_stub_file
    except SyntaxError as exc:
        errors.report(exc.lineno, exc.offset, exc.msg)
        tree = MypyFile([], [], False, set())

    if raise_on_error and errors.is_errors():
        errors.raise_error()

    return tree
def check_type_arguments_in_targets(targets: List[FineGrainedDeferredNode], state: 'State',
                                    errors: Errors) -> None:
    """Check type arguments against type variable bounds and restrictions.

    This mirrors the logic in check_type_arguments() except that we process only
    some targets. This is used in fine grained incremental mode.
    """
    analyzer = TypeArgumentAnalyzer(errors, state.options,
                                    errors.is_typeshed_file(state.path or ''))
    with state.wrap_context():
        with strict_optional_set(state.options.strict_optional):
            for target in targets:
                # Record the (possibly overloaded) function being processed,
                # if any, so the error scope below can attribute diagnostics.
                func = None  # type: Optional[Union[FuncDef, OverloadedFuncDef]]
                if isinstance(target.node, (FuncDef, OverloadedFuncDef)):
                    func = target.node
                saved = (state.id, target.active_typeinfo, func)  # module, class, function
                # `errors.scope` may be absent; `nothing()` is a no-op
                # context manager in that case.
                with errors.scope.saved_scope(saved) if errors.scope else nothing():
                    # Only descend into function bodies when the target is a
                    # function; module-level targets are visited shallowly.
                    analyzer.recurse_into_functions = func is not None
                    target.node.accept(analyzer)
def _make_manager(self) -> BuildManager:
    """Construct a bare-bones BuildManager (empty search paths, default options)."""
    options = Options()
    return BuildManager(
        data_dir='',
        search_paths=SearchPaths((), (), (), ()),
        ignore_prefix='',
        source_set=BuildSourceSet([]),
        reports=Reports('', {}),
        options=options,
        version_id=__version__,
        plugin=Plugin(options),
        plugins_snapshot={},
        errors=Errors(),
        flush_errors=lambda msgs, serious: None,
        fscache=FileSystemCache(),
        stdout=sys.stdout,
        stderr=sys.stderr,
    )
def tokenizer_format_call( format_str: str) -> Optional[Tuple[List[str], List[FormatOp]]]: """Tokenize a str.format() format string. The core function parse_format_value() is shared with mypy. With these specifiers, we then parse the literal substrings of the original format string and convert `ConversionSpecifier` to `FormatOp`. Return: A list of string literals and a list of FormatOps. The literals are interleaved with FormatOps and the length of returned literals should be exactly one more than FormatOps. Return None if it cannot parse the string. """ # Creates an empty MessageBuilder here. # It wouldn't be used since the code has passed the type-checking. specifiers = parse_format_value(format_str, EMPTY_CONTEXT, MessageBuilder(Errors(), {})) if specifiers is None: return None format_ops = generate_format_ops(specifiers) if format_ops is None: return None literals: List[str] = [] last_end = 0 for spec in specifiers: # Skip { and } literals.append(format_str[last_end:spec.start_pos - 1]) last_end = spec.start_pos + len(spec.whole_seq) + 1 literals.append(format_str[last_end:]) # Deal with escaped {{ literals = [x.replace('{{', '{').replace('}}', '}') for x in literals] return literals, format_ops
class BuildManager:
    """This is the central class for building a mypy program.

    It coordinates parsing, import processing, semantic analysis and
    type checking. It manages state objects that actually perform the
    build steps.

    Attributes:
      data_dir:        Mypy data directory (contains stubs)
      target:          Build target; selects which passes to perform
      lib_path:        Library path for looking up modules
      semantic_analyzer:
                       Semantic analyzer, pass 2
      semantic_analyzer_pass3:
                       Semantic analyzer, pass 3
      type_checker:    Type checker
      errors:          Used for reporting all errors
      pyversion:       Python version (major, minor)
      flags:           Build options
      states:          States of all individual files that are being
                       processed. Each file in a build is always
                       represented by a single state object (after it
                       has been encountered for the first time). This
                       is the only place where states are stored.
      module_files:    Map from module name to source file path. There is a
                       1:1 mapping between modules and source files.
      module_deps:     Cache for module dependencies (direct or indirect).
                       Item (m, n) indicates whether m depends on n (directly
                       or indirectly).
      missing_modules: Set of modules that could not be imported
                       encountered so far
    """

    def __init__(self, data_dir: str,
                 lib_path: List[str],
                 target: int,
                 pyversion: Tuple[int, int],
                 flags: List[str],
                 ignore_prefix: str,
                 custom_typing_module: str,
                 reports: Reports) -> None:
        """Set up the shared error reporter, analysis passes and bookkeeping."""
        self.data_dir = data_dir
        self.errors = Errors()
        self.errors.set_ignore_prefix(ignore_prefix)
        self.lib_path = lib_path
        self.target = target
        self.pyversion = pyversion
        self.flags = flags
        self.custom_typing_module = custom_typing_module
        self.reports = reports
        self.semantic_analyzer = SemanticAnalyzer(lib_path, self.errors,
                                                  pyversion=pyversion)
        # All passes share the semantic analyzer's module map.
        modules = self.semantic_analyzer.modules
        self.semantic_analyzer_pass3 = ThirdPass(modules, self.errors)
        self.type_checker = TypeChecker(self.errors, modules, self.pyversion)
        self.states = []  # type: List[State]
        self.module_files = {}  # type: Dict[str, str]
        self.module_deps = {}  # type: Dict[Tuple[str, str], bool]
        self.missing_modules = set()  # type: Set[str]

    def process(self, initial_states: List['UnprocessedFile']) -> BuildResult:
        """Perform a build.

        The argument is a state that represents the main program file. This method
        should only be called once per a build manager object.  The return values
        are identical to the return values of the build function.
        """
        self.states += initial_states
        for initial_state in initial_states:
            self.module_files[initial_state.id] = initial_state.path
        for initial_state in initial_states:
            initial_state.load_dependencies()

        # Process states in a loop until all files (states) have been
        # semantically analyzed or type checked (depending on target).
        #
        # We type check all files before the rest of the passes so that we can
        # report errors and fail as quickly as possible.
        while True:
            # Find the next state that has all its dependencies met.
            next = self.next_available_state()
            if not next:
                trace('done')
                break

            # Potentially output some debug information.
            trace('next {} ({})'.format(next.path, next.state()))

            # Set the import context for reporting error messages correctly.
            self.errors.set_import_context(next.import_context)

            # Process the state. The process method is reponsible for adding a
            # new state object representing the new state of the file.
            next.process()

            # Raise exception if the build failed. The build can fail for
            # various reasons, such as parse error, semantic analysis error,
            # etc.
            if self.errors.is_blockers():
                self.errors.raise_error()

        # If there were no errors, all files should have been fully processed.
        for s in self.states:
            assert s.state() == final_state, (
                '{} still unprocessed in state {}'.format(s.path, s.state()))

        if self.errors.is_errors():
            self.errors.raise_error()

        # Collect a list of all files.
        trees = []  # type: List[MypyFile]
        for state in self.states:
            trees.append(cast(ParsedFile, state).tree)

        # Perform any additional passes after type checking for all the files.
        self.final_passes(trees, self.type_checker.type_map)

        return BuildResult(self.semantic_analyzer.modules,
                           self.type_checker.type_map)

    def next_available_state(self) -> 'State':
        """Find a ready state (one that has all its dependencies met)."""
        # Scan from the most recently added state backwards.
        i = len(self.states) - 1
        while i >= 0:
            if self.states[i].is_ready():
                num_incomplete = self.states[i].num_incomplete_deps()
                if num_incomplete == 0:
                    # This is perfect; no need to look for the best match.
                    return self.states[i]
            i -= 1
        return None

    def has_module(self, name: str) -> bool:
        """Have we seen a module yet?"""
        return name in self.module_files

    def file_state(self, path: str) -> int:
        """Return the state of a source file.

        In particular, return UNSEEN_STATE if the file has no associated state.

        This function does not consider any dependencies.
        """
        for s in self.states:
            if s.path == path:
                return s.state()
        return UNSEEN_STATE

    def module_state(self, name: str) -> int:
        """Return the state of a module.

        In particular, return UNSEEN_STATE if the file has no associated state.

        This considers also module dependencies.
        """
        if not self.has_module(name):
            return UNSEEN_STATE
        state = final_state
        fs = self.file_state(self.module_files[name])
        if earlier_state(fs, state):
            state = fs
        return state

    def is_dep(self, m1: str, m2: str, done: Set[str] = None) -> bool:
        """Does m1 import m2 directly or indirectly?"""
        # Have we computed this previously?
        dep = self.module_deps.get((m1, m2))
        if dep is not None:
            return dep

        if not done:
            done = set([m1])

        # m1 depends on m2 iff one of the deps of m1 depends on m2.
        st = self.lookup_state(m1)
        for m in st.dependencies:
            if m in done:
                continue
            done.add(m)
            # Cache this dependency.
            self.module_deps[m1, m] = True
            # Search recursively.
            if m == m2 or self.is_dep(m, m2, done):
                # Yes! Mark it in the cache.
                self.module_deps[m1, m2] = True
                return True

        # No dependency. Mark it in the cache.
        self.module_deps[m1, m2] = False
        return False

    def lookup_state(self, module: str) -> 'State':
        """Return the state for a module; raise RuntimeError if unknown."""
        for state in self.states:
            if state.id == module:
                return state
        raise RuntimeError('%s not found' % module)

    def all_imported_modules_in_file(self,
                                     file: MypyFile) -> List[Tuple[str, int]]:
        """Find all reachable import statements in a file.

        Return list of tuples (module id, import line number) for all modules
        imported in file.
        """
        def correct_rel_imp(imp: Union[ImportFrom, ImportAll]) -> str:
            """Function to correct for relative imports."""
            file_id = file.fullname()
            rel = imp.relative
            if rel == 0:
                return imp.id
            # A package __init__ sits one level above its own package name.
            if os.path.basename(file.path).startswith('__init__.'):
                rel -= 1
            if rel != 0:
                file_id = ".".join(file_id.split(".")[:-rel])
            new_id = file_id + "." + imp.id if imp.id else file_id
            return new_id

        res = []  # type: List[Tuple[str, int]]
        for imp in file.imports:
            if not imp.is_unreachable:
                if isinstance(imp, Import):
                    for id, _ in imp.ids:
                        res.append((id, imp.line))
                elif isinstance(imp, ImportFrom):
                    cur_id = correct_rel_imp(imp)
                    res.append((cur_id, imp.line))
                    # Also add any imported names that are submodules.
                    for name, __ in imp.names:
                        sub_id = cur_id + '.' + name
                        if self.is_module(sub_id):
                            res.append((sub_id, imp.line))
                elif isinstance(imp, ImportAll):
                    res.append((correct_rel_imp(imp), imp.line))
        return res

    def is_module(self, id: str) -> bool:
        """Is there a file in the file system corresponding to module id?"""
        return find_module(id, self.lib_path) is not None

    def final_passes(self, files: List[MypyFile],
                     types: Dict[Node, Type]) -> None:
        """Perform the code generation passes for type checked files."""
        if self.target in [SEMANTIC_ANALYSIS, TYPE_CHECK]:
            pass  # Nothing to do.
        else:
            raise RuntimeError('Unsupported target %d' % self.target)

    def log(self, message: str) -> None:
        """Print a log message, but only when verbose output is enabled."""
        if VERBOSE in self.flags:
            print('LOG: %s' % message)
def temp_message_builder() -> MessageBuilder:
    """Return a message builder usable for throwaway errors (which may not format properly)."""
    scratch_errors = Errors()
    return MessageBuilder(scratch_errors, {})
def temp_message_builder() -> MessageBuilder:
    """Return a message builder usable for collecting errors locally."""
    scratch_errors = Errors()
    return MessageBuilder(scratch_errors)
def calculate_class_abstract_status(typ: TypeInfo, is_stub_file: bool, errors: Errors) -> None:
    """Calculate abstract status of a class.

    Set is_abstract of the type to True if the type has an unimplemented
    abstract attribute.  Also compute a list of abstract attributes.
    Report error is required ABCMeta metaclass is missing.
    """
    if typ.typeddict_type:
        return  # TypedDict can't be abstract
    concrete: Set[str] = set()
    abstract: List[str] = []
    abstract_in_this_class: List[str] = []
    if typ.is_newtype:
        # Special case: NewTypes are considered as always non-abstract, so they can be used as:
        #     Config = NewType('Config', Mapping[str, str])
        #     default = Config({'cannot': 'modify'})  # OK
        typ.abstract_attributes = []
        return
    # Walk the MRO from the class itself outwards; a name seen concrete in a
    # nearer class shadows an abstract definition further up.
    for base in typ.mro:
        for name, symnode in base.names.items():
            node = symnode.node
            if isinstance(node, OverloadedFuncDef):
                # Unwrap an overloaded function definition. We can just
                # check arbitrarily the first overload item. If the
                # different items have a different abstract status, there
                # should be an error reported elsewhere.
                if node.items:  # can be empty for invalid overloads
                    func: Optional[Node] = node.items[0]
                else:
                    func = None
            else:
                func = node
            if isinstance(func, Decorator):
                fdef = func.func
                if fdef.is_abstract and name not in concrete:
                    typ.is_abstract = True
                    abstract.append(name)
                    if base is typ:
                        abstract_in_this_class.append(name)
            elif isinstance(node, Var):
                if node.is_abstract_var and name not in concrete:
                    typ.is_abstract = True
                    abstract.append(name)
                    if base is typ:
                        abstract_in_this_class.append(name)
            # Any name defined here is concrete for classes further up the MRO.
            concrete.add(name)
    # In stubs, abstract classes need to be explicitly marked because it is too
    # easy to accidentally leave a concrete class abstract by forgetting to
    # implement some methods.
    typ.abstract_attributes = sorted(abstract)
    if is_stub_file:
        if typ.declared_metaclass and typ.declared_metaclass.type.fullname == 'abc.ABCMeta':
            return
        if typ.is_protocol:
            return
        if abstract and not abstract_in_this_class:
            def report(message: str, severity: str) -> None:
                errors.report(typ.line, typ.column, message, severity=severity)

            attrs = ", ".join('"{}"'.format(attr) for attr in sorted(abstract))
            report("Class {} has abstract attributes {}".format(typ.fullname, attrs), 'error')
            report("If it is meant to be abstract, add 'abc.ABCMeta' as an explicit metaclass",
                   'note')
    # Final classes can never be subclassed, so leftover abstract members can
    # never be implemented.
    if typ.is_final and abstract:
        attrs = ", ".join('"{}"'.format(attr) for attr in sorted(abstract))
        errors.report(typ.line, typ.column,
                      "Final class {} has abstract attributes {}".format(typ.fullname, attrs))
def extract_django_settings_module(config_file_path: Optional[str]) -> str:
    """Read 'django_settings_module' from the mypy config file.

    Every failure is reported through an Errors object followed by
    raise_error(), so the function only returns on success.
    """
    errors = Errors()
    if config_file_path is None:
        errors.report(
            0, None,
            "'django_settings_module' is not set: no mypy config file specified"
        )
        errors.raise_error()

    parser = configparser.ConfigParser()
    parser.read(config_file_path)  # type: ignore

    section = 'mypy.plugins.django-stubs'
    if not parser.has_section(section):
        errors.report(
            0, None,
            "'django_settings_module' is not set: no section [mypy.plugins.django-stubs]",
            file=config_file_path)
        errors.raise_error()
    if not parser.has_option(section, 'django_settings_module'):
        errors.report(
            0, None,
            "'django_settings_module' is not set: setting is not provided",
            file=config_file_path)
        errors.raise_error()

    # Strip surrounding quotes so both django_settings_module = "x" and = 'x' work.
    return parser.get(section, 'django_settings_module').strip('\'"')
class BuildManager:
    """This is the central class for building a mypy program.

    It coordinates parsing, import processing, semantic analysis and
    type checking. It manages state objects that actually perform the
    build steps.

    Attributes:
      data_dir:     Mypy data directory (contains stubs)
      target:       Build target; selects which passes to perform
      lib_path:     Library path for looking up modules
      semantic_analyzer: Semantic analyzer, pass 2
      semantic_analyzer_pass3: Semantic analyzer, pass 3
      type_checker: Type checker
      errors:       Used for reporting all errors
      output_dir:   Store output files here (Python)
      pyversion:    Python version (2 or 3)
      flags:        Build options
      states:       States of all individual files that are being
                    processed. Each file in a build is always represented
                    by a single state object (after it has been encountered
                    for the first time). This is the only place where
                    states are stored.
      module_files: Map from module name to source file path. There is a
                    1:1 mapping between modules and source files.
      icode:        Generated icode (when compiling via C)
      binary_path:  Path of the generated binary (or None)
      module_deps:  Cache for module dependencies (direct or indirect).
                    Item (m, n) indicates whether m depends on n (directly
                    or indirectly).

    TODO Refactor code related to transformation, icode generation etc. to
         external objects.  This module should not directly depend on them.
    """

    def __init__(self, data_dir: str,
                 lib_path: List[str],
                 target: int,
                 output_dir: str,
                 pyversion: int,
                 flags: List[str],
                 ignore_prefix: str) -> None:
        self.data_dir = data_dir
        self.errors = Errors()
        self.errors.set_ignore_prefix(ignore_prefix)
        self.lib_path = lib_path
        self.target = target
        self.output_dir = output_dir
        self.pyversion = pyversion
        self.flags = flags
        self.semantic_analyzer = SemanticAnalyzer(lib_path, self.errors)
        self.semantic_analyzer_pass3 = ThirdPass(self.errors)
        self.type_checker = TypeChecker(self.errors,
                                        self.semantic_analyzer.modules,
                                        self.pyversion)
        self.states = List[State]()
        self.module_files = Dict[str, str]()
        self.icode = Dict[str, FuncIcode]()
        self.binary_path = None  # type: str
        self.module_deps = Dict[Tuple[str, str], bool]()

    def process(self, initial_state: 'UnprocessedFile') -> BuildResult:
        """Perform a build.

        The argument is a state that represents the main program
        file. This method should only be called once per a build
        manager object.  The return values are identical to the return
        values of the build function.
        """
        self.states.append(initial_state)

        # Process states in a loop until all files (states) have been
        # semantically analyzed or type checked (depending on target).
        #
        # We type check all files before the rest of the passes so that we can
        # report errors and fail as quickly as possible.
        while True:
            # Find the next state that has all its dependencies met.
            next = self.next_available_state()
            if not next:
                trace('done')
                break

            # Potentially output some debug information.
            trace('next {} ({})'.format(next.path, next.state()))

            # Set the import context for reporting error messages correctly.
            self.errors.set_import_context(next.import_context)

            # Process the state. The process method is responsible for adding
            # a new state object representing the new state of the file.
            next.process()

            # Raise exception if the build failed. The build can fail for
            # various reasons, such as parse error, semantic analysis error,
            # etc.
            if self.errors.is_errors():
                self.errors.raise_error()

        # If there were no errors, all files should have been fully processed.
        for s in self.states:
            assert s.state() == final_state, (
                '{} still unprocessed'.format(s.path))

        # Collect a list of all files.
        trees = List[MypyFile]()
        for state in self.states:
            trees.append((cast('ParsedFile', state)).tree)

        # Perform any additional passes after type checking for all the files.
        self.final_passes(trees, self.type_checker.type_map)

        return BuildResult(self.semantic_analyzer.modules,
                           self.type_checker.type_map,
                           self.icode, self.binary_path)

    def next_available_state(self) -> 'State':
        """Find a ready state (one that has all its dependencies met).

        Return None if no state is ready. States are scanned from the
        most recently added backwards.
        """
        i = len(self.states) - 1
        while i >= 0:
            if self.states[i].is_ready():
                num_incomplete = self.states[i].num_incomplete_deps()
                if num_incomplete == 0:
                    # This is perfect; no need to look for the best match.
                    return self.states[i]
            i -= 1
        return None

    def has_module(self, name: str) -> bool:
        """Have we seen a module yet?"""
        return name in self.module_files

    def file_state(self, path: str) -> int:
        """Return the state of a source file.

        In particular, return UNSEEN_STATE if the file has no associated
        state.

        This function does not consider any dependencies.
        """
        for s in self.states:
            if s.path == path:
                return s.state()
        return UNSEEN_STATE

    def module_state(self, name: str) -> int:
        """Return the state of a module.

        In particular, return UNSEEN_STATE if the file has no associated
        state.

        This considers also module dependencies.
        """
        if not self.has_module(name):
            return UNSEEN_STATE
        state = final_state
        fs = self.file_state(self.module_files[name])
        if earlier_state(fs, state):
            state = fs
        return state

    def is_dep(self, m1: str, m2: str, done: Set[str] = None) -> bool:
        """Does m1 import m2 directly or indirectly?"""
        # Have we computed this previously?
        dep = self.module_deps.get((m1, m2))
        if dep is not None:
            return dep

        if not done:
            done = set([m1])

        # m1 depends on m2 iff one of the deps of m1 depends on m2.
        st = self.lookup_state(m1)
        for m in st.dependencies:
            if m in done:
                continue
            done.add(m)
            # Cache this dependency.
            self.module_deps[m1, m] = True
            # Search recursively.
            if m == m2 or self.is_dep(m, m2, done):
                # Yes! Mark it in the cache.
                self.module_deps[m1, m2] = True
                return True

        # No dependency. Mark it in the cache.
        self.module_deps[m1, m2] = False
        return False

    def lookup_state(self, module: str) -> 'State':
        """Return the state object for a module, by module id.

        Raise RuntimeError if there is no state for the module.
        """
        for state in self.states:
            if state.id == module:
                return state
        # Bug fix: the message used to interpolate the builtin 'str' instead
        # of the module name that failed to resolve.
        raise RuntimeError('%s not found' % module)

    def all_imported_modules_in_file(self,
                                     file: MypyFile) -> List[Tuple[str, int]]:
        """Find all import statements in a file.

        Return list of tuples (module id, import line number) for all modules
        imported in file.
        """
        # TODO also find imports not at the top level of the file
        res = List[Tuple[str, int]]()
        for imp in file.imports:
            if isinstance(imp, Import):
                for id, _ in imp.ids:
                    res.append((id, imp.line))
            elif isinstance(imp, ImportFrom):
                res.append((imp.id, imp.line))
                # Also add any imported names that are submodules.
                for name, __ in imp.names:
                    sub_id = imp.id + '.' + name
                    if self.is_module(sub_id):
                        res.append((sub_id, imp.line))
            elif isinstance(imp, ImportAll):
                res.append((imp.id, imp.line))
        return res

    def is_module(self, id: str) -> bool:
        """Is there a file in the file system corresponding to module id?"""
        return find_module(id, self.lib_path) is not None

    def final_passes(self, files: List[MypyFile],
                     types: Dict[Node, Type]) -> None:
        """Perform the code generation passes for type checked files."""
        if self.target == TRANSFORM:
            self.transform(files)
        elif self.target == ICODE:
            self.transform(files)
            self.generate_icode(files, types)
        elif self.target == C:
            self.transform(files)
            self.generate_icode(files, types)
            self.generate_c_and_compile(files)
        elif self.target in [SEMANTIC_ANALYSIS, TYPE_CHECK]:
            pass  # Nothing to do.
        else:
            raise RuntimeError('Unsupported target %d' % self.target)

    def get_python_out_path(self, f: MypyFile) -> str:
        """Map a parsed file to its output path under output_dir."""
        if f.fullname() == '__main__':
            return os.path.join(self.output_dir, basename(f.path))
        else:
            components = f.fullname().split('.')
            if os.path.basename(f.path) == '__init__.py':
                components.append('__init__.py')
            else:
                components[-1] += '.py'
            return os.path.join(self.output_dir, *components)

    def transform(self, files: List[MypyFile]) -> None:
        """Run the dyncheck transform over all files (in place)."""
        for f in files:
            if f.fullname() == 'typing':
                # The typing module is special and is currently not
                # transformed.
                continue
            # Transform parse tree and produce pretty-printed output.
            v = transform.DyncheckTransformVisitor(
                self.type_checker.type_map,
                self.semantic_analyzer.modules,
                is_pretty=True)
            f.accept(v)

    def generate_icode(self, files: List[MypyFile],
                       types: Dict[Node, Type]) -> None:
        """Generate icode for all files and store it in self.icode."""
        builder = icode.IcodeBuilder(types)
        for f in files:
            # TODO remove ugly builtins hack
            if not f.path.endswith('/builtins.py'):
                f.accept(builder)
        self.icode = builder.generated

    def generate_c_and_compile(self, files: List[MypyFile]) -> None:
        """Emit a C source file from the generated icode and compile it.

        With COMPILE_ONLY only the C file is produced; otherwise the C
        file is compiled, removed, and self.binary_path is set.
        """
        gen = cgen.CGenerator()

        # Note: the loop variable is deliberately NOT named 'icode' so it
        # does not shadow the icode module used elsewhere in this class.
        for fn, fn_icode in self.icode.items():
            gen.generate_function('M' + fn, fn_icode)

        program_name = os.path.splitext(basename(files[0].path))[0]
        c_file = '%s.c' % program_name

        # Write C file. A context manager guarantees the handle is closed
        # even if writelines fails.
        self.log('writing %s' % c_file)
        with open(c_file, 'w') as out:
            out.writelines(gen.output())

        if COMPILE_ONLY not in self.flags:
            # Generate binary file.
            data_dir = self.data_dir
            vm_dir = os.path.join(data_dir, 'vm')
            cc = os.getenv('CC', 'gcc')
            cflags = shlex.split(os.getenv('CFLAGS', '-O2'))
            cmdline = [cc] + cflags + ['-I%s' % vm_dir,
                                       '-o%s' % program_name,
                                       c_file,
                                       os.path.join(vm_dir, 'runtime.c')]
            self.log(' '.join(cmdline))
            status = subprocess.call(cmdline)
            # TODO check status
            self.log('removing %s' % c_file)
            os.remove(c_file)
            self.binary_path = os.path.join('.', program_name)

    def log(self, message: str) -> None:
        """Print a log message, but only when VERBOSE is in the flags."""
        if VERBOSE in self.flags:
            print('LOG: %s' % message)