def add_new_class_for_module(module: MypyFile, name: str, bases: List[Instance],
                             fields: 'OrderedDict[str, MypyType]') -> TypeInfo:
    """Create a plugin-generated class inside ``module`` and register it.

    Builds a ClassDef/TypeInfo pair with the given ``bases``, attaches one
    property Var per entry in ``fields``, and publishes the class in the
    module symbol table under a collision-free name derived from ``name``.
    Returns the new TypeInfo.
    """
    unique_name = checker.gen_unique_name(name, module.names)

    # Class expression backing the new type.
    class_def = ClassDef(unique_name, Block([]))
    class_def.fullname = module.fullname() + '.' + unique_name

    # TypeInfo describing the new class.
    type_info = TypeInfo(SymbolTable(), class_def, module.fullname())
    type_info.bases = bases
    calculate_mro(type_info)
    type_info.calculate_metaclass_type()

    def attach_var(var: Var, is_initialized_in_class: bool = False,
                   is_property: bool = False) -> None:
        # Bind the Var to the new TypeInfo and publish it in its symbol table.
        var.info = type_info
        var.is_initialized_in_class = is_initialized_in_class
        var.is_property = is_property
        var._fullname = type_info.fullname() + '.' + var.name()
        type_info.names[var.name()] = SymbolTableNode(MDEF, var)

    # Fields become properties on the generated class.
    for field_var in [Var(item, typ) for item, typ in fields.items()]:
        attach_var(field_var, is_property=True)

    class_def.info = type_info
    module.names[unique_name] = SymbolTableNode(
        GDEF, type_info, plugin_generated=True)
    return type_info
def merge_asts(old: MypyFile, old_symbols: SymbolTable,
               new: MypyFile, new_symbols: SymbolTable) -> None:
    """Merge a new version of a module AST to a previous version.

    The main idea is to preserve the identities of externally visible
    nodes in the old AST (that have a corresponding node in the new AST).
    All old node state (outside identity) will come from the new AST.

    When this returns, 'old' will refer to the merged AST, but
    'new_symbols' will be the new symbol table. 'new' and 'old_symbols'
    will no longer be valid.
    """
    assert new.fullname() == old.fullname()
    # Map every new node identity to the old node that must be preserved.
    replacements = replacement_map_from_symbol_table(
        old_symbols, new_symbols, prefix=old.fullname())
    # The module node itself is preserved too.
    replacements[new] = old
    # Rewrite references throughout the new AST (symbol tables excluded).
    merged = replace_nodes_in_ast(new, replacements)
    assert merged is old
    # The *new* symbol table stays in use (it has all the new definitions);
    # its entries must now point at the preserved old nodes.
    replace_nodes_in_symbol_table(new_symbols, replacements)
def add_new_class_for_module(module: MypyFile, name: str, bases: List[Instance],
                             fields: 'OrderedDict[str, MypyType]') -> TypeInfo:
    """Create and register a plugin-generated class in ``module``.

    The class gets the given ``bases``, one member Var per ``fields`` entry,
    and a unique name derived from ``name``. Returns the new TypeInfo.
    """
    unique_name = checker.gen_unique_name(name, module.names)

    # Class expression backing the new type.
    class_def = ClassDef(unique_name, Block([]))
    class_def.fullname = module.fullname() + '.' + unique_name

    # TypeInfo describing the new class.
    info = TypeInfo(SymbolTable(), class_def, module.fullname())
    info.bases = bases
    calculate_mro(info)
    info.calculate_metaclass_type()

    # Add fields as plugin-generated members.
    for field_name, field_type in fields.items():
        field_var = Var(field_name, type=field_type)
        field_var.info = info
        field_var._fullname = info.fullname() + '.' + field_name
        info.names[field_name] = SymbolTableNode(
            MDEF, field_var, plugin_generated=True)

    class_def.info = info
    module.names[unique_name] = SymbolTableNode(
        GDEF, info, plugin_generated=True)
    return info
def get_additional_deps(self, file: MypyFile) -> List[Tuple[int, str, int]]:
    """Inject extra fake dependencies (priority 10) for django modules.

    'django.conf' depends on the configured settings module (if any);
    'django.db.models.query' depends on 'mypy_extensions'.
    """
    fullname = file.fullname()
    if fullname == 'django.conf' and self.django_settings_module:
        return [(10, self.django_settings_module, -1)]
    if fullname == 'django.db.models.query':
        return [(10, 'mypy_extensions', -1)]
    return []
def get_python_out_path(self, f: MypyFile) -> str:
    """Map a module to its translated-source path under ``self.output_dir``.

    '__main__' keeps its original file name; other modules are placed by
    their dotted name, with packages mapping to ``__init__.py``.
    """
    if f.fullname() == '__main__':
        return os.path.join(self.output_dir, basename(f.path))
    parts = f.fullname().split('.')
    if os.path.basename(f.path) == '__init__.py':
        # Package: keep the full dotted path and add the init file.
        parts.append('__init__.py')
    else:
        # Plain module: the last component becomes the file name.
        parts[-1] += '.py'
    return os.path.join(self.output_dir, *parts)
def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type],
            options: Options) -> None:
    """Record the (Any, total) expression counts for one file."""
    stats_visitor = stats.StatisticsVisitor(inferred=True,
                                            filename=tree.fullname(),
                                            typemap=type_map,
                                            all_nodes=True)
    tree.accept(stats_visitor)
    total = (stats_visitor.num_imprecise + stats_visitor.num_precise
             + stats_visitor.num_any)
    # Files with no typed expressions are skipped entirely.
    if total > 0:
        self.counts[tree.fullname()] = (stats_visitor.num_any, total)
def visit_file(self, file_node: MypyFile, fnam: str, options: Options,
               patches: List[Callable[[], None]]) -> None:
    """Run this analysis pass over a single file.

    Wires per-file state (error reporting, options, patches, current
    module id and globals) into both this pass and the shared semantic
    analyzer ``self.sem`` before visiting the file.
    """
    self.errors.set_file(fnam, file_node.fullname())
    self.options = options
    self.sem.options = options
    self.patches = patches
    self.is_typeshed_file = self.errors.is_typeshed_file(fnam)
    self.sem.cur_mod_id = file_node.fullname()
    self.sem.globals = file_node.names
    # Honor the per-file strict-optional setting for the whole traversal.
    with experiments.strict_optional_set(options.strict_optional):
        self.accept(file_node)
def visit_file(self, file_node: MypyFile, fnam: str, options: Options,
               patches: List[Callable[[], None]]) -> None:
    """Run this analysis pass over a single file.

    Like the sibling visit_file implementations, but resets
    ``recurse_into_functions`` so function bodies are visited by default.
    """
    self.recurse_into_functions = True
    self.errors.set_file(fnam, file_node.fullname())
    self.options = options
    self.sem.options = options
    self.patches = patches
    self.is_typeshed_file = self.errors.is_typeshed_file(fnam)
    self.sem.cur_mod_id = file_node.fullname()
    self.sem.globals = file_node.names
    # Honor the per-file strict-optional setting for the whole traversal.
    with experiments.strict_optional_set(options.strict_optional):
        self.accept(file_node)
def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type],
            options: Options) -> None:
    """Record (Any, total) expression counts, treating dead code as Any."""
    stats_visitor = stats.StatisticsVisitor(inferred=True,
                                            filename=tree.fullname(),
                                            typemap=type_map,
                                            all_nodes=True)
    tree.accept(stats_visitor)
    dead_lines = list(stats_visitor.line_map.values()).count(
        stats.TYPE_UNANALYZED)
    # Each line of dead code counts as one expression of type "Any".
    any_count = stats_visitor.num_any + dead_lines
    total = stats_visitor.num_imprecise + stats_visitor.num_precise + any_count
    if total > 0:
        self.counts[tree.fullname()] = (any_count, total)
def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type],
            options: Options) -> None:
    """Record Any-expression stats for one file, treating dead code as Any.

    Also stores the per-file breakdown of where Any types came from.
    """
    stats_visitor = stats.StatisticsVisitor(inferred=True,
                                            filename=tree.fullname(),
                                            typemap=type_map,
                                            all_nodes=True,
                                            visit_untyped_defs=False)
    tree.accept(stats_visitor)
    self.any_types_counter[tree.fullname()] = stats_visitor.type_of_any_counter
    dead_lines = list(stats_visitor.line_map.values()).count(
        stats.TYPE_UNANALYZED)
    # Each line of dead code counts as one expression of type "Any".
    any_count = stats_visitor.num_any_exprs + dead_lines
    total = (stats_visitor.num_imprecise_exprs
             + stats_visitor.num_precise_exprs + any_count)
    if total > 0:
        self.counts[tree.fullname()] = (any_count, total)
def visit_file(self, file_node: MypyFile, fnam: str, options: Options,
               patches: List[Tuple[int, Callable[[], None]]]) -> None:
    """Run this analysis pass over a single file inside its file scope.

    Wires per-file state into this pass and the shared analyzer, then
    visits the file with the target scope set to the module.
    """
    self.recurse_into_functions = True
    self.errors.set_file(fnam, file_node.fullname())
    self.options = options
    self.sem.options = options
    self.patches = patches
    self.is_typeshed_file = self.errors.is_typeshed_file(fnam)
    self.sem.cur_mod_id = file_node.fullname()
    self.cur_mod_node = file_node
    self.sem.globals = file_node.names
    # Honor the per-file strict-optional setting for the whole traversal.
    with experiments.strict_optional_set(options.strict_optional):
        self.scope.enter_file(file_node.fullname())
        self.accept(file_node)
        self.scope.leave()
def dump_type_stats(tree: MypyFile, path: str, modules: Dict[str, MypyFile],
                    inferred: bool = False,
                    typemap: Optional[Dict[Expression, Type]] = None) -> None:
    """Print type precision and type-kind statistics for one module."""
    if is_special_module(path):
        return
    print(path)
    visitor = StatisticsVisitor(inferred, filename=tree.fullname(),
                                modules=modules, typemap=typemap)
    tree.accept(visitor)
    for line in visitor.output:
        print(line)
    # Table-style dump of the collected counters.
    print(' ** precision **')
    for label, count in [(' precise ', visitor.num_precise_exprs),
                         (' imprecise', visitor.num_imprecise_exprs),
                         (' any ', visitor.num_any_exprs)]:
        print(label, count)
    print(' ** kinds **')
    for label, count in [(' simple ', visitor.num_simple_types),
                         (' generic ', visitor.num_generic_types),
                         (' function ', visitor.num_function_types),
                         (' tuple ', visitor.num_tuple_types),
                         (' TypeVar ', visitor.num_typevar_types),
                         (' complex ', visitor.num_complex_types),
                         (' any ', visitor.num_any_types)]:
        print(label, count)
def generate_html_report(tree: MypyFile, path: str, type_map: Dict[Expression, Type],
                         output_dir: str) -> None:
    """Write a per-line HTML type-precision report for one source file.

    Lines are colored white/yellow/red for precise/imprecise/Any and the
    resulting file is recorded in the module-level ``html_files`` list.
    Special and out-of-tree modules are skipped.
    """
    # cgi.escape() was deprecated and removed in Python 3.13;
    # html.escape(s, quote=False) is the exact drop-in replacement.
    import html

    if is_special_module(path):
        return
    # There may be more than one right answer for "what should we do here?"
    # but this is a reasonable one.
    path = os.path.relpath(path)
    if path.startswith('..'):
        return
    visitor = StatisticsVisitor(inferred=True, filename=tree.fullname(),
                                typemap=type_map, all_nodes=True)
    tree.accept(visitor)
    assert not os.path.isabs(path) and not path.startswith('..')
    # This line is *wrong* if the preceding assert fails.
    target_path = os.path.join(output_dir, 'html', path)
    # replace .py or .pyi with .html
    target_path = os.path.splitext(target_path)[0] + '.html'
    assert target_path.endswith('.html')
    ensure_dir_exists(os.path.dirname(target_path))
    output = []  # type: List[str]
    append = output.append
    append('''\
<html>
<head>
  <style>
    .red { background-color: #faa; }
    .yellow { background-color: #ffa; }
    .white { }
    .lineno { color: #999; }
  </style>
</head>
<body>
<pre>''')
    num_imprecise_lines = 0
    num_lines = 0
    # Hoisted out of the loop: the mapping never changes per line.
    style_map = {TYPE_PRECISE: 'white',
                 TYPE_IMPRECISE: 'yellow',
                 TYPE_ANY: 'red'}
    with open(path) as input_file:
        for i, line in enumerate(input_file):
            lineno = i + 1
            status = visitor.line_map.get(lineno, TYPE_PRECISE)
            style = style_map[status]
            append('<span class="lineno">%4d</span> ' % lineno +
                   '<span class="%s">%s</span>' % (style,
                                                   html.escape(line, quote=False)))
            if status != TYPE_PRECISE:
                num_imprecise_lines += 1
            if line.strip():
                num_lines += 1
    append('</pre>')
    append('</body></html>')
    with open(target_path, 'w') as output_file:
        output_file.writelines(output)
    target_path = target_path[len(output_dir) + 1:]
    html_files.append((path, target_path, num_lines, num_imprecise_lines))
def visit_mypy_file(self, mfile: MypyFile) -> int:
    """Generate icode for a module's top level.

    Initializes non-int global variables to None, compiles each top-level
    definition, and stores the module initializer as '__init'.
    Always returns -1 (no result register).
    """
    if mfile.fullname() in ('typing', 'abc'):
        # These modules are special; their contents are currently all
        # built-in primitives.
        return -1
    self.enter()
    # Initialize non-int global variables.
    for name in sorted(mfile.names):
        node = mfile.names[name].node
        if (isinstance(node, Var)
                and name not in nodes.implicit_module_attrs):
            # NOTE: isinstance() already narrows node to Var; the previous
            # cast(Var, node) was redundant and has been removed.
            if (not is_named_instance(node.type, 'builtins.int')
                    and node.fullname() != 'typing.Undefined'):
                tmp = self.alloc_register()
                self.add(SetRNone(tmp))
                self.add(SetGR(node.fullname(), tmp))
    for d in mfile.defs:
        d.accept(self)
    self.add_implicit_return()
    self.generated['__init'] = FuncIcode(0, self.blocks, self.register_types)
    # TODO leave?
    return -1
def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type],
            options: Options) -> None:
    """Emit Cobertura-style per-line coverage data for one file.

    A line counts as "covered" when its inferred precision is not Any;
    imprecise lines are modeled as half-covered branches. Results are
    accumulated into the containing package and the root package.
    """
    path = os.path.relpath(tree.path)
    visitor = stats.StatisticsVisitor(inferred=True,
                                      filename=tree.fullname(),
                                      typemap=type_map,
                                      all_nodes=True)
    tree.accept(visitor)
    class_name = os.path.basename(path)
    file_info = FileInfo(path, tree._fullname)
    class_element = etree.Element('class', filename=path, complexity='1.0',
                                  name=class_name)
    etree.SubElement(class_element, 'methods')
    lines_element = etree.SubElement(class_element, 'lines')
    # tokenize.open() honors the file's PEP 263 encoding declaration.
    with tokenize.open(path) as input_file:
        class_lines_covered = 0
        class_total_lines = 0
        for lineno, _ in enumerate(input_file, 1):
            status = visitor.line_map.get(lineno, stats.TYPE_EMPTY)
            hits = 0
            branch = False
            if status == stats.TYPE_EMPTY:
                # Blank/non-code lines are excluded from the totals.
                continue
            class_total_lines += 1
            if status != stats.TYPE_ANY:
                class_lines_covered += 1
                hits = 1
            if status == stats.TYPE_IMPRECISE:
                branch = True
            file_info.counts[status] += 1
            line_element = etree.SubElement(lines_element, 'line',
                                            number=str(lineno),
                                            precision=stats.precision_names[status],
                                            hits=str(hits),
                                            branch=str(branch).lower())
            if branch:
                # Imprecise lines are reported as a half-taken branch.
                line_element.attrib['condition-coverage'] = '50% (1/2)'
    class_element.attrib['branch-rate'] = '0'
    class_element.attrib['line-rate'] = get_line_rate(class_lines_covered,
                                                      class_total_lines)
    # parent_module is set to whichever module contains this file.  For most
    # files, we want to simply strip the last element off of the module.  But
    # for __init__.py files, the module == the parent module.
    parent_module = file_info.module.rsplit('.', 1)[0]
    if file_info.name.endswith('__init__.py'):
        parent_module = file_info.module
    if parent_module not in self.root_package.packages:
        self.root_package.packages[parent_module] = CoberturaPackage(parent_module)
    current_package = self.root_package.packages[parent_module]
    packages_to_update = [self.root_package, current_package]
    for package in packages_to_update:
        package.total_lines += class_total_lines
        package.covered_lines += class_lines_covered
    current_package.classes[class_name] = class_element
def visit_mypy_file(self, o: MypyFile) -> None:
    """Collect dependencies for a module, including its type-alias deps."""
    self.scope.enter_file(o.fullname())
    self.is_package_init_file = o.is_package_init_file()
    # Aliases used in this module add triggers on their definitions.
    if o in self.alias_deps:
        for alias in self.alias_deps[o]:
            self.add_dependency(make_trigger(alias))
    super().visit_mypy_file(o)
    self.scope.leave()
def visit_mypy_file(self, o: MypyFile) -> None:
    """Collect dependencies for a module, merging plugin-provided deps."""
    self.scope.enter_file(o.fullname())
    self.is_package_init_file = o.is_package_init_file()
    self.add_type_alias_deps(self.scope.current_target())
    # Fold plugin-generated trigger -> targets mappings into our map.
    for trigger, targets in o.plugin_deps.items():
        self.map.setdefault(trigger, set()).update(targets)
    super().visit_mypy_file(o)
    self.scope.leave()
def on_file(self, tree: MypyFile, modules: Dict[str, MypyFile],
            type_map: Dict[Expression, Type], options: Options) -> None:
    """Record Any-expression stats for one file, treating dead code as Any.

    Also stores the per-file breakdown of where Any types came from.
    """
    stats_visitor = stats.StatisticsVisitor(inferred=True,
                                            filename=tree.fullname(),
                                            modules=modules,
                                            typemap=type_map,
                                            all_nodes=True,
                                            visit_untyped_defs=False)
    tree.accept(stats_visitor)
    self.any_types_counter[tree.fullname()] = stats_visitor.type_of_any_counter
    dead_lines = list(stats_visitor.line_map.values()).count(
        stats.TYPE_UNANALYZED)
    # Each line of dead code counts as one expression of type "Any".
    any_count = stats_visitor.num_any_exprs + dead_lines
    total = (stats_visitor.num_imprecise_exprs
             + stats_visitor.num_precise_exprs + any_count)
    if total > 0:
        self.counts[tree.fullname()] = (any_count, total)
def visit_mypy_file(self, mypyfile: MypyFile) -> Register:
    """Compile a module: prepare class IRs, then generate ops per definition."""
    if mypyfile.fullname() in ('typing', 'abc'):
        # These modules are special; their contents are currently all
        # built-in primitives.
        return INVALID_REGISTER
    # First pass: build ClassIRs and the TypeInfo-to-ClassIR mapping.
    for defn in mypyfile.defs:
        if isinstance(defn, ClassDef):
            self.prepare_class_def(defn)
    # Second pass: generate ops.
    self.current_module_name = mypyfile.fullname()
    for defn in mypyfile.defs:
        defn.accept(self)
    return INVALID_REGISTER
def visit_file(self, file_node: MypyFile, fnam: str, options: Options,
               patches: List[Tuple[int, Callable[[], None]]]) -> None:
    """Run this analysis pass over one file, then process its symbol table.

    Wires per-file state into this pass and the shared analyzer, visits the
    file within its scope, and clears the per-file state afterwards.
    """
    self.recurse_into_functions = True
    self.errors.set_file(fnam, file_node.fullname(), scope=self.scope)
    self.options = options
    self.sem.options = options
    self.patches = patches
    self.is_typeshed_file = self.errors.is_typeshed_file(fnam)
    self.sem.cur_mod_id = file_node.fullname()
    self.cur_mod_node = file_node
    self.sem.globals = file_node.names
    # Honor the per-file strict-optional setting for the whole traversal.
    with experiments.strict_optional_set(options.strict_optional):
        self.scope.enter_file(file_node.fullname())
        self.accept(file_node)
        self.analyze_symbol_table(file_node.names)
        self.scope.leave()
    # Drop per-file references so stale state can't leak into the next file.
    del self.cur_mod_node
    self.patches = []
def merge_asts(old: MypyFile, old_symbols: SymbolTable,
               new: MypyFile, new_symbols: SymbolTable) -> None:
    """Merge a new version of a module AST to a previous version.

    The main idea is to preserve the identities of externally visible
    nodes in the old AST (that have a corresponding node in the new AST).
    All old node state (outside identity) will come from the new AST.

    When this returns, 'old' will refer to the merged AST, but
    'new_symbols' will be the new symbol table. 'new' and 'old_symbols'
    will no longer be valid.
    """
    assert new.fullname() == old.fullname()
    # Identity map: nodes in 'new' -> corresponding nodes in 'old'.
    node_map = replacement_map_from_symbol_table(
        old_symbols, new_symbols, prefix=old.fullname())
    node_map[new] = old
    result = replace_nodes_in_ast(new, node_map)
    assert result is old
    # Keep using the new symbol table, but pointed at the preserved nodes.
    replace_nodes_in_symbol_table(new_symbols, node_map)
def visit_file(self, file_node: MypyFile, fnam: str, options: Options,
               patches: List[Tuple[int, Callable[[], None]]]) -> None:
    """Wire per-file state into this pass and the shared analyzer.

    NOTE(review): unlike the sibling visit_file implementations, this one
    never calls accept() on the file — presumably the traversal happens
    elsewhere (or this view is truncated); confirm before relying on it.
    """
    self.recurse_into_functions = True
    self.options = options
    self.sem.options = options
    self.patches = patches
    self.is_typeshed_file = self.errors.is_typeshed_file(fnam)
    self.sem.cur_mod_id = file_node.fullname()
    self.cur_mod_node = file_node
    self.sem.globals = file_node.names
def get_additional_deps(self, file: MypyFile) -> List[Tuple[int, str, int]]:
    """Report extra module dependencies the django plugin knows about.

    Adds deps for the settings module, for values()/values_list() helpers,
    for the AUTH_USER_MODEL behind get_user_model(), and for every model
    module reachable through forward/reverse related fields of the models
    defined in this file.
    """
    # for settings
    if file.fullname() == 'django.conf' and self.django_context.django_settings_module:
        return [self._new_dependency(self.django_context.django_settings_module)]
    # for values / values_list
    if file.fullname() == 'django.db.models':
        return [self._new_dependency('mypy_extensions'),
                self._new_dependency('typing')]
    # for `get_user_model()`
    if self.django_context.settings:
        if (file.fullname() == 'django.contrib.auth'
                or file.fullname() in {'django.http', 'django.http.request'}):
            auth_user_model_name = self.django_context.settings.AUTH_USER_MODEL
            try:
                auth_user_module = self.django_context.apps_registry.get_model(
                    auth_user_model_name).__module__
            except LookupError:
                # get_user_model() model app is not installed
                return []
            return [self._new_dependency(auth_user_module)]
    # ensure that all mentioned to='someapp.SomeModel' are loaded with
    # corresponding related Fields defined
    defined_model_classes = self.django_context.model_modules.get(file.fullname())
    if not defined_model_classes:
        return []
    deps = set()
    for model_class in defined_model_classes:
        # forward relations
        for field in self.django_context.get_model_fields(model_class):
            if isinstance(field, RelatedField):
                related_model_cls = self.django_context.get_field_related_model_cls(field)
                if related_model_cls is None:
                    continue
                related_model_module = related_model_cls.__module__
                # Only modules other than the current one are dependencies.
                if related_model_module != file.fullname():
                    deps.add(self._new_dependency(related_model_module))
        # reverse relations
        for relation in model_class._meta.related_objects:
            related_model_cls = self.django_context.get_field_related_model_cls(relation)
            related_model_module = related_model_cls.__module__
            if related_model_module != file.fullname():
                deps.add(self._new_dependency(related_model_module))
    return list(deps)
def create_indirect_imported_name(
        file_node: MypyFile, module: str, relative: int,
        imported_name: str) -> Optional[SymbolTableNode]:
    """Create symbol table entry for a name imported from another module.

    These entries act as indirect references.  Returns None when the
    relative import cannot be resolved.
    """
    target_module, ok = correct_relative_import(
        file_node.fullname(), relative, module,
        file_node.is_package_init_file())
    if not ok:
        return None
    # Use GDEF since this refers to a module-level definition.
    return SymbolTableNode(
        GDEF, ImportedName('%s.%s' % (target_module, imported_name)))
def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type],
            options: Options) -> None:
    """Build the per-file XML precision report and validate it."""
    self.last_xml = None
    path = os.path.relpath(tree.path)
    # Skip special modules, files outside the tree, and bundled stubs.
    if stats.is_special_module(path):
        return
    if path.startswith('..'):
        return
    if 'stubs' in path.split('/'):
        return
    visitor = stats.StatisticsVisitor(inferred=True,
                                      filename=tree.fullname(),
                                      typemap=type_map,
                                      all_nodes=True)
    tree.accept(visitor)
    root = etree.Element('mypy-report-file', name=path, module=tree._fullname)
    doc = etree.ElementTree(root)
    file_info = FileInfo(path, tree._fullname)
    # tokenize.open() honors the file's PEP 263 encoding declaration.
    with tokenize.open(path) as input_file:
        for lineno, line_text in enumerate(input_file, 1):
            status = visitor.line_map.get(lineno, stats.TYPE_EMPTY)
            file_info.counts[status] += 1
            etree.SubElement(root, 'line',
                             number=str(lineno),
                             precision=stats.precision_names[status],
                             content=line_text.rstrip('\n').translate(
                                 self.control_fixer),
                             any_info=self._get_any_info_for_line(
                                 visitor, lineno))
    # Assumes a layout similar to what XmlReporter uses.
    xslt_path = os.path.relpath('mypy-html.xslt', path)
    transform_pi = etree.ProcessingInstruction(
        'xml-stylesheet', 'type="text/xsl" href="%s"' % pathname2url(xslt_path))
    root.addprevious(transform_pi)
    self.schema.assertValid(doc)
    self.last_xml = doc
    self.files.append(file_info)
def get_import_star_modules(api: SemanticAnalyzerPass2,
                            module: Optional[MypyFile]) -> List[str]:
    """Return module paths (transitively) pulled in via relative ``import *``.

    Returns [] when any relative import cannot be resolved, or when
    ``module`` is None (i.e. not present in ``api.modules``).
    """
    if module is None:
        # The recursive call below passes api.modules.get(...), which may be
        # None for an unprocessed module; the old code crashed on
        # `module.imports` in that case.
        return []
    import_star_modules = []
    for module_import in module.imports:
        # relative import * are not resolved by mypy
        if isinstance(module_import, ImportAll) and module_import.relative:
            absolute_import_path, correct = correct_relative_import(
                module.fullname(), module_import.relative, module_import.id,
                is_cur_package_init_file=False)
            if not correct:
                return []
            # Recurse into the star-imported module to pick up its own
            # star imports, de-duplicating while preserving order.
            star_targets = [absolute_import_path] + get_import_star_modules(
                api, module=api.modules.get(absolute_import_path))
            for path in star_targets:
                if path not in import_star_modules:
                    import_star_modules.append(path)
    return import_star_modules
def create_indirect_imported_name(file_node: MypyFile,
                                  module: str,
                                  relative: int,
                                  imported_name: str) -> Optional[SymbolTableNode]:
    """Create symbol table entry for a name imported from another module.

    These entries act as indirect references.  Returns None when the
    relative import cannot be resolved.
    """
    target_module, ok = correct_relative_import(
        file_node.fullname(), relative, module,
        file_node.is_package_init_file())
    if not ok:
        return None
    link = ImportedName('%s.%s' % (target_module, imported_name))
    # Use GDEF since this refers to a module-level definition.
    return SymbolTableNode(GDEF, link)
def on_file(self, tree: MypyFile, modules: Dict[str, MypyFile],
            type_map: Dict[Expression, Type], options: Options) -> None:
    """Collect per-line precision counts for one file."""
    path = os.path.relpath(tree.path)
    if should_skip_path(path):
        return
    stats_visitor = stats.StatisticsVisitor(inferred=True,
                                            filename=tree.fullname(),
                                            modules=modules,
                                            typemap=type_map,
                                            all_nodes=True)
    tree.accept(stats_visitor)
    info = FileInfo(path, tree._fullname)
    # Lines without an entry in the line map are treated as empty.
    for lineno, _ in iterate_python_lines(path):
        info.counts[stats_visitor.line_map.get(lineno, stats.TYPE_EMPTY)] += 1
    self.files.append(info)
def dump_type_stats(tree: MypyFile, path: str, inferred: bool = False,
                    typemap: Optional[Dict[Expression, Type]] = None) -> None:
    """Print type precision and type-kind statistics for one module."""
    if is_special_module(path):
        return
    print(path)
    visitor = StatisticsVisitor(inferred, filename=tree.fullname(),
                                typemap=typemap)
    tree.accept(visitor)
    for line in visitor.output:
        print(line)
    # Table-style dump of the collected counters.
    print(' ** precision **')
    for label, count in [(' precise ', visitor.num_precise_exprs),
                         (' imprecise', visitor.num_imprecise_exprs),
                         (' any ', visitor.num_any_exprs)]:
        print(label, count)
    print(' ** kinds **')
    for label, count in [(' simple ', visitor.num_simple_types),
                         (' generic ', visitor.num_generic_types),
                         (' function ', visitor.num_function_types),
                         (' tuple ', visitor.num_tuple_types),
                         (' TypeVar ', visitor.num_typevar_types),
                         (' complex ', visitor.num_complex_types),
                         (' any ', visitor.num_any_types)]:
        print(label, count)
def on_file(self, tree: MypyFile, type_map: Dict[Expression, Type],
            options: Options) -> None:
    """Build the per-file XML precision report and validate it."""
    self.last_xml = None
    path = os.path.relpath(tree.path)
    # Skip special modules, files outside the tree, and bundled stubs.
    if stats.is_special_module(path):
        return
    if path.startswith('..'):
        return
    if 'stubs' in path.split('/'):
        return
    visitor = stats.StatisticsVisitor(inferred=True,
                                      filename=tree.fullname(),
                                      typemap=type_map,
                                      all_nodes=True)
    tree.accept(visitor)
    root = etree.Element('mypy-report-file', name=path, module=tree._fullname)
    doc = etree.ElementTree(root)
    file_info = FileInfo(path, tree._fullname)
    # tokenize.open() honors the file's PEP 263 encoding declaration.
    with tokenize.open(path) as input_file:
        for lineno, line_text in enumerate(input_file, 1):
            status = visitor.line_map.get(lineno, stats.TYPE_EMPTY)
            file_info.counts[status] += 1
            etree.SubElement(root, 'line',
                             number=str(lineno),
                             precision=stats.precision_names[status],
                             content=line_text.rstrip('\n').translate(self.control_fixer),
                             any_info=self._get_any_info_for_line(visitor, lineno))
    # Assumes a layout similar to what XmlReporter uses.
    xslt_path = os.path.relpath('mypy-html.xslt', path)
    transform_pi = etree.ProcessingInstruction(
        'xml-stylesheet', 'type="text/xsl" href="%s"' % pathname2url(xslt_path))
    root.addprevious(transform_pi)
    self.schema.assertValid(doc)
    self.last_xml = doc
    self.files.append(file_info)
def visit_mypy_file(self, o: MypyFile) -> None:
    """Visit a module within file scope, reporting errors against its path."""
    self.errors.set_file(o.path, o.fullname(), scope=self.scope)
    self.scope.enter_file(o.fullname())
    super().visit_mypy_file(o)
    self.scope.leave()
def visit_mypy_file(self, f: MypyFile) -> None:
    """Track whether we are inside the 'typing' or 'builtins' module."""
    self.is_typing = f.fullname() in ('typing', 'builtins')
    super().visit_mypy_file(f)
def visit_mypy_file(self, o: MypyFile) -> None:
    """Collect dependencies for a module within its file scope."""
    self.scope.enter_file(o.fullname())
    self.is_package_init_file = o.is_package_init_file()
    # Aliases used at module level add deps on the module target itself.
    self.add_type_alias_deps(self.scope.current_target())
    super().visit_mypy_file(o)
    self.scope.leave()
def visit_mypy_file(self, o: MypyFile) -> None:
    """Visit a module inside its own file scope."""
    self.enter_file_scope(o.fullname())
    super().visit_mypy_file(o)
    self.leave_scope()
def get_additional_deps(self, file: MypyFile) -> List[Tuple[int, str, int]]:
    """Make 'gino.api' depend on 'gino.crud' (priority 10)."""
    if file.fullname() != 'gino.api':
        return []
    return [(10, 'gino.crud', -1)]
def visit_mypy_file(self, o: MypyFile) -> None:
    """Record the current module node and id before visiting its contents."""
    self.cur_mod_node = o
    self.cur_mod_id = o.fullname()
    super().visit_mypy_file(o)
def get_additional_deps(self, file: MypyFile) -> List[Tuple[int, str, int]]:
    """Make the __main__ module depend on 'err' (priority 10)."""
    if file.fullname() != '__main__':
        return []
    return [(10, 'err', -1)]
def visit_mypy_file(self, o: MypyFile) -> None:
    """Visit a module within its file scope, noting package __init__ status."""
    self.scope.enter_file(o.fullname())
    self.is_package_init_file = o.is_package_init_file()
    super().visit_mypy_file(o)
    self.scope.leave()