def _set_classdef_environment(self, node: astroid.ClassDef) -> None:
    """Set up the type environment of a ClassDef node.

    Registers fresh type variables for the class's instance attributes and
    class-body names, and records the class's bases and MRO in the type store.
    """
    env = Environment()
    node.type_environment = env
    class_entry = self.type_store.classes[node.name]

    # Instance attributes: a fresh type variable per attribute, mirrored into
    # the type store as an 'attribute' entry for this class.
    for attr_name in node.instance_attrs:
        tvar = self.type_constraints.fresh_tvar(node.instance_attrs[attr_name][0])
        env.locals[attr_name] = tvar
        class_entry[attr_name] = [(tvar, 'attribute')]

    # Class-body names: dunder metadata is simply str; everything else gets
    # a fresh type variable.
    for local_name in node.locals:
        if local_name in ['__module__', '__qualname__']:
            env.locals[local_name] = str
        else:
            env.locals[local_name] = self.type_constraints.fresh_tvar(node.locals[local_name][0])

    class_entry['__bases'] = [_node_to_type(base) for base in node.bases]
    try:
        class_entry['__mro'] = [cls.name for cls in node.mro()]
    except astroid.exceptions.DuplicateBasesError:
        # No consistent MRO exists for a broken hierarchy; fall back to the
        # class itself.
        class_entry['__mro'] = [node.name]
def _is_enum_subclass(cls: astroid.ClassDef) -> bool:
    """Return whether cls is a subclass of an Enum."""
    try:
        # Scan the full ancestry: an Enum base counts only when its defining
        # module is actually named "enum".
        for klass in cls.mro():
            if klass.name in ENUM_BASE_NAMES and getattr(klass.root(), "name", None) == "enum":
                return True
    except MroError:
        # Inconsistent hierarchy -- cannot determine ancestry.
        return False
    return False
def unimplemented_abstract_methods(
    node: astroid.ClassDef, is_abstract_cb: astroid.FunctionDef = None
) -> Dict[str, astroid.node_classes.NodeNG]:
    """
    Get the unimplemented abstract methods for the given *node*.

    A method can be considered abstract if the callback *is_abstract_cb*
    returns a ``True`` value. The check defaults to verifying that a method
    is decorated with abstract methods.
    The function will work only for new-style classes. For old-style
    classes, it will simply return an empty dictionary.
    For the rest of them, it will return a dictionary of abstract method
    names and their inferred objects.
    """
    # NOTE(review): the annotation on is_abstract_cb looks wrong -- the
    # default below is a functools.partial (a callable), not a FunctionDef
    # node; confirm before tightening.
    if is_abstract_cb is None:
        is_abstract_cb = partial(decorated_with, qnames=ABC_METHODS)
    visited = {}  # type: Dict[str, astroid.node_classes.NodeNG]
    try:
        mro = reversed(node.mro())
    except NotImplementedError:
        # Old style class, it will not have a mro.
        return {}
    except astroid.ResolveError:
        # Probably inconsistent hierarchy, don't try
        # to figure this out here.
        return {}
    # Walk the MRO from the most remote ancestor toward the class itself,
    # so definitions in more derived classes overwrite (or remove) the
    # entries recorded for their ancestors.
    for ancestor in mro:
        for obj in ancestor.values():
            inferred = obj
            if isinstance(obj, astroid.AssignName):
                inferred = safe_infer(obj)
                if not inferred:
                    # Might be an abstract function,
                    # but since we don't have enough information
                    # in order to take this decision, we're taking
                    # the *safe* decision instead.
                    if obj.name in visited:
                        del visited[obj.name]
                    continue
                if not isinstance(inferred, astroid.FunctionDef):
                    # The name was rebound to something that is not a
                    # function, so it can no longer be an abstract method.
                    if obj.name in visited:
                        del visited[obj.name]
            if isinstance(inferred, astroid.FunctionDef):
                # It's critical to use the original name,
                # since after inferring, an object can be something
                # else than expected, as in the case of the
                # following assignment.
                #
                # class A:
                #     def keys(self): pass
                #     __iter__ = keys
                abstract = is_abstract_cb(inferred)
                if abstract:
                    visited[obj.name] = inferred
                elif not abstract and obj.name in visited:
                    # A concrete override in a more derived class implements
                    # the previously-recorded abstract method.
                    del visited[obj.name]
    return visited
def _set_classdef_environment(self, node: astroid.ClassDef) -> None:
    """Method to set environment of a ClassDef node."""
    # Fresh environment for names defined in this class body.
    node.type_environment = Environment()
    # Instance attributes: register a fresh type variable per attribute and
    # mirror it into the type store under this class as an 'attribute' entry.
    for name in node.instance_attrs:
        node.type_environment.locals[name] = self.type_constraints.fresh_tvar(node.instance_attrs[name][0])
        self.type_store.classes[node.name][name] = [(node.type_environment.locals[name], 'attribute')]
    # Class-body names: dunder metadata is always a str; everything else gets
    # a fresh type variable.
    for name in node.locals:
        if name in ['__module__', '__qualname__']:
            node.type_environment.locals[name] = str
        else:
            node.type_environment.locals[name] = self.type_constraints.fresh_tvar(node.locals[name][0])
    # Record base classes and the method resolution order in the type store.
    self.type_store.classes[node.name]['__bases'] = [_node_to_type(base) for base in node.bases]
    try:
        self.type_store.classes[node.name]['__mro'] = [cls.name for cls in node.mro()]
    except astroid.exceptions.DuplicateBasesError:
        # No consistent MRO exists for a broken hierarchy; fall back to just
        # the class itself.
        self.type_store.classes[node.name]['__mro'] = [node.name]