def instance_has_bool(class_def: nodes.ClassDef) -> bool:
    """Return True if the class defines or inherits a ``__bool__`` attribute."""
    try:
        class_def.getattr("__bool__")
    except astroid.AttributeInferenceError:
        # No __bool__ anywhere in the MRO.
        return False
    return True
def _count_methods_in_class(node: nodes.ClassDef) -> int: all_methods = sum(1 for method in node.methods() if not method.name.startswith("_")) # Special methods count towards the number of public methods, # but don't count towards there being too many methods. for method in node.mymethods(): if SPECIAL_OBJ.search(method.name) and method.name != "__init__": all_methods += 1 return all_methods
def visit_classdef(self, node: nodes.ClassDef) -> None:
    """Called when a ClassDef node is visited."""
    for ancestor in node.ancestors():
        # Run the configured matchers against every ancestor's name.
        relevant = (
            matcher
            for matcher in self._class_matchers
            if ancestor.name == matcher.base_class
        )
        for matcher in relevant:
            self._visit_class_functions(node, matcher.matches)
def _get_parents_iter(
    node: nodes.ClassDef, ignored_parents: FrozenSet[str]
) -> Iterator[nodes.ClassDef]:
    r"""Yield parents of ``node``, excluding ancestors of ``ignored_parents``.

    If we have the following inheritance diagram:

             F
            /
        D  E
         \/
      B  C
       \/
       A      # class A(B, C): ...

    And ``ignored_parents`` is ``{"E"}``, then this function yields
    ``B``, ``C`` and ``D`` -- both ``E`` and its ancestors are excluded.
    """
    seen: Set[nodes.ClassDef] = set()
    stack = cast(List[nodes.ClassDef], list(node.ancestors(recurs=False)))
    while stack:
        parent = stack.pop()
        if parent.qname() in ignored_parents:
            continue
        if parent in seen:
            continue
        # ``seen`` both eliminates duplicates (legitimate with diamond
        # inheritance) and guards against cycles (not normally possible,
        # but potential due to inference), guaranteeing termination.
        yield parent
        seen.add(parent)
        stack.extend(parent.ancestors(recurs=False))
def _visit_class_functions(self, node: nodes.ClassDef, matches: list[TypeHintMatch]) -> None: for match in matches: for function_node in node.mymethods(): function_name: str | None = function_node.name if match.function_name == function_name: self._check_function(function_node, match)
def visit_classdef(self, node: nodes.ClassDef) -> None:
    """Emit ``eq-without-hash`` when a class defines ``__eq__`` but no ``__hash__``."""
    defined_names = set(node.locals)
    defined_names.update(meth.name for meth in node.mymethods())
    if "__eq__" in defined_names and "__hash__" not in defined_names:
        self.add_message("eq-without-hash", node=node, confidence=interfaces.HIGH)
def leave_classdef(self, node: nodes.ClassDef) -> None:
    """check number of public methods"""
    own_public = sum(
        1 for meth in node.mymethods() if not meth.name.startswith("_")
    )
    # Only methods defined in the current class are counted for the
    # "too many" check, since the user might not have control over the
    # ancestors.  This avoids false positives for classes such as
    # unittest.TestCase, which provides a lot of assert methods: it would
    # not make sense to warn when the user subclasses TestCase to add
    # his own tests.
    if own_public > self.config.max_public_methods:
        self.add_message(
            "too-many-public-methods",
            node=node,
            args=(own_public, self.config.max_public_methods),
        )
    # Stop here if the class is excluded via configuration.
    if node.type == "class" and self._exclude_too_few_public_methods:
        for ancestor in node.ancestors():
            if any(
                pattern.match(ancestor.qname())
                for pattern in self._exclude_too_few_public_methods
            ):
                return
    # Exceptions, metaclasses, interfaces and other exempt classes are
    # not subject to the minimum-method count.
    if node.type != "class" or _is_exempt_from_public_methods(node):
        return
    # The "too few" check counts methods defined by ancestors as well as
    # by the current class.
    inherited_public = _count_methods_in_class(node)
    if inherited_public < self.config.min_public_methods:
        self.add_message(
            "too-few-public-methods",
            node=node,
            args=(inherited_public, self.config.min_public_methods),
        )
def get_methods(self, node: nodes.ClassDef) -> list[nodes.FunctionDef]:
    """Return visible methods."""

    def _is_visible_method(member) -> bool:
        # Plain functions only: properties (astroid Property objects and
        # @property-decorated functions) are displayed as attributes instead.
        return (
            isinstance(member, nodes.FunctionDef)
            and not isinstance(member, astroid.objects.Property)
            and not decorated_with_property(member)
            and self.show_attr(member.name)
        )

    visible = filter(_is_visible_method, node.values())
    return sorted(visible, key=lambda meth: meth.name)
def visit_classdef(self, node: nodes.ClassDef) -> None:
    """Visit an astroid.Class node.

    * set the locals_type and instance_attrs_type mappings
    * set the implements list and build it
    * optionally tag the node with a unique id
    """
    # Already processed (the same class can be reached via several paths).
    if hasattr(node, "locals_type"):
        return
    node.locals_type = collections.defaultdict(list)
    if self.tag:
        node.uid = self.generate_id()
    # resolve ancestors: register this class as a specialization of each
    # direct base class
    for baseobj in node.ancestors(recurs=False):
        specializations = getattr(baseobj, "specializations", [])
        specializations.append(node)
        baseobj.specializations = specializations
    # resolve instance attributes.  Snapshot the values with tuple():
    # handle_assignattr_type() can add entries to instance_attrs through
    # inference, and mutating the dict while iterating its view raises
    # RuntimeError (the sibling pyreverse visitor already does this).
    node.instance_attrs_type = collections.defaultdict(list)
    for assignattrs in tuple(node.instance_attrs.values()):
        for assignattr in assignattrs:
            if not isinstance(assignattr, nodes.Unknown):
                self.handle_assignattr_type(assignattr, node)
    # resolve implemented interface
    try:
        node.implements = list(interfaces(node, self.inherited_interfaces))
    except astroid.InferenceError:
        node.implements = []
def apply_type_shim(cls: ClassDef, _context: Any = None) -> Iterator[ClassDef]:
    """
    Morphs model fields to representative type
    """
    shimmed: List[ClassDef] = [cls]
    # Use the type inference standard; materialize the whole inference
    # result before extending so a failure leaves the list untouched.
    try:
        inferred = list(cls.getattr("field_type")[0].infer())
    except AstroidError:
        inferred = []
    shimmed.extend(inferred)
    return iter(shimmed)
def visit_classdef(self, ast: ast_node.ClassDef):
    """Write a UML class body for *ast*, then its inheritance edges."""
    self.write("class %s {" % ast.name)
    self.indent += 1
    # Emit only the first AssignAttr found for each instance attribute.
    for attr_assignments in ast.instance_attrs.values():
        first_assign = next(
            (a for a in attr_assignments if isinstance(a, ast_node.AssignAttr)),
            None,
        )
        if first_assign is not None:
            first_assign.accept(self)
    for member in ast.values():
        member.accept(self)
    self.indent -= 1
    self.write("}")
    # Draw an edge to each direct ancestor that lives in a target module.
    for anc in ast.ancestors(recurs=False):
        resolved = next(anc.infer())
        if not isinstance(resolved, ast_node.ClassDef):
            continue
        if is_target_module(resolved.root().name):
            self.write(
                "%s <|- %s.%s" % (ast.name, resolved.root().name, resolved.name)
            )
def _forbid_class_getitem_access(node: nodes.ClassDef) -> None: """ Disable the access to __class_getitem__ method for the node in parameters """ def full_raiser(origin_func, attr, *args, **kwargs): """ Raises an AttributeInferenceError in case of access to __class_getitem__ method. Otherwise just call origin_func. """ if attr == "__class_getitem__": raise AttributeInferenceError( "__class_getitem__ access is not allowed") return origin_func(attr, *args, **kwargs) try: node.getattr("__class_getitem__") # If we are here, then we are sure to modify object that do have __class_getitem__ method (which origin is one the # protocol defined in collections module) whereas the typing module consider it should not # We do not want __class_getitem__ to be found in the classdef partial_raiser = partial(full_raiser, node.getattr) node.getattr = partial_raiser except AttributeInferenceError: pass
def _process_class_member_and_attrs(cldef: ClassDef, pkgfiles: Set[str]) -> Set[str]: """ Determine the class attributes, methods and instance attributes defined by a class. The function will inspect and populate them from the parents while the parent is defined by a module file in the same mmpack package. This function is called recursively to populate a class. Args: cldef: the astroid node defining the class pkgfiles: set of files in the same mmpack package Returns: set of name corresponding to the class attributes and methods and instance attributes. """ syms = set() # add member and attributes from parent classes implemented in files of # the same package for base in cldef.ancestors(recurs=False): mod = base.root() if _is_module_packaged(mod, pkgfiles): syms.update(_process_class_member_and_attrs(base, pkgfiles)) # Add public class attributes for attr in cldef.locals: if (isinstance(cldef.locals[attr][-1], AssignName) and _is_public_sym(attr)): syms.add(attr) # Add public class methods and instance attributes syms.update({m.name for m in cldef.mymethods() if _is_public_sym(m.name)}) syms.update({a for a in cldef.instance_attrs if _is_public_sym(a)}) return syms
def visit_classdef(self, node: nodes.ClassDef):
    """check size of inheritance hierarchy and number of instance attributes"""
    relevant_ancestors = [
        ancestor
        for ancestor in node.ancestors()
        if ancestor.qname() not in STDLIB_CLASSES_IGNORE_ANCESTOR
    ]
    if len(relevant_ancestors) > self.config.max_parents:
        self.add_message(
            "too-many-ancestors",
            node=node,
            args=(len(relevant_ancestors), self.config.max_parents),
        )
    attr_count = len(node.instance_attrs)
    if attr_count > self.config.max_attributes:
        self.add_message(
            "too-many-instance-attributes",
            node=node,
            args=(attr_count, self.config.max_attributes),
        )
def get_attrs(self, node: nodes.ClassDef) -> list[str]:
    """Return visible attributes, possibly with class name."""
    properties = [
        (name, member)
        for name, member in node.items()
        if isinstance(member, nodes.FunctionDef) and decorated_with_property(member)
    ]
    candidates = (
        list(node.instance_attrs_type.items())
        + list(node.locals_type.items())
        + properties
    )
    visible = []
    for attr_name, associated_nodes in candidates:
        if not self.show_attr(attr_name):
            continue
        type_names = self.class_names(associated_nodes)
        if type_names:
            attr_name = f"{attr_name} : {', '.join(type_names)}"
        visible.append(attr_name)
    return sorted(visible)
def visit_classdef(self, node: nodes.ClassDef) -> None:
    """Visit an astroid.Class node.

    * set the locals_type and instance_attrs_type mappings
    * set the implements list and build it
    * optionally tag the node with a unique id
    """
    # Already processed (the same class can be reached via several paths).
    if hasattr(node, "locals_type"):
        return
    node.locals_type = collections.defaultdict(list)
    if self.tag:
        node.uid = self.generate_id()
    # resolve ancestors: register this class as a specialization of each
    # of its direct base classes
    for baseobj in node.ancestors(recurs=False):
        specializations = getattr(baseobj, "specializations", [])
        specializations.append(node)
        baseobj.specializations = specializations
    # resolve instance attributes; tuple() snapshots the dict values because
    # handle_assignattr_type() may add entries to instance_attrs via
    # inference while we iterate
    node.instance_attrs_type = collections.defaultdict(list)
    for assignattrs in tuple(node.instance_attrs.values()):
        for assignattr in assignattrs:
            if not isinstance(assignattr, nodes.Unknown):
                self.handle_assignattr_type(assignattr, node)
    # resolve implemented interface
    try:
        ifaces = interfaces(node)
        if ifaces is not None:
            node.implements = list(ifaces)
            if node.implements:
                # TODO: 3.0: Remove support for __implements__
                warnings.warn(
                    "pyreverse will drop support for resolving and displaying implemented interfaces in pylint 3.0. "
                    "The implementation relies on the '__implements__' attribute proposed in PEP 245, which was rejected "
                    "in 2006.",
                    DeprecationWarning,
                )
        else:
            node.implements = []
    except astroid.InferenceError:
        node.implements = []
def base_classes_of_node(instance: nodes.ClassDef) -> List[str]:
    """Return all the classes names that a ClassDef inherit from including 'object'."""
    try:
        # The class's own name first, then the names of all ancestors.
        return [instance.name] + [x.name for x in instance.ancestors()]
    except TypeError:
        # NOTE(review): presumably ancestors() can raise TypeError on some
        # malformed/uninferable hierarchies -- confirm; fall back to just
        # the class's own name.
        return [instance.name]
def transform_model(cls: ClassDef) -> None:
    """
    Anything that uses the ModelMeta needs _meta and id.
    Also keep track of relationships and make them in the related model class.
    """
    # The base ``Model`` class itself declares no relations; only subclasses
    # are registered and scanned for relational fields.
    if cls.name != "Model":
        appname = "models"
        # Look for a nested class (e.g. ``class Meta``) carrying an
        # ``app = ...`` assignment to find which app this model belongs to.
        for mcls in cls.get_children():
            if isinstance(mcls, ClassDef):
                for attr in mcls.get_children():
                    if isinstance(attr, Assign) and attr.targets[0].name == "app":
                        appname = attr.value.value
        mname = f"{appname}.{cls.name}"
        MODELS[mname] = cls
        # Apply reverse relations that other models declared towards this
        # model before it was transformed.
        for relname, relval in FUTURE_RELATIONS.get(mname, []):
            cls.locals[relname] = relval
        for attr in cls.get_children():
            if isinstance(attr, (Assign, AnnAssign)):
                # Relation fields are created by calls such as
                # ``fields.ForeignKeyField(...)``; anything without a
                # ``.func.attrname`` chain is not such a call and is skipped.
                try:
                    attrname = attr.value.func.attrname
                except AttributeError:
                    pass
                else:
                    if attrname in [
                        "OneToOneField", "ForeignKeyField", "ManyToManyField"
                    ]:
                        tomodel = attr.value.args[0].value
                        relname = ""
                        if attr.value.keywords:
                            for keyword in attr.value.keywords:
                                if keyword.arg == "related_name":
                                    relname = keyword.value.value
                        # Default reverse accessor: lowercased model name + "s"
                        if not relname:
                            relname = cls.name.lower() + "s"
                        # Injected model attributes need to also have the relation manager
                        if attrname == "ManyToManyField":
                            relval = [
                                # attr.value.func,
                                MANAGER.ast_from_module_name(
                                    "tortoise.fields.relational").lookup(
                                        "ManyToManyFieldInstance")[1][0],
                                MANAGER.ast_from_module_name(
                                    "tortoise.fields.relational").lookup(
                                        "ManyToManyRelation")[1][0],
                            ]
                        elif attrname == "ForeignKeyField":
                            relval = [
                                MANAGER.ast_from_module_name(
                                    "tortoise.fields.relational").lookup(
                                        "ForeignKeyFieldInstance")[1][0],
                                MANAGER.ast_from_module_name(
                                    "tortoise.fields.relational").lookup(
                                        "ReverseRelation")[1][0],
                            ]
                        elif attrname == "OneToOneField":
                            relval = [
                                MANAGER.ast_from_module_name(
                                    "tortoise.fields.relational").lookup(
                                        "OneToOneFieldInstance")[1][0],
                                MANAGER.ast_from_module_name(
                                    "tortoise.fields.relational").lookup(
                                        "OneToOneRelation")[1][0],
                            ]
                        # Inject the reverse relation into the target model if
                        # it is already known, otherwise defer it until that
                        # model gets transformed.
                        if tomodel in MODELS:
                            MODELS[tomodel].locals[relname] = relval
                        else:
                            FUTURE_RELATIONS.setdefault(tomodel, []).append(
                                (relname, relval))
    # Every class using ModelMeta gets a ``_meta`` (a MetaInfo instance) and,
    # unless explicitly declared, an ``id`` attribute.
    cls.locals["_meta"] = [
        MANAGER.ast_from_module_name("tortoise.models").lookup("MetaInfo")[1]
        [0].instantiate_class()
    ]
    if "id" not in cls.locals:
        cls.locals["id"] = [nodes.ClassDef("id", None)]
def is_model(cls: ClassDef) -> bool:
    """
    Guard to apply this transform to Models only

    Returns True when the class's metaclass resolves to
    ``tortoise.models.ModelMeta``.
    """
    # Resolve the metaclass once: metaclass() triggers astroid inference,
    # which is comparatively expensive, and the original called it twice.
    metaclass = cls.metaclass()
    return bool(metaclass) and metaclass.qname() == "tortoise.models.ModelMeta"
def infer_enum_class(node: nodes.ClassDef) -> nodes.ClassDef:
    """Specific inference for enums."""
    # Walk every base name in the MRO; the whole body runs at most once
    # (note the ``break`` at the end of the loop body).
    for basename in (b for cls in node.mro() for b in cls.basenames):
        if node.root().name == "enum":
            # Skip if the class is directly from enum module.
            break
        dunder_members = {}
        target_names = set()
        for local, values in node.locals.items():
            # Only plain name assignments can be enum members.
            if any(not isinstance(value, nodes.AssignName) for value in values):
                continue
            stmt = values[0].statement(future=True)
            if isinstance(stmt, nodes.Assign):
                if isinstance(stmt.targets[0], nodes.Tuple):
                    targets = stmt.targets[0].itered()
                else:
                    targets = stmt.targets
            elif isinstance(stmt, nodes.AnnAssign):
                targets = [stmt.target]
            else:
                continue
            # Compute the source text for the mocked member's ``value``.
            inferred_return_value = None
            if stmt.value is not None:
                if isinstance(stmt.value, nodes.Const):
                    if isinstance(stmt.value.value, str):
                        # repr() so the generated source embeds a quoted string
                        inferred_return_value = repr(stmt.value.value)
                    else:
                        inferred_return_value = stmt.value.value
                else:
                    inferred_return_value = stmt.value.as_string()
            new_targets = []
            for target in targets:
                if isinstance(target, nodes.Starred):
                    continue
                target_names.add(target.name)
                # Replace all the assignments with our mocked class.
                classdef = dedent(
                    """
                class {name}({types}):
                    @property
                    def value(self):
                        return {return_value}
                    @property
                    def name(self):
                        return "{name}"
                """.format(
                        name=target.name,
                        types=", ".join(node.basenames),
                        return_value=inferred_return_value,
                    )
                )
                if "IntFlag" in basename:
                    # Alright, we need to add some additional methods.
                    # Unfortunately we still can't infer the resulting objects as
                    # Enum members, but once we'll be able to do that, the following
                    # should result in some nice symbolic execution
                    classdef += INT_FLAG_ADDITION_METHODS.format(name=target.name)
                fake = AstroidBuilder(
                    AstroidManager(), apply_transforms=False
                ).string_build(classdef)[target.name]
                fake.parent = target.parent
                # Methods defined on the enum class are reachable on members.
                for method in node.mymethods():
                    fake.locals[method.name] = [method]
                new_targets.append(fake.instantiate_class())
                dunder_members[local] = fake
            node.locals[local] = new_targets
        # Build the __members__ mapping (member name -> mocked member class).
        members = nodes.Dict(parent=node)
        members.postinit(
            [
                (nodes.Const(k, parent=members), nodes.Name(v.name, parent=members))
                for k, v in dunder_members.items()
            ]
        )
        node.locals["__members__"] = [members]
        # The enum.Enum class itself defines two @DynamicClassAttribute data-descriptors
        # "name" and "value" (which we override in the mocked class for each enum member
        # above). When dealing with inference of an arbitrary instance of the enum
        # class, e.g. in a method defined in the class body like:
        #     class SomeEnum(enum.Enum):
        #         def method(self):
        #             self.name  # <- here
        # In the absence of an enum member called "name" or "value", these attributes
        # should resolve to the descriptor on that particular instance, i.e. enum member.
        # For "value", we have no idea what that should be, but for "name", we at least
        # know that it should be a string, so infer that as a guess.
        if "name" not in target_names:
            code = dedent(
                """
            @property
            def name(self):
                return ''
            """
            )
            name_dynamicclassattr = AstroidBuilder(AstroidManager()).string_build(
                code
            )["name"]
            node.locals["name"] = [name_dynamicclassattr]
        break
    return node
def is_model_field(cls: ClassDef) -> bool:
    """
    Guard to apply this transform to Model Fields only
    """
    field_base = "tortoise.fields.base.Field"
    # Subclasses of Field qualify, the Field base class itself does not.
    is_field_subclass = bool(cls.is_subtype_of(field_base))
    return is_field_subclass and cls.qname() != field_base
def visit_classdef(self, node: nodes.ClassDef) -> None:
    """Check naming of the class itself and of the attributes it introduces."""
    self._check_name("class", node.name, node)
    for attr_name, assignments in node.instance_attrs.items():
        # Only attributes not already defined in an ancestor are checked.
        if not any(node.instance_attr_ancestors(attr_name)):
            self._check_name("attr", attr_name, assignments[0])
def transform(cls: nodes.ClassDef):
    """
    Astroid (used by pylint) calls this function on each class definition it discovers.
    cls is an Astroid AST representation of that class.

    Our purpose here is to extract the schema dict from API model classes
    so that we can inform pylint about all of the attributes on those models.
    We do this by injecting attributes on the class for each property in the schema.
    """
    # This is a class which defines attributes in "schema" variable using json schema.
    # Those attributes are then assigned during run time inside the constructor

    # Get the value node for the "schema =" assignment
    schema_dict_node = next(cls.igetattr("schema"))

    extra_schema_properties = {}

    # If the "schema =" assignment's value node is not a simple type (like a dictionary),
    # then pylint cannot infer exactly what it does. Most of the time, this is actually
    # a function call to copy the schema from another class. So, let's find the dictionary.
    if schema_dict_node is astroid.Uninferable:
        # the assignment probably looks like this:
        # schema = copy.deepcopy(ActionAPI.schema)

        # so far we only have the value, but we need the actual assignment
        assigns = [n for n in cls.get_children() if isinstance(n, nodes.Assign)]
        schema_assign_name_node = cls.local_attr("schema")[0]
        schema_assign_node = next(
            assign for assign in assigns if assign.targets[0] == schema_assign_name_node
        )
        assigns.remove(schema_assign_node)

        # We only care about "schema = copy.deepcopy(...)"
        schema_dict_node = infer_copy_deepcopy(schema_assign_node.value)
        if not schema_dict_node:
            # This is not an API model class, as it doesn't have
            # something we can resolve to a dictionary.
            return

        # OK, now we need to look for any properties that dynamically modify
        # the dictionary that was just copied from somewhere else.
        # See the note below for why we only care about "properties" here.
        for assign_node in assigns:
            # we're looking for assignments like this:
            # schema["properties"]["ttl"] = {...}
            target = assign_node.targets[0]
            try:
                if (
                    isinstance(target, nodes.Subscript)
                    and target.value.value.name == "schema"
                    and target.value.slice.value.value == "properties"
                ):
                    property_name_node = target.slice.value
                else:
                    # not schema["properties"]
                    continue
            except AttributeError:
                continue

            # schema["properties"]["execution"] = copy.deepcopy(ActionExecutionAPI.schema)
            inferred_value = infer_copy_deepcopy(assign_node.value)

            extra_schema_properties[property_name_node] = (
                inferred_value if inferred_value else assign_node.value
            )

    if not isinstance(schema_dict_node, nodes.Dict):
        # Not a class we are interested in (like BaseAPI)
        return

    # We only care about "properties" in the schema because that's the only part of the schema
    # that gets translated into dynamic attributes on the model API class.
    properties_dict_node = None
    for key_node, value_node in schema_dict_node.items:
        if key_node.value == "properties":
            properties_dict_node = value_node
            break

    if not properties_dict_node and not extra_schema_properties:
        # Not a class we can do anything with
        return

    # Hooray! We have the schema properties dict now, so we can start processing
    # each property and add an attribute for each one to the API model class node.
    # NOTE(review): if properties_dict_node is None while extra_schema_properties
    # is non-empty, the next line raises AttributeError -- presumably that
    # combination never occurs in practice; confirm.
    for property_name_node, property_data_node in properties_dict_node.items + list(
        extra_schema_properties.items()
    ):
        property_name = property_name_node.value.replace(
            "-", "_"
        )  # Note: We do the same in Python code

        # Despite the processing above to extract the schema properties dictionary
        # each property in the dictionary might also reference other variables,
        # so we still need to resolve these to figure out each property's type.

        # an indirect reference to copy.deepcopy() as in:
        #     REQUIRED_ATTR_SCHEMAS = {"action": copy.deepcopy(ActionAPI.schema)}
        #     schema = {"properties": {"action": REQUIRED_ATTR_SCHEMAS["action"]}}
        if isinstance(property_data_node, nodes.Subscript):
            var_name = property_data_node.value.name
            subscript = property_data_node.slice.value.value

            # lookup var by name (assume its at module level)
            var_node = next(cls.root().igetattr(var_name))

            # assume it is a dict at this point
            data_node = None
            for key_node, value_node in var_node.items:
                if key_node.value == subscript:
                    # infer will resolve a Dict
                    data_node = next(value_node.infer())
                    if data_node is astroid.Uninferable:
                        data_node = infer_copy_deepcopy(value_node)
                    break
            if data_node:
                property_data_node = data_node

        if not isinstance(property_data_node, nodes.Dict):
            # if infer_copy_deepcopy already ran, we may need to resolve the dict
            data_node = next(property_data_node.infer())
            if data_node is not astroid.Uninferable:
                property_data_node = data_node

        property_type_node = None
        if isinstance(property_data_node, nodes.Dict):
            # We have a property schema, but we only care about the property's type.
            for property_key_node, property_value_node in property_data_node.items:
                if property_key_node.value == "type":
                    property_type_node = next(property_value_node.infer())
                    break

        if property_type_node is None and isinstance(property_data_node, nodes.Attribute):
            # reference schema from another file like this:
            #     from ... import TriggerAPI
            #     schema = {"properties": {"trigger": TriggerAPI.schema}}
            # We only pull a schema from another file when it is an "object" (a dict).
            # So, we do not need to do any difficult cross-file processing.
            property_type = "object"
        elif property_type_node is None:
            property_type = None
        elif isinstance(property_type_node, nodes.Const):
            property_type = property_type_node.value
        elif isinstance(property_type_node, (nodes.List, nodes.Tuple)):
            # Hack for attributes with multiple types (e.g. string, null)
            property_type = property_type_node.elts[
                0
            ].value  # elts has "elements" in the list/tuple
        else:
            # We should only hit this if someone has used a different approach
            # for dynamically constructing the property's schema.
            # Expose the AST at this point to facilitate handling that approach.
            raise Exception(property_type_node.repr_tree())

        # Hooray! We've got a property's name at this point.
        # And we have the property's type, if that type was defined in the schema.
        # Now, we can construct the AST node that we'll add to the API model class.
        if property_type == "object":
            node = nodes.Dict()
        elif property_type == "array":
            node = nodes.List()
        elif property_type == "integer":
            node = scoped_nodes.builtin_lookup("int")[1][0]
        elif property_type == "number":
            node = scoped_nodes.builtin_lookup("float")[1][0]
        elif property_type == "string":
            node = scoped_nodes.builtin_lookup("str")[1][0]
        elif property_type == "boolean":
            node = scoped_nodes.builtin_lookup("bool")[1][0]
        elif property_type == "null":
            node = scoped_nodes.builtin_lookup("None")[1][0]
        else:
            # Unknown type
            node = astroid.ClassDef(property_name, None)

        # Create a "property = node" assign node
        assign_node = nodes.Assign(parent=cls)
        assign_name_node = nodes.AssignName(property_name, parent=assign_node)
        assign_node.postinit(targets=[assign_name_node], value=node)

        # Finally, add the property node as an attribute on the class.
        cls.locals[property_name] = [assign_name_node]