def test_instance_special_attributes(self):
    """Special attribute lookup behaves the same on several instance kinds.

    Class-only attributes (__mro__, __bases__, __name__) must not resolve
    on instances, while __dict__ and __doc__ resolve to exactly one value.
    """
    instances = (Instance(MODULE['YO']), nodes.List(), nodes.Const(1))
    for inst in instances:
        for class_only_attr in ('__mro__', '__bases__', '__name__'):
            self.assertRaises(NotFoundError, inst.getattr, class_only_attr)
        self.assertEqual(len(inst.getattr('__dict__')), 1)
        self.assertEqual(len(inst.getattr('__doc__')), 1)
def _inf_fn(node, context=None):
    """Infer a Django manager method call.

    Yields a List node wrapping the model instance for list-returning
    manager methods, otherwise yields the model instance itself.
    """
    already_tagged = hasattr(node, CODEWATCH_MODEL_INFERENCE_KEY)
    if already_tagged:
        # We already inferred the manager call and tagged the result, e.g.:
        #   users = DjangoUser.objects.all()
        #   users.first()
        # Here `users.first()` is the node being inferred; astroid has
        # already called `infer` on `users`, which we previously tagged.
        model_instance = getattr(node, CODEWATCH_MODEL_INFERENCE_KEY)
    else:
        # Bare manager call, e.g. DjangoUser.objects.all()
        model_class = node.func.expr.expr.inferred()[0]
        model_instance = model_class.instantiate_class()

    if node.func.attrname not in DJANGO_MANAGER_METHODS_LIST:
        # Non-list manager method: infer the model instance directly.
        return iter((model_instance,))

    # List-returning manager method: infer a `List` node with a single
    # model-instance element, tagged so chained calls are recognized.
    result_list = nodes.List(ctx=LoadContext)
    result_list.elts = [model_instance]
    setattr(result_list, CODEWATCH_MODEL_INFERENCE_KEY, model_instance)
    return iter((result_list,))
def visit_list(self, node, parent):
    """Visit a List node, rebuilding it as a fresh astroid List."""
    result = new.List()
    _lineno_parent(node, result, parent)
    children = []
    for element in node.elts:
        children.append(self.visit(element, result))
    result.elts = children
    result.set_line_info(result.last_child())
    return result
def visit_list(self, node, parent, assign_ctx=None):
    """Build and return a fresh astroid List for the given ast List node."""
    listnode = new.List()
    _lineno_parent(node, listnode, parent)
    # Propagate the assignment context down to every element.
    listnode.elts = [
        self.visit(element, listnode, assign_ctx)
        for element in node.elts
    ]
    return listnode
def visit_list(self, node, parent):
    """Visit a List node by returning a fresh instance of it."""
    ctx = self._get_context(node)
    newnode = nodes.List(
        ctx=ctx,
        lineno=node.lineno,
        col_offset=node.col_offset,
        parent=parent,
    )
    elements = [self.visit(element, newnode) for element in node.elts]
    newnode.postinit(elements)
    return newnode
def starred_assigned_stmts(self, node=None, context=None, asspath=None):
    """Infer the values a Starred node packs in an assignment.

    Yields a List node containing the leftover right-hand-side elements,
    or util.YES when the right-hand side cannot be inferred precisely.

    Fix: the original iterated with ``for index, node in enumerate(...)``
    and ``for node in lhs_elts``, silently shadowing the ``node``
    parameter; the loop variables are renamed to ``left_node`` and
    ``right_node``.
    """
    stmt = self.statement()
    if not isinstance(stmt, (nodes.Assign, nodes.For)):
        raise exceptions.InferenceError()

    if isinstance(stmt, nodes.Assign):
        value = stmt.value
        lhs = stmt.targets[0]

        if sum(1 for _ in lhs.nodes_of_class(nodes.Starred)) > 1:
            # Too many starred arguments in the expression.
            raise exceptions.InferenceError()

        if context is None:
            context = contextmod.InferenceContext()
        try:
            rhs = next(value.infer(context))
        except exceptions.InferenceError:
            yield util.YES
            return
        if rhs is util.YES or not hasattr(rhs, 'elts'):
            # Not interested in inferred values without elts.
            yield util.YES
            return

        elts = collections.deque(rhs.elts[:])
        if len(lhs.elts) > len(rhs.elts):
            # a, *b, c = (1, 2)
            raise exceptions.InferenceError()

        # Unpack iteratively the values from the rhs of the assignment,
        # until we find the starred node. What will remain will
        # be the list of values which the Starred node will represent.
        # This is done in two steps, from left to right to remove
        # anything before the starred node and from right to left
        # to remove anything after the starred node.
        for index, left_node in enumerate(lhs.elts):
            if not isinstance(left_node, nodes.Starred):
                elts.popleft()
                continue
            lhs_elts = collections.deque(reversed(lhs.elts[index:]))
            for right_node in lhs_elts:
                if not isinstance(right_node, nodes.Starred):
                    elts.pop()
                    continue
                # We're done
                packed = nodes.List()
                packed.elts = elts
                packed.parent = self
                yield packed
                break
def transform(cls):
    """Attach attributes declared in a class's json "schema" dict.

    The class defines attributes in a "schema" variable using json schema;
    those attributes are assigned at run time inside the constructor, so we
    inject an astroid node for each schema property into ``cls.locals``.
    """
    if cls.name in CLASS_NAME_BLACKLIST:
        return
    if not (cls.name.endswith('API') or 'schema' in cls.locals):
        return

    # Import the real class so we can read its schema dict directly.
    fqdn = cls.qname()
    module_name, class_name = fqdn.rsplit('.', 1)
    module = __import__(module_name, fromlist=[class_name])
    actual_cls = getattr(module, class_name)

    schema = actual_cls.schema
    if not isinstance(schema, dict):
        # Not a class we are interested in
        return

    # json-schema type -> builtin name handled via builtin_lookup.
    builtin_by_type = {
        'integer': 'int',
        'number': 'float',
        'string': 'str',
        'boolean': 'bool',
        'null': 'None',
    }

    for property_name, property_data in six.iteritems(schema.get('properties', {})):
        property_name = property_name.replace('-', '_')  # Note: We do the same in Python code
        property_type = property_data.get('type', None)

        if isinstance(property_type, (list, tuple)):
            # Hack for attributes with multiple types (e.g. string, null)
            property_type = property_type[0]

        if property_type == 'object':
            node = nodes.Dict()
        elif property_type == 'array':
            node = nodes.List()
        elif property_type in builtin_by_type:
            node = scoped_nodes.builtin_lookup(builtin_by_type[property_type])[1][0]
        else:
            # Unknown type
            node = scoped_nodes.Class(property_name, None)

        cls.locals[property_name] = [node]
def transform(cls):
    """Inject schema-declared attributes into an API model class node.

    Classes matching the "API"/"schema" heuristic declare their attributes
    in a "schema" json-schema dict and assign them at run time in the
    constructor; expose each property to astroid via ``cls.locals``.
    """
    if cls.name in CLASS_NAME_BLACKLIST:
        return
    if not cls.name.endswith("API") and "schema" not in cls.locals:
        return

    # Resolve the actual Python class to pull its schema dict at run time.
    fqdn = cls.qname()
    module_name, class_name = fqdn.rsplit(".", 1)
    module = __import__(module_name, fromlist=[class_name])
    schema = getattr(module, class_name).schema
    if not isinstance(schema, dict):
        # Not a class we are interested in
        return

    # Map simple json-schema scalar types to builtin lookups.
    scalar_builtins = {
        "integer": "int",
        "number": "float",
        "string": "str",
        "boolean": "bool",
        "null": "None",
    }

    for property_name, property_data in six.iteritems(schema.get("properties", {})):
        property_name = property_name.replace("-", "_")  # Note: We do the same in Python code
        property_type = property_data.get("type", None)

        if isinstance(property_type, (list, tuple)):
            # Hack for attributes with multiple types (e.g. string, null)
            property_type = property_type[0]

        if property_type == "object":
            node = nodes.Dict()
        elif property_type == "array":
            node = nodes.List()
        elif property_type in scalar_builtins:
            node = scoped_nodes.builtin_lookup(scalar_builtins[property_type])[1][0]
        else:
            # Unknown type
            node = astroid.ClassDef(property_name, None)

        cls.locals[property_name] = [node]
def _getattr(self, name, *args, **kw):
    """Module getattr fallback that imports the real module on a miss.

    Delegates to the original ``Module_getattr``; on NotFoundError it
    imports the actual module, fetches the attribute, wraps it in an
    astroid node (container/const or a class AST) and caches it in
    ``self.locals`` so subsequent lookups succeed.

    Fix: replaced the Python-2-only ``except NotFoundError, e:`` clause
    with ``except NotFoundError as e:``, which is valid on Python 2.6+
    and Python 3 (PEP 3110).
    """
    try:
        return Module_getattr(self, name, *args, **kw)
    except NotFoundError as e:
        if self.name.startswith('erp5.'):
            raise
        real_module = __import__(self.name, fromlist=[self.name], level=0)
        try:
            attr = getattr(real_module, name)
        except AttributeError:
            raise e
        # REQUEST object (or any object non acquisition-wrapped)
        if (isinstance(attr, str) and
                attr == '<Special Object Used to Force Acquisition>'):
            raise e
        try:
            origin_module_name = attr.__module__
        except AttributeError:
            # No __module__: a plain value; wrap it in the matching node.
            from astroid import nodes
            if isinstance(attr, dict):
                ast = nodes.Dict(attr)
            elif isinstance(attr, list):
                ast = nodes.List(attr)
            elif isinstance(attr, tuple):
                ast = nodes.Tuple(attr)
            elif isinstance(attr, set):
                ast = nodes.Set(attr)
            else:
                try:
                    ast = nodes.Const(attr)
                except Exception:
                    raise e
        else:
            if self.name == origin_module_name:
                # Attribute genuinely lives here; the miss is real.
                raise
            # ast_from_class() actually works for any attribute of a Module
            try:
                ast = MANAGER.ast_from_class(attr)
            except AstroidBuildingException:
                raise e
        # Cache so the next lookup hits self.locals directly.
        self.locals[name] = [ast]
        return [ast]
def starred_assigned_stmts(self, node=None, context=None, asspath=None):
    """
    Infer the values a Starred node stands for in an assignment.

    Arguments:
        self: nodes.Starred
        node: a node related to the current underlying Node.
        context: Inference context used for caching already inferred objects.
        asspath: A list of indices, where each index specifies what item
            to fetch from the inference results.
    """
    stmt = self.statement()
    if not isinstance(stmt, (nodes.Assign, nodes.For)):
        raise exceptions.InferenceError(
            'Statement {stmt!r} enclosing {node!r} '
            'must be an Assign or For node.',
            node=self, stmt=stmt, unknown=node, context=context)

    if isinstance(stmt, nodes.Assign):
        value = stmt.value
        lhs = stmt.targets[0]

        if sum(1 for node in lhs.nodes_of_class(nodes.Starred)) > 1:
            raise exceptions.InferenceError(
                'Too many starred arguments in the '
                ' assignment targets {lhs!r}.',
                node=self, targets=lhs, unknown=node, context=context)

        if context is None:
            context = contextmod.InferenceContext()
        try:
            rhs = next(value.infer(context))
        except exceptions.InferenceError:
            yield util.Uninferable
            return
        if rhs is util.Uninferable or not hasattr(rhs, 'elts'):
            # Not interested in inferred values without elts.
            yield util.Uninferable
            return

        elts = collections.deque(rhs.elts[:])
        if len(lhs.elts) > len(rhs.elts):
            raise exceptions.InferenceError(
                'More targets, {targets!r}, than '
                'values to unpack, {values!r}.',
                node=self, targets=lhs, values=rhs,
                unknown=node, context=context)

        # Unpack iteratively the values from the rhs of the assignment,
        # until we find the starred node. What will remain will
        # be the list of values which the Starred node will represent.
        # This is done in two steps, from left to right to remove
        # anything before the starred node and from right to left
        # to remove anything after the starred node.
        for index, left_node in enumerate(lhs.elts):
            if not isinstance(left_node, nodes.Starred):
                elts.popleft()
                continue
            lhs_elts = collections.deque(reversed(lhs.elts[index:]))
            for right_node in lhs_elts:
                if not isinstance(right_node, nodes.Starred):
                    elts.pop()
                    continue
                # We're done: whatever is left in `elts` belongs to the
                # Starred node.
                packed = nodes.List()
                packed.elts = elts
                packed.parent = self
                yield packed
                break
def starred_assigned_stmts(self, node=None, context=None, assign_path=None):
    """
    Infer the values a Starred node stands for, in assignments and for-loops.

    Arguments:
        self: nodes.Starred
        node: a node related to the current underlying Node.
        context: Inference context used for caching already inferred objects
        assign_path:
            A list of indices, where each index specifies what item to fetch from
            the inference results.
    """
    # pylint: disable=too-many-locals,too-many-branches,too-many-statements
    def _determine_starred_iteration_lookups(starred, target, lookups):
        # Determine the lookups for the rhs of the iteration: record
        # (index, length) pairs down to the Starred node, recursing into
        # nested Tuple targets.
        itered = target.itered()
        for index, element in enumerate(itered):
            if (
                isinstance(element, nodes.Starred)
                and element.value.name == starred.value.name
            ):
                lookups.append((index, len(itered)))
                break
            if isinstance(element, nodes.Tuple):
                lookups.append((index, len(element.itered())))
                _determine_starred_iteration_lookups(starred, element, lookups)

    stmt = self.statement()
    if not isinstance(stmt, (nodes.Assign, nodes.For)):
        raise exceptions.InferenceError(
            "Statement {stmt!r} enclosing {node!r} "
            "must be an Assign or For node.",
            node=self,
            stmt=stmt,
            unknown=node,
            context=context,
        )
    if context is None:
        context = contextmod.InferenceContext()

    if isinstance(stmt, nodes.Assign):
        value = stmt.value
        lhs = stmt.targets[0]

        if sum(1 for _ in lhs.nodes_of_class(nodes.Starred)) > 1:
            raise exceptions.InferenceError(
                "Too many starred arguments in the "
                " assignment targets {lhs!r}.",
                node=self,
                targets=lhs,
                unknown=node,
                context=context,
            )

        try:
            rhs = next(value.infer(context))
        except exceptions.InferenceError:
            yield util.Uninferable
            return
        if rhs is util.Uninferable or not hasattr(rhs, "itered"):
            # Only inferred values exposing `itered` can be unpacked.
            yield util.Uninferable
            return

        try:
            elts = collections.deque(rhs.itered())
        except TypeError:
            yield util.Uninferable
            return

        # Unpack iteratively the values from the rhs of the assignment,
        # until we find the starred node. What will remain will
        # be the list of values which the Starred node will represent.
        # This is done in two steps, from left to right to remove
        # anything before the starred node and from right to left
        # to remove anything after the starred node.
        for index, left_node in enumerate(lhs.elts):
            if not isinstance(left_node, nodes.Starred):
                if not elts:
                    break
                elts.popleft()
                continue
            lhs_elts = collections.deque(reversed(lhs.elts[index:]))
            for right_node in lhs_elts:
                if not isinstance(right_node, nodes.Starred):
                    if not elts:
                        break
                    elts.pop()
                    continue
                # We're done: whatever remains in `elts` is the Starred value.
                packed = nodes.List(
                    ctx=Store, parent=self, lineno=lhs.lineno, col_offset=lhs.col_offset
                )
                packed.postinit(elts=elts)
                yield packed
                break

    if isinstance(stmt, nodes.For):
        try:
            inferred_iterable = next(stmt.iter.infer(context=context))
        except exceptions.InferenceError:
            yield util.Uninferable
            return
        if inferred_iterable is util.Uninferable or not hasattr(
            inferred_iterable, "itered"
        ):
            yield util.Uninferable
            return
        try:
            itered = inferred_iterable.itered()
        except TypeError:
            yield util.Uninferable
            return

        target = stmt.target

        if not isinstance(target, nodes.Tuple):
            raise exceptions.InferenceError(
                "Could not make sense of this, the target must be a tuple",
                context=context,
            )

        lookups = []
        _determine_starred_iteration_lookups(self, target, lookups)
        if not lookups:
            raise exceptions.InferenceError(
                "Could not make sense of this, needs at least a lookup", context=context
            )

        # Make the last lookup a slice, since that what we want for a Starred node
        last_element_index, last_element_length = lookups[-1]
        is_starred_last = last_element_index == (last_element_length - 1)
        lookup_slice = slice(
            last_element_index,
            None if is_starred_last else (last_element_length - last_element_index),
        )
        lookups[-1] = lookup_slice

        for element in itered:
            # We probably want to infer the potential values *for each* element in an
            # iterable, but we can't infer a list of all values, when only a list of
            # step values are expected:
            #
            # for a, *b in [...]:
            #     b
            #
            # *b* should now point to just the elements at that particular iteration step,
            # which astroid can't know about.

            found_element = None
            for lookup in lookups:
                if not hasattr(element, "itered"):
                    break
                if not isinstance(lookup, slice):
                    # Grab just the index, not the whole length
                    lookup = lookup[0]
                try:
                    itered_inner_element = element.itered()
                    element = itered_inner_element[lookup]
                except IndexError:
                    break
                except TypeError:
                    # Most likely the itered() call failed, cannot make sense of this
                    yield util.Uninferable
                    return
            else:
                # All lookups resolved without breaking.
                found_element = element

            unpacked = nodes.List(
                ctx=Store, parent=self, lineno=self.lineno, col_offset=self.col_offset
            )
            unpacked.postinit(elts=found_element or [])
            yield unpacked
            return

        yield util.Uninferable
def list_node(draw, elt=const_node(), **kwargs):
    """Return a List node with elements drawn from elt."""
    drawn_elements = draw(hs.lists(elt, **kwargs))
    list_ast = nodes.List()
    list_ast.postinit(drawn_elements)
    return list_ast
def transform(cls: nodes.ClassDef):
    """
    Astroid (used by pylint) calls this function on each class definition
    it discovers. cls is an Astroid AST representation of that class.

    Our purpose here is to extract the schema dict from API model classes
    so that we can inform pylint about all of the attributes on those models.
    We do this by injecting attributes on the class for each property in
    the schema.
    """
    # This is a class which defines attributes in "schema" variable using json schema.
    # Those attributes are then assigned during run time inside the constructor

    # Get the value node for the "schema =" assignment
    schema_dict_node = next(cls.igetattr("schema"))

    # Holds {property_name_node: property_data_node} for properties added
    # dynamically after the schema was copied from another class.
    extra_schema_properties = {}

    # If the "schema =" assignment's value node is not a simple type (like a dictionary),
    # then pylint cannot infer exactly what it does. Most of the time, this is actually
    # a function call to copy the schema from another class. So, let's find the dictionary.
    if schema_dict_node is astroid.Uninferable:
        # the assignment probably looks like this:
        # schema = copy.deepcopy(ActionAPI.schema)

        # so far we only have the value, but we need the actual assignment
        assigns = [
            n for n in cls.get_children() if isinstance(n, nodes.Assign)
        ]
        schema_assign_name_node = cls.local_attr("schema")[0]
        schema_assign_node = next(
            assign for assign in assigns
            if assign.targets[0] == schema_assign_name_node)
        assigns.remove(schema_assign_node)

        # We only care about "schema = copy.deepcopy(...)"
        schema_dict_node = infer_copy_deepcopy(schema_assign_node.value)
        if not schema_dict_node:
            # This is not an API model class, as it doesn't have
            # something we can resolve to a dictionary.
            return

        # OK, now we need to look for any properties that dynamically modify
        # the dictionary that was just copied from somewhere else.
        # See the note below for why we only care about "properties" here.
        for assign_node in assigns:
            # we're looking for assignments like this:
            # schema["properties"]["ttl"] = {...}
            target = assign_node.targets[0]
            try:
                if (isinstance(target, nodes.Subscript)
                        and target.value.value.name == "schema"
                        and target.value.slice.value.value == "properties"):
                    property_name_node = target.slice.value
                else:
                    # not schema["properties"]
                    continue
            except AttributeError:
                continue

            # schema["properties"]["execution"] = copy.deepcopy(ActionExecutionAPI.schema)
            inferred_value = infer_copy_deepcopy(assign_node.value)

            extra_schema_properties[property_name_node] = (
                inferred_value if inferred_value else assign_node.value)

    if not isinstance(schema_dict_node, nodes.Dict):
        # Not a class we are interested in (like BaseAPI)
        return

    # We only care about "properties" in the schema because that's the only part of the schema
    # that gets translated into dynamic attributes on the model API class.
    properties_dict_node = None
    for key_node, value_node in schema_dict_node.items:
        if key_node.value == "properties":
            properties_dict_node = value_node
            break

    if not properties_dict_node and not extra_schema_properties:
        # Not a class we can do anything with
        return

    # Hooray! We have the schema properties dict now, so we can start processing
    # each property and add an attribute for each one to the API model class node.
    for property_name_node, property_data_node in properties_dict_node.items + list(
            extra_schema_properties.items()):
        property_name = property_name_node.value.replace(
            "-", "_")  # Note: We do the same in Python code

        # Despite the processing above to extract the schema properties dictionary
        # each property in the dictionary might also reference other variables,
        # so we still need to resolve these to figure out each property's type.

        # A property's value may be an indirect reference to copy.deepcopy() as in:
        #   REQUIRED_ATTR_SCHEMAS = {"action": copy.deepcopy(ActionAPI.schema)}
        #   schema = {"properties": {"action": REQUIRED_ATTR_SCHEMAS["action"]}}
        if isinstance(property_data_node, nodes.Subscript):
            var_name = property_data_node.value.name
            subscript = property_data_node.slice.value.value

            # lookup var by name (assume its at module level)
            var_node = next(cls.root().igetattr(var_name))

            # assume it is a dict at this point
            data_node = None
            for key_node, value_node in var_node.items:
                if key_node.value == subscript:
                    # infer will resolve a Dict
                    data_node = next(value_node.infer())
                    if data_node is astroid.Uninferable:
                        data_node = infer_copy_deepcopy(value_node)
                    break
            if data_node:
                property_data_node = data_node

        if not isinstance(property_data_node, nodes.Dict):
            # if infer_copy_deepcopy already ran, we may need to resolve the dict
            data_node = next(property_data_node.infer())
            if data_node is not astroid.Uninferable:
                property_data_node = data_node

        property_type_node = None
        if isinstance(property_data_node, nodes.Dict):
            # We have a property schema, but we only care about the property's type.
            for property_key_node, property_value_node in property_data_node.items:
                if property_key_node.value == "type":
                    property_type_node = next(property_value_node.infer())
                    break

        if property_type_node is None and isinstance(
                property_data_node, nodes.Attribute):
            # reference schema from another file like this:
            #   from ... import TriggerAPI
            #   schema = {"properties": {"trigger": TriggerAPI.schema}}
            # We only pull a schema from another file when it is an "object" (a dict).
            # So, we do not need to do any difficult cross-file processing.
            property_type = "object"
        elif property_type_node is None:
            property_type = None
        elif isinstance(property_type_node, nodes.Const):
            property_type = property_type_node.value
        elif isinstance(property_type_node, (nodes.List, nodes.Tuple)):
            # Hack for attributes with multiple types (e.g. string, null)
            property_type = property_type_node.elts[
                0].value  # elts has "elements" in the list/tuple
        else:
            # We should only hit this if someone has used a different approach
            # for dynamically constructing the property's schema.
            # Expose the AST at this point to facilitate handling that approach.
            raise Exception(property_type_node.repr_tree())

        # Hooray! We've got a property's name at this point.
        # And we have the property's type, if that type was defined in the schema.
        # Now, we can construct the AST node that we'll add to the API model class.
        if property_type == "object":
            node = nodes.Dict()
        elif property_type == "array":
            node = nodes.List()
        elif property_type == "integer":
            node = scoped_nodes.builtin_lookup("int")[1][0]
        elif property_type == "number":
            node = scoped_nodes.builtin_lookup("float")[1][0]
        elif property_type == "string":
            node = scoped_nodes.builtin_lookup("str")[1][0]
        elif property_type == "boolean":
            node = scoped_nodes.builtin_lookup("bool")[1][0]
        elif property_type == "null":
            node = scoped_nodes.builtin_lookup("None")[1][0]
        else:
            # Unknown type
            node = astroid.ClassDef(property_name, None)

        # Create a "property = node" assign node
        assign_node = nodes.Assign(parent=cls)
        assign_name_node = nodes.AssignName(property_name, parent=assign_node)
        assign_node.postinit(targets=[assign_name_node], value=node)

        # Finally, add the property node as an attribute on the class.
        cls.locals[property_name] = [assign_name_node]