def _is_keyword_only_sentinel(node: NodeNG) -> bool:
    """Return True if node is the KW_ONLY sentinel."""
    # dataclasses.KW_ONLY only exists on Python 3.10+.
    if not PY310_PLUS:
        return False
    inferred = helpers.safe_infer(node)
    if not isinstance(inferred, bases.Instance):
        return False
    return inferred.qname() == "dataclasses._KW_ONLY_TYPE"
def infer_random_sample(node, context=None):
    """Infer a ``random.sample(sequence, k)`` call as a new List node.

    Raises astroid.UseInferenceDefault whenever the call shape or the
    inferred arguments do not allow a safe, concrete sample.
    """
    if len(node.args) != 2:
        raise astroid.UseInferenceDefault
    length = node.args[1]
    if not isinstance(length, astroid.Const):
        raise astroid.UseInferenceDefault
    if not isinstance(length.value, int):
        raise astroid.UseInferenceDefault

    inferred_sequence = helpers.safe_infer(node.args[0], context=context)
    if not inferred_sequence:
        raise astroid.UseInferenceDefault
    if not isinstance(inferred_sequence, ACCEPTED_ITERABLES_FOR_SAMPLE):
        raise astroid.UseInferenceDefault
    if length.value > len(inferred_sequence.elts):
        # In this case, this will raise a ValueError
        raise astroid.UseInferenceDefault
    try:
        elts = random.sample(inferred_sequence.elts, length.value)
    except ValueError as exc:
        # Chain the original error (PEP 3134) so debugging keeps the
        # underlying ValueError traceback; previously it was discarded.
        raise astroid.UseInferenceDefault from exc

    new_node = astroid.List(
        lineno=node.lineno, col_offset=node.col_offset, parent=node.scope()
    )
    new_elts = [
        _clone_node_with_lineno(elt, parent=new_node, lineno=new_node.lineno)
        for elt in elts
    ]
    new_node.postinit(new_elts)
    return iter((new_node,))
def _container_generic_transform(arg, context, klass, iterables, build_elts):
    """Transform *arg* into a ``klass`` container node, when possible.

    Returns None when *arg* is not a supported argument shape; raises
    UseInferenceDefault for dicts with non-constant keys.
    """
    if isinstance(arg, klass):
        # Already the requested container type; hand it back unchanged.
        return arg

    if isinstance(arg, iterables):
        if all(isinstance(item, nodes.Const) for item in arg.elts):
            elements = [item.value for item in arg.elts]
        else:
            # TODO: Does not handle deduplication for sets.
            elements = []
            for item in arg.elts:
                inferred = helpers.safe_infer(item, context=context)
                if inferred:
                    elements.append(
                        nodes.EvaluatedObject(original=item, value=inferred)
                    )
        return klass.from_elements(elts=build_elts(elements))

    if isinstance(arg, nodes.Dict):
        # Dicts need to have consts as strings already.
        if not all(isinstance(pair[0], nodes.Const) for pair in arg.items):
            raise UseInferenceDefault()
        keys = [pair[0].value for pair in arg.items]
        return klass.from_elements(elts=build_elts(keys))

    if isinstance(arg, nodes.Const) and isinstance(
        arg.value, (six.string_types, six.binary_type)
    ):
        return klass.from_elements(elts=build_elts(arg.value))

    return None
def infer_random_sample(node, context=None):
    """Infer ``random.sample(sequence, k)`` as a freshly built List node."""
    if len(node.args) != 2:
        raise UseInferenceDefault

    sample_size = node.args[1]
    if not isinstance(sample_size, Const):
        raise UseInferenceDefault
    if not isinstance(sample_size.value, int):
        raise UseInferenceDefault

    sequence = helpers.safe_infer(node.args[0], context=context)
    if not sequence:
        raise UseInferenceDefault
    if not isinstance(sequence, ACCEPTED_ITERABLES_FOR_SAMPLE):
        raise UseInferenceDefault
    if sample_size.value > len(sequence.elts):
        # In this case, this will raise a ValueError
        raise UseInferenceDefault

    try:
        chosen = random.sample(sequence.elts, sample_size.value)
    except ValueError as exc:
        raise UseInferenceDefault from exc

    result = List(lineno=node.lineno, col_offset=node.col_offset, parent=node.scope())
    cloned = [
        _clone_node_with_lineno(elt, parent=result, lineno=result.lineno)
        for elt in chosen
    ]
    result.postinit(cloned)
    return iter((result,))
def _multiply_seq_by_int(self, opnode, other, context):
    """Return a new sequence node whose elements repeat ``other.value`` times.

    Elements already known to be Uninferable are dropped before inference.
    """
    result = self.__class__(parent=opnode)
    inferred_elts = []
    for elt in self.elts:
        if elt is util.Uninferable:
            continue
        inferred_elts.append(helpers.safe_infer(elt, context) or util.Uninferable)
    result.elts = inferred_elts * other.value
    return result
def _infer_map(node, context):
    """Infer all values based on Dict.items"""
    values = {}
    for name, value in node.items:
        if isinstance(name, nodes.DictUnpack):
            double_starred = helpers.safe_infer(value, context)
            if double_starred in (None, util.Uninferable):
                # Pass node/context so the failure is attributable —
                # consistent with every other raise in this function.
                raise exceptions.InferenceError(node=node, context=context)
            if not isinstance(double_starred, nodes.Dict):
                raise exceptions.InferenceError(node=node, context=context)
            values.update(_infer_map(double_starred, context))
        else:
            key = helpers.safe_infer(name, context=context)
            value = helpers.safe_infer(value, context=context)
            # Bug fix: the previous ``is None`` check let Uninferable slip
            # through and become a dict key/value.
            if key in (None, util.Uninferable) or value in (None, util.Uninferable):
                raise exceptions.InferenceError(node=node, context=context)
            values[key] = value
    return values
def _multiply_seq_by_int(self, opnode, other, context):
    """Return a new sequence node with the inferred elts repeated ``other.value`` times."""
    result = self.__class__(parent=opnode)
    repeated = []
    for elt in self.elts:
        inferred = helpers.safe_infer(elt, context)
        # Uninferable stands in for elements that could not be resolved.
        repeated.append(util.Uninferable if inferred is None else inferred)
    result.elts = repeated * other.value
    return result
def _multiply_seq_by_int(self, other, context):
    """Return a new sequence node with the inferred elts repeated ``other.value`` times."""
    result = self.__class__()
    repeated = []
    for elt in self.elts:
        inferred = helpers.safe_infer(elt, context)
        repeated.append(inferred if inferred is not None else util.Uninferable)
    result.elts = repeated * other.value
    return result
def _infer_map(node, context):
    """Infer all values based on Dict.items"""
    values = {}
    for name, value in node.items:
        if isinstance(name, nodes.DictUnpack):
            double_starred = helpers.safe_infer(value, context)
            if not double_starred:
                # Consistency fix: pass node/context like the other raises
                # in this function so the error is attributable.
                raise exceptions.InferenceError(node=node, context=context)
            if not isinstance(double_starred, nodes.Dict):
                raise exceptions.InferenceError(node=node, context=context)
            unpack_items = _infer_map(double_starred, context)
            values = _update_with_replacement(values, unpack_items)
        else:
            key = helpers.safe_infer(name, context=context)
            value = helpers.safe_infer(value, context=context)
            # Falsy covers both None and Uninferable (bool(Uninferable) is False).
            if any(not elem for elem in (key, value)):
                raise exceptions.InferenceError(node=node, context=context)
            values = _update_with_replacement(values, {key: value})
    return values
def _multiply_seq_by_int(self, opnode, other, context):
    """Repeat the inferable elements of this sequence ``other.value`` times.

    Elements that are already Uninferable are skipped; elements that fail
    inference are replaced by Uninferable placeholders.
    """
    result = self.__class__(parent=opnode)
    inferred = [
        helpers.safe_infer(elt, context)
        for elt in self.elts
        if elt is not util.Uninferable
    ]
    result.elts = [
        util.Uninferable if value is None else value for value in inferred
    ] * other.value
    return result
def _infer_sequence_helper(node, context=None):
    """Infer all values based on _BaseContainer.elts."""
    values = []
    for elt in node.elts:
        if isinstance(elt, nodes.Starred):
            starred = helpers.safe_infer(elt.value, context)
            # Starred targets must infer to something with ``elts``.
            if not starred or not hasattr(starred, "elts"):
                raise exceptions.InferenceError(node=node, context=context)
            # NOTE(review): the recursive call drops ``context`` — looks
            # deliberate upstream, but confirm before relying on it.
            values.extend(_infer_sequence_helper(starred))
        elif isinstance(elt, nodes.NamedExpr):
            value = helpers.safe_infer(elt.value, context)
            if not value:
                raise exceptions.InferenceError(node=node, context=context)
            values.append(value)
        else:
            values.append(elt)
    return values
def _infer_str_format_call(
    node: nodes.Call, context: InferenceContext | None = None
) -> Iterator[nodes.Const | type[util.Uninferable]]:
    """Return a Const node based on the template and passed arguments."""
    call = arguments.CallSite.from_call(node, context=context)
    if isinstance(node.func.expr, nodes.Name):
        value = helpers.safe_infer(node.func.expr)
        # Robustness: safe_infer can return None/Uninferable; bail out
        # instead of crashing on ``.value`` below.
        if not isinstance(value, nodes.Const):
            return iter([util.Uninferable])
    else:
        value = node.func.expr
    format_template = value.value

    # Get the positional arguments passed
    inferred_positional = [
        helpers.safe_infer(i, context) for i in call.positional_arguments
    ]
    if not all(isinstance(i, nodes.Const) for i in inferred_positional):
        return iter([util.Uninferable])
    pos_values: list[str] = [i.value for i in inferred_positional]

    # Get the keyword arguments passed
    inferred_keyword = {
        k: helpers.safe_infer(v, context) for k, v in call.keyword_arguments.items()
    }
    if not all(isinstance(i, nodes.Const) for i in inferred_keyword.values()):
        return iter([util.Uninferable])
    keyword_values: dict[str, str] = {k: v.value for k, v in inferred_keyword.items()}

    try:
        formatted_string = format_template.format(*pos_values, **keyword_values)
    except (IndexError, KeyError, TypeError, ValueError):
        # IndexError/KeyError: too few positional or missing keyword args.
        # TypeError/ValueError: invalid format string or an argument that
        # does not support the conversion (e.g. "{:d}".format("a")) —
        # previously these escaped and crashed inference.
        return iter([util.Uninferable])

    return iter([nodes.const_factory(formatted_string)])
def _is_str_format_call(node: nodes.Call) -> bool:
    """Catch calls to str.format()."""
    # Idiom fix: ``x != y`` instead of ``not x == y``.
    if not isinstance(node.func, nodes.Attribute) or node.func.attrname != "format":
        return False

    if isinstance(node.func.expr, nodes.Name):
        # Resolve the name to see whether it is a string constant.
        value = helpers.safe_infer(node.func.expr)
    else:
        value = node.func.expr

    return isinstance(value, nodes.Const) and isinstance(value.value, str)
def _infer_map(node, context):
    """Infer all values based on Dict.items"""
    values = {}
    for name, value in node.items:
        if not isinstance(name, nodes.DictUnpack):
            key = helpers.safe_infer(name, context=context)
            safe_value = helpers.safe_infer(value, context=context)
            if key in (None, util.Uninferable) or safe_value in (
                None,
                util.Uninferable,
            ):
                raise exceptions.InferenceError(node=node, context=context)
            values = _update_with_replacement(values, {key: safe_value})
            continue
        # ``**mapping`` entry: infer the mapping and merge its items.
        double_starred = helpers.safe_infer(value, context)
        if double_starred in (None, util.Uninferable):
            raise exceptions.InferenceError
        if not isinstance(double_starred, nodes.Dict):
            raise exceptions.InferenceError(node=node, context=context)
        values = _update_with_replacement(values, _infer_map(double_starred, context))
    return values
def _infer_map(
    node: nodes.Dict, context: InferenceContext | None
) -> dict[SuccessfulInferenceResult, SuccessfulInferenceResult]:
    """Infer all values based on Dict.items"""
    values: dict[SuccessfulInferenceResult, SuccessfulInferenceResult] = {}
    for name, value in node.items:
        if isinstance(name, nodes.DictUnpack):
            # ``**mapping`` entry: infer the mapping and merge its items.
            unpacked = helpers.safe_infer(value, context)
            if not unpacked:
                raise InferenceError
            if not isinstance(unpacked, nodes.Dict):
                raise InferenceError(node=node, context=context)
            values = _update_with_replacement(values, _infer_map(unpacked, context))
        else:
            key = helpers.safe_infer(name, context=context)
            safe_value = helpers.safe_infer(value, context=context)
            if not key or not safe_value:
                raise InferenceError(node=node, context=context)
            # safe_value is SuccessfulInferenceResult as bool(Uninferable) == False
            values = _update_with_replacement(
                values, {key: safe_value}  # type: ignore[dict-item]
            )
    return values
def _infer_old_style_string_formatting(
    instance: nodes.Const, other: nodes.NodeNG, context: InferenceContext
) -> tuple[type[util.Uninferable] | nodes.Const]:
    """Infer the result of '"string" % ...'.

    TODO: Instead of returning Uninferable we should rely
    on the call to '%' to see if the result is actually uninferable.
    """
    # Sentinel distinguishes "no operand computed" from a legitimate
    # ``Const(None)`` right-hand side ("%s" % None is a valid expression).
    unset = object()
    values: Any = unset
    if isinstance(other, nodes.Tuple):
        if util.Uninferable in other.elts:
            return (util.Uninferable,)
        inferred_positional = [helpers.safe_infer(i, context) for i in other.elts]
        if all(isinstance(i, nodes.Const) for i in inferred_positional):
            values = tuple(i.value for i in inferred_positional)
    elif isinstance(other, nodes.Dict):
        values = {}
        for pair in other.items:
            key = helpers.safe_infer(pair[0], context)
            if not isinstance(key, nodes.Const):
                return (util.Uninferable,)
            value = helpers.safe_infer(pair[1], context)
            if not isinstance(value, nodes.Const):
                return (util.Uninferable,)
            values[key.value] = value.value
    elif isinstance(other, nodes.Const):
        values = other.value
    else:
        return (util.Uninferable,)

    if values is unset:
        # Bug fix: previously ``values`` stayed None when a tuple operand
        # contained non-constant elements, so e.g. '"%s" % (x,)' wrongly
        # inferred to the string "None" instead of Uninferable.
        return (util.Uninferable,)

    try:
        return (nodes.const_factory(instance.value % values),)
    except (TypeError, KeyError, ValueError):
        return (util.Uninferable,)
def _infer_seq(node, context=None):
    """Infer all values based on _BaseContainer.elts"""
    values = []
    for elt in node.elts:
        if not isinstance(elt, nodes.Starred):
            values.append(elt)
            continue
        # A *starred element must infer to something with ``elts``.
        starred = helpers.safe_infer(elt.value, context)
        if starred in (None, util.Uninferable) or not hasattr(starred, 'elts'):
            raise exceptions.InferenceError(node=node, context=context)
        values.extend(_infer_seq(starred))
    return values
def _looks_like_lru_cache(node):
    """Check if the given function node is decorated with lru_cache."""
    decorators = node.decorators
    if not decorators:
        return False
    for decorator in decorators.nodes:
        # Only call-style decorators (e.g. @lru_cache(maxsize=...)) qualify.
        if not isinstance(decorator, astroid.Call):
            continue
        inferred = helpers.safe_infer(decorator.func)
        if inferred is None or inferred is astroid.Uninferable:
            continue
        if isinstance(inferred, astroid.FunctionDef) and inferred.qname() == LRU_CACHE:
            return True
    return False
def infer_random_sample(node, context=None):
    """Infer a ``random.sample(sequence, k)`` call as a new List node.

    Raises astroid.UseInferenceDefault whenever the call cannot be
    resolved to a concrete, safely sampled sequence.
    """
    if len(node.args) != 2:
        raise astroid.UseInferenceDefault
    length = node.args[1]
    if not isinstance(length, astroid.Const):
        raise astroid.UseInferenceDefault
    if not isinstance(length.value, int):
        raise astroid.UseInferenceDefault

    inferred_sequence = helpers.safe_infer(node.args[0], context=context)
    if inferred_sequence in (None, astroid.Uninferable):
        raise astroid.UseInferenceDefault

    # TODO: might need to support more cases
    if not isinstance(inferred_sequence, ACCEPTED_ITERABLES_FOR_SAMPLE):
        raise astroid.UseInferenceDefault

    if length.value > len(inferred_sequence.elts):
        # In this case, this will raise a ValueError
        raise astroid.UseInferenceDefault
    try:
        elts = random.sample(inferred_sequence.elts, length.value)
    except ValueError as exc:
        # Chain the original error (PEP 3134) so the underlying ValueError
        # traceback is preserved; previously it was discarded.
        raise astroid.UseInferenceDefault from exc

    new_node = astroid.List(
        lineno=node.lineno,
        col_offset=node.col_offset,
        parent=node.scope(),
    )
    new_elts = [
        _clone_node_with_lineno(
            elt,
            parent=new_node,
            lineno=new_node.lineno
        )
        for elt in elts
    ]
    new_node.postinit(new_elts)
    return iter((new_node, ))
def infer_call_result(self, caller, context=None):
    """Yield the inferred result of calling this node.

    NOTE(review): ``cache_info`` is a free variable resolved from an
    enclosing scope that is not visible in this chunk — presumably the
    node this method was synthesized around (e.g. an lru_cache
    ``cache_info`` attribute); confirm at the definition site. The
    ``caller`` and ``context`` parameters are unused here.
    """
    yield helpers.safe_infer(cache_info)