コード例 #1
0
def _is_keyword_only_sentinel(node: NodeNG) -> bool:
    """Return True if node is the KW_ONLY sentinel."""
    if not PY310_PLUS:
        # dataclasses.KW_ONLY only exists on Python 3.10+.
        return False
    inferred = helpers.safe_infer(node)
    if not isinstance(inferred, bases.Instance):
        return False
    return inferred.qname() == "dataclasses._KW_ONLY_TYPE"
コード例 #2
0
def infer_random_sample(node, context=None):
    """Infer the result of a ``random.sample(sequence, k)`` call.

    Returns an iterator over a new ``astroid.List`` node containing the
    sampled elements, or raises ``UseInferenceDefault`` when the call
    cannot be safely evaluated.
    """
    if len(node.args) != 2:
        raise astroid.UseInferenceDefault

    length = node.args[1]
    # k must be a literal int constant for us to evaluate the sample.
    if not isinstance(length, astroid.Const):
        raise astroid.UseInferenceDefault
    if not isinstance(length.value, int):
        raise astroid.UseInferenceDefault

    inferred_sequence = helpers.safe_infer(node.args[0], context=context)
    if not inferred_sequence:
        raise astroid.UseInferenceDefault

    if not isinstance(inferred_sequence, ACCEPTED_ITERABLES_FOR_SAMPLE):
        raise astroid.UseInferenceDefault

    if length.value > len(inferred_sequence.elts):
        # In this case, random.sample would raise a ValueError.
        raise astroid.UseInferenceDefault

    try:
        elts = random.sample(inferred_sequence.elts, length.value)
    except ValueError as exc:
        # Chain the cause so tracebacks show why inference was abandoned
        # (bugbear B904; matches the other infer_random_sample variant).
        raise astroid.UseInferenceDefault from exc

    new_node = astroid.List(
        lineno=node.lineno, col_offset=node.col_offset, parent=node.scope()
    )
    new_elts = [
        _clone_node_with_lineno(elt, parent=new_node, lineno=new_node.lineno)
        for elt in elts
    ]
    new_node.postinit(new_elts)
    return iter((new_node,))
コード例 #3
0
def _container_generic_transform(arg, context, klass, iterables, build_elts):
    """Transform *arg* into an instance of *klass*, or return None on failure."""
    if isinstance(arg, klass):
        return arg
    if isinstance(arg, iterables):
        if all(isinstance(elt, nodes.Const) for elt in arg.elts):
            elts = [elt.value for elt in arg.elts]
        else:
            # TODO: Does not handle deduplication for sets.
            elts = []
            for element in arg.elts:
                inferred = helpers.safe_infer(element, context=context)
                if inferred:
                    elts.append(
                        nodes.EvaluatedObject(original=element, value=inferred)
                    )
    elif isinstance(arg, nodes.Dict):
        # Dicts need to have consts as strings already.
        if not all(isinstance(elt[0], nodes.Const) for elt in arg.items):
            raise UseInferenceDefault()
        elts = [item[0].value for item in arg.items]
    elif isinstance(arg, nodes.Const) and isinstance(
        arg.value, (six.string_types, six.binary_type)
    ):
        elts = arg.value
    else:
        # Unsupported node kind: signal failure by returning None.
        return None
    return klass.from_elements(elts=build_elts(elts))
コード例 #4
0
def infer_random_sample(node, context=None):
    """Infer ``random.sample(sequence, k)`` calls, yielding a new List node."""
    if len(node.args) != 2:
        raise UseInferenceDefault

    # k must be a literal int constant.
    size_node = node.args[1]
    if not isinstance(size_node, Const):
        raise UseInferenceDefault
    if not isinstance(size_node.value, int):
        raise UseInferenceDefault

    sequence = helpers.safe_infer(node.args[0], context=context)
    if not sequence:
        raise UseInferenceDefault
    if not isinstance(sequence, ACCEPTED_ITERABLES_FOR_SAMPLE):
        raise UseInferenceDefault
    if size_node.value > len(sequence.elts):
        # random.sample would raise a ValueError for an oversized k.
        raise UseInferenceDefault

    try:
        sampled = random.sample(sequence.elts, size_node.value)
    except ValueError as exc:
        raise UseInferenceDefault from exc

    result = List(lineno=node.lineno, col_offset=node.col_offset, parent=node.scope())
    result.postinit(
        [
            _clone_node_with_lineno(elt, parent=result, lineno=result.lineno)
            for elt in sampled
        ]
    )
    return iter((result,))
コード例 #5
0
def _multiply_seq_by_int(self, opnode, other, context):
    """Return a new sequence node: self's inferred elements repeated other.value times."""
    node = self.__class__(parent=opnode)
    inferred = []
    for elt in self.elts:
        # Already-uninferable placeholders are skipped before re-inference.
        if elt is util.Uninferable:
            continue
        inferred.append(helpers.safe_infer(elt, context) or util.Uninferable)
    node.elts = inferred * other.value
    return node
コード例 #6
0
ファイル: inference.py プロジェクト: EdwinYang2000/ex1
def _infer_map(node, context):
    """Infer all values based on Dict.items.

    Returns a dict mapping inferred key nodes to inferred value nodes;
    raises InferenceError when any entry cannot be inferred.
    """
    values = {}
    for name, value in node.items:
        if isinstance(name, nodes.DictUnpack):
            # ``**mapping`` entry: the value must infer to a concrete Dict.
            double_starred = helpers.safe_infer(value, context)
            if double_starred in (None, util.Uninferable):
                raise exceptions.InferenceError
            if not isinstance(double_starred, nodes.Dict):
                raise exceptions.InferenceError(node=node, context=context)
            values.update(_infer_map(double_starred, context))
        else:
            key = helpers.safe_infer(name, context=context)
            value = helpers.safe_infer(value, context=context)
            # Reject Uninferable as well as None: storing Uninferable keys or
            # values would poison the resulting mapping. This matches the
            # DictUnpack branch above, which already rejects Uninferable.
            if any(elem in (None, util.Uninferable) for elem in (key, value)):
                raise exceptions.InferenceError(node=node, context=context)
            values[key] = value
    return values
コード例 #7
0
def _multiply_seq_by_int(self, opnode, other, context):
    """Repeat the (re-inferred) elements of *self* ``other.value`` times."""
    result = self.__class__(parent=opnode)
    # Drop pre-existing Uninferable placeholders, then re-infer each element,
    # falling back to Uninferable when inference fails.
    kept = [e for e in self.elts if e is not util.Uninferable]
    result.elts = [
        helpers.safe_infer(e, context) or util.Uninferable for e in kept
    ] * other.value
    return result
コード例 #8
0
def _multiply_seq_by_int(self, opnode, other, context):
    """Build a new sequence node holding self's inferred elements, repeated."""
    node = self.__class__(parent=opnode)
    inferred_elts = []
    for item in self.elts:
        value = helpers.safe_infer(item, context)
        # Fall back to Uninferable for elements that cannot be inferred.
        inferred_elts.append(util.Uninferable if value is None else value)
    node.elts = inferred_elts * other.value
    return node
コード例 #9
0
def _multiply_seq_by_int(self, other, context):
    """Return a fresh node whose elts are self's inferred elements, repeated."""
    result = self.__class__()
    items = []
    for element in self.elts:
        value = helpers.safe_infer(element, context)
        # Uninferable stands in for elements inference could not resolve.
        items.append(value if value is not None else util.Uninferable)
    result.elts = items * other.value
    return result
コード例 #10
0
ファイル: inference.py プロジェクト: PolPasop/ec-cor-survey
def _infer_map(node, context):
    """Infer all values based on Dict.items"""
    result = {}
    for key_node, value_node in node.items:
        if isinstance(key_node, nodes.DictUnpack):
            # ``**mapping`` entry: must infer to a concrete Dict node.
            unpacked = helpers.safe_infer(value_node, context)
            if not unpacked:
                raise exceptions.InferenceError
            if not isinstance(unpacked, nodes.Dict):
                raise exceptions.InferenceError(node=node, context=context)
            result = _update_with_replacement(
                result, _infer_map(unpacked, context)
            )
        else:
            inferred_key = helpers.safe_infer(key_node, context=context)
            inferred_value = helpers.safe_infer(value_node, context=context)
            if not inferred_key or not inferred_value:
                raise exceptions.InferenceError(node=node, context=context)
            result = _update_with_replacement(
                result, {inferred_key: inferred_value}
            )
    return result
コード例 #11
0
ファイル: protocols.py プロジェクト: Marslo/VimConfig
def _multiply_seq_by_int(self, opnode, other, context):
    """Return a new container: self's inferred elements times other.value."""
    node = self.__class__(parent=opnode)
    collected = []
    for elt in self.elts:
        # Pre-existing Uninferable placeholders are dropped outright.
        if elt is util.Uninferable:
            continue
        resolved = helpers.safe_infer(elt, context)
        collected.append(util.Uninferable if resolved is None else resolved)
    node.elts = collected * other.value
    return node
コード例 #12
0
ファイル: inference.py プロジェクト: PolPasop/ec-cor-survey
def _infer_sequence_helper(node, context=None):
    """Infer all values based on _BaseContainer.elts"""
    inferred_values = []
    for elt in node.elts:
        if isinstance(elt, nodes.Starred):
            # A *starred element must itself infer to a container with elts.
            unpacked = helpers.safe_infer(elt.value, context)
            if not unpacked or not hasattr(unpacked, "elts"):
                raise exceptions.InferenceError(node=node, context=context)
            inferred_values.extend(_infer_sequence_helper(unpacked))
        elif isinstance(elt, nodes.NamedExpr):
            # Walrus expressions contribute their inferred assigned value.
            assigned = helpers.safe_infer(elt.value, context)
            if not assigned:
                raise exceptions.InferenceError(node=node, context=context)
            inferred_values.append(assigned)
        else:
            inferred_values.append(elt)
    return inferred_values
コード例 #13
0
ファイル: inference.py プロジェクト: byd913/vim
def _infer_map(node, context):
    """Infer all values based on Dict.items"""
    inferred = {}
    for key_node, value_node in node.items:
        if isinstance(key_node, nodes.DictUnpack):
            # ``**mapping`` entries must infer to a concrete Dict node.
            mapping = helpers.safe_infer(value_node, context)
            if mapping in (None, util.Uninferable):
                raise exceptions.InferenceError
            if not isinstance(mapping, nodes.Dict):
                raise exceptions.InferenceError(node=node, context=context)
            inferred.update(_infer_map(mapping, context))
        else:
            key = helpers.safe_infer(key_node, context=context)
            val = helpers.safe_infer(value_node, context=context)
            if key is None or val is None:
                raise exceptions.InferenceError(node=node, context=context)
            inferred[key] = val
    return inferred
コード例 #14
0
def _infer_str_format_call(
    node: nodes.Call,
    context: InferenceContext | None = None
) -> Iterator[nodes.Const | type[util.Uninferable]]:
    """Return a Const node based on the template and passed arguments."""
    call = arguments.CallSite.from_call(node, context=context)
    if isinstance(node.func.expr, nodes.Name):
        value = helpers.safe_infer(node.func.expr)
    else:
        value = node.func.expr
    # safe_infer may return None (or a non-Const node); bail out instead of
    # crashing with AttributeError on ``value.value`` below.
    if not isinstance(value, nodes.Const):
        return iter([util.Uninferable])

    format_template = value.value

    # Get the positional arguments passed
    inferred_positional = [
        helpers.safe_infer(i, context) for i in call.positional_arguments
    ]
    if not all(isinstance(i, nodes.Const) for i in inferred_positional):
        return iter([util.Uninferable])
    pos_values: list[str] = [i.value for i in inferred_positional]

    # Get the keyword arguments passed
    inferred_keyword = {
        k: helpers.safe_infer(v, context)
        for k, v in call.keyword_arguments.items()
    }
    if not all(isinstance(i, nodes.Const) for i in inferred_keyword.values()):
        return iter([util.Uninferable])
    keyword_values: dict[str, str] = {
        k: v.value
        for k, v in inferred_keyword.items()
    }

    try:
        formatted_string = format_template.format(*pos_values,
                                                  **keyword_values)
    except (IndexError, KeyError, TypeError, ValueError):
        # IndexError/KeyError: too few arguments to interpolate.
        # TypeError/ValueError: invalid format spec or incompatible value
        # (e.g. "{:d}".format("s")) — also not statically inferable.
        return iter([util.Uninferable])

    return iter([nodes.const_factory(formatted_string)])
コード例 #15
0
def _is_str_format_call(node: nodes.Call) -> bool:
    """Catch calls to str.format()."""
    func = node.func
    if not (isinstance(func, nodes.Attribute) and func.attrname == "format"):
        return False

    # For a bare name, infer what it refers to; otherwise inspect the
    # expression node directly (e.g. a literal like "x={}".format(...)).
    if isinstance(func.expr, nodes.Name):
        target = helpers.safe_infer(func.expr)
    else:
        target = func.expr

    return isinstance(target, nodes.Const) and isinstance(target.value, str)
コード例 #16
0
ファイル: inference.py プロジェクト: eriksf/dotfiles
def _infer_map(node, context):
    """Infer all values based on Dict.items"""
    mapping = {}
    for key_node, value_node in node.items:
        if isinstance(key_node, nodes.DictUnpack):
            # ``**other`` entry: must infer to a concrete Dict node.
            star_value = helpers.safe_infer(value_node, context)
            if star_value in (None, util.Uninferable):
                raise exceptions.InferenceError
            if not isinstance(star_value, nodes.Dict):
                raise exceptions.InferenceError(node=node, context=context)
            mapping = _update_with_replacement(
                mapping, _infer_map(star_value, context)
            )
        else:
            key = helpers.safe_infer(key_node, context=context)
            val = helpers.safe_infer(value_node, context=context)
            if key in (None, util.Uninferable) or val in (None, util.Uninferable):
                raise exceptions.InferenceError(node=node, context=context)
            mapping = _update_with_replacement(mapping, {key: val})
    return mapping
コード例 #17
0
ファイル: inference.py プロジェクト: michael-k/astroid
def _infer_map(
    node: nodes.Dict, context: InferenceContext | None
) -> dict[SuccessfulInferenceResult, SuccessfulInferenceResult]:
    """Infer all values based on Dict.items"""
    result: dict[SuccessfulInferenceResult, SuccessfulInferenceResult] = {}
    for key_node, value_node in node.items:
        if isinstance(key_node, nodes.DictUnpack):
            # ``**mapping`` entry: must infer to a concrete Dict node.
            unpacked = helpers.safe_infer(value_node, context)
            if not unpacked:
                raise InferenceError
            if not isinstance(unpacked, nodes.Dict):
                raise InferenceError(node=node, context=context)
            result = _update_with_replacement(
                result, _infer_map(unpacked, context)
            )
        else:
            key = helpers.safe_infer(key_node, context=context)
            safe_value = helpers.safe_infer(value_node, context=context)
            if not key or not safe_value:
                raise InferenceError(node=node, context=context)
            # safe_value is SuccessfulInferenceResult as bool(Uninferable) == False
            result = _update_with_replacement(
                result, {key: safe_value})  # type: ignore[dict-item]
    return result
コード例 #18
0
ファイル: inference.py プロジェクト: michael-k/astroid
def _infer_old_style_string_formatting(
        instance: nodes.Const, other: nodes.NodeNG, context: InferenceContext
) -> tuple[type[util.Uninferable] | nodes.Const]:
    """Infer the result of '"string" % ...'.

    TODO: Instead of returning Uninferable we should rely
    on the call to '%' to see if the result is actually uninferable.
    """
    if isinstance(other, nodes.Tuple):
        if util.Uninferable in other.elts:
            return (util.Uninferable, )
        inferred = [helpers.safe_infer(elt, context) for elt in other.elts]
        if all(isinstance(elt, nodes.Const) for elt in inferred):
            values = tuple(elt.value for elt in inferred)
        else:
            # Leave values as None; the "%" below then raises TypeError
            # and we return Uninferable.
            values = None
    elif isinstance(other, nodes.Dict):
        values: dict[Any, Any] = {}
        for key_node, value_node in other.items:
            key = helpers.safe_infer(key_node, context)
            if not isinstance(key, nodes.Const):
                return (util.Uninferable, )
            value = helpers.safe_infer(value_node, context)
            if not isinstance(value, nodes.Const):
                return (util.Uninferable, )
            values[key.value] = value.value
    elif isinstance(other, nodes.Const):
        values = other.value
    else:
        return (util.Uninferable, )

    try:
        return (nodes.const_factory(instance.value % values), )
    except (TypeError, KeyError, ValueError):
        return (util.Uninferable, )
コード例 #19
0
ファイル: inference.py プロジェクト: EdwinYang2000/ex1
def _infer_seq(node, context=None):
    """Infer all values based on _BaseContainer.elts"""
    result = []
    for element in node.elts:
        if not isinstance(element, nodes.Starred):
            result.append(element)
            continue
        # A *starred element must unpack to something exposing .elts.
        starred_value = helpers.safe_infer(element.value, context)
        if starred_value in (None, util.Uninferable):
            raise exceptions.InferenceError(node=node, context=context)
        if not hasattr(starred_value, 'elts'):
            raise exceptions.InferenceError(node=node, context=context)
        result.extend(_infer_seq(starred_value))
    return result
コード例 #20
0
ファイル: brain_functools.py プロジェクト: my88899/astroid
def _looks_like_lru_cache(node):
    """Check if the given function node is decorated with lru_cache."""
    decorators = node.decorators
    if not decorators:
        return False

    # Only Call decorators (e.g. ``@lru_cache(maxsize=...)``) are candidates.
    call_decorators = (d for d in decorators.nodes if isinstance(d, astroid.Call))
    for call in call_decorators:
        func = helpers.safe_infer(call.func)
        if func is None or func is astroid.Uninferable:
            continue
        if isinstance(func, astroid.FunctionDef) and func.qname() == LRU_CACHE:
            return True
    return False
コード例 #21
0
def _looks_like_lru_cache(node):
    """Check if the given function node is decorated with lru_cache."""
    if not node.decorators:
        return False

    def _is_lru_decorator(decorator):
        # Only Call decorators (``@lru_cache(...)``) can resolve to lru_cache.
        if not isinstance(decorator, astroid.Call):
            return False
        inferred = helpers.safe_infer(decorator.func)
        if inferred in (None, astroid.Uninferable):
            return False
        return (
            isinstance(inferred, astroid.FunctionDef)
            and inferred.qname() == LRU_CACHE
        )

    return any(_is_lru_decorator(d) for d in node.decorators.nodes)
コード例 #22
0
ファイル: inference.py プロジェクト: byd913/vim
def _infer_seq(node, context=None):
    """Infer all values based on _BaseContainer.elts"""
    values = []
    for elt in node.elts:
        if isinstance(elt, nodes.Starred):
            # A *starred element must unpack to a container with .elts.
            unpacked = helpers.safe_infer(elt.value, context)
            if unpacked in (None, util.Uninferable):
                raise exceptions.InferenceError(node=node, context=context)
            if not hasattr(unpacked, 'elts'):
                raise exceptions.InferenceError(node=node, context=context)
            values.extend(_infer_seq(unpacked))
        else:
            values.append(elt)
    return values
コード例 #23
0
ファイル: brain_random.py プロジェクト: Gilles00/todobackend
def infer_random_sample(node, context=None):
    """Infer ``random.sample(sequence, k)`` calls.

    Yields a new ``astroid.List`` node holding the sampled elements, or
    raises ``UseInferenceDefault`` when the call cannot be evaluated safely.
    """
    if len(node.args) != 2:
        raise astroid.UseInferenceDefault

    length = node.args[1]
    # k must be a literal int constant for us to evaluate the sample.
    if not isinstance(length, astroid.Const):
        raise astroid.UseInferenceDefault
    if not isinstance(length.value, int):
        raise astroid.UseInferenceDefault

    inferred_sequence = helpers.safe_infer(node.args[0], context=context)
    if inferred_sequence in (None, astroid.Uninferable):
        raise astroid.UseInferenceDefault

    # TODO: might need to support more cases
    if not isinstance(inferred_sequence, ACCEPTED_ITERABLES_FOR_SAMPLE):
        raise astroid.UseInferenceDefault

    if length.value > len(inferred_sequence.elts):
        # In this case, random.sample would raise a ValueError.
        raise astroid.UseInferenceDefault

    try:
        elts = random.sample(inferred_sequence.elts, length.value)
    except ValueError as exc:
        # Chain the cause so the traceback shows why inference was abandoned
        # (bugbear B904).
        raise astroid.UseInferenceDefault from exc

    new_node = astroid.List(
        lineno=node.lineno,
        col_offset=node.col_offset,
        parent=node.scope(),
    )
    new_elts = [
        _clone_node_with_lineno(
            elt,
            parent=new_node,
            lineno=new_node.lineno
        )
        for elt in elts
    ]
    new_node.postinit(new_elts)
    return iter((new_node, ))
コード例 #24
0
ファイル: brain_functools.py プロジェクト: my88899/astroid
 def infer_call_result(self, caller, context=None):
     """Yield the inferred ``cache_info`` object for calls to this callable.

     NOTE(review): ``cache_info`` comes from an enclosing scope that is not
     visible in this snippet — confirm it is bound before this method runs.
     """
     yield helpers.safe_infer(cache_info)
コード例 #25
0
 def infer_call_result(self, caller, context=None):
     """Yield the inferred ``cache_info`` object for calls to this callable.

     NOTE(review): ``cache_info`` comes from an enclosing scope that is not
     visible in this snippet — confirm it is bound before this method runs.
     """
     yield helpers.safe_infer(cache_info)