Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/astroid/protocols.py: 14%
Shortcuts on this page
r m x   toggle line displays
j k     next/prev highlighted chunk
0 (zero)   top of page
1 (one)    first highlighted chunk
1# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
2# For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE
3# Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt
5"""This module contains a set of functions to handle python protocols for nodes
6where it makes sense.
7"""
9from __future__ import annotations
11import collections
12import itertools
13import operator as operator_mod
14from collections.abc import Callable, Generator, Iterator, Sequence
15from typing import TYPE_CHECKING, Any, TypeVar
17from astroid import bases, decorators, nodes, util
18from astroid.builder import extract_node
19from astroid.const import Context
20from astroid.context import InferenceContext, copy_context
21from astroid.exceptions import (
22 AstroidIndexError,
23 AstroidTypeError,
24 AttributeInferenceError,
25 InferenceError,
26 NoDefault,
27)
28from astroid.nodes import node_classes
29from astroid.typing import (
30 ConstFactoryResult,
31 InferenceResult,
32 SuccessfulInferenceResult,
33)
35if TYPE_CHECKING:
36 _TupleListNodeT = TypeVar("_TupleListNodeT", nodes.Tuple, nodes.List)
# Fully qualified name of the stdlib decorator used to recognise
# generator-based context managers during ``with`` inference.
_CONTEXTLIB_MGR = "contextlib.contextmanager"

# Maps each unary operator symbol to its runtime implementation.
_UNARY_OPERATORS: dict[str, Callable[[Any], Any]] = {
    "+": operator_mod.pos,
    "-": operator_mod.neg,
    "~": operator_mod.invert,
    "not": operator_mod.not_,
}
def _infer_unary_op(obj: Any, op: str) -> ConstFactoryResult:
    """Apply unary operator *op* to *obj* and wrap the result in a Const node.

    ``NotImplemented`` is passed through unchanged. Can raise TypeError if
    the operation is unsupported for *obj*.
    """
    if obj is NotImplemented:
        return nodes.const_factory(obj)
    operation = _UNARY_OPERATORS[op]
    return nodes.const_factory(operation(obj))
def tuple_infer_unary_op(self, op):
    """Infer a unary operation applied to a Tuple node."""
    operand = tuple(self.elts)
    return _infer_unary_op(operand, op)
def list_infer_unary_op(self, op):
    """Infer a unary operation applied to a List node."""
    operand = self.elts
    return _infer_unary_op(operand, op)
def set_infer_unary_op(self, op):
    """Infer a unary operation applied to a Set node."""
    operand = set(self.elts)
    return _infer_unary_op(operand, op)
def const_infer_unary_op(self, op):
    """Infer a unary operation applied to a Const node."""
    operand = self.value
    return _infer_unary_op(operand, op)
def dict_infer_unary_op(self, op):
    """Infer a unary operation applied to a Dict node."""
    operand = dict(self.items)
    return _infer_unary_op(operand, op)
# Binary operations

# Maps each binary operator symbol to a two-argument callable implementing it.
BIN_OP_IMPL = {
    "+": operator_mod.add,
    "-": operator_mod.sub,
    "/": operator_mod.truediv,
    "//": operator_mod.floordiv,
    "*": operator_mod.mul,
    "**": operator_mod.pow,
    "%": operator_mod.mod,
    "&": operator_mod.and_,
    "|": operator_mod.or_,
    "^": operator_mod.xor,
    "<<": operator_mod.lshift,
    ">>": operator_mod.rshift,
    "@": operator_mod.matmul,
}
# Augmented assignment operators ("+=", "-=", ...) share the implementation
# of their plain counterparts.
for _KEY, _IMPL in list(BIN_OP_IMPL.items()):
    BIN_OP_IMPL[_KEY + "="] = _IMPL
@decorators.yes_if_nothing_inferred
def const_infer_binary_op(
    self: nodes.Const,
    opnode: nodes.AugAssign | nodes.BinOp,
    operator: str,
    other: InferenceResult,
    context: InferenceContext,
    _: SuccessfulInferenceResult,
) -> Generator[ConstFactoryResult | util.UninferableBase]:
    """Infer a binary operation whose left-hand side is a Const node.

    Yields a Const wrapping the computed value, ``Const(NotImplemented)``
    when the operation does not apply, or ``Uninferable`` when evaluation
    raises something unexpected.
    """
    not_implemented = nodes.Const(NotImplemented)
    if isinstance(other, nodes.Const):
        if (
            operator == "**"
            and isinstance(self.value, (int, float))
            and isinstance(other.value, (int, float))
            and (self.value > 1e5 or other.value > 1e5)
        ):
            # Exponentiation with huge operands could take an extremely long
            # time to evaluate; refuse instead of hanging inference.
            yield not_implemented
            return
        try:
            impl = BIN_OP_IMPL[operator]
            try:
                yield nodes.const_factory(impl(self.value, other.value))
            except TypeError:
                # ArithmeticError is not enough: float >> float is a TypeError
                yield not_implemented
            except Exception:  # pylint: disable=broad-except
                yield util.Uninferable
        except TypeError:
            # NOTE(review): unclear which path raises TypeError here rather
            # than inside the inner handler — kept as-is; confirm reachability.
            yield not_implemented
    elif isinstance(self.value, str) and operator == "%":
        # TODO(cpopa): implement string interpolation later on.
        yield util.Uninferable
    else:
        yield not_implemented
def _multiply_seq_by_int(
    self: _TupleListNodeT,
    opnode: nodes.AugAssign | nodes.BinOp,
    value: int,
    context: InferenceContext,
) -> _TupleListNodeT:
    """Build a new sequence node representing ``self * value``."""
    result = self.__class__(parent=opnode)
    # A non-positive multiplier, or an empty sequence, yields an empty result.
    if value <= 0 or not self.elts:
        result.elts = []
        return result
    # Refuse to materialize absurdly large sequences.
    if len(self.elts) * value > 1e8:
        result.elts = [util.Uninferable]
        return result
    inferred_elts = [
        util.safe_infer(elt, context) or util.Uninferable
        for elt in self.elts
        if not isinstance(elt, util.UninferableBase)
    ]
    result.elts = inferred_elts * value
    return result
def _filter_uninferable_nodes(
    elts: Sequence[InferenceResult], context: InferenceContext
) -> Iterator[SuccessfulInferenceResult]:
    """Yield inferred values for *elts*, substituting Unknown for Uninferable."""
    for element in elts:
        if isinstance(element, util.UninferableBase):
            yield node_classes.UNATTACHED_UNKNOWN
            continue
        for value in element.infer(context):
            if isinstance(value, util.UninferableBase):
                yield node_classes.UNATTACHED_UNKNOWN
            else:
                yield value
@decorators.yes_if_nothing_inferred
def tl_infer_binary_op(
    self: _TupleListNodeT,
    opnode: nodes.AugAssign | nodes.BinOp,
    operator: str,
    other: InferenceResult,
    context: InferenceContext,
    method: SuccessfulInferenceResult,
) -> Generator[_TupleListNodeT | nodes.Const | util.UninferableBase]:
    """Infer a binary operation on a tuple or list.

    The instance on which the binary operation is performed is a tuple
    or list. This refers to the left-hand side of the operation, so:
    'tuple() + 1' or '[] + A()'

    Supports "+" with a sequence of the same class and "*" with an int
    (or an instance providing ``__index__``); any other combination
    yields ``Const(NotImplemented)``.
    """
    from astroid import helpers  # pylint: disable=import-outside-toplevel

    # For tuples and list the boundnode is no longer the tuple or list instance
    context.boundnode = None
    not_implemented = nodes.Const(NotImplemented)
    if isinstance(other, self.__class__) and operator == "+":
        # Concatenation: chain the inferred elements of both operands,
        # substituting Unknown placeholders for uninferable elements.
        node = self.__class__(parent=opnode)
        node.elts = list(
            itertools.chain(
                _filter_uninferable_nodes(self.elts, context),
                _filter_uninferable_nodes(other.elts, context),
            )
        )
        yield node
    elif isinstance(other, nodes.Const) and operator == "*":
        if not isinstance(other.value, int):
            yield not_implemented
            return
        yield _multiply_seq_by_int(self, opnode, other.value, context)
    elif isinstance(other, bases.Instance) and operator == "*":
        # Verify if the instance supports __index__.
        as_index = helpers.class_instance_as_index(other)
        if not as_index:
            yield util.Uninferable
        elif not isinstance(as_index.value, int):  # pragma: no cover
            # already checked by class_instance_as_index() but faster than casting
            raise AssertionError("Please open a bug report.")
        else:
            yield _multiply_seq_by_int(self, opnode, as_index.value, context)
    else:
        yield not_implemented
@decorators.yes_if_nothing_inferred
def instance_class_infer_binary_op(
    self: nodes.ClassDef,
    opnode: nodes.AugAssign | nodes.BinOp,
    operator: str,
    other: InferenceResult,
    context: InferenceContext,
    method: SuccessfulInferenceResult,
) -> Generator[InferenceResult]:
    """Infer a binary operation on an instance/class by calling the
    already-resolved dunder *method* and yielding its call results.
    """
    return method.infer_call_result(self, context)
235# assignment ##################################################################
236# pylint: disable-next=pointless-string-statement
"""The assigned_stmts method is responsible for returning the assigned statement
238(e.g. not inferred) according to the assignment type.
240The `assign_path` argument is used to record the lhs path of the original node.
241For instance if we want assigned statements for 'c' in 'a, (b,c)', assign_path
242will be [1, 1] once arrived to the Assign node.
244The `context` argument is the current inference context which should be given
245to any intermediary inference necessary.
246"""
def _resolve_looppart(parts, assign_path, context):
    """Recursive function to resolve multiple assignments on loops.

    *parts* are candidate iterables for the loop's right-hand side;
    *assign_path* is the list of indices leading to the assigned name
    inside a (possibly nested) tuple target.
    """
    assign_path = assign_path[:]
    index = assign_path.pop(0)
    for part in parts:
        if isinstance(part, util.UninferableBase):
            continue
        if not hasattr(part, "itered"):
            continue
        try:
            itered = part.itered()
        except TypeError:
            continue
        try:
            if isinstance(itered[index], (nodes.Const, nodes.Name)):
                # Use the container itself so getitem() below resolves the
                # element at *index* instead of iterating raw elements.
                itered = [part]
        except IndexError:
            pass
        for stmt in itered:
            index_node = nodes.Const(index)
            try:
                assigned = stmt.getitem(index_node, context)
            except (AttributeError, AstroidTypeError, AstroidIndexError):
                continue
            if not assign_path:
                # we have resolved the assignment path,
                # don't infer the last part
                yield assigned
            elif isinstance(assigned, util.UninferableBase):
                break
            else:
                # we are not yet on the last part of the path
                # search on each possibly inferred value
                try:
                    yield from _resolve_looppart(
                        assigned.infer(context), assign_path, context
                    )
                except InferenceError:
                    break
@decorators.raise_if_nothing_inferred
def for_assigned_stmts(
    self: nodes.For | nodes.Comprehension,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Yield the statements assigned by a ``for`` loop (or comprehension)
    target.

    The trailing ``return {...}`` sets ``StopIteration.value``; presumably
    it is consumed by the ``raise_if_nothing_inferred`` decorator — confirm.
    """
    if isinstance(self, nodes.AsyncFor) or getattr(self, "is_async", False):
        # Skip inferring of async code for now
        return {
            "node": self,
            "unknown": node,
            "assign_path": assign_path,
            "context": context,
        }
    if assign_path is None:
        # Simple target: each element of an inferred tuple/list iterable is a
        # candidate assigned value.
        for lst in self.iter.infer(context):
            if isinstance(lst, (nodes.Tuple, nodes.List)):
                yield from lst.elts
    else:
        # Nested tuple target: walk assign_path into the iterable's items.
        yield from _resolve_looppart(self.iter.infer(context), assign_path, context)
    return {
        "node": self,
        "unknown": node,
        "assign_path": assign_path,
        "context": context,
    }
def sequence_assigned_stmts(
    self: nodes.Tuple | nodes.List,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Record *node*'s position within this sequence in *assign_path* and
    delegate the lookup to the parent node.
    """
    path = [] if assign_path is None else assign_path
    try:
        position = self.elts.index(node)  # type: ignore[arg-type]
    except ValueError as exc:
        raise InferenceError(
            "Tried to retrieve a node {node!r} which does not exist",
            node=self,
            assign_path=path,
            context=context,
        ) from exc

    path.insert(0, position)
    return self.parent.assigned_stmts(node=self, context=context, assign_path=path)
def assend_assigned_stmts(
    self: nodes.AssignName | nodes.AssignAttr,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Delegate assigned-statement lookup for an assignment target to its
    parent node.
    """
    parent = self.parent
    return parent.assigned_stmts(node=self, context=context)
def _arguments_infer_argname(
    self, name: str | None, context: InferenceContext
) -> Generator[InferenceResult]:
    """Infer the value(s) bound to argument *name* of this Arguments node.

    Tries, in order: the implicit first argument of methods/classmethods,
    a matching call site from the inference context, *args / **kwargs
    defaults, and finally the argument's declared default value.
    """
    # arguments information may be missing, in which case we can't do anything
    # more
    from astroid import arguments  # pylint: disable=import-outside-toplevel

    if not self.arguments:
        yield util.Uninferable
        return

    # Positional/keyword arguments, excluding the *args / **kwargs names.
    args = [arg for arg in self.arguments if arg.name not in [self.vararg, self.kwarg]]
    functype = self.parent.type
    # first argument of instance/class method
    if (
        args
        and getattr(self.arguments[0], "name", None) == name
        and functype != "staticmethod"
    ):
        cls = self.parent.parent.scope()
        is_metaclass = isinstance(cls, nodes.ClassDef) and cls.type == "metaclass"
        # If this is a metaclass, then the first argument will always
        # be the class, not an instance.
        if context.boundnode and isinstance(context.boundnode, bases.Instance):
            cls = context.boundnode._proxied
        if is_metaclass or functype == "classmethod":
            yield cls
            return
        if functype == "method":
            yield cls.instantiate_class()
            return

    if context and context.callcontext:
        callee = context.callcontext.callee
        # Unwrap proxies down to the underlying callable before comparing names.
        while hasattr(callee, "_proxied"):
            callee = callee._proxied
        if getattr(callee, "name", None) == self.parent.name:
            call_site = arguments.CallSite(context.callcontext, context.extra_context)
            yield from call_site.infer_argument(self.parent, name, context)
            return

    if name == self.vararg:
        # *args defaults to an empty tuple.
        vararg = nodes.const_factory(())
        vararg.parent = self
        if not args and self.parent.name == "__init__":
            cls = self.parent.parent.scope()
            vararg.elts = [cls.instantiate_class()]
        yield vararg
        return
    if name == self.kwarg:
        # **kwargs defaults to an empty dict.
        kwarg = nodes.const_factory({})
        kwarg.parent = self
        yield kwarg
        return
    # if there is a default value, yield it. And then yield Uninferable to reflect
    # we can't guess given argument value
    try:
        context = copy_context(context)
        yield from self.default_value(name).infer(context)
        yield util.Uninferable
    except NoDefault:
        yield util.Uninferable
def arguments_assigned_stmts(
    self: nodes.Arguments,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Return the statements assigned to a function argument.

    When the inference context carries a call to this very function, the
    actual call-site argument is used; otherwise fall back on
    ``_arguments_infer_argname`` (self/cls handling, defaults, ...).
    """
    from astroid import arguments  # pylint: disable=import-outside-toplevel

    try:
        node_name = node.name  # type: ignore[union-attr]
    except AttributeError:
        # Added to handle edge cases where node.name is not defined.
        # https://github.com/pylint-dev/astroid/pull/1644#discussion_r901545816
        node_name = None  # pragma: no cover

    if context and context.callcontext:
        callee = context.callcontext.callee
        # Unwrap proxies down to the underlying callable before comparing names.
        while hasattr(callee, "_proxied"):
            callee = callee._proxied
    else:
        return _arguments_infer_argname(self, node_name, context)
    if node and getattr(callee, "name", None) == node.frame().name:
        # reset call context/name
        callcontext = context.callcontext
        context = copy_context(context)
        context.callcontext = None
        args = arguments.CallSite(callcontext, context=context)
        return args.infer_argument(self.parent, node_name, context)
    return _arguments_infer_argname(self, node_name, context)
@decorators.raise_if_nothing_inferred
def assign_assigned_stmts(
    self: nodes.AugAssign | nodes.Assign | nodes.AnnAssign | nodes.TypeAlias,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Yield the value assigned by an assignment statement.

    Without an *assign_path* the right-hand-side value node itself is
    yielded; otherwise the path is resolved into the (possibly nested)
    inferred right-hand-side values.
    """
    if not assign_path:
        yield self.value
        return None
    yield from _resolve_assignment_parts(
        self.value.infer(context), assign_path, context
    )

    return {
        "node": self,
        "unknown": node,
        "assign_path": assign_path,
        "context": context,
    }
def assign_annassigned_stmts(
    self: nodes.AnnAssign,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Yield assigned statements for an annotated assignment, mapping a
    missing value (annotation-only assignment) to Uninferable.
    """
    for value in assign_assigned_stmts(self, node, context, assign_path):
        yield util.Uninferable if value is None else value
def _resolve_assignment_parts(parts, assign_path, context):
    """Recursive function to resolve multiple assignments.

    *parts* are the inferred right-hand-side values; *assign_path* is the
    list of indices leading to the assigned name inside nested tuple/list
    targets.
    """
    assign_path = assign_path[:]
    index = assign_path.pop(0)
    for part in parts:
        assigned = None
        if isinstance(part, nodes.Dict):
            # A dictionary in an iterating context
            try:
                assigned, _ = part.items[index]
            except IndexError:
                return

        elif hasattr(part, "getitem"):
            index_node = nodes.Const(index)
            try:
                assigned = part.getitem(index_node, context)
            except (AstroidTypeError, AstroidIndexError):
                return

        if not assigned:
            return

        if not assign_path:
            # we have resolved the assignment path, don't infer the
            # last part
            yield assigned
        elif isinstance(assigned, util.UninferableBase):
            return
        else:
            # we are not yet on the last part of the path search on each
            # possibly inferred value
            try:
                yield from _resolve_assignment_parts(
                    assigned.infer(context), assign_path, context
                )
            except InferenceError:
                return
@decorators.raise_if_nothing_inferred
def excepthandler_assigned_stmts(
    self: nodes.ExceptHandler,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Yield the exception instance(s) bound by ``except ... as name``."""
    from astroid import objects  # pylint: disable=import-outside-toplevel

    def _generate_assigned():
        # Each caught exception class is wrapped in an ExceptionInstance.
        for assigned in node_classes.unpack_infer(self.type):
            if isinstance(assigned, nodes.ClassDef):
                assigned = objects.ExceptionInstance(assigned)

            yield assigned

    if isinstance(self.parent, node_classes.TryStar):
        # except * handler has assigned ExceptionGroup with caught
        # exceptions under exceptions attribute
        # pylint: disable-next=stop-iteration-return
        eg = next(
            node_classes.unpack_infer(
                extract_node(
                    """
from builtins import ExceptionGroup
ExceptionGroup
"""
                )
            )
        )
        assigned = objects.ExceptionInstance(eg)
        assigned.instance_attrs["exceptions"] = [
            nodes.List.from_elements(_generate_assigned())
        ]
        yield assigned
    else:
        yield from _generate_assigned()
    return {
        "node": self,
        "unknown": node,
        "assign_path": assign_path,
        "context": context,
    }
def _infer_context_manager(self, mgr, context):
    """Infer the value a ``with`` statement binds for manager *mgr*.

    Supports generator functions decorated with contextlib.contextmanager
    (yields the generator's yield type) and instances defining
    ``__enter__`` (yields ``__enter__``'s call results). Raises
    InferenceError in every other case.
    """
    try:
        inferred = next(mgr.infer(context=context))
    except StopIteration as e:
        raise InferenceError(node=mgr) from e
    if isinstance(inferred, bases.Generator):
        # Check if it is decorated with contextlib.contextmanager.
        func = inferred.parent
        if not func.decorators:
            raise InferenceError(
                "No decorators found on inferred generator %s", node=func
            )

        for decorator_node in func.decorators.nodes:
            decorator = next(decorator_node.infer(context=context), None)
            if isinstance(decorator, nodes.FunctionDef):
                if decorator.qname() == _CONTEXTLIB_MGR:
                    break
        else:
            # It doesn't interest us.
            raise InferenceError(node=func)
        try:
            yield next(inferred.infer_yield_types())
        except StopIteration as e:
            raise InferenceError(node=func) from e

    elif isinstance(inferred, bases.Instance):
        try:
            enter = next(inferred.igetattr("__enter__", context=context))
        except (InferenceError, AttributeInferenceError, StopIteration) as exc:
            raise InferenceError(node=inferred) from exc
        if not isinstance(enter, bases.BoundMethod):
            raise InferenceError(node=enter)
        yield from enter.infer_call_result(self, context)
    else:
        raise InferenceError(node=mgr)
@decorators.raise_if_nothing_inferred
def with_assigned_stmts(
    self: nodes.With,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Infer names and other nodes from a *with* statement.

    This enables only inference for name binding in a *with* statement.
    For instance, in the following code, inferring `func` will return
    the `ContextManager` class, not whatever ``__enter__`` returns.
    We are doing this intentionally, because we consider that the context
    manager result is whatever __enter__ returns and what it is bound to
    using the ``as`` keyword.

        class ContextManager(object):
            def __enter__(self):
                return 42
        with ContextManager() as f:
            pass

        # ContextManager().infer() will return ContextManager
        # f.infer() will return 42.

    Arguments:
        self: nodes.With
        node: The target of the assignment, `as (a, b)` in `with foo as (a, b)`.
        context: Inference context used for caching already inferred objects
        assign_path:
            A list of indices, where each index specifies what item to fetch from
            the inference results.
    """
    try:
        # Find the context-manager expression whose ``as`` target is *node*.
        mgr = next(mgr for (mgr, vars) in self.items if vars == node)
    except StopIteration:
        return None
    if assign_path is None:
        yield from _infer_context_manager(self, mgr, context)
    else:
        for result in _infer_context_manager(self, mgr, context):
            # Walk the assign_path and get the item at the final index.
            obj = result
            for index in assign_path:
                if not hasattr(obj, "elts"):
                    raise InferenceError(
                        "Wrong type ({targets!r}) for {node!r} assignment",
                        node=self,
                        targets=node,
                        assign_path=assign_path,
                        context=context,
                    )
                try:
                    obj = obj.elts[index]
                except IndexError as exc:
                    raise InferenceError(
                        "Tried to infer a nonexistent target with index {index} "
                        "in {node!r}.",
                        node=self,
                        targets=node,
                        assign_path=assign_path,
                        context=context,
                    ) from exc
                except TypeError as exc:
                    raise InferenceError(
                        "Tried to unpack a non-iterable value in {node!r}.",
                        node=self,
                        targets=node,
                        assign_path=assign_path,
                        context=context,
                    ) from exc
            yield obj
    return {
        "node": self,
        "unknown": node,
        "assign_path": assign_path,
        "context": context,
    }
@decorators.raise_if_nothing_inferred
def named_expr_assigned_stmts(
    self: nodes.NamedExpr,
    node: node_classes.AssignedStmtsPossibleNode,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Infer names and other nodes from an assignment expression."""
    # Only the walrus target itself can be resolved here.
    if self.target != node:
        raise InferenceError(
            "Cannot infer NamedExpr node {node!r}",
            node=self,
            assign_path=assign_path,
            context=context,
        )
    yield from self.value.infer(context=context)
@decorators.yes_if_nothing_inferred
def starred_assigned_stmts(  # noqa: C901
    self: nodes.Starred,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Infer the value(s) a Starred node captures in an unpacking assignment
    or a ``for`` loop target.

    Arguments:
        self: nodes.Starred
        node: a node related to the current underlying Node.
        context: Inference context used for caching already inferred objects
        assign_path:
            A list of indices, where each index specifies what item to fetch from
            the inference results.

    Raises InferenceError when the enclosing statement is neither an Assign
    nor a For node, or when the target structure cannot be made sense of.
    """

    # pylint: disable = too-many-locals, too-many-statements, too-many-branches

    def _determine_starred_iteration_lookups(
        starred: nodes.Starred, target: nodes.Tuple, lookups: list[tuple[int, int]]
    ) -> None:
        # Determine the lookups for the rhs of the iteration: each entry is
        # (index within the tuple level, length of that tuple level).
        itered = target.itered()
        for index, element in enumerate(itered):
            if (
                isinstance(element, nodes.Starred)
                and element.value.name == starred.value.name
            ):
                lookups.append((index, len(itered)))
                break
            if isinstance(element, nodes.Tuple):
                lookups.append((index, len(element.itered())))
                _determine_starred_iteration_lookups(starred, element, lookups)

    stmt = self.statement()
    if not isinstance(stmt, (nodes.Assign, nodes.For)):
        raise InferenceError(
            "Statement {stmt!r} enclosing {node!r} must be an Assign or For node.",
            node=self,
            stmt=stmt,
            unknown=node,
            context=context,
        )

    if context is None:
        context = InferenceContext()

    if isinstance(stmt, nodes.Assign):
        value = stmt.value
        lhs = stmt.targets[0]
        if not isinstance(lhs, nodes.BaseContainer):
            yield util.Uninferable
            return

        # Python forbids more than one starred target per assignment.
        if sum(1 for _ in lhs.nodes_of_class(nodes.Starred)) > 1:
            raise InferenceError(
                "Too many starred arguments in the assignment targets {lhs!r}.",
                node=self,
                targets=lhs,
                unknown=node,
                context=context,
            )

        try:
            rhs = next(value.infer(context))
        except (InferenceError, StopIteration):
            yield util.Uninferable
            return
        if isinstance(rhs, util.UninferableBase) or not hasattr(rhs, "itered"):
            yield util.Uninferable
            return

        try:
            elts = collections.deque(rhs.itered())  # type: ignore[union-attr]
        except TypeError:
            yield util.Uninferable
            return

        # Unpack iteratively the values from the rhs of the assignment,
        # until we find the starred node. What will remain will
        # be the list of values which the Starred node will represent.
        # This is done in two steps, from left to right to remove
        # anything before the starred node and from right to left
        # to remove anything after the starred node.

        for index, left_node in enumerate(lhs.elts):
            if not isinstance(left_node, nodes.Starred):
                if not elts:
                    break
                elts.popleft()
                continue
            lhs_elts = collections.deque(reversed(lhs.elts[index:]))
            for right_node in lhs_elts:
                if not isinstance(right_node, nodes.Starred):
                    if not elts:
                        break
                    elts.pop()
                    continue

                # We're done unpacking.
                packed = nodes.List(
                    ctx=Context.Store,
                    parent=self,
                    lineno=lhs.lineno,
                    col_offset=lhs.col_offset,
                )
                packed.postinit(elts=list(elts))
                yield packed
                break

    if isinstance(stmt, nodes.For):
        try:
            inferred_iterable = next(stmt.iter.infer(context=context))
        except (InferenceError, StopIteration):
            yield util.Uninferable
            return
        if isinstance(inferred_iterable, util.UninferableBase) or not hasattr(
            inferred_iterable, "itered"
        ):
            yield util.Uninferable
            return
        try:
            itered = inferred_iterable.itered()  # type: ignore[union-attr]
        except TypeError:
            yield util.Uninferable
            return

        target = stmt.target

        if not isinstance(target, nodes.Tuple):
            raise InferenceError(
                f"Could not make sense of this, the target must be a tuple, not {type(target)!r}",
                context=context,
            )

        lookups: list[tuple[int, int]] = []
        _determine_starred_iteration_lookups(self, target, lookups)
        if not lookups:
            raise InferenceError(
                "Could not make sense of this, needs at least a lookup", context=context
            )

        # Make the last lookup a slice, since that what we want for a Starred node
        last_element_index, last_element_length = lookups[-1]
        is_starred_last = last_element_index == (last_element_length - 1)

        lookup_slice = slice(
            last_element_index,
            None if is_starred_last else (last_element_length - last_element_index),
        )
        last_lookup = lookup_slice

        for element in itered:
            # We probably want to infer the potential values *for each* element in an
            # iterable, but we can't infer a list of all values, when only a list of
            # step values are expected:
            #
            # for a, *b in [...]:
            #   b
            #
            # *b* should now point to just the elements at that particular iteration step,
            # which astroid can't know about.

            found_element = None
            for index, lookup in enumerate(lookups):
                if not hasattr(element, "itered"):
                    break
                # Fixed: compare with "==", not "is" — identity comparison of
                # ints relies on CPython's small-int cache and is not a valid
                # equality test for values above 256.
                if index + 1 == len(lookups):
                    cur_lookup: slice | int = last_lookup
                else:
                    # Grab just the index, not the whole length
                    cur_lookup = lookup[0]
                try:
                    itered_inner_element = element.itered()
                    element = itered_inner_element[cur_lookup]
                except IndexError:
                    break
                except TypeError:
                    # Most likely the itered() call failed, cannot make sense of this
                    yield util.Uninferable
                    return
            else:
                found_element = element

            unpacked = nodes.List(
                ctx=Context.Store,
                parent=self,
                lineno=self.lineno,
                col_offset=self.col_offset,
            )
            unpacked.postinit(elts=found_element or [])
            yield unpacked
            return

        yield util.Uninferable
@decorators.yes_if_nothing_inferred
def match_mapping_assigned_stmts(
    self: nodes.MatchMapping,
    node: nodes.AssignName,
    context: InferenceContext | None = None,
    assign_path: None = None,
) -> Generator[nodes.NodeNG]:
    """Yield nothing (an empty generator), so the captured name is
    inferred as Uninferable.
    """
    yield from ()
@decorators.yes_if_nothing_inferred
def match_star_assigned_stmts(
    self: nodes.MatchStar,
    node: nodes.AssignName,
    context: InferenceContext | None = None,
    assign_path: None = None,
) -> Generator[nodes.NodeNG]:
    """Yield nothing (an empty generator), so the captured name is
    inferred as Uninferable.
    """
    yield from ()
@decorators.yes_if_nothing_inferred
def match_as_assigned_stmts(
    self: nodes.MatchAs,
    node: nodes.AssignName,
    context: InferenceContext | None = None,
    assign_path: None = None,
) -> Generator[nodes.NodeNG]:
    """Yield the Match subject for a bare capture pattern (``case x:``)
    directly under a Match; otherwise yield nothing, leaving the name
    Uninferable.
    """
    if self.pattern is not None:
        return
    case = self.parent
    if not isinstance(case, nodes.MatchCase):
        return
    if not isinstance(case.parent, nodes.Match):
        return
    yield case.parent.subject
@decorators.yes_if_nothing_inferred
def generic_type_assigned_stmts(
    self: nodes.TypeVar | nodes.TypeVarTuple | nodes.ParamSpec,
    node: nodes.AssignName,
    context: InferenceContext | None = None,
    assign_path: None = None,
) -> Generator[nodes.NodeNG]:
    """Hack. Return any Node so inference doesn't fail
    when evaluating __class_getitem__. Revert if it's causing issues.
    """
    # A harmless constant node; its only role is to keep inference going.
    yield nodes.Const(None)