1# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
2# For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE
3# Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt
4
5"""This module contains a set of functions to handle python protocols for nodes
6where it makes sense.
7"""
8
9from __future__ import annotations
10
11import collections
12import itertools
13import operator as operator_mod
14from collections.abc import Callable, Generator, Iterator, Sequence
15from typing import TYPE_CHECKING, Any, TypeVar
16
17from astroid import bases, decorators, nodes, util
18from astroid.const import Context
19from astroid.context import InferenceContext, copy_context
20from astroid.exceptions import (
21 AstroidIndexError,
22 AstroidTypeError,
23 AttributeInferenceError,
24 InferenceError,
25 NoDefault,
26)
27from astroid.nodes import node_classes
28from astroid.typing import (
29 ConstFactoryResult,
30 InferenceResult,
31 SuccessfulInferenceResult,
32)
33
34if TYPE_CHECKING:
35 _TupleListNodeT = TypeVar("_TupleListNodeT", nodes.Tuple, nodes.List)
36
# Qualified name of the stdlib decorator that turns a generator function into
# a context manager; used to recognise such functions during `with` inference.
_CONTEXTLIB_MGR = "contextlib.contextmanager"

# Map each unary operator symbol to the callable implementing it.
_UNARY_OPERATORS: dict[str, Callable[[Any], Any]] = {
    "+": operator_mod.pos,
    "-": operator_mod.neg,
    "~": operator_mod.invert,
    "not": operator_mod.not_,
}
45
46
def _infer_unary_op(obj: Any, op: str) -> ConstFactoryResult:
    """Apply the unary operator `op` to `obj` and wrap the result in a Const.

    `NotImplemented` is passed through untouched.  May raise TypeError when
    the operation is unsupported for `obj`, or KeyError for an unknown `op`.
    """
    if obj is NotImplemented:
        return nodes.const_factory(obj)
    return nodes.const_factory(_UNARY_OPERATORS[op](obj))
58
59
def tuple_infer_unary_op(self, op):
    """Infer a unary operation applied to a Tuple node."""
    operand = tuple(self.elts)
    return _infer_unary_op(operand, op)
62
63
def list_infer_unary_op(self, op):
    """Infer a unary operation applied to a List node."""
    operand = self.elts
    return _infer_unary_op(operand, op)
66
67
def set_infer_unary_op(self, op):
    """Infer a unary operation applied to a Set node."""
    operand = set(self.elts)
    return _infer_unary_op(operand, op)
70
71
def const_infer_unary_op(self, op):
    """Infer a unary operation applied to a Const node."""
    operand = self.value
    return _infer_unary_op(operand, op)
74
75
def dict_infer_unary_op(self, op):
    """Infer a unary operation applied to a Dict node."""
    operand = dict(self.items)
    return _infer_unary_op(operand, op)
78
79
80# Binary operations
81
# Implementations of the plain binary operators.  The table is then extended
# with their augmented-assignment variants ("+=", "-=", ...), which share the
# same semantics for inference purposes.
_BASE_BIN_OPS: dict[str, Callable[[Any, Any], Any]] = {
    "+": operator_mod.add,
    "-": operator_mod.sub,
    "/": operator_mod.truediv,
    "//": operator_mod.floordiv,
    "*": operator_mod.mul,
    "**": operator_mod.pow,
    "%": operator_mod.mod,
    "&": operator_mod.and_,
    "|": operator_mod.or_,
    "^": operator_mod.xor,
    "<<": operator_mod.lshift,
    ">>": operator_mod.rshift,
    "@": operator_mod.matmul,
}
BIN_OP_IMPL = {
    **_BASE_BIN_OPS,
    **{f"{symbol}=": impl for symbol, impl in _BASE_BIN_OPS.items()},
}
99
100
@decorators.yes_if_nothing_inferred
def const_infer_binary_op(
    self: nodes.Const,
    opnode: nodes.AugAssign | nodes.BinOp,
    operator: str,
    other: InferenceResult,
    context: InferenceContext,
    _: SuccessfulInferenceResult,
) -> Generator[ConstFactoryResult | util.UninferableBase]:
    """Infer a binary operation whose left-hand side is a Const node.

    Yields a Const wrapping the computed value, Const(NotImplemented) when
    the operation is unsupported for the operands, or Uninferable when
    evaluation fails for any other reason.
    """
    not_implemented = nodes.Const(NotImplemented)
    if isinstance(other, nodes.Const):
        if (
            operator == "**"
            and isinstance(self.value, (int, float))
            and isinstance(other.value, (int, float))
            and (self.value > 1e5 or other.value > 1e5)
        ):
            # Refuse to evaluate exponentiation with huge operands: it could
            # take a very long time or produce an enormous result.
            yield not_implemented
            return
        try:
            impl = BIN_OP_IMPL[operator]
            try:
                yield nodes.const_factory(impl(self.value, other.value))
            except TypeError:
                # ArithmeticError is not enough: float >> float is a TypeError
                yield not_implemented
            except Exception:  # pylint: disable=broad-except
                # Anything else that went wrong during evaluation: give up.
                yield util.Uninferable
        except TypeError:
            yield not_implemented
    elif isinstance(self.value, str) and operator == "%":
        # TODO(cpopa): implement string interpolation later on.
        yield util.Uninferable
    else:
        yield not_implemented
136
137
def _multiply_seq_by_int(
    self: _TupleListNodeT,
    opnode: nodes.AugAssign | nodes.BinOp,
    value: int,
    context: InferenceContext,
) -> _TupleListNodeT:
    """Build a new Tuple/List node representing `self * value`.

    Elements are inferred where possible; Uninferable elements of the
    original sequence are dropped before multiplying.
    """
    result = self.__class__(parent=opnode)
    if value <= 0 or not self.elts:
        # Multiplying by a non-positive int (or an empty sequence) is empty.
        result.elts = []
    elif len(self.elts) * value > 1e8:
        # The result would be far too large to materialise; represent it with
        # a single Uninferable element instead.
        result.elts = [util.Uninferable]
    else:
        inferred = [
            util.safe_infer(elt, context) or util.Uninferable
            for elt in self.elts
            if not isinstance(elt, util.UninferableBase)
        ]
        result.elts = inferred * value
    return result
158
159
def _filter_uninferable_nodes(
    elts: Sequence[InferenceResult], context: InferenceContext
) -> Iterator[SuccessfulInferenceResult]:
    """Yield the inferred values of each element of `elts`.

    Uninferable elements (and Uninferable inference results) are replaced
    with fresh Unknown nodes so callers always receive concrete nodes.
    """
    for elt in elts:
        if isinstance(elt, util.UninferableBase):
            yield nodes.Unknown()
            continue
        for inferred in elt.infer(context):
            if isinstance(inferred, util.UninferableBase):
                yield nodes.Unknown()
            else:
                yield inferred
172
173
@decorators.yes_if_nothing_inferred
def tl_infer_binary_op(
    self: _TupleListNodeT,
    opnode: nodes.AugAssign | nodes.BinOp,
    operator: str,
    other: InferenceResult,
    context: InferenceContext,
    method: SuccessfulInferenceResult,
) -> Generator[_TupleListNodeT | nodes.Const | util.UninferableBase]:
    """Infer a binary operation on a tuple or list.

    The instance on which the binary operation is performed is a tuple
    or list. This refers to the left-hand side of the operation, so:
    'tuple() + 1' or '[] + A()'
    """
    from astroid import helpers  # pylint: disable=import-outside-toplevel

    # For tuples and list the boundnode is no longer the tuple or list instance
    context.boundnode = None
    not_implemented = nodes.Const(NotImplemented)
    if isinstance(other, self.__class__) and operator == "+":
        # Concatenation: build a new container holding the inferred elements
        # of both operands.
        node = self.__class__(parent=opnode)
        node.elts = list(
            itertools.chain(
                _filter_uninferable_nodes(self.elts, context),
                _filter_uninferable_nodes(other.elts, context),
            )
        )
        yield node
    elif isinstance(other, nodes.Const) and operator == "*":
        # Repetition by a constant: only int multipliers are supported.
        if not isinstance(other.value, int):
            yield not_implemented
            return
        yield _multiply_seq_by_int(self, opnode, other.value, context)
    elif isinstance(other, bases.Instance) and operator == "*":
        # Verify if the instance supports __index__.
        as_index = helpers.class_instance_as_index(other)
        if not as_index:
            yield util.Uninferable
        elif not isinstance(as_index.value, int):  # pragma: no cover
            # already checked by class_instance_as_index() but faster than casting
            raise AssertionError("Please open a bug report.")
        else:
            yield _multiply_seq_by_int(self, opnode, as_index.value, context)
    else:
        yield not_implemented
220
221
@decorators.yes_if_nothing_inferred
def instance_class_infer_binary_op(
    self: nodes.ClassDef,
    opnode: nodes.AugAssign | nodes.BinOp,
    operator: str,
    other: InferenceResult,
    context: InferenceContext,
    method: SuccessfulInferenceResult,
) -> Generator[InferenceResult]:
    """Infer a binary operation on an instance/class by calling the
    already-resolved dunder `method` and yielding its call results.
    """
    return method.infer_call_result(self, context)
232
233
234# assignment ##################################################################
235# pylint: disable-next=pointless-string-statement
"""The assigned_stmts method is responsible for returning the assigned statement
(e.g. not inferred) according to the assignment type.

The `assign_path` argument is used to record the lhs path of the original node.
For instance, if we want the assigned statements for 'c' in 'a, (b,c)', assign_path
will be [1, 1] by the time we arrive at the Assign node.

The `context` argument is the current inference context which should be given
to any intermediary inference necessary.
"""
246
247
def _resolve_looppart(parts, assign_path, context):
    """Recursive function to resolve multiple assignments on loops.

    `parts` are the inferred values of the iterable being looped over and
    `assign_path` the list of indices leading to the assignment target;
    one index is consumed at each recursion level.
    """
    assign_path = assign_path[:]
    index = assign_path.pop(0)
    for part in parts:
        if isinstance(part, util.UninferableBase):
            continue
        if not hasattr(part, "itered"):
            continue
        try:
            itered = part.itered()
        except TypeError:
            continue
        try:
            # A scalar at this index means `part` itself is one iteration
            # value rather than a container of them.
            if isinstance(itered[index], (nodes.Const, nodes.Name)):
                itered = [part]
        except IndexError:
            pass
        for stmt in itered:
            index_node = nodes.Const(index)
            try:
                assigned = stmt.getitem(index_node, context)
            except (AttributeError, AstroidTypeError, AstroidIndexError):
                continue
            if not assign_path:
                # The assignment path is fully resolved; don't infer the
                # last part.
                yield assigned
            elif isinstance(assigned, util.UninferableBase):
                break
            else:
                # We are not yet on the last part of the path: recurse into
                # each possibly inferred value.
                try:
                    yield from _resolve_looppart(
                        assigned.infer(context), assign_path, context
                    )
                except InferenceError:
                    break
287
288
@decorators.raise_if_nothing_inferred
def for_assigned_stmts(
    self: nodes.For | nodes.Comprehension,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Infer the values bound to the target of a `for` loop or comprehension.

    The dict returned at the end is not an inference result: it travels on
    StopIteration and is presumably consumed by the raise_if_nothing_inferred
    decorator to build a useful error when nothing was inferred.
    """
    if isinstance(self, nodes.AsyncFor) or getattr(self, "is_async", False):
        # Skip inferring of async code for now
        return {
            "node": self,
            "unknown": node,
            "assign_path": assign_path,
            "context": context,
        }
    if assign_path is None:
        # Simple target: every element of the iterable is a candidate value.
        for lst in self.iter.infer(context):
            if isinstance(lst, (nodes.Tuple, nodes.List)):
                yield from lst.elts
    else:
        # Nested target (e.g. `for a, (b, c) in ...`): walk the path through
        # each inferred iterable.
        yield from _resolve_looppart(self.iter.infer(context), assign_path, context)
    return {
        "node": self,
        "unknown": node,
        "assign_path": assign_path,
        "context": context,
    }
316
317
def sequence_assigned_stmts(
    self: nodes.Tuple | nodes.List,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Record the position of `node` inside this sequence, then delegate
    the actual resolution to the parent assignment node.
    """
    if assign_path is None:
        assign_path = []
    try:
        position = self.elts.index(node)  # type: ignore[arg-type]
    except ValueError as exc:
        raise InferenceError(
            "Tried to retrieve a node {node!r} which does not exist",
            node=self,
            assign_path=assign_path,
            context=context,
        ) from exc

    # Prepend our index so the parent sees the full path down to `node`.
    assign_path.insert(0, position)
    return self.parent.assigned_stmts(
        node=self, context=context, assign_path=assign_path
    )
340
341
def assend_assigned_stmts(
    self: nodes.AssignName | nodes.AssignAttr,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Delegate to the parent node, which knows how this target is assigned."""
    return self.parent.assigned_stmts(node=self, context=context)
349
350
def _arguments_infer_argname(
    self, name: str | None, context: InferenceContext
) -> Generator[InferenceResult]:
    """Infer the possible value(s) of the argument `name` of this Arguments
    node: self/cls, call-site arguments, *args/**kwargs shapes, or defaults.
    """
    # arguments information may be missing, in which case we can't do anything
    # more
    from astroid import arguments  # pylint: disable=import-outside-toplevel

    if not self.arguments:
        yield util.Uninferable
        return

    args = [arg for arg in self.arguments if arg.name not in [self.vararg, self.kwarg]]
    functype = self.parent.type
    # first argument of instance/class method
    if (
        args
        and getattr(self.arguments[0], "name", None) == name
        and functype != "staticmethod"
    ):
        cls = self.parent.parent.scope()
        is_metaclass = isinstance(cls, nodes.ClassDef) and cls.type == "metaclass"
        # If this is a metaclass, then the first argument will always
        # be the class, not an instance.
        if context.boundnode and isinstance(context.boundnode, bases.Instance):
            cls = context.boundnode._proxied
        if is_metaclass or functype == "classmethod":
            yield cls
            return
        if functype == "method":
            yield cls.instantiate_class()
            return

    if context and context.callcontext:
        callee = context.callcontext.callee
        while hasattr(callee, "_proxied"):
            callee = callee._proxied
        if getattr(callee, "name", None) == self.parent.name:
            # The call context targets this very function: infer the argument
            # from the actual call site.
            call_site = arguments.CallSite(context.callcontext, context.extra_context)
            yield from call_site.infer_argument(self.parent, name, context)
            return

    if name == self.vararg:
        # *args is an empty tuple by default...
        vararg = nodes.const_factory(())
        vararg.parent = self
        if not args and self.parent.name == "__init__":
            # ...except in `__init__(*args)`, where it carries the instance.
            cls = self.parent.parent.scope()
            vararg.elts = [cls.instantiate_class()]
        yield vararg
        return
    if name == self.kwarg:
        # **kwargs is an empty dict by default.
        kwarg = nodes.const_factory({})
        kwarg.parent = self
        yield kwarg
        return
    # if there is a default value, yield it. And then yield Uninferable to reflect
    # we can't guess given argument value
    try:
        context = copy_context(context)
        yield from self.default_value(name).infer(context)
        yield util.Uninferable
    except NoDefault:
        yield util.Uninferable
413
414
def arguments_assigned_stmts(
    self: nodes.Arguments,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Infer the value(s) assigned to the function argument `node`.

    When the inference context carries a call to this very function, the
    actual call arguments are used; otherwise we fall back to
    `_arguments_infer_argname` (defaults, *args/**kwargs shapes, ...).
    """
    from astroid import arguments  # pylint: disable=import-outside-toplevel

    try:
        node_name = node.name  # type: ignore[union-attr]
    except AttributeError:
        # Added to handle edge cases where node.name is not defined.
        # https://github.com/pylint-dev/astroid/pull/1644#discussion_r901545816
        node_name = None  # pragma: no cover

    if context and context.callcontext:
        # Unwrap proxies to reach the actual callee definition.
        callee = context.callcontext.callee
        while hasattr(callee, "_proxied"):
            callee = callee._proxied
    else:
        # No call context available: infer from the signature alone.
        return _arguments_infer_argname(self, node_name, context)
    if node and getattr(callee, "name", None) == node.frame().name:
        # reset call context/name
        callcontext = context.callcontext
        context = copy_context(context)
        context.callcontext = None
        args = arguments.CallSite(callcontext, context=context)
        return args.infer_argument(self.parent, node_name, context)
    return _arguments_infer_argname(self, node_name, context)
444
445
@decorators.raise_if_nothing_inferred
def assign_assigned_stmts(
    self: nodes.AugAssign | nodes.Assign | nodes.AnnAssign | nodes.TypeAlias,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Yield the value(s) assigned by this assignment statement.

    Without an `assign_path` the right-hand side is yielded uninferred;
    otherwise the path is walked through the inferred values of the
    right-hand side to find the matching sub-element.
    """
    if not assign_path:
        yield self.value
        return None
    yield from _resolve_assignment_parts(
        self.value.infer(context), assign_path, context
    )

    # Metadata carried on StopIteration for the raise_if_nothing_inferred
    # decorator; not a normal inference result.
    return {
        "node": self,
        "unknown": node,
        "assign_path": assign_path,
        "context": context,
    }
466
467
def assign_annassigned_stmts(
    self: nodes.AnnAssign,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Like assign_assigned_stmts, but map None results to Uninferable."""
    for inferred in assign_assigned_stmts(self, node, context, assign_path):
        yield util.Uninferable if inferred is None else inferred
479
480
def _resolve_assignment_parts(parts, assign_path, context):
    """Recursive function to resolve multiple assignments.

    Walks `assign_path` (a list of indices) one level per recursion,
    looking the first index up in each candidate value of `parts`.
    """
    assign_path = assign_path[:]
    index = assign_path.pop(0)
    for part in parts:
        assigned = None
        if isinstance(part, nodes.Dict):
            # A dictionary in an iterating context
            try:
                assigned, _ = part.items[index]
            except IndexError:
                return

        elif hasattr(part, "getitem"):
            index_node = nodes.Const(index)
            try:
                assigned = part.getitem(index_node, context)
            except (AstroidTypeError, AstroidIndexError):
                return

        # NOTE(review): truthiness test — a falsy (but valid) assigned node
        # would also stop resolution here; presumably intentional.
        if not assigned:
            return

        if not assign_path:
            # The assignment path is fully resolved; don't infer the
            # last part.
            yield assigned
        elif isinstance(assigned, util.UninferableBase):
            return
        else:
            # We are not yet on the last part of the path: recurse into
            # each possibly inferred value.
            try:
                yield from _resolve_assignment_parts(
                    assigned.infer(context), assign_path, context
                )
            except InferenceError:
                return
519
520
@decorators.raise_if_nothing_inferred
def excepthandler_assigned_stmts(
    self: nodes.ExceptHandler,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Yield the value bound by ``except <type> as <name>``.

    Inferred exception classes are wrapped in an ExceptionInstance, since
    the bound name refers to an instance, not the class itself.
    """
    from astroid import objects  # pylint: disable=import-outside-toplevel

    for assigned in node_classes.unpack_infer(self.type):
        if isinstance(assigned, nodes.ClassDef):
            assigned = objects.ExceptionInstance(assigned)

        yield assigned
    # Metadata carried on StopIteration for the raise_if_nothing_inferred
    # decorator; not a normal inference result.
    return {
        "node": self,
        "unknown": node,
        "assign_path": assign_path,
        "context": context,
    }
541
542
def _infer_context_manager(self, mgr, context):
    """Infer the value bound by ``with mgr as <name>``.

    For a generator produced by a function decorated with
    contextlib.contextmanager, the yielded type is used; for a regular
    instance, the result of calling ``__enter__`` is inferred.  Raises
    InferenceError when neither applies.
    """
    try:
        inferred = next(mgr.infer(context=context))
    except StopIteration as e:
        raise InferenceError(node=mgr) from e
    if isinstance(inferred, bases.Generator):
        # Check if it is decorated with contextlib.contextmanager.
        func = inferred.parent
        if not func.decorators:
            raise InferenceError(
                "No decorators found on inferred generator %s", node=func
            )

        for decorator_node in func.decorators.nodes:
            decorator = next(decorator_node.infer(context=context), None)
            if isinstance(decorator, nodes.FunctionDef):
                if decorator.qname() == _CONTEXTLIB_MGR:
                    break
        else:
            # It doesn't interest us.
            raise InferenceError(node=func)
        try:
            # The context manager value is whatever the generator yields.
            yield next(inferred.infer_yield_types())
        except StopIteration as e:
            raise InferenceError(node=func) from e

    elif isinstance(inferred, bases.Instance):
        try:
            enter = next(inferred.igetattr("__enter__", context=context))
        except (InferenceError, AttributeInferenceError, StopIteration) as exc:
            raise InferenceError(node=inferred) from exc
        if not isinstance(enter, bases.BoundMethod):
            raise InferenceError(node=enter)
        yield from enter.infer_call_result(self, context)
    else:
        raise InferenceError(node=mgr)
579
580
@decorators.raise_if_nothing_inferred
def with_assigned_stmts(
    self: nodes.With,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Infer names and other nodes from a *with* statement.

    This enables only inference for name binding in a *with* statement.
    For instance, in the following code, inferring `func` will return
    the `ContextManager` class, not whatever ``__enter__`` returns.
    We are doing this intentionally, because we consider that the context
    manager result is whatever __enter__ returns and what it is bound
    using the ``as`` keyword.

        class ContextManager(object):
            def __enter__(self):
                return 42
        with ContextManager() as f:
            pass

        # ContextManager().infer() will return ContextManager
        # f.infer() will return 42.

    Arguments:
        self: nodes.With
        node: The target of the assignment, `as (a, b)` in `with foo as (a, b)`.
        context: Inference context used for caching already inferred objects
        assign_path:
            A list of indices, where each index specifies what item to fetch from
            the inference results.
    """
    try:
        # Find the context-manager expression whose `as` target is `node`.
        mgr = next(mgr for (mgr, vars) in self.items if vars == node)
    except StopIteration:
        return None
    if assign_path is None:
        yield from _infer_context_manager(self, mgr, context)
    else:
        for result in _infer_context_manager(self, mgr, context):
            # Walk the assign_path and get the item at the final index.
            obj = result
            for index in assign_path:
                if not hasattr(obj, "elts"):
                    raise InferenceError(
                        "Wrong type ({targets!r}) for {node!r} assignment",
                        node=self,
                        targets=node,
                        assign_path=assign_path,
                        context=context,
                    )
                try:
                    obj = obj.elts[index]
                except IndexError as exc:
                    raise InferenceError(
                        "Tried to infer a nonexistent target with index {index} "
                        "in {node!r}.",
                        node=self,
                        targets=node,
                        assign_path=assign_path,
                        context=context,
                    ) from exc
                except TypeError as exc:
                    raise InferenceError(
                        "Tried to unpack a non-iterable value in {node!r}.",
                        node=self,
                        targets=node,
                        assign_path=assign_path,
                        context=context,
                    ) from exc
            yield obj
    # Metadata carried on StopIteration for the raise_if_nothing_inferred
    # decorator; not a normal inference result.
    return {
        "node": self,
        "unknown": node,
        "assign_path": assign_path,
        "context": context,
    }
659
660
@decorators.raise_if_nothing_inferred
def named_expr_assigned_stmts(
    self: nodes.NamedExpr,
    node: node_classes.AssignedStmtsPossibleNode,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Infer names and other nodes from an assignment expression."""
    if self.target != node:
        raise InferenceError(
            "Cannot infer NamedExpr node {node!r}",
            node=self,
            assign_path=assign_path,
            context=context,
        )
    yield from self.value.infer(context=context)
678
679
@decorators.yes_if_nothing_inferred
def starred_assigned_stmts(  # noqa: C901
    self: nodes.Starred,
    node: node_classes.AssignedStmtsPossibleNode = None,
    context: InferenceContext | None = None,
    assign_path: list[int] | None = None,
) -> Any:
    """Infer the value(s) a Starred target captures in an Assign or For.

    Arguments:
        self: nodes.Starred
        node: a node related to the current underlying Node.
        context: Inference context used for caching already inferred objects
        assign_path:
            A list of indices, where each index specifies what item to fetch from
            the inference results.
    """

    # pylint: disable = too-many-locals, too-many-statements, too-many-branches

    def _determine_starred_iteration_lookups(
        starred: nodes.Starred, target: nodes.Tuple, lookups: list[tuple[int, int]]
    ) -> None:
        # Determine the lookups for the rhs of the iteration
        itered = target.itered()
        for index, element in enumerate(itered):
            if (
                isinstance(element, nodes.Starred)
                and element.value.name == starred.value.name
            ):
                lookups.append((index, len(itered)))
                break
            if isinstance(element, nodes.Tuple):
                lookups.append((index, len(element.itered())))
                _determine_starred_iteration_lookups(starred, element, lookups)

    stmt = self.statement()
    if not isinstance(stmt, (nodes.Assign, nodes.For)):
        raise InferenceError(
            "Statement {stmt!r} enclosing {node!r} must be an Assign or For node.",
            node=self,
            stmt=stmt,
            unknown=node,
            context=context,
        )

    if context is None:
        context = InferenceContext()

    if isinstance(stmt, nodes.Assign):
        value = stmt.value
        lhs = stmt.targets[0]
        if not isinstance(lhs, nodes.BaseContainer):
            yield util.Uninferable
            return

        if sum(1 for _ in lhs.nodes_of_class(nodes.Starred)) > 1:
            raise InferenceError(
                "Too many starred arguments in the assignment targets {lhs!r}.",
                node=self,
                targets=lhs,
                unknown=node,
                context=context,
            )

        try:
            rhs = next(value.infer(context))
        except (InferenceError, StopIteration):
            yield util.Uninferable
            return
        if isinstance(rhs, util.UninferableBase) or not hasattr(rhs, "itered"):
            yield util.Uninferable
            return

        try:
            elts = collections.deque(rhs.itered())  # type: ignore[union-attr]
        except TypeError:
            yield util.Uninferable
            return

        # Unpack iteratively the values from the rhs of the assignment,
        # until the find the starred node. What will remain will
        # be the list of values which the Starred node will represent
        # This is done in two steps, from left to right to remove
        # anything before the starred node and from right to left
        # to remove anything after the starred node.

        for index, left_node in enumerate(lhs.elts):
            if not isinstance(left_node, nodes.Starred):
                if not elts:
                    break
                elts.popleft()
                continue
            lhs_elts = collections.deque(reversed(lhs.elts[index:]))
            for right_node in lhs_elts:
                if not isinstance(right_node, nodes.Starred):
                    if not elts:
                        break
                    elts.pop()
                    continue

                # We're done unpacking.
                packed = nodes.List(
                    ctx=Context.Store,
                    parent=self,
                    lineno=lhs.lineno,
                    col_offset=lhs.col_offset,
                )
                packed.postinit(elts=list(elts))
                yield packed
                break

    if isinstance(stmt, nodes.For):
        try:
            inferred_iterable = next(stmt.iter.infer(context=context))
        except (InferenceError, StopIteration):
            yield util.Uninferable
            return
        if isinstance(inferred_iterable, util.UninferableBase) or not hasattr(
            inferred_iterable, "itered"
        ):
            yield util.Uninferable
            return
        try:
            itered = inferred_iterable.itered()  # type: ignore[union-attr]
        except TypeError:
            yield util.Uninferable
            return

        target = stmt.target

        if not isinstance(target, nodes.Tuple):
            raise InferenceError(
                "Could not make sense of this, the target must be a tuple",
                context=context,
            )

        lookups: list[tuple[int, int]] = []
        _determine_starred_iteration_lookups(self, target, lookups)
        if not lookups:
            raise InferenceError(
                "Could not make sense of this, needs at least a lookup", context=context
            )

        # Make the last lookup a slice, since that what we want for a Starred node
        last_element_index, last_element_length = lookups[-1]
        is_starred_last = last_element_index == (last_element_length - 1)

        lookup_slice = slice(
            last_element_index,
            None if is_starred_last else (last_element_length - last_element_index),
        )
        last_lookup = lookup_slice

        for element in itered:
            # We probably want to infer the potential values *for each* element in an
            # iterable, but we can't infer a list of all values, when only a list of
            # step values are expected:
            #
            # for a, *b in [...]:
            #   b
            #
            # *b* should now point to just the elements at that particular iteration step,
            # which astroid can't know about.

            found_element = None
            for index, lookup in enumerate(lookups):
                if not hasattr(element, "itered"):
                    break
                # Use "==" (not "is"): identity comparison of ints relies on
                # CPython's small-int caching and silently fails for lengths
                # above 256.
                if index + 1 == len(lookups):
                    cur_lookup: slice | int = last_lookup
                else:
                    # Grab just the index, not the whole length
                    cur_lookup = lookup[0]
                try:
                    itered_inner_element = element.itered()
                    element = itered_inner_element[cur_lookup]
                except IndexError:
                    break
                except TypeError:
                    # Most likely the itered() call failed, cannot make sense of this
                    yield util.Uninferable
                    return
                else:
                    found_element = element

            unpacked = nodes.List(
                ctx=Context.Store,
                parent=self,
                lineno=self.lineno,
                col_offset=self.col_offset,
            )
            unpacked.postinit(elts=found_element or [])
            yield unpacked
            return

    yield util.Uninferable
876
877
@decorators.yes_if_nothing_inferred
def match_mapping_assigned_stmts(
    self: nodes.MatchMapping,
    node: nodes.AssignName,
    context: InferenceContext | None = None,
    assign_path: None = None,
) -> Generator[nodes.NodeNG]:
    """Return empty generator (return -> raises StopIteration) so inferred value
    is Uninferable.
    """
    return
    # Unreachable: its only purpose is to make this function a generator.
    yield
890
891
@decorators.yes_if_nothing_inferred
def match_star_assigned_stmts(
    self: nodes.MatchStar,
    node: nodes.AssignName,
    context: InferenceContext | None = None,
    assign_path: None = None,
) -> Generator[nodes.NodeNG]:
    """Return empty generator (return -> raises StopIteration) so inferred value
    is Uninferable.
    """
    return
    # Unreachable: its only purpose is to make this function a generator.
    yield
904
905
@decorators.yes_if_nothing_inferred
def match_as_assigned_stmts(
    self: nodes.MatchAs,
    node: nodes.AssignName,
    context: InferenceContext | None = None,
    assign_path: None = None,
) -> Generator[nodes.NodeNG]:
    """Infer MatchAs as the Match subject if it's the only MatchCase pattern
    else raise StopIteration to yield Uninferable.
    """
    # A bare capture pattern (`case name:`) directly under a MatchCase binds
    # the Match subject itself; anything else stays uninferred.
    if not isinstance(self.parent, nodes.MatchCase):
        return
    if not isinstance(self.parent.parent, nodes.Match):
        return
    if self.pattern is not None:
        return
    yield self.parent.parent.subject
922
923
@decorators.yes_if_nothing_inferred
def generic_type_assigned_stmts(
    self: nodes.TypeVar | nodes.TypeVarTuple | nodes.ParamSpec,
    node: nodes.AssignName,
    context: InferenceContext | None = None,
    assign_path: None = None,
) -> Generator[nodes.NodeNG]:
    """Hack. Return any Node so inference doesn't fail
    when evaluating __class_getitem__. Revert if it's causing issues.
    """
    # Any concrete node works here; Const(None) is the cheapest to build.
    yield nodes.Const(None)