Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/astroid/protocols.py: 49%

438 statements  

coverage.py v7.2.7, created at 2023-06-07 06:53 +0000

1# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html 

2# For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE 

3# Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt 

4 

5"""This module contains a set of functions to handle python protocols for nodes 

6where it makes sense. 

7""" 

8 

9from __future__ import annotations 

10 

11import collections 

12import itertools 

13import operator as operator_mod 

14from collections.abc import Callable, Generator, Iterator, Sequence 

15from typing import Any, TypeVar 

16 

17from astroid import arguments, bases, decorators, helpers, nodes, objects, util 

18from astroid.const import Context 

19from astroid.context import InferenceContext, copy_context 

20from astroid.exceptions import ( 

21 AstroidIndexError, 

22 AstroidTypeError, 

23 AttributeInferenceError, 

24 InferenceError, 

25 NoDefault, 

26) 

27from astroid.nodes import node_classes 

28from astroid.typing import ( 

29 ConstFactoryResult, 

30 InferenceResult, 

31 SuccessfulInferenceResult, 

32) 

33 

34_TupleListNodeT = TypeVar("_TupleListNodeT", nodes.Tuple, nodes.List) 

35 

36 

37def _reflected_name(name) -> str: 

38 return "__r" + name[2:] 

39 

40 

41def _augmented_name(name) -> str: 

42 return "__i" + name[2:] 

43 

44 

45_CONTEXTLIB_MGR = "contextlib.contextmanager" 

46BIN_OP_METHOD = { 

47 "+": "__add__", 

48 "-": "__sub__", 

49 "/": "__truediv__", 

50 "//": "__floordiv__", 

51 "*": "__mul__", 

52 "**": "__pow__", 

53 "%": "__mod__", 

54 "&": "__and__", 

55 "|": "__or__", 

56 "^": "__xor__", 

57 "<<": "__lshift__", 

58 ">>": "__rshift__", 

59 "@": "__matmul__", 

60} 

61 

62REFLECTED_BIN_OP_METHOD = { 

63 key: _reflected_name(value) for (key, value) in BIN_OP_METHOD.items() 

64} 

65AUGMENTED_OP_METHOD = { 

66 key + "=": _augmented_name(value) for (key, value) in BIN_OP_METHOD.items() 

67} 
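
# Editor's note: an illustrative sketch (added here, not part of the astroid source)
# of how the three lookup tables above relate. The values follow directly from the
# definitions of BIN_OP_METHOD, _reflected_name and _augmented_name:
#
#     >>> BIN_OP_METHOD["+"], REFLECTED_BIN_OP_METHOD["+"], AUGMENTED_OP_METHOD["+="]
#     ('__add__', '__radd__', '__iadd__')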

68 

69UNARY_OP_METHOD = { 

70 "+": "__pos__", 

71 "-": "__neg__", 

72 "~": "__invert__", 

73 "not": None, # XXX not '__nonzero__' 

74} 

75_UNARY_OPERATORS: dict[str, Callable[[Any], Any]] = { 

76 "+": operator_mod.pos, 

77 "-": operator_mod.neg, 

78 "~": operator_mod.invert, 

79 "not": operator_mod.not_, 

80} 

81 

82 

83def _infer_unary_op(obj: Any, op: str) -> ConstFactoryResult: 

84 """Perform unary operation on `obj`, unless it is `NotImplemented`. 

85 

86 Can raise TypeError if operation is unsupported. 

87 """ 

88 if obj is NotImplemented: 

89 value = obj 

90 else: 

91 func = _UNARY_OPERATORS[op] 

92 value = func(obj) 

93 return nodes.const_factory(value) 

94 

95 

96nodes.Tuple.infer_unary_op = lambda self, op: _infer_unary_op(tuple(self.elts), op) 

97nodes.List.infer_unary_op = lambda self, op: _infer_unary_op(self.elts, op) 

98nodes.Set.infer_unary_op = lambda self, op: _infer_unary_op(set(self.elts), op) 

99nodes.Const.infer_unary_op = lambda self, op: _infer_unary_op(self.value, op) 

100nodes.Dict.infer_unary_op = lambda self, op: _infer_unary_op(dict(self.items), op) 
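
# Editor's note: a minimal, hedged sketch (editorial addition, not part of the
# source) of how these hooks surface through astroid's public API, assuming the
# top-level ``astroid.extract_node`` helper. A UnaryOp on a Const should be folded
# through ``Const.infer_unary_op`` above:
#
#     >>> import astroid
#     >>> node = astroid.extract_node("-5")
#     >>> next(node.infer()).value
#     -5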

101 

102# Binary operations 

103 

104BIN_OP_IMPL = { 

105 "+": lambda a, b: a + b, 

106 "-": lambda a, b: a - b, 

107 "/": lambda a, b: a / b, 

108 "//": lambda a, b: a // b, 

109 "*": lambda a, b: a * b, 

110 "**": lambda a, b: a**b, 

111 "%": lambda a, b: a % b, 

112 "&": lambda a, b: a & b, 

113 "|": lambda a, b: a | b, 

114 "^": lambda a, b: a ^ b, 

115 "<<": lambda a, b: a << b, 

116 ">>": lambda a, b: a >> b, 

117 "@": operator_mod.matmul, 

118} 

119for _KEY, _IMPL in list(BIN_OP_IMPL.items()): 

120 BIN_OP_IMPL[_KEY + "="] = _IMPL 

121 

122 

123@decorators.yes_if_nothing_inferred 

124def const_infer_binary_op( 

125 self: nodes.Const, 

126 opnode: nodes.AugAssign | nodes.BinOp, 

127 operator: str, 

128 other: InferenceResult, 

129 context: InferenceContext, 

130 _: SuccessfulInferenceResult, 

131) -> Generator[ConstFactoryResult | util.UninferableBase, None, None]: 

132 not_implemented = nodes.Const(NotImplemented) 

133 if isinstance(other, nodes.Const): 

134 if ( 

135 operator == "**" 

136 and isinstance(self.value, (int, float)) 

137 and isinstance(other.value, (int, float)) 

138 and (self.value > 1e5 or other.value > 1e5) 

139 ): 

140 yield not_implemented 

141 return 

142 try: 

143 impl = BIN_OP_IMPL[operator] 

144 try: 

145 yield nodes.const_factory(impl(self.value, other.value)) 

146 except TypeError: 

147 # ArithmeticError is not enough: float >> float is a TypeError 

148 yield not_implemented 

149 except Exception: # pylint: disable=broad-except 

150 yield util.Uninferable 

151 except TypeError: 

152 yield not_implemented 

153 elif isinstance(self.value, str) and operator == "%": 

154 # TODO(cpopa): implement string interpolation later on. 

155 yield util.Uninferable 

156 else: 

157 yield not_implemented 

158 

159 

160nodes.Const.infer_binary_op = const_infer_binary_op 
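
# Editor's note: illustrative sketch (editorial addition, not part of the source),
# assuming ``astroid.extract_node``. A BinOp between two Const nodes should be
# folded by ``const_infer_binary_op`` via the BIN_OP_IMPL table above:
#
#     >>> import astroid
#     >>> next(astroid.extract_node("2 ** 10").infer()).value
#     1024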

161 

162 

163def _multiply_seq_by_int( 

164 self: _TupleListNodeT, 

165 opnode: nodes.AugAssign | nodes.BinOp, 

166 other: nodes.Const, 

167 context: InferenceContext, 

168) -> _TupleListNodeT: 

169 node = self.__class__(parent=opnode) 

170 filtered_elts = ( 

171 helpers.safe_infer(elt, context) or util.Uninferable 

172 for elt in self.elts 

173 if not isinstance(elt, util.UninferableBase) 

174 ) 

175 node.elts = list(filtered_elts) * other.value 

176 return node 

177 

178 

179def _filter_uninferable_nodes( 

180 elts: Sequence[InferenceResult], context: InferenceContext 

181) -> Iterator[SuccessfulInferenceResult]: 

182 for elt in elts: 

183 if isinstance(elt, util.UninferableBase): 

184 yield nodes.Unknown() 

185 else: 

186 for inferred in elt.infer(context): 

187 if not isinstance(inferred, util.UninferableBase): 

188 yield inferred 

189 else: 

190 yield nodes.Unknown() 

191 

192 

193@decorators.yes_if_nothing_inferred 

194def tl_infer_binary_op( 

195 self: _TupleListNodeT, 

196 opnode: nodes.AugAssign | nodes.BinOp, 

197 operator: str, 

198 other: InferenceResult, 

199 context: InferenceContext, 

200 method: SuccessfulInferenceResult, 

201) -> Generator[_TupleListNodeT | nodes.Const | util.UninferableBase, None, None]: 

202 """Infer a binary operation on a tuple or list. 

203 

204 The instance on which the binary operation is performed is a tuple 

205 or list. This refers to the left-hand side of the operation, so: 

206 'tuple() + 1' or '[] + A()' 

207 """ 

208 # For tuples and lists the boundnode is no longer the tuple or list instance

209 context.boundnode = None 

210 not_implemented = nodes.Const(NotImplemented) 

211 if isinstance(other, self.__class__) and operator == "+": 

212 node = self.__class__(parent=opnode) 

213 node.elts = list( 

214 itertools.chain( 

215 _filter_uninferable_nodes(self.elts, context), 

216 _filter_uninferable_nodes(other.elts, context), 

217 ) 

218 ) 

219 yield node 

220 elif isinstance(other, nodes.Const) and operator == "*": 

221 if not isinstance(other.value, int): 

222 yield not_implemented 

223 return 

224 yield _multiply_seq_by_int(self, opnode, other, context) 

225 elif isinstance(other, bases.Instance) and operator == "*": 

226 # Verify if the instance supports __index__. 

227 as_index = helpers.class_instance_as_index(other) 

228 if not as_index: 

229 yield util.Uninferable 

230 else: 

231 yield _multiply_seq_by_int(self, opnode, as_index, context) 

232 else: 

233 yield not_implemented 

234 

235 

236nodes.Tuple.infer_binary_op = tl_infer_binary_op 

237nodes.List.infer_binary_op = tl_infer_binary_op 
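
# Editor's note: illustrative sketch (editorial addition, not part of the source),
# assuming ``astroid.extract_node``. Concatenation and repetition of literal
# sequences should be handled by ``tl_infer_binary_op`` / ``_multiply_seq_by_int``:
#
#     >>> import astroid
#     >>> inferred = next(astroid.extract_node("[1] + [2, 3]").infer())
#     >>> [elt.value for elt in inferred.elts]
#     [1, 2, 3]
#     >>> len(next(astroid.extract_node("(0,) * 3").infer()).elts)
#     3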

238 

239 

240@decorators.yes_if_nothing_inferred 

241def instance_class_infer_binary_op( 

242 self: bases.Instance | nodes.ClassDef, 

243 opnode: nodes.AugAssign | nodes.BinOp, 

244 operator: str, 

245 other: InferenceResult, 

246 context: InferenceContext, 

247 method: SuccessfulInferenceResult, 

248) -> Generator[InferenceResult, None, None]: 

249 return method.infer_call_result(self, context) 

250 

251 

252bases.Instance.infer_binary_op = instance_class_infer_binary_op 

253nodes.ClassDef.infer_binary_op = instance_class_infer_binary_op 

254 

255 

256# assignment ################################################################## 

257 

258"""The assigned_stmts method is responsible to return the assigned statement 

259(e.g. not inferred) according to the assignment type. 

260 

261The `assign_path` argument is used to record the lhs path of the original node. 

262For instance, if we want the assigned statements for 'c' in 'a, (b, c)', assign_path

263will be [1, 1] by the time we reach the Assign node.

264 

265The `context` argument is the current inference context, which should be passed

266to any intermediate inference that is needed.

267""" 

268 

269 

270def _resolve_looppart(parts, assign_path, context): 

271 """Recursive function to resolve multiple assignments on loops.""" 

272 assign_path = assign_path[:] 

273 index = assign_path.pop(0) 

274 for part in parts: 

275 if isinstance(part, util.UninferableBase): 

276 continue 

277 if not hasattr(part, "itered"): 

278 continue 

279 try: 

280 itered = part.itered() 

281 except TypeError: 

282 continue 

283 try: 

284 if isinstance(itered[index], (nodes.Const, nodes.Name)): 

285 itered = [part] 

286 except IndexError: 

287 pass 

288 for stmt in itered: 

289 index_node = nodes.Const(index) 

290 try: 

291 assigned = stmt.getitem(index_node, context) 

292 except (AttributeError, AstroidTypeError, AstroidIndexError): 

293 continue 

294 if not assign_path: 

295 # we have resolved the assignment path;

296 # don't infer the last part

297 yield assigned 

298 elif isinstance(assigned, util.UninferableBase): 

299 break 

300 else: 

301 # we are not yet on the last part of the path,

302 # so search each possibly inferred value

303 try: 

304 yield from _resolve_looppart( 

305 assigned.infer(context), assign_path, context 

306 ) 

307 except InferenceError: 

308 break 

309 

310 

311@decorators.raise_if_nothing_inferred 

312def for_assigned_stmts( 

313 self: nodes.For | nodes.Comprehension, 

314 node: node_classes.AssignedStmtsPossibleNode = None, 

315 context: InferenceContext | None = None, 

316 assign_path: list[int] | None = None, 

317) -> Any: 

318 if isinstance(self, nodes.AsyncFor) or getattr(self, "is_async", False): 

319 # Skip inferring of async code for now 

320 return { 

321 "node": self, 

322 "unknown": node, 

323 "assign_path": assign_path, 

324 "context": context, 

325 } 

326 if assign_path is None: 

327 for lst in self.iter.infer(context): 

328 if isinstance(lst, (nodes.Tuple, nodes.List)): 

329 yield from lst.elts 

330 else: 

331 yield from _resolve_looppart(self.iter.infer(context), assign_path, context) 

332 return { 

333 "node": self, 

334 "unknown": node, 

335 "assign_path": assign_path, 

336 "context": context, 

337 } 

338 

339 

340nodes.For.assigned_stmts = for_assigned_stmts 

341nodes.Comprehension.assigned_stmts = for_assigned_stmts 
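
# Editor's note: illustrative sketch (editorial addition, not part of the source),
# assuming ``astroid.extract_node``. A loop variable should infer to the elements
# of the iterable via ``for_assigned_stmts``:
#
#     >>> import astroid
#     >>> x = astroid.extract_node("for x in (1, 2, 3):\n    x #@")
#     >>> sorted(val.value for val in x.inferred())
#     [1, 2, 3]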

342 

343 

344def sequence_assigned_stmts( 

345 self: nodes.Tuple | nodes.List, 

346 node: node_classes.AssignedStmtsPossibleNode = None, 

347 context: InferenceContext | None = None, 

348 assign_path: list[int] | None = None, 

349) -> Any: 

350 if assign_path is None: 

351 assign_path = [] 

352 try: 

353 index = self.elts.index(node) # type: ignore[arg-type] 

354 except ValueError as exc: 

355 raise InferenceError( 

356 "Tried to retrieve a node {node!r} which does not exist", 

357 node=self, 

358 assign_path=assign_path, 

359 context=context, 

360 ) from exc 

361 

362 assign_path.insert(0, index) 

363 return self.parent.assigned_stmts( 

364 node=self, context=context, assign_path=assign_path 

365 ) 

366 

367 

368nodes.Tuple.assigned_stmts = sequence_assigned_stmts 

369nodes.List.assigned_stmts = sequence_assigned_stmts 

370 

371 

372def assend_assigned_stmts( 

373 self: nodes.AssignName | nodes.AssignAttr, 

374 node: node_classes.AssignedStmtsPossibleNode = None, 

375 context: InferenceContext | None = None, 

376 assign_path: list[int] | None = None, 

377) -> Any: 

378 return self.parent.assigned_stmts(node=self, context=context) 

379 

380 

381nodes.AssignName.assigned_stmts = assend_assigned_stmts 

382nodes.AssignAttr.assigned_stmts = assend_assigned_stmts 

383 

384 

385def _arguments_infer_argname( 

386 self, name: str | None, context: InferenceContext 

387) -> Generator[InferenceResult, None, None]: 

388 # arguments information may be missing, in which case we can't do anything 

389 # more 

390 if not (self.arguments or self.vararg or self.kwarg): 

391 yield util.Uninferable 

392 return 

393 

394 functype = self.parent.type 

395 # first argument of instance/class method 

396 if ( 

397 self.arguments 

398 and getattr(self.arguments[0], "name", None) == name 

399 and functype != "staticmethod" 

400 ): 

401 cls = self.parent.parent.scope() 

402 is_metaclass = isinstance(cls, nodes.ClassDef) and cls.type == "metaclass" 

403 # If this is a metaclass, then the first argument will always 

404 # be the class, not an instance. 

405 if context.boundnode and isinstance(context.boundnode, bases.Instance): 

406 cls = context.boundnode._proxied 

407 if is_metaclass or functype == "classmethod": 

408 yield cls 

409 return 

410 if functype == "method": 

411 yield cls.instantiate_class() 

412 return 

413 

414 if context and context.callcontext: 

415 callee = context.callcontext.callee 

416 while hasattr(callee, "_proxied"): 

417 callee = callee._proxied 

418 if getattr(callee, "name", None) == self.parent.name: 

419 call_site = arguments.CallSite(context.callcontext, context.extra_context) 

420 yield from call_site.infer_argument(self.parent, name, context) 

421 return 

422 

423 if name == self.vararg: 

424 vararg = nodes.const_factory(()) 

425 vararg.parent = self 

426 if not self.arguments and self.parent.name == "__init__": 

427 cls = self.parent.parent.scope() 

428 vararg.elts = [cls.instantiate_class()] 

429 yield vararg 

430 return 

431 if name == self.kwarg: 

432 kwarg = nodes.const_factory({}) 

433 kwarg.parent = self 

434 yield kwarg 

435 return 

436 # if there is a default value, yield it, and then yield Uninferable to reflect

437 # that we can't guess the given argument's value

438 try: 

439 context = copy_context(context) 

440 yield from self.default_value(name).infer(context) 

441 yield util.Uninferable 

442 except NoDefault: 

443 yield util.Uninferable 

444 

445 

446def arguments_assigned_stmts( 

447 self: nodes.Arguments, 

448 node: node_classes.AssignedStmtsPossibleNode = None, 

449 context: InferenceContext | None = None, 

450 assign_path: list[int] | None = None, 

451) -> Any: 

452 try: 

453 node_name = node.name # type: ignore[union-attr] 

454 except AttributeError: 

455 # Added to handle edge cases where node.name is not defined. 

456 # https://github.com/pylint-dev/astroid/pull/1644#discussion_r901545816 

457 node_name = None # pragma: no cover 

458 

459 if context and context.callcontext: 

460 callee = context.callcontext.callee 

461 while hasattr(callee, "_proxied"): 

462 callee = callee._proxied 

463 else: 

464 return _arguments_infer_argname(self, node_name, context) 

465 if node and getattr(callee, "name", None) == node.frame(future=True).name: 

466 # reset call context/name 

467 callcontext = context.callcontext 

468 context = copy_context(context) 

469 context.callcontext = None 

470 args = arguments.CallSite(callcontext, context=context) 

471 return args.infer_argument(self.parent, node_name, context) 

472 return _arguments_infer_argname(self, node_name, context) 

473 

474 

475nodes.Arguments.assigned_stmts = arguments_assigned_stmts 
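
# Editor's note: illustrative sketch (editorial addition, not part of the source),
# assuming ``astroid.extract_node``. ``_arguments_infer_argname`` is what lets
# ``self`` infer to an instance of the enclosing class when no call context exists:
#
#     >>> import astroid
#     >>> node = astroid.extract_node(
#     ...     "class A:\n"
#     ...     "    def method(self):\n"
#     ...     "        self #@"
#     ... )
#     >>> next(node.infer()).name
#     'A'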

476 

477 

478@decorators.raise_if_nothing_inferred 

479def assign_assigned_stmts( 

480 self: nodes.AugAssign | nodes.Assign | nodes.AnnAssign, 

481 node: node_classes.AssignedStmtsPossibleNode = None, 

482 context: InferenceContext | None = None, 

483 assign_path: list[int] | None = None, 

484) -> Any: 

485 if not assign_path: 

486 yield self.value 

487 return None 

488 yield from _resolve_assignment_parts( 

489 self.value.infer(context), assign_path, context 

490 ) 

491 

492 return { 

493 "node": self, 

494 "unknown": node, 

495 "assign_path": assign_path, 

496 "context": context, 

497 } 

498 

499 

500def assign_annassigned_stmts( 

501 self: nodes.AnnAssign, 

502 node: node_classes.AssignedStmtsPossibleNode = None, 

503 context: InferenceContext | None = None, 

504 assign_path: list[int] | None = None, 

505) -> Any: 

506 for inferred in assign_assigned_stmts(self, node, context, assign_path): 

507 if inferred is None: 

508 yield util.Uninferable 

509 else: 

510 yield inferred 

511 

512 

513nodes.Assign.assigned_stmts = assign_assigned_stmts 

514nodes.AnnAssign.assigned_stmts = assign_annassigned_stmts 

515nodes.AugAssign.assigned_stmts = assign_assigned_stmts 

516 

517 

518def _resolve_assignment_parts(parts, assign_path, context): 

519 """Recursive function to resolve multiple assignments.""" 

520 assign_path = assign_path[:] 

521 index = assign_path.pop(0) 

522 for part in parts: 

523 assigned = None 

524 if isinstance(part, nodes.Dict): 

525 # A dictionary in an iterating context 

526 try: 

527 assigned, _ = part.items[index] 

528 except IndexError: 

529 return 

530 

531 elif hasattr(part, "getitem"): 

532 index_node = nodes.Const(index) 

533 try: 

534 assigned = part.getitem(index_node, context) 

535 except (AstroidTypeError, AstroidIndexError): 

536 return 

537 

538 if not assigned: 

539 return 

540 

541 if not assign_path: 

542 # we have resolved the assignment path; don't infer the

543 # last part

544 yield assigned 

545 elif isinstance(assigned, util.UninferableBase): 

546 return 

547 else: 

548 # we are not yet on the last part of the path; search each

549 # possibly inferred value

550 try: 

551 yield from _resolve_assignment_parts( 

552 assigned.infer(context), assign_path, context 

553 ) 

554 except InferenceError: 

555 return 

556 

557 

558@decorators.raise_if_nothing_inferred 

559def excepthandler_assigned_stmts( 

560 self: nodes.ExceptHandler, 

561 node: node_classes.AssignedStmtsPossibleNode = None, 

562 context: InferenceContext | None = None, 

563 assign_path: list[int] | None = None, 

564) -> Any: 

565 for assigned in node_classes.unpack_infer(self.type): 

566 if isinstance(assigned, nodes.ClassDef): 

567 assigned = objects.ExceptionInstance(assigned) 

568 

569 yield assigned 

570 return { 

571 "node": self, 

572 "unknown": node, 

573 "assign_path": assign_path, 

574 "context": context, 

575 } 

576 

577 

578nodes.ExceptHandler.assigned_stmts = excepthandler_assigned_stmts 
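
# Editor's note: illustrative sketch (editorial addition, not part of the source),
# assuming ``astroid.extract_node``. The name bound by ``except ... as e`` should
# infer to an exception *instance* thanks to ``excepthandler_assigned_stmts``:
#
#     >>> import astroid
#     >>> e = astroid.extract_node(
#     ...     "try:\n"
#     ...     "    pass\n"
#     ...     "except ValueError as e:\n"
#     ...     "    e #@"
#     ... )
#     >>> next(e.infer()).name
#     'ValueError'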

579 

580 

581def _infer_context_manager(self, mgr, context): 

582 try: 

583 inferred = next(mgr.infer(context=context)) 

584 except StopIteration as e: 

585 raise InferenceError(node=mgr) from e 

586 if isinstance(inferred, bases.Generator): 

587 # Check if it is decorated with contextlib.contextmanager. 

588 func = inferred.parent 

589 if not func.decorators: 

590 raise InferenceError( 

591 "No decorators found on inferred generator %s", node=func 

592 ) 

593 

594 for decorator_node in func.decorators.nodes: 

595 decorator = next(decorator_node.infer(context=context), None) 

596 if isinstance(decorator, nodes.FunctionDef): 

597 if decorator.qname() == _CONTEXTLIB_MGR: 

598 break 

599 else: 

600 # It doesn't interest us. 

601 raise InferenceError(node=func) 

602 try: 

603 yield next(inferred.infer_yield_types()) 

604 except StopIteration as e: 

605 raise InferenceError(node=func) from e 

606 

607 elif isinstance(inferred, bases.Instance): 

608 try: 

609 enter = next(inferred.igetattr("__enter__", context=context)) 

610 except (InferenceError, AttributeInferenceError, StopIteration) as exc: 

611 raise InferenceError(node=inferred) from exc 

612 if not isinstance(enter, bases.BoundMethod): 

613 raise InferenceError(node=enter) 

614 yield from enter.infer_call_result(self, context) 

615 else: 

616 raise InferenceError(node=mgr) 

617 

618 

619@decorators.raise_if_nothing_inferred 

620def with_assigned_stmts( 

621 self: nodes.With, 

622 node: node_classes.AssignedStmtsPossibleNode = None, 

623 context: InferenceContext | None = None, 

624 assign_path: list[int] | None = None, 

625) -> Any: 

626 """Infer names and other nodes from a *with* statement. 

627 

628 This enables only inference for name binding in a *with* statement. 

629 For instance, in the following code, inferring ``ContextManager()`` will return

630 the `ContextManager` class, not whatever ``__enter__`` returns. 

631 We are doing this intentionally, because we consider that the context 

632 manager result is whatever __enter__ returns, and that is what gets bound

633 using the ``as`` keyword.

634 

635 class ContextManager(object): 

636 def __enter__(self): 

637 return 42 

638 with ContextManager() as f: 

639 pass 

640 

641 # ContextManager().infer() will return ContextManager 

642 # f.infer() will return 42. 

643 

644 Arguments: 

645 self: nodes.With 

646 node: The target of the assignment, `as (a, b)` in `with foo as (a, b)`. 

647 context: Inference context used for caching already inferred objects 

648 assign_path: 

649 A list of indices, where each index specifies what item to fetch from 

650 the inference results. 

651 """ 

652 try: 

653 mgr = next(mgr for (mgr, vars) in self.items if vars == node) 

654 except StopIteration: 

655 return None 

656 if assign_path is None: 

657 yield from _infer_context_manager(self, mgr, context) 

658 else: 

659 for result in _infer_context_manager(self, mgr, context): 

660 # Walk the assign_path and get the item at the final index. 

661 obj = result 

662 for index in assign_path: 

663 if not hasattr(obj, "elts"): 

664 raise InferenceError( 

665 "Wrong type ({targets!r}) for {node!r} assignment", 

666 node=self, 

667 targets=node, 

668 assign_path=assign_path, 

669 context=context, 

670 ) 

671 try: 

672 obj = obj.elts[index] 

673 except IndexError as exc: 

674 raise InferenceError( 

675 "Tried to infer a nonexistent target with index {index} " 

676 "in {node!r}.", 

677 node=self, 

678 targets=node, 

679 assign_path=assign_path, 

680 context=context, 

681 ) from exc 

682 except TypeError as exc: 

683 raise InferenceError( 

684 "Tried to unpack a non-iterable value in {node!r}.", 

685 node=self, 

686 targets=node, 

687 assign_path=assign_path, 

688 context=context, 

689 ) from exc 

690 yield obj 

691 return { 

692 "node": self, 

693 "unknown": node, 

694 "assign_path": assign_path, 

695 "context": context, 

696 } 

697 

698 

699nodes.With.assigned_stmts = with_assigned_stmts 
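
# Editor's note: an illustrative sketch (editorial addition, not part of the
# source) of the behaviour described in the docstring above, assuming
# ``astroid.extract_node``:
#
#     >>> import astroid
#     >>> f = astroid.extract_node(
#     ...     "class ContextManager(object):\n"
#     ...     "    def __enter__(self):\n"
#     ...     "        return 42\n"
#     ...     "with ContextManager() as f:\n"
#     ...     "    f #@"
#     ... )
#     >>> next(f.infer()).value
#     42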

700 

701 

702@decorators.raise_if_nothing_inferred 

703def named_expr_assigned_stmts( 

704 self: nodes.NamedExpr, 

705 node: node_classes.AssignedStmtsPossibleNode, 

706 context: InferenceContext | None = None, 

707 assign_path: list[int] | None = None, 

708) -> Any: 

709 """Infer names and other nodes from an assignment expression.""" 

710 if self.target == node: 

711 yield from self.value.infer(context=context) 

712 else: 

713 raise InferenceError( 

714 "Cannot infer NamedExpr node {node!r}", 

715 node=self, 

716 assign_path=assign_path, 

717 context=context, 

718 ) 

719 

720 

721nodes.NamedExpr.assigned_stmts = named_expr_assigned_stmts 
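
# Editor's note: illustrative sketch (editorial addition, not part of the source),
# assuming ``astroid.extract_node``. A name bound by an assignment expression
# should infer to the expression's value via ``named_expr_assigned_stmts``:
#
#     >>> import astroid
#     >>> y = astroid.extract_node("(y := 5)\ny #@")
#     >>> next(y.infer()).value
#     5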

722 

723 

724@decorators.yes_if_nothing_inferred 

725def starred_assigned_stmts( # noqa: C901 

726 self: nodes.Starred, 

727 node: node_classes.AssignedStmtsPossibleNode = None, 

728 context: InferenceContext | None = None, 

729 assign_path: list[int] | None = None, 

730) -> Any: 

731 """ 

732 Arguments: 

733 self: nodes.Starred 

734 node: a node related to the current underlying Node. 

735 context: Inference context used for caching already inferred objects 

736 assign_path: 

737 A list of indices, where each index specifies what item to fetch from 

738 the inference results. 

739 """ 

740 

741 # pylint: disable=too-many-locals,too-many-statements 

742 def _determine_starred_iteration_lookups( 

743 starred: nodes.Starred, target: nodes.Tuple, lookups: list[tuple[int, int]] 

744 ) -> None: 

745 # Determine the lookups for the rhs of the iteration 

746 itered = target.itered() 

747 for index, element in enumerate(itered): 

748 if ( 

749 isinstance(element, nodes.Starred) 

750 and element.value.name == starred.value.name 

751 ): 

752 lookups.append((index, len(itered))) 

753 break 

754 if isinstance(element, nodes.Tuple): 

755 lookups.append((index, len(element.itered()))) 

756 _determine_starred_iteration_lookups(starred, element, lookups) 

757 

758 stmt = self.statement(future=True) 

759 if not isinstance(stmt, (nodes.Assign, nodes.For)): 

760 raise InferenceError( 

761 "Statement {stmt!r} enclosing {node!r} must be an Assign or For node.", 

762 node=self, 

763 stmt=stmt, 

764 unknown=node, 

765 context=context, 

766 ) 

767 

768 if context is None: 

769 context = InferenceContext() 

770 

771 if isinstance(stmt, nodes.Assign): 

772 value = stmt.value 

773 lhs = stmt.targets[0] 

774 if not isinstance(lhs, nodes.BaseContainer): 

775 yield util.Uninferable 

776 return 

777 

778 if sum(1 for _ in lhs.nodes_of_class(nodes.Starred)) > 1: 

779 raise InferenceError( 

780 "Too many starred arguments in the assignment targets {lhs!r}.", 

781 node=self, 

782 targets=lhs, 

783 unknown=node, 

784 context=context, 

785 ) 

786 

787 try: 

788 rhs = next(value.infer(context)) 

789 except (InferenceError, StopIteration): 

790 yield util.Uninferable 

791 return 

792 if isinstance(rhs, util.UninferableBase) or not hasattr(rhs, "itered"): 

793 yield util.Uninferable 

794 return 

795 

796 try: 

797 elts = collections.deque(rhs.itered()) # type: ignore[union-attr] 

798 except TypeError: 

799 yield util.Uninferable 

800 return 

801 

802 # Iteratively unpack the values from the rhs of the assignment,

803 # until we find the starred node. What will remain will

804 # be the list of values which the Starred node will represent 

805 # This is done in two steps, from left to right to remove 

806 # anything before the starred node and from right to left 

807 # to remove anything after the starred node. 

808 

809 for index, left_node in enumerate(lhs.elts): 

810 if not isinstance(left_node, nodes.Starred): 

811 if not elts: 

812 break 

813 elts.popleft() 

814 continue 

815 lhs_elts = collections.deque(reversed(lhs.elts[index:])) 

816 for right_node in lhs_elts: 

817 if not isinstance(right_node, nodes.Starred): 

818 if not elts: 

819 break 

820 elts.pop() 

821 continue 

822 

823 # We're done unpacking. 

824 packed = nodes.List( 

825 ctx=Context.Store, 

826 parent=self, 

827 lineno=lhs.lineno, 

828 col_offset=lhs.col_offset, 

829 ) 

830 packed.postinit(elts=list(elts)) 

831 yield packed 

832 break 

833 

834 if isinstance(stmt, nodes.For): 

835 try: 

836 inferred_iterable = next(stmt.iter.infer(context=context)) 

837 except (InferenceError, StopIteration): 

838 yield util.Uninferable 

839 return 

840 if isinstance(inferred_iterable, util.UninferableBase) or not hasattr( 

841 inferred_iterable, "itered" 

842 ): 

843 yield util.Uninferable 

844 return 

845 try: 

846 itered = inferred_iterable.itered() # type: ignore[union-attr] 

847 except TypeError: 

848 yield util.Uninferable 

849 return 

850 

851 target = stmt.target 

852 

853 if not isinstance(target, nodes.Tuple): 

854 raise InferenceError( 

855 "Could not make sense of this, the target must be a tuple", 

856 context=context, 

857 ) 

858 

859 lookups: list[tuple[int, int]] = [] 

860 _determine_starred_iteration_lookups(self, target, lookups) 

861 if not lookups: 

862 raise InferenceError( 

863 "Could not make sense of this, needs at least a lookup", context=context 

864 ) 

865 

866 # Make the last lookup a slice, since that's what we want for a Starred node

867 last_element_index, last_element_length = lookups[-1] 

868 is_starred_last = last_element_index == (last_element_length - 1) 

869 

870 lookup_slice = slice( 

871 last_element_index, 

872 None if is_starred_last else (last_element_length - last_element_index), 

873 ) 

874 last_lookup = lookup_slice 

875 

876 for element in itered: 

877 # We probably want to infer the potential values *for each* element in an 

878 # iterable, but we can't infer a list of all values when only the values

879 # for a single iteration step are expected:

880 # 

881 # for a, *b in [...]: 

882 # b 

883 # 

884 # *b* should now point to just the elements at that particular iteration step, 

885 # which astroid can't know about. 

886 

887 found_element = None 

888 for index, lookup in enumerate(lookups): 

889 if not hasattr(element, "itered"): 

890 break 

891 if index + 1 is len(lookups): 

892 cur_lookup: slice | int = last_lookup 

893 else: 

894 # Grab just the index, not the whole length 

895 cur_lookup = lookup[0] 

896 try: 

897 itered_inner_element = element.itered() 

898 element = itered_inner_element[cur_lookup] 

899 except IndexError: 

900 break 

901 except TypeError: 

902 # Most likely the itered() call failed, cannot make sense of this 

903 yield util.Uninferable 

904 return 

905 else: 

906 found_element = element 

907 

908 unpacked = nodes.List( 

909 ctx=Context.Store, 

910 parent=self, 

911 lineno=self.lineno, 

912 col_offset=self.col_offset, 

913 ) 

914 unpacked.postinit(elts=found_element or []) 

915 yield unpacked 

916 return 

917 

918 yield util.Uninferable 

919 

920 

921nodes.Starred.assigned_stmts = starred_assigned_stmts 
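
# Editor's note: illustrative sketch (editorial addition, not part of the source),
# assuming ``astroid.extract_node``. In an unpacking assignment the starred name
# should infer to a List of the leftover elements, as computed above:
#
#     >>> import astroid
#     >>> b = astroid.extract_node("a, *b, c = [1, 2, 3, 4]\nb #@")
#     >>> [elt.value for elt in next(b.infer()).elts]
#     [2, 3]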

922 

923 

924@decorators.yes_if_nothing_inferred 

925def match_mapping_assigned_stmts( 

926 self: nodes.MatchMapping, 

927 node: nodes.AssignName, 

928 context: InferenceContext | None = None, 

929 assign_path: None = None, 

930) -> Generator[nodes.NodeNG, None, None]: 

931 """Return empty generator (return -> raises StopIteration) so inferred value 

932 is Uninferable. 

933 """ 

934 return 

935 yield 

936 

937 

938nodes.MatchMapping.assigned_stmts = match_mapping_assigned_stmts 

939 

940 

941@decorators.yes_if_nothing_inferred 

942def match_star_assigned_stmts( 

943 self: nodes.MatchStar, 

944 node: nodes.AssignName, 

945 context: InferenceContext | None = None, 

946 assign_path: None = None, 

947) -> Generator[nodes.NodeNG, None, None]: 

948 """Return empty generator (return -> raises StopIteration) so inferred value 

949 is Uninferable. 

950 """ 

951 return 

952 yield 

953 

954 

955nodes.MatchStar.assigned_stmts = match_star_assigned_stmts 

956 

957 

958@decorators.yes_if_nothing_inferred 

959def match_as_assigned_stmts( 

960 self: nodes.MatchAs, 

961 node: nodes.AssignName, 

962 context: InferenceContext | None = None, 

963 assign_path: None = None, 

964) -> Generator[nodes.NodeNG, None, None]: 

965 """Infer MatchAs as the Match subject if it's the only MatchCase pattern 

966 else raise StopIteration to yield Uninferable. 

967 """ 

968 if ( 

969 isinstance(self.parent, nodes.MatchCase) 

970 and isinstance(self.parent.parent, nodes.Match) 

971 and self.pattern is None 

972 ): 

973 yield self.parent.parent.subject 

974 

975 

976nodes.MatchAs.assigned_stmts = match_as_assigned_stmts
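
# Editor's note: illustrative sketch (editorial addition, not part of the source),
# assuming ``astroid.extract_node`` and an interpreter new enough to parse
# ``match`` statements. A bare capture pattern is a MatchAs with ``pattern=None``,
# so the captured name should infer to the match subject:
#
#     >>> import astroid
#     >>> x = astroid.extract_node("match 42:\n    case x:\n        x #@")
#     >>> next(x.infer()).value
#     42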