Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/astroid/inference.py: 73%

574 statements  

« prev     ^ index     » next       coverage.py v7.2.7, created at 2023-06-07 06:53 +0000

1# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html 

2# For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE 

3# Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt 

4 

5"""This module contains a set of functions to handle inference on astroid trees.""" 

6 

7from __future__ import annotations 

8 

9import ast 

10import functools 

11import itertools 

12import operator 

13import typing 

14from collections.abc import Callable, Generator, Iterable, Iterator 

15from typing import TYPE_CHECKING, Any, Optional, TypeVar, Union 

16 

17from astroid import ( 

18 bases, 

19 constraint, 

20 decorators, 

21 helpers, 

22 nodes, 

23 objects, 

24 protocols, 

25 util, 

26) 

27from astroid.const import PY310_PLUS 

28from astroid.context import ( 

29 CallContext, 

30 InferenceContext, 

31 bind_context_to_node, 

32 copy_context, 

33) 

34from astroid.exceptions import ( 

35 AstroidBuildingError, 

36 AstroidError, 

37 AstroidIndexError, 

38 AstroidTypeError, 

39 AstroidValueError, 

40 AttributeInferenceError, 

41 InferenceError, 

42 NameInferenceError, 

43 _NonDeducibleTypeHierarchy, 

44) 

45from astroid.interpreter import dunder_lookup 

46from astroid.manager import AstroidManager 

47from astroid.typing import ( 

48 InferenceErrorInfo, 

49 InferenceResult, 

50 SuccessfulInferenceResult, 

51) 

52 

53if TYPE_CHECKING: 

54 from astroid.objects import Property 

55 

56 

57_T = TypeVar("_T") 

58_BaseContainerT = TypeVar("_BaseContainerT", bound=nodes.BaseContainer) 

59_FunctionDefT = TypeVar("_FunctionDefT", bound=nodes.FunctionDef) 

60 

61GetFlowFactory = typing.Callable[ 

62 [ 

63 InferenceResult, 

64 Optional[InferenceResult], 

65 Union[nodes.AugAssign, nodes.BinOp], 

66 InferenceResult, 

67 Optional[InferenceResult], 

68 InferenceContext, 

69 InferenceContext, 

70 ], 

71 "list[functools.partial[Generator[InferenceResult, None, None]]]", 

72] 

73 

74# .infer method ############################################################### 

75 

76 

def infer_end(
    self: _T, context: InferenceContext | None = None, **kwargs: Any
) -> Iterator[_T]:
    """Terminal inference step: a node that infers to itself.

    Used for nodes whose inference carries no extra semantics beyond
    the node itself, such as Module or Const.
    """
    yield self

86 

87 

# We add ignores to all assignments to methods
# See https://github.com/python/mypy/issues/2427
# These node types infer to themselves, so wire in the terminal step.
nodes.Module._infer = infer_end
nodes.ClassDef._infer = infer_end
nodes.Lambda._infer = infer_end  # type: ignore[assignment]
nodes.Const._infer = infer_end  # type: ignore[assignment]
nodes.Slice._infer = infer_end  # type: ignore[assignment]

95 

96 

def _infer_sequence_helper(
    node: _BaseContainerT, context: InferenceContext | None = None
) -> list[SuccessfulInferenceResult]:
    """Infer all values based on _BaseContainer.elts.

    Starred elements are flattened into the result and named expressions
    are replaced by their inferred value; all other elements are kept
    as-is.

    :raises InferenceError: if a starred or named-expression element
        cannot be safely inferred, or a starred value has no ``elts``.
    """
    values = []

    for elt in node.elts:
        if isinstance(elt, nodes.Starred):
            starred = helpers.safe_infer(elt.value, context)
            if not starred:
                raise InferenceError(node=node, context=context)
            if not hasattr(starred, "elts"):
                raise InferenceError(node=node, context=context)
            # NOTE(review): the recursive call does not propagate
            # `context` — looks like an oversight, but confirm before
            # changing since it alters inference paths.
            values.extend(_infer_sequence_helper(starred))
        elif isinstance(elt, nodes.NamedExpr):
            # A walrus element contributes its inferred value.
            value = helpers.safe_infer(elt.value, context)
            if not value:
                raise InferenceError(node=node, context=context)
            values.append(value)
        else:
            values.append(elt)
    return values

119 

120 

@decorators.raise_if_nothing_inferred
def infer_sequence(
    self: _BaseContainerT,
    context: InferenceContext | None = None,
    **kwargs: Any,
) -> Iterator[_BaseContainerT]:
    """Infer a List/Tuple/Set literal.

    When the literal contains starred or named-expression elements, a
    new container of the same concrete type is built with those elements
    resolved; otherwise the literal infers to itself.
    """
    has_starred_named_expr = any(
        isinstance(e, (nodes.Starred, nodes.NamedExpr)) for e in self.elts
    )
    if has_starred_named_expr:
        values = _infer_sequence_helper(self, context)
        # Rebuild a container of the same type at the same source
        # location to hold the resolved elements.
        new_seq = type(self)(
            lineno=self.lineno,
            col_offset=self.col_offset,
            parent=self.parent,
            end_lineno=self.end_lineno,
            end_col_offset=self.end_col_offset,
        )
        new_seq.postinit(values)

        yield new_seq
    else:
        yield self

144 

145 

# All sequence literals share one inference implementation.
nodes.List._infer = infer_sequence  # type: ignore[assignment]
nodes.Tuple._infer = infer_sequence  # type: ignore[assignment]
nodes.Set._infer = infer_sequence  # type: ignore[assignment]

149 

150 

def infer_map(
    self: nodes.Dict, context: InferenceContext | None = None
) -> Iterator[nodes.Dict]:
    """Infer a Dict literal.

    A dict without ``**`` unpacking infers to itself; otherwise a new
    Dict node is built with the unpacked items merged in (later keys
    replacing earlier duplicates).
    """
    if not any(isinstance(k, nodes.DictUnpack) for k, _ in self.items):
        yield self
    else:
        items = _infer_map(self, context)
        # Use keyword arguments for the location fields, consistent with
        # how infer_sequence constructs its replacement node.
        new_seq = type(self)(
            lineno=self.lineno,
            col_offset=self.col_offset,
            parent=self.parent,
            end_lineno=self.end_lineno,
            end_col_offset=self.end_col_offset,
        )
        new_seq.postinit(list(items.items()))
        yield new_seq

167 

168 

169def _update_with_replacement( 

170 lhs_dict: dict[SuccessfulInferenceResult, SuccessfulInferenceResult], 

171 rhs_dict: dict[SuccessfulInferenceResult, SuccessfulInferenceResult], 

172) -> dict[SuccessfulInferenceResult, SuccessfulInferenceResult]: 

173 """Delete nodes that equate to duplicate keys. 

174 

175 Since an astroid node doesn't 'equal' another node with the same value, 

176 this function uses the as_string method to make sure duplicate keys 

177 don't get through 

178 

179 Note that both the key and the value are astroid nodes 

180 

181 Fixes issue with DictUnpack causing duplicate keys 

182 in inferred Dict items 

183 

184 :param lhs_dict: Dictionary to 'merge' nodes into 

185 :param rhs_dict: Dictionary with nodes to pull from 

186 :return : merged dictionary of nodes 

187 """ 

188 combined_dict = itertools.chain(lhs_dict.items(), rhs_dict.items()) 

189 # Overwrite keys which have the same string values 

190 string_map = {key.as_string(): (key, value) for key, value in combined_dict} 

191 # Return to dictionary 

192 return dict(string_map.values()) 

193 

194 

def _infer_map(
    node: nodes.Dict, context: InferenceContext | None
) -> dict[SuccessfulInferenceResult, SuccessfulInferenceResult]:
    """Infer all values based on Dict.items.

    ``**`` unpacked mappings are recursively inferred and merged in;
    duplicate keys are resolved by _update_with_replacement.

    :raises InferenceError: if a key, value or unpacked mapping cannot
        be inferred, or a ``**`` operand does not infer to a Dict.
    """
    values: dict[SuccessfulInferenceResult, SuccessfulInferenceResult] = {}
    for name, value in node.items:
        if isinstance(name, nodes.DictUnpack):
            double_starred = helpers.safe_infer(value, context)
            if not double_starred:
                # Attach node/context like every other raise in this
                # function, so callers get a useful error.
                raise InferenceError(node=node, context=context)
            if not isinstance(double_starred, nodes.Dict):
                raise InferenceError(node=node, context=context)
            unpack_items = _infer_map(double_starred, context)
            values = _update_with_replacement(values, unpack_items)
        else:
            key = helpers.safe_infer(name, context=context)
            safe_value = helpers.safe_infer(value, context=context)
            if any(not elem for elem in (key, safe_value)):
                raise InferenceError(node=node, context=context)
            # safe_value is SuccessfulInferenceResult as bool(Uninferable) == False
            values = _update_with_replacement(values, {key: safe_value})
    return values

217 

218 

# Dict literals need their own inference to resolve ** unpacking.
nodes.Dict._infer = infer_map  # type: ignore[assignment]

220 

221 

def _higher_function_scope(node: nodes.NodeNG) -> nodes.FunctionDef | None:
    """Search for the first function which encloses the given
    scope. This can be used for looking up in that function's
    scope, in case looking up in a lower scope for a particular
    name fails.

    :param node: A scope node.
    :returns:
        ``None``, if no parent function scope was found,
        otherwise an instance of :class:`astroid.nodes.scoped_nodes.Function`,
        which encloses the given node.
    """
    # Climb ancestors until the immediate parent is a FunctionDef, or
    # there is no parent left.
    current = node
    while current.parent is not None and not isinstance(
        current.parent, nodes.FunctionDef
    ):
        current = current.parent
    if current.parent is None:
        return None
    return current.parent  # type: ignore[no-any-return]

240 

241 

def infer_name(
    self: nodes.Name | nodes.AssignName,
    context: InferenceContext | None = None,
    **kwargs: Any,
) -> Generator[InferenceResult, None, None]:
    """Infer a Name: use name lookup rules."""
    frame, stmts = self.lookup(self.name)
    if not stmts:
        # Try to see if the name is enclosed in a nested function
        # and use the higher (first function) scope for searching.
        parent_function = _higher_function_scope(self.scope())
        if parent_function:
            _, stmts = parent_function.lookup(self.name)

        if not stmts:
            raise NameInferenceError(
                name=self.name, scope=self.scope(), context=context
            )
    context = copy_context(context)
    context.lookupname = self.name
    # Record constraints applying to this name in the looked-up frame so
    # downstream inference can filter candidate values.
    context.constraints[self.name] = constraint.get_constraints(self, frame)

    return bases._infer_stmts(stmts, context, frame)

265 

266 

# The order of the decorators here is important
# See https://github.com/pylint-dev/astroid/commit/0a8a75db30da060a24922e05048bc270230f5
nodes.Name._infer = decorators.raise_if_nothing_inferred(
    decorators.path_wrapper(infer_name)
)
nodes.AssignName.infer_lhs = infer_name  # won't work with a path wrapper

273 

274 

@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_call(
    self: nodes.Call, context: InferenceContext | None = None, **kwargs: Any
) -> Generator[InferenceResult, None, InferenceErrorInfo]:
    """Infer a Call node by trying to guess what the function returns."""
    callcontext = copy_context(context)
    callcontext.boundnode = None
    if context is not None:
        # Pre-resolve the call's argument nodes so callees see them.
        callcontext.extra_context = _populate_context_lookup(self, context.clone())

    # Every possible callee contributes its call results; a failing
    # candidate does not abort the others.
    for callee in self.func.infer(context):
        if isinstance(callee, util.UninferableBase):
            yield callee
            continue
        try:
            if hasattr(callee, "infer_call_result"):
                callcontext.callcontext = CallContext(
                    args=self.args, keywords=self.keywords, callee=callee
                )
                yield from callee.infer_call_result(caller=self, context=callcontext)
        except InferenceError:
            continue
    return InferenceErrorInfo(node=self, context=context)

299 

300 

nodes.Call._infer = infer_call  # type: ignore[assignment]

302 

303 

@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_import(
    self: nodes.Import,
    context: InferenceContext | None = None,
    asname: bool = True,
    **kwargs: Any,
) -> Generator[nodes.Module, None, None]:
    """Infer an Import node: return the imported module/object.

    The name to resolve is taken from ``context.lookupname``.

    :raises InferenceError: if no lookup name is set or the module
        cannot be built.
    """
    context = context or InferenceContext()
    name = context.lookupname
    if name is None:
        raise InferenceError(node=self, context=context)

    try:
        if asname:
            # Translate a local alias (``import x as y``) back to the
            # real module name before importing.
            yield self.do_import_module(self.real_name(name))
        else:
            yield self.do_import_module(name)
    except AstroidBuildingError as exc:
        raise InferenceError(node=self, context=context) from exc

325 

326 

nodes.Import._infer = infer_import

328 

329 

@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_import_from(
    self: nodes.ImportFrom,
    context: InferenceContext | None = None,
    asname: bool = True,
    **kwargs: Any,
) -> Generator[InferenceResult, None, None]:
    """Infer a ImportFrom node: return the imported module/object.

    :raises InferenceError: if no lookup name is set, the module cannot
        be built, or the name cannot be found in the module.
    """
    context = context or InferenceContext()
    name = context.lookupname
    if name is None:
        raise InferenceError(node=self, context=context)
    if asname:
        try:
            # Translate a local alias (``from m import x as y``).
            name = self.real_name(name)
        except AttributeInferenceError as exc:
            # See https://github.com/pylint-dev/pylint/issues/4692
            raise InferenceError(node=self, context=context) from exc
    try:
        module = self.do_import_module()
    except AstroidBuildingError as exc:
        raise InferenceError(node=self, context=context) from exc

    try:
        context = copy_context(context)
        context.lookupname = name
        # Ignore the module's locals when it is importing from itself.
        stmts = module.getattr(name, ignore_locals=module is self.root())
        return bases._infer_stmts(stmts, context)
    except AttributeInferenceError as error:
        raise InferenceError(
            str(error), target=self, attribute=name, context=context
        ) from error

363 

364 

nodes.ImportFrom._infer = infer_import_from  # type: ignore[assignment]

366 

367 

def infer_attribute(
    self: nodes.Attribute | nodes.AssignAttr,
    context: InferenceContext | None = None,
    **kwargs: Any,
) -> Generator[InferenceResult, None, InferenceErrorInfo]:
    """Infer an Attribute node by using getattr on the associated object."""
    # Each possible owner of the attribute contributes results via its
    # igetattr(); a failure on one owner does not abort the others.
    for owner in self.expr.infer(context):
        if isinstance(owner, util.UninferableBase):
            yield owner
            continue

        context = copy_context(context)
        old_boundnode = context.boundnode
        try:
            context.boundnode = owner
            if isinstance(owner, (nodes.ClassDef, bases.Instance)):
                frame = owner if isinstance(owner, nodes.ClassDef) else owner._proxied
                # Record constraints applying to this attribute in the
                # owner's class frame for downstream filtering.
                context.constraints[self.attrname] = constraint.get_constraints(
                    self, frame=frame
                )
            yield from owner.igetattr(self.attrname, context)
        except (
            AttributeInferenceError,
            InferenceError,
            AttributeError,
        ):
            pass
        finally:
            # Always restore the bound node, even when igetattr raised
            # mid-iteration.
            context.boundnode = old_boundnode
    return InferenceErrorInfo(node=self, context=context)

398 

399 

# The order of the decorators here is important
# See https://github.com/pylint-dev/astroid/commit/0a8a75db30da060a24922e05048bc270230f5
nodes.Attribute._infer = decorators.raise_if_nothing_inferred(
    decorators.path_wrapper(infer_attribute)
)
# won't work with a path wrapper
nodes.AssignAttr.infer_lhs = decorators.raise_if_nothing_inferred(infer_attribute)

407 

408 

@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_global(
    self: nodes.Global, context: InferenceContext | None = None, **kwargs: Any
) -> Generator[InferenceResult, None, None]:
    """Infer a Global node: look the name up at module level.

    :raises InferenceError: if no lookup name is set or the module has
        no matching attribute.
    """
    if context is None or context.lookupname is None:
        raise InferenceError(node=self, context=context)
    try:
        # A global name always resolves against the root module scope.
        return bases._infer_stmts(self.root().getattr(context.lookupname), context)
    except AttributeInferenceError as error:
        raise InferenceError(
            str(error), target=self, attribute=context.lookupname, context=context
        ) from error

422 

423 

nodes.Global._infer = infer_global  # type: ignore[assignment]

425 

426 

# Marker for "no index value deduced" in infer_subscript; distinct from
# every real inference result (including None).
_SUBSCRIPT_SENTINEL = object()

428 

429 

def infer_subscript(
    self: nodes.Subscript, context: InferenceContext | None = None, **kwargs: Any
) -> Generator[InferenceResult, None, InferenceErrorInfo | None]:
    """Inference for subscripts.

    We're understanding if the index is a Const
    or a slice, passing the result of inference
    to the value's `getitem` method, which should
    handle each supported index type accordingly.
    """

    found_one = False
    for value in self.value.infer(context):
        if isinstance(value, util.UninferableBase):
            yield util.Uninferable
            return None
        for index in self.slice.infer(context):
            if isinstance(index, util.UninferableBase):
                yield util.Uninferable
                return None

            # Try to deduce the index value.
            index_value = _SUBSCRIPT_SENTINEL
            if value.__class__ == bases.Instance:
                # An Instance container handles the raw index itself.
                index_value = index
            elif index.__class__ == bases.Instance:
                # An Instance used as index: try to turn it into a
                # concrete index via class_instance_as_index.
                instance_as_index = helpers.class_instance_as_index(index)
                if instance_as_index:
                    index_value = instance_as_index
            else:
                index_value = index

            if index_value is _SUBSCRIPT_SENTINEL:
                raise InferenceError(node=self, context=context)

            try:
                assigned = value.getitem(index_value, context)
            except (
                AstroidTypeError,
                AstroidIndexError,
                AstroidValueError,
                AttributeInferenceError,
                AttributeError,
            ) as exc:
                raise InferenceError(node=self, context=context) from exc

            # Prevent inferring if the inferred subscript
            # is the same as the original subscripted object.
            if self is assigned or isinstance(assigned, util.UninferableBase):
                yield util.Uninferable
                return None
            yield from assigned.infer(context)
            found_one = True

    if found_one:
        return InferenceErrorInfo(node=self, context=context)
    return None

487 

488 

# The order of the decorators here is important
# See https://github.com/pylint-dev/astroid/commit/0a8a75db30da060a24922e05048bc270230f5
nodes.Subscript._infer = decorators.raise_if_nothing_inferred(  # type: ignore[assignment]
    decorators.path_wrapper(infer_subscript)
)
nodes.Subscript.infer_lhs = decorators.raise_if_nothing_inferred(infer_subscript)

495 

496 

@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def _infer_boolop(
    self: nodes.BoolOp, context: InferenceContext | None = None, **kwargs: Any
) -> Generator[InferenceResult, None, InferenceErrorInfo | None]:
    """Infer a boolean operation (and / or / not).

    The function will calculate the boolean operation
    for all pairs generated through inference for each component
    node.
    """
    values = self.values
    # `or` yields the first truthy operand, `and` the first falsy one.
    if self.op == "or":
        predicate = operator.truth
    else:
        predicate = operator.not_

    try:
        inferred_values = [value.infer(context=context) for value in values]
    except InferenceError:
        yield util.Uninferable
        return None

    # Consider every combination of inferred operand values.
    for pair in itertools.product(*inferred_values):
        if any(isinstance(item, util.UninferableBase) for item in pair):
            # Can't infer the final result, just yield Uninferable.
            yield util.Uninferable
            continue

        bool_values = [item.bool_value() for item in pair]
        if any(isinstance(item, util.UninferableBase) for item in bool_values):
            # Can't infer the final result, just yield Uninferable.
            yield util.Uninferable
            continue

        # Since the boolean operations are short circuited operations,
        # this code yields the first value for which the predicate is True
        # and if no value respected the predicate, then the last value will
        # be returned (or Uninferable if there was no last value).
        # This is conforming to the semantics of `and` and `or`:
        # 1 and 0 -> 0
        # 0 and 1 -> 0
        # 1 or 0 -> 1
        # 0 or 1 -> 1
        value = util.Uninferable
        for value, bool_value in zip(pair, bool_values):
            if predicate(bool_value):
                yield value
                break
        else:
            yield value

    return InferenceErrorInfo(node=self, context=context)

550 

551 

nodes.BoolOp._infer = _infer_boolop

553 

554 

555# UnaryOp, BinOp and AugAssign inferences 

556 

557 

def _filter_operation_errors(
    self: _T,
    infer_callable: Callable[
        [_T, InferenceContext | None],
        Generator[InferenceResult | util.BadOperationMessage, None, None],
    ],
    context: InferenceContext | None,
    error: type[util.BadOperationMessage],
) -> Generator[InferenceResult, None, None]:
    """Yield *infer_callable*'s results, masking operation errors.

    For the sake of .infer(), we don't care about operation errors,
    which is the job of pylint; each result of type *error* is replaced
    with Uninferable to show that the result can't be inferred.
    """
    for outcome in infer_callable(self, context):
        yield util.Uninferable if isinstance(outcome, error) else outcome

575 

576 

def _infer_unaryop(
    self: nodes.UnaryOp, context: InferenceContext | None = None
) -> Generator[InferenceResult | util.BadUnaryOperationMessage, None, None]:
    """Infer what an UnaryOp should return when evaluated.

    Yields inference results, or BadUnaryOperationMessage markers when
    an operand does not support the operation (callers such as
    infer_unaryop filter those out).
    """
    for operand in self.operand.infer(context):
        try:
            yield operand.infer_unary_op(self.op)
        except TypeError as exc:
            # The operand doesn't support this operation.
            yield util.BadUnaryOperationMessage(operand, self.op, exc)
        except AttributeError as exc:
            # The operand has no infer_unary_op(); fall back to the
            # operator's dunder method.
            meth = protocols.UNARY_OP_METHOD[self.op]
            if meth is None:
                # `not node`. Determine node's boolean
                # value and negate its result, unless it is
                # Uninferable, which will be returned as is.
                bool_value = operand.bool_value()
                if not isinstance(bool_value, util.UninferableBase):
                    yield nodes.const_factory(not bool_value)
                else:
                    yield util.Uninferable
            else:
                if not isinstance(operand, (bases.Instance, nodes.ClassDef)):
                    # The operation was used on something which
                    # doesn't support it.
                    yield util.BadUnaryOperationMessage(operand, self.op, exc)
                    continue

                try:
                    try:
                        methods = dunder_lookup.lookup(operand, meth)
                    except AttributeInferenceError:
                        yield util.BadUnaryOperationMessage(operand, self.op, exc)
                        continue

                    # First candidate wins; infer it and call it with the
                    # operand bound as self.
                    meth = methods[0]
                    inferred = next(meth.infer(context=context), None)
                    if (
                        isinstance(inferred, util.UninferableBase)
                        or not inferred.callable()
                    ):
                        continue

                    context = copy_context(context)
                    context.boundnode = operand
                    context.callcontext = CallContext(args=[], callee=inferred)

                    call_results = inferred.infer_call_result(self, context=context)
                    result = next(call_results, None)
                    if result is None:
                        # Failed to infer, return the same type.
                        yield operand
                    else:
                        yield result
                except AttributeInferenceError as inner_exc:
                    # The unary operation special method was not found.
                    yield util.BadUnaryOperationMessage(operand, self.op, inner_exc)
                except InferenceError:
                    yield util.Uninferable

636 

637 

@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_unaryop(
    self: nodes.UnaryOp, context: InferenceContext | None = None, **kwargs: Any
) -> Generator[InferenceResult, None, InferenceErrorInfo]:
    """Infer what an UnaryOp should return when evaluated.

    Delegates to _infer_unaryop and masks its BadUnaryOperationMessage
    results with Uninferable.
    """
    yield from _filter_operation_errors(
        self, _infer_unaryop, context, util.BadUnaryOperationMessage
    )
    return InferenceErrorInfo(node=self, context=context)

648 

649 

# The raw variant (_infer_unaryop) is also exposed so other checkers can
# inspect BadUnaryOperationMessage results directly.
nodes.UnaryOp._infer_unaryop = _infer_unaryop
nodes.UnaryOp._infer = infer_unaryop

652 

653 

def _is_not_implemented(const) -> bool:
    """Check if the given const node is NotImplemented."""
    if not isinstance(const, nodes.Const):
        return False
    return const.value is NotImplemented

657 

658 

def _infer_old_style_string_formatting(
    instance: nodes.Const, other: nodes.NodeNG, context: InferenceContext
) -> tuple[util.UninferableBase | nodes.Const]:
    """Infer the result of '"string" % ...'.

    TODO: Instead of returning Uninferable we should rely
    on the call to '%' to see if the result is actually uninferable.
    """
    # Reduce the right-hand side to concrete Python values; bail out
    # with Uninferable whenever any part can't be resolved to a Const.
    if isinstance(other, nodes.Tuple):
        if util.Uninferable in other.elts:
            return (util.Uninferable,)
        inferred_positional = [helpers.safe_infer(i, context) for i in other.elts]
        if all(isinstance(i, nodes.Const) for i in inferred_positional):
            values = tuple(i.value for i in inferred_positional)
        else:
            values = None
    elif isinstance(other, nodes.Dict):
        values: dict[Any, Any] = {}
        for pair in other.items:
            key = helpers.safe_infer(pair[0], context)
            if not isinstance(key, nodes.Const):
                return (util.Uninferable,)
            value = helpers.safe_infer(pair[1], context)
            if not isinstance(value, nodes.Const):
                return (util.Uninferable,)
            values[key.value] = value.value
    elif isinstance(other, nodes.Const):
        values = other.value
    else:
        return (util.Uninferable,)

    try:
        # Evaluate the real % operator on the concrete values.
        return (nodes.const_factory(instance.value % values),)
    except (TypeError, KeyError, ValueError):
        # Malformed format string or mismatched arguments.
        return (util.Uninferable,)

694 

695 

def _invoke_binop_inference(
    instance: InferenceResult,
    opnode: nodes.AugAssign | nodes.BinOp,
    op: str,
    other: InferenceResult,
    context: InferenceContext,
    method_name: str,
) -> Generator[InferenceResult, None, None]:
    """Invoke binary operation inference on the given instance.

    Looks up *method_name* (e.g. ``__add__``) on *instance*, infers it,
    and delegates to the instance's ``infer_binary_op`` protocol.

    :raises InferenceError: if the method can't be inferred, infers to
        Uninferable, or the instance type has no binary-op protocol.
    """
    methods = dunder_lookup.lookup(instance, method_name)
    context = bind_context_to_node(context, instance)
    # First lookup candidate wins.
    method = methods[0]
    context.callcontext.callee = method

    if (
        isinstance(instance, nodes.Const)
        and isinstance(instance.value, str)
        and op == "%"
    ):
        # Special-case old-style string formatting rather than going
        # through __mod__ inference.
        return iter(_infer_old_style_string_formatting(instance, other, context))

    try:
        inferred = next(method.infer(context=context))
    except StopIteration as e:
        raise InferenceError(node=method, context=context) from e
    if isinstance(inferred, util.UninferableBase):
        raise InferenceError
    if not isinstance(
        instance, (nodes.Const, nodes.Tuple, nodes.List, nodes.ClassDef, bases.Instance)
    ):
        raise InferenceError  # pragma: no cover # Used as a failsafe
    return instance.infer_binary_op(opnode, op, other, context, inferred)

728 

729 

def _aug_op(
    instance: InferenceResult,
    opnode: nodes.AugAssign,
    op: str,
    other: InferenceResult,
    context: InferenceContext,
    reverse: bool = False,
) -> functools.partial[Generator[InferenceResult, None, None]]:
    """Get an inference callable for an augmented binary operation.

    Binds all arguments for a later call to _invoke_binop_inference,
    using the in-place dunder (e.g. ``__iadd__``) for *op*.
    """
    return functools.partial(
        _invoke_binop_inference,
        instance=instance,
        op=op,
        opnode=opnode,
        other=other,
        context=context,
        method_name=protocols.AUGMENTED_OP_METHOD[op],
    )

749 

750 

def _bin_op(
    instance: InferenceResult,
    opnode: nodes.AugAssign | nodes.BinOp,
    op: str,
    other: InferenceResult,
    context: InferenceContext,
    reverse: bool = False,
) -> functools.partial[Generator[InferenceResult, None, None]]:
    """Get an inference callable for a normal binary operation.

    If *reverse* is True, then the reflected method will be used instead.
    """
    # Pick the dunder table: reflected (e.g. __radd__) or forward (__add__).
    table = protocols.REFLECTED_BIN_OP_METHOD if reverse else protocols.BIN_OP_METHOD
    return functools.partial(
        _invoke_binop_inference,
        instance=instance,
        op=op,
        opnode=opnode,
        other=other,
        context=context,
        method_name=table[op],
    )

776 

777 

def _bin_op_or_union_type(
    left: bases.UnionType | nodes.ClassDef | nodes.Const,
    right: bases.UnionType | nodes.ClassDef | nodes.Const,
) -> Generator[InferenceResult, None, None]:
    """Create a new UnionType instance for binary or, e.g. int | str."""
    union = bases.UnionType(left, right)
    yield union

784 

785 

def _get_binop_contexts(context, left, right):
    """Get contexts for binary operations.

    This will return two inference contexts, the first one
    for x.__op__(y), the other one for y.__rop__(x), where
    only the arguments are inversed.
    """
    # The order is important, since the first one should be
    # left.__op__(right).
    for call_arg in (right, left):
        cloned = context.clone()
        cloned.callcontext = CallContext(args=[call_arg])
        cloned.boundnode = None
        yield cloned

800 

801 

802def _same_type(type1, type2) -> bool: 

803 """Check if type1 is the same as type2.""" 

804 return type1.qname() == type2.qname() 

805 

806 

def _get_binop_flow(
    left: InferenceResult,
    left_type: InferenceResult | None,
    binary_opnode: nodes.AugAssign | nodes.BinOp,
    right: InferenceResult,
    right_type: InferenceResult | None,
    context: InferenceContext,
    reverse_context: InferenceContext,
) -> list[functools.partial[Generator[InferenceResult, None, None]]]:
    """Get the flow for binary operations.

    The rules are a bit messy:

        * if left and right have the same type, then only one
          method will be called, left.__op__(right)
        * if left and right are unrelated typewise, then first
          left.__op__(right) is tried and if this does not exist
          or returns NotImplemented, then right.__rop__(left) is tried.
        * if left is a subtype of right, then only left.__op__(right)
          is tried.
        * if left is a supertype of right, then right.__rop__(left)
          is first tried and then left.__op__(right)
    """
    op = binary_opnode.op
    if _same_type(left_type, right_type):
        methods = [_bin_op(left, binary_opnode, op, right, context)]
    elif helpers.is_subtype(left_type, right_type):
        methods = [_bin_op(left, binary_opnode, op, right, context)]
    elif helpers.is_supertype(left_type, right_type):
        methods = [
            _bin_op(right, binary_opnode, op, left, reverse_context, reverse=True),
            _bin_op(left, binary_opnode, op, right, context),
        ]
    else:
        methods = [
            _bin_op(left, binary_opnode, op, right, context),
            _bin_op(right, binary_opnode, op, left, reverse_context, reverse=True),
        ]

    # On 3.10+, `X | Y` between class objects / existing union types /
    # the None constant additionally builds a UnionType (PEP 604 syntax).
    if (
        PY310_PLUS
        and op == "|"
        and (
            isinstance(left, (bases.UnionType, nodes.ClassDef))
            or isinstance(left, nodes.Const)
            and left.value is None
        )
        and (
            isinstance(right, (bases.UnionType, nodes.ClassDef))
            or isinstance(right, nodes.Const)
            and right.value is None
        )
    ):
        methods.extend([functools.partial(_bin_op_or_union_type, left, right)])
    return methods

862 

863 

def _get_aug_flow(
    left: InferenceResult,
    left_type: InferenceResult | None,
    aug_opnode: nodes.AugAssign,
    right: InferenceResult,
    right_type: InferenceResult | None,
    context: InferenceContext,
    reverse_context: InferenceContext,
) -> list[functools.partial[Generator[InferenceResult, None, None]]]:
    """Get the flow for augmented binary operations.

    The rules are a bit messy:

        * if left and right have the same type, then left.__augop__(right)
          is first tried and then left.__op__(right).
        * if left and right are unrelated typewise, then
          left.__augop__(right) is tried, then left.__op__(right)
          is tried and then right.__rop__(left) is tried.
        * if left is a subtype of right, then left.__augop__(right)
          is tried and then left.__op__(right).
        * if left is a supertype of right, then left.__augop__(right)
          is tried, then right.__rop__(left) and then
          left.__op__(right)
    """
    # "+=" -> binary op "+", augmented op "+=".
    bin_op = aug_opnode.op.strip("=")
    aug_op = aug_opnode.op
    if _same_type(left_type, right_type):
        methods = [
            _aug_op(left, aug_opnode, aug_op, right, context),
            _bin_op(left, aug_opnode, bin_op, right, context),
        ]
    elif helpers.is_subtype(left_type, right_type):
        methods = [
            _aug_op(left, aug_opnode, aug_op, right, context),
            _bin_op(left, aug_opnode, bin_op, right, context),
        ]
    elif helpers.is_supertype(left_type, right_type):
        methods = [
            _aug_op(left, aug_opnode, aug_op, right, context),
            _bin_op(right, aug_opnode, bin_op, left, reverse_context, reverse=True),
            _bin_op(left, aug_opnode, bin_op, right, context),
        ]
    else:
        methods = [
            _aug_op(left, aug_opnode, aug_op, right, context),
            _bin_op(left, aug_opnode, bin_op, right, context),
            _bin_op(right, aug_opnode, bin_op, left, reverse_context, reverse=True),
        ]
    return methods

913 

914 

def _infer_binary_operation(
    left: InferenceResult,
    right: InferenceResult,
    binary_opnode: nodes.AugAssign | nodes.BinOp,
    context: InferenceContext,
    flow_factory: GetFlowFactory,
) -> Generator[InferenceResult | util.BadBinaryOperationMessage, None, None]:
    """Infer a binary operation between a left operand and a right operand.

    Both normal and augmented binary operations funnel through here; the
    only difference between them is the ``flow_factory`` used to build the
    list of candidate dunder-method calls.
    """
    context, reverse_context = _get_binop_contexts(context, left, right)
    left_type = helpers.object_type(left)
    right_type = helpers.object_type(right)
    methods = flow_factory(
        left, left_type, binary_opnode, right, right_type, context, reverse_context
    )
    for method in methods:
        try:
            results = list(method())
        except (AttributeError, AttributeInferenceError):
            # The dunder method doesn't exist on this operand; try the next.
            continue
        except InferenceError:
            yield util.Uninferable
            return

        if any(isinstance(result, util.UninferableBase) for result in results):
            yield util.Uninferable
            return

        implemented = [result for result in results if not _is_not_implemented(result)]
        if not implemented:
            # Every result was NotImplemented (or there were none at all);
            # fall through to the next candidate method.
            continue
        if len(implemented) != len(results):
            # A mix of NotImplemented and real results: can't infer yet
            # what this is.
            yield util.Uninferable
            return

        yield from results
        return
    # The operation doesn't seem to be supported so let the caller know about it
    yield util.BadBinaryOperationMessage(left_type, binary_opnode.op, right_type)

963 

964 

def _infer_binop(
    self: nodes.BinOp, context: InferenceContext | None = None
) -> Generator[InferenceResult | util.BadBinaryOperationMessage, None, None]:
    """Binary operation inference logic."""
    # Two separate contexts are used for the operands because inferring
    # the lhs may leave entries in context.path that would prevent a
    # correct inference of the rhs.
    context = context or InferenceContext()
    lhs_context = copy_context(context)
    rhs_context = copy_context(context)

    lhs_values = self.left.infer(context=lhs_context)
    rhs_values = self.right.infer(context=rhs_context)
    for lhs, rhs in itertools.product(lhs_values, rhs_values):
        if isinstance(rhs, util.UninferableBase) or isinstance(
            lhs, util.UninferableBase
        ):
            # Don't know how to process this.
            yield util.Uninferable
            return

        try:
            yield from _infer_binary_operation(lhs, rhs, self, context, _get_binop_flow)
        except _NonDeducibleTypeHierarchy:
            yield util.Uninferable

990 

991 

@decorators.yes_if_nothing_inferred
@decorators.path_wrapper
def infer_binop(
    self: nodes.BinOp, context: InferenceContext | None = None, **kwargs: Any
) -> Generator[InferenceResult, None, None]:
    """Public BinOp inference entry point.

    Delegates to ``_infer_binop`` and filters out
    ``BadBinaryOperationMessage`` results so callers only see inferred
    values (or Uninferable).
    """
    return _filter_operation_errors(
        self, _infer_binop, context, util.BadBinaryOperationMessage
    )

1000 

1001 

# Register binary-operation inference on BinOp nodes.
nodes.BinOp._infer_binop = _infer_binop
nodes.BinOp._infer = infer_binop

1004 

# Map of comparison-operator symbols to the callables that evaluate them.
# "in"/"not in" need lambdas because the operand order of the membership
# test is reversed relative to operator.contains.
COMPARE_OPS: dict[str, Callable[[Any, Any], bool]] = {
    "==": operator.eq,
    "!=": operator.ne,
    "<": operator.lt,
    "<=": operator.le,
    ">": operator.gt,
    ">=": operator.ge,
    "in": lambda a, b: a in b,
    "not in": lambda a, b: a not in b,
}
# Identity comparisons are never evaluated; _do_compare returns
# Uninferable for these operators.
UNINFERABLE_OPS = {
    "is",
    "is not",
}

1019 

1020 

1021def _to_literal(node: SuccessfulInferenceResult) -> Any: 

1022 # Can raise SyntaxError or ValueError from ast.literal_eval 

1023 # Can raise AttributeError from node.as_string() as not all nodes have a visitor 

1024 # Is this the stupidest idea or the simplest idea? 

1025 return ast.literal_eval(node.as_string()) 

1026 

1027 

def _do_compare(
    left_iter: Iterable[InferenceResult], op: str, right_iter: Iterable[InferenceResult]
) -> bool | util.UninferableBase:
    """
    If all possible combinations are either True or False, return that:
    >>> _do_compare([1, 2], '<=', [3, 4])
    True
    >>> _do_compare([1, 2], '==', [3, 4])
    False

    If any item is uninferable, or if some combinations are True and some
    are False, return Uninferable:
    >>> _do_compare([1, 3], '<=', [2, 4])
    util.Uninferable
    """
    # Identity operators ("is"/"is not") can't be decided from literals.
    if op in UNINFERABLE_OPS:
        return util.Uninferable
    op_func = COMPARE_OPS[op]

    retval: bool | None = None
    for left, right in itertools.product(left_iter, right_iter):
        if isinstance(left, util.UninferableBase) or isinstance(
            right, util.UninferableBase
        ):
            return util.Uninferable

        try:
            left, right = _to_literal(left), _to_literal(right)
        except (SyntaxError, ValueError, AttributeError):
            # The operand can't be turned into a concrete literal.
            return util.Uninferable

        try:
            expr = op_func(left, right)
        except TypeError as exc:
            raise AstroidTypeError from exc

        if retval is None:
            retval = expr
        elif retval != expr:
            # Some combinations were True and some were False
            # (or both, but "True | False" is basically the same).
            return util.Uninferable

    assert retval is not None
    return retval  # it was all the same value

1072 

1073 

def _infer_compare(
    self: nodes.Compare, context: InferenceContext | None = None, **kwargs: Any
) -> Generator[nodes.Const | util.UninferableBase, None, None]:
    """Chained comparison inference logic."""
    outcome: bool | util.UninferableBase = True

    # should we break early if first element is uninferable?
    lhs = list(self.left.infer(context=context))
    for op, right_node in self.ops:
        # Eagerly evaluate the rhs so its values can be re-used as the
        # lhs of the next link in the chain.
        rhs = list(right_node.infer(context=context))
        try:
            outcome = _do_compare(lhs, op, rhs)
        except AstroidTypeError:
            outcome = util.Uninferable
            break
        if outcome is not True:
            break  # short-circuit
        lhs = rhs  # continue

    if outcome is util.Uninferable:
        yield outcome  # type: ignore[misc]
    else:
        yield nodes.Const(outcome)

1099 

1100 

# Register chained-comparison inference on Compare nodes.
nodes.Compare._infer = _infer_compare  # type: ignore[assignment]

1102 

1103 

def _infer_augassign(
    self: nodes.AugAssign, context: InferenceContext | None = None
) -> Generator[InferenceResult | util.BadBinaryOperationMessage, None, None]:
    """Inference logic for augmented binary operations."""
    context = context or InferenceContext()

    # The rhs gets its own cloned context so lhs inference can't pollute it.
    rhs_context = context.clone()

    target_values = self.target.infer_lhs(context=context)
    source_values = self.value.infer(context=rhs_context)
    for lhs, rhs in itertools.product(target_values, source_values):
        if isinstance(rhs, util.UninferableBase) or isinstance(
            lhs, util.UninferableBase
        ):
            # Don't know how to process this.
            yield util.Uninferable
            return

        try:
            yield from _infer_binary_operation(
                left=lhs,
                right=rhs,
                binary_opnode=self,
                context=context,
                flow_factory=_get_aug_flow,
            )
        except _NonDeducibleTypeHierarchy:
            yield util.Uninferable

1130 

1131 

@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_augassign(
    self: nodes.AugAssign, context: InferenceContext | None = None, **kwargs: Any
) -> Generator[InferenceResult, None, None]:
    """Public AugAssign inference entry point.

    Delegates to ``_infer_augassign`` and filters out
    ``BadBinaryOperationMessage`` results.
    """
    return _filter_operation_errors(
        self, _infer_augassign, context, util.BadBinaryOperationMessage
    )

1140 

1141 

# Register augmented-assignment inference on AugAssign nodes.
nodes.AugAssign._infer_augassign = _infer_augassign
nodes.AugAssign._infer = infer_augassign

1144 

1145# End of binary operation inference. 

1146 

1147 

@decorators.raise_if_nothing_inferred
def infer_arguments(
    self: nodes.Arguments, context: InferenceContext | None = None, **kwargs: Any
) -> Generator[InferenceResult, None, None]:
    """Infer an Arguments node for the argument name carried by the context."""
    if context is not None and context.lookupname is not None:
        return protocols._arguments_infer_argname(self, context.lookupname, context)
    # Without a lookup name there is no specific argument to infer.
    raise InferenceError(node=self, context=context)

1155 

1156 

# Register argument-name inference on Arguments nodes.
nodes.Arguments._infer = infer_arguments  # type: ignore[assignment]

1158 

1159 

@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_assign(
    self: nodes.AssignName | nodes.AssignAttr,
    context: InferenceContext | None = None,
    **kwargs: Any,
) -> Generator[InferenceResult, None, None]:
    """Infer an AssignName or AssignAttr by inspecting the RHS of the
    assignment it belongs to.
    """
    parent = self.parent
    if isinstance(parent, nodes.AugAssign):
        # Augmented assignment: delegate to the AugAssign node itself.
        return parent.infer(context)

    assigned = list(self.assigned_stmts(context=context))
    return bases._infer_stmts(assigned, context)

1175 

1176 

# Register assignment-target inference on AssignName/AssignAttr nodes.
nodes.AssignName._infer = infer_assign
nodes.AssignAttr._infer = infer_assign

1179 

1180 

@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_empty_node(
    self: nodes.EmptyNode, context: InferenceContext | None = None, **kwargs: Any
) -> Generator[InferenceResult, None, None]:
    """Infer an EmptyNode from the runtime object it wraps, if any."""
    if not self.has_underlying_object():
        yield util.Uninferable
        return
    try:
        yield from AstroidManager().infer_ast_from_something(
            self.object, context=context
        )
    except AstroidError:
        # Building an AST from the object failed; give up gracefully.
        yield util.Uninferable

1195 

1196 

# Register inference on EmptyNode nodes.
nodes.EmptyNode._infer = infer_empty_node  # type: ignore[assignment]

1198 

1199 

1200def _populate_context_lookup(call: nodes.Call, context: InferenceContext | None): 

1201 # Allows context to be saved for later 

1202 # for inference inside a function 

1203 context_lookup: dict[InferenceResult, InferenceContext] = {} 

1204 if context is None: 

1205 return context_lookup 

1206 for arg in call.args: 

1207 if isinstance(arg, nodes.Starred): 

1208 context_lookup[arg.value] = context 

1209 else: 

1210 context_lookup[arg] = context 

1211 keywords = call.keywords if call.keywords is not None else [] 

1212 for keyword in keywords: 

1213 context_lookup[keyword.value] = context 

1214 return context_lookup 

1215 

1216 

@decorators.raise_if_nothing_inferred
def infer_ifexp(
    self: nodes.IfExp, context: InferenceContext | None = None, **kwargs: Any
) -> Generator[InferenceResult, None, None]:
    """Support IfExp inference.

    When the truthiness of the condition can be inferred, only the
    matching branch is inferred; otherwise both branches are.
    """
    # Each branch gets its own context copy: inferring one branch may
    # leave entries in context.path that would block the other.
    context = context or InferenceContext()
    lhs_context = copy_context(context)
    rhs_context = copy_context(context)

    infer_both = False
    try:
        test = next(self.test.infer(context=context.clone()))
    except (InferenceError, StopIteration):
        infer_both = True
    else:
        infer_both = isinstance(test, util.UninferableBase)

    if infer_both:
        yield from self.body.infer(context=lhs_context)
        yield from self.orelse.infer(context=rhs_context)
    elif test.bool_value():
        yield from self.body.infer(context=lhs_context)
    else:
        yield from self.orelse.infer(context=rhs_context)

1250 

1251 

# Register conditional-expression inference on IfExp nodes.
nodes.IfExp._infer = infer_ifexp  # type: ignore[assignment]

1253 

1254 

def infer_functiondef(
    self: _FunctionDefT, context: InferenceContext | None = None, **kwargs: Any
) -> Generator[Property | _FunctionDefT, None, InferenceErrorInfo]:
    """Infer a FunctionDef node.

    A plain function infers to itself; a function decorated as a property
    (per ``bases._is_property``) infers to an ``objects.Property`` that
    wraps it.
    """
    if not self.decorators or not bases._is_property(self):
        yield self
        return InferenceErrorInfo(node=self, context=context)

    # When inferring a property, we instantiate a new `objects.Property` object,
    # which in turn, because it inherits from `FunctionDef`, sets itself in the locals
    # of the wrapping frame. This means that every time we infer a property, the locals
    # are mutated with a new instance of the property. To avoid this, we detect this
    # scenario and avoid passing the `parent` argument to the constructor.
    parent_frame = self.parent.frame(future=True)
    property_already_in_parent_locals = self.name in parent_frame.locals and any(
        isinstance(val, objects.Property) for val in parent_frame.locals[self.name]
    )
    # We also don't want to pass parent if the definition is within a Try node
    if isinstance(self.parent, (nodes.TryExcept, nodes.TryFinally, nodes.If)):
        property_already_in_parent_locals = True

    prop_func = objects.Property(
        function=self,
        name=self.name,
        lineno=self.lineno,
        # Omitting `parent` here skips the locals registration described above.
        parent=self.parent if not property_already_in_parent_locals else None,
        col_offset=self.col_offset,
    )
    if property_already_in_parent_locals:
        # Attach the parent directly, bypassing the constructor's
        # locals-mutating side effect.
        prop_func.parent = self.parent
    prop_func.postinit(body=[], args=self.args, doc_node=self.doc_node)
    yield prop_func
    return InferenceErrorInfo(node=self, context=context)

1287 

1288 

# Register inference on FunctionDef nodes.
nodes.FunctionDef._infer = infer_functiondef