Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/jedi/inference/value/iterable.py: 34% (396 statements)

1""" 

2Contains all classes and functions to deal with lists, dicts, generators and 

3iterators in general. 

4""" 

5from jedi.inference import compiled 

6from jedi.inference import analysis 

7from jedi.inference.lazy_value import LazyKnownValue, LazyKnownValues, \ 

8 LazyTreeValue 

9from jedi.inference.helpers import get_int_or_none, is_string, \ 

10 reraise_getitem_errors, SimpleGetItemNotFound 

11from jedi.inference.utils import safe_property, to_list 

12from jedi.inference.cache import inference_state_method_cache 

13from jedi.inference.filters import LazyAttributeOverwrite, publish_method 

14from jedi.inference.base_value import ValueSet, Value, NO_VALUES, \ 

15 ContextualizedNode, iterate_values, sentinel, \ 

16 LazyValueWrapper 

17from jedi.parser_utils import get_sync_comp_fors 

18from jedi.inference.context import CompForContext 

19from jedi.inference.value.dynamic_arrays import check_array_additions 

20 

21 

class IterableMixin:
    def py__next__(self, contextualized_node=None):
        return self.py__iter__(contextualized_node)

    def py__stop_iteration_returns(self):
        return ValueSet([compiled.builtin_from_name(self.inference_state, 'None')])

    # At the moment, safe values are simple values like "foo", 1 and not
    # lists/dicts. Therefore, as a small speed optimization, we can just use
    # the default instead of resolving the lazy wrapped values, which just do
    # this in the end anyway.
    # This mostly speeds up patterns like `sys.version_info >= (3, 0)` in
    # typeshed.
    get_safe_value = Value.get_safe_value

class GeneratorBase(LazyAttributeOverwrite, IterableMixin):
    array_type = None

    def _get_wrapped_value(self):
        instance, = self._get_cls().execute_annotation()
        return instance

    def _get_cls(self):
        generator, = self.inference_state.typing_module.py__getattribute__('Generator')
        return generator

    def py__bool__(self):
        return True

    @publish_method('__iter__')
    def _iter(self, arguments):
        return ValueSet([self])

    @publish_method('send')
    @publish_method('__next__')
    def _next(self, arguments):
        return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__())

    def py__stop_iteration_returns(self):
        return ValueSet([compiled.builtin_from_name(self.inference_state, 'None')])

    @property
    def name(self):
        return compiled.CompiledValueName(self, 'Generator')

    def get_annotated_class_object(self):
        from jedi.inference.gradual.generics import TupleGenericManager
        gen_values = self.merge_types_of_iterate().py__class__()
        gm = TupleGenericManager((gen_values, NO_VALUES, NO_VALUES))
        return self._get_cls().with_generics(gm)
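# Illustration: `typing.Generator` is parameterized as
# Generator[YieldType, SendType, ReturnType]. get_annotated_class_object()
# above only fills in the yield type (from merge_types_of_iterate()); the
# send and return slots are passed as NO_VALUES, so a generator whose yields
# infer to `int` ends up annotated roughly like a `Generator[int, ?, ?]`.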

class Generator(GeneratorBase):
    """Handling of `yield` functions."""
    def __init__(self, inference_state, func_execution_context):
        super().__init__(inference_state)
        self._func_execution_context = func_execution_context

    def py__iter__(self, contextualized_node=None):
        iterators = self._func_execution_context.infer_annotations()
        if iterators:
            return iterators.iterate(contextualized_node)
        return self._func_execution_context.get_yield_lazy_values()

    def py__stop_iteration_returns(self):
        return self._func_execution_context.get_return_values()

    def __repr__(self):
        return "<%s of %s>" % (type(self).__name__, self._func_execution_context)
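# Illustration: py__stop_iteration_returns() models what a `return x` inside
# a generator evaluates to for the caller (it becomes `StopIteration.value`,
# and hence the result of a `yield from` expression), which is why it maps to
# the function execution's inferred return values here.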

def comprehension_from_atom(inference_state, value, atom):
    bracket = atom.children[0]
    test_list_comp = atom.children[1]

    if bracket == '{':
        if atom.children[1].children[1] == ':':
            sync_comp_for = test_list_comp.children[3]
            if sync_comp_for.type == 'comp_for':
                sync_comp_for = sync_comp_for.children[1]

            return DictComprehension(
                inference_state,
                value,
                sync_comp_for_node=sync_comp_for,
                key_node=test_list_comp.children[0],
                value_node=test_list_comp.children[2],
            )
        else:
            cls = SetComprehension
    elif bracket == '(':
        cls = GeneratorComprehension
    elif bracket == '[':
        cls = ListComprehension

    sync_comp_for = test_list_comp.children[1]
    if sync_comp_for.type == 'comp_for':
        sync_comp_for = sync_comp_for.children[1]

    return cls(
        inference_state,
        defining_context=value,
        sync_comp_for_node=sync_comp_for,
        entry_node=test_list_comp.children[0],
    )
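# Illustration of the dispatch above, for hypothetical sources:
#   [x for x in y]         -> ListComprehension
#   {x for x in y}         -> SetComprehension
#   {k: v for k, v in y}   -> DictComprehension  (detected by the ':' check)
#   (x for x in y)         -> GeneratorComprehension
# The `comp_for` branches strip a leading `async` keyword so that only the
# synchronous `sync_comp_for` node is stored.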

class ComprehensionMixin:
    @inference_state_method_cache()
    def _get_comp_for_context(self, parent_context, comp_for):
        return CompForContext(parent_context, comp_for)

    def _nested(self, comp_fors, parent_context=None):
        comp_for = comp_fors[0]

        is_async = comp_for.parent.type == 'comp_for'

        input_node = comp_for.children[3]
        parent_context = parent_context or self._defining_context
        input_types = parent_context.infer_node(input_node)

        cn = ContextualizedNode(parent_context, input_node)
        iterated = input_types.iterate(cn, is_async=is_async)
        exprlist = comp_for.children[1]
        for i, lazy_value in enumerate(iterated):
            types = lazy_value.infer()
            dct = unpack_tuple_to_dict(parent_context, types, exprlist)
            context = self._get_comp_for_context(
                parent_context,
                comp_for,
            )
            with context.predefine_names(comp_for, dct):
                try:
                    yield from self._nested(comp_fors[1:], context)
                except IndexError:
                    iterated = context.infer_node(self._entry_node)
                    if self.array_type == 'dict':
                        yield iterated, context.infer_node(self._value_node)
                    else:
                        yield iterated

    @inference_state_method_cache(default=[])
    @to_list
    def _iterate(self):
        comp_fors = tuple(get_sync_comp_fors(self._sync_comp_for_node))
        yield from self._nested(comp_fors)

    def py__iter__(self, contextualized_node=None):
        for set_ in self._iterate():
            yield LazyKnownValues(set_)

    def __repr__(self):
        return "<%s of %s>" % (type(self).__name__, self._sync_comp_for_node)
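# Illustration: _nested() recurses once per `for` clause. The recursion
# bottoms out when `comp_fors[1:]` is empty and `comp_fors[0]` raises
# IndexError, at which point the entry node is inferred in the innermost
# context. For a hypothetical `[a + b for a in xs for b in ys]`, `a + b` is
# inferred with both `a` and `b` predefined from their iterated values.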

class _DictMixin:
    def _get_generics(self):
        return tuple(c_set.py__class__() for c_set in self.get_mapping_item_values())

class Sequence(LazyAttributeOverwrite, IterableMixin):
    api_type = 'instance'

    @property
    def name(self):
        return compiled.CompiledValueName(self, self.array_type)

    def _get_generics(self):
        return (self.merge_types_of_iterate().py__class__(),)

    @inference_state_method_cache(default=())
    def _cached_generics(self):
        return self._get_generics()

    def _get_wrapped_value(self):
        from jedi.inference.gradual.base import GenericClass
        from jedi.inference.gradual.generics import TupleGenericManager
        klass = compiled.builtin_from_name(self.inference_state, self.array_type)
        c, = GenericClass(
            klass,
            TupleGenericManager(self._cached_generics())
        ).execute_annotation()
        return c

    def py__bool__(self):
        return None  # We don't know the length, because of appends.

    @safe_property
    def parent(self):
        return self.inference_state.builtins_module

    def py__getitem__(self, index_value_set, contextualized_node):
        if self.array_type == 'dict':
            return self._dict_values()
        return iterate_values(ValueSet([self]))
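# Illustration: _get_wrapped_value() turns the plain builtin class into a
# parameterized one. For a list whose iterated values all infer to `int`,
# the wrapped value behaves roughly like an instance of `list[int]`, which
# is what attribute access and further iteration are resolved against.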

class _BaseComprehension(ComprehensionMixin):
    def __init__(self, inference_state, defining_context, sync_comp_for_node, entry_node):
        assert sync_comp_for_node.type == 'sync_comp_for'
        super().__init__(inference_state)
        self._defining_context = defining_context
        self._sync_comp_for_node = sync_comp_for_node
        self._entry_node = entry_node

class ListComprehension(_BaseComprehension, Sequence):
    array_type = 'list'

    def py__simple_getitem__(self, index):
        if isinstance(index, slice):
            return ValueSet([self])

        all_types = list(self.py__iter__())
        with reraise_getitem_errors(IndexError, TypeError):
            lazy_value = all_types[index]
        return lazy_value.infer()
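# Illustration: slicing a comprehension returns the comprehension itself,
# since a slice of a list comprehension is still list-like; only integer
# indexing picks out a single lazily inferred element. For a hypothetical
# `lc = [f(x) for x in xs]`, `lc[1:]` completes like `lc`, while `lc[0]`
# infers the first iterated value.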

class SetComprehension(_BaseComprehension, Sequence):
    array_type = 'set'

class GeneratorComprehension(_BaseComprehension, GeneratorBase):
    pass

class _DictKeyMixin:
    # TODO merge with _DictMixin?
    def get_mapping_item_values(self):
        return self._dict_keys(), self._dict_values()

    def get_key_values(self):
        # TODO merge with _dict_keys?
        return self._dict_keys()

class DictComprehension(ComprehensionMixin, Sequence, _DictKeyMixin):
    array_type = 'dict'

    def __init__(self, inference_state, defining_context, sync_comp_for_node,
                 key_node, value_node):
        assert sync_comp_for_node.type == 'sync_comp_for'
        super().__init__(inference_state)
        self._defining_context = defining_context
        self._sync_comp_for_node = sync_comp_for_node
        self._entry_node = key_node
        self._value_node = value_node

    def py__iter__(self, contextualized_node=None):
        for keys, values in self._iterate():
            yield LazyKnownValues(keys)

    def py__simple_getitem__(self, index):
        for keys, values in self._iterate():
            for k in keys:
                # Be careful if refactoring in the future: index could be a
                # slice object.
                if k.get_safe_value(default=object()) == index:
                    return values
        raise SimpleGetItemNotFound()

    def _dict_keys(self):
        return ValueSet.from_sets(keys for keys, values in self._iterate())

    def _dict_values(self):
        return ValueSet.from_sets(values for keys, values in self._iterate())

    @publish_method('values')
    def _imitate_values(self, arguments):
        lazy_value = LazyKnownValues(self._dict_values())
        return ValueSet([FakeList(self.inference_state, [lazy_value])])

    @publish_method('items')
    def _imitate_items(self, arguments):
        lazy_values = [
            LazyKnownValue(
                FakeTuple(
                    self.inference_state,
                    [LazyKnownValues(key),
                     LazyKnownValues(value)]
                )
            )
            for key, value in self._iterate()
        ]

        return ValueSet([FakeList(self.inference_state, lazy_values)])

    def exact_key_items(self):
        # NOTE: Something smarter could probably be done here to achieve
        # better completions, but at least like this jedi doesn't crash.
        return []
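# Illustration: for a hypothetical `{k: 1 for k in ('a', 'b')}`,
# py__simple_getitem__('a') compares each inferred key's safe value against
# the index and returns the matching value set, while `.values()` and
# `.items()` are imitated with FakeList/FakeTuple instances so completions
# on them behave like completions on real lists and tuples.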

class SequenceLiteralValue(Sequence):
    _TUPLE_LIKE = 'testlist_star_expr', 'testlist', 'subscriptlist'
    mapping = {'(': 'tuple',
               '[': 'list',
               '{': 'set'}

    def __init__(self, inference_state, defining_context, atom):
        super().__init__(inference_state)
        self.atom = atom
        self._defining_context = defining_context

        if self.atom.type in self._TUPLE_LIKE:
            self.array_type = 'tuple'
        else:
            self.array_type = SequenceLiteralValue.mapping[atom.children[0]]
            """The builtin name of the array (list, set, tuple or dict)."""

    def _get_generics(self):
        if self.array_type == 'tuple':
            return tuple(x.infer().py__class__() for x in self.py__iter__())
        return super()._get_generics()

    def py__simple_getitem__(self, index):
        """Here the index is an int/str. Raises IndexError/KeyError."""
        if isinstance(index, slice):
            return ValueSet([self])
        else:
            with reraise_getitem_errors(TypeError, KeyError, IndexError):
                node = self.get_tree_entries()[index]
            if node == ':' or node.type == 'subscript':
                return NO_VALUES
            return self._defining_context.infer_node(node)

    def py__iter__(self, contextualized_node=None):
        """
        While values returns the possible values for any array field, this
        function returns the value for a certain index.
        """
        for node in self.get_tree_entries():
            if node == ':' or node.type == 'subscript':
                # TODO this should probably use at least part of the code
                # of infer_subscript_list.
                yield LazyKnownValue(Slice(self._defining_context, None, None, None))
            else:
                yield LazyTreeValue(self._defining_context, node)
        yield from check_array_additions(self._defining_context, self)

    def py__len__(self):
        # This function is rarely used. It's more of an experiment.
        return len(self.get_tree_entries())

    def get_tree_entries(self):
        c = self.atom.children

        if self.atom.type in self._TUPLE_LIKE:
            return c[::2]

        array_node = c[1]
        if array_node in (']', '}', ')'):
            return []  # Direct closing bracket, doesn't contain items.

        if array_node.type == 'testlist_comp':
            # Filter out (for now) PEP 448 single-star unpacking.
            return [value for value in array_node.children[::2]
                    if value.type != "star_expr"]
        elif array_node.type == 'dictorsetmaker':
            kv = []
            iterator = iter(array_node.children)
            for key in iterator:
                if key == "**":
                    # Dict with PEP 448 double-star unpacking;
                    # for now ignoring the values imported by **.
                    next(iterator)
                    next(iterator, None)  # Possible comma.
                else:
                    op = next(iterator, None)
                    if op is None or op == ',':
                        if key.type == "star_expr":
                            # PEP 448 single-star unpacking;
                            # for now ignoring values imported by *.
                            pass
                        else:
                            kv.append(key)  # A set.
                    else:
                        assert op == ':'  # A dict.
                        kv.append((key, next(iterator)))
                        next(iterator, None)  # Possible comma.
            return kv
        else:
            if array_node.type == "star_expr":
                # PEP 448 single-star unpacking;
                # for now ignoring values imported by *.
                return []
            else:
                return [array_node]

    def __repr__(self):
        return "<%s of %s>" % (self.__class__.__name__, self.atom)
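# Illustration of get_tree_entries() on hypothetical literals:
#   (1, 2)            -> [node(1), node(2)]
#   {1: 'a', **rest}  -> [(node(1), node('a'))]   # **rest is skipped for now
#   {1, *rest}        -> [node(1)]                # *rest is skipped for now
# Dict entries come back as (key, value) node pairs, set/list/tuple entries
# as plain nodes; star unpackings are deliberately ignored.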

class DictLiteralValue(_DictMixin, SequenceLiteralValue, _DictKeyMixin):
    array_type = 'dict'

    def __init__(self, inference_state, defining_context, atom):
        # Intentionally don't call the super class. This is definitely a sign
        # that the architecture is bad and we should refactor.
        Sequence.__init__(self, inference_state)
        self._defining_context = defining_context
        self.atom = atom

    def py__simple_getitem__(self, index):
        """Here the index is an int/str. Raises IndexError/KeyError."""
        compiled_value_index = compiled.create_simple_object(self.inference_state, index)
        for key, value in self.get_tree_entries():
            for k in self._defining_context.infer_node(key):
                for key_v in k.execute_operation(compiled_value_index, '=='):
                    if key_v.get_safe_value():
                        return self._defining_context.infer_node(value)
        raise SimpleGetItemNotFound('No key found in dictionary %s.' % self)

    def py__iter__(self, contextualized_node=None):
        """
        While values returns the possible values for any array field, this
        function returns the value for a certain index.
        """
        # Get keys.
        types = NO_VALUES
        for k, _ in self.get_tree_entries():
            types |= self._defining_context.infer_node(k)
        # We don't know which dict index comes first, therefore always
        # yield all the types.
        for _ in types:
            yield LazyKnownValues(types)

    @publish_method('values')
    def _imitate_values(self, arguments):
        lazy_value = LazyKnownValues(self._dict_values())
        return ValueSet([FakeList(self.inference_state, [lazy_value])])

    @publish_method('items')
    def _imitate_items(self, arguments):
        lazy_values = [
            LazyKnownValue(FakeTuple(
                self.inference_state,
                (LazyTreeValue(self._defining_context, key_node),
                 LazyTreeValue(self._defining_context, value_node))
            )) for key_node, value_node in self.get_tree_entries()
        ]

        return ValueSet([FakeList(self.inference_state, lazy_values)])

    def exact_key_items(self):
        """
        Returns a generator of tuples like dict.items(), where the key is
        resolved (as a string) and the values are still lazy values.
        """
        for key_node, value in self.get_tree_entries():
            for key in self._defining_context.infer_node(key_node):
                if is_string(key):
                    yield key.get_safe_value(), LazyTreeValue(self._defining_context, value)

    def _dict_values(self):
        return ValueSet.from_sets(
            self._defining_context.infer_node(v)
            for k, v in self.get_tree_entries()
        )

    def _dict_keys(self):
        return ValueSet.from_sets(
            self._defining_context.infer_node(k)
            for k, v in self.get_tree_entries()
        )
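# Illustration: py__simple_getitem__ above resolves a lookup on a hypothetical
# literal `{'a': 1}` by building a compiled object for the index ('a'),
# executing `==` against each inferred key, and returning the value set of
# the first key whose comparison yields a truthy safe value.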

class _FakeSequence(Sequence):
    def __init__(self, inference_state, lazy_value_list):
        """
        The array_type (set on subclasses) should be one of "tuple" or "list".
        """
        super().__init__(inference_state)
        self._lazy_value_list = lazy_value_list

    def py__simple_getitem__(self, index):
        if isinstance(index, slice):
            return ValueSet([self])

        with reraise_getitem_errors(IndexError, TypeError):
            lazy_value = self._lazy_value_list[index]
        return lazy_value.infer()

    def py__iter__(self, contextualized_node=None):
        return self._lazy_value_list

    def py__bool__(self):
        return bool(len(self._lazy_value_list))

    def __repr__(self):
        return "<%s of %s>" % (type(self).__name__, self._lazy_value_list)

class FakeTuple(_FakeSequence):
    array_type = 'tuple'

class FakeList(_FakeSequence):
    array_type = 'list'

class FakeDict(_DictMixin, Sequence, _DictKeyMixin):
    array_type = 'dict'

    def __init__(self, inference_state, dct):
        super().__init__(inference_state)
        self._dct = dct

    def py__iter__(self, contextualized_node=None):
        for key in self._dct:
            yield LazyKnownValue(compiled.create_simple_object(self.inference_state, key))

    def py__simple_getitem__(self, index):
        with reraise_getitem_errors(KeyError, TypeError):
            lazy_value = self._dct[index]
        return lazy_value.infer()

    @publish_method('values')
    def _values(self, arguments):
        return ValueSet([FakeTuple(
            self.inference_state,
            [LazyKnownValues(self._dict_values())]
        )])

    def _dict_values(self):
        return ValueSet.from_sets(lazy_value.infer() for lazy_value in self._dct.values())

    def _dict_keys(self):
        return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__())

    def exact_key_items(self):
        return self._dct.items()

    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self._dct)
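# Illustration: FakeDict wraps an already-built mapping of plain keys to lazy
# values, e.g. (hypothetically) FakeDict(state, {'a': LazyKnownValues(vals)}),
# so simple getitem lookups go straight through the underlying dict instead
# of re-inferring tree nodes.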

class MergedArray(Sequence):
    def __init__(self, inference_state, arrays):
        super().__init__(inference_state)
        self.array_type = arrays[-1].array_type
        self._arrays = arrays

    def py__iter__(self, contextualized_node=None):
        for array in self._arrays:
            yield from array.py__iter__()

    def py__simple_getitem__(self, index):
        return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__())
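# Illustration: MergedArray presents several arrays as one sequence;
# iteration chains the parts in order, and since an index cannot be mapped
# back to a single part, py__simple_getitem__ returns the union of all
# iterated values rather than a positional lookup.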

def unpack_tuple_to_dict(context, types, exprlist):
    """
    Unpacking tuple assignments in for statements and expr_stmts.
    """
    if exprlist.type == 'name':
        return {exprlist.value: types}
    elif exprlist.type == 'atom' and exprlist.children[0] in ('(', '['):
        return unpack_tuple_to_dict(context, types, exprlist.children[1])
    elif exprlist.type in ('testlist', 'testlist_comp', 'exprlist',
                           'testlist_star_expr'):
        dct = {}
        parts = iter(exprlist.children[::2])
        n = 0
        for lazy_value in types.iterate(ContextualizedNode(context, exprlist)):
            n += 1
            try:
                part = next(parts)
            except StopIteration:
                analysis.add(context, 'value-error-too-many-values', part,
                             message="ValueError: too many values to unpack (expected %s)" % n)
            else:
                dct.update(unpack_tuple_to_dict(context, lazy_value.infer(), part))
        has_parts = next(parts, None)
        if types and has_parts is not None:
            analysis.add(context, 'value-error-too-few-values', has_parts,
                         message="ValueError: need more than %s values to unpack" % n)
        return dct
    elif exprlist.type == 'power' or exprlist.type == 'atom_expr':
        # Something like ``arr[x], var = ...``.
        # This is something that is not yet supported and would also be
        # difficult to write into a dict.
        return {}
    elif exprlist.type == 'star_expr':  # `a, *b, c = x` type unpackings
        # Currently we're not supporting them.
        return {}
    raise NotImplementedError
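# Illustration: for a hypothetical assignment `a, b = 1, ''`, the exprlist
# branch maps each name to the inferred values of its tuple slot, giving
# roughly {'a': {int-instance}, 'b': {str-instance}}; mismatched lengths
# produce the 'value-error-too-many/few-values' analysis warnings instead.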

class Slice(LazyValueWrapper):
    def __init__(self, python_context, start, stop, step):
        self.inference_state = python_context.inference_state
        self._context = python_context
        # All of them are either a Precedence or None.
        self._start = start
        self._stop = stop
        self._step = step

    def _get_wrapped_value(self):
        value = compiled.builtin_from_name(self._context.inference_state, 'slice')
        slice_value, = value.execute_with_values()
        return slice_value

    def get_safe_value(self, default=sentinel):
        """
        Imitate CompiledValue.obj behavior and return a ``builtin.slice()``
        object.
        """
        def get(element):
            if element is None:
                return None

            result = self._context.infer_node(element)
            if len(result) != 1:
                # For simplicity, we want slices to be clearly defined with
                # just one type. Otherwise we will return an empty slice
                # object.
                raise IndexError

            value, = result
            return get_int_or_none(value)

        try:
            return slice(get(self._start), get(self._stop), get(self._step))
        except IndexError:
            return slice(None, None, None)