Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/typing_extensions.py: 28%


1768 statements  

1import abc 

2import builtins 

3import collections 

4import collections.abc 

5import contextlib 

6import enum 

7import functools 

8import inspect 

9import io 

10import keyword 

11import operator 

12import sys 

13import types as _types 

14import typing 

15import warnings 

16 

17if sys.version_info >= (3, 14): 

18 import annotationlib 

19 

20__all__ = [ 

21 # Super-special typing primitives. 

22 'Any', 

23 'ClassVar', 

24 'Concatenate', 

25 'Final', 

26 'LiteralString', 

27 'ParamSpec', 

28 'ParamSpecArgs', 

29 'ParamSpecKwargs', 

30 'Self', 

31 'Type', 

32 'TypeVar', 

33 'TypeVarTuple', 

34 'Unpack', 

35 

36 # ABCs (from collections.abc). 

37 'Awaitable', 

38 'AsyncIterator', 

39 'AsyncIterable', 

40 'Coroutine', 

41 'AsyncGenerator', 

42 'AsyncContextManager', 

43 'Buffer', 

44 'ChainMap', 

45 

46 # Concrete collection types. 

47 'ContextManager', 

48 'Counter', 

49 'Deque', 

50 'DefaultDict', 

51 'NamedTuple', 

52 'OrderedDict', 

53 'TypedDict', 

54 

55 # Structural checks, a.k.a. protocols. 

56 'SupportsAbs', 

57 'SupportsBytes', 

58 'SupportsComplex', 

59 'SupportsFloat', 

60 'SupportsIndex', 

61 'SupportsInt', 

62 'SupportsRound', 

63 'Reader', 

64 'Writer', 

65 

66 # One-off things. 

67 'Annotated', 

68 'assert_never', 

69 'assert_type', 

70 'clear_overloads', 

71 'dataclass_transform', 

72 'deprecated', 

73 'Doc', 

74 'evaluate_forward_ref', 

75 'get_overloads', 

76 'final', 

77 'Format', 

78 'get_annotations', 

79 'get_args', 

80 'get_origin', 

81 'get_original_bases', 

82 'get_protocol_members', 

83 'get_type_hints', 

84 'IntVar', 

85 'is_protocol', 

86 'is_typeddict', 

87 'Literal', 

88 'NewType', 

89 'overload', 

90 'override', 

91 'Protocol', 

92 'Sentinel', 

93 'reveal_type', 

94 'runtime', 

95 'runtime_checkable', 

96 'Text', 

97 'TypeAlias', 

98 'TypeAliasType', 

99 'TypeForm', 

100 'TypeGuard', 

101 'TypeIs', 

102 'TYPE_CHECKING', 

103 'Never', 

104 'NoReturn', 

105 'ReadOnly', 

106 'Required', 

107 'NotRequired', 

108 'NoDefault', 

109 'NoExtraItems', 

110 

111 # Pure aliases, have always been in typing 

112 'AbstractSet', 

113 'AnyStr', 

114 'BinaryIO', 

115 'Callable', 

116 'Collection', 

117 'Container', 

118 'Dict', 

119 'ForwardRef', 

120 'FrozenSet', 

121 'Generator', 

122 'Generic', 

123 'Hashable', 

124 'IO', 

125 'ItemsView', 

126 'Iterable', 

127 'Iterator', 

128 'KeysView', 

129 'List', 

130 'Mapping', 

131 'MappingView', 

132 'Match', 

133 'MutableMapping', 

134 'MutableSequence', 

135 'MutableSet', 

136 'Optional', 

137 'Pattern', 

138 'Reversible', 

139 'Sequence', 

140 'Set', 

141 'Sized', 

142 'TextIO', 

143 'Tuple', 

144 'Union', 

145 'ValuesView', 

146 'cast', 

147 'no_type_check', 

148 'no_type_check_decorator', 

149] 

150 

151# for backward compatibility 

152PEP_560 = True 

153GenericMeta = type 

154_PEP_696_IMPLEMENTED = sys.version_info >= (3, 13, 0, "beta") 

155 

156# Added with bpo-45166 to 3.10.1+ and some 3.9 versions 

157_FORWARD_REF_HAS_CLASS = "__forward_is_class__" in typing.ForwardRef.__slots__ 

158 

159# The functions below are modified copies of typing internal helpers. 

160# They are needed by _ProtocolMeta and they provide support for PEP 646. 

161 

162 

163class _Sentinel: 

164 def __repr__(self): 

165 return "<sentinel>" 

166 

167 

168_marker = _Sentinel() 

169 

170 

171if sys.version_info >= (3, 10): 

172 def _should_collect_from_parameters(t): 

173 return isinstance( 

174 t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType) 

175 ) 

176else: 

177 def _should_collect_from_parameters(t): 

178 return isinstance(t, (typing._GenericAlias, _types.GenericAlias)) 

179 

180 

181NoReturn = typing.NoReturn 

182 

183# Some unconstrained type variables. These are used by the container types. 

184# (These are not for export.) 

185T = typing.TypeVar('T') # Any type. 

186KT = typing.TypeVar('KT') # Key type. 

187VT = typing.TypeVar('VT') # Value type. 

188T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers. 

189T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant. 

190 

191 

192if sys.version_info >= (3, 11): 

193 from typing import Any 

194else: 

195 

196 class _AnyMeta(type): 

197 def __instancecheck__(self, obj): 

198 if self is Any: 

199 raise TypeError("typing_extensions.Any cannot be used with isinstance()") 

200 return super().__instancecheck__(obj) 

201 

202 def __repr__(self): 

203 if self is Any: 

204 return "typing_extensions.Any" 

205 return super().__repr__() 

206 

207 class Any(metaclass=_AnyMeta): 

208 """Special type indicating an unconstrained type. 

209 - Any is compatible with every type. 

210 - Any is assumed to have all methods.

211 - All values are assumed to be instances of Any.

212 Note that all the above statements are true from the point of view of 

213 static type checkers. At runtime, Any should not be used with instance 

214 checks. 

215 """ 

216 def __new__(cls, *args, **kwargs): 

217 if cls is Any: 

218 raise TypeError("Any cannot be instantiated") 

219 return super().__new__(cls, *args, **kwargs) 
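# Illustrative usage sketch (not part of the upstream module; the throwaway
# class name below is hypothetical): on every supported version, Any can be
# used as a base class, while isinstance() checks against it are rejected at
# runtime.
class _AnyUsageExample(Any):
    """A class that opts out of static checking by subclassing Any."""

try:
    isinstance(1, Any)
except TypeError:
    pass  # expected: Any cannot be used with isinstance()

del _AnyUsageExample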

220 

221 

222ClassVar = typing.ClassVar 

223 

224 

225class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): 

226 def __repr__(self): 

227 return 'typing_extensions.' + self._name 

228 

229 

230Final = typing.Final 

231 

232if sys.version_info >= (3, 11): 

233 final = typing.final 

234else: 

235 # @final exists in 3.8+, but we backport it for all versions 

236 # before 3.11 to keep support for the __final__ attribute. 

237 # See https://bugs.python.org/issue46342 

238 def final(f): 

239 """This decorator can be used to indicate to type checkers that 

240 the decorated method cannot be overridden, and decorated class 

241 cannot be subclassed. For example: 

242 

243 class Base: 

244 @final 

245 def done(self) -> None: 

246 ... 

247 class Sub(Base): 

248 def done(self) -> None: # Error reported by type checker 

249 ... 

250 @final 

251 class Leaf: 

252 ... 

253 class Other(Leaf): # Error reported by type checker 

254 ... 

255 

256 There is no runtime checking of these properties. The decorator 

257 sets the ``__final__`` attribute to ``True`` on the decorated object 

258 to allow runtime introspection. 

259 """ 

260 try: 

261 f.__final__ = True 

262 except (AttributeError, TypeError): 

263 # Skip the attribute silently if it is not writable. 

264 # AttributeError happens if the object has __slots__ or a 

265 # read-only property, TypeError if it's a builtin class. 

266 pass 

267 return f 

268 

269 

270def IntVar(name): 

271 return typing.TypeVar(name) 

272 

273 

274# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8 

275if sys.version_info >= (3, 10, 1): 

276 Literal = typing.Literal 

277else: 

278 def _flatten_literal_params(parameters): 

279 """An internal helper for Literal creation: flatten Literals among parameters""" 

280 params = [] 

281 for p in parameters: 

282 if isinstance(p, _LiteralGenericAlias): 

283 params.extend(p.__args__) 

284 else: 

285 params.append(p) 

286 return tuple(params) 

287 

288 def _value_and_type_iter(params): 

289 for p in params: 

290 yield p, type(p) 

291 

292 class _LiteralGenericAlias(typing._GenericAlias, _root=True): 

293 def __eq__(self, other): 

294 if not isinstance(other, _LiteralGenericAlias): 

295 return NotImplemented 

296 these_args_deduped = set(_value_and_type_iter(self.__args__)) 

297 other_args_deduped = set(_value_and_type_iter(other.__args__)) 

298 return these_args_deduped == other_args_deduped 

299 

300 def __hash__(self): 

301 return hash(frozenset(_value_and_type_iter(self.__args__))) 

302 

303 class _LiteralForm(_ExtensionsSpecialForm, _root=True): 

304 def __init__(self, doc: str): 

305 self._name = 'Literal' 

306 self._doc = self.__doc__ = doc 

307 

308 def __getitem__(self, parameters): 

309 if not isinstance(parameters, tuple): 

310 parameters = (parameters,) 

311 

312 parameters = _flatten_literal_params(parameters) 

313 

314 val_type_pairs = list(_value_and_type_iter(parameters)) 

315 try: 

316 deduped_pairs = set(val_type_pairs) 

317 except TypeError: 

318 # unhashable parameters 

319 pass 

320 else: 

321 # similar logic to typing._deduplicate on Python 3.9+ 

322 if len(deduped_pairs) < len(val_type_pairs): 

323 new_parameters = [] 

324 for pair in val_type_pairs: 

325 if pair in deduped_pairs: 

326 new_parameters.append(pair[0]) 

327 deduped_pairs.remove(pair) 

328 assert not deduped_pairs, deduped_pairs 

329 parameters = tuple(new_parameters) 

330 

331 return _LiteralGenericAlias(self, parameters) 

332 

333 Literal = _LiteralForm(doc="""\ 

334 A type that can be used to indicate to type checkers 

335 that the corresponding value has a value literally equivalent 

336 to the provided parameter. For example: 

337 

338 var: Literal[4] = 4 

339 

340 The type checker understands that 'var' is literally equal to 

341 the value 4 and no other value. 

342 

343 Literal[...] cannot be subclassed. There is no runtime 

344 checking verifying that the parameter is actually a value 

345 instead of a type.""") 
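# Illustrative usage sketch (not part of the upstream module): Literal
# parameters are deduplicated and compared as (value, type) pairs, so
# ordering does not matter but bool and int literals stay distinct.
assert Literal[1, 2] == Literal[2, 1]
assert Literal[1, 1] == Literal[1]
assert Literal[0] != Literal[False]  # 0 and False differ once types are compared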

346 

347 

348_overload_dummy = typing._overload_dummy 

349 

350 

351if hasattr(typing, "get_overloads"): # 3.11+ 

352 overload = typing.overload 

353 get_overloads = typing.get_overloads 

354 clear_overloads = typing.clear_overloads 

355else: 

356 # {module: {qualname: {firstlineno: func}}} 

357 _overload_registry = collections.defaultdict( 

358 functools.partial(collections.defaultdict, dict) 

359 ) 

360 

361 def overload(func): 

362 """Decorator for overloaded functions/methods. 

363 

364 In a stub file, place two or more stub definitions for the same 

365 function in a row, each decorated with @overload. For example: 

366 

367 @overload 

368 def utf8(value: None) -> None: ... 

369 @overload 

370 def utf8(value: bytes) -> bytes: ... 

371 @overload 

372 def utf8(value: str) -> bytes: ... 

373 

374 In a non-stub file (i.e. a regular .py file), do the same but 

375 follow it with an implementation. The implementation should *not* 

376 be decorated with @overload. For example: 

377 

378 @overload 

379 def utf8(value: None) -> None: ... 

380 @overload 

381 def utf8(value: bytes) -> bytes: ... 

382 @overload 

383 def utf8(value: str) -> bytes: ... 

384 def utf8(value): 

385 # implementation goes here 

386 

387 The overloads for a function can be retrieved at runtime using the 

388 get_overloads() function. 

389 """ 

390 # classmethod and staticmethod 

391 f = getattr(func, "__func__", func) 

392 try: 

393 _overload_registry[f.__module__][f.__qualname__][ 

394 f.__code__.co_firstlineno 

395 ] = func 

396 except AttributeError: 

397 # Not a normal function; ignore. 

398 pass 

399 return _overload_dummy 

400 

401 def get_overloads(func): 

402 """Return all defined overloads for *func* as a sequence.""" 

403 # classmethod and staticmethod 

404 f = getattr(func, "__func__", func) 

405 if f.__module__ not in _overload_registry: 

406 return [] 

407 mod_dict = _overload_registry[f.__module__] 

408 if f.__qualname__ not in mod_dict: 

409 return [] 

410 return list(mod_dict[f.__qualname__].values()) 

411 

412 def clear_overloads(): 

413 """Clear all overloads in the registry.""" 

414 _overload_registry.clear() 
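# Illustrative usage sketch (not part of the upstream module; the helper
# below is hypothetical): overloads registered through this module's
# @overload can be retrieved again with get_overloads().
def _overload_usage_example():
    @overload
    def parse(value: int) -> str: ...
    @overload
    def parse(value: str) -> str: ...

    def parse(value):  # the implementation itself is not decorated
        return str(value)

    return parse

assert len(get_overloads(_overload_usage_example())) == 2
del _overload_usage_example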

415 

416 

417# This is not a real generic class. Don't use outside annotations. 

418Type = typing.Type 

419 

420# Various ABCs mimicking those in collections.abc. 

421# A few are simply re-exported for completeness. 

422Awaitable = typing.Awaitable 

423Coroutine = typing.Coroutine 

424AsyncIterable = typing.AsyncIterable 

425AsyncIterator = typing.AsyncIterator 

426Deque = typing.Deque 

427DefaultDict = typing.DefaultDict 

428OrderedDict = typing.OrderedDict 

429Counter = typing.Counter 

430ChainMap = typing.ChainMap 

431Text = typing.Text 

432TYPE_CHECKING = typing.TYPE_CHECKING 

433 

434 

435if sys.version_info >= (3, 13, 0, "beta"): 

436 from typing import AsyncContextManager, AsyncGenerator, ContextManager, Generator 

437else: 

438 def _is_dunder(attr): 

439 return attr.startswith('__') and attr.endswith('__') 

440 

441 

442 class _SpecialGenericAlias(typing._SpecialGenericAlias, _root=True): 

443 def __init__(self, origin, nparams, *, inst=True, name=None, defaults=()): 

444 super().__init__(origin, nparams, inst=inst, name=name) 

445 self._defaults = defaults 

446 

447 def __setattr__(self, attr, val): 

448 allowed_attrs = {'_name', '_inst', '_nparams', '_defaults'} 

449 if _is_dunder(attr) or attr in allowed_attrs: 

450 object.__setattr__(self, attr, val) 

451 else: 

452 setattr(self.__origin__, attr, val) 

453 

454 @typing._tp_cache 

455 def __getitem__(self, params): 

456 if not isinstance(params, tuple): 

457 params = (params,) 

458 msg = "Parameters to generic types must be types." 

459 params = tuple(typing._type_check(p, msg) for p in params) 

460 if ( 

461 self._defaults 

462 and len(params) < self._nparams 

463 and len(params) + len(self._defaults) >= self._nparams 

464 ): 

465 params = (*params, *self._defaults[len(params) - self._nparams:]) 

466 actual_len = len(params) 

467 

468 if actual_len != self._nparams: 

469 if self._defaults: 

470 expected = f"at least {self._nparams - len(self._defaults)}" 

471 else: 

472 expected = str(self._nparams) 

473 if not self._nparams: 

474 raise TypeError(f"{self} is not a generic class") 

475 raise TypeError( 

476 f"Too {'many' if actual_len > self._nparams else 'few'}" 

477 f" arguments for {self};" 

478 f" actual {actual_len}, expected {expected}" 

479 ) 

480 return self.copy_with(params) 

481 

482 _NoneType = type(None) 

483 Generator = _SpecialGenericAlias( 

484 collections.abc.Generator, 3, defaults=(_NoneType, _NoneType) 

485 ) 

486 AsyncGenerator = _SpecialGenericAlias( 

487 collections.abc.AsyncGenerator, 2, defaults=(_NoneType,) 

488 ) 

489 ContextManager = _SpecialGenericAlias( 

490 contextlib.AbstractContextManager, 

491 2, 

492 name="ContextManager", 

493 defaults=(typing.Optional[bool],) 

494 ) 

495 AsyncContextManager = _SpecialGenericAlias( 

496 contextlib.AbstractAsyncContextManager, 

497 2, 

498 name="AsyncContextManager", 

499 defaults=(typing.Optional[bool],) 

500 ) 
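# Illustrative usage sketch (not part of the upstream module): thanks to the
# defaults above (or typing's own defaults on 3.13+), the send/return
# parameters of Generator may be omitted.
assert typing.get_args(Generator[int]) == (int, type(None), type(None))
assert typing.get_args(Generator[int]) == typing.get_args(Generator[int, None, None])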

501 

502 

503_PROTO_ALLOWLIST = { 

504 'collections.abc': [ 

505 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable', 

506 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer', 

507 ], 

508 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'], 

509 'typing_extensions': ['Buffer'], 

510} 

511 

512 

513_EXCLUDED_ATTRS = frozenset(typing.EXCLUDED_ATTRIBUTES) | { 

514 "__match_args__", "__protocol_attrs__", "__non_callable_proto_members__", 

515 "__final__", 

516} 

517 

518 

519def _get_protocol_attrs(cls): 

520 attrs = set() 

521 for base in cls.__mro__[:-1]: # without object 

522 if base.__name__ in {'Protocol', 'Generic'}: 

523 continue 

524 annotations = getattr(base, '__annotations__', {}) 

525 for attr in (*base.__dict__, *annotations): 

526 if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS): 

527 attrs.add(attr) 

528 return attrs 

529 

530 

531def _caller(depth=1, default='__main__'): 

532 try: 

533 return sys._getframemodulename(depth + 1) or default 

534 except AttributeError: # For platforms without _getframemodulename() 

535 pass 

536 try: 

537 return sys._getframe(depth + 1).f_globals.get('__name__', default) 

538 except (AttributeError, ValueError): # For platforms without _getframe() 

539 pass 

540 return None 

541 

542 

543# The `__match_args__` attribute was removed from protocol members in 3.13;

544# we want to backport this change to older Python versions.

545if sys.version_info >= (3, 13): 

546 Protocol = typing.Protocol 

547else: 

548 def _allow_reckless_class_checks(depth=2): 

549 """Allow instance and class checks for special stdlib modules. 

550 The abc and functools modules indiscriminately call isinstance() and 

551 issubclass() on the whole MRO of a user class, which may contain protocols. 

552 """ 

553 return _caller(depth) in {'abc', 'functools', None} 

554 

555 def _no_init(self, *args, **kwargs): 

556 if type(self)._is_protocol: 

557 raise TypeError('Protocols cannot be instantiated') 

558 

559 def _type_check_issubclass_arg_1(arg): 

560 """Raise TypeError if `arg` is not an instance of `type` 

561 in `issubclass(arg, <protocol>)`. 

562 

563 In most cases, this is verified by type.__subclasscheck__. 

564 Checking it again unnecessarily would slow down issubclass() checks, 

565 so, we don't perform this check unless we absolutely have to. 

566 

567 For various error paths, however, 

568 we want to ensure that *this* error message is shown to the user 

569 where relevant, rather than a typing.py-specific error message. 

570 """ 

571 if not isinstance(arg, type): 

572 # Same error message as for issubclass(1, int). 

573 raise TypeError('issubclass() arg 1 must be a class') 

574 

575 # Inheriting from typing._ProtocolMeta isn't actually desirable, 

576 # but is necessary to allow typing.Protocol and typing_extensions.Protocol 

577 # to mix without getting TypeErrors about "metaclass conflict" 

578 class _ProtocolMeta(type(typing.Protocol)): 

579 # This metaclass is somewhat unfortunate, 

580 # but is necessary for several reasons... 

581 # 

582 # NOTE: DO NOT call super() in any methods in this class 

583 # That would call the methods on typing._ProtocolMeta on Python <=3.11 

584 # and those are slow 

585 def __new__(mcls, name, bases, namespace, **kwargs): 

586 if name == "Protocol" and len(bases) < 2: 

587 pass 

588 elif {Protocol, typing.Protocol} & set(bases): 

589 for base in bases: 

590 if not ( 

591 base in {object, typing.Generic, Protocol, typing.Protocol} 

592 or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, []) 

593 or is_protocol(base) 

594 ): 

595 raise TypeError( 

596 f"Protocols can only inherit from other protocols, " 

597 f"got {base!r}" 

598 ) 

599 return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs) 

600 

601 def __init__(cls, *args, **kwargs): 

602 abc.ABCMeta.__init__(cls, *args, **kwargs) 

603 if getattr(cls, "_is_protocol", False): 

604 cls.__protocol_attrs__ = _get_protocol_attrs(cls) 

605 

606 def __subclasscheck__(cls, other): 

607 if cls is Protocol: 

608 return type.__subclasscheck__(cls, other) 

609 if ( 

610 getattr(cls, '_is_protocol', False) 

611 and not _allow_reckless_class_checks() 

612 ): 

613 if not getattr(cls, '_is_runtime_protocol', False): 

614 _type_check_issubclass_arg_1(other) 

615 raise TypeError( 

616 "Instance and class checks can only be used with " 

617 "@runtime_checkable protocols" 

618 ) 

619 if ( 

620 # this attribute is set by @runtime_checkable: 

621 cls.__non_callable_proto_members__ 

622 and cls.__dict__.get("__subclasshook__") is _proto_hook 

623 ): 

624 _type_check_issubclass_arg_1(other) 

625 non_method_attrs = sorted(cls.__non_callable_proto_members__) 

626 raise TypeError( 

627 "Protocols with non-method members don't support issubclass()." 

628 f" Non-method members: {str(non_method_attrs)[1:-1]}." 

629 ) 

630 return abc.ABCMeta.__subclasscheck__(cls, other) 

631 

632 def __instancecheck__(cls, instance): 

633 # We need this method for situations where attributes are 

634 # assigned in __init__. 

635 if cls is Protocol: 

636 return type.__instancecheck__(cls, instance) 

637 if not getattr(cls, "_is_protocol", False): 

638 # i.e., it's a concrete subclass of a protocol 

639 return abc.ABCMeta.__instancecheck__(cls, instance) 

640 

641 if ( 

642 not getattr(cls, '_is_runtime_protocol', False) and 

643 not _allow_reckless_class_checks() 

644 ): 

645 raise TypeError("Instance and class checks can only be used with" 

646 " @runtime_checkable protocols") 

647 

648 if abc.ABCMeta.__instancecheck__(cls, instance): 

649 return True 

650 

651 for attr in cls.__protocol_attrs__: 

652 try: 

653 val = inspect.getattr_static(instance, attr) 

654 except AttributeError: 

655 break 

656 # this attribute is set by @runtime_checkable: 

657 if val is None and attr not in cls.__non_callable_proto_members__: 

658 break 

659 else: 

660 return True 

661 

662 return False 

663 

664 def __eq__(cls, other): 

665 # Hack so that typing.Generic.__class_getitem__ 

666 # treats typing_extensions.Protocol 

667 # as equivalent to typing.Protocol 

668 if abc.ABCMeta.__eq__(cls, other) is True: 

669 return True 

670 return cls is Protocol and other is typing.Protocol 

671 

672 # This has to be defined, or the abc-module cache 

673 # complains about classes with this metaclass being unhashable, 

674 # if we define only __eq__! 

675 def __hash__(cls) -> int: 

676 return type.__hash__(cls) 

677 

678 @classmethod 

679 def _proto_hook(cls, other): 

680 if not cls.__dict__.get('_is_protocol', False): 

681 return NotImplemented 

682 

683 for attr in cls.__protocol_attrs__: 

684 for base in other.__mro__: 

685 # Check if the member appears in the class dictionary... 

686 if attr in base.__dict__: 

687 if base.__dict__[attr] is None: 

688 return NotImplemented 

689 break 

690 

691 # ...or in annotations, if it is a sub-protocol. 

692 annotations = getattr(base, '__annotations__', {}) 

693 if ( 

694 isinstance(annotations, collections.abc.Mapping) 

695 and attr in annotations 

696 and is_protocol(other) 

697 ): 

698 break 

699 else: 

700 return NotImplemented 

701 return True 

702 

703 class Protocol(typing.Generic, metaclass=_ProtocolMeta): 

704 __doc__ = typing.Protocol.__doc__ 

705 __slots__ = () 

706 _is_protocol = True 

707 _is_runtime_protocol = False 

708 

709 def __init_subclass__(cls, *args, **kwargs): 

710 super().__init_subclass__(*args, **kwargs) 

711 

712 # Determine if this is a protocol or a concrete subclass. 

713 if not cls.__dict__.get('_is_protocol', False): 

714 cls._is_protocol = any(b is Protocol for b in cls.__bases__) 

715 

716 # Set (or override) the protocol subclass hook. 

717 if '__subclasshook__' not in cls.__dict__: 

718 cls.__subclasshook__ = _proto_hook 

719 

720 # Prohibit instantiation for protocol classes 

721 if cls._is_protocol and cls.__init__ is Protocol.__init__: 

722 cls.__init__ = _no_init 

723 

724 

725if sys.version_info >= (3, 13): 

726 runtime_checkable = typing.runtime_checkable 

727else: 

728 def runtime_checkable(cls): 

729 """Mark a protocol class as a runtime protocol. 

730 

731 Such protocol can be used with isinstance() and issubclass(). 

732 Raise TypeError if applied to a non-protocol class. 

733 This allows a simple-minded structural check very similar to 

734 one-trick ponies in collections.abc such as Iterable. 

735 

736 For example:: 

737 

738 @runtime_checkable 

739 class Closable(Protocol): 

740 def close(self): ... 

741 

742 assert isinstance(open('/some/file'), Closable) 

743 

744 Warning: this will check only the presence of the required methods, 

745 not their type signatures! 

746 """ 

747 if not issubclass(cls, typing.Generic) or not getattr(cls, '_is_protocol', False): 

748 raise TypeError(f'@runtime_checkable can be only applied to protocol classes,' 

749 f' got {cls!r}') 

750 cls._is_runtime_protocol = True 

751 

752 # typing.Protocol classes on <=3.11 break if we execute this block, 

753 # because typing.Protocol classes on <=3.11 don't have a 

754 # `__protocol_attrs__` attribute, and this block relies on the 

755 # `__protocol_attrs__` attribute. Meanwhile, typing.Protocol classes on 3.12.2+ 

756 # break if we *don't* execute this block, because *they* assume that all 

757 # protocol classes have a `__non_callable_proto_members__` attribute 

758 # (which this block sets) 

759 if isinstance(cls, _ProtocolMeta) or sys.version_info >= (3, 12, 2): 

760 # PEP 544 prohibits using issubclass() 

761 # with protocols that have non-method members. 

762 # See gh-113320 for why we compute this attribute here, 

763 # rather than in `_ProtocolMeta.__init__` 

764 cls.__non_callable_proto_members__ = set() 

765 for attr in cls.__protocol_attrs__: 

766 try: 

767 is_callable = callable(getattr(cls, attr, None)) 

768 except Exception as e: 

769 raise TypeError( 

770 f"Failed to determine whether protocol member {attr!r} " 

771 "is a method member" 

772 ) from e 

773 else: 

774 if not is_callable: 

775 cls.__non_callable_proto_members__.add(attr) 

776 

777 return cls 

778 

779 

780# The "runtime" alias exists for backwards compatibility. 

781runtime = runtime_checkable 
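# Illustrative usage sketch (not part of the upstream module; the classes
# below are hypothetical): isinstance() works against @runtime_checkable
# protocols, while issubclass() is rejected once a protocol has non-method
# members.
@runtime_checkable
class _HasNameExample(Protocol):
    name: str

class _NamedExample:
    name = "example"

assert isinstance(_NamedExample(), _HasNameExample)
try:
    issubclass(_NamedExample, _HasNameExample)
except TypeError:
    pass  # expected: data protocols do not support issubclass()

del _HasNameExample, _NamedExample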

782 

783 

784# Our version of runtime-checkable protocols is faster on Python <=3.11 

785if sys.version_info >= (3, 12): 

786 SupportsInt = typing.SupportsInt 

787 SupportsFloat = typing.SupportsFloat 

788 SupportsComplex = typing.SupportsComplex 

789 SupportsBytes = typing.SupportsBytes 

790 SupportsIndex = typing.SupportsIndex 

791 SupportsAbs = typing.SupportsAbs 

792 SupportsRound = typing.SupportsRound 

793else: 

794 @runtime_checkable 

795 class SupportsInt(Protocol): 

796 """An ABC with one abstract method __int__.""" 

797 __slots__ = () 

798 

799 @abc.abstractmethod 

800 def __int__(self) -> int: 

801 pass 

802 

803 @runtime_checkable 

804 class SupportsFloat(Protocol): 

805 """An ABC with one abstract method __float__.""" 

806 __slots__ = () 

807 

808 @abc.abstractmethod 

809 def __float__(self) -> float: 

810 pass 

811 

812 @runtime_checkable 

813 class SupportsComplex(Protocol): 

814 """An ABC with one abstract method __complex__.""" 

815 __slots__ = () 

816 

817 @abc.abstractmethod 

818 def __complex__(self) -> complex: 

819 pass 

820 

821 @runtime_checkable 

822 class SupportsBytes(Protocol): 

823 """An ABC with one abstract method __bytes__.""" 

824 __slots__ = () 

825 

826 @abc.abstractmethod 

827 def __bytes__(self) -> bytes: 

828 pass 

829 

830 @runtime_checkable 

831 class SupportsIndex(Protocol): 

832 __slots__ = () 

833 

834 @abc.abstractmethod 

835 def __index__(self) -> int: 

836 pass 

837 

838 @runtime_checkable 

839 class SupportsAbs(Protocol[T_co]): 

840 """ 

841 An ABC with one abstract method __abs__ that is covariant in its return type. 

842 """ 

843 __slots__ = () 

844 

845 @abc.abstractmethod 

846 def __abs__(self) -> T_co: 

847 pass 

848 

849 @runtime_checkable 

850 class SupportsRound(Protocol[T_co]): 

851 """ 

852 An ABC with one abstract method __round__ that is covariant in its return type. 

853 """ 

854 __slots__ = () 

855 

856 @abc.abstractmethod 

857 def __round__(self, ndigits: int = 0) -> T_co: 

858 pass 

859 

860 

861if hasattr(io, "Reader") and hasattr(io, "Writer"): 

862 Reader = io.Reader 

863 Writer = io.Writer 

864else: 

865 @runtime_checkable 

866 class Reader(Protocol[T_co]): 

867 """Protocol for simple I/O reader instances. 

868 

869 This protocol only supports blocking I/O. 

870 """ 

871 

872 __slots__ = () 

873 

874 @abc.abstractmethod 

875 def read(self, size: int = ..., /) -> T_co: 

876 """Read data from the input stream and return it. 

877 

878 If *size* is specified, at most *size* items (bytes/characters) will be 

879 read. 

880 """ 

881 

882 @runtime_checkable 

883 class Writer(Protocol[T_contra]): 

884 """Protocol for simple I/O writer instances. 

885 

886 This protocol only supports blocking I/O. 

887 """ 

888 

889 __slots__ = () 

890 

891 @abc.abstractmethod 

892 def write(self, data: T_contra, /) -> int: 

893 """Write *data* to the output stream and return the number of items written.""" # noqa: E501 

894 

895 

896_NEEDS_SINGLETONMETA = ( 

897 not hasattr(typing, "NoDefault") or not hasattr(typing, "NoExtraItems") 

898) 

899 

900if _NEEDS_SINGLETONMETA: 

901 class SingletonMeta(type): 

902 def __setattr__(cls, attr, value): 

903 # TypeError is consistent with the behavior of NoneType 

904 raise TypeError( 

905 f"cannot set {attr!r} attribute of immutable type {cls.__name__!r}" 

906 ) 

907 

908 

909if hasattr(typing, "NoDefault"): 

910 NoDefault = typing.NoDefault 

911else: 

912 class NoDefaultType(metaclass=SingletonMeta): 

913 """The type of the NoDefault singleton.""" 

914 

915 __slots__ = () 

916 

917 def __new__(cls): 

918 return globals().get("NoDefault") or object.__new__(cls) 

919 

920 def __repr__(self): 

921 return "typing_extensions.NoDefault" 

922 

923 def __reduce__(self): 

924 return "NoDefault" 

925 

926 NoDefault = NoDefaultType() 

927 del NoDefaultType 

928 

929if hasattr(typing, "NoExtraItems"): 

930 NoExtraItems = typing.NoExtraItems 

931else: 

932 class NoExtraItemsType(metaclass=SingletonMeta): 

933 """The type of the NoExtraItems singleton.""" 

934 

935 __slots__ = () 

936 

937 def __new__(cls): 

938 return globals().get("NoExtraItems") or object.__new__(cls) 

939 

940 def __repr__(self): 

941 return "typing_extensions.NoExtraItems" 

942 

943 def __reduce__(self): 

944 return "NoExtraItems" 

945 

946 NoExtraItems = NoExtraItemsType() 

947 del NoExtraItemsType 

948 

949if _NEEDS_SINGLETONMETA: 

950 del SingletonMeta 

951 

952 

953# Update this to something like >=3.13.0b1 if and when 

954# PEP 728 is implemented in CPython 

955_PEP_728_IMPLEMENTED = False 

956 

957if _PEP_728_IMPLEMENTED: 

958 # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" 

959 # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059 

960 # The standard library TypedDict below Python 3.11 does not store runtime 

961 # information about optional and required keys when using Required or NotRequired. 

962 # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11. 

963 # Aaaand on 3.12 we add __orig_bases__ to TypedDict 

964 # to enable better runtime introspection. 

965 # On 3.13 we deprecate some odd ways of creating TypedDicts. 

966 # Also on 3.13, PEP 705 adds the ReadOnly[] qualifier. 

967 # PEP 728 (still pending) makes more changes. 

968 TypedDict = typing.TypedDict 

969 _TypedDictMeta = typing._TypedDictMeta 

970 is_typeddict = typing.is_typeddict 

971else: 

972 # 3.10.0 and later 

973 _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters 

974 

975 def _get_typeddict_qualifiers(annotation_type): 

976 while True: 

977 annotation_origin = get_origin(annotation_type) 

978 if annotation_origin is Annotated: 

979 annotation_args = get_args(annotation_type) 

980 if annotation_args: 

981 annotation_type = annotation_args[0] 

982 else: 

983 break 

984 elif annotation_origin is Required: 

985 yield Required 

986 annotation_type, = get_args(annotation_type) 

987 elif annotation_origin is NotRequired: 

988 yield NotRequired 

989 annotation_type, = get_args(annotation_type) 

990 elif annotation_origin is ReadOnly: 

991 yield ReadOnly 

992 annotation_type, = get_args(annotation_type) 

993 else: 

994 break 

995 

996 class _TypedDictMeta(type): 

997 

998 def __new__(cls, name, bases, ns, *, total=True, closed=None, 

999 extra_items=NoExtraItems): 

1000 """Create new typed dict class object. 

1001 

1002 This method is called when TypedDict is subclassed, 

1003 or when TypedDict is instantiated. This way 

1004 TypedDict supports all three syntax forms described in its docstring. 

1005 Subclasses and instances of TypedDict return actual dictionaries. 

1006 """ 

1007 for base in bases: 

1008 if type(base) is not _TypedDictMeta and base is not typing.Generic: 

1009 raise TypeError('cannot inherit from both a TypedDict type ' 

1010 'and a non-TypedDict base class') 

1011 if closed is not None and extra_items is not NoExtraItems: 

1012 raise TypeError(f"Cannot combine closed={closed!r} and extra_items") 

1013 

1014 if any(issubclass(b, typing.Generic) for b in bases): 

1015 generic_base = (typing.Generic,) 

1016 else: 

1017 generic_base = () 

1018 

1019 ns_annotations = ns.pop('__annotations__', None) 

1020 

1021 # typing.py generally doesn't let you inherit from plain Generic, unless 

1022 # the name of the class happens to be "Protocol" 

1023 tp_dict = type.__new__(_TypedDictMeta, "Protocol", (*generic_base, dict), ns) 

1024 tp_dict.__name__ = name 

1025 if tp_dict.__qualname__ == "Protocol": 

1026 tp_dict.__qualname__ = name 

1027 

1028 if not hasattr(tp_dict, '__orig_bases__'): 

1029 tp_dict.__orig_bases__ = bases 

1030 

1031 annotations = {} 

1032 own_annotate = None 

1033 if ns_annotations is not None: 

1034 own_annotations = ns_annotations 

1035 elif sys.version_info >= (3, 14): 

1036 if hasattr(annotationlib, "get_annotate_from_class_namespace"): 

1037 own_annotate = annotationlib.get_annotate_from_class_namespace(ns) 

1038 else: 

1039 # 3.14.0a7 and earlier 

1040 own_annotate = ns.get("__annotate__") 

1041 if own_annotate is not None: 

1042 own_annotations = annotationlib.call_annotate_function( 

1043 own_annotate, Format.FORWARDREF, owner=tp_dict 

1044 ) 

1045 else: 

1046 own_annotations = {} 

1047 else: 

1048 own_annotations = {} 

1049 msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" 

1050 if _TAKES_MODULE: 

1051 own_checked_annotations = { 

1052 n: typing._type_check(tp, msg, module=tp_dict.__module__) 

1053 for n, tp in own_annotations.items() 

1054 } 

1055 else: 

1056 own_checked_annotations = { 

1057 n: typing._type_check(tp, msg) 

1058 for n, tp in own_annotations.items() 

1059 } 

1060 required_keys = set() 

1061 optional_keys = set() 

1062 readonly_keys = set() 

1063 mutable_keys = set() 

1064 extra_items_type = extra_items 

1065 

1066 for base in bases: 

1067 base_dict = base.__dict__ 

1068 

1069 if sys.version_info <= (3, 14): 

1070 annotations.update(base_dict.get('__annotations__', {})) 

1071 required_keys.update(base_dict.get('__required_keys__', ())) 

1072 optional_keys.update(base_dict.get('__optional_keys__', ())) 

1073 readonly_keys.update(base_dict.get('__readonly_keys__', ())) 

1074 mutable_keys.update(base_dict.get('__mutable_keys__', ())) 

1075 

1076 # This was specified in an earlier version of PEP 728. Support 

1077 # is retained for backwards compatibility, but only for Python 

1078 # 3.13 and lower. 

1079 if (closed and sys.version_info < (3, 14) 

1080 and "__extra_items__" in own_checked_annotations): 

1081 annotation_type = own_checked_annotations.pop("__extra_items__") 

1082 qualifiers = set(_get_typeddict_qualifiers(annotation_type)) 

1083 if Required in qualifiers: 

1084 raise TypeError( 

1085 "Special key __extra_items__ does not support " 

1086 "Required" 

1087 ) 

1088 if NotRequired in qualifiers: 

1089 raise TypeError( 

1090 "Special key __extra_items__ does not support " 

1091 "NotRequired" 

1092 ) 

1093 extra_items_type = annotation_type 

1094 

1095 annotations.update(own_checked_annotations) 

1096 for annotation_key, annotation_type in own_checked_annotations.items(): 

1097 qualifiers = set(_get_typeddict_qualifiers(annotation_type)) 

1098 

1099 if Required in qualifiers: 

1100 required_keys.add(annotation_key) 

1101 elif NotRequired in qualifiers: 

1102 optional_keys.add(annotation_key) 

1103 elif total: 

1104 required_keys.add(annotation_key) 

1105 else: 

1106 optional_keys.add(annotation_key) 

1107 if ReadOnly in qualifiers: 

1108 mutable_keys.discard(annotation_key) 

1109 readonly_keys.add(annotation_key) 

1110 else: 

1111 mutable_keys.add(annotation_key) 

1112 readonly_keys.discard(annotation_key) 

1113 

1114 if sys.version_info >= (3, 14): 

1115 def __annotate__(format): 

1116 annos = {} 

1117 for base in bases: 

1118 if base is Generic: 

1119 continue 

1120 base_annotate = base.__annotate__ 

1121 if base_annotate is None: 

1122 continue 

1123 base_annos = annotationlib.call_annotate_function( 

1124 base_annotate, format, owner=base) 

1125 annos.update(base_annos) 

1126 if own_annotate is not None: 

1127 own = annotationlib.call_annotate_function( 

1128 own_annotate, format, owner=tp_dict) 

1129 if format != Format.STRING: 

1130 own = { 

1131 n: typing._type_check(tp, msg, module=tp_dict.__module__) 

1132 for n, tp in own.items() 

1133 } 

1134 elif format == Format.STRING: 

1135 own = annotationlib.annotations_to_string(own_annotations) 

1136 elif format in (Format.FORWARDREF, Format.VALUE): 

1137 own = own_checked_annotations 

1138 else: 

1139 raise NotImplementedError(format) 

1140 annos.update(own) 

1141 return annos 

1142 

1143 tp_dict.__annotate__ = __annotate__ 

1144 else: 

1145 tp_dict.__annotations__ = annotations 

1146 tp_dict.__required_keys__ = frozenset(required_keys) 

1147 tp_dict.__optional_keys__ = frozenset(optional_keys) 

1148 tp_dict.__readonly_keys__ = frozenset(readonly_keys) 

1149 tp_dict.__mutable_keys__ = frozenset(mutable_keys) 

1150 tp_dict.__total__ = total 

1151 tp_dict.__closed__ = closed 

1152 tp_dict.__extra_items__ = extra_items_type 

1153 return tp_dict 

1154 

1155 __call__ = dict # static method 

1156 

1157 def __subclasscheck__(cls, other): 

1158 # Typed dicts are only for static structural subtyping. 

1159 raise TypeError('TypedDict does not support instance and class checks') 

1160 

1161 __instancecheck__ = __subclasscheck__ 

1162 

1163 _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {}) 

1164 

1165 def _create_typeddict( 

1166 typename, 

1167 fields, 

1168 /, 

1169 *, 

1170 typing_is_inline, 

1171 total, 

1172 closed, 

1173 extra_items, 

1174 **kwargs, 

1175 ): 

1176 if fields is _marker or fields is None: 

1177 if fields is _marker: 

1178 deprecated_thing = ( 

1179 "Failing to pass a value for the 'fields' parameter" 

1180 ) 

1181 else: 

1182 deprecated_thing = "Passing `None` as the 'fields' parameter" 

1183 

1184 example = f"`{typename} = TypedDict({typename!r}, {{}})`" 

1185 deprecation_msg = ( 

1186 f"{deprecated_thing} is deprecated and will be disallowed in " 

1187 "Python 3.15. To create a TypedDict class with 0 fields " 

1188 "using the functional syntax, pass an empty dictionary, e.g. " 

1189 ) + example + "." 

1190 warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2) 

1191 # Support a field called "closed" 

1192 if closed is not False and closed is not True and closed is not None: 

1193 kwargs["closed"] = closed 

1194 closed = None 

1195 # Or "extra_items" 

1196 if extra_items is not NoExtraItems: 

1197 kwargs["extra_items"] = extra_items 

1198 extra_items = NoExtraItems 

1199 fields = kwargs 

1200 elif kwargs: 

1201 raise TypeError("TypedDict takes either a dict or keyword arguments," 

1202 " but not both") 

1203 if kwargs: 

1204 if sys.version_info >= (3, 13): 

1205 raise TypeError("TypedDict takes no keyword arguments") 

1206 warnings.warn( 

1207 "The kwargs-based syntax for TypedDict definitions is deprecated " 

1208 "in Python 3.11, will be removed in Python 3.13, and may not be " 

1209 "understood by third-party type checkers.", 

1210 DeprecationWarning, 

1211 stacklevel=2, 

1212 ) 

1213 

1214 ns = {'__annotations__': dict(fields)} 

1215 module = _caller(depth=4 if typing_is_inline else 2) 

1216 if module is not None: 

1217 # Setting correct module is necessary to make typed dict classes 

1218 # pickleable. 

1219 ns['__module__'] = module 

1220 

1221 td = _TypedDictMeta(typename, (), ns, total=total, closed=closed, 

1222 extra_items=extra_items) 

1223 td.__orig_bases__ = (TypedDict,) 

1224 return td 

1225 

1226 class _TypedDictSpecialForm(_ExtensionsSpecialForm, _root=True): 

1227 def __call__( 

1228 self, 

1229 typename, 

1230 fields=_marker, 

1231 /, 

1232 *, 

1233 total=True, 

1234 closed=None, 

1235 extra_items=NoExtraItems, 

1236 **kwargs 

1237 ): 

1238 return _create_typeddict( 

1239 typename, 

1240 fields, 

1241 typing_is_inline=False, 

1242 total=total, 

1243 closed=closed, 

1244 extra_items=extra_items, 

1245 **kwargs, 

1246 ) 

1247 

1248 def __mro_entries__(self, bases): 

1249 return (_TypedDict,) 

1250 

1251 @_TypedDictSpecialForm 

1252 def TypedDict(self, args): 

1253 """A simple typed namespace. At runtime it is equivalent to a plain dict. 

1254 

1255 TypedDict creates a dictionary type such that a type checker will expect all 

1256 instances to have a certain set of keys, where each key is 

1257 associated with a value of a consistent type. This expectation 

1258 is not checked at runtime. 

1259 

1260 Usage:: 

1261 

1262 class Point2D(TypedDict): 

1263 x: int 

1264 y: int 

1265 label: str 

1266 

1267 a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK 

1268 b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check 

1269 

1270 assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first') 

1271 

1272 The type info can be accessed via the Point2D.__annotations__ dict, and 

1273 the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets. 

1274 TypedDict supports an additional equivalent form:: 

1275 

1276 Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) 

1277 

1278 By default, all keys must be present in a TypedDict. It is possible 

1279 to override this by specifying totality:: 

1280 

1281 class Point2D(TypedDict, total=False): 

1282 x: int 

1283 y: int 

1284 

1285 This means that a Point2D TypedDict can have any of the keys omitted. A type 

1286 checker is only expected to support a literal False or True as the value of 

1287 the total argument. True is the default, and makes all items defined in the 

1288 class body be required. 

1289 

1290 The Required and NotRequired special forms can also be used to mark 

1291 individual keys as being required or not required:: 

1292 

1293 class Point2D(TypedDict): 

1294 x: int # the "x" key must always be present (Required is the default) 

1295 y: NotRequired[int] # the "y" key can be omitted 

1296 

1297 See PEP 655 for more details on Required and NotRequired. 

1298 """ 

1299 # This runs when creating inline TypedDicts: 

1300 if not isinstance(args, dict): 

1301 raise TypeError( 

1302 "TypedDict[...] should be used with a single dict argument" 

1303 ) 

1304 

1305 return _create_typeddict( 

1306 "<inline TypedDict>", 

1307 args, 

1308 typing_is_inline=True, 

1309 total=True, 

1310 closed=True, 

1311 extra_items=NoExtraItems, 

1312 ) 

1313 

1314 _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta) 

1315 

1316 def is_typeddict(tp): 

1317 """Check if an annotation is a TypedDict class 

1318 

1319 For example:: 

1320 class Film(TypedDict): 

1321 title: str 

1322 year: int 

1323 

1324 is_typeddict(Film) # => True 

1325 is_typeddict(Union[list, str]) # => False 

1326 """ 

1327 return isinstance(tp, _TYPEDDICT_TYPES) 
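# Illustrative usage sketch (not part of the upstream module; kept as a
# comment because Required, NotRequired and ReadOnly are only defined further
# down in this file):
#
#     class Movie(TypedDict):
#         title: Required[str]
#         year: NotRequired[int]
#         rating: ReadOnly[float]
#
#     is_typeddict(Movie)         # True
#     Movie.__required_keys__     # frozenset({'title', 'rating'})
#     Movie.__optional_keys__     # frozenset({'year'})
#     Movie.__readonly_keys__     # frozenset({'rating'})
#     Movie.__mutable_keys__      # frozenset({'title', 'year'})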

1328 

1329 

1330if hasattr(typing, "assert_type"): 

1331 assert_type = typing.assert_type 

1332 

1333else: 

1334 def assert_type(val, typ, /): 

1335 """Assert (to the type checker) that the value is of the given type. 

1336 

1337 When the type checker encounters a call to assert_type(), it 

1338 emits an error if the value is not of the specified type:: 

1339 

1340 def greet(name: str) -> None: 

1341 assert_type(name, str) # ok 

1342 assert_type(name, int) # type checker error 

1343 

1344 At runtime this returns the first argument unchanged and otherwise 

1345 does nothing. 

1346 """ 

1347 return val 

1348 

1349 

1350if hasattr(typing, "ReadOnly"): # 3.13+ 

1351 get_type_hints = typing.get_type_hints 

1352else: # <=3.13 

1353 # replaces _strip_annotations() 

1354 def _strip_extras(t): 

1355 """Strips Annotated, Required and NotRequired from a given type.""" 

1356 if isinstance(t, typing._AnnotatedAlias): 

1357 return _strip_extras(t.__origin__) 

1358 if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired, ReadOnly): 

1359 return _strip_extras(t.__args__[0]) 

1360 if isinstance(t, typing._GenericAlias): 

1361 stripped_args = tuple(_strip_extras(a) for a in t.__args__) 

1362 if stripped_args == t.__args__: 

1363 return t 

1364 return t.copy_with(stripped_args) 

1365 if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias): 

1366 stripped_args = tuple(_strip_extras(a) for a in t.__args__) 

1367 if stripped_args == t.__args__: 

1368 return t 

1369 return _types.GenericAlias(t.__origin__, stripped_args) 

1370 if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType): 

1371 stripped_args = tuple(_strip_extras(a) for a in t.__args__) 

1372 if stripped_args == t.__args__: 

1373 return t 

1374 return functools.reduce(operator.or_, stripped_args) 

1375 

1376 return t 

1377 

1378 def get_type_hints(obj, globalns=None, localns=None, include_extras=False): 

1379 """Return type hints for an object. 

1380 

1381 This is often the same as obj.__annotations__, but it handles 

1382 forward references encoded as string literals, adds Optional[t] if a 

1383 default value equal to None is set and recursively replaces all 

1384 'Annotated[T, ...]', 'Required[T]', 'NotRequired[T]' or 'ReadOnly[T]' with 'T' 

1385 (unless 'include_extras=True'). 

1386 

1387 The argument may be a module, class, method, or function. The annotations 

1388 are returned as a dictionary. For classes, annotations include also 

1389 inherited members. 

1390 

1391 TypeError is raised if the argument is not of a type that can contain 

1392 annotations, and an empty dictionary is returned if no annotations are 

1393 present. 

1394 

1395 BEWARE -- the behavior of globalns and localns is counterintuitive 

1396 (unless you are familiar with how eval() and exec() work). The 

1397 search order is locals first, then globals. 

1398 

1399 - If no dict arguments are passed, an attempt is made to use the 

1400 globals from obj (or the respective module's globals for classes), 

1401 and these are also used as the locals. If the object does not appear 

1402 to have globals, an empty dictionary is used. 

1403 

1404 - If one dict argument is passed, it is used for both globals and 

1405 locals. 

1406 

1407 - If two dict arguments are passed, they specify globals and 

1408 locals, respectively. 

1409 """ 

1410 hint = typing.get_type_hints( 

1411 obj, globalns=globalns, localns=localns, include_extras=True 

1412 ) 

1413 if sys.version_info < (3, 11): 

1414 _clean_optional(obj, hint, globalns, localns) 

1415 if include_extras: 

1416 return hint 

1417 return {k: _strip_extras(t) for k, t in hint.items()} 

1418 

1419 _NoneType = type(None) 

1420 

1421 def _could_be_inserted_optional(t): 

1422 """detects Union[..., None] pattern""" 

1423 if not isinstance(t, typing._UnionGenericAlias): 

1424 return False 

1425 # Assume that if the last argument is not None, the union is user-defined 

1426 if t.__args__[-1] is not _NoneType: 

1427 return False 

1428 return True 

1429 

1430 # < 3.11 

1431 def _clean_optional(obj, hints, globalns=None, localns=None): 

1432 # reverts injected Union[..., None] cases from typing.get_type_hints 

1433 # when a None default value is used. 

1434 # see https://github.com/python/typing_extensions/issues/310 

1435 if not hints or isinstance(obj, type): 

1436 return 

1437 defaults = typing._get_defaults(obj) # avoid accessing __annotations__ 

1438 if not defaults: 

1439 return 

1440 original_hints = obj.__annotations__ 

1441 for name, value in hints.items(): 

1442 # Not a Union[..., None], or the replacement conditions are not fulfilled 

1443 if (not _could_be_inserted_optional(value) 

1444 or name not in defaults 

1445 or defaults[name] is not None 

1446 ): 

1447 continue 

1448 original_value = original_hints[name] 

1449 # value=NoneType should have caused a skip above but check for safety 

1450 if original_value is None: 

1451 original_value = _NoneType 

1452 # Forward reference 

1453 if isinstance(original_value, str): 

1454 if globalns is None: 

1455 if isinstance(obj, _types.ModuleType): 

1456 globalns = obj.__dict__ 

1457 else: 

1458 nsobj = obj 

1459 # Find globalns for the unwrapped object. 

1460 while hasattr(nsobj, '__wrapped__'): 

1461 nsobj = nsobj.__wrapped__ 

1462 globalns = getattr(nsobj, '__globals__', {}) 

1463 if localns is None: 

1464 localns = globalns 

1465 elif localns is None: 

1466 localns = globalns 

1467 

1468 original_value = ForwardRef( 

1469 original_value, 

1470 is_argument=not isinstance(obj, _types.ModuleType) 

1471 ) 

1472 original_evaluated = typing._eval_type(original_value, globalns, localns) 

1473 # Only replace the hint if the values differ. Note that even when equal, 

1474 # the value might be cached by typing._tp_cache, unlike original_evaluated 

1475 if original_evaluated != value or ( 

1476 # 3.10: ForwardRefs of UnionType might be turned into _UnionGenericAlias 

1477 hasattr(_types, "UnionType") 

1478 and isinstance(original_evaluated, _types.UnionType) 

1479 and not isinstance(value, _types.UnionType) 

1480 ): 

1481 hints[name] = original_evaluated 
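# Illustrative usage sketch (not part of the upstream module; the helper
# function is hypothetical): include_extras controls whether Annotated
# metadata is kept, and the implicit Optional that older
# typing.get_type_hints() injects for a None default is reverted by
# _clean_optional() above.
def _hints_usage_example(x: typing.Annotated[int, "units: px"] = None): ...

assert get_type_hints(_hints_usage_example)["x"] is int
assert (
    get_type_hints(_hints_usage_example, include_extras=True)["x"]
    == typing.Annotated[int, "units: px"]
)
del _hints_usage_example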

1482 

1483# Python 3.9 has get_origin() and get_args() but those implementations don't support 

1484# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do. 

1485if sys.version_info[:2] >= (3, 10): 

1486 get_origin = typing.get_origin 

1487 get_args = typing.get_args 

1488# 3.9 

1489else: 

1490 def get_origin(tp): 

1491 """Get the unsubscripted version of a type. 

1492 

1493 This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar 

1494 and Annotated. Return None for unsupported types. Examples:: 

1495 

1496 get_origin(Literal[42]) is Literal 

1497 get_origin(int) is None 

1498 get_origin(ClassVar[int]) is ClassVar 

1499 get_origin(Generic) is Generic 

1500 get_origin(Generic[T]) is Generic 

1501 get_origin(Union[T, int]) is Union 

1502 get_origin(List[Tuple[T, T]][int]) == list 

1503 get_origin(P.args) is P 

1504 """ 

1505 if isinstance(tp, typing._AnnotatedAlias): 

1506 return Annotated 

1507 if isinstance(tp, (typing._BaseGenericAlias, _types.GenericAlias, 

1508 ParamSpecArgs, ParamSpecKwargs)): 

1509 return tp.__origin__ 

1510 if tp is typing.Generic: 

1511 return typing.Generic 

1512 return None 

1513 

1514 def get_args(tp): 

1515 """Get type arguments with all substitutions performed. 

1516 

1517 For unions, basic simplifications used by Union constructor are performed. 

1518 Examples:: 

1519 get_args(Dict[str, int]) == (str, int) 

1520 get_args(int) == () 

1521 get_args(Union[int, Union[T, int], str][int]) == (int, str) 

1522 get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) 

1523 get_args(Callable[[], T][int]) == ([], int) 

1524 """ 

1525 if isinstance(tp, typing._AnnotatedAlias): 

1526 return (tp.__origin__, *tp.__metadata__) 

1527 if isinstance(tp, (typing._GenericAlias, _types.GenericAlias)): 

1528 res = tp.__args__ 

1529 if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis: 

1530 res = (list(res[:-1]), res[-1]) 

1531 return res 

1532 return () 

1533 

1534 

1535# 3.10+ 

1536if hasattr(typing, 'TypeAlias'): 

1537 TypeAlias = typing.TypeAlias 

1538# 3.9 

1539else: 

1540 @_ExtensionsSpecialForm 

1541 def TypeAlias(self, parameters): 

1542 """Special marker indicating that an assignment should 

1543 be recognized as a proper type alias definition by type 

1544 checkers. 

1545 

1546 For example:: 

1547 

1548 Predicate: TypeAlias = Callable[..., bool] 

1549 

1550 It's invalid when used anywhere except as in the example above. 

1551 """ 

1552 raise TypeError(f"{self} is not subscriptable") 

1553 

1554 

1555def _set_default(type_param, default): 

1556 type_param.has_default = lambda: default is not NoDefault 

1557 type_param.__default__ = default 

1558 

1559 

1560def _set_module(typevarlike): 

1561 # for pickling: 

1562 def_mod = _caller(depth=2) 

1563 if def_mod != 'typing_extensions': 

1564 typevarlike.__module__ = def_mod 

1565 

1566 

1567class _DefaultMixin: 

1568 """Mixin for TypeVarLike defaults.""" 

1569 

1570 __slots__ = () 

1571 __init__ = _set_default 

1572 

1573 

1574# Classes using this metaclass must provide a _backported_typevarlike ClassVar 

1575class _TypeVarLikeMeta(type): 

1576 def __instancecheck__(cls, __instance: Any) -> bool: 

1577 return isinstance(__instance, cls._backported_typevarlike) 

1578 

1579 

1580if _PEP_696_IMPLEMENTED: 

1581 from typing import TypeVar 

1582else: 

1583 # Add default and infer_variance parameters from PEP 696 and 695 

1584 class TypeVar(metaclass=_TypeVarLikeMeta): 

1585 """Type variable.""" 

1586 

1587 _backported_typevarlike = typing.TypeVar 

1588 

1589 def __new__(cls, name, *constraints, bound=None, 

1590 covariant=False, contravariant=False, 

1591 default=NoDefault, infer_variance=False): 

1592 if hasattr(typing, "TypeAliasType"): 

1593 # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar 

1594 typevar = typing.TypeVar(name, *constraints, bound=bound, 

1595 covariant=covariant, contravariant=contravariant, 

1596 infer_variance=infer_variance) 

1597 else: 

1598 typevar = typing.TypeVar(name, *constraints, bound=bound, 

1599 covariant=covariant, contravariant=contravariant) 

1600 if infer_variance and (covariant or contravariant): 

1601 raise ValueError("Variance cannot be specified with infer_variance.") 

1602 typevar.__infer_variance__ = infer_variance 

1603 

1604 _set_default(typevar, default) 

1605 _set_module(typevar) 

1606 

1607 def _tvar_prepare_subst(alias, args): 

1608 if ( 

1609 typevar.has_default() 

1610 and alias.__parameters__.index(typevar) == len(args) 

1611 ): 

1612 args += (typevar.__default__,) 

1613 return args 

1614 

1615 typevar.__typing_prepare_subst__ = _tvar_prepare_subst 

1616 return typevar 

1617 

1618 def __init_subclass__(cls) -> None: 

1619 raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type") 
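# Illustrative usage sketch (not part of the upstream module; the variable
# name is hypothetical): the backported TypeVar accepts the PEP 696 `default`
# and PEP 695 `infer_variance` arguments on every supported version and
# exposes them for runtime introspection.
_T_usage_example = TypeVar("_T_usage_example", default=int, infer_variance=True)
assert _T_usage_example.has_default()
assert _T_usage_example.__default__ is int
assert _T_usage_example.__infer_variance__ is True
del _T_usage_example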

1620 

1621 

1622# Python 3.10+ has PEP 612 

1623if hasattr(typing, 'ParamSpecArgs'): 

1624 ParamSpecArgs = typing.ParamSpecArgs 

1625 ParamSpecKwargs = typing.ParamSpecKwargs 

1626# 3.9 

1627else: 

1628 class _Immutable: 

1629 """Mixin to indicate that object should not be copied.""" 

1630 __slots__ = () 

1631 

1632 def __copy__(self): 

1633 return self 

1634 

1635 def __deepcopy__(self, memo): 

1636 return self 

1637 

1638 class ParamSpecArgs(_Immutable): 

1639 """The args for a ParamSpec object. 

1640 

1641 Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. 

1642 

1643 ParamSpecArgs objects have a reference back to their ParamSpec: 

1644 

1645 P.args.__origin__ is P 

1646 

1647 This type is meant for runtime introspection and has no special meaning to 

1648 static type checkers. 

1649 """ 

1650 def __init__(self, origin): 

1651 self.__origin__ = origin 

1652 

1653 def __repr__(self): 

1654 return f"{self.__origin__.__name__}.args" 

1655 

1656 def __eq__(self, other): 

1657 if not isinstance(other, ParamSpecArgs): 

1658 return NotImplemented 

1659 return self.__origin__ == other.__origin__ 

1660 

1661 class ParamSpecKwargs(_Immutable): 

1662 """The kwargs for a ParamSpec object. 

1663 

1664 Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs. 

1665 

1666 ParamSpecKwargs objects have a reference back to their ParamSpec: 

1667 

1668 P.kwargs.__origin__ is P 

1669 

1670 This type is meant for runtime introspection and has no special meaning to 

1671 static type checkers. 

1672 """ 

1673 def __init__(self, origin): 

1674 self.__origin__ = origin 

1675 

1676 def __repr__(self): 

1677 return f"{self.__origin__.__name__}.kwargs" 

1678 

1679 def __eq__(self, other): 

1680 if not isinstance(other, ParamSpecKwargs): 

1681 return NotImplemented 

1682 return self.__origin__ == other.__origin__ 

1683 

1684 

1685if _PEP_696_IMPLEMENTED: 

1686 from typing import ParamSpec 

1687 

1688# 3.10+ 

1689elif hasattr(typing, 'ParamSpec'): 

1690 

1691 # Add default parameter - PEP 696 

1692 class ParamSpec(metaclass=_TypeVarLikeMeta): 

1693 """Parameter specification.""" 

1694 

1695 _backported_typevarlike = typing.ParamSpec 

1696 

1697 def __new__(cls, name, *, bound=None, 

1698 covariant=False, contravariant=False, 

1699 infer_variance=False, default=NoDefault): 

1700 if hasattr(typing, "TypeAliasType"): 

 1701         # PEP 695 implemented, can pass infer_variance to typing.ParamSpec 

1702 paramspec = typing.ParamSpec(name, bound=bound, 

1703 covariant=covariant, 

1704 contravariant=contravariant, 

1705 infer_variance=infer_variance) 

1706 else: 

1707 paramspec = typing.ParamSpec(name, bound=bound, 

1708 covariant=covariant, 

1709 contravariant=contravariant) 

1710 paramspec.__infer_variance__ = infer_variance 

1711 

1712 _set_default(paramspec, default) 

1713 _set_module(paramspec) 

1714 

1715 def _paramspec_prepare_subst(alias, args): 

1716 params = alias.__parameters__ 

1717 i = params.index(paramspec) 

1718 if i == len(args) and paramspec.has_default(): 

1719 args = [*args, paramspec.__default__] 

1720 if i >= len(args): 

1721 raise TypeError(f"Too few arguments for {alias}") 

1722 # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612. 

1723 if len(params) == 1 and not typing._is_param_expr(args[0]): 

1724 assert i == 0 

1725 args = (args,) 

1726 # Convert lists to tuples to help other libraries cache the results. 

1727 elif isinstance(args[i], list): 

1728 args = (*args[:i], tuple(args[i]), *args[i + 1:]) 

1729 return args 

1730 

1731 paramspec.__typing_prepare_subst__ = _paramspec_prepare_subst 

1732 return paramspec 

1733 

1734 def __init_subclass__(cls) -> None: 

1735 raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type") 

1736 

1737# 3.9 

1738else: 

1739 

1740 # Inherits from list as a workaround for Callable checks in Python < 3.9.2. 

1741 class ParamSpec(list, _DefaultMixin): 

1742 """Parameter specification variable. 

1743 

1744 Usage:: 

1745 

1746 P = ParamSpec('P') 

1747 

1748 Parameter specification variables exist primarily for the benefit of static 

1749 type checkers. They are used to forward the parameter types of one 

1750 callable to another callable, a pattern commonly found in higher order 

1751 functions and decorators. They are only valid when used in ``Concatenate``, 

 1752     or as the first argument to ``Callable``. In Python 3.10 and higher, 

1753 they are also supported in user-defined Generics at runtime. 

1754 See class Generic for more information on generic types. An 

1755 example for annotating a decorator:: 

1756 

1757 T = TypeVar('T') 

1758 P = ParamSpec('P') 

1759 

1760 def add_logging(f: Callable[P, T]) -> Callable[P, T]: 

1761 '''A type-safe decorator to add logging to a function.''' 

1762 def inner(*args: P.args, **kwargs: P.kwargs) -> T: 

1763 logging.info(f'{f.__name__} was called') 

1764 return f(*args, **kwargs) 

1765 return inner 

1766 

1767 @add_logging 

1768 def add_two(x: float, y: float) -> float: 

1769 '''Add two numbers together.''' 

1770 return x + y 

1771 

1772 Parameter specification variables defined with covariant=True or 

1773 contravariant=True can be used to declare covariant or contravariant 

1774 generic types. These keyword arguments are valid, but their actual semantics 

1775 are yet to be decided. See PEP 612 for details. 

1776 

1777 Parameter specification variables can be introspected. e.g.: 

1778 

 1779        P.__name__ == 'P' 

1780 P.__bound__ == None 

1781 P.__covariant__ == False 

1782 P.__contravariant__ == False 

1783 

1784 Note that only parameter specification variables defined in global scope can 

1785 be pickled. 

1786 """ 

1787 

1788 # Trick Generic __parameters__. 

1789 __class__ = typing.TypeVar 

1790 

1791 @property 

1792 def args(self): 

1793 return ParamSpecArgs(self) 

1794 

1795 @property 

1796 def kwargs(self): 

1797 return ParamSpecKwargs(self) 

1798 

1799 def __init__(self, name, *, bound=None, covariant=False, contravariant=False, 

1800 infer_variance=False, default=NoDefault): 

1801 list.__init__(self, [self]) 

1802 self.__name__ = name 

1803 self.__covariant__ = bool(covariant) 

1804 self.__contravariant__ = bool(contravariant) 

1805 self.__infer_variance__ = bool(infer_variance) 

1806 if bound: 

1807 self.__bound__ = typing._type_check(bound, 'Bound must be a type.') 

1808 else: 

1809 self.__bound__ = None 

1810 _DefaultMixin.__init__(self, default) 

1811 

1812 # for pickling: 

1813 def_mod = _caller() 

1814 if def_mod != 'typing_extensions': 

1815 self.__module__ = def_mod 

1816 

1817 def __repr__(self): 

1818 if self.__infer_variance__: 

1819 prefix = '' 

1820 elif self.__covariant__: 

1821 prefix = '+' 

1822 elif self.__contravariant__: 

1823 prefix = '-' 

1824 else: 

1825 prefix = '~' 

1826 return prefix + self.__name__ 

1827 

1828 def __hash__(self): 

1829 return object.__hash__(self) 

1830 

1831 def __eq__(self, other): 

1832 return self is other 

1833 

1834 def __reduce__(self): 

1835 return self.__name__ 

1836 

1837 # Hack to get typing._type_check to pass. 

1838 def __call__(self, *args, **kwargs): 

1839 pass 

1840 

1841 
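# Illustrative sketch (editor's addition, not part of typing_extensions): the
# runtime surface shared by every ParamSpec branch above -- the name, the
# .args/.kwargs accessors, and the PEP 696 has_default() helper attached via
# _set_default()/_DefaultMixin. The name _P_demo is hypothetical.
_P_demo = ParamSpec("_P_demo")
assert _P_demo.__name__ == "_P_demo"
assert _P_demo.args.__origin__ is _P_demo
assert _P_demo.kwargs.__origin__ is _P_demo
assert not _P_demo.has_default()
del _P_demo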

1842# 3.9 

1843if not hasattr(typing, 'Concatenate'): 

1844 # Inherits from list as a workaround for Callable checks in Python < 3.9.2. 

1845 

1846 # 3.9.0-1 

1847 if not hasattr(typing, '_type_convert'): 

1848 def _type_convert(arg, module=None, *, allow_special_forms=False): 

1849 """For converting None to type(None), and strings to ForwardRef.""" 

1850 if arg is None: 

1851 return type(None) 

1852 if isinstance(arg, str): 

1853 if sys.version_info <= (3, 9, 6): 

1854 return ForwardRef(arg) 

1855 if sys.version_info <= (3, 9, 7): 

1856 return ForwardRef(arg, module=module) 

1857 return ForwardRef(arg, module=module, is_class=allow_special_forms) 

1858 return arg 

1859 else: 

1860 _type_convert = typing._type_convert 

1861 

1862 class _ConcatenateGenericAlias(list): 

1863 

1864 # Trick Generic into looking into this for __parameters__. 

1865 __class__ = typing._GenericAlias 

1866 

1867 def __init__(self, origin, args): 

1868 super().__init__(args) 

1869 self.__origin__ = origin 

1870 self.__args__ = args 

1871 

1872 def __repr__(self): 

1873 _type_repr = typing._type_repr 

1874 return (f'{_type_repr(self.__origin__)}' 

1875 f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]') 

1876 

1877 def __hash__(self): 

1878 return hash((self.__origin__, self.__args__)) 

1879 

1880 # Hack to get typing._type_check to pass in Generic. 

1881 def __call__(self, *args, **kwargs): 

1882 pass 

1883 

1884 @property 

1885 def __parameters__(self): 

1886 return tuple( 

1887 tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec)) 

1888 ) 

1889 

1890 # 3.9 used by __getitem__ below 

1891 def copy_with(self, params): 

1892 if isinstance(params[-1], _ConcatenateGenericAlias): 

1893 params = (*params[:-1], *params[-1].__args__) 

1894 elif isinstance(params[-1], (list, tuple)): 

1895 return (*params[:-1], *params[-1]) 

1896 elif (not (params[-1] is ... or isinstance(params[-1], ParamSpec))): 

1897 raise TypeError("The last parameter to Concatenate should be a " 

1898 "ParamSpec variable or ellipsis.") 

1899 return self.__class__(self.__origin__, params) 

1900 

1901 # 3.9; accessed during GenericAlias.__getitem__ when substituting 

1902 def __getitem__(self, args): 

1903 if self.__origin__ in (Generic, Protocol): 

1904 # Can't subscript Generic[...] or Protocol[...]. 

1905 raise TypeError(f"Cannot subscript already-subscripted {self}") 

1906 if not self.__parameters__: 

1907 raise TypeError(f"{self} is not a generic class") 

1908 

1909 if not isinstance(args, tuple): 

1910 args = (args,) 

1911 args = _unpack_args(*(_type_convert(p) for p in args)) 

1912 params = self.__parameters__ 

1913 for param in params: 

1914 prepare = getattr(param, "__typing_prepare_subst__", None) 

1915 if prepare is not None: 

1916 args = prepare(self, args) 

1917 # 3.9 & typing.ParamSpec 

1918 elif isinstance(param, ParamSpec): 

1919 i = params.index(param) 

1920 if ( 

1921 i == len(args) 

1922 and getattr(param, '__default__', NoDefault) is not NoDefault 

1923 ): 

1924 args = [*args, param.__default__] 

1925 if i >= len(args): 

1926 raise TypeError(f"Too few arguments for {self}") 

1927 # Special case for Z[[int, str, bool]] == Z[int, str, bool] 

1928 if len(params) == 1 and not _is_param_expr(args[0]): 

1929 assert i == 0 

1930 args = (args,) 

1931 elif ( 

1932 isinstance(args[i], list) 

1933 # 3.9 

 1934                         # This class inherits from list; do not convert 

1935 and not isinstance(args[i], _ConcatenateGenericAlias) 

1936 ): 

1937 args = (*args[:i], tuple(args[i]), *args[i + 1:]) 

1938 

1939 alen = len(args) 

1940 plen = len(params) 

1941 if alen != plen: 

1942 raise TypeError( 

1943 f"Too {'many' if alen > plen else 'few'} arguments for {self};" 

1944 f" actual {alen}, expected {plen}" 

1945 ) 

1946 

1947 subst = dict(zip(self.__parameters__, args)) 

1948 # determine new args 

1949 new_args = [] 

1950 for arg in self.__args__: 

1951 if isinstance(arg, type): 

1952 new_args.append(arg) 

1953 continue 

1954 if isinstance(arg, TypeVar): 

1955 arg = subst[arg] 

1956 if ( 

1957 (isinstance(arg, typing._GenericAlias) and _is_unpack(arg)) 

1958 or ( 

1959 hasattr(_types, "GenericAlias") 

1960 and isinstance(arg, _types.GenericAlias) 

1961 and getattr(arg, "__unpacked__", False) 

1962 ) 

1963 ): 

1964 raise TypeError(f"{arg} is not valid as type argument") 

1965 

1966 elif isinstance(arg, 

1967 typing._GenericAlias 

1968 if not hasattr(_types, "GenericAlias") else 

1969 (typing._GenericAlias, _types.GenericAlias) 

1970 ): 

1971 subparams = arg.__parameters__ 

1972 if subparams: 

1973 subargs = tuple(subst[x] for x in subparams) 

1974 arg = arg[subargs] 

1975 new_args.append(arg) 

1976 return self.copy_with(tuple(new_args)) 

1977 

1978# 3.10+ 

1979else: 

1980 _ConcatenateGenericAlias = typing._ConcatenateGenericAlias 

1981 

1982 # 3.10 

1983 if sys.version_info < (3, 11): 

1984 

1985 class _ConcatenateGenericAlias(typing._ConcatenateGenericAlias, _root=True): 

1986 # needed for checks in collections.abc.Callable to accept this class 

1987 __module__ = "typing" 

1988 

1989 def copy_with(self, params): 

1990 if isinstance(params[-1], (list, tuple)): 

1991 return (*params[:-1], *params[-1]) 

1992 if isinstance(params[-1], typing._ConcatenateGenericAlias): 

1993 params = (*params[:-1], *params[-1].__args__) 

1994 elif not (params[-1] is ... or isinstance(params[-1], ParamSpec)): 

1995 raise TypeError("The last parameter to Concatenate should be a " 

1996 "ParamSpec variable or ellipsis.") 

1997 return super(typing._ConcatenateGenericAlias, self).copy_with(params) 

1998 

1999 def __getitem__(self, args): 

2000 value = super().__getitem__(args) 

2001 if isinstance(value, tuple) and any(_is_unpack(t) for t in value): 

2002 return tuple(_unpack_args(*(n for n in value))) 

2003 return value 

2004 

2005 

2006# 3.9.2 

2007class _EllipsisDummy: ... 

2008 

2009 

2010# <=3.10 

2011def _create_concatenate_alias(origin, parameters): 

2012 if parameters[-1] is ... and sys.version_info < (3, 9, 2): 

2013 # Hack: Arguments must be types, replace it with one. 

2014 parameters = (*parameters[:-1], _EllipsisDummy) 

2015 if sys.version_info >= (3, 10, 3): 

2016 concatenate = _ConcatenateGenericAlias(origin, parameters, 

2017 _typevar_types=(TypeVar, ParamSpec), 

2018 _paramspec_tvars=True) 

2019 else: 

2020 concatenate = _ConcatenateGenericAlias(origin, parameters) 

2021 if parameters[-1] is not _EllipsisDummy: 

2022 return concatenate 

2023 # Remove dummy again 

2024 concatenate.__args__ = tuple(p if p is not _EllipsisDummy else ... 

2025 for p in concatenate.__args__) 

2026 if sys.version_info < (3, 10): 

2027 # backport needs __args__ adjustment only 

2028 return concatenate 

2029 concatenate.__parameters__ = tuple(p for p in concatenate.__parameters__ 

2030 if p is not _EllipsisDummy) 

2031 return concatenate 

2032 

2033 

2034# <=3.10 

2035@typing._tp_cache 

2036def _concatenate_getitem(self, parameters): 

2037 if parameters == (): 

2038 raise TypeError("Cannot take a Concatenate of no types.") 

2039 if not isinstance(parameters, tuple): 

2040 parameters = (parameters,) 

2041 if not (parameters[-1] is ... or isinstance(parameters[-1], ParamSpec)): 

2042 raise TypeError("The last parameter to Concatenate should be a " 

2043 "ParamSpec variable or ellipsis.") 

2044 msg = "Concatenate[arg, ...]: each arg must be a type." 

2045 parameters = (*(typing._type_check(p, msg) for p in parameters[:-1]), 

2046 parameters[-1]) 

2047 return _create_concatenate_alias(self, parameters) 

2048 

2049 

2050# 3.11+; Concatenate does not accept ellipsis in 3.10 

2051if sys.version_info >= (3, 11): 

2052 Concatenate = typing.Concatenate 

2053# <=3.10 

2054else: 

2055 @_ExtensionsSpecialForm 

2056 def Concatenate(self, parameters): 

2057 """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a 

2058 higher order function which adds, removes or transforms parameters of a 

2059 callable. 

2060 

2061 For example:: 

2062 

2063 Callable[Concatenate[int, P], int] 

2064 

2065 See PEP 612 for detailed information. 

2066 """ 

2067 return _concatenate_getitem(self, parameters) 

2068 

2069 
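# Illustrative sketch (editor's addition, not part of typing_extensions): on
# every branch above, Concatenate[int, P] yields an alias whose __args__ keep
# the prepended positional type followed by the ParamSpec. The names _P_c_demo
# and _c_demo are hypothetical.
_P_c_demo = ParamSpec("_P_c_demo")
_c_demo = Concatenate[int, _P_c_demo]
assert _c_demo.__args__ == (int, _P_c_demo)
del _P_c_demo, _c_demo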

2070# 3.10+ 

2071if hasattr(typing, 'TypeGuard'): 

2072 TypeGuard = typing.TypeGuard 

2073# 3.9 

2074else: 

2075 @_ExtensionsSpecialForm 

2076 def TypeGuard(self, parameters): 

2077 """Special typing form used to annotate the return type of a user-defined 

2078 type guard function. ``TypeGuard`` only accepts a single type argument. 

2079 At runtime, functions marked this way should return a boolean. 

2080 

2081 ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static 

2082 type checkers to determine a more precise type of an expression within a 

2083 program's code flow. Usually type narrowing is done by analyzing 

2084 conditional code flow and applying the narrowing to a block of code. The 

2085 conditional expression here is sometimes referred to as a "type guard". 

2086 

2087 Sometimes it would be convenient to use a user-defined boolean function 

2088 as a type guard. Such a function should use ``TypeGuard[...]`` as its 

2089 return type to alert static type checkers to this intention. 

2090 

2091 Using ``-> TypeGuard`` tells the static type checker that for a given 

2092 function: 

2093 

2094 1. The return value is a boolean. 

2095 2. If the return value is ``True``, the type of its argument 

2096 is the type inside ``TypeGuard``. 

2097 

2098 For example:: 

2099 

2100 def is_str(val: Union[str, float]): 

2101 # "isinstance" type guard 

2102 if isinstance(val, str): 

2103 # Type of ``val`` is narrowed to ``str`` 

2104 ... 

2105 else: 

2106 # Else, type of ``val`` is narrowed to ``float``. 

2107 ... 

2108 

 2109     Strict type narrowing is not enforced -- the type inside ``TypeGuard`` 

 2110     need not be a narrower form of the argument's declared type (it can even be a wider form) and this may lead to 

2111 type-unsafe results. The main reason is to allow for things like 

2112 narrowing ``List[object]`` to ``List[str]`` even though the latter is not 

2113 a subtype of the former, since ``List`` is invariant. The responsibility of 

2114 writing type-safe type guards is left to the user. 

2115 

2116 ``TypeGuard`` also works with type variables. For more information, see 

2117 PEP 647 (User-Defined Type Guards). 

2118 """ 

2119 item = typing._type_check(parameters, f'{self} accepts only a single type.') 

2120 return typing._GenericAlias(self, (item,)) 

2121 

2122 
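# Illustrative sketch (editor's addition, not part of typing_extensions): a
# user-defined type guard of the kind described in the docstring above. At
# runtime it is just a bool-returning function; only static checkers act on
# the TypeGuard annotation. The name _is_str_list_demo is hypothetical.
def _is_str_list_demo(val: typing.List[object]) -> TypeGuard[typing.List[str]]:
    # Narrows List[object] to List[str] for static checkers when True is returned.
    return all(isinstance(x, str) for x in val)

assert _is_str_list_demo(["a", "b"]) is True
assert _is_str_list_demo(["a", 1]) is False
del _is_str_list_demo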

2123# 3.13+ 

2124if hasattr(typing, 'TypeIs'): 

2125 TypeIs = typing.TypeIs 

2126# <=3.12 

2127else: 

2128 @_ExtensionsSpecialForm 

2129 def TypeIs(self, parameters): 

2130 """Special typing form used to annotate the return type of a user-defined 

2131 type narrower function. ``TypeIs`` only accepts a single type argument. 

2132 At runtime, functions marked this way should return a boolean. 

2133 

2134 ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static 

2135 type checkers to determine a more precise type of an expression within a 

2136 program's code flow. Usually type narrowing is done by analyzing 

2137 conditional code flow and applying the narrowing to a block of code. The 

2138 conditional expression here is sometimes referred to as a "type guard". 

2139 

2140 Sometimes it would be convenient to use a user-defined boolean function 

2141 as a type guard. Such a function should use ``TypeIs[...]`` as its 

2142 return type to alert static type checkers to this intention. 

2143 

2144 Using ``-> TypeIs`` tells the static type checker that for a given 

2145 function: 

2146 

2147 1. The return value is a boolean. 

2148 2. If the return value is ``True``, the type of its argument 

2149 is the intersection of the type inside ``TypeIs`` and the argument's 

2150 previously known type. 

2151 

2152 For example:: 

2153 

2154 def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]: 

2155 return hasattr(val, '__await__') 

2156 

2157 def f(val: Union[int, Awaitable[int]]) -> int: 

2158 if is_awaitable(val): 

2159 assert_type(val, Awaitable[int]) 

2160 else: 

2161 assert_type(val, int) 

2162 

2163 ``TypeIs`` also works with type variables. For more information, see 

2164 PEP 742 (Narrowing types with TypeIs). 

2165 """ 

2166 item = typing._type_check(parameters, f'{self} accepts only a single type.') 

2167 return typing._GenericAlias(self, (item,)) 

2168 

2169 
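# Illustrative sketch (editor's addition, not part of typing_extensions):
# unlike TypeGuard, TypeIs narrows in both the True and the False branch, so
# the predicate should be faithful to the annotated type. The name
# _is_int_demo is hypothetical.
def _is_int_demo(val: object) -> TypeIs[int]:
    return isinstance(val, int)

assert _is_int_demo(3)
assert not _is_int_demo("3")
del _is_int_demo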

2170# 3.14+? 

2171if hasattr(typing, 'TypeForm'): 

2172 TypeForm = typing.TypeForm 

2173# <=3.13 

2174else: 

2175 class _TypeFormForm(_ExtensionsSpecialForm, _root=True): 

2176 # TypeForm(X) is equivalent to X but indicates to the type checker 

2177 # that the object is a TypeForm. 

2178 def __call__(self, obj, /): 

2179 return obj 

2180 

2181 @_TypeFormForm 

2182 def TypeForm(self, parameters): 

2183 """A special form representing the value that results from the evaluation 

2184 of a type expression. This value encodes the information supplied in the 

2185 type expression, and it represents the type described by that type expression. 

2186 

2187 When used in a type expression, TypeForm describes a set of type form objects. 

2188 It accepts a single type argument, which must be a valid type expression. 

2189 ``TypeForm[T]`` describes the set of all type form objects that represent 

2190 the type T or types that are assignable to T. 

2191 

2192 Usage: 

2193 

2194 def cast[T](typ: TypeForm[T], value: Any) -> T: ... 

2195 

2196 reveal_type(cast(int, "x")) # int 

2197 

2198 See PEP 747 for more information. 

2199 """ 

2200 item = typing._type_check(parameters, f'{self} accepts only a single type.') 

2201 return typing._GenericAlias(self, (item,)) 

2202 

2203 
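# Illustrative sketch (editor's addition, not part of typing_extensions): for
# the backport above, TypeForm(x) is an identity function at runtime and
# TypeForm[T] is an ordinary subscripted alias. _tf_demo is a hypothetical name.
assert TypeForm(int) is int
_tf_demo = TypeForm[int]
assert _tf_demo.__args__ == (int,)
del _tf_demo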

 2204# Vendored from CPython typing._SpecialForm 

2205class _SpecialForm(typing._Final, _root=True): 

2206 __slots__ = ('_name', '__doc__', '_getitem') 

2207 

2208 def __init__(self, getitem): 

2209 self._getitem = getitem 

2210 self._name = getitem.__name__ 

2211 self.__doc__ = getitem.__doc__ 

2212 

2213 def __getattr__(self, item): 

2214 if item in {'__name__', '__qualname__'}: 

2215 return self._name 

2216 

2217 raise AttributeError(item) 

2218 

2219 def __mro_entries__(self, bases): 

2220 raise TypeError(f"Cannot subclass {self!r}") 

2221 

2222 def __repr__(self): 

2223 return f'typing_extensions.{self._name}' 

2224 

2225 def __reduce__(self): 

2226 return self._name 

2227 

2228 def __call__(self, *args, **kwds): 

2229 raise TypeError(f"Cannot instantiate {self!r}") 

2230 

2231 def __or__(self, other): 

2232 return typing.Union[self, other] 

2233 

2234 def __ror__(self, other): 

2235 return typing.Union[other, self] 

2236 

2237 def __instancecheck__(self, obj): 

2238 raise TypeError(f"{self} cannot be used with isinstance()") 

2239 

2240 def __subclasscheck__(self, cls): 

2241 raise TypeError(f"{self} cannot be used with issubclass()") 

2242 

2243 @typing._tp_cache 

2244 def __getitem__(self, parameters): 

2245 return self._getitem(self, parameters) 

2246 

2247 

2248if hasattr(typing, "LiteralString"): # 3.11+ 

2249 LiteralString = typing.LiteralString 

2250else: 

2251 @_SpecialForm 

2252 def LiteralString(self, params): 

2253 """Represents an arbitrary literal string. 

2254 

2255 Example:: 

2256 

2257 from typing_extensions import LiteralString 

2258 

2259 def query(sql: LiteralString) -> ...: 

2260 ... 

2261 

2262 query("SELECT * FROM table") # ok 

2263 query(f"SELECT * FROM {input()}") # not ok 

2264 

2265 See PEP 675 for details. 

2266 

2267 """ 

2268 raise TypeError(f"{self} is not subscriptable") 

2269 

2270 

2271if hasattr(typing, "Self"): # 3.11+ 

2272 Self = typing.Self 

2273else: 

2274 @_SpecialForm 

2275 def Self(self, params): 

2276 """Used to spell the type of "self" in classes. 

2277 

2278 Example:: 

2279 

 2280           from typing_extensions import Self 

2281 

2282 class ReturnsSelf: 

2283 def parse(self, data: bytes) -> Self: 

2284 ... 

2285 return self 

2286 

2287 """ 

2288 

2289 raise TypeError(f"{self} is not subscriptable") 

2290 

2291 

2292if hasattr(typing, "Never"): # 3.11+ 

2293 Never = typing.Never 

2294else: 

2295 @_SpecialForm 

2296 def Never(self, params): 

2297 """The bottom type, a type that has no members. 

2298 

2299 This can be used to define a function that should never be 

2300 called, or a function that never returns:: 

2301 

2302 from typing_extensions import Never 

2303 

2304 def never_call_me(arg: Never) -> None: 

2305 pass 

2306 

2307 def int_or_str(arg: int | str) -> None: 

2308 never_call_me(arg) # type checker error 

2309 match arg: 

2310 case int(): 

2311 print("It's an int") 

2312 case str(): 

2313 print("It's a str") 

2314 case _: 

2315 never_call_me(arg) # ok, arg is of type Never 

2316 

2317 """ 

2318 

2319 raise TypeError(f"{self} is not subscriptable") 

2320 

2321 

2322if hasattr(typing, 'Required'): # 3.11+ 

2323 Required = typing.Required 

2324 NotRequired = typing.NotRequired 

2325else: # <=3.10 

2326 @_ExtensionsSpecialForm 

2327 def Required(self, parameters): 

2328 """A special typing construct to mark a key of a total=False TypedDict 

2329 as required. For example: 

2330 

2331 class Movie(TypedDict, total=False): 

2332 title: Required[str] 

2333 year: int 

2334 

2335 m = Movie( 

2336 title='The Matrix', # typechecker error if key is omitted 

2337 year=1999, 

2338 ) 

2339 

2340 There is no runtime checking that a required key is actually provided 

2341 when instantiating a related TypedDict. 

2342 """ 

2343 item = typing._type_check(parameters, f'{self._name} accepts only a single type.') 

2344 return typing._GenericAlias(self, (item,)) 

2345 

2346 @_ExtensionsSpecialForm 

2347 def NotRequired(self, parameters): 

2348 """A special typing construct to mark a key of a TypedDict as 

2349 potentially missing. For example: 

2350 

2351 class Movie(TypedDict): 

2352 title: str 

2353 year: NotRequired[int] 

2354 

2355 m = Movie( 

2356 title='The Matrix', # typechecker error if key is omitted 

2357 year=1999, 

2358 ) 

2359 """ 

2360 item = typing._type_check(parameters, f'{self._name} accepts only a single type.') 

2361 return typing._GenericAlias(self, (item,)) 

2362 

2363 
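# Illustrative sketch (editor's addition, not part of typing_extensions): at
# runtime both forms are plain subscriptable aliases, so TypedDict tooling can
# unwrap them through __origin__/__args__. The names _req_demo and
# _notreq_demo are hypothetical.
_req_demo = Required[int]
assert _req_demo.__origin__ is Required
assert _req_demo.__args__ == (int,)
_notreq_demo = NotRequired[str]
assert _notreq_demo.__args__ == (str,)
del _req_demo, _notreq_demo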

2364if hasattr(typing, 'ReadOnly'): 

2365 ReadOnly = typing.ReadOnly 

2366else: # <=3.12 

2367 @_ExtensionsSpecialForm 

2368 def ReadOnly(self, parameters): 

2369 """A special typing construct to mark an item of a TypedDict as read-only. 

2370 

2371 For example: 

2372 

2373 class Movie(TypedDict): 

2374 title: ReadOnly[str] 

2375 year: int 

2376 

2377 def mutate_movie(m: Movie) -> None: 

2378 m["year"] = 1992 # allowed 

2379 m["title"] = "The Matrix" # typechecker error 

2380 

2381 There is no runtime checking for this property. 

2382 """ 

2383 item = typing._type_check(parameters, f'{self._name} accepts only a single type.') 

2384 return typing._GenericAlias(self, (item,)) 

2385 

2386 

2387_UNPACK_DOC = """\ 

2388Type unpack operator. 

2389 

2390The type unpack operator takes the child types from some container type, 

2391such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For 

2392example: 

2393 

2394 # For some generic class `Foo`: 

2395 Foo[Unpack[tuple[int, str]]] # Equivalent to Foo[int, str] 

2396 

2397 Ts = TypeVarTuple('Ts') 

2398 # Specifies that `Bar` is generic in an arbitrary number of types. 

2399 # (Think of `Ts` as a tuple of an arbitrary number of individual 

2400 # `TypeVar`s, which the `Unpack` is 'pulling out' directly into the 

2401 # `Generic[]`.) 

2402 class Bar(Generic[Unpack[Ts]]): ... 

2403 Bar[int] # Valid 

2404 Bar[int, str] # Also valid 

2405 

2406From Python 3.11, this can also be done using the `*` operator: 

2407 

2408 Foo[*tuple[int, str]] 

2409 class Bar(Generic[*Ts]): ... 

2410 

2411The operator can also be used along with a `TypedDict` to annotate 

2412`**kwargs` in a function signature. For instance: 

2413 

2414 class Movie(TypedDict): 

2415 name: str 

2416 year: int 

2417 

2418 # This function expects two keyword arguments - *name* of type `str` and 

2419 # *year* of type `int`. 

2420 def foo(**kwargs: Unpack[Movie]): ... 

2421 

2422Note that there is only some runtime checking of this operator. Not 

2423everything the runtime allows may be accepted by static type checkers. 

2424 

2425For more information, see PEP 646 and PEP 692. 

2426""" 

2427 

2428 

2429if sys.version_info >= (3, 12): # PEP 692 changed the repr of Unpack[] 

2430 Unpack = typing.Unpack 

2431 

2432 def _is_unpack(obj): 

2433 return get_origin(obj) is Unpack 

2434 

2435else: # <=3.11 

2436 class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True): 

2437 def __init__(self, getitem): 

2438 super().__init__(getitem) 

2439 self.__doc__ = _UNPACK_DOC 

2440 

2441 class _UnpackAlias(typing._GenericAlias, _root=True): 

2442 if sys.version_info < (3, 11): 

2443 # needed for compatibility with Generic[Unpack[Ts]] 

2444 __class__ = typing.TypeVar 

2445 

2446 @property 

2447 def __typing_unpacked_tuple_args__(self): 

2448 assert self.__origin__ is Unpack 

2449 assert len(self.__args__) == 1 

2450 arg, = self.__args__ 

2451 if isinstance(arg, (typing._GenericAlias, _types.GenericAlias)): 

2452 if arg.__origin__ is not tuple: 

2453 raise TypeError("Unpack[...] must be used with a tuple type") 

2454 return arg.__args__ 

2455 return None 

2456 

2457 @property 

2458 def __typing_is_unpacked_typevartuple__(self): 

2459 assert self.__origin__ is Unpack 

2460 assert len(self.__args__) == 1 

2461 return isinstance(self.__args__[0], TypeVarTuple) 

2462 

2463 def __getitem__(self, args): 

2464 if self.__typing_is_unpacked_typevartuple__: 

2465 return args 

2466 return super().__getitem__(args) 

2467 

2468 @_UnpackSpecialForm 

2469 def Unpack(self, parameters): 

2470 item = typing._type_check(parameters, f'{self._name} accepts only a single type.') 

2471 return _UnpackAlias(self, (item,)) 

2472 

2473 def _is_unpack(obj): 

2474 return isinstance(obj, _UnpackAlias) 

2475 

2476 
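# Illustrative sketch (editor's addition, not part of typing_extensions): on
# both branches above, an Unpack of a fixed-length tuple exposes the element
# types through __typing_unpacked_tuple_args__, which _unpack_args() below
# relies on. The name _u_demo is hypothetical.
_u_demo = Unpack[typing.Tuple[int, str]]
assert _u_demo.__typing_unpacked_tuple_args__ == (int, str)
del _u_demo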

2477def _unpack_args(*args): 

2478 newargs = [] 

2479 for arg in args: 

2480 subargs = getattr(arg, '__typing_unpacked_tuple_args__', None) 

2481 if subargs is not None and (not (subargs and subargs[-1] is ...)): 

2482 newargs.extend(subargs) 

2483 else: 

2484 newargs.append(arg) 

2485 return newargs 

2486 

2487 
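# Illustrative sketch (editor's addition, not part of typing_extensions): how
# _unpack_args() flattens fixed-length unpacks while leaving unbounded ones
# (those ending in ...) untouched.
assert _unpack_args(int, Unpack[typing.Tuple[str, float]]) == [int, str, float]
assert _unpack_args(Unpack[typing.Tuple[int, ...]]) == [Unpack[typing.Tuple[int, ...]]]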

2488if _PEP_696_IMPLEMENTED: 

2489 from typing import TypeVarTuple 

2490 

2491elif hasattr(typing, "TypeVarTuple"): # 3.11+ 

2492 

2493 # Add default parameter - PEP 696 

2494 class TypeVarTuple(metaclass=_TypeVarLikeMeta): 

2495 """Type variable tuple.""" 

2496 

2497 _backported_typevarlike = typing.TypeVarTuple 

2498 

2499 def __new__(cls, name, *, default=NoDefault): 

2500 tvt = typing.TypeVarTuple(name) 

2501 _set_default(tvt, default) 

2502 _set_module(tvt) 

2503 

2504 def _typevartuple_prepare_subst(alias, args): 

2505 params = alias.__parameters__ 

2506 typevartuple_index = params.index(tvt) 

2507 for param in params[typevartuple_index + 1:]: 

2508 if isinstance(param, TypeVarTuple): 

2509 raise TypeError( 

2510 f"More than one TypeVarTuple parameter in {alias}" 

2511 ) 

2512 

2513 alen = len(args) 

2514 plen = len(params) 

2515 left = typevartuple_index 

2516 right = plen - typevartuple_index - 1 

2517 var_tuple_index = None 

2518 fillarg = None 

2519 for k, arg in enumerate(args): 

2520 if not isinstance(arg, type): 

2521 subargs = getattr(arg, '__typing_unpacked_tuple_args__', None) 

2522 if subargs and len(subargs) == 2 and subargs[-1] is ...: 

2523 if var_tuple_index is not None: 

2524 raise TypeError( 

2525 "More than one unpacked " 

2526 "arbitrary-length tuple argument" 

2527 ) 

2528 var_tuple_index = k 

2529 fillarg = subargs[0] 

2530 if var_tuple_index is not None: 

2531 left = min(left, var_tuple_index) 

2532 right = min(right, alen - var_tuple_index - 1) 

2533 elif left + right > alen: 

2534 raise TypeError(f"Too few arguments for {alias};" 

2535 f" actual {alen}, expected at least {plen - 1}") 

2536 if left == alen - right and tvt.has_default(): 

2537 replacement = _unpack_args(tvt.__default__) 

2538 else: 

2539 replacement = args[left: alen - right] 

2540 

2541 return ( 

2542 *args[:left], 

2543 *([fillarg] * (typevartuple_index - left)), 

2544 replacement, 

2545 *([fillarg] * (plen - right - left - typevartuple_index - 1)), 

2546 *args[alen - right:], 

2547 ) 

2548 

2549 tvt.__typing_prepare_subst__ = _typevartuple_prepare_subst 

2550 return tvt 

2551 

2552 def __init_subclass__(self, *args, **kwds): 

2553 raise TypeError("Cannot subclass special typing classes") 

2554 

2555else: # <=3.10 

2556 class TypeVarTuple(_DefaultMixin): 

2557 """Type variable tuple. 

2558 

2559 Usage:: 

2560 

2561 Ts = TypeVarTuple('Ts') 

2562 

2563 In the same way that a normal type variable is a stand-in for a single 

2564 type such as ``int``, a type variable *tuple* is a stand-in for a *tuple* 

2565 type such as ``Tuple[int, str]``. 

2566 

2567 Type variable tuples can be used in ``Generic`` declarations. 

2568 Consider the following example:: 

2569 

2570 class Array(Generic[*Ts]): ... 

2571 

2572 The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``, 

2573 where ``T1`` and ``T2`` are type variables. To use these type variables 

2574 as type parameters of ``Array``, we must *unpack* the type variable tuple using 

2575 the star operator: ``*Ts``. The signature of ``Array`` then behaves 

2576 as if we had simply written ``class Array(Generic[T1, T2]): ...``. 

 2577     In contrast to ``Generic[T1, T2]``, however, ``Generic[*Ts]`` allows 

2578 us to parameterise the class with an *arbitrary* number of type parameters. 

2579 

2580 Type variable tuples can be used anywhere a normal ``TypeVar`` can. 

2581 This includes class definitions, as shown above, as well as function 

2582 signatures and variable annotations:: 

2583 

2584 class Array(Generic[*Ts]): 

2585 

2586 def __init__(self, shape: Tuple[*Ts]): 

2587 self._shape: Tuple[*Ts] = shape 

2588 

2589 def get_shape(self) -> Tuple[*Ts]: 

2590 return self._shape 

2591 

2592 shape = (Height(480), Width(640)) 

2593 x: Array[Height, Width] = Array(shape) 

2594 y = abs(x) # Inferred type is Array[Height, Width] 

2595 z = x + x # ... is Array[Height, Width] 

2596 x.get_shape() # ... is tuple[Height, Width] 

2597 

2598 """ 

2599 

2600 # Trick Generic __parameters__. 

2601 __class__ = typing.TypeVar 

2602 

2603 def __iter__(self): 

2604 yield self.__unpacked__ 

2605 

2606 def __init__(self, name, *, default=NoDefault): 

2607 self.__name__ = name 

2608 _DefaultMixin.__init__(self, default) 

2609 

2610 # for pickling: 

2611 def_mod = _caller() 

2612 if def_mod != 'typing_extensions': 

2613 self.__module__ = def_mod 

2614 

2615 self.__unpacked__ = Unpack[self] 

2616 

2617 def __repr__(self): 

2618 return self.__name__ 

2619 

2620 def __hash__(self): 

2621 return object.__hash__(self) 

2622 

2623 def __eq__(self, other): 

2624 return self is other 

2625 

2626 def __reduce__(self): 

2627 return self.__name__ 

2628 

2629 def __init_subclass__(self, *args, **kwds): 

2630 if '_root' not in kwds: 

2631 raise TypeError("Cannot subclass special typing classes") 

2632 

2633 
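# Illustrative sketch (editor's addition, not part of typing_extensions): the
# introspection surface shared by every TypeVarTuple branch above. The name
# _Ts_demo is hypothetical.
_Ts_demo = TypeVarTuple("_Ts_demo")
assert _Ts_demo.__name__ == "_Ts_demo"
assert not _Ts_demo.has_default()
del _Ts_demo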

2634if hasattr(typing, "reveal_type"): # 3.11+ 

2635 reveal_type = typing.reveal_type 

2636else: # <=3.10 

2637 def reveal_type(obj: T, /) -> T: 

2638 """Reveal the inferred type of a variable. 

2639 

2640 When a static type checker encounters a call to ``reveal_type()``, 

2641 it will emit the inferred type of the argument:: 

2642 

2643 x: int = 1 

2644 reveal_type(x) 

2645 

2646 Running a static type checker (e.g., ``mypy``) on this example 

2647 will produce output similar to 'Revealed type is "builtins.int"'. 

2648 

2649 At runtime, the function prints the runtime type of the 

2650 argument and returns it unchanged. 

2651 

2652 """ 

2653 print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr) 

2654 return obj 

2655 

2656 

2657if hasattr(typing, "_ASSERT_NEVER_REPR_MAX_LENGTH"): # 3.11+ 

2658 _ASSERT_NEVER_REPR_MAX_LENGTH = typing._ASSERT_NEVER_REPR_MAX_LENGTH 

2659else: # <=3.10 

2660 _ASSERT_NEVER_REPR_MAX_LENGTH = 100 

2661 

2662 

2663if hasattr(typing, "assert_never"): # 3.11+ 

2664 assert_never = typing.assert_never 

2665else: # <=3.10 

2666 def assert_never(arg: Never, /) -> Never: 

2667 """Assert to the type checker that a line of code is unreachable. 

2668 

2669 Example:: 

2670 

2671 def int_or_str(arg: int | str) -> None: 

2672 match arg: 

2673 case int(): 

2674 print("It's an int") 

2675 case str(): 

2676 print("It's a str") 

2677 case _: 

2678 assert_never(arg) 

2679 

2680 If a type checker finds that a call to assert_never() is 

2681 reachable, it will emit an error. 

2682 

2683 At runtime, this throws an exception when called. 

2684 

2685 """ 

2686 value = repr(arg) 

2687 if len(value) > _ASSERT_NEVER_REPR_MAX_LENGTH: 

2688 value = value[:_ASSERT_NEVER_REPR_MAX_LENGTH] + '...' 

2689 raise AssertionError(f"Expected code to be unreachable, but got: {value}") 

2690 

2691 

2692if sys.version_info >= (3, 12): # 3.12+ 

2693 # dataclass_transform exists in 3.11 but lacks the frozen_default parameter 

2694 dataclass_transform = typing.dataclass_transform 

2695else: # <=3.11 

2696 def dataclass_transform( 

2697 *, 

2698 eq_default: bool = True, 

2699 order_default: bool = False, 

2700 kw_only_default: bool = False, 

2701 frozen_default: bool = False, 

2702 field_specifiers: typing.Tuple[ 

2703 typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]], 

2704 ... 

2705 ] = (), 

2706 **kwargs: typing.Any, 

2707 ) -> typing.Callable[[T], T]: 

2708 """Decorator that marks a function, class, or metaclass as providing 

2709 dataclass-like behavior. 

2710 

2711 Example: 

2712 

2713 from typing_extensions import dataclass_transform 

2714 

2715 _T = TypeVar("_T") 

2716 

2717 # Used on a decorator function 

2718 @dataclass_transform() 

2719 def create_model(cls: type[_T]) -> type[_T]: 

2720 ... 

2721 return cls 

2722 

2723 @create_model 

2724 class CustomerModel: 

2725 id: int 

2726 name: str 

2727 

2728 # Used on a base class 

2729 @dataclass_transform() 

2730 class ModelBase: ... 

2731 

2732 class CustomerModel(ModelBase): 

2733 id: int 

2734 name: str 

2735 

2736 # Used on a metaclass 

2737 @dataclass_transform() 

2738 class ModelMeta(type): ... 

2739 

2740 class ModelBase(metaclass=ModelMeta): ... 

2741 

2742 class CustomerModel(ModelBase): 

2743 id: int 

2744 name: str 

2745 

2746 Each of the ``CustomerModel`` classes defined in this example will now 

2747 behave similarly to a dataclass created with the ``@dataclasses.dataclass`` 

2748 decorator. For example, the type checker will synthesize an ``__init__`` 

2749 method. 

2750 

2751 The arguments to this decorator can be used to customize this behavior: 

2752 - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be 

2753 True or False if it is omitted by the caller. 

2754 - ``order_default`` indicates whether the ``order`` parameter is 

2755 assumed to be True or False if it is omitted by the caller. 

2756 - ``kw_only_default`` indicates whether the ``kw_only`` parameter is 

2757 assumed to be True or False if it is omitted by the caller. 

2758 - ``frozen_default`` indicates whether the ``frozen`` parameter is 

2759 assumed to be True or False if it is omitted by the caller. 

2760 - ``field_specifiers`` specifies a static list of supported classes 

2761 or functions that describe fields, similar to ``dataclasses.field()``. 

2762 

2763 At runtime, this decorator records its arguments in the 

2764 ``__dataclass_transform__`` attribute on the decorated object. 

2765 

2766 See PEP 681 for details. 

2767 

2768 """ 

2769 def decorator(cls_or_fn): 

2770 cls_or_fn.__dataclass_transform__ = { 

2771 "eq_default": eq_default, 

2772 "order_default": order_default, 

2773 "kw_only_default": kw_only_default, 

2774 "frozen_default": frozen_default, 

2775 "field_specifiers": field_specifiers, 

2776 "kwargs": kwargs, 

2777 } 

2778 return cls_or_fn 

2779 return decorator 

2780 

2781 
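# Illustrative sketch (editor's addition, not part of typing_extensions): the
# decorator's only runtime effect is recording its arguments; static checkers
# do the real work. The name _ModelBase_demo is hypothetical.
@dataclass_transform(kw_only_default=True)
class _ModelBase_demo: ...

assert _ModelBase_demo.__dataclass_transform__["kw_only_default"] is True
del _ModelBase_demo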

2782if hasattr(typing, "override"): # 3.12+ 

2783 override = typing.override 

2784else: # <=3.11 

2785 _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any]) 

2786 

2787 def override(arg: _F, /) -> _F: 

2788 """Indicate that a method is intended to override a method in a base class. 

2789 

2790 Usage: 

2791 

2792 class Base: 

2793 def method(self) -> None: 

2794 pass 

2795 

2796 class Child(Base): 

2797 @override 

2798 def method(self) -> None: 

2799 super().method() 

2800 

2801 When this decorator is applied to a method, the type checker will 

2802 validate that it overrides a method with the same name on a base class. 

2803 This helps prevent bugs that may occur when a base class is changed 

2804 without an equivalent change to a child class. 

2805 

2806 There is no runtime checking of these properties. The decorator 

2807 sets the ``__override__`` attribute to ``True`` on the decorated object 

2808 to allow runtime introspection. 

2809 

2810 See PEP 698 for details. 

2811 

2812 """ 

2813 try: 

2814 arg.__override__ = True 

2815 except (AttributeError, TypeError): 

2816 # Skip the attribute silently if it is not writable. 

2817 # AttributeError happens if the object has __slots__ or a 

2818 # read-only property, TypeError if it's a builtin class. 

2819 pass 

2820 return arg 

2821 

2822 
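# Illustrative sketch (editor's addition, not part of typing_extensions): the
# only runtime effect of @override is the __override__ marker; the actual
# override check is performed by static type checkers. The names _Base_demo
# and _Child_demo are hypothetical.
class _Base_demo:
    def method(self) -> None: ...

class _Child_demo(_Base_demo):
    @override
    def method(self) -> None: ...

assert _Child_demo.method.__override__ is True
del _Base_demo, _Child_demo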

2823# Python 3.13.3+ contains a fix for the wrapped __new__ 

2824if sys.version_info >= (3, 13, 3): 

2825 deprecated = warnings.deprecated 

2826else: 

2827 _T = typing.TypeVar("_T") 

2828 

2829 class deprecated: 

2830 """Indicate that a class, function or overload is deprecated. 

2831 

2832 When this decorator is applied to an object, the type checker 

2833 will generate a diagnostic on usage of the deprecated object. 

2834 

2835 Usage: 

2836 

2837 @deprecated("Use B instead") 

2838 class A: 

2839 pass 

2840 

2841 @deprecated("Use g instead") 

2842 def f(): 

2843 pass 

2844 

2845 @overload 

2846 @deprecated("int support is deprecated") 

2847 def g(x: int) -> int: ... 

2848 @overload 

2849 def g(x: str) -> int: ... 

2850 

2851 The warning specified by *category* will be emitted at runtime 

2852 on use of deprecated objects. For functions, that happens on calls; 

2853 for classes, on instantiation and on creation of subclasses. 

2854 If the *category* is ``None``, no warning is emitted at runtime. 

2855 The *stacklevel* determines where the 

2856 warning is emitted. If it is ``1`` (the default), the warning 

2857 is emitted at the direct caller of the deprecated object; if it 

2858 is higher, it is emitted further up the stack. 

2859 Static type checker behavior is not affected by the *category* 

2860 and *stacklevel* arguments. 

2861 

2862 The deprecation message passed to the decorator is saved in the 

2863 ``__deprecated__`` attribute on the decorated object. 

2864 If applied to an overload, the decorator 

2865 must be after the ``@overload`` decorator for the attribute to 

2866 exist on the overload as returned by ``get_overloads()``. 

2867 

2868 See PEP 702 for details. 

2869 

2870 """ 

2871 def __init__( 

2872 self, 

2873 message: str, 

2874 /, 

2875 *, 

2876 category: typing.Optional[typing.Type[Warning]] = DeprecationWarning, 

2877 stacklevel: int = 1, 

2878 ) -> None: 

2879 if not isinstance(message, str): 

2880 raise TypeError( 

2881 "Expected an object of type str for 'message', not " 

2882 f"{type(message).__name__!r}" 

2883 ) 

2884 self.message = message 

2885 self.category = category 

2886 self.stacklevel = stacklevel 

2887 

2888 def __call__(self, arg: _T, /) -> _T: 

2889 # Make sure the inner functions created below don't 

2890 # retain a reference to self. 

2891 msg = self.message 

2892 category = self.category 

2893 stacklevel = self.stacklevel 

2894 if category is None: 

2895 arg.__deprecated__ = msg 

2896 return arg 

2897 elif isinstance(arg, type): 

2898 import functools 

2899 from types import MethodType 

2900 

2901 original_new = arg.__new__ 

2902 

2903 @functools.wraps(original_new) 

2904 def __new__(cls, /, *args, **kwargs): 

2905 if cls is arg: 

2906 warnings.warn(msg, category=category, stacklevel=stacklevel + 1) 

2907 if original_new is not object.__new__: 

2908 return original_new(cls, *args, **kwargs) 

2909 # Mirrors a similar check in object.__new__. 

2910 elif cls.__init__ is object.__init__ and (args or kwargs): 

2911 raise TypeError(f"{cls.__name__}() takes no arguments") 

2912 else: 

2913 return original_new(cls) 

2914 

2915 arg.__new__ = staticmethod(__new__) 

2916 

2917 original_init_subclass = arg.__init_subclass__ 

2918 # We need slightly different behavior if __init_subclass__ 

2919 # is a bound method (likely if it was implemented in Python) 

2920 if isinstance(original_init_subclass, MethodType): 

2921 original_init_subclass = original_init_subclass.__func__ 

2922 

2923 @functools.wraps(original_init_subclass) 

2924 def __init_subclass__(*args, **kwargs): 

2925 warnings.warn(msg, category=category, stacklevel=stacklevel + 1) 

2926 return original_init_subclass(*args, **kwargs) 

2927 

2928 arg.__init_subclass__ = classmethod(__init_subclass__) 

2929 # Or otherwise, which likely means it's a builtin such as 

2930 # object's implementation of __init_subclass__. 

2931 else: 

2932 @functools.wraps(original_init_subclass) 

2933 def __init_subclass__(*args, **kwargs): 

2934 warnings.warn(msg, category=category, stacklevel=stacklevel + 1) 

2935 return original_init_subclass(*args, **kwargs) 

2936 

2937 arg.__init_subclass__ = __init_subclass__ 

2938 

2939 arg.__deprecated__ = __new__.__deprecated__ = msg 

2940 __init_subclass__.__deprecated__ = msg 

2941 return arg 

2942 elif callable(arg): 

2943 import asyncio.coroutines 

2944 import functools 

2945 import inspect 

2946 

2947 @functools.wraps(arg) 

2948 def wrapper(*args, **kwargs): 

2949 warnings.warn(msg, category=category, stacklevel=stacklevel + 1) 

2950 return arg(*args, **kwargs) 

2951 

2952 if asyncio.coroutines.iscoroutinefunction(arg): 

2953 if sys.version_info >= (3, 12): 

2954 wrapper = inspect.markcoroutinefunction(wrapper) 

2955 else: 

2956 wrapper._is_coroutine = asyncio.coroutines._is_coroutine 

2957 

2958 arg.__deprecated__ = wrapper.__deprecated__ = msg 

2959 return wrapper 

2960 else: 

2961 raise TypeError( 

2962 "@deprecated decorator with non-None category must be applied to " 

2963 f"a class or callable, not {arg!r}" 

2964 ) 

2965 
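# Illustrative sketch (editor's addition, not part of typing_extensions): with
# category=None the decorator only records the message; the wrapped object is
# returned unchanged and no warning is emitted at runtime. The name
# _old_func_demo is hypothetical.
@deprecated("use something newer", category=None)
def _old_func_demo() -> int:
    return 1

assert _old_func_demo.__deprecated__ == "use something newer"
assert _old_func_demo() == 1
del _old_func_demo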

2966if sys.version_info < (3, 10): 

2967 def _is_param_expr(arg): 

2968 return arg is ... or isinstance( 

2969 arg, (tuple, list, ParamSpec, _ConcatenateGenericAlias) 

2970 ) 

2971else: 

2972 def _is_param_expr(arg): 

2973 return arg is ... or isinstance( 

2974 arg, 

2975 ( 

2976 tuple, 

2977 list, 

2978 ParamSpec, 

2979 _ConcatenateGenericAlias, 

2980 typing._ConcatenateGenericAlias, 

2981 ), 

2982 ) 

2983 

2984 

2985# We have to do some monkey patching to deal with the dual nature of 

2986# Unpack/TypeVarTuple: 

2987# - We want Unpack to be a kind of TypeVar so it gets accepted in 

2988# Generic[Unpack[Ts]] 

2989# - We want it to *not* be treated as a TypeVar for the purposes of 

2990# counting generic parameters, so that when we subscript a generic, 

2991# the runtime doesn't try to substitute the Unpack with the subscripted type. 

2992if not hasattr(typing, "TypeVarTuple"): 

2993 def _check_generic(cls, parameters, elen=_marker): 

2994 """Check correct count for parameters of a generic cls (internal helper). 

2995 

2996 This gives a nice error message in case of count mismatch. 

2997 """ 

2998 # If substituting a single ParamSpec with multiple arguments 

2999 # we do not check the count 

3000 if (inspect.isclass(cls) and issubclass(cls, typing.Generic) 

3001 and len(cls.__parameters__) == 1 

3002 and isinstance(cls.__parameters__[0], ParamSpec) 

3003 and parameters 

3004 and not _is_param_expr(parameters[0]) 

3005 ): 

3006 # Generic modifies parameters variable, but here we cannot do this 

3007 return 

3008 

3009 if not elen: 

3010 raise TypeError(f"{cls} is not a generic class") 

3011 if elen is _marker: 

3012 if not hasattr(cls, "__parameters__") or not cls.__parameters__: 

3013 raise TypeError(f"{cls} is not a generic class") 

3014 elen = len(cls.__parameters__) 

3015 alen = len(parameters) 

3016 if alen != elen: 

3017 expect_val = elen 

3018 if hasattr(cls, "__parameters__"): 

3019 parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] 

3020 num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters) 

3021 if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples): 

3022 return 

3023 

3024 # deal with TypeVarLike defaults 

3025 # required TypeVarLikes cannot appear after a defaulted one. 

3026 if alen < elen: 

3027 # since we validate TypeVarLike default in _collect_type_vars 

3028 # or _collect_parameters we can safely check parameters[alen] 

3029 if ( 

3030 getattr(parameters[alen], '__default__', NoDefault) 

3031 is not NoDefault 

3032 ): 

3033 return 

3034 

3035 num_default_tv = sum(getattr(p, '__default__', NoDefault) 

3036 is not NoDefault for p in parameters) 

3037 

3038 elen -= num_default_tv 

3039 

3040 expect_val = f"at least {elen}" 

3041 

3042 things = "arguments" if sys.version_info >= (3, 10) else "parameters" 

3043 raise TypeError(f"Too {'many' if alen > elen else 'few'} {things}" 

3044 f" for {cls}; actual {alen}, expected {expect_val}") 

3045else: 

3046 # Python 3.11+ 

3047 

3048 def _check_generic(cls, parameters, elen): 

3049 """Check correct count for parameters of a generic cls (internal helper). 

3050 

3051 This gives a nice error message in case of count mismatch. 

3052 """ 

3053 if not elen: 

3054 raise TypeError(f"{cls} is not a generic class") 

3055 alen = len(parameters) 

3056 if alen != elen: 

3057 expect_val = elen 

3058 if hasattr(cls, "__parameters__"): 

3059 parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] 

3060 

3061 # deal with TypeVarLike defaults 

3062 # required TypeVarLikes cannot appear after a defaulted one. 

3063 if alen < elen: 

3064 # since we validate TypeVarLike default in _collect_type_vars 

3065 # or _collect_parameters we can safely check parameters[alen] 

3066 if ( 

3067 getattr(parameters[alen], '__default__', NoDefault) 

3068 is not NoDefault 

3069 ): 

3070 return 

3071 

3072 num_default_tv = sum(getattr(p, '__default__', NoDefault) 

3073 is not NoDefault for p in parameters) 

3074 

3075 elen -= num_default_tv 

3076 

3077 expect_val = f"at least {elen}" 

3078 

3079 raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments" 

3080 f" for {cls}; actual {alen}, expected {expect_val}") 

3081 

3082if not _PEP_696_IMPLEMENTED: 

3083 typing._check_generic = _check_generic 

3084 

3085 

3086def _has_generic_or_protocol_as_origin() -> bool: 

3087 try: 

3088 frame = sys._getframe(2) 

3089 # - Catch AttributeError: not all Python implementations have sys._getframe() 

3090 # - Catch ValueError: maybe we're called from an unexpected module 

3091 # and the call stack isn't deep enough 

3092 except (AttributeError, ValueError): 

3093 return False # err on the side of leniency 

3094 else: 

3095 # If we somehow get invoked from outside typing.py, 

3096 # also err on the side of leniency 

3097 if frame.f_globals.get("__name__") != "typing": 

3098 return False 

3099 origin = frame.f_locals.get("origin") 

3100 # Cannot use "in" because origin may be an object with a buggy __eq__ that 

3101 # throws an error. 

3102 return origin is typing.Generic or origin is Protocol or origin is typing.Protocol 

3103 

3104 

3105_TYPEVARTUPLE_TYPES = {TypeVarTuple, getattr(typing, "TypeVarTuple", None)} 

3106 

3107 

3108def _is_unpacked_typevartuple(x) -> bool: 

3109 if get_origin(x) is not Unpack: 

3110 return False 

3111 args = get_args(x) 

3112 return ( 

3113 bool(args) 

3114 and len(args) == 1 

3115 and type(args[0]) in _TYPEVARTUPLE_TYPES 

3116 ) 

3117 

3118 

3119# Python 3.11+ _collect_type_vars was renamed to _collect_parameters 

3120if hasattr(typing, '_collect_type_vars'): 

3121 def _collect_type_vars(types, typevar_types=None): 

 3122         """Collect all type variables contained in types in order of 

3123 first appearance (lexicographic order). For example:: 

3124 

3125 _collect_type_vars((T, List[S, T])) == (T, S) 

3126 """ 

3127 if typevar_types is None: 

3128 typevar_types = typing.TypeVar 

3129 tvars = [] 

3130 

3131 # A required TypeVarLike cannot appear after a TypeVarLike with a default 

3132 # if it was a direct call to `Generic[]` or `Protocol[]` 

3133 enforce_default_ordering = _has_generic_or_protocol_as_origin() 

3134 default_encountered = False 

3135 

3136 # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple 

3137 type_var_tuple_encountered = False 

3138 

3139 for t in types: 

3140 if _is_unpacked_typevartuple(t): 

3141 type_var_tuple_encountered = True 

3142 elif ( 

3143 isinstance(t, typevar_types) and not isinstance(t, _UnpackAlias) 

3144 and t not in tvars 

3145 ): 

3146 if enforce_default_ordering: 

3147 has_default = getattr(t, '__default__', NoDefault) is not NoDefault 

3148 if has_default: 

3149 if type_var_tuple_encountered: 

3150 raise TypeError('Type parameter with a default' 

3151 ' follows TypeVarTuple') 

3152 default_encountered = True 

3153 elif default_encountered: 

3154 raise TypeError(f'Type parameter {t!r} without a default' 

3155 ' follows type parameter with a default') 

3156 

3157 tvars.append(t) 

3158 if _should_collect_from_parameters(t): 

3159 tvars.extend([t for t in t.__parameters__ if t not in tvars]) 

3160 elif isinstance(t, tuple): 

3161 # Collect nested type_vars 

3162 # tuple wrapped by _prepare_paramspec_params(cls, params) 

3163 for x in t: 

3164 for collected in _collect_type_vars([x]): 

3165 if collected not in tvars: 

3166 tvars.append(collected) 

3167 return tuple(tvars) 

3168 

3169 typing._collect_type_vars = _collect_type_vars 

3170else: 

3171 def _collect_parameters(args): 

3172 """Collect all type variables and parameter specifications in args 

3173 in order of first appearance (lexicographic order). 

3174 

3175 For example:: 

3176 

3177 assert _collect_parameters((T, Callable[P, T])) == (T, P) 

3178 """ 

3179 parameters = [] 

3180 

3181 # A required TypeVarLike cannot appear after a TypeVarLike with default 

3182 # if it was a direct call to `Generic[]` or `Protocol[]` 

3183 enforce_default_ordering = _has_generic_or_protocol_as_origin() 

3184 default_encountered = False 

3185 

3186 # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple 

3187 type_var_tuple_encountered = False 

3188 

3189 for t in args: 

3190 if isinstance(t, type): 

3191 # We don't want __parameters__ descriptor of a bare Python class. 

3192 pass 

3193 elif isinstance(t, tuple): 

3194 # `t` might be a tuple, when `ParamSpec` is substituted with 

3195 # `[T, int]`, or `[int, *Ts]`, etc. 

3196 for x in t: 

3197 for collected in _collect_parameters([x]): 

3198 if collected not in parameters: 

3199 parameters.append(collected) 

3200 elif hasattr(t, '__typing_subst__'): 

3201 if t not in parameters: 

3202 if enforce_default_ordering: 

3203 has_default = ( 

3204 getattr(t, '__default__', NoDefault) is not NoDefault 

3205 ) 

3206 

3207 if type_var_tuple_encountered and has_default: 

3208 raise TypeError('Type parameter with a default' 

3209 ' follows TypeVarTuple') 

3210 

3211 if has_default: 

3212 default_encountered = True 

3213 elif default_encountered: 

3214 raise TypeError(f'Type parameter {t!r} without a default' 

3215 ' follows type parameter with a default') 

3216 

3217 parameters.append(t) 

3218 else: 

3219 if _is_unpacked_typevartuple(t): 

3220 type_var_tuple_encountered = True 

3221 for x in getattr(t, '__parameters__', ()): 

3222 if x not in parameters: 

3223 parameters.append(x) 

3224 

3225 return tuple(parameters) 

3226 

3227 if not _PEP_696_IMPLEMENTED: 

3228 typing._collect_parameters = _collect_parameters 

3229 
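# Illustrative sketch (editor's addition, not part of typing_extensions). The
# function below is never called at import time; its body shows what the
# monkey patching above enables for user code on Python 3.9/3.10 (and what the
# stdlib supports natively on 3.11+). All names inside it are hypothetical.
def _variadic_generic_demo() -> None:
    Ts = TypeVarTuple("Ts")

    class Array(typing.Generic[Unpack[Ts]]):
        """Variadic generic accepted thanks to the patched helpers above."""

    assert Array[int, str].__args__ == (int, str)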

3230# Backport typing.NamedTuple as it exists in Python 3.13. 

3231# In 3.11, the ability to define generic `NamedTuple`s was supported. 

3232# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8. 

3233# On 3.12, we added __orig_bases__ to call-based NamedTuples 

3234# On 3.13, we deprecated kwargs-based NamedTuples 

3235if sys.version_info >= (3, 13): 

3236 NamedTuple = typing.NamedTuple 

3237else: 

3238 def _make_nmtuple(name, types, module, defaults=()): 

3239 fields = [n for n, t in types] 

3240 annotations = {n: typing._type_check(t, f"field {n} annotation must be a type") 

3241 for n, t in types} 

3242 nm_tpl = collections.namedtuple(name, fields, 

3243 defaults=defaults, module=module) 

3244 nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations 

3245 return nm_tpl 

3246 

3247 _prohibited_namedtuple_fields = typing._prohibited 

3248 _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'}) 

3249 

3250 class _NamedTupleMeta(type): 

3251 def __new__(cls, typename, bases, ns): 

3252 assert _NamedTuple in bases 

3253 for base in bases: 

3254 if base is not _NamedTuple and base is not typing.Generic: 

3255 raise TypeError( 

3256 'can only inherit from a NamedTuple type and Generic') 

3257 bases = tuple(tuple if base is _NamedTuple else base for base in bases) 

3258 if "__annotations__" in ns: 

3259 types = ns["__annotations__"] 

3260 elif "__annotate__" in ns: 

3261 # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated 

3262 types = ns["__annotate__"](1) 

3263 else: 

3264 types = {} 

3265 default_names = [] 

3266 for field_name in types: 

3267 if field_name in ns: 

3268 default_names.append(field_name) 

3269 elif default_names: 

3270 raise TypeError(f"Non-default namedtuple field {field_name} " 

3271 f"cannot follow default field" 

3272 f"{'s' if len(default_names) > 1 else ''} " 

3273 f"{', '.join(default_names)}") 

3274 nm_tpl = _make_nmtuple( 

3275 typename, types.items(), 

3276 defaults=[ns[n] for n in default_names], 

3277 module=ns['__module__'] 

3278 ) 

3279 nm_tpl.__bases__ = bases 

3280 if typing.Generic in bases: 

3281 if hasattr(typing, '_generic_class_getitem'): # 3.12+ 

3282 nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem) 

3283 else: 

3284 class_getitem = typing.Generic.__class_getitem__.__func__ 

3285 nm_tpl.__class_getitem__ = classmethod(class_getitem) 

3286 # update from user namespace without overriding special namedtuple attributes 

3287 for key, val in ns.items(): 

3288 if key in _prohibited_namedtuple_fields: 

3289 raise AttributeError("Cannot overwrite NamedTuple attribute " + key) 

3290 elif key not in _special_namedtuple_fields: 

3291 if key not in nm_tpl._fields: 

3292 setattr(nm_tpl, key, ns[key]) 

3293 try: 

3294 set_name = type(val).__set_name__ 

3295 except AttributeError: 

3296 pass 

3297 else: 

3298 try: 

3299 set_name(val, nm_tpl, key) 

3300 except BaseException as e: 

3301 msg = ( 

3302 f"Error calling __set_name__ on {type(val).__name__!r} " 

3303 f"instance {key!r} in {typename!r}" 

3304 ) 

3305 # BaseException.add_note() existed on py311, 

3306 # but the __set_name__ machinery didn't start 

3307 # using add_note() until py312. 

3308 # Making sure exceptions are raised in the same way 

3309 # as in "normal" classes seems most important here. 

3310 if sys.version_info >= (3, 12): 

3311 e.add_note(msg) 

3312 raise 

3313 else: 

3314 raise RuntimeError(msg) from e 

3315 

3316 if typing.Generic in bases: 

3317 nm_tpl.__init_subclass__() 

3318 return nm_tpl 

3319 

3320 _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {}) 

3321 

3322 def _namedtuple_mro_entries(bases): 

3323 assert NamedTuple in bases 

3324 return (_NamedTuple,) 

3325 

3326 def NamedTuple(typename, fields=_marker, /, **kwargs): 

3327 """Typed version of namedtuple. 

3328 

3329 Usage:: 

3330 

3331 class Employee(NamedTuple): 

3332 name: str 

3333 id: int 

3334 

3335 This is equivalent to:: 

3336 

3337 Employee = collections.namedtuple('Employee', ['name', 'id']) 

3338 

3339 The resulting class has an extra __annotations__ attribute, giving a 

3340 dict that maps field names to types. (The field names are also in 

3341 the _fields attribute, which is part of the namedtuple API.) 

3342 An alternative equivalent functional syntax is also accepted:: 

3343 

3344 Employee = NamedTuple('Employee', [('name', str), ('id', int)]) 

3345 """ 

3346 if fields is _marker: 

3347 if kwargs: 

3348 deprecated_thing = "Creating NamedTuple classes using keyword arguments" 

3349 deprecation_msg = ( 

3350 "{name} is deprecated and will be disallowed in Python {remove}. " 

3351 "Use the class-based or functional syntax instead." 

3352 ) 

3353 else: 

3354 deprecated_thing = "Failing to pass a value for the 'fields' parameter" 

3355 example = f"`{typename} = NamedTuple({typename!r}, [])`" 

3356 deprecation_msg = ( 

3357 "{name} is deprecated and will be disallowed in Python {remove}. " 

3358 "To create a NamedTuple class with 0 fields " 

3359 "using the functional syntax, " 

3360 "pass an empty list, e.g. " 

3361 ) + example + "." 

3362 elif fields is None: 

3363 if kwargs: 

3364 raise TypeError( 

3365 "Cannot pass `None` as the 'fields' parameter " 

3366 "and also specify fields using keyword arguments" 

3367 ) 

3368 else: 

3369 deprecated_thing = "Passing `None` as the 'fields' parameter" 

3370 example = f"`{typename} = NamedTuple({typename!r}, [])`" 

3371 deprecation_msg = ( 

3372 "{name} is deprecated and will be disallowed in Python {remove}. " 

3373 "To create a NamedTuple class with 0 fields " 

3374 "using the functional syntax, " 

3375 "pass an empty list, e.g. " 

3376 ) + example + "." 

3377 elif kwargs: 

3378 raise TypeError("Either list of fields or keywords" 

3379 " can be provided to NamedTuple, not both") 

3380 if fields is _marker or fields is None: 

3381 warnings.warn( 

3382 deprecation_msg.format(name=deprecated_thing, remove="3.15"), 

3383 DeprecationWarning, 

3384 stacklevel=2, 

3385 ) 

3386 fields = kwargs.items() 

3387 nt = _make_nmtuple(typename, fields, module=_caller()) 

3388 nt.__orig_bases__ = (NamedTuple,) 

3389 return nt 

3390 

3391 NamedTuple.__mro_entries__ = _namedtuple_mro_entries 
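
# A minimal usage sketch of the backport above (illustrative; the names `T` and
# `Pair` are not part of this module). On interpreters that take this branch
# (<3.13), generic NamedTuples work even where the stdlib version disallowed them:
#
#     from typing import Generic, TypeVar
#     from typing_extensions import NamedTuple
#
#     T = TypeVar("T")
#
#     class Pair(NamedTuple, Generic[T]):
#         first: T
#         second: T
#
#     Pair(1, "a")        # Pair(first=1, second='a')
#     Pair[int]           # parameterization is supported via __class_getitem__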

3392 

3393 

3394if hasattr(collections.abc, "Buffer"): 

3395 Buffer = collections.abc.Buffer 

3396else: 

3397 class Buffer(abc.ABC): # noqa: B024 

3398 """Base class for classes that implement the buffer protocol. 

3399 

3400 The buffer protocol allows Python objects to expose a low-level 

3401 memory buffer interface. Before Python 3.12, it is not possible 

3402 to implement the buffer protocol in pure Python code, or even 

3403 to check whether a class implements the buffer protocol. In 

3404 Python 3.12 and higher, the ``__buffer__`` method allows access 

3405 to the buffer protocol from Python code, and the 

3406 ``collections.abc.Buffer`` ABC allows checking whether a class 

3407 implements the buffer protocol. 

3408 

3409 To indicate support for the buffer protocol in earlier versions, 

3410 inherit from this ABC, either in a stub file or at runtime, 

3411 or use ABC registration. This ABC provides no methods, because 

3412 there are no Python-accessible methods shared by pre-3.12 buffer

3413 classes. It is useful primarily for static checks. 

3414 

3415 """ 

3416 

3417 # As a courtesy, register the most common stdlib buffer classes. 

3418 Buffer.register(memoryview) 

3419 Buffer.register(bytearray) 

3420 Buffer.register(bytes) 
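
# A short sketch of how the ABC above is meant to be used (``MyZeroBuf`` is a
# hypothetical class, shown only to illustrate registration on <3.12):
#
#     from typing_extensions import Buffer
#
#     isinstance(b"data", Buffer)       # True: bytes is registered above
#                                       # (or implements __buffer__ on 3.12+)
#     isinstance("data", Buffer)        # False: str is not a buffer type
#
#     class MyZeroBuf:
#         ...                           # would wrap some C-level buffer in practice
#
#     Buffer.register(MyZeroBuf)        # opt in via ABC registration
#     isinstance(MyZeroBuf(), Buffer)   # True after registration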

3421 

3422 

3423# Backport of types.get_original_bases, available on 3.12+ in CPython 

3424if hasattr(_types, "get_original_bases"): 

3425 get_original_bases = _types.get_original_bases 

3426else: 

3427 def get_original_bases(cls, /): 

3428 """Return the class's "original" bases prior to modification by `__mro_entries__`. 

3429 

3430 Examples:: 

3431 

3432 from typing import TypeVar, Generic 

3433 from typing_extensions import NamedTuple, TypedDict 

3434 

3435 T = TypeVar("T") 

3436 class Foo(Generic[T]): ... 

3437 class Bar(Foo[int], float): ... 

3438 class Baz(list[str]): ... 

3439 Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) 

3440 Spam = TypedDict("Spam", {"a": int, "b": str}) 

3441 

3442 assert get_original_bases(Bar) == (Foo[int], float) 

3443 assert get_original_bases(Baz) == (list[str],) 

3444 assert get_original_bases(Eggs) == (NamedTuple,) 

3445 assert get_original_bases(Spam) == (TypedDict,) 

3446 assert get_original_bases(int) == (object,) 

3447 """ 

3448 try: 

3449 return cls.__dict__.get("__orig_bases__", cls.__bases__) 

3450 except AttributeError: 

3451 raise TypeError( 

3452 f'Expected an instance of type, not {type(cls).__name__!r}' 

3453 ) from None 

3454 

3455 

3456# NewType is a class on Python 3.10+, making it pickleable 

3457# The error message for subclassing instances of NewType was improved on 3.11+ 

3458if sys.version_info >= (3, 11): 

3459 NewType = typing.NewType 

3460else: 

3461 class NewType: 

3462 """NewType creates simple unique types with almost zero 

3463 runtime overhead. NewType(name, tp) is considered a subtype of tp 

3464 by static type checkers. At runtime, NewType(name, tp) returns 

3465 a dummy callable that simply returns its argument. Usage:: 

3466 UserId = NewType('UserId', int) 

3467 def name_by_id(user_id: UserId) -> str: 

3468 ... 

3469 UserId('user') # Fails type check 

3470 name_by_id(42) # Fails type check 

3471 name_by_id(UserId(42)) # OK 

3472 num = UserId(5) + 1 # type: int 

3473 """ 

3474 

3475 def __call__(self, obj, /): 

3476 return obj 

3477 

3478 def __init__(self, name, tp): 

3479 self.__qualname__ = name 

3480 if '.' in name: 

3481 name = name.rpartition('.')[-1] 

3482 self.__name__ = name 

3483 self.__supertype__ = tp 

3484 def_mod = _caller() 

3485 if def_mod != 'typing_extensions': 

3486 self.__module__ = def_mod 

3487 

3488 def __mro_entries__(self, bases): 

3489 # We defined __mro_entries__ to get a better error message 

3490 # if a user attempts to subclass a NewType instance. bpo-46170 

3491 supercls_name = self.__name__ 

3492 

3493 class Dummy: 

3494 def __init_subclass__(cls): 

3495 subcls_name = cls.__name__ 

3496 raise TypeError( 

3497 f"Cannot subclass an instance of NewType. " 

3498 f"Perhaps you were looking for: " 

3499 f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`" 

3500 ) 

3501 

3502 return (Dummy,) 

3503 

3504 def __repr__(self): 

3505 return f'{self.__module__}.{self.__qualname__}' 

3506 

3507 def __reduce__(self): 

3508 return self.__qualname__ 

3509 

3510 if sys.version_info >= (3, 10): 

3511 # PEP 604 methods 

3512 # It doesn't make sense to have these methods on Python <3.10 

3513 

3514 def __or__(self, other): 

3515 return typing.Union[self, other] 

3516 

3517 def __ror__(self, other): 

3518 return typing.Union[other, self] 
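
# An illustrative sketch of the behaviours backported here (the error text is
# produced by the __mro_entries__ hook above and applies to the <3.11 branch):
#
#     from typing_extensions import NewType
#
#     UserId = NewType("UserId", int)
#     UserId(5) + 1        # 6 -- at runtime NewType is just an identity callable
#     UserId | None        # works on 3.10+ thanks to __or__/__ror__ above
#
#     class AdminId(UserId): ...
#     # TypeError: Cannot subclass an instance of NewType. Perhaps you were
#     # looking for: `AdminId = NewType('AdminId', UserId)`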

3519 

3520 

3521if sys.version_info >= (3, 14): 

3522 TypeAliasType = typing.TypeAliasType 

3523# <=3.13 

3524else: 

3525 if sys.version_info >= (3, 12): 

3526 # 3.12-3.13 

3527 def _is_unionable(obj): 

3528 """Corresponds to is_unionable() in unionobject.c in CPython.""" 

3529 return obj is None or isinstance(obj, ( 

3530 type, 

3531 _types.GenericAlias, 

3532 _types.UnionType, 

3533 typing.TypeAliasType, 

3534 TypeAliasType, 

3535 )) 

3536 else: 

3537 # <=3.11 

3538 def _is_unionable(obj): 

3539 """Corresponds to is_unionable() in unionobject.c in CPython.""" 

3540 return obj is None or isinstance(obj, ( 

3541 type, 

3542 _types.GenericAlias, 

3543 _types.UnionType, 

3544 TypeAliasType, 

3545 )) 

3546 

3547 if sys.version_info < (3, 10): 

3548 # Copied and pasted from https://github.com/python/cpython/blob/986a4e1b6fcae7fe7a1d0a26aea446107dd58dd2/Objects/genericaliasobject.c#L568-L582, 

3549 # so that we emulate the behaviour of `types.GenericAlias` 

3550 # on the latest versions of CPython 

3551 _ATTRIBUTE_DELEGATION_EXCLUSIONS = frozenset({ 

3552 "__class__", 

3553 "__bases__", 

3554 "__origin__", 

3555 "__args__", 

3556 "__unpacked__", 

3557 "__parameters__", 

3558 "__typing_unpacked_tuple_args__", 

3559 "__mro_entries__", 

3560 "__reduce_ex__", 

3561 "__reduce__", 

3562 "__copy__", 

3563 "__deepcopy__", 

3564 }) 

3565 

3566 class _TypeAliasGenericAlias(typing._GenericAlias, _root=True): 

3567 def __getattr__(self, attr): 

3568 if attr in _ATTRIBUTE_DELEGATION_EXCLUSIONS: 

3569 return object.__getattr__(self, attr) 

3570 return getattr(self.__origin__, attr) 

3571 

3572 

3573 class TypeAliasType: 

3574 """Create named, parameterized type aliases. 

3575 

3576 This provides a backport of the new `type` statement in Python 3.12: 

3577 

3578 type ListOrSet[T] = list[T] | set[T] 

3579 

3580 is equivalent to: 

3581 

3582 T = TypeVar("T") 

3583 ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,)) 

3584 

3585 The name ListOrSet can then be used as an alias for the type it refers to. 

3586 

3587 The type_params argument should contain all the type parameters used 

3588 in the value of the type alias. If the alias is not generic, this 

3589 argument is omitted. 

3590 

3591 Static type checkers should only support type aliases declared using 

3592 TypeAliasType that follow these rules: 

3593 

3594 - The first argument (the name) must be a string literal. 

3595 - The TypeAliasType instance must be immediately assigned to a variable 

3596 of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid, 

3597 as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)'). 

3598 

3599 """ 

3600 

3601 def __init__(self, name: str, value, *, type_params=()): 

3602 if not isinstance(name, str): 

3603 raise TypeError("TypeAliasType name must be a string") 

3604 if not isinstance(type_params, tuple): 

3605 raise TypeError("type_params must be a tuple") 

3606 self.__value__ = value 

3607 self.__type_params__ = type_params 

3608 

3609 default_value_encountered = False 

3610 parameters = [] 

3611 for type_param in type_params: 

3612 if ( 

3613 not isinstance(type_param, (TypeVar, TypeVarTuple, ParamSpec)) 

3614 # <=3.11 

3615 # Unpack Backport passes isinstance(type_param, TypeVar) 

3616 or _is_unpack(type_param) 

3617 ): 

3618 raise TypeError(f"Expected a type param, got {type_param!r}") 

3619 has_default = ( 

3620 getattr(type_param, '__default__', NoDefault) is not NoDefault 

3621 ) 

3622 if default_value_encountered and not has_default: 

3623 raise TypeError(f"non-default type parameter '{type_param!r}'" 

3624 " follows default type parameter") 

3625 if has_default: 

3626 default_value_encountered = True 

3627 if isinstance(type_param, TypeVarTuple): 

3628 parameters.extend(type_param) 

3629 else: 

3630 parameters.append(type_param) 

3631 self.__parameters__ = tuple(parameters) 

3632 def_mod = _caller() 

3633 if def_mod != 'typing_extensions': 

3634 self.__module__ = def_mod 

3635 # Setting this attribute closes the TypeAliasType from further modification 

3636 self.__name__ = name 

3637 

3638 def __setattr__(self, name: str, value: object, /) -> None: 

3639 if hasattr(self, "__name__"): 

3640 self._raise_attribute_error(name) 

3641 super().__setattr__(name, value) 

3642 

3643 def __delattr__(self, name: str, /) -> Never: 

3644 self._raise_attribute_error(name) 

3645 

3646 def _raise_attribute_error(self, name: str) -> Never: 

3647 # Match the Python 3.12 error messages exactly 

3648 if name == "__name__": 

3649 raise AttributeError("readonly attribute") 

3650 elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}: 

3651 raise AttributeError( 

3652 f"attribute '{name}' of 'typing.TypeAliasType' objects " 

3653 "is not writable" 

3654 ) 

3655 else: 

3656 raise AttributeError( 

3657 f"'typing.TypeAliasType' object has no attribute '{name}'" 

3658 ) 

3659 

3660 def __repr__(self) -> str: 

3661 return self.__name__ 

3662 

3663 if sys.version_info < (3, 11): 

3664 def _check_single_param(self, param, recursion=0): 

3665 # Allow [], [int], [int, str], [int, ...], [int, T] 

3666 if param is ...: 

3667 return ... 

3668 if param is None: 

3669 return None 

3670 # Note: on <=3.9, _ConcatenateGenericAlias inherits from list

3671 if isinstance(param, list) and recursion == 0: 

3672 return [self._check_single_param(arg, recursion+1) 

3673 for arg in param] 

3674 return typing._type_check( 

3675 param, f'Subscripting {self.__name__} requires a type.' 

3676 ) 

3677 

3678 def _check_parameters(self, parameters): 

3679 if sys.version_info < (3, 11): 

3680 return tuple( 

3681 self._check_single_param(item) 

3682 for item in parameters 

3683 ) 

3684 return tuple(typing._type_check( 

3685 item, f'Subscripting {self.__name__} requires a type.' 

3686 ) 

3687 for item in parameters 

3688 ) 

3689 

3690 def __getitem__(self, parameters): 

3691 if not self.__type_params__: 

3692 raise TypeError("Only generic type aliases are subscriptable") 

3693 if not isinstance(parameters, tuple): 

3694 parameters = (parameters,) 

3695 # On 3.9, types.GenericAlias creates problems with Concatenate, so it is only used on 3.10+

3696 if sys.version_info >= (3, 10): 

3697 return _types.GenericAlias(self, parameters) 

3698 type_vars = _collect_type_vars(parameters) 

3699 parameters = self._check_parameters(parameters) 

3700 alias = _TypeAliasGenericAlias(self, parameters) 

3701 # alias.__parameters__ is not complete if Concatenate is present 

3702 # as it is converted to a list from which no parameters are extracted. 

3703 if alias.__parameters__ != type_vars: 

3704 alias.__parameters__ = type_vars 

3705 return alias 

3706 

3707 def __reduce__(self): 

3708 return self.__name__ 

3709 

3710 def __init_subclass__(cls, *args, **kwargs): 

3711 raise TypeError( 

3712 "type 'typing_extensions.TypeAliasType' is not an acceptable base type" 

3713 ) 

3714 

3715 # The presence of this method convinces typing._type_check 

3716 # that TypeAliasTypes are types. 

3717 def __call__(self): 

3718 raise TypeError("Type alias is not callable") 

3719 

3720 if sys.version_info >= (3, 10): 

3721 def __or__(self, right): 

3722 # For forward compatibility with 3.12, reject Unions 

3723 # that are not accepted by the built-in Union. 

3724 if not _is_unionable(right): 

3725 return NotImplemented 

3726 return typing.Union[self, right] 

3727 

3728 def __ror__(self, left): 

3729 if not _is_unionable(left): 

3730 return NotImplemented 

3731 return typing.Union[left, self] 
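
# A minimal usage sketch of the backported TypeAliasType (``T`` and ``ListOrSet``
# are illustrative names; unlike the 3.12 ``type`` statement, a constructor call
# receives an already-evaluated value):
#
#     from typing import List, Set, TypeVar, Union
#     from typing_extensions import TypeAliasType
#
#     T = TypeVar("T")
#     ListOrSet = TypeAliasType("ListOrSet", Union[List[T], Set[T]],
#                               type_params=(T,))
#
#     ListOrSet            # repr() is just the alias name
#     ListOrSet.__value__  # the stored value: Union[List[T], Set[T]]
#     ListOrSet[int]       # subscriptable because the alias is generic
#     ListOrSet | None     # allowed on 3.10+ via __or__ above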

3732 

3733 

3734if hasattr(typing, "is_protocol"): 

3735 is_protocol = typing.is_protocol 

3736 get_protocol_members = typing.get_protocol_members 

3737else: 

3738 def is_protocol(tp: type, /) -> bool: 

3739 """Return True if the given type is a Protocol. 

3740 

3741 Example:: 

3742 

3743 >>> from typing_extensions import Protocol, is_protocol 

3744 >>> class P(Protocol): 

3745 ... def a(self) -> str: ... 

3746 ... b: int 

3747 >>> is_protocol(P) 

3748 True 

3749 >>> is_protocol(int) 

3750 False 

3751 """ 

3752 return ( 

3753 isinstance(tp, type) 

3754 and getattr(tp, '_is_protocol', False) 

3755 and tp is not Protocol 

3756 and tp is not typing.Protocol 

3757 ) 

3758 

3759 def get_protocol_members(tp: type, /) -> typing.FrozenSet[str]: 

3760 """Return the set of members defined in a Protocol. 

3761 

3762 Example:: 

3763 

3764 >>> from typing_extensions import Protocol, get_protocol_members 

3765 >>> class P(Protocol): 

3766 ... def a(self) -> str: ... 

3767 ... b: int 

3768 >>> get_protocol_members(P) 

3769 frozenset({'a', 'b'}) 

3770 

3771 Raise a TypeError for arguments that are not Protocols. 

3772 """ 

3773 if not is_protocol(tp): 

3774 raise TypeError(f'{tp!r} is not a Protocol') 

3775 if hasattr(tp, '__protocol_attrs__'): 

3776 return frozenset(tp.__protocol_attrs__) 

3777 return frozenset(_get_protocol_attrs(tp)) 

3778 

3779 

3780if hasattr(typing, "Doc"): 

3781 Doc = typing.Doc 

3782else: 

3783 class Doc: 

3784 """Define the documentation of a type annotation using ``Annotated``, to be 

3785 used in class attributes, function and method parameters, return values, 

3786 and variables. 

3787 

3788 The value should be a positional-only string literal to allow static tools 

3789 like editors and documentation generators to use it. 

3790 

3791 This complements docstrings. 

3792 

3793 The string value passed is available in the attribute ``documentation``. 

3794 

3795 Example:: 

3796 

3797 >>> from typing_extensions import Annotated, Doc 

3798 >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ... 

3799 """ 

3800 def __init__(self, documentation: str, /) -> None: 

3801 self.documentation = documentation 

3802 

3803 def __repr__(self) -> str: 

3804 return f"Doc({self.documentation!r})" 

3805 

3806 def __hash__(self) -> int: 

3807 return hash(self.documentation) 

3808 

3809 def __eq__(self, other: object) -> bool: 

3810 if not isinstance(other, Doc): 

3811 return NotImplemented 

3812 return self.documentation == other.documentation 
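
# A short sketch of how a tool might read this metadata back out of an
# ``Annotated`` annotation; ``hi`` mirrors the docstring example above and
# ``get_type_hints``/``get_args`` are the regular typing(-extensions) helpers:
#
#     from typing_extensions import Annotated, Doc, get_args, get_type_hints
#
#     def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ...
#
#     hints = get_type_hints(hi, include_extras=True)
#     get_args(hints["to"])[1].documentation    # 'Who to say hi to'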

3813 

3814 

3815_CapsuleType = getattr(_types, "CapsuleType", None) 

3816 

3817if _CapsuleType is None: 

3818 try: 

3819 import _socket 

3820 except ImportError: 

3821 pass 

3822 else: 

3823 _CAPI = getattr(_socket, "CAPI", None) 

3824 if _CAPI is not None: 

3825 _CapsuleType = type(_CAPI) 

3826 

3827if _CapsuleType is not None: 

3828 CapsuleType = _CapsuleType 

3829 __all__.append("CapsuleType") 
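
# ``CapsuleType`` is only exported when a capsule type object could be found
# (``types.CapsuleType`` on 3.13+, otherwise the ``_socket.CAPI`` capsule probed
# above), so portable code should guard the import. Illustrative sketch:
#
#     try:
#         from typing_extensions import CapsuleType
#     except ImportError:
#         CapsuleType = None        # hypothetical fallback for this sketch
#
#     if CapsuleType is not None:
#         import _socket
#         isinstance(_socket.CAPI, CapsuleType)    # True where CAPI is exposed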

3830 

3831 

3832if sys.version_info >= (3, 14):

3833 from annotationlib import Format, get_annotations 

3834else: 

3835 class Format(enum.IntEnum): 

3836 VALUE = 1 

3837 VALUE_WITH_FAKE_GLOBALS = 2 

3838 FORWARDREF = 3 

3839 STRING = 4 

3840 

3841 def get_annotations(obj, *, globals=None, locals=None, eval_str=False, 

3842 format=Format.VALUE): 

3843 """Compute the annotations dict for an object. 

3844 

3845 obj may be a callable, class, or module. 

3846 Passing in an object of any other type raises TypeError. 

3847 

3848 Returns a dict. get_annotations() returns a new dict every time 

3849 it's called; calling it twice on the same object will return two 

3850 different but equivalent dicts. 

3851 

3852 This is a backport of `inspect.get_annotations`, which has been 

3853 in the standard library since Python 3.10. See the standard library 

3854 documentation for more: 

3855 

3856 https://docs.python.org/3/library/inspect.html#inspect.get_annotations 

3857 

3858 This backport adds the *format* argument introduced by PEP 649. The 

3859 three formats supported are: 

3860 * VALUE: the annotations are returned as-is. This is the default and 

3861 it is compatible with the behavior on previous Python versions. 

3862 * FORWARDREF: return annotations as-is if possible, but replace any 

3863 undefined names with ForwardRef objects. The implementation proposed by 

3864 PEP 649 relies on language changes that cannot be backported; the 

3865 typing-extensions implementation simply returns the same result as VALUE. 

3866 * STRING: return annotations as strings, in a format close to the original 

3867 source. Again, this behavior cannot be replicated directly in a backport. 

3868 As an approximation, typing-extensions retrieves the annotations under 

3869 VALUE semantics and then stringifies them. 

3870 

3871 The purpose of this backport is to allow users who would like to use 

3872 FORWARDREF or STRING semantics once PEP 649 is implemented, but who also 

3873 want to support earlier Python versions, to simply write: 

3874 

3875 typing_extensions.get_annotations(obj, format=Format.FORWARDREF) 

3876 

3877 """ 

3878 format = Format(format) 

3879 if format is Format.VALUE_WITH_FAKE_GLOBALS: 

3880 raise ValueError( 

3881 "The VALUE_WITH_FAKE_GLOBALS format is for internal use only" 

3882 ) 

3883 

3884 if eval_str and format is not Format.VALUE: 

3885 raise ValueError("eval_str=True is only supported with format=Format.VALUE") 

3886 

3887 if isinstance(obj, type): 

3888 # class 

3889 obj_dict = getattr(obj, '__dict__', None) 

3890 if obj_dict and hasattr(obj_dict, 'get'): 

3891 ann = obj_dict.get('__annotations__', None) 

3892 if isinstance(ann, _types.GetSetDescriptorType): 

3893 ann = None 

3894 else: 

3895 ann = None 

3896 

3897 obj_globals = None 

3898 module_name = getattr(obj, '__module__', None) 

3899 if module_name: 

3900 module = sys.modules.get(module_name, None) 

3901 if module: 

3902 obj_globals = getattr(module, '__dict__', None) 

3903 obj_locals = dict(vars(obj)) 

3904 unwrap = obj 

3905 elif isinstance(obj, _types.ModuleType): 

3906 # module 

3907 ann = getattr(obj, '__annotations__', None) 

3908 obj_globals = obj.__dict__ 

3909 obj_locals = None 

3910 unwrap = None 

3911 elif callable(obj): 

3912 # this includes types.FunctionType, types.BuiltinFunctionType,

3913 # types.BuiltinMethodType, functools.partial, functools.singledispatch, 

3914 # "class funclike" from Lib/test/test_inspect... on and on it goes. 

3915 ann = getattr(obj, '__annotations__', None) 

3916 obj_globals = getattr(obj, '__globals__', None) 

3917 obj_locals = None 

3918 unwrap = obj 

3919 elif hasattr(obj, '__annotations__'): 

3920 ann = obj.__annotations__ 

3921 obj_globals = obj_locals = unwrap = None 

3922 else: 

3923 raise TypeError(f"{obj!r} is not a module, class, or callable.") 

3924 

3925 if ann is None: 

3926 return {} 

3927 

3928 if not isinstance(ann, dict): 

3929 raise ValueError(f"{obj!r}.__annotations__ is neither a dict nor None") 

3930 

3931 if not ann: 

3932 return {} 

3933 

3934 if not eval_str: 

3935 if format is Format.STRING: 

3936 return { 

3937 key: value if isinstance(value, str) else typing._type_repr(value) 

3938 for key, value in ann.items() 

3939 } 

3940 return dict(ann) 

3941 

3942 if unwrap is not None: 

3943 while True: 

3944 if hasattr(unwrap, '__wrapped__'): 

3945 unwrap = unwrap.__wrapped__ 

3946 continue 

3947 if isinstance(unwrap, functools.partial): 

3948 unwrap = unwrap.func 

3949 continue 

3950 break 

3951 if hasattr(unwrap, "__globals__"): 

3952 obj_globals = unwrap.__globals__ 

3953 

3954 if globals is None: 

3955 globals = obj_globals 

3956 if locals is None: 

3957 locals = obj_locals or {} 

3958 

3959 # "Inject" type parameters into the local namespace 

3960 # (unless they are shadowed by assignments *in* the local namespace), 

3961 # as a way of emulating annotation scopes when calling `eval()` 

3962 if type_params := getattr(obj, "__type_params__", ()): 

3963 locals = {param.__name__: param for param in type_params} | locals 

3964 

3965 return_value = {key: 

3966 value if not isinstance(value, str) else eval(value, globals, locals) 

3967 for key, value in ann.items() } 

3968 return return_value 
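
# A small sketch of the *format* parameter documented above (``greet`` is an
# illustrative function; the reprs below assume annotations are not stringized
# via ``from __future__ import annotations``):
#
#     from typing_extensions import Format, get_annotations
#
#     def greet(name: str, times: int = 1) -> None: ...
#
#     get_annotations(greet)
#     # {'name': <class 'str'>, 'times': <class 'int'>, 'return': None}
#     get_annotations(greet, format=Format.STRING)
#     # {'name': 'str', 'times': 'int', 'return': 'None'}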

3969 

3970 

3971if hasattr(typing, "evaluate_forward_ref"): 

3972 evaluate_forward_ref = typing.evaluate_forward_ref 

3973else: 

3974 # Implements annotationlib.ForwardRef.evaluate 

3975 def _eval_with_owner( 

3976 forward_ref, *, owner=None, globals=None, locals=None, type_params=None 

3977 ): 

3978 if forward_ref.__forward_evaluated__: 

3979 return forward_ref.__forward_value__ 

3980 if getattr(forward_ref, "__cell__", None) is not None: 

3981 try: 

3982 value = forward_ref.__cell__.cell_contents 

3983 except ValueError: 

3984 pass 

3985 else: 

3986 forward_ref.__forward_evaluated__ = True 

3987 forward_ref.__forward_value__ = value 

3988 return value 

3989 if owner is None: 

3990 owner = getattr(forward_ref, "__owner__", None) 

3991 

3992 if ( 

3993 globals is None 

3994 and getattr(forward_ref, "__forward_module__", None) is not None 

3995 ): 

3996 globals = getattr( 

3997 sys.modules.get(forward_ref.__forward_module__, None), "__dict__", None 

3998 ) 

3999 if globals is None: 

4000 globals = getattr(forward_ref, "__globals__", None) 

4001 if globals is None: 

4002 if isinstance(owner, type): 

4003 module_name = getattr(owner, "__module__", None) 

4004 if module_name: 

4005 module = sys.modules.get(module_name, None) 

4006 if module: 

4007 globals = getattr(module, "__dict__", None) 

4008 elif isinstance(owner, _types.ModuleType): 

4009 globals = getattr(owner, "__dict__", None) 

4010 elif callable(owner): 

4011 globals = getattr(owner, "__globals__", None) 

4012 

4013 # If we pass None to eval() below, the globals of this module are used. 

4014 if globals is None: 

4015 globals = {} 

4016 

4017 if locals is None: 

4018 locals = {} 

4019 if isinstance(owner, type): 

4020 locals.update(vars(owner)) 

4021 

4022 if type_params is None and owner is not None: 

4023 # "Inject" type parameters into the local namespace 

4024 # (unless they are shadowed by assignments *in* the local namespace), 

4025 # as a way of emulating annotation scopes when calling `eval()` 

4026 type_params = getattr(owner, "__type_params__", None) 

4027 

4028 # type parameters require some special handling, 

4029 # as they exist in their own scope 

4030 # but `eval()` does not have a dedicated parameter for that scope. 

4031 # For classes, names in type parameter scopes should override 

4032 # names in the global scope (which here are called `localns`!), 

4033 # but should in turn be overridden by names in the class scope 

4034 # (which here are called `globalns`!) 

4035 if type_params is not None: 

4036 globals = dict(globals) 

4037 locals = dict(locals) 

4038 for param in type_params: 

4039 param_name = param.__name__ 

4040 if ( 

4041 _FORWARD_REF_HAS_CLASS and not forward_ref.__forward_is_class__ 

4042 ) or param_name not in globals: 

4043 globals[param_name] = param 

4044 locals.pop(param_name, None) 

4045 

4046 arg = forward_ref.__forward_arg__ 

4047 if arg.isidentifier() and not keyword.iskeyword(arg): 

4048 if arg in locals: 

4049 value = locals[arg] 

4050 elif arg in globals: 

4051 value = globals[arg] 

4052 elif hasattr(builtins, arg): 

4053 return getattr(builtins, arg) 

4054 else: 

4055 raise NameError(arg) 

4056 else: 

4057 code = forward_ref.__forward_code__ 

4058 value = eval(code, globals, locals) 

4059 forward_ref.__forward_evaluated__ = True 

4060 forward_ref.__forward_value__ = value 

4061 return value 

4062 

4063 def evaluate_forward_ref( 

4064 forward_ref, 

4065 *, 

4066 owner=None, 

4067 globals=None, 

4068 locals=None, 

4069 type_params=None, 

4070 format=None, 

4071 _recursive_guard=frozenset(), 

4072 ): 

4073 """Evaluate a forward reference as a type hint. 

4074 

4075 This is similar to calling the ForwardRef.evaluate() method, 

4076 but unlike that method, evaluate_forward_ref() also: 

4077 

4078 * Recursively evaluates forward references nested within the type hint. 

4079 * Rejects certain objects that are not valid type hints. 

4080 * Replaces type hints that evaluate to None with types.NoneType. 

4081 * Supports the *FORWARDREF* and *STRING* formats. 

4082 

4083 *forward_ref* must be an instance of ForwardRef. *owner*, if given, 

4084 should be the object that holds the annotations that the forward reference 

4085 derived from, such as a module, class object, or function. It is used to 

4086 infer the namespaces to use for looking up names. *globals* and *locals* 

4087 can also be explicitly given to provide the global and local namespaces. 

4088 *type_params* is a tuple of type parameters that are in scope when 

4089 evaluating the forward reference. This parameter must be provided (though 

4090 it may be an empty tuple) if *owner* is not given and the forward reference 

4091 does not already have an owner set. *format* specifies the format of the 

4092 annotation and is a member of the annotationlib.Format enum. 

4093 

4094 """ 

4095 if format == Format.STRING: 

4096 return forward_ref.__forward_arg__ 

4097 if forward_ref.__forward_arg__ in _recursive_guard: 

4098 return forward_ref 

4099 

4100 # Evaluate the forward reference 

4101 try: 

4102 value = _eval_with_owner( 

4103 forward_ref, 

4104 owner=owner, 

4105 globals=globals, 

4106 locals=locals, 

4107 type_params=type_params, 

4108 ) 

4109 except NameError: 

4110 if format == Format.FORWARDREF: 

4111 return forward_ref 

4112 else: 

4113 raise 

4114 

4115 if isinstance(value, str): 

4116 value = ForwardRef(value) 

4117 

4118 # Recursively evaluate the type 

4119 if isinstance(value, ForwardRef): 

4120 if getattr(value, "__forward_module__", True) is not None: 

4121 globals = None 

4122 return evaluate_forward_ref( 

4123 value, 

4124 globals=globals, 

4125 locals=locals, 

4126 type_params=type_params, owner=owner, 

4127 _recursive_guard=_recursive_guard, format=format 

4128 ) 

4129 if sys.version_info < (3, 12, 5) and type_params: 

4130 # Make use of type_params 

4131 locals = dict(locals) if locals else {} 

4132 for tvar in type_params: 

4133 if tvar.__name__ not in locals: # let's not overwrite something present

4134 locals[tvar.__name__] = tvar 

4135 if sys.version_info < (3, 12, 5): 

4136 return typing._eval_type( 

4137 value, 

4138 globals, 

4139 locals, 

4140 recursive_guard=_recursive_guard | {forward_ref.__forward_arg__}, 

4141 ) 

4142 else: 

4143 return typing._eval_type( 

4144 value, 

4145 globals, 

4146 locals, 

4147 type_params, 

4148 recursive_guard=_recursive_guard | {forward_ref.__forward_arg__}, 

4149 ) 

4150 

4151 

4152class Sentinel: 

4153 """Create a unique sentinel object. 

4154 

4155 *name* should be the name of the variable to which the return value shall be assigned. 

4156 

4157 *repr*, if supplied, will be used for the repr of the sentinel object. 

4158 If not provided, "<name>" will be used. 

4159 """ 

4160 

4161 def __init__( 

4162 self, 

4163 name: str, 

4164 repr: typing.Optional[str] = None, 

4165 ): 

4166 self._name = name 

4167 self._repr = repr if repr is not None else f'<{name}>' 

4168 

4169 def __repr__(self): 

4170 return self._repr 

4171 

4172 if sys.version_info < (3, 11): 

4173 # The presence of this method convinces typing._type_check 

4174 # that Sentinels are types. 

4175 def __call__(self, *args, **kwargs): 

4176 raise TypeError(f"{type(self).__name__!r} object is not callable") 

4177 

4178 if sys.version_info >= (3, 10): 

4179 def __or__(self, other): 

4180 return typing.Union[self, other] 

4181 

4182 def __ror__(self, other): 

4183 return typing.Union[other, self] 

4184 

4185 def __getstate__(self): 

4186 raise TypeError(f"Cannot pickle {type(self).__name__!r} object") 
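
# A typical usage sketch (``MISSING`` and ``connect`` are illustrative names):
#
#     from typing_extensions import Sentinel
#
#     MISSING = Sentinel("MISSING")
#
#     def connect(timeout: "float | MISSING" = MISSING) -> None:
#         if timeout is MISSING:     # identity check distinguishes "not passed"
#             timeout = 5.0
#
#     MISSING                        # repr: <MISSING>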

4187 

4188 

4189# Aliases for items that are in typing in all supported versions. 

4190# We use hasattr() checks so this library will continue to import on 

4191# future versions of Python that may remove these names. 

4192_typing_names = [ 

4193 "AbstractSet", 

4194 "AnyStr", 

4195 "BinaryIO", 

4196 "Callable", 

4197 "Collection", 

4198 "Container", 

4199 "Dict", 

4200 "FrozenSet", 

4201 "Hashable", 

4202 "IO", 

4203 "ItemsView", 

4204 "Iterable", 

4205 "Iterator", 

4206 "KeysView", 

4207 "List", 

4208 "Mapping", 

4209 "MappingView", 

4210 "Match", 

4211 "MutableMapping", 

4212 "MutableSequence", 

4213 "MutableSet", 

4214 "Optional", 

4215 "Pattern", 

4216 "Reversible", 

4217 "Sequence", 

4218 "Set", 

4219 "Sized", 

4220 "TextIO", 

4221 "Tuple", 

4222 "Union", 

4223 "ValuesView", 

4224 "cast", 

4225 "no_type_check", 

4226 "no_type_check_decorator", 

4227 # This is private, but it was defined by typing_extensions for a long time 

4228 # and some users rely on it. 

4229 "_AnnotatedAlias", 

4230] 

4231globals().update( 

4232 {name: getattr(typing, name) for name in _typing_names if hasattr(typing, name)} 

4233) 

4234# These are defined unconditionally because they are used in 

4235# typing-extensions itself. 

4236Generic = typing.Generic 

4237ForwardRef = typing.ForwardRef 

4238Annotated = typing.Annotated
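
# Net effect of the aliasing above (illustrative): these names are the very
# same objects as their ``typing`` counterparts, e.g.
#
#     import typing
#     import typing_extensions
#
#     typing_extensions.Mapping is typing.Mapping    # True, via globals().update
#     typing_extensions.Generic is typing.Generic    # True, assigned directly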