Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/typing_extensions.py: 30%


1768 statements  

1import abc 

2import builtins 

3import collections 

4import collections.abc 

5import contextlib 

6import enum 

7import functools 

8import inspect 

9import io 

10import keyword 

11import operator 

12import sys 

13import types as _types 

14import typing 

15import warnings 

16 

17if sys.version_info >= (3, 14): 

18 import annotationlib 

19 

20__all__ = [ 

21 # Super-special typing primitives. 

22 'Any', 

23 'ClassVar', 

24 'Concatenate', 

25 'Final', 

26 'LiteralString', 

27 'ParamSpec', 

28 'ParamSpecArgs', 

29 'ParamSpecKwargs', 

30 'Self', 

31 'Type', 

32 'TypeVar', 

33 'TypeVarTuple', 

34 'Unpack', 

35 

36 # ABCs (from collections.abc). 

37 'Awaitable', 

38 'AsyncIterator', 

39 'AsyncIterable', 

40 'Coroutine', 

41 'AsyncGenerator', 

42 'AsyncContextManager', 

43 'Buffer', 

44 'ChainMap', 

45 

46 # Concrete collection types. 

47 'ContextManager', 

48 'Counter', 

49 'Deque', 

50 'DefaultDict', 

51 'NamedTuple', 

52 'OrderedDict', 

53 'TypedDict', 

54 

55 # Structural checks, a.k.a. protocols. 

56 'SupportsAbs', 

57 'SupportsBytes', 

58 'SupportsComplex', 

59 'SupportsFloat', 

60 'SupportsIndex', 

61 'SupportsInt', 

62 'SupportsRound', 

63 'Reader', 

64 'Writer', 

65 

66 # One-off things. 

67 'Annotated', 

68 'assert_never', 

69 'assert_type', 

70 'clear_overloads', 

71 'dataclass_transform', 

72 'deprecated', 

73 'Doc', 

74 'evaluate_forward_ref', 

75 'get_overloads', 

76 'final', 

77 'Format', 

78 'get_annotations', 

79 'get_args', 

80 'get_origin', 

81 'get_original_bases', 

82 'get_protocol_members', 

83 'get_type_hints', 

84 'IntVar', 

85 'is_protocol', 

86 'is_typeddict', 

87 'Literal', 

88 'NewType', 

89 'overload', 

90 'override', 

91 'Protocol', 

92 'Sentinel', 

93 'reveal_type', 

94 'runtime', 

95 'runtime_checkable', 

96 'Text', 

97 'TypeAlias', 

98 'TypeAliasType', 

99 'TypeForm', 

100 'TypeGuard', 

101 'TypeIs', 

102 'TYPE_CHECKING', 

103 'Never', 

104 'NoReturn', 

105 'ReadOnly', 

106 'Required', 

107 'NotRequired', 

108 'NoDefault', 

109 'NoExtraItems', 

110 

111 # Pure aliases, have always been in typing 

112 'AbstractSet', 

113 'AnyStr', 

114 'BinaryIO', 

115 'Callable', 

116 'Collection', 

117 'Container', 

118 'Dict', 

119 'ForwardRef', 

120 'FrozenSet', 

121 'Generator', 

122 'Generic', 

123 'Hashable', 

124 'IO', 

125 'ItemsView', 

126 'Iterable', 

127 'Iterator', 

128 'KeysView', 

129 'List', 

130 'Mapping', 

131 'MappingView', 

132 'Match', 

133 'MutableMapping', 

134 'MutableSequence', 

135 'MutableSet', 

136 'Optional', 

137 'Pattern', 

138 'Reversible', 

139 'Sequence', 

140 'Set', 

141 'Sized', 

142 'TextIO', 

143 'Tuple', 

144 'Union', 

145 'ValuesView', 

146 'cast', 

147 'no_type_check', 

148 'no_type_check_decorator', 

149] 

150 

151# for backward compatibility 

152PEP_560 = True 

153GenericMeta = type 

154_PEP_696_IMPLEMENTED = sys.version_info >= (3, 13, 0, "beta") 

155 

156# Added with bpo-45166 to 3.10.1+ and some 3.9 versions 

157_FORWARD_REF_HAS_CLASS = "__forward_is_class__" in typing.ForwardRef.__slots__ 

158 

159# The functions below are modified copies of typing internal helpers. 

160# They are needed by _ProtocolMeta and they provide support for PEP 646. 

161 

162 

163class _Sentinel: 

164 def __repr__(self): 

165 return "<sentinel>" 

166 

167 

168_marker = _Sentinel() 

169 

170 

171if sys.version_info >= (3, 10): 

172 def _should_collect_from_parameters(t): 

173 return isinstance( 

174 t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType) 

175 ) 

176else: 

177 def _should_collect_from_parameters(t): 

178 return isinstance(t, (typing._GenericAlias, _types.GenericAlias)) 

179 

180 

181NoReturn = typing.NoReturn 

182 

183# Some unconstrained type variables. These are used by the container types. 

184# (These are not for export.) 

185T = typing.TypeVar('T') # Any type. 

186KT = typing.TypeVar('KT') # Key type. 

187VT = typing.TypeVar('VT') # Value type. 

188T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers. 

189T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant. 

190 

191 

192if sys.version_info >= (3, 11): 

193 from typing import Any 

194else: 

195 

196 class _AnyMeta(type): 

197 def __instancecheck__(self, obj): 

198 if self is Any: 

199 raise TypeError("typing_extensions.Any cannot be used with isinstance()") 

200 return super().__instancecheck__(obj) 

201 

202 def __repr__(self): 

203 if self is Any: 

204 return "typing_extensions.Any" 

205 return super().__repr__() 

206 

207 class Any(metaclass=_AnyMeta): 

208 """Special type indicating an unconstrained type. 

209 - Any is compatible with every type. 

210 - Any is assumed to have all methods. 

211 - All values are assumed to be instances of Any. 

212 Note that all the above statements are true from the point of view of 

213 static type checkers. At runtime, Any should not be used with instance 

214 checks. 

215 """ 

216 def __new__(cls, *args, **kwargs): 

217 if cls is Any: 

218 raise TypeError("Any cannot be instantiated") 

219 return super().__new__(cls, *args, **kwargs) 

220 

221 
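# Illustrative usage sketch (not part of the upstream module; the class name
# `Proxy` is hypothetical). It shows what the backport above provides on
# Python < 3.11: `Any` is subclassable, but rejects runtime instance checks.
#
#     from typing_extensions import Any
#
#     class Proxy(Any):          # subclassing Any is allowed on all supported versions
#         ...
#
#     isinstance(1, Any)         # raises TypeError: Any is not for runtime checks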

222ClassVar = typing.ClassVar 

223 

224# Vendored from cpython typing._SpecialForm 

225# Having a separate class means that instances will not be rejected by 

226# typing._type_check. 

227class _SpecialForm(typing._Final, _root=True): 

228 __slots__ = ('_name', '__doc__', '_getitem') 

229 

230 def __init__(self, getitem): 

231 self._getitem = getitem 

232 self._name = getitem.__name__ 

233 self.__doc__ = getitem.__doc__ 

234 

235 def __getattr__(self, item): 

236 if item in {'__name__', '__qualname__'}: 

237 return self._name 

238 

239 raise AttributeError(item) 

240 

241 def __mro_entries__(self, bases): 

242 raise TypeError(f"Cannot subclass {self!r}") 

243 

244 def __repr__(self): 

245 return f'typing_extensions.{self._name}' 

246 

247 def __reduce__(self): 

248 return self._name 

249 

250 def __call__(self, *args, **kwds): 

251 raise TypeError(f"Cannot instantiate {self!r}") 

252 

253 def __or__(self, other): 

254 return typing.Union[self, other] 

255 

256 def __ror__(self, other): 

257 return typing.Union[other, self] 

258 

259 def __instancecheck__(self, obj): 

260 raise TypeError(f"{self} cannot be used with isinstance()") 

261 

262 def __subclasscheck__(self, cls): 

263 raise TypeError(f"{self} cannot be used with issubclass()") 

264 

265 @typing._tp_cache 

266 def __getitem__(self, parameters): 

267 return self._getitem(self, parameters) 

268 

269 

270# Note that inheriting from this class means that the object will be 

271# rejected by typing._type_check, so do not use it if the special form 

272# is arguably valid as a type by itself. 

273class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): 

274 def __repr__(self): 

275 return 'typing_extensions.' + self._name 

276 

277 

278Final = typing.Final 

279 

280if sys.version_info >= (3, 11): 

281 final = typing.final 

282else: 

283 # @final exists in 3.8+, but we backport it for all versions 

284 # before 3.11 to keep support for the __final__ attribute. 

285 # See https://bugs.python.org/issue46342 

286 def final(f): 

287 """This decorator can be used to indicate to type checkers that 

288 the decorated method cannot be overridden, and decorated class 

289 cannot be subclassed. For example: 

290 

291 class Base: 

292 @final 

293 def done(self) -> None: 

294 ... 

295 class Sub(Base): 

296 def done(self) -> None: # Error reported by type checker 

297 ... 

298 @final 

299 class Leaf: 

300 ... 

301 class Other(Leaf): # Error reported by type checker 

302 ... 

303 

304 There is no runtime checking of these properties. The decorator 

305 sets the ``__final__`` attribute to ``True`` on the decorated object 

306 to allow runtime introspection. 

307 """ 

308 try: 

309 f.__final__ = True 

310 except (AttributeError, TypeError): 

311 # Skip the attribute silently if it is not writable. 

312 # AttributeError happens if the object has __slots__ or a 

313 # read-only property, TypeError if it's a builtin class. 

314 pass 

315 return f 

316 

317 
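# Illustrative sketch (hypothetical function name) of the runtime introspection
# hook described in the docstring above:
#
#     from typing_extensions import final
#
#     @final
#     def shutdown() -> None: ...
#
#     shutdown.__final__         # True; set purely for runtime introspection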

318def IntVar(name): 

319 return typing.TypeVar(name) 

320 

321 

322# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8 

323if sys.version_info >= (3, 10, 1): 

324 Literal = typing.Literal 

325else: 

326 def _flatten_literal_params(parameters): 

327 """An internal helper for Literal creation: flatten Literals among parameters""" 

328 params = [] 

329 for p in parameters: 

330 if isinstance(p, _LiteralGenericAlias): 

331 params.extend(p.__args__) 

332 else: 

333 params.append(p) 

334 return tuple(params) 

335 

336 def _value_and_type_iter(params): 

337 for p in params: 

338 yield p, type(p) 

339 

340 class _LiteralGenericAlias(typing._GenericAlias, _root=True): 

341 def __eq__(self, other): 

342 if not isinstance(other, _LiteralGenericAlias): 

343 return NotImplemented 

344 these_args_deduped = set(_value_and_type_iter(self.__args__)) 

345 other_args_deduped = set(_value_and_type_iter(other.__args__)) 

346 return these_args_deduped == other_args_deduped 

347 

348 def __hash__(self): 

349 return hash(frozenset(_value_and_type_iter(self.__args__))) 

350 

351 class _LiteralForm(_ExtensionsSpecialForm, _root=True): 

352 def __init__(self, doc: str): 

353 self._name = 'Literal' 

354 self._doc = self.__doc__ = doc 

355 

356 def __getitem__(self, parameters): 

357 if not isinstance(parameters, tuple): 

358 parameters = (parameters,) 

359 

360 parameters = _flatten_literal_params(parameters) 

361 

362 val_type_pairs = list(_value_and_type_iter(parameters)) 

363 try: 

364 deduped_pairs = set(val_type_pairs) 

365 except TypeError: 

366 # unhashable parameters 

367 pass 

368 else: 

369 # similar logic to typing._deduplicate on Python 3.9+ 

370 if len(deduped_pairs) < len(val_type_pairs): 

371 new_parameters = [] 

372 for pair in val_type_pairs: 

373 if pair in deduped_pairs: 

374 new_parameters.append(pair[0]) 

375 deduped_pairs.remove(pair) 

376 assert not deduped_pairs, deduped_pairs 

377 parameters = tuple(new_parameters) 

378 

379 return _LiteralGenericAlias(self, parameters) 

380 

381 Literal = _LiteralForm(doc="""\ 

382 A type that can be used to indicate to type checkers 

383 that the corresponding value has a value literally equivalent 

384 to the provided parameter. For example: 

385 

386 var: Literal[4] = 4 

387 

388 The type checker understands that 'var' is literally equal to 

389 the value 4 and no other value. 

390 

391 Literal[...] cannot be subclassed. There is no runtime 

392 checking verifying that the parameter is actually a value 

393 instead of a type.""") 

394 

395 
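# Illustrative sketch (not part of the upstream module) of the behaviour the
# fixed Literal implementation provides: deduplication and type-aware equality.
#
#     from typing_extensions import Literal
#
#     Literal[1, 2, 1] == Literal[1, 2]   # True: duplicate parameters are removed
#     Literal[1] == Literal[True]         # False: values compare by value *and* type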

396_overload_dummy = typing._overload_dummy 

397 

398 

399if hasattr(typing, "get_overloads"): # 3.11+ 

400 overload = typing.overload 

401 get_overloads = typing.get_overloads 

402 clear_overloads = typing.clear_overloads 

403else: 

404 # {module: {qualname: {firstlineno: func}}} 

405 _overload_registry = collections.defaultdict( 

406 functools.partial(collections.defaultdict, dict) 

407 ) 

408 

409 def overload(func): 

410 """Decorator for overloaded functions/methods. 

411 

412 In a stub file, place two or more stub definitions for the same 

413 function in a row, each decorated with @overload. For example: 

414 

415 @overload 

416 def utf8(value: None) -> None: ... 

417 @overload 

418 def utf8(value: bytes) -> bytes: ... 

419 @overload 

420 def utf8(value: str) -> bytes: ... 

421 

422 In a non-stub file (i.e. a regular .py file), do the same but 

423 follow it with an implementation. The implementation should *not* 

424 be decorated with @overload. For example: 

425 

426 @overload 

427 def utf8(value: None) -> None: ... 

428 @overload 

429 def utf8(value: bytes) -> bytes: ... 

430 @overload 

431 def utf8(value: str) -> bytes: ... 

432 def utf8(value): 

433 # implementation goes here 

434 

435 The overloads for a function can be retrieved at runtime using the 

436 get_overloads() function. 

437 """ 

438 # classmethod and staticmethod 

439 f = getattr(func, "__func__", func) 

440 try: 

441 _overload_registry[f.__module__][f.__qualname__][ 

442 f.__code__.co_firstlineno 

443 ] = func 

444 except AttributeError: 

445 # Not a normal function; ignore. 

446 pass 

447 return _overload_dummy 

448 

449 def get_overloads(func): 

450 """Return all defined overloads for *func* as a sequence.""" 

451 # classmethod and staticmethod 

452 f = getattr(func, "__func__", func) 

453 if f.__module__ not in _overload_registry: 

454 return [] 

455 mod_dict = _overload_registry[f.__module__] 

456 if f.__qualname__ not in mod_dict: 

457 return [] 

458 return list(mod_dict[f.__qualname__].values()) 

459 

460 def clear_overloads(): 

461 """Clear all overloads in the registry.""" 

462 _overload_registry.clear() 

463 

464 
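# Illustrative sketch of the overload registry above (the `parse` functions are
# hypothetical, for orientation only):
#
#     from typing_extensions import overload, get_overloads
#
#     @overload
#     def parse(x: int) -> int: ...
#     @overload
#     def parse(x: str) -> str: ...
#     def parse(x):              # implementation; not decorated with @overload
#         return x
#
#     len(get_overloads(parse))  # 2: both stubs were recorded by module and qualname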

465# This is not a real generic class. Don't use outside annotations. 

466Type = typing.Type 

467 

468# Various ABCs mimicking those in collections.abc. 

469# A few are simply re-exported for completeness. 

470Awaitable = typing.Awaitable 

471Coroutine = typing.Coroutine 

472AsyncIterable = typing.AsyncIterable 

473AsyncIterator = typing.AsyncIterator 

474Deque = typing.Deque 

475DefaultDict = typing.DefaultDict 

476OrderedDict = typing.OrderedDict 

477Counter = typing.Counter 

478ChainMap = typing.ChainMap 

479Text = typing.Text 

480TYPE_CHECKING = typing.TYPE_CHECKING 

481 

482 

483if sys.version_info >= (3, 13, 0, "beta"): 

484 from typing import AsyncContextManager, AsyncGenerator, ContextManager, Generator 

485else: 

486 def _is_dunder(attr): 

487 return attr.startswith('__') and attr.endswith('__') 

488 

489 

490 class _SpecialGenericAlias(typing._SpecialGenericAlias, _root=True): 

491 def __init__(self, origin, nparams, *, inst=True, name=None, defaults=()): 

492 super().__init__(origin, nparams, inst=inst, name=name) 

493 self._defaults = defaults 

494 

495 def __setattr__(self, attr, val): 

496 allowed_attrs = {'_name', '_inst', '_nparams', '_defaults'} 

497 if _is_dunder(attr) or attr in allowed_attrs: 

498 object.__setattr__(self, attr, val) 

499 else: 

500 setattr(self.__origin__, attr, val) 

501 

502 @typing._tp_cache 

503 def __getitem__(self, params): 

504 if not isinstance(params, tuple): 

505 params = (params,) 

506 msg = "Parameters to generic types must be types." 

507 params = tuple(typing._type_check(p, msg) for p in params) 

508 if ( 

509 self._defaults 

510 and len(params) < self._nparams 

511 and len(params) + len(self._defaults) >= self._nparams 

512 ): 

513 params = (*params, *self._defaults[len(params) - self._nparams:]) 

514 actual_len = len(params) 

515 

516 if actual_len != self._nparams: 

517 if self._defaults: 

518 expected = f"at least {self._nparams - len(self._defaults)}" 

519 else: 

520 expected = str(self._nparams) 

521 if not self._nparams: 

522 raise TypeError(f"{self} is not a generic class") 

523 raise TypeError( 

524 f"Too {'many' if actual_len > self._nparams else 'few'}" 

525 f" arguments for {self};" 

526 f" actual {actual_len}, expected {expected}" 

527 ) 

528 return self.copy_with(params) 

529 

530 _NoneType = type(None) 

531 Generator = _SpecialGenericAlias( 

532 collections.abc.Generator, 3, defaults=(_NoneType, _NoneType) 

533 ) 

534 AsyncGenerator = _SpecialGenericAlias( 

535 collections.abc.AsyncGenerator, 2, defaults=(_NoneType,) 

536 ) 

537 ContextManager = _SpecialGenericAlias( 

538 contextlib.AbstractContextManager, 

539 2, 

540 name="ContextManager", 

541 defaults=(typing.Optional[bool],) 

542 ) 

543 AsyncContextManager = _SpecialGenericAlias( 

544 contextlib.AbstractAsyncContextManager, 

545 2, 

546 name="AsyncContextManager", 

547 defaults=(typing.Optional[bool],) 

548 ) 

549 

550 
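# Illustrative sketch of what the `defaults` machinery above buys: trailing
# type parameters may be omitted and are filled in automatically.
#
#     from typing_extensions import Generator, ContextManager
#
#     Generator[int]        # treated as Generator[int, None, None]
#     ContextManager[None]  # treated as ContextManager[None, Optional[bool]]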

551_PROTO_ALLOWLIST = { 

552 'collections.abc': [ 

553 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable', 

554 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer', 

555 ], 

556 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'], 

557 'typing_extensions': ['Buffer'], 

558} 

559 

560 

561_EXCLUDED_ATTRS = frozenset(typing.EXCLUDED_ATTRIBUTES) | { 

562 "__match_args__", "__protocol_attrs__", "__non_callable_proto_members__", 

563 "__final__", 

564} 

565 

566 

567def _get_protocol_attrs(cls): 

568 attrs = set() 

569 for base in cls.__mro__[:-1]: # without object 

570 if base.__name__ in {'Protocol', 'Generic'}: 

571 continue 

572 annotations = getattr(base, '__annotations__', {}) 

573 for attr in (*base.__dict__, *annotations): 

574 if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS): 

575 attrs.add(attr) 

576 return attrs 

577 

578 

579def _caller(depth=1, default='__main__'): 

580 try: 

581 return sys._getframemodulename(depth + 1) or default 

582 except AttributeError: # For platforms without _getframemodulename() 

583 pass 

584 try: 

585 return sys._getframe(depth + 1).f_globals.get('__name__', default) 

586 except (AttributeError, ValueError): # For platforms without _getframe() 

587 pass 

588 return None 

589 

590 

591# `__match_args__` attribute was removed from protocol members in 3.13, 

592# we want to backport this change to older Python versions. 

593if sys.version_info >= (3, 13): 

594 Protocol = typing.Protocol 

595else: 

596 def _allow_reckless_class_checks(depth=2): 

597 """Allow instance and class checks for special stdlib modules. 

598 The abc and functools modules indiscriminately call isinstance() and 

599 issubclass() on the whole MRO of a user class, which may contain protocols. 

600 """ 

601 return _caller(depth) in {'abc', 'functools', None} 

602 

603 def _no_init(self, *args, **kwargs): 

604 if type(self)._is_protocol: 

605 raise TypeError('Protocols cannot be instantiated') 

606 

607 def _type_check_issubclass_arg_1(arg): 

608 """Raise TypeError if `arg` is not an instance of `type` 

609 in `issubclass(arg, <protocol>)`. 

610 

611 In most cases, this is verified by type.__subclasscheck__. 

612 Checking it again unnecessarily would slow down issubclass() checks, 

613 so, we don't perform this check unless we absolutely have to. 

614 

615 For various error paths, however, 

616 we want to ensure that *this* error message is shown to the user 

617 where relevant, rather than a typing.py-specific error message. 

618 """ 

619 if not isinstance(arg, type): 

620 # Same error message as for issubclass(1, int). 

621 raise TypeError('issubclass() arg 1 must be a class') 

622 

623 # Inheriting from typing._ProtocolMeta isn't actually desirable, 

624 # but is necessary to allow typing.Protocol and typing_extensions.Protocol 

625 # to mix without getting TypeErrors about "metaclass conflict" 

626 class _ProtocolMeta(type(typing.Protocol)): 

627 # This metaclass is somewhat unfortunate, 

628 # but is necessary for several reasons... 

629 # 

630 # NOTE: DO NOT call super() in any methods in this class 

631 # That would call the methods on typing._ProtocolMeta on Python <=3.11 

632 # and those are slow 

633 def __new__(mcls, name, bases, namespace, **kwargs): 

634 if name == "Protocol" and len(bases) < 2: 

635 pass 

636 elif {Protocol, typing.Protocol} & set(bases): 

637 for base in bases: 

638 if not ( 

639 base in {object, typing.Generic, Protocol, typing.Protocol} 

640 or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, []) 

641 or is_protocol(base) 

642 ): 

643 raise TypeError( 

644 f"Protocols can only inherit from other protocols, " 

645 f"got {base!r}" 

646 ) 

647 return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs) 

648 

649 def __init__(cls, *args, **kwargs): 

650 abc.ABCMeta.__init__(cls, *args, **kwargs) 

651 if getattr(cls, "_is_protocol", False): 

652 cls.__protocol_attrs__ = _get_protocol_attrs(cls) 

653 

654 def __subclasscheck__(cls, other): 

655 if cls is Protocol: 

656 return type.__subclasscheck__(cls, other) 

657 if ( 

658 getattr(cls, '_is_protocol', False) 

659 and not _allow_reckless_class_checks() 

660 ): 

661 if not getattr(cls, '_is_runtime_protocol', False): 

662 _type_check_issubclass_arg_1(other) 

663 raise TypeError( 

664 "Instance and class checks can only be used with " 

665 "@runtime_checkable protocols" 

666 ) 

667 if ( 

668 # this attribute is set by @runtime_checkable: 

669 cls.__non_callable_proto_members__ 

670 and cls.__dict__.get("__subclasshook__") is _proto_hook 

671 ): 

672 _type_check_issubclass_arg_1(other) 

673 non_method_attrs = sorted(cls.__non_callable_proto_members__) 

674 raise TypeError( 

675 "Protocols with non-method members don't support issubclass()." 

676 f" Non-method members: {str(non_method_attrs)[1:-1]}." 

677 ) 

678 return abc.ABCMeta.__subclasscheck__(cls, other) 

679 

680 def __instancecheck__(cls, instance): 

681 # We need this method for situations where attributes are 

682 # assigned in __init__. 

683 if cls is Protocol: 

684 return type.__instancecheck__(cls, instance) 

685 if not getattr(cls, "_is_protocol", False): 

686 # i.e., it's a concrete subclass of a protocol 

687 return abc.ABCMeta.__instancecheck__(cls, instance) 

688 

689 if ( 

690 not getattr(cls, '_is_runtime_protocol', False) and 

691 not _allow_reckless_class_checks() 

692 ): 

693 raise TypeError("Instance and class checks can only be used with" 

694 " @runtime_checkable protocols") 

695 

696 if abc.ABCMeta.__instancecheck__(cls, instance): 

697 return True 

698 

699 for attr in cls.__protocol_attrs__: 

700 try: 

701 val = inspect.getattr_static(instance, attr) 

702 except AttributeError: 

703 break 

704 # this attribute is set by @runtime_checkable: 

705 if val is None and attr not in cls.__non_callable_proto_members__: 

706 break 

707 else: 

708 return True 

709 

710 return False 

711 

712 def __eq__(cls, other): 

713 # Hack so that typing.Generic.__class_getitem__ 

714 # treats typing_extensions.Protocol 

715 # as equivalent to typing.Protocol 

716 if abc.ABCMeta.__eq__(cls, other) is True: 

717 return True 

718 return cls is Protocol and other is typing.Protocol 

719 

720 # This has to be defined, or the abc-module cache 

721 # complains about classes with this metaclass being unhashable, 

722 # if we define only __eq__! 

723 def __hash__(cls) -> int: 

724 return type.__hash__(cls) 

725 

726 @classmethod 

727 def _proto_hook(cls, other): 

728 if not cls.__dict__.get('_is_protocol', False): 

729 return NotImplemented 

730 

731 for attr in cls.__protocol_attrs__: 

732 for base in other.__mro__: 

733 # Check if the member appears in the class dictionary... 

734 if attr in base.__dict__: 

735 if base.__dict__[attr] is None: 

736 return NotImplemented 

737 break 

738 

739 # ...or in annotations, if it is a sub-protocol. 

740 annotations = getattr(base, '__annotations__', {}) 

741 if ( 

742 isinstance(annotations, collections.abc.Mapping) 

743 and attr in annotations 

744 and is_protocol(other) 

745 ): 

746 break 

747 else: 

748 return NotImplemented 

749 return True 

750 

751 class Protocol(typing.Generic, metaclass=_ProtocolMeta): 

752 __doc__ = typing.Protocol.__doc__ 

753 __slots__ = () 

754 _is_protocol = True 

755 _is_runtime_protocol = False 

756 

757 def __init_subclass__(cls, *args, **kwargs): 

758 super().__init_subclass__(*args, **kwargs) 

759 

760 # Determine if this is a protocol or a concrete subclass. 

761 if not cls.__dict__.get('_is_protocol', False): 

762 cls._is_protocol = any(b is Protocol for b in cls.__bases__) 

763 

764 # Set (or override) the protocol subclass hook. 

765 if '__subclasshook__' not in cls.__dict__: 

766 cls.__subclasshook__ = _proto_hook 

767 

768 # Prohibit instantiation for protocol classes 

769 if cls._is_protocol and cls.__init__ is Protocol.__init__: 

770 cls.__init__ = _no_init 

771 

772 
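# Illustrative sketch of the behaviour enforced by the backported Protocol
# above (the `Closeable` protocol is hypothetical):
#
#     from typing_extensions import Protocol
#
#     class Closeable(Protocol):
#         def close(self) -> None: ...
#
#     Closeable()                      # TypeError: protocols cannot be instantiated
#     isinstance(object(), Closeable)  # TypeError: requires @runtime_checkable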

773if sys.version_info >= (3, 13): 

774 runtime_checkable = typing.runtime_checkable 

775else: 

776 def runtime_checkable(cls): 

777 """Mark a protocol class as a runtime protocol. 

778 

779 Such a protocol can be used with isinstance() and issubclass(). 

780 Raise TypeError if applied to a non-protocol class. 

781 This allows a simple-minded structural check very similar to 

782 one trick ponies in collections.abc such as Iterable. 

783 

784 For example:: 

785 

786 @runtime_checkable 

787 class Closable(Protocol): 

788 def close(self): ... 

789 

790 assert isinstance(open('/some/file'), Closable) 

791 

792 Warning: this will check only the presence of the required methods, 

793 not their type signatures! 

794 """ 

795 if not issubclass(cls, typing.Generic) or not getattr(cls, '_is_protocol', False): 

796 raise TypeError(f'@runtime_checkable can be only applied to protocol classes,' 

797 f' got {cls!r}') 

798 cls._is_runtime_protocol = True 

799 

800 # typing.Protocol classes on <=3.11 break if we execute this block, 

801 # because typing.Protocol classes on <=3.11 don't have a 

802 # `__protocol_attrs__` attribute, and this block relies on the 

803 # `__protocol_attrs__` attribute. Meanwhile, typing.Protocol classes on 3.12.2+ 

804 # break if we *don't* execute this block, because *they* assume that all 

805 # protocol classes have a `__non_callable_proto_members__` attribute 

806 # (which this block sets) 

807 if isinstance(cls, _ProtocolMeta) or sys.version_info >= (3, 12, 2): 

808 # PEP 544 prohibits using issubclass() 

809 # with protocols that have non-method members. 

810 # See gh-113320 for why we compute this attribute here, 

811 # rather than in `_ProtocolMeta.__init__` 

812 cls.__non_callable_proto_members__ = set() 

813 for attr in cls.__protocol_attrs__: 

814 try: 

815 is_callable = callable(getattr(cls, attr, None)) 

816 except Exception as e: 

817 raise TypeError( 

818 f"Failed to determine whether protocol member {attr!r} " 

819 "is a method member" 

820 ) from e 

821 else: 

822 if not is_callable: 

823 cls.__non_callable_proto_members__.add(attr) 

824 

825 return cls 

826 

827 

828# The "runtime" alias exists for backwards compatibility. 

829runtime = runtime_checkable 

830 

831 
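# Illustrative sketch of the restriction enforced in `__subclasscheck__` above
# (the protocol and argument values are hypothetical):
#
#     from typing_extensions import Protocol, runtime_checkable
#
#     @runtime_checkable
#     class Named(Protocol):
#         name: str                      # a non-method member
#
#     isinstance(object(), Named)        # False: structural isinstance() is allowed
#     issubclass(str, Named)             # TypeError: non-method members block issubclass()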

832# Our version of runtime-checkable protocols is faster on Python <=3.11 

833if sys.version_info >= (3, 12): 

834 SupportsInt = typing.SupportsInt 

835 SupportsFloat = typing.SupportsFloat 

836 SupportsComplex = typing.SupportsComplex 

837 SupportsBytes = typing.SupportsBytes 

838 SupportsIndex = typing.SupportsIndex 

839 SupportsAbs = typing.SupportsAbs 

840 SupportsRound = typing.SupportsRound 

841else: 

842 @runtime_checkable 

843 class SupportsInt(Protocol): 

844 """An ABC with one abstract method __int__.""" 

845 __slots__ = () 

846 

847 @abc.abstractmethod 

848 def __int__(self) -> int: 

849 pass 

850 

851 @runtime_checkable 

852 class SupportsFloat(Protocol): 

853 """An ABC with one abstract method __float__.""" 

854 __slots__ = () 

855 

856 @abc.abstractmethod 

857 def __float__(self) -> float: 

858 pass 

859 

860 @runtime_checkable 

861 class SupportsComplex(Protocol): 

862 """An ABC with one abstract method __complex__.""" 

863 __slots__ = () 

864 

865 @abc.abstractmethod 

866 def __complex__(self) -> complex: 

867 pass 

868 

869 @runtime_checkable 

870 class SupportsBytes(Protocol): 

871 """An ABC with one abstract method __bytes__.""" 

872 __slots__ = () 

873 

874 @abc.abstractmethod 

875 def __bytes__(self) -> bytes: 

876 pass 

877 

878 @runtime_checkable 

879 class SupportsIndex(Protocol): 

880 __slots__ = () 

881 

882 @abc.abstractmethod 

883 def __index__(self) -> int: 

884 pass 

885 

886 @runtime_checkable 

887 class SupportsAbs(Protocol[T_co]): 

888 """ 

889 An ABC with one abstract method __abs__ that is covariant in its return type. 

890 """ 

891 __slots__ = () 

892 

893 @abc.abstractmethod 

894 def __abs__(self) -> T_co: 

895 pass 

896 

897 @runtime_checkable 

898 class SupportsRound(Protocol[T_co]): 

899 """ 

900 An ABC with one abstract method __round__ that is covariant in its return type. 

901 """ 

902 __slots__ = () 

903 

904 @abc.abstractmethod 

905 def __round__(self, ndigits: int = 0) -> T_co: 

906 pass 

907 

908 
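# Illustrative sketch: these runtime-checkable protocols only test for the
# presence of the relevant dunder method, not its signature.
#
#     from typing_extensions import SupportsInt, SupportsIndex
#
#     isinstance(3.5, SupportsInt)     # True: float defines __int__
#     isinstance("3", SupportsInt)     # False: str has no __int__
#     isinstance(True, SupportsIndex)  # True: bool (an int) defines __index__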

909if hasattr(io, "Reader") and hasattr(io, "Writer"): 

910 Reader = io.Reader 

911 Writer = io.Writer 

912else: 

913 @runtime_checkable 

914 class Reader(Protocol[T_co]): 

915 """Protocol for simple I/O reader instances. 

916 

917 This protocol only supports blocking I/O. 

918 """ 

919 

920 __slots__ = () 

921 

922 @abc.abstractmethod 

923 def read(self, size: int = ..., /) -> T_co: 

924 """Read data from the input stream and return it. 

925 

926 If *size* is specified, at most *size* items (bytes/characters) will be 

927 read. 

928 """ 

929 

930 @runtime_checkable 

931 class Writer(Protocol[T_contra]): 

932 """Protocol for simple I/O writer instances. 

933 

934 This protocol only supports blocking I/O. 

935 """ 

936 

937 __slots__ = () 

938 

939 @abc.abstractmethod 

940 def write(self, data: T_contra, /) -> int: 

941 """Write *data* to the output stream and return the number of items written.""" # noqa: E501 

942 

943 
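# Illustrative sketch, assuming ordinary in-memory streams; the protocols only
# check that read()/write() exist, not their signatures.
#
#     import io
#     from typing_extensions import Reader, Writer
#
#     isinstance(io.BytesIO(b"data"), Reader)  # True: has a read() method
#     isinstance(io.StringIO(), Writer)        # True: has a write() method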

944_NEEDS_SINGLETONMETA = ( 

945 not hasattr(typing, "NoDefault") or not hasattr(typing, "NoExtraItems") 

946) 

947 

948if _NEEDS_SINGLETONMETA: 

949 class SingletonMeta(type): 

950 def __setattr__(cls, attr, value): 

951 # TypeError is consistent with the behavior of NoneType 

952 raise TypeError( 

953 f"cannot set {attr!r} attribute of immutable type {cls.__name__!r}" 

954 ) 

955 

956 

957if hasattr(typing, "NoDefault"): 

958 NoDefault = typing.NoDefault 

959else: 

960 class NoDefaultType(metaclass=SingletonMeta): 

961 """The type of the NoDefault singleton.""" 

962 

963 __slots__ = () 

964 

965 def __new__(cls): 

966 return globals().get("NoDefault") or object.__new__(cls) 

967 

968 def __repr__(self): 

969 return "typing_extensions.NoDefault" 

970 

971 def __reduce__(self): 

972 return "NoDefault" 

973 

974 NoDefault = NoDefaultType() 

975 del NoDefaultType 

976 

977if hasattr(typing, "NoExtraItems"): 

978 NoExtraItems = typing.NoExtraItems 

979else: 

980 class NoExtraItemsType(metaclass=SingletonMeta): 

981 """The type of the NoExtraItems singleton.""" 

982 

983 __slots__ = () 

984 

985 def __new__(cls): 

986 return globals().get("NoExtraItems") or object.__new__(cls) 

987 

988 def __repr__(self): 

989 return "typing_extensions.NoExtraItems" 

990 

991 def __reduce__(self): 

992 return "NoExtraItems" 

993 

994 NoExtraItems = NoExtraItemsType() 

995 del NoExtraItemsType 

996 

997if _NEEDS_SINGLETONMETA: 

998 del SingletonMeta 

999 

1000 
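# Illustrative sketch of how the NoDefault sentinel is consumed by the PEP 696
# support further below:
#
#     from typing_extensions import NoDefault, TypeVar
#
#     T = TypeVar("T")
#     T.has_default()               # False
#     T.__default__ is NoDefault    # True: "no default" is distinct from None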

1001# Update this to something like >=3.13.0b1 if and when 

1002# PEP 728 is implemented in CPython 

1003_PEP_728_IMPLEMENTED = False 

1004 

1005if _PEP_728_IMPLEMENTED: 

1006 # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" 

1007 # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059 

1008 # The standard library TypedDict below Python 3.11 does not store runtime 

1009 # information about optional and required keys when using Required or NotRequired. 

1010 # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11. 

1011 # Aaaand on 3.12 we add __orig_bases__ to TypedDict 

1012 # to enable better runtime introspection. 

1013 # On 3.13 we deprecate some odd ways of creating TypedDicts. 

1014 # Also on 3.13, PEP 705 adds the ReadOnly[] qualifier. 

1015 # PEP 728 (still pending) makes more changes. 

1016 TypedDict = typing.TypedDict 

1017 _TypedDictMeta = typing._TypedDictMeta 

1018 is_typeddict = typing.is_typeddict 

1019else: 

1020 # 3.10.0 and later 

1021 _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters 

1022 

1023 def _get_typeddict_qualifiers(annotation_type): 

1024 while True: 

1025 annotation_origin = get_origin(annotation_type) 

1026 if annotation_origin is Annotated: 

1027 annotation_args = get_args(annotation_type) 

1028 if annotation_args: 

1029 annotation_type = annotation_args[0] 

1030 else: 

1031 break 

1032 elif annotation_origin is Required: 

1033 yield Required 

1034 annotation_type, = get_args(annotation_type) 

1035 elif annotation_origin is NotRequired: 

1036 yield NotRequired 

1037 annotation_type, = get_args(annotation_type) 

1038 elif annotation_origin is ReadOnly: 

1039 yield ReadOnly 

1040 annotation_type, = get_args(annotation_type) 

1041 else: 

1042 break 

1043 

1044 class _TypedDictMeta(type): 

1045 

1046 def __new__(cls, name, bases, ns, *, total=True, closed=None, 

1047 extra_items=NoExtraItems): 

1048 """Create new typed dict class object. 

1049 

1050 This method is called when TypedDict is subclassed, 

1051 or when TypedDict is instantiated. This way 

1052 TypedDict supports all three syntax forms described in its docstring. 

1053 Subclasses and instances of TypedDict return actual dictionaries. 

1054 """ 

1055 for base in bases: 

1056 if type(base) is not _TypedDictMeta and base is not typing.Generic: 

1057 raise TypeError('cannot inherit from both a TypedDict type ' 

1058 'and a non-TypedDict base class') 

1059 if closed is not None and extra_items is not NoExtraItems: 

1060 raise TypeError(f"Cannot combine closed={closed!r} and extra_items") 

1061 

1062 if any(issubclass(b, typing.Generic) for b in bases): 

1063 generic_base = (typing.Generic,) 

1064 else: 

1065 generic_base = () 

1066 

1067 ns_annotations = ns.pop('__annotations__', None) 

1068 

1069 # typing.py generally doesn't let you inherit from plain Generic, unless 

1070 # the name of the class happens to be "Protocol" 

1071 tp_dict = type.__new__(_TypedDictMeta, "Protocol", (*generic_base, dict), ns) 

1072 tp_dict.__name__ = name 

1073 if tp_dict.__qualname__ == "Protocol": 

1074 tp_dict.__qualname__ = name 

1075 

1076 if not hasattr(tp_dict, '__orig_bases__'): 

1077 tp_dict.__orig_bases__ = bases 

1078 

1079 annotations = {} 

1080 own_annotate = None 

1081 if ns_annotations is not None: 

1082 own_annotations = ns_annotations 

1083 elif sys.version_info >= (3, 14): 

1084 if hasattr(annotationlib, "get_annotate_from_class_namespace"): 

1085 own_annotate = annotationlib.get_annotate_from_class_namespace(ns) 

1086 else: 

1087 # 3.14.0a7 and earlier 

1088 own_annotate = ns.get("__annotate__") 

1089 if own_annotate is not None: 

1090 own_annotations = annotationlib.call_annotate_function( 

1091 own_annotate, Format.FORWARDREF, owner=tp_dict 

1092 ) 

1093 else: 

1094 own_annotations = {} 

1095 else: 

1096 own_annotations = {} 

1097 msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" 

1098 if _TAKES_MODULE: 

1099 own_checked_annotations = { 

1100 n: typing._type_check(tp, msg, module=tp_dict.__module__) 

1101 for n, tp in own_annotations.items() 

1102 } 

1103 else: 

1104 own_checked_annotations = { 

1105 n: typing._type_check(tp, msg) 

1106 for n, tp in own_annotations.items() 

1107 } 

1108 required_keys = set() 

1109 optional_keys = set() 

1110 readonly_keys = set() 

1111 mutable_keys = set() 

1112 extra_items_type = extra_items 

1113 

1114 for base in bases: 

1115 base_dict = base.__dict__ 

1116 

1117 if sys.version_info <= (3, 14): 

1118 annotations.update(base_dict.get('__annotations__', {})) 

1119 required_keys.update(base_dict.get('__required_keys__', ())) 

1120 optional_keys.update(base_dict.get('__optional_keys__', ())) 

1121 readonly_keys.update(base_dict.get('__readonly_keys__', ())) 

1122 mutable_keys.update(base_dict.get('__mutable_keys__', ())) 

1123 

1124 # This was specified in an earlier version of PEP 728. Support 

1125 # is retained for backwards compatibility, but only for Python 

1126 # 3.13 and lower. 

1127 if (closed and sys.version_info < (3, 14) 

1128 and "__extra_items__" in own_checked_annotations): 

1129 annotation_type = own_checked_annotations.pop("__extra_items__") 

1130 qualifiers = set(_get_typeddict_qualifiers(annotation_type)) 

1131 if Required in qualifiers: 

1132 raise TypeError( 

1133 "Special key __extra_items__ does not support " 

1134 "Required" 

1135 ) 

1136 if NotRequired in qualifiers: 

1137 raise TypeError( 

1138 "Special key __extra_items__ does not support " 

1139 "NotRequired" 

1140 ) 

1141 extra_items_type = annotation_type 

1142 

1143 annotations.update(own_checked_annotations) 

1144 for annotation_key, annotation_type in own_checked_annotations.items(): 

1145 qualifiers = set(_get_typeddict_qualifiers(annotation_type)) 

1146 

1147 if Required in qualifiers: 

1148 required_keys.add(annotation_key) 

1149 elif NotRequired in qualifiers: 

1150 optional_keys.add(annotation_key) 

1151 elif total: 

1152 required_keys.add(annotation_key) 

1153 else: 

1154 optional_keys.add(annotation_key) 

1155 if ReadOnly in qualifiers: 

1156 mutable_keys.discard(annotation_key) 

1157 readonly_keys.add(annotation_key) 

1158 else: 

1159 mutable_keys.add(annotation_key) 

1160 readonly_keys.discard(annotation_key) 

1161 

1162 if sys.version_info >= (3, 14): 

1163 def __annotate__(format): 

1164 annos = {} 

1165 for base in bases: 

1166 if base is Generic: 

1167 continue 

1168 base_annotate = base.__annotate__ 

1169 if base_annotate is None: 

1170 continue 

1171 base_annos = annotationlib.call_annotate_function( 

1172 base_annotate, format, owner=base) 

1173 annos.update(base_annos) 

1174 if own_annotate is not None: 

1175 own = annotationlib.call_annotate_function( 

1176 own_annotate, format, owner=tp_dict) 

1177 if format != Format.STRING: 

1178 own = { 

1179 n: typing._type_check(tp, msg, module=tp_dict.__module__) 

1180 for n, tp in own.items() 

1181 } 

1182 elif format == Format.STRING: 

1183 own = annotationlib.annotations_to_string(own_annotations) 

1184 elif format in (Format.FORWARDREF, Format.VALUE): 

1185 own = own_checked_annotations 

1186 else: 

1187 raise NotImplementedError(format) 

1188 annos.update(own) 

1189 return annos 

1190 

1191 tp_dict.__annotate__ = __annotate__ 

1192 else: 

1193 tp_dict.__annotations__ = annotations 

1194 tp_dict.__required_keys__ = frozenset(required_keys) 

1195 tp_dict.__optional_keys__ = frozenset(optional_keys) 

1196 tp_dict.__readonly_keys__ = frozenset(readonly_keys) 

1197 tp_dict.__mutable_keys__ = frozenset(mutable_keys) 

1198 tp_dict.__total__ = total 

1199 tp_dict.__closed__ = closed 

1200 tp_dict.__extra_items__ = extra_items_type 

1201 return tp_dict 

1202 

1203 __call__ = dict # static method 

1204 

1205 def __subclasscheck__(cls, other): 

1206 # Typed dicts are only for static structural subtyping. 

1207 raise TypeError('TypedDict does not support instance and class checks') 

1208 

1209 __instancecheck__ = __subclasscheck__ 

1210 

1211 _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {}) 

1212 

1213 def _create_typeddict( 

1214 typename, 

1215 fields, 

1216 /, 

1217 *, 

1218 typing_is_inline, 

1219 total, 

1220 closed, 

1221 extra_items, 

1222 **kwargs, 

1223 ): 

1224 if fields is _marker or fields is None: 

1225 if fields is _marker: 

1226 deprecated_thing = ( 

1227 "Failing to pass a value for the 'fields' parameter" 

1228 ) 

1229 else: 

1230 deprecated_thing = "Passing `None` as the 'fields' parameter" 

1231 

1232 example = f"`{typename} = TypedDict({typename!r}, {{}})`" 

1233 deprecation_msg = ( 

1234 f"{deprecated_thing} is deprecated and will be disallowed in " 

1235 "Python 3.15. To create a TypedDict class with 0 fields " 

1236 "using the functional syntax, pass an empty dictionary, e.g. " 

1237 ) + example + "." 

1238 warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2) 

1239 # Support a field called "closed" 

1240 if closed is not False and closed is not True and closed is not None: 

1241 kwargs["closed"] = closed 

1242 closed = None 

1243 # Or "extra_items" 

1244 if extra_items is not NoExtraItems: 

1245 kwargs["extra_items"] = extra_items 

1246 extra_items = NoExtraItems 

1247 fields = kwargs 

1248 elif kwargs: 

1249 raise TypeError("TypedDict takes either a dict or keyword arguments," 

1250 " but not both") 

1251 if kwargs: 

1252 if sys.version_info >= (3, 13): 

1253 raise TypeError("TypedDict takes no keyword arguments") 

1254 warnings.warn( 

1255 "The kwargs-based syntax for TypedDict definitions is deprecated " 

1256 "in Python 3.11, will be removed in Python 3.13, and may not be " 

1257 "understood by third-party type checkers.", 

1258 DeprecationWarning, 

1259 stacklevel=2, 

1260 ) 

1261 

1262 ns = {'__annotations__': dict(fields)} 

1263 module = _caller(depth=4 if typing_is_inline else 2) 

1264 if module is not None: 

1265 # Setting correct module is necessary to make typed dict classes 

1266 # pickleable. 

1267 ns['__module__'] = module 

1268 

1269 td = _TypedDictMeta(typename, (), ns, total=total, closed=closed, 

1270 extra_items=extra_items) 

1271 td.__orig_bases__ = (TypedDict,) 

1272 return td 

1273 

1274 class _TypedDictSpecialForm(_SpecialForm, _root=True): 

1275 def __call__( 

1276 self, 

1277 typename, 

1278 fields=_marker, 

1279 /, 

1280 *, 

1281 total=True, 

1282 closed=None, 

1283 extra_items=NoExtraItems, 

1284 **kwargs 

1285 ): 

1286 return _create_typeddict( 

1287 typename, 

1288 fields, 

1289 typing_is_inline=False, 

1290 total=total, 

1291 closed=closed, 

1292 extra_items=extra_items, 

1293 **kwargs, 

1294 ) 

1295 

1296 def __mro_entries__(self, bases): 

1297 return (_TypedDict,) 

1298 

1299 @_TypedDictSpecialForm 

1300 def TypedDict(self, args): 

1301 """A simple typed namespace. At runtime it is equivalent to a plain dict. 

1302 

1303 TypedDict creates a dictionary type such that a type checker will expect all 

1304 instances to have a certain set of keys, where each key is 

1305 associated with a value of a consistent type. This expectation 

1306 is not checked at runtime. 

1307 

1308 Usage:: 

1309 

1310 class Point2D(TypedDict): 

1311 x: int 

1312 y: int 

1313 label: str 

1314 

1315 a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK 

1316 b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check 

1317 

1318 assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first') 

1319 

1320 The type info can be accessed via the Point2D.__annotations__ dict, and 

1321 the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets. 

1322 TypedDict supports an additional equivalent form:: 

1323 

1324 Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) 

1325 

1326 By default, all keys must be present in a TypedDict. It is possible 

1327 to override this by specifying totality:: 

1328 

1329 class Point2D(TypedDict, total=False): 

1330 x: int 

1331 y: int 

1332 

1333 This means that a Point2D TypedDict can have any of the keys omitted. A type 

1334 checker is only expected to support a literal False or True as the value of 

1335 the total argument. True is the default, and makes all items defined in the 

1336 class body be required. 

1337 

1338 The Required and NotRequired special forms can also be used to mark 

1339 individual keys as being required or not required:: 

1340 

1341 class Point2D(TypedDict): 

1342 x: int # the "x" key must always be present (Required is the default) 

1343 y: NotRequired[int] # the "y" key can be omitted 

1344 

1345 See PEP 655 for more details on Required and NotRequired. 

1346 """ 

1347 # This runs when creating inline TypedDicts: 

1348 if not isinstance(args, dict): 

1349 raise TypeError( 

1350 "TypedDict[...] should be used with a single dict argument" 

1351 ) 

1352 

1353 return _create_typeddict( 

1354 "<inline TypedDict>", 

1355 args, 

1356 typing_is_inline=True, 

1357 total=True, 

1358 closed=True, 

1359 extra_items=NoExtraItems, 

1360 ) 

1361 

1362 _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta) 

1363 

1364 def is_typeddict(tp): 

1365 """Check if an annotation is a TypedDict class 

1366 

1367 For example:: 

1368 class Film(TypedDict): 

1369 title: str 

1370 year: int 

1371 

1372 is_typeddict(Film) # => True 

1373 is_typeddict(Union[list, str]) # => False 

1374 """ 

1375 return isinstance(tp, _TYPEDDICT_TYPES) 

1376 

1377 
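# Illustrative sketch of the runtime introspection data computed by the
# metaclass above (the `Movie` class is hypothetical):
#
#     from typing_extensions import TypedDict, NotRequired, ReadOnly
#
#     class Movie(TypedDict):
#         title: ReadOnly[str]
#         year: NotRequired[int]
#
#     Movie.__required_keys__    # frozenset({'title'})
#     Movie.__optional_keys__    # frozenset({'year'})
#     Movie.__readonly_keys__    # frozenset({'title'})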

1378if hasattr(typing, "assert_type"): 

1379 assert_type = typing.assert_type 

1380 

1381else: 

1382 def assert_type(val, typ, /): 

1383 """Assert (to the type checker) that the value is of the given type. 

1384 

1385 When the type checker encounters a call to assert_type(), it 

1386 emits an error if the value is not of the specified type:: 

1387 

1388 def greet(name: str) -> None: 

1389 assert_type(name, str) # ok 

1390 assert_type(name, int) # type checker error 

1391 

1392 At runtime this returns the first argument unchanged and otherwise 

1393 does nothing. 

1394 """ 

1395 return val 

1396 

1397 

1398if hasattr(typing, "ReadOnly"): # 3.13+ 

1399 get_type_hints = typing.get_type_hints 

1400else: # <=3.13 

1401 # replaces _strip_annotations() 

1402 def _strip_extras(t): 

1403 """Strips Annotated, Required and NotRequired from a given type.""" 

1404 if isinstance(t, typing._AnnotatedAlias): 

1405 return _strip_extras(t.__origin__) 

1406 if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired, ReadOnly): 

1407 return _strip_extras(t.__args__[0]) 

1408 if isinstance(t, typing._GenericAlias): 

1409 stripped_args = tuple(_strip_extras(a) for a in t.__args__) 

1410 if stripped_args == t.__args__: 

1411 return t 

1412 return t.copy_with(stripped_args) 

1413 if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias): 

1414 stripped_args = tuple(_strip_extras(a) for a in t.__args__) 

1415 if stripped_args == t.__args__: 

1416 return t 

1417 return _types.GenericAlias(t.__origin__, stripped_args) 

1418 if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType): 

1419 stripped_args = tuple(_strip_extras(a) for a in t.__args__) 

1420 if stripped_args == t.__args__: 

1421 return t 

1422 return functools.reduce(operator.or_, stripped_args) 

1423 

1424 return t 

1425 

1426 def get_type_hints(obj, globalns=None, localns=None, include_extras=False): 

1427 """Return type hints for an object. 

1428 

1429 This is often the same as obj.__annotations__, but it handles 

1430 forward references encoded as string literals, adds Optional[t] if a 

1431 default value equal to None is set and recursively replaces all 

1432 'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T' 

1433 (unless 'include_extras=True'). 

1434 

1435 The argument may be a module, class, method, or function. The annotations 

1436 are returned as a dictionary. For classes, annotations include also 

1437 inherited members. 

1438 

1439 TypeError is raised if the argument is not of a type that can contain 

1440 annotations, and an empty dictionary is returned if no annotations are 

1441 present. 

1442 

1443 BEWARE -- the behavior of globalns and localns is counterintuitive 

1444 (unless you are familiar with how eval() and exec() work). The 

1445 search order is locals first, then globals. 

1446 

1447 - If no dict arguments are passed, an attempt is made to use the 

1448 globals from obj (or the respective module's globals for classes), 

1449 and these are also used as the locals. If the object does not appear 

1450 to have globals, an empty dictionary is used. 

1451 

1452 - If one dict argument is passed, it is used for both globals and 

1453 locals. 

1454 

1455 - If two dict arguments are passed, they specify globals and 

1456 locals, respectively. 

1457 """ 

1458 hint = typing.get_type_hints( 

1459 obj, globalns=globalns, localns=localns, include_extras=True 

1460 ) 

1461 if sys.version_info < (3, 11): 

1462 _clean_optional(obj, hint, globalns, localns) 

1463 if include_extras: 

1464 return hint 

1465 return {k: _strip_extras(t) for k, t in hint.items()} 

1466 

1467 _NoneType = type(None) 

1468 

1469 def _could_be_inserted_optional(t): 

1470 """detects Union[..., None] pattern""" 

1471 if not isinstance(t, typing._UnionGenericAlias): 

1472 return False 

1473 # Assume that if the last argument is not None, the union is user-defined 

1474 if t.__args__[-1] is not _NoneType: 

1475 return False 

1476 return True 

1477 

1478 # < 3.11 

1479 def _clean_optional(obj, hints, globalns=None, localns=None): 

1480 # reverts injected Union[..., None] cases from typing.get_type_hints 

1481 # when a None default value is used. 

1482 # see https://github.com/python/typing_extensions/issues/310 

1483 if not hints or isinstance(obj, type): 

1484 return 

1485 defaults = typing._get_defaults(obj) # avoid accessing __annotations__ 

1486 if not defaults: 

1487 return 

1488 original_hints = obj.__annotations__ 

1489 for name, value in hints.items(): 

1491 # Not a Union[..., None] or replacement conditions not fulfilled 

1491 if (not _could_be_inserted_optional(value) 

1492 or name not in defaults 

1493 or defaults[name] is not None 

1494 ): 

1495 continue 

1496 original_value = original_hints[name] 

1497 # value=NoneType should have caused a skip above but check for safety 

1498 if original_value is None: 

1499 original_value = _NoneType 

1500 # Forward reference 

1501 if isinstance(original_value, str): 

1502 if globalns is None: 

1503 if isinstance(obj, _types.ModuleType): 

1504 globalns = obj.__dict__ 

1505 else: 

1506 nsobj = obj 

1507 # Find globalns for the unwrapped object. 

1508 while hasattr(nsobj, '__wrapped__'): 

1509 nsobj = nsobj.__wrapped__ 

1510 globalns = getattr(nsobj, '__globals__', {}) 

1511 if localns is None: 

1512 localns = globalns 

1513 elif localns is None: 

1514 localns = globalns 

1515 

1516 original_value = ForwardRef( 

1517 original_value, 

1518 is_argument=not isinstance(obj, _types.ModuleType) 

1519 ) 

1520 original_evaluated = typing._eval_type(original_value, globalns, localns) 

1521 # Compare whether the values differ. Note that even if they are equal, 

1522 # the value might be cached by typing._tp_cache, unlike original_evaluated 

1523 if original_evaluated != value or ( 

1524 # 3.10: ForwardRefs of UnionType might be turned into _UnionGenericAlias 

1525 hasattr(_types, "UnionType") 

1526 and isinstance(original_evaluated, _types.UnionType) 

1527 and not isinstance(value, _types.UnionType) 

1528 ): 

1529 hints[name] = original_evaluated 

1530 
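# Illustrative sketch of the stripping behaviour implemented above (the `tag`
# function is hypothetical):
#
#     from typing_extensions import Annotated, get_type_hints
#
#     def tag(x: Annotated[int, "units=px"]) -> None: ...
#
#     get_type_hints(tag)["x"]                        # int
#     get_type_hints(tag, include_extras=True)["x"]   # Annotated[int, 'units=px']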

1531# Python 3.9 has get_origin() and get_args() but those implementations don't support 

1532# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do. 

1533if sys.version_info[:2] >= (3, 10): 

1534 get_origin = typing.get_origin 

1535 get_args = typing.get_args 

1536# 3.9 

1537else: 

1538 def get_origin(tp): 

1539 """Get the unsubscripted version of a type. 

1540 

1541 This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar 

1542 and Annotated. Return None for unsupported types. Examples:: 

1543 

1544 get_origin(Literal[42]) is Literal 

1545 get_origin(int) is None 

1546 get_origin(ClassVar[int]) is ClassVar 

1547 get_origin(Generic) is Generic 

1548 get_origin(Generic[T]) is Generic 

1549 get_origin(Union[T, int]) is Union 

1550 get_origin(List[Tuple[T, T]][int]) == list 

1551 get_origin(P.args) is P 

1552 """ 

1553 if isinstance(tp, typing._AnnotatedAlias): 

1554 return Annotated 

1555 if isinstance(tp, (typing._BaseGenericAlias, _types.GenericAlias, 

1556 ParamSpecArgs, ParamSpecKwargs)): 

1557 return tp.__origin__ 

1558 if tp is typing.Generic: 

1559 return typing.Generic 

1560 return None 

1561 

1562 def get_args(tp): 

1563 """Get type arguments with all substitutions performed. 

1564 

1565 For unions, basic simplifications used by Union constructor are performed. 

1566 Examples:: 

1567 get_args(Dict[str, int]) == (str, int) 

1568 get_args(int) == () 

1569 get_args(Union[int, Union[T, int], str][int]) == (int, str) 

1570 get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) 

1571 get_args(Callable[[], T][int]) == ([], int) 

1572 """ 

1573 if isinstance(tp, typing._AnnotatedAlias): 

1574 return (tp.__origin__, *tp.__metadata__) 

1575 if isinstance(tp, (typing._GenericAlias, _types.GenericAlias)): 

1576 res = tp.__args__ 

1577 if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis: 

1578 res = (list(res[:-1]), res[-1]) 

1579 return res 

1580 return () 

1581 

1582 

1583# 3.10+ 

1584if hasattr(typing, 'TypeAlias'): 

1585 TypeAlias = typing.TypeAlias 

1586# 3.9 

1587else: 

1588 @_ExtensionsSpecialForm 

1589 def TypeAlias(self, parameters): 

1590 """Special marker indicating that an assignment should 

1591 be recognized as a proper type alias definition by type 

1592 checkers. 

1593 

1594 For example:: 

1595 

1596 Predicate: TypeAlias = Callable[..., bool] 

1597 

1598 It's invalid when used anywhere except as in the example above. 

1599 """ 

1600 raise TypeError(f"{self} is not subscriptable") 

1601 

1602 

1603def _set_default(type_param, default): 

1604 type_param.has_default = lambda: default is not NoDefault 

1605 type_param.__default__ = default 

1606 

1607 

1608def _set_module(typevarlike): 

1609 # for pickling: 

1610 def_mod = _caller(depth=2) 

1611 if def_mod != 'typing_extensions': 

1612 typevarlike.__module__ = def_mod 

1613 

1614 

1615class _DefaultMixin: 

1616 """Mixin for TypeVarLike defaults.""" 

1617 

1618 __slots__ = () 

1619 __init__ = _set_default 

1620 

1621 

1622# Classes using this metaclass must provide a _backported_typevarlike ClassVar 

1623class _TypeVarLikeMeta(type): 

1624 def __instancecheck__(cls, __instance: Any) -> bool: 

1625 return isinstance(__instance, cls._backported_typevarlike) 

1626 

1627 

1628if _PEP_696_IMPLEMENTED: 

1629 from typing import TypeVar 

1630else: 

1631 # Add default and infer_variance parameters from PEP 696 and 695 

1632 class TypeVar(metaclass=_TypeVarLikeMeta): 

1633 """Type variable.""" 

1634 

1635 _backported_typevarlike = typing.TypeVar 

1636 

1637 def __new__(cls, name, *constraints, bound=None, 

1638 covariant=False, contravariant=False, 

1639 default=NoDefault, infer_variance=False): 

1640 if hasattr(typing, "TypeAliasType"): 

1641 # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar 

1642 typevar = typing.TypeVar(name, *constraints, bound=bound, 

1643 covariant=covariant, contravariant=contravariant, 

1644 infer_variance=infer_variance) 

1645 else: 

1646 typevar = typing.TypeVar(name, *constraints, bound=bound, 

1647 covariant=covariant, contravariant=contravariant) 

1648 if infer_variance and (covariant or contravariant): 

1649 raise ValueError("Variance cannot be specified with infer_variance.") 

1650 typevar.__infer_variance__ = infer_variance 

1651 

1652 _set_default(typevar, default) 

1653 _set_module(typevar) 

1654 

1655 def _tvar_prepare_subst(alias, args): 

1656 if ( 

1657 typevar.has_default() 

1658 and alias.__parameters__.index(typevar) == len(args) 

1659 ): 

1660 args += (typevar.__default__,) 

1661 return args 

1662 

1663 typevar.__typing_prepare_subst__ = _tvar_prepare_subst 

1664 return typevar 

1665 

1666 def __init_subclass__(cls) -> None: 

1667 raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type") 

1668 

1669 
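# Illustrative sketch of the PEP 696 `default` parameter added by the backport
# above:
#
#     from typing_extensions import TypeVar
#
#     T = TypeVar("T", default=int)
#     T.has_default()    # True
#     T.__default__      # <class 'int'>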

1670# Python 3.10+ has PEP 612 

1671if hasattr(typing, 'ParamSpecArgs'): 

1672 ParamSpecArgs = typing.ParamSpecArgs 

1673 ParamSpecKwargs = typing.ParamSpecKwargs 

1674# 3.9 

1675else: 

1676 class _Immutable: 

1677 """Mixin to indicate that object should not be copied.""" 

1678 __slots__ = () 

1679 

1680 def __copy__(self): 

1681 return self 

1682 

1683 def __deepcopy__(self, memo): 

1684 return self 

1685 

1686 class ParamSpecArgs(_Immutable): 

1687 """The args for a ParamSpec object. 

1688 

1689 Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. 

1690 

1691 ParamSpecArgs objects have a reference back to their ParamSpec: 

1692 

1693 P.args.__origin__ is P 

1694 

1695 This type is meant for runtime introspection and has no special meaning to 

1696 static type checkers. 

1697 """ 

1698 def __init__(self, origin): 

1699 self.__origin__ = origin 

1700 

1701 def __repr__(self): 

1702 return f"{self.__origin__.__name__}.args" 

1703 

1704 def __eq__(self, other): 

1705 if not isinstance(other, ParamSpecArgs): 

1706 return NotImplemented 

1707 return self.__origin__ == other.__origin__ 

1708 

1709 class ParamSpecKwargs(_Immutable): 

1710 """The kwargs for a ParamSpec object. 

1711 

1712 Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs. 

1713 

1714 ParamSpecKwargs objects have a reference back to their ParamSpec: 

1715 

1716 P.kwargs.__origin__ is P 

1717 

1718 This type is meant for runtime introspection and has no special meaning to 

1719 static type checkers. 

1720 """ 

1721 def __init__(self, origin): 

1722 self.__origin__ = origin 

1723 

1724 def __repr__(self): 

1725 return f"{self.__origin__.__name__}.kwargs" 

1726 

1727 def __eq__(self, other): 

1728 if not isinstance(other, ParamSpecKwargs): 

1729 return NotImplemented 

1730 return self.__origin__ == other.__origin__ 

1731 

1732 

1733if _PEP_696_IMPLEMENTED: 

1734 from typing import ParamSpec 

1735 

1736# 3.10+ 

1737elif hasattr(typing, 'ParamSpec'): 

1738 

1739 # Add default parameter - PEP 696 

1740 class ParamSpec(metaclass=_TypeVarLikeMeta): 

1741 """Parameter specification.""" 

1742 

1743 _backported_typevarlike = typing.ParamSpec 

1744 

1745 def __new__(cls, name, *, bound=None, 

1746 covariant=False, contravariant=False, 

1747 infer_variance=False, default=NoDefault): 

1748 if hasattr(typing, "TypeAliasType"): 

1749 # PEP 695 implemented (3.12+), can pass infer_variance to typing.ParamSpec

1750 paramspec = typing.ParamSpec(name, bound=bound, 

1751 covariant=covariant, 

1752 contravariant=contravariant, 

1753 infer_variance=infer_variance) 

1754 else: 

1755 paramspec = typing.ParamSpec(name, bound=bound, 

1756 covariant=covariant, 

1757 contravariant=contravariant) 

1758 paramspec.__infer_variance__ = infer_variance 

1759 

1760 _set_default(paramspec, default) 

1761 _set_module(paramspec) 

1762 

1763 def _paramspec_prepare_subst(alias, args): 

1764 params = alias.__parameters__ 

1765 i = params.index(paramspec) 

1766 if i == len(args) and paramspec.has_default(): 

1767 args = [*args, paramspec.__default__] 

1768 if i >= len(args): 

1769 raise TypeError(f"Too few arguments for {alias}") 

1770 # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612. 

1771 if len(params) == 1 and not typing._is_param_expr(args[0]): 

1772 assert i == 0 

1773 args = (args,) 

1774 # Convert lists to tuples to help other libraries cache the results. 

1775 elif isinstance(args[i], list): 

1776 args = (*args[:i], tuple(args[i]), *args[i + 1:]) 

1777 return args 

1778 

1779 paramspec.__typing_prepare_subst__ = _paramspec_prepare_subst 

1780 return paramspec 

1781 

1782 def __init_subclass__(cls) -> None: 

1783 raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type") 

1784 
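# A rough sketch of what the __typing_prepare_subst__ hook above enables on
# interpreters whose typing module consults that hook (3.11/3.12); the names
# DefaultP, T and Z are examples only:
#
#     T = TypeVar("T")
#     DefaultP = ParamSpec("DefaultP", default=[int, str])
#
#     class Z(Generic[T, DefaultP]): ...
#
#     Z[float]            # DefaultP falls back to [int, str]
#     Z[float, [bytes]]   # an explicit parameter expression still wins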

1785# 3.9 

1786else: 

1787 

1788 # Inherits from list as a workaround for Callable checks in Python < 3.9.2. 

1789 class ParamSpec(list, _DefaultMixin): 

1790 """Parameter specification variable. 

1791 

1792 Usage:: 

1793 

1794 P = ParamSpec('P') 

1795 

1796 Parameter specification variables exist primarily for the benefit of static 

1797 type checkers. They are used to forward the parameter types of one 

1798 callable to another callable, a pattern commonly found in higher order 

1799 functions and decorators. They are only valid when used in ``Concatenate``, 

1800 or s the first argument to ``Callable``. In Python 3.10 and higher, 

1801 they are also supported in user-defined Generics at runtime. 

1802 See class Generic for more information on generic types. An 

1803 example for annotating a decorator:: 

1804 

1805 T = TypeVar('T') 

1806 P = ParamSpec('P') 

1807 

1808 def add_logging(f: Callable[P, T]) -> Callable[P, T]: 

1809 '''A type-safe decorator to add logging to a function.''' 

1810 def inner(*args: P.args, **kwargs: P.kwargs) -> T: 

1811 logging.info(f'{f.__name__} was called') 

1812 return f(*args, **kwargs) 

1813 return inner 

1814 

1815 @add_logging 

1816 def add_two(x: float, y: float) -> float: 

1817 '''Add two numbers together.''' 

1818 return x + y 

1819 

1820 Parameter specification variables defined with covariant=True or 

1821 contravariant=True can be used to declare covariant or contravariant 

1822 generic types. These keyword arguments are valid, but their actual semantics 

1823 are yet to be decided. See PEP 612 for details. 

1824 

1825 Parameter specification variables can be introspected. e.g.: 

1826 

1827 P.__name__ == 'P'

1828 P.__bound__ == None 

1829 P.__covariant__ == False 

1830 P.__contravariant__ == False 

1831 

1832 Note that only parameter specification variables defined in global scope can 

1833 be pickled. 

1834 """ 

1835 

1836 # Trick Generic __parameters__. 

1837 __class__ = typing.TypeVar 

1838 

1839 @property 

1840 def args(self): 

1841 return ParamSpecArgs(self) 

1842 

1843 @property 

1844 def kwargs(self): 

1845 return ParamSpecKwargs(self) 

1846 

1847 def __init__(self, name, *, bound=None, covariant=False, contravariant=False, 

1848 infer_variance=False, default=NoDefault): 

1849 list.__init__(self, [self]) 

1850 self.__name__ = name 

1851 self.__covariant__ = bool(covariant) 

1852 self.__contravariant__ = bool(contravariant) 

1853 self.__infer_variance__ = bool(infer_variance) 

1854 if bound: 

1855 self.__bound__ = typing._type_check(bound, 'Bound must be a type.') 

1856 else: 

1857 self.__bound__ = None 

1858 _DefaultMixin.__init__(self, default) 

1859 

1860 # for pickling: 

1861 def_mod = _caller() 

1862 if def_mod != 'typing_extensions': 

1863 self.__module__ = def_mod 

1864 

1865 def __repr__(self): 

1866 if self.__infer_variance__: 

1867 prefix = '' 

1868 elif self.__covariant__: 

1869 prefix = '+' 

1870 elif self.__contravariant__: 

1871 prefix = '-' 

1872 else: 

1873 prefix = '~' 

1874 return prefix + self.__name__ 

1875 

1876 def __hash__(self): 

1877 return object.__hash__(self) 

1878 

1879 def __eq__(self, other): 

1880 return self is other 

1881 

1882 def __reduce__(self): 

1883 return self.__name__ 

1884 

1885 # Hack to get typing._type_check to pass. 

1886 def __call__(self, *args, **kwargs): 

1887 pass 

1888 

1889 

1890# 3.9 

1891if not hasattr(typing, 'Concatenate'): 

1892 # Inherits from list as a workaround for Callable checks in Python < 3.9.2. 

1893 

1894 # 3.9.0-1 

1895 if not hasattr(typing, '_type_convert'): 

1896 def _type_convert(arg, module=None, *, allow_special_forms=False): 

1897 """For converting None to type(None), and strings to ForwardRef.""" 

1898 if arg is None: 

1899 return type(None) 

1900 if isinstance(arg, str): 

1901 if sys.version_info <= (3, 9, 6): 

1902 return ForwardRef(arg) 

1903 if sys.version_info <= (3, 9, 7): 

1904 return ForwardRef(arg, module=module) 

1905 return ForwardRef(arg, module=module, is_class=allow_special_forms) 

1906 return arg 

1907 else: 

1908 _type_convert = typing._type_convert 

1909 

1910 class _ConcatenateGenericAlias(list): 

1911 

1912 # Trick Generic into looking into this for __parameters__. 

1913 __class__ = typing._GenericAlias 

1914 

1915 def __init__(self, origin, args): 

1916 super().__init__(args) 

1917 self.__origin__ = origin 

1918 self.__args__ = args 

1919 

1920 def __repr__(self): 

1921 _type_repr = typing._type_repr 

1922 return (f'{_type_repr(self.__origin__)}' 

1923 f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]') 

1924 

1925 def __hash__(self): 

1926 return hash((self.__origin__, self.__args__)) 

1927 

1928 # Hack to get typing._type_check to pass in Generic. 

1929 def __call__(self, *args, **kwargs): 

1930 pass 

1931 

1932 @property 

1933 def __parameters__(self): 

1934 return tuple( 

1935 tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec)) 

1936 ) 

1937 

1938 # 3.9: used by __getitem__ below

1939 def copy_with(self, params): 

1940 if isinstance(params[-1], _ConcatenateGenericAlias): 

1941 params = (*params[:-1], *params[-1].__args__) 

1942 elif isinstance(params[-1], (list, tuple)): 

1943 return (*params[:-1], *params[-1]) 

1944 elif (not (params[-1] is ... or isinstance(params[-1], ParamSpec))): 

1945 raise TypeError("The last parameter to Concatenate should be a " 

1946 "ParamSpec variable or ellipsis.") 

1947 return self.__class__(self.__origin__, params) 

1948 

1949 # 3.9; accessed during GenericAlias.__getitem__ when substituting 

1950 def __getitem__(self, args): 

1951 if self.__origin__ in (Generic, Protocol): 

1952 # Can't subscript Generic[...] or Protocol[...]. 

1953 raise TypeError(f"Cannot subscript already-subscripted {self}") 

1954 if not self.__parameters__: 

1955 raise TypeError(f"{self} is not a generic class") 

1956 

1957 if not isinstance(args, tuple): 

1958 args = (args,) 

1959 args = _unpack_args(*(_type_convert(p) for p in args)) 

1960 params = self.__parameters__ 

1961 for param in params: 

1962 prepare = getattr(param, "__typing_prepare_subst__", None) 

1963 if prepare is not None: 

1964 args = prepare(self, args) 

1965 # 3.9 & typing.ParamSpec 

1966 elif isinstance(param, ParamSpec): 

1967 i = params.index(param) 

1968 if ( 

1969 i == len(args) 

1970 and getattr(param, '__default__', NoDefault) is not NoDefault 

1971 ): 

1972 args = [*args, param.__default__] 

1973 if i >= len(args): 

1974 raise TypeError(f"Too few arguments for {self}") 

1975 # Special case for Z[[int, str, bool]] == Z[int, str, bool] 

1976 if len(params) == 1 and not _is_param_expr(args[0]): 

1977 assert i == 0 

1978 args = (args,) 

1979 elif ( 

1980 isinstance(args[i], list) 

1981 # 3.9 

1982 # This class inherits from list; do not convert

1983 and not isinstance(args[i], _ConcatenateGenericAlias) 

1984 ): 

1985 args = (*args[:i], tuple(args[i]), *args[i + 1:]) 

1986 

1987 alen = len(args) 

1988 plen = len(params) 

1989 if alen != plen: 

1990 raise TypeError( 

1991 f"Too {'many' if alen > plen else 'few'} arguments for {self};" 

1992 f" actual {alen}, expected {plen}" 

1993 ) 

1994 

1995 subst = dict(zip(self.__parameters__, args)) 

1996 # determine new args 

1997 new_args = [] 

1998 for arg in self.__args__: 

1999 if isinstance(arg, type): 

2000 new_args.append(arg) 

2001 continue 

2002 if isinstance(arg, TypeVar): 

2003 arg = subst[arg] 

2004 if ( 

2005 (isinstance(arg, typing._GenericAlias) and _is_unpack(arg)) 

2006 or ( 

2007 hasattr(_types, "GenericAlias") 

2008 and isinstance(arg, _types.GenericAlias) 

2009 and getattr(arg, "__unpacked__", False) 

2010 ) 

2011 ): 

2012 raise TypeError(f"{arg} is not valid as type argument") 

2013 

2014 elif isinstance(arg, 

2015 typing._GenericAlias 

2016 if not hasattr(_types, "GenericAlias") else 

2017 (typing._GenericAlias, _types.GenericAlias) 

2018 ): 

2019 subparams = arg.__parameters__ 

2020 if subparams: 

2021 subargs = tuple(subst[x] for x in subparams) 

2022 arg = arg[subargs] 

2023 new_args.append(arg) 

2024 return self.copy_with(tuple(new_args)) 

2025 

2026# 3.10+ 

2027else: 

2028 _ConcatenateGenericAlias = typing._ConcatenateGenericAlias 

2029 

2030 # 3.10 

2031 if sys.version_info < (3, 11): 

2032 

2033 class _ConcatenateGenericAlias(typing._ConcatenateGenericAlias, _root=True): 

2034 # needed for checks in collections.abc.Callable to accept this class 

2035 __module__ = "typing" 

2036 

2037 def copy_with(self, params): 

2038 if isinstance(params[-1], (list, tuple)): 

2039 return (*params[:-1], *params[-1]) 

2040 if isinstance(params[-1], typing._ConcatenateGenericAlias): 

2041 params = (*params[:-1], *params[-1].__args__) 

2042 elif not (params[-1] is ... or isinstance(params[-1], ParamSpec)): 

2043 raise TypeError("The last parameter to Concatenate should be a " 

2044 "ParamSpec variable or ellipsis.") 

2045 return super(typing._ConcatenateGenericAlias, self).copy_with(params) 

2046 

2047 def __getitem__(self, args): 

2048 value = super().__getitem__(args) 

2049 if isinstance(value, tuple) and any(_is_unpack(t) for t in value): 

2050 return tuple(_unpack_args(*(n for n in value))) 

2051 return value 

2052 

2053 

2054# 3.9.2 

2055class _EllipsisDummy: ... 

2056 

2057 

2058# <=3.10 

2059def _create_concatenate_alias(origin, parameters): 

2060 if parameters[-1] is ... and sys.version_info < (3, 9, 2): 

2061 # Hack: arguments must be types, so replace the ellipsis with a dummy type.

2062 parameters = (*parameters[:-1], _EllipsisDummy) 

2063 if sys.version_info >= (3, 10, 3): 

2064 concatenate = _ConcatenateGenericAlias(origin, parameters, 

2065 _typevar_types=(TypeVar, ParamSpec), 

2066 _paramspec_tvars=True) 

2067 else: 

2068 concatenate = _ConcatenateGenericAlias(origin, parameters) 

2069 if parameters[-1] is not _EllipsisDummy: 

2070 return concatenate 

2071 # Remove dummy again 

2072 concatenate.__args__ = tuple(p if p is not _EllipsisDummy else ... 

2073 for p in concatenate.__args__) 

2074 if sys.version_info < (3, 10): 

2075 # backport needs __args__ adjustment only 

2076 return concatenate 

2077 concatenate.__parameters__ = tuple(p for p in concatenate.__parameters__ 

2078 if p is not _EllipsisDummy) 

2079 return concatenate 

2080 

2081 

2082# <=3.10 

2083@typing._tp_cache 

2084def _concatenate_getitem(self, parameters): 

2085 if parameters == (): 

2086 raise TypeError("Cannot take a Concatenate of no types.") 

2087 if not isinstance(parameters, tuple): 

2088 parameters = (parameters,) 

2089 if not (parameters[-1] is ... or isinstance(parameters[-1], ParamSpec)): 

2090 raise TypeError("The last parameter to Concatenate should be a " 

2091 "ParamSpec variable or ellipsis.") 

2092 msg = "Concatenate[arg, ...]: each arg must be a type." 

2093 parameters = (*(typing._type_check(p, msg) for p in parameters[:-1]), 

2094 parameters[-1]) 

2095 return _create_concatenate_alias(self, parameters) 

2096 

2097 

2098# 3.11+; Concatenate does not accept ellipsis in 3.10 

2099if sys.version_info >= (3, 11): 

2100 Concatenate = typing.Concatenate 

2101# <=3.10 

2102else: 

2103 @_ExtensionsSpecialForm 

2104 def Concatenate(self, parameters): 

2105 """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a 

2106 higher order function which adds, removes or transforms parameters of a 

2107 callable. 

2108 

2109 For example:: 

2110 

2111 Callable[Concatenate[int, P], int] 

2112 

2113 See PEP 612 for detailed information. 

2114 """ 

2115 return _concatenate_getitem(self, parameters) 

2116 

2117 
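# Illustrative sketch of why this backport exists on <=3.10 (the names P and R
# are examples only): unlike typing.Concatenate on 3.10, the form below also
# accepts a trailing ellipsis.
#
#     P = ParamSpec("P")
#     R = TypeVar("R")
#
#     Callable[Concatenate[int, P], R]   # prepend an int parameter
#     Concatenate[int, ...]              # accepted by this backport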

2118# 3.10+ 

2119if hasattr(typing, 'TypeGuard'): 

2120 TypeGuard = typing.TypeGuard 

2121# 3.9 

2122else: 

2123 @_ExtensionsSpecialForm 

2124 def TypeGuard(self, parameters): 

2125 """Special typing form used to annotate the return type of a user-defined 

2126 type guard function. ``TypeGuard`` only accepts a single type argument. 

2127 At runtime, functions marked this way should return a boolean. 

2128 

2129 ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static 

2130 type checkers to determine a more precise type of an expression within a 

2131 program's code flow. Usually type narrowing is done by analyzing 

2132 conditional code flow and applying the narrowing to a block of code. The 

2133 conditional expression here is sometimes referred to as a "type guard". 

2134 

2135 Sometimes it would be convenient to use a user-defined boolean function 

2136 as a type guard. Such a function should use ``TypeGuard[...]`` as its 

2137 return type to alert static type checkers to this intention. 

2138 

2139 Using ``-> TypeGuard`` tells the static type checker that for a given 

2140 function: 

2141 

2142 1. The return value is a boolean. 

2143 2. If the return value is ``True``, the type of its argument 

2144 is the type inside ``TypeGuard``. 

2145 

2146 For example:: 

2147 

2148 def is_str(val: Union[str, float]): 

2149 # "isinstance" type guard 

2150 if isinstance(val, str): 

2151 # Type of ``val`` is narrowed to ``str`` 

2152 ... 

2153 else: 

2154 # Else, type of ``val`` is narrowed to ``float``. 

2155 ... 

2156 

2157 Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower 

2158 form of ``TypeA`` (it can even be a wider form) and this may lead to 

2159 type-unsafe results. The main reason is to allow for things like 

2160 narrowing ``List[object]`` to ``List[str]`` even though the latter is not 

2161 a subtype of the former, since ``List`` is invariant. The responsibility of 

2162 writing type-safe type guards is left to the user. 

2163 

2164 ``TypeGuard`` also works with type variables. For more information, see 

2165 PEP 647 (User-Defined Type Guards). 

2166 """ 

2167 item = typing._type_check(parameters, f'{self} accepts only a single type.') 

2168 return typing._GenericAlias(self, (item,)) 

2169 

2170 
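# Illustrative sketch completing the docstring example above with an explicit
# ``TypeGuard`` return annotation (the names are examples only):
#
#     def is_str_list(val: List[object]) -> TypeGuard[List[str]]:
#         return all(isinstance(x, str) for x in val)
#
#     def handle(val: List[object]) -> None:
#         if is_str_list(val):
#             ...  # a static checker treats ``val`` as List[str] here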

2171# 3.13+ 

2172if hasattr(typing, 'TypeIs'): 

2173 TypeIs = typing.TypeIs 

2174# <=3.12 

2175else: 

2176 @_ExtensionsSpecialForm 

2177 def TypeIs(self, parameters): 

2178 """Special typing form used to annotate the return type of a user-defined 

2179 type narrower function. ``TypeIs`` only accepts a single type argument. 

2180 At runtime, functions marked this way should return a boolean. 

2181 

2182 ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static 

2183 type checkers to determine a more precise type of an expression within a 

2184 program's code flow. Usually type narrowing is done by analyzing 

2185 conditional code flow and applying the narrowing to a block of code. The 

2186 conditional expression here is sometimes referred to as a "type guard". 

2187 

2188 Sometimes it would be convenient to use a user-defined boolean function 

2189 as a type guard. Such a function should use ``TypeIs[...]`` as its 

2190 return type to alert static type checkers to this intention. 

2191 

2192 Using ``-> TypeIs`` tells the static type checker that for a given 

2193 function: 

2194 

2195 1. The return value is a boolean. 

2196 2. If the return value is ``True``, the type of its argument 

2197 is the intersection of the type inside ``TypeIs`` and the argument's 

2198 previously known type. 

2199 

2200 For example:: 

2201 

2202 def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]: 

2203 return hasattr(val, '__await__') 

2204 

2205 def f(val: Union[int, Awaitable[int]]) -> int: 

2206 if is_awaitable(val): 

2207 assert_type(val, Awaitable[int]) 

2208 else: 

2209 assert_type(val, int) 

2210 

2211 ``TypeIs`` also works with type variables. For more information, see 

2212 PEP 742 (Narrowing types with TypeIs). 

2213 """ 

2214 item = typing._type_check(parameters, f'{self} accepts only a single type.') 

2215 return typing._GenericAlias(self, (item,)) 

2216 

2217 

2218# 3.14+? 

2219if hasattr(typing, 'TypeForm'): 

2220 TypeForm = typing.TypeForm 

2221# <=3.13 

2222else: 

2223 class _TypeFormForm(_ExtensionsSpecialForm, _root=True): 

2224 # TypeForm(X) is equivalent to X but indicates to the type checker 

2225 # that the object is a TypeForm. 

2226 def __call__(self, obj, /): 

2227 return obj 

2228 

2229 @_TypeFormForm 

2230 def TypeForm(self, parameters): 

2231 """A special form representing the value that results from the evaluation 

2232 of a type expression. This value encodes the information supplied in the 

2233 type expression, and it represents the type described by that type expression. 

2234 

2235 When used in a type expression, TypeForm describes a set of type form objects. 

2236 It accepts a single type argument, which must be a valid type expression. 

2237 ``TypeForm[T]`` describes the set of all type form objects that represent 

2238 the type T or types that are assignable to T. 

2239 

2240 Usage: 

2241 

2242 def cast[T](typ: TypeForm[T], value: Any) -> T: ... 

2243 

2244 reveal_type(cast(int, "x")) # int 

2245 

2246 See PEP 747 for more information. 

2247 """ 

2248 item = typing._type_check(parameters, f'{self} accepts only a single type.') 

2249 return typing._GenericAlias(self, (item,)) 

2250 

2251 

2252 

2253 

2254if hasattr(typing, "LiteralString"): # 3.11+ 

2255 LiteralString = typing.LiteralString 

2256else: 

2257 @_SpecialForm 

2258 def LiteralString(self, params): 

2259 """Represents an arbitrary literal string. 

2260 

2261 Example:: 

2262 

2263 from typing_extensions import LiteralString 

2264 

2265 def query(sql: LiteralString) -> ...: 

2266 ... 

2267 

2268 query("SELECT * FROM table") # ok 

2269 query(f"SELECT * FROM {input()}") # not ok 

2270 

2271 See PEP 675 for details. 

2272 

2273 """ 

2274 raise TypeError(f"{self} is not subscriptable") 

2275 

2276 

2277if hasattr(typing, "Self"): # 3.11+ 

2278 Self = typing.Self 

2279else: 

2280 @_SpecialForm 

2281 def Self(self, params): 

2282 """Used to spell the type of "self" in classes. 

2283 

2284 Example:: 

2285 

2286 from typing import Self 

2287 

2288 class ReturnsSelf: 

2289 def parse(self, data: bytes) -> Self: 

2290 ... 

2291 return self 

2292 

2293 """ 

2294 

2295 raise TypeError(f"{self} is not subscriptable") 

2296 

2297 

2298if hasattr(typing, "Never"): # 3.11+ 

2299 Never = typing.Never 

2300else: 

2301 @_SpecialForm 

2302 def Never(self, params): 

2303 """The bottom type, a type that has no members. 

2304 

2305 This can be used to define a function that should never be 

2306 called, or a function that never returns:: 

2307 

2308 from typing_extensions import Never 

2309 

2310 def never_call_me(arg: Never) -> None: 

2311 pass 

2312 

2313 def int_or_str(arg: int | str) -> None: 

2314 never_call_me(arg) # type checker error 

2315 match arg: 

2316 case int(): 

2317 print("It's an int") 

2318 case str(): 

2319 print("It's a str") 

2320 case _: 

2321 never_call_me(arg) # ok, arg is of type Never 

2322 

2323 """ 

2324 

2325 raise TypeError(f"{self} is not subscriptable") 

2326 

2327 

2328if hasattr(typing, 'Required'): # 3.11+ 

2329 Required = typing.Required 

2330 NotRequired = typing.NotRequired 

2331else: # <=3.10 

2332 @_ExtensionsSpecialForm 

2333 def Required(self, parameters): 

2334 """A special typing construct to mark a key of a total=False TypedDict 

2335 as required. For example: 

2336 

2337 class Movie(TypedDict, total=False): 

2338 title: Required[str] 

2339 year: int 

2340 

2341 m = Movie( 

2342 title='The Matrix', # typechecker error if key is omitted 

2343 year=1999, 

2344 ) 

2345 

2346 There is no runtime checking that a required key is actually provided 

2347 when instantiating a related TypedDict. 

2348 """ 

2349 item = typing._type_check(parameters, f'{self._name} accepts only a single type.') 

2350 return typing._GenericAlias(self, (item,)) 

2351 

2352 @_ExtensionsSpecialForm 

2353 def NotRequired(self, parameters): 

2354 """A special typing construct to mark a key of a TypedDict as 

2355 potentially missing. For example: 

2356 

2357 class Movie(TypedDict): 

2358 title: str 

2359 year: NotRequired[int] 

2360 

2361 m = Movie( 

2362 title='The Matrix', # typechecker error if key is omitted 

2363 year=1999, 

2364 ) 

2365 """ 

2366 item = typing._type_check(parameters, f'{self._name} accepts only a single type.') 

2367 return typing._GenericAlias(self, (item,)) 

2368 

2369 

2370if hasattr(typing, 'ReadOnly'): 

2371 ReadOnly = typing.ReadOnly 

2372else: # <=3.12 

2373 @_ExtensionsSpecialForm 

2374 def ReadOnly(self, parameters): 

2375 """A special typing construct to mark an item of a TypedDict as read-only. 

2376 

2377 For example: 

2378 

2379 class Movie(TypedDict): 

2380 title: ReadOnly[str] 

2381 year: int 

2382 

2383 def mutate_movie(m: Movie) -> None: 

2384 m["year"] = 1992 # allowed 

2385 m["title"] = "The Matrix" # typechecker error 

2386 

2387 There is no runtime checking for this property. 

2388 """ 

2389 item = typing._type_check(parameters, f'{self._name} accepts only a single type.') 

2390 return typing._GenericAlias(self, (item,)) 

2391 

2392 

2393_UNPACK_DOC = """\ 

2394Type unpack operator. 

2395 

2396The type unpack operator takes the child types from some container type, 

2397such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For 

2398example: 

2399 

2400 # For some generic class `Foo`: 

2401 Foo[Unpack[tuple[int, str]]] # Equivalent to Foo[int, str] 

2402 

2403 Ts = TypeVarTuple('Ts') 

2404 # Specifies that `Bar` is generic in an arbitrary number of types. 

2405 # (Think of `Ts` as a tuple of an arbitrary number of individual 

2406 # `TypeVar`s, which the `Unpack` is 'pulling out' directly into the 

2407 # `Generic[]`.) 

2408 class Bar(Generic[Unpack[Ts]]): ... 

2409 Bar[int] # Valid 

2410 Bar[int, str] # Also valid 

2411 

2412From Python 3.11, this can also be done using the `*` operator: 

2413 

2414 Foo[*tuple[int, str]] 

2415 class Bar(Generic[*Ts]): ... 

2416 

2417The operator can also be used along with a `TypedDict` to annotate 

2418`**kwargs` in a function signature. For instance: 

2419 

2420 class Movie(TypedDict): 

2421 name: str 

2422 year: int 

2423 

2424 # This function expects two keyword arguments - *name* of type `str` and 

2425 # *year* of type `int`. 

2426 def foo(**kwargs: Unpack[Movie]): ... 

2427 

2428Note that there is only some runtime checking of this operator. Not 

2429everything the runtime allows may be accepted by static type checkers. 

2430 

2431For more information, see PEP 646 and PEP 692. 

2432""" 

2433 

2434 

2435if sys.version_info >= (3, 12): # PEP 692 changed the repr of Unpack[] 

2436 Unpack = typing.Unpack 

2437 

2438 def _is_unpack(obj): 

2439 return get_origin(obj) is Unpack 

2440 

2441else: # <=3.11 

2442 class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True): 

2443 def __init__(self, getitem): 

2444 super().__init__(getitem) 

2445 self.__doc__ = _UNPACK_DOC 

2446 

2447 class _UnpackAlias(typing._GenericAlias, _root=True): 

2448 if sys.version_info < (3, 11): 

2449 # needed for compatibility with Generic[Unpack[Ts]] 

2450 __class__ = typing.TypeVar 

2451 

2452 @property 

2453 def __typing_unpacked_tuple_args__(self): 

2454 assert self.__origin__ is Unpack 

2455 assert len(self.__args__) == 1 

2456 arg, = self.__args__ 

2457 if isinstance(arg, (typing._GenericAlias, _types.GenericAlias)): 

2458 if arg.__origin__ is not tuple: 

2459 raise TypeError("Unpack[...] must be used with a tuple type") 

2460 return arg.__args__ 

2461 return None 

2462 

2463 @property 

2464 def __typing_is_unpacked_typevartuple__(self): 

2465 assert self.__origin__ is Unpack 

2466 assert len(self.__args__) == 1 

2467 return isinstance(self.__args__[0], TypeVarTuple) 

2468 

2469 def __getitem__(self, args): 

2470 if self.__typing_is_unpacked_typevartuple__: 

2471 return args 

2472 return super().__getitem__(args) 

2473 

2474 @_UnpackSpecialForm 

2475 def Unpack(self, parameters): 

2476 item = typing._type_check(parameters, f'{self._name} accepts only a single type.') 

2477 return _UnpackAlias(self, (item,)) 

2478 

2479 def _is_unpack(obj): 

2480 return isinstance(obj, _UnpackAlias) 

2481 

2482 

2483def _unpack_args(*args): 

2484 newargs = [] 

2485 for arg in args: 

2486 subargs = getattr(arg, '__typing_unpacked_tuple_args__', None) 

2487 if subargs is not None and (not (subargs and subargs[-1] is ...)): 

2488 newargs.extend(subargs) 

2489 else: 

2490 newargs.append(arg) 

2491 return newargs 

2492 

2493 
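# Illustrative sketch of the helper above (values are examples only):
#
#     _unpack_args(int, Unpack[Tuple[str, bytes]])
#     # -> [int, str, bytes]
#
#     _unpack_args(Unpack[Tuple[int, ...]])
#     # -> [Unpack[Tuple[int, ...]]]   (unbounded tuples stay packed)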

2494if _PEP_696_IMPLEMENTED: 

2495 from typing import TypeVarTuple 

2496 

2497elif hasattr(typing, "TypeVarTuple"): # 3.11+ 

2498 

2499 # Add default parameter - PEP 696 

2500 class TypeVarTuple(metaclass=_TypeVarLikeMeta): 

2501 """Type variable tuple.""" 

2502 

2503 _backported_typevarlike = typing.TypeVarTuple 

2504 

2505 def __new__(cls, name, *, default=NoDefault): 

2506 tvt = typing.TypeVarTuple(name) 

2507 _set_default(tvt, default) 

2508 _set_module(tvt) 

2509 

2510 def _typevartuple_prepare_subst(alias, args): 

2511 params = alias.__parameters__ 

2512 typevartuple_index = params.index(tvt) 

2513 for param in params[typevartuple_index + 1:]: 

2514 if isinstance(param, TypeVarTuple): 

2515 raise TypeError( 

2516 f"More than one TypeVarTuple parameter in {alias}" 

2517 ) 

2518 

2519 alen = len(args) 

2520 plen = len(params) 

2521 left = typevartuple_index 

2522 right = plen - typevartuple_index - 1 

2523 var_tuple_index = None 

2524 fillarg = None 

2525 for k, arg in enumerate(args): 

2526 if not isinstance(arg, type): 

2527 subargs = getattr(arg, '__typing_unpacked_tuple_args__', None) 

2528 if subargs and len(subargs) == 2 and subargs[-1] is ...: 

2529 if var_tuple_index is not None: 

2530 raise TypeError( 

2531 "More than one unpacked " 

2532 "arbitrary-length tuple argument" 

2533 ) 

2534 var_tuple_index = k 

2535 fillarg = subargs[0] 

2536 if var_tuple_index is not None: 

2537 left = min(left, var_tuple_index) 

2538 right = min(right, alen - var_tuple_index - 1) 

2539 elif left + right > alen: 

2540 raise TypeError(f"Too few arguments for {alias};" 

2541 f" actual {alen}, expected at least {plen - 1}") 

2542 if left == alen - right and tvt.has_default(): 

2543 replacement = _unpack_args(tvt.__default__) 

2544 else: 

2545 replacement = args[left: alen - right] 

2546 

2547 return ( 

2548 *args[:left], 

2549 *([fillarg] * (typevartuple_index - left)), 

2550 replacement, 

2551 *([fillarg] * (plen - right - left - typevartuple_index - 1)), 

2552 *args[alen - right:], 

2553 ) 

2554 

2555 tvt.__typing_prepare_subst__ = _typevartuple_prepare_subst 

2556 return tvt 

2557 

2558 def __init_subclass__(self, *args, **kwds): 

2559 raise TypeError("Cannot subclass special typing classes") 

2560 

2561else: # <=3.10 

2562 class TypeVarTuple(_DefaultMixin): 

2563 """Type variable tuple. 

2564 

2565 Usage:: 

2566 

2567 Ts = TypeVarTuple('Ts') 

2568 

2569 In the same way that a normal type variable is a stand-in for a single 

2570 type such as ``int``, a type variable *tuple* is a stand-in for a *tuple* 

2571 type such as ``Tuple[int, str]``. 

2572 

2573 Type variable tuples can be used in ``Generic`` declarations. 

2574 Consider the following example:: 

2575 

2576 class Array(Generic[*Ts]): ... 

2577 

2578 The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``, 

2579 where ``T1`` and ``T2`` are type variables. To use these type variables 

2580 as type parameters of ``Array``, we must *unpack* the type variable tuple using 

2581 the star operator: ``*Ts``. The signature of ``Array`` then behaves 

2582 as if we had simply written ``class Array(Generic[T1, T2]): ...``. 

2583 In contrast to ``Generic[T1, T2]``, however, ``Generic[*Ts]`` allows

2584 us to parameterise the class with an *arbitrary* number of type parameters. 

2585 

2586 Type variable tuples can be used anywhere a normal ``TypeVar`` can. 

2587 This includes class definitions, as shown above, as well as function 

2588 signatures and variable annotations:: 

2589 

2590 class Array(Generic[*Ts]): 

2591 

2592 def __init__(self, shape: Tuple[*Ts]): 

2593 self._shape: Tuple[*Ts] = shape 

2594 

2595 def get_shape(self) -> Tuple[*Ts]: 

2596 return self._shape 

2597 

2598 shape = (Height(480), Width(640)) 

2599 x: Array[Height, Width] = Array(shape) 

2600 y = abs(x) # Inferred type is Array[Height, Width] 

2601 z = x + x # ... is Array[Height, Width] 

2602 x.get_shape() # ... is tuple[Height, Width] 

2603 

2604 """ 

2605 

2606 # Trick Generic __parameters__. 

2607 __class__ = typing.TypeVar 

2608 

2609 def __iter__(self): 

2610 yield self.__unpacked__ 

2611 

2612 def __init__(self, name, *, default=NoDefault): 

2613 self.__name__ = name 

2614 _DefaultMixin.__init__(self, default) 

2615 

2616 # for pickling: 

2617 def_mod = _caller() 

2618 if def_mod != 'typing_extensions': 

2619 self.__module__ = def_mod 

2620 

2621 self.__unpacked__ = Unpack[self] 

2622 

2623 def __repr__(self): 

2624 return self.__name__ 

2625 

2626 def __hash__(self): 

2627 return object.__hash__(self) 

2628 

2629 def __eq__(self, other): 

2630 return self is other 

2631 

2632 def __reduce__(self): 

2633 return self.__name__ 

2634 

2635 def __init_subclass__(self, *args, **kwds): 

2636 if '_root' not in kwds: 

2637 raise TypeError("Cannot subclass special typing classes") 

2638 

2639 
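# Illustrative sketch (assumes the backports above; "Ts" is an example name):
#
#     Ts = TypeVarTuple("Ts", default=Unpack[Tuple[int, str]])
#     Ts.has_default()   # True
#     Ts.__default__     # Unpack[Tuple[int, str]]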

2640if hasattr(typing, "reveal_type"): # 3.11+ 

2641 reveal_type = typing.reveal_type 

2642else: # <=3.10 

2643 def reveal_type(obj: T, /) -> T: 

2644 """Reveal the inferred type of a variable. 

2645 

2646 When a static type checker encounters a call to ``reveal_type()``, 

2647 it will emit the inferred type of the argument:: 

2648 

2649 x: int = 1 

2650 reveal_type(x) 

2651 

2652 Running a static type checker (e.g., ``mypy``) on this example 

2653 will produce output similar to 'Revealed type is "builtins.int"'. 

2654 

2655 At runtime, the function prints the runtime type of the 

2656 argument and returns it unchanged. 

2657 

2658 """ 

2659 print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr) 

2660 return obj 

2661 

2662 

2663if hasattr(typing, "_ASSERT_NEVER_REPR_MAX_LENGTH"): # 3.11+ 

2664 _ASSERT_NEVER_REPR_MAX_LENGTH = typing._ASSERT_NEVER_REPR_MAX_LENGTH 

2665else: # <=3.10 

2666 _ASSERT_NEVER_REPR_MAX_LENGTH = 100 

2667 

2668 

2669if hasattr(typing, "assert_never"): # 3.11+ 

2670 assert_never = typing.assert_never 

2671else: # <=3.10 

2672 def assert_never(arg: Never, /) -> Never: 

2673 """Assert to the type checker that a line of code is unreachable. 

2674 

2675 Example:: 

2676 

2677 def int_or_str(arg: int | str) -> None: 

2678 match arg: 

2679 case int(): 

2680 print("It's an int") 

2681 case str(): 

2682 print("It's a str") 

2683 case _: 

2684 assert_never(arg) 

2685 

2686 If a type checker finds that a call to assert_never() is 

2687 reachable, it will emit an error. 

2688 

2689 At runtime, this throws an exception when called. 

2690 

2691 """ 

2692 value = repr(arg) 

2693 if len(value) > _ASSERT_NEVER_REPR_MAX_LENGTH: 

2694 value = value[:_ASSERT_NEVER_REPR_MAX_LENGTH] + '...' 

2695 raise AssertionError(f"Expected code to be unreachable, but got: {value}") 

2696 

2697 

2698if sys.version_info >= (3, 12): # 3.12+ 

2699 # dataclass_transform exists in 3.11 but lacks the frozen_default parameter 

2700 dataclass_transform = typing.dataclass_transform 

2701else: # <=3.11 

2702 def dataclass_transform( 

2703 *, 

2704 eq_default: bool = True, 

2705 order_default: bool = False, 

2706 kw_only_default: bool = False, 

2707 frozen_default: bool = False, 

2708 field_specifiers: typing.Tuple[ 

2709 typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]], 

2710 ... 

2711 ] = (), 

2712 **kwargs: typing.Any, 

2713 ) -> typing.Callable[[T], T]: 

2714 """Decorator that marks a function, class, or metaclass as providing 

2715 dataclass-like behavior. 

2716 

2717 Example: 

2718 

2719 from typing_extensions import dataclass_transform 

2720 

2721 _T = TypeVar("_T") 

2722 

2723 # Used on a decorator function 

2724 @dataclass_transform() 

2725 def create_model(cls: type[_T]) -> type[_T]: 

2726 ... 

2727 return cls 

2728 

2729 @create_model 

2730 class CustomerModel: 

2731 id: int 

2732 name: str 

2733 

2734 # Used on a base class 

2735 @dataclass_transform() 

2736 class ModelBase: ... 

2737 

2738 class CustomerModel(ModelBase): 

2739 id: int 

2740 name: str 

2741 

2742 # Used on a metaclass 

2743 @dataclass_transform() 

2744 class ModelMeta(type): ... 

2745 

2746 class ModelBase(metaclass=ModelMeta): ... 

2747 

2748 class CustomerModel(ModelBase): 

2749 id: int 

2750 name: str 

2751 

2752 Each of the ``CustomerModel`` classes defined in this example will now 

2753 behave similarly to a dataclass created with the ``@dataclasses.dataclass`` 

2754 decorator. For example, the type checker will synthesize an ``__init__`` 

2755 method. 

2756 

2757 The arguments to this decorator can be used to customize this behavior: 

2758 - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be 

2759 True or False if it is omitted by the caller. 

2760 - ``order_default`` indicates whether the ``order`` parameter is 

2761 assumed to be True or False if it is omitted by the caller. 

2762 - ``kw_only_default`` indicates whether the ``kw_only`` parameter is 

2763 assumed to be True or False if it is omitted by the caller. 

2764 - ``frozen_default`` indicates whether the ``frozen`` parameter is 

2765 assumed to be True or False if it is omitted by the caller. 

2766 - ``field_specifiers`` specifies a static list of supported classes 

2767 or functions that describe fields, similar to ``dataclasses.field()``. 

2768 

2769 At runtime, this decorator records its arguments in the 

2770 ``__dataclass_transform__`` attribute on the decorated object. 

2771 

2772 See PEP 681 for details. 

2773 

2774 """ 

2775 def decorator(cls_or_fn): 

2776 cls_or_fn.__dataclass_transform__ = { 

2777 "eq_default": eq_default, 

2778 "order_default": order_default, 

2779 "kw_only_default": kw_only_default, 

2780 "frozen_default": frozen_default, 

2781 "field_specifiers": field_specifiers, 

2782 "kwargs": kwargs, 

2783 } 

2784 return cls_or_fn 

2785 return decorator 

2786 

2787 
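# Illustrative sketch of the attribute recorded by the backport above
# ("create_model" is an example name):
#
#     @dataclass_transform(kw_only_default=True)
#     def create_model(cls):
#         return cls
#
#     create_model.__dataclass_transform__["kw_only_default"]   # True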

2788if hasattr(typing, "override"): # 3.12+ 

2789 override = typing.override 

2790else: # <=3.11 

2791 _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any]) 

2792 

2793 def override(arg: _F, /) -> _F: 

2794 """Indicate that a method is intended to override a method in a base class. 

2795 

2796 Usage: 

2797 

2798 class Base: 

2799 def method(self) -> None: 

2800 pass 

2801 

2802 class Child(Base): 

2803 @override 

2804 def method(self) -> None: 

2805 super().method() 

2806 

2807 When this decorator is applied to a method, the type checker will 

2808 validate that it overrides a method with the same name on a base class. 

2809 This helps prevent bugs that may occur when a base class is changed 

2810 without an equivalent change to a child class. 

2811 

2812 There is no runtime checking of these properties. The decorator 

2813 sets the ``__override__`` attribute to ``True`` on the decorated object 

2814 to allow runtime introspection. 

2815 

2816 See PEP 698 for details. 

2817 

2818 """ 

2819 try: 

2820 arg.__override__ = True 

2821 except (AttributeError, TypeError): 

2822 # Skip the attribute silently if it is not writable. 

2823 # AttributeError happens if the object has __slots__ or a 

2824 # read-only property, TypeError if it's a builtin class. 

2825 pass 

2826 return arg 

2827 

2828 
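# Illustrative sketch of the backport's runtime effect (names are examples):
#
#     class Base:
#         def method(self) -> None: ...
#
#     class Child(Base):
#         @override
#         def method(self) -> None: ...
#
#     Child.method.__override__   # True (set on a best-effort basis)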

2829# Python 3.13.3+ contains a fix for the wrapped __new__ 

2830if sys.version_info >= (3, 13, 3): 

2831 deprecated = warnings.deprecated 

2832else: 

2833 _T = typing.TypeVar("_T") 

2834 

2835 class deprecated: 

2836 """Indicate that a class, function or overload is deprecated. 

2837 

2838 When this decorator is applied to an object, the type checker 

2839 will generate a diagnostic on usage of the deprecated object. 

2840 

2841 Usage: 

2842 

2843 @deprecated("Use B instead") 

2844 class A: 

2845 pass 

2846 

2847 @deprecated("Use g instead") 

2848 def f(): 

2849 pass 

2850 

2851 @overload 

2852 @deprecated("int support is deprecated") 

2853 def g(x: int) -> int: ... 

2854 @overload 

2855 def g(x: str) -> int: ... 

2856 

2857 The warning specified by *category* will be emitted at runtime 

2858 on use of deprecated objects. For functions, that happens on calls; 

2859 for classes, on instantiation and on creation of subclasses. 

2860 If the *category* is ``None``, no warning is emitted at runtime. 

2861 The *stacklevel* determines where the 

2862 warning is emitted. If it is ``1`` (the default), the warning 

2863 is emitted at the direct caller of the deprecated object; if it 

2864 is higher, it is emitted further up the stack. 

2865 Static type checker behavior is not affected by the *category* 

2866 and *stacklevel* arguments. 

2867 

2868 The deprecation message passed to the decorator is saved in the 

2869 ``__deprecated__`` attribute on the decorated object. 

2870 If applied to an overload, the decorator 

2871 must be after the ``@overload`` decorator for the attribute to 

2872 exist on the overload as returned by ``get_overloads()``. 

2873 

2874 See PEP 702 for details. 

2875 

2876 """ 

2877 def __init__( 

2878 self, 

2879 message: str, 

2880 /, 

2881 *, 

2882 category: typing.Optional[typing.Type[Warning]] = DeprecationWarning, 

2883 stacklevel: int = 1, 

2884 ) -> None: 

2885 if not isinstance(message, str): 

2886 raise TypeError( 

2887 "Expected an object of type str for 'message', not " 

2888 f"{type(message).__name__!r}" 

2889 ) 

2890 self.message = message 

2891 self.category = category 

2892 self.stacklevel = stacklevel 

2893 

2894 def __call__(self, arg: _T, /) -> _T: 

2895 # Make sure the inner functions created below don't 

2896 # retain a reference to self. 

2897 msg = self.message 

2898 category = self.category 

2899 stacklevel = self.stacklevel 

2900 if category is None: 

2901 arg.__deprecated__ = msg 

2902 return arg 

2903 elif isinstance(arg, type): 

2904 import functools 

2905 from types import MethodType 

2906 

2907 original_new = arg.__new__ 

2908 

2909 @functools.wraps(original_new) 

2910 def __new__(cls, /, *args, **kwargs): 

2911 if cls is arg: 

2912 warnings.warn(msg, category=category, stacklevel=stacklevel + 1) 

2913 if original_new is not object.__new__: 

2914 return original_new(cls, *args, **kwargs) 

2915 # Mirrors a similar check in object.__new__. 

2916 elif cls.__init__ is object.__init__ and (args or kwargs): 

2917 raise TypeError(f"{cls.__name__}() takes no arguments") 

2918 else: 

2919 return original_new(cls) 

2920 

2921 arg.__new__ = staticmethod(__new__) 

2922 

2923 original_init_subclass = arg.__init_subclass__ 

2924 # We need slightly different behavior if __init_subclass__ 

2925 # is a bound method (likely if it was implemented in Python) 

2926 if isinstance(original_init_subclass, MethodType): 

2927 original_init_subclass = original_init_subclass.__func__ 

2928 

2929 @functools.wraps(original_init_subclass) 

2930 def __init_subclass__(*args, **kwargs): 

2931 warnings.warn(msg, category=category, stacklevel=stacklevel + 1) 

2932 return original_init_subclass(*args, **kwargs) 

2933 

2934 arg.__init_subclass__ = classmethod(__init_subclass__) 

2935 # Or otherwise, which likely means it's a builtin such as 

2936 # object's implementation of __init_subclass__. 

2937 else: 

2938 @functools.wraps(original_init_subclass) 

2939 def __init_subclass__(*args, **kwargs): 

2940 warnings.warn(msg, category=category, stacklevel=stacklevel + 1) 

2941 return original_init_subclass(*args, **kwargs) 

2942 

2943 arg.__init_subclass__ = __init_subclass__ 

2944 

2945 arg.__deprecated__ = __new__.__deprecated__ = msg 

2946 __init_subclass__.__deprecated__ = msg 

2947 return arg 

2948 elif callable(arg): 

2949 import asyncio.coroutines 

2950 import functools 

2951 import inspect 

2952 

2953 @functools.wraps(arg) 

2954 def wrapper(*args, **kwargs): 

2955 warnings.warn(msg, category=category, stacklevel=stacklevel + 1) 

2956 return arg(*args, **kwargs) 

2957 

2958 if asyncio.coroutines.iscoroutinefunction(arg): 

2959 if sys.version_info >= (3, 12): 

2960 wrapper = inspect.markcoroutinefunction(wrapper) 

2961 else: 

2962 wrapper._is_coroutine = asyncio.coroutines._is_coroutine 

2963 

2964 arg.__deprecated__ = wrapper.__deprecated__ = msg 

2965 return wrapper 

2966 else: 

2967 raise TypeError( 

2968 "@deprecated decorator with non-None category must be applied to " 

2969 f"a class or callable, not {arg!r}" 

2970 ) 

2971 
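# Illustrative sketch of the backport's runtime behaviour ("old"/"new" are
# example names):
#
#     @deprecated("Use new() instead")
#     def old() -> None: ...
#
#     old.__deprecated__   # 'Use new() instead'
#     old()                # emits DeprecationWarning at the caller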

2972if sys.version_info < (3, 10): 

2973 def _is_param_expr(arg): 

2974 return arg is ... or isinstance( 

2975 arg, (tuple, list, ParamSpec, _ConcatenateGenericAlias) 

2976 ) 

2977else: 

2978 def _is_param_expr(arg): 

2979 return arg is ... or isinstance( 

2980 arg, 

2981 ( 

2982 tuple, 

2983 list, 

2984 ParamSpec, 

2985 _ConcatenateGenericAlias, 

2986 typing._ConcatenateGenericAlias, 

2987 ), 

2988 ) 

2989 

2990 

2991# We have to do some monkey patching to deal with the dual nature of 

2992# Unpack/TypeVarTuple: 

2993# - We want Unpack to be a kind of TypeVar so it gets accepted in 

2994# Generic[Unpack[Ts]] 

2995# - We want it to *not* be treated as a TypeVar for the purposes of 

2996# counting generic parameters, so that when we subscript a generic, 

2997# the runtime doesn't try to substitute the Unpack with the subscripted type. 

2998if not hasattr(typing, "TypeVarTuple"): 

2999 def _check_generic(cls, parameters, elen=_marker): 

3000 """Check correct count for parameters of a generic cls (internal helper). 

3001 

3002 This gives a nice error message in case of count mismatch. 

3003 """ 

3004 # If substituting a single ParamSpec with multiple arguments 

3005 # we do not check the count 

3006 if (inspect.isclass(cls) and issubclass(cls, typing.Generic) 

3007 and len(cls.__parameters__) == 1 

3008 and isinstance(cls.__parameters__[0], ParamSpec) 

3009 and parameters 

3010 and not _is_param_expr(parameters[0]) 

3011 ): 

3012 # typing.Generic rewraps the arguments itself in this case; we cannot
3012 # modify the caller's parameters here, so just skip the count check

3013 return 

3014 

3015 if not elen: 

3016 raise TypeError(f"{cls} is not a generic class") 

3017 if elen is _marker: 

3018 if not hasattr(cls, "__parameters__") or not cls.__parameters__: 

3019 raise TypeError(f"{cls} is not a generic class") 

3020 elen = len(cls.__parameters__) 

3021 alen = len(parameters) 

3022 if alen != elen: 

3023 expect_val = elen 

3024 if hasattr(cls, "__parameters__"): 

3025 parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] 

3026 num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters) 

3027 if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples): 

3028 return 

3029 

3030 # deal with TypeVarLike defaults 

3031 # required TypeVarLikes cannot appear after a defaulted one. 

3032 if alen < elen: 

3033 # since we validate TypeVarLike default in _collect_type_vars 

3034 # or _collect_parameters we can safely check parameters[alen] 

3035 if ( 

3036 getattr(parameters[alen], '__default__', NoDefault) 

3037 is not NoDefault 

3038 ): 

3039 return 

3040 

3041 num_default_tv = sum(getattr(p, '__default__', NoDefault) 

3042 is not NoDefault for p in parameters) 

3043 

3044 elen -= num_default_tv 

3045 

3046 expect_val = f"at least {elen}" 

3047 

3048 things = "arguments" if sys.version_info >= (3, 10) else "parameters" 

3049 raise TypeError(f"Too {'many' if alen > elen else 'few'} {things}" 

3050 f" for {cls}; actual {alen}, expected {expect_val}") 

3051else: 

3052 # Python 3.11+ 

3053 

3054 def _check_generic(cls, parameters, elen): 

3055 """Check correct count for parameters of a generic cls (internal helper). 

3056 

3057 This gives a nice error message in case of count mismatch. 

3058 """ 

3059 if not elen: 

3060 raise TypeError(f"{cls} is not a generic class") 

3061 alen = len(parameters) 

3062 if alen != elen: 

3063 expect_val = elen 

3064 if hasattr(cls, "__parameters__"): 

3065 parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] 

3066 

3067 # deal with TypeVarLike defaults 

3068 # required TypeVarLikes cannot appear after a defaulted one. 

3069 if alen < elen: 

3070 # since we validate TypeVarLike default in _collect_type_vars 

3071 # or _collect_parameters we can safely check parameters[alen] 

3072 if ( 

3073 getattr(parameters[alen], '__default__', NoDefault) 

3074 is not NoDefault 

3075 ): 

3076 return 

3077 

3078 num_default_tv = sum(getattr(p, '__default__', NoDefault) 

3079 is not NoDefault for p in parameters) 

3080 

3081 elen -= num_default_tv 

3082 

3083 expect_val = f"at least {elen}" 

3084 

3085 raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments" 

3086 f" for {cls}; actual {alen}, expected {expect_val}") 

3087 

3088if not _PEP_696_IMPLEMENTED: 

3089 typing._check_generic = _check_generic 

3090 

3091 
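# Illustrative sketch of what the patched count check permits (names are
# examples only):
#
#     T = TypeVar("T")
#     U = TypeVar("U", default=int)
#
#     class Pair(Generic[T, U]): ...
#
#     Pair[str]          # accepted: U falls back to its default
#     Pair[str, bytes]   # accepted: explicit argument overrides the default
#     # Pair[str, bytes, float] still raises TypeError (too many arguments)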

3092def _has_generic_or_protocol_as_origin() -> bool: 

3093 try: 

3094 frame = sys._getframe(2) 

3095 # - Catch AttributeError: not all Python implementations have sys._getframe() 

3096 # - Catch ValueError: maybe we're called from an unexpected module 

3097 # and the call stack isn't deep enough 

3098 except (AttributeError, ValueError): 

3099 return False # err on the side of leniency 

3100 else: 

3101 # If we somehow get invoked from outside typing.py, 

3102 # also err on the side of leniency 

3103 if frame.f_globals.get("__name__") != "typing": 

3104 return False 

3105 origin = frame.f_locals.get("origin") 

3106 # Cannot use "in" because origin may be an object with a buggy __eq__ that 

3107 # throws an error. 

3108 return origin is typing.Generic or origin is Protocol or origin is typing.Protocol 

3109 

3110 

3111_TYPEVARTUPLE_TYPES = {TypeVarTuple, getattr(typing, "TypeVarTuple", None)} 

3112 

3113 

3114def _is_unpacked_typevartuple(x) -> bool: 

3115 if get_origin(x) is not Unpack: 

3116 return False 

3117 args = get_args(x) 

3118 return ( 

3119 bool(args) 

3120 and len(args) == 1 

3121 and type(args[0]) in _TYPEVARTUPLE_TYPES 

3122 ) 

3123 

3124 

3125# Python 3.11+ _collect_type_vars was renamed to _collect_parameters 

3126if hasattr(typing, '_collect_type_vars'): 

3127 def _collect_type_vars(types, typevar_types=None): 

3128 """Collect all type variable contained in types in order of 

3129 first appearance (lexicographic order). For example:: 

3130 

3131 _collect_type_vars((T, List[S, T])) == (T, S) 

3132 """ 

3133 if typevar_types is None: 

3134 typevar_types = typing.TypeVar 

3135 tvars = [] 

3136 

3137 # A required TypeVarLike cannot appear after a TypeVarLike with a default 

3138 # if it was a direct call to `Generic[]` or `Protocol[]` 

3139 enforce_default_ordering = _has_generic_or_protocol_as_origin() 

3140 default_encountered = False 

3141 

3142 # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple 

3143 type_var_tuple_encountered = False 

3144 

3145 for t in types: 

3146 if _is_unpacked_typevartuple(t): 

3147 type_var_tuple_encountered = True 

3148 elif ( 

3149 isinstance(t, typevar_types) and not isinstance(t, _UnpackAlias) 

3150 and t not in tvars 

3151 ): 

3152 if enforce_default_ordering: 

3153 has_default = getattr(t, '__default__', NoDefault) is not NoDefault 

3154 if has_default: 

3155 if type_var_tuple_encountered: 

3156 raise TypeError('Type parameter with a default' 

3157 ' follows TypeVarTuple') 

3158 default_encountered = True 

3159 elif default_encountered: 

3160 raise TypeError(f'Type parameter {t!r} without a default' 

3161 ' follows type parameter with a default') 

3162 

3163 tvars.append(t) 

3164 if _should_collect_from_parameters(t): 

3165 tvars.extend([t for t in t.__parameters__ if t not in tvars]) 

3166 elif isinstance(t, tuple): 

3167 # Collect nested type_vars 

3168 # tuple wrapped by _prepare_paramspec_params(cls, params) 

3169 for x in t: 

3170 for collected in _collect_type_vars([x]): 

3171 if collected not in tvars: 

3172 tvars.append(collected) 

3173 return tuple(tvars) 

3174 

3175 typing._collect_type_vars = _collect_type_vars 

3176else: 

3177 def _collect_parameters(args): 

3178 """Collect all type variables and parameter specifications in args 

3179 in order of first appearance (lexicographic order). 

3180 

3181 For example:: 

3182 

3183 assert _collect_parameters((T, Callable[P, T])) == (T, P) 

3184 """ 

3185 parameters = [] 

3186 

3187 # A required TypeVarLike cannot appear after a TypeVarLike with default 

3188 # if it was a direct call to `Generic[]` or `Protocol[]` 

3189 enforce_default_ordering = _has_generic_or_protocol_as_origin() 

3190 default_encountered = False 

3191 

3192 # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple 

3193 type_var_tuple_encountered = False 

3194 

3195 for t in args: 

3196 if isinstance(t, type): 

3197 # We don't want __parameters__ descriptor of a bare Python class. 

3198 pass 

3199 elif isinstance(t, tuple): 

3200 # `t` might be a tuple, when `ParamSpec` is substituted with 

3201 # `[T, int]`, or `[int, *Ts]`, etc. 

3202 for x in t: 

3203 for collected in _collect_parameters([x]): 

3204 if collected not in parameters: 

3205 parameters.append(collected) 

3206 elif hasattr(t, '__typing_subst__'): 

3207 if t not in parameters: 

3208 if enforce_default_ordering: 

3209 has_default = ( 

3210 getattr(t, '__default__', NoDefault) is not NoDefault 

3211 ) 

3212 

3213 if type_var_tuple_encountered and has_default: 

3214 raise TypeError('Type parameter with a default' 

3215 ' follows TypeVarTuple') 

3216 

3217 if has_default: 

3218 default_encountered = True 

3219 elif default_encountered: 

3220 raise TypeError(f'Type parameter {t!r} without a default' 

3221 ' follows type parameter with a default') 

3222 

3223 parameters.append(t) 

3224 else: 

3225 if _is_unpacked_typevartuple(t): 

3226 type_var_tuple_encountered = True 

3227 for x in getattr(t, '__parameters__', ()): 

3228 if x not in parameters: 

3229 parameters.append(x) 

3230 

3231 return tuple(parameters) 

3232 

3233 if not _PEP_696_IMPLEMENTED: 

3234 typing._collect_parameters = _collect_parameters 

3235 
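# Illustrative sketch of the ordering rule enforced above (names are examples
# only): a type parameter without a default may not follow one with a default.
#
#     T = TypeVar("T")
#     D = TypeVar("D", default=int)
#
#     class Ok(Generic[T, D]): ...    # fine
#     class Bad(Generic[D, T]): ...   # TypeError at class creation time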

3236# Backport typing.NamedTuple as it exists in Python 3.13. 

3237# In 3.11, the ability to define generic `NamedTuple`s was supported. 

3238# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8. 

3239# On 3.12, we added __orig_bases__ to call-based NamedTuples 

3240# On 3.13, we deprecated kwargs-based NamedTuples 

3241if sys.version_info >= (3, 13): 

3242 NamedTuple = typing.NamedTuple 

3243else: 

3244 def _make_nmtuple(name, types, module, defaults=()): 

3245 fields = [n for n, t in types] 

3246 annotations = {n: typing._type_check(t, f"field {n} annotation must be a type") 

3247 for n, t in types} 

3248 nm_tpl = collections.namedtuple(name, fields, 

3249 defaults=defaults, module=module) 

3250 nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations 

3251 return nm_tpl 

3252 

3253 _prohibited_namedtuple_fields = typing._prohibited 

3254 _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'}) 

3255 

3256 class _NamedTupleMeta(type): 

3257 def __new__(cls, typename, bases, ns): 

3258 assert _NamedTuple in bases 

3259 for base in bases: 

3260 if base is not _NamedTuple and base is not typing.Generic: 

3261 raise TypeError( 

3262 'can only inherit from a NamedTuple type and Generic') 

3263 bases = tuple(tuple if base is _NamedTuple else base for base in bases) 

3264 if "__annotations__" in ns: 

3265 types = ns["__annotations__"] 

3266 elif "__annotate__" in ns: 

3267 # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated 

3268 types = ns["__annotate__"](1) 

3269 else: 

3270 types = {} 

3271 default_names = [] 

3272 for field_name in types: 

3273 if field_name in ns: 

3274 default_names.append(field_name) 

3275 elif default_names: 

3276 raise TypeError(f"Non-default namedtuple field {field_name} " 

3277 f"cannot follow default field" 

3278 f"{'s' if len(default_names) > 1 else ''} " 

3279 f"{', '.join(default_names)}") 

3280 nm_tpl = _make_nmtuple( 

3281 typename, types.items(), 

3282 defaults=[ns[n] for n in default_names], 

3283 module=ns['__module__'] 

3284 ) 

3285 nm_tpl.__bases__ = bases 

3286 if typing.Generic in bases: 

3287 if hasattr(typing, '_generic_class_getitem'): # 3.12+ 

3288 nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem) 

3289 else: 

3290 class_getitem = typing.Generic.__class_getitem__.__func__ 

3291 nm_tpl.__class_getitem__ = classmethod(class_getitem) 

3292 # update from user namespace without overriding special namedtuple attributes 

3293 for key, val in ns.items(): 

3294 if key in _prohibited_namedtuple_fields: 

3295 raise AttributeError("Cannot overwrite NamedTuple attribute " + key) 

3296 elif key not in _special_namedtuple_fields: 

3297 if key not in nm_tpl._fields: 

3298 setattr(nm_tpl, key, ns[key]) 

3299 try: 

3300 set_name = type(val).__set_name__ 

3301 except AttributeError: 

3302 pass 

3303 else: 

3304 try: 

3305 set_name(val, nm_tpl, key) 

3306 except BaseException as e: 

3307 msg = ( 

3308 f"Error calling __set_name__ on {type(val).__name__!r} " 

3309 f"instance {key!r} in {typename!r}" 

3310 ) 

3311 # BaseException.add_note() existed on py311, 

3312 # but the __set_name__ machinery didn't start 

3313 # using add_note() until py312. 

3314 # Making sure exceptions are raised in the same way 

3315 # as in "normal" classes seems most important here. 

3316 if sys.version_info >= (3, 12): 

3317 e.add_note(msg) 

3318 raise 

3319 else: 

3320 raise RuntimeError(msg) from e 

3321 

3322 if typing.Generic in bases: 

3323 nm_tpl.__init_subclass__() 

3324 return nm_tpl 

3325 

3326 _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {}) 

3327 

3328 def _namedtuple_mro_entries(bases): 

3329 assert NamedTuple in bases 

3330 return (_NamedTuple,) 

3331 

3332 def NamedTuple(typename, fields=_marker, /, **kwargs): 

3333 """Typed version of namedtuple. 

3334 

3335 Usage:: 

3336 

3337 class Employee(NamedTuple): 

3338 name: str 

3339 id: int 

3340 

3341 This is equivalent to:: 

3342 

3343 Employee = collections.namedtuple('Employee', ['name', 'id']) 

3344 

3345 The resulting class has an extra __annotations__ attribute, giving a 

3346 dict that maps field names to types. (The field names are also in 

3347 the _fields attribute, which is part of the namedtuple API.) 

3348 An alternative equivalent functional syntax is also accepted:: 

3349 

3350 Employee = NamedTuple('Employee', [('name', str), ('id', int)]) 

3351 """ 

3352 if fields is _marker: 

3353 if kwargs: 

3354 deprecated_thing = "Creating NamedTuple classes using keyword arguments" 

3355 deprecation_msg = ( 

3356 "{name} is deprecated and will be disallowed in Python {remove}. " 

3357 "Use the class-based or functional syntax instead." 

3358 ) 

3359 else: 

3360 deprecated_thing = "Failing to pass a value for the 'fields' parameter" 

3361 example = f"`{typename} = NamedTuple({typename!r}, [])`" 

3362 deprecation_msg = ( 

3363 "{name} is deprecated and will be disallowed in Python {remove}. " 

3364 "To create a NamedTuple class with 0 fields " 

3365 "using the functional syntax, " 

3366 "pass an empty list, e.g. " 

3367 ) + example + "." 

3368 elif fields is None: 

3369 if kwargs: 

3370 raise TypeError( 

3371 "Cannot pass `None` as the 'fields' parameter " 

3372 "and also specify fields using keyword arguments" 

3373 ) 

3374 else: 

3375 deprecated_thing = "Passing `None` as the 'fields' parameter" 

3376 example = f"`{typename} = NamedTuple({typename!r}, [])`" 

3377 deprecation_msg = ( 

3378 "{name} is deprecated and will be disallowed in Python {remove}. " 

3379 "To create a NamedTuple class with 0 fields " 

3380 "using the functional syntax, " 

3381 "pass an empty list, e.g. " 

3382 ) + example + "." 

3383 elif kwargs: 

3384 raise TypeError("Either list of fields or keywords" 

3385 " can be provided to NamedTuple, not both") 

3386 if fields is _marker or fields is None: 

3387 warnings.warn( 

3388 deprecation_msg.format(name=deprecated_thing, remove="3.15"), 

3389 DeprecationWarning, 

3390 stacklevel=2, 

3391 ) 

3392 fields = kwargs.items() 

3393 nt = _make_nmtuple(typename, fields, module=_caller()) 

3394 nt.__orig_bases__ = (NamedTuple,) 

3395 return nt 

3396 

3397 NamedTuple.__mro_entries__ = _namedtuple_mro_entries 
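
# A brief usage sketch for the backport above: generic NamedTuple classes work
# on all supported Python versions. The names `T` and `Pair` are illustrative.
#
#     from typing import Generic, TypeVar
#     from typing_extensions import NamedTuple
#
#     T = TypeVar("T")
#
#     class Pair(NamedTuple, Generic[T]):
#         first: T
#         second: T
#
#     p = Pair(1, 2)          # Pair(first=1, second=2)
#     PairOfInts = Pair[int]  # subscription works via __class_getitem__ set above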

3398 

3399 

3400if hasattr(collections.abc, "Buffer"): 

3401 Buffer = collections.abc.Buffer 

3402else: 

3403 class Buffer(abc.ABC): # noqa: B024 

3404 """Base class for classes that implement the buffer protocol. 

3405 

3406 The buffer protocol allows Python objects to expose a low-level 

3407 memory buffer interface. Before Python 3.12, it is not possible 

3408 to implement the buffer protocol in pure Python code, or even 

3409 to check whether a class implements the buffer protocol. In 

3410 Python 3.12 and higher, the ``__buffer__`` method allows access 

3411 to the buffer protocol from Python code, and the 

3412 ``collections.abc.Buffer`` ABC allows checking whether a class 

3413 implements the buffer protocol. 

3414 

3415 To indicate support for the buffer protocol in earlier versions, 

3416 inherit from this ABC, either in a stub file or at runtime, 

3417 or use ABC registration. This ABC provides no methods, because 

3418 there are no Python-accessible methods shared by pre-3.12 buffer 

3419 classes. It is useful primarily for static checks. 

3420 

3421 """ 

3422 

3423 # As a courtesy, register the most common stdlib buffer classes. 

3424 Buffer.register(memoryview) 

3425 Buffer.register(bytearray) 

3426 Buffer.register(bytes) 
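
# A short sketch of how this ABC is typically used; `MyArray` is a hypothetical
# class standing in for e.g. a C-extension type that supports the buffer protocol.
#
#     from typing_extensions import Buffer
#
#     isinstance(b"data", Buffer)           # True (bytes is registered above)
#     isinstance(memoryview(b""), Buffer)   # True
#
#     class MyArray:                        # exposes a buffer at the C level
#         ...
#
#     Buffer.register(MyArray)              # isinstance(MyArray(), Buffer) is now True
#
#     def checksum(data: Buffer) -> int:    # annotate APIs that accept any buffer
#         return sum(memoryview(data))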

3427 

3428 

3429# Backport of types.get_original_bases, available on 3.12+ in CPython 

3430if hasattr(_types, "get_original_bases"): 

3431 get_original_bases = _types.get_original_bases 

3432else: 

3433 def get_original_bases(cls, /): 

3434 """Return the class's "original" bases prior to modification by `__mro_entries__`. 

3435 

3436 Examples:: 

3437 

3438 from typing import TypeVar, Generic 

3439 from typing_extensions import NamedTuple, TypedDict 

3440 

3441 T = TypeVar("T") 

3442 class Foo(Generic[T]): ... 

3443 class Bar(Foo[int], float): ... 

3444 class Baz(list[str]): ... 

3445 Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) 

3446 Spam = TypedDict("Spam", {"a": int, "b": str}) 

3447 

3448 assert get_original_bases(Bar) == (Foo[int], float) 

3449 assert get_original_bases(Baz) == (list[str],) 

3450 assert get_original_bases(Eggs) == (NamedTuple,) 

3451 assert get_original_bases(Spam) == (TypedDict,) 

3452 assert get_original_bases(int) == (object,) 

3453 """ 

3454 try: 

3455 return cls.__dict__.get("__orig_bases__", cls.__bases__) 

3456 except AttributeError: 

3457 raise TypeError( 

3458 f'Expected an instance of type, not {type(cls).__name__!r}' 

3459 ) from None 
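
# A small sketch of why this helper is useful: parameterized bases are rewritten
# by __mro_entries__ before class creation, so `__bases__` loses the type
# arguments that `__orig_bases__` still records.
#
#     from typing import Generic, TypeVar
#     from typing_extensions import get_original_bases
#
#     T = TypeVar("T")
#     class Foo(Generic[T]): ...
#     class Bar(Foo[int]): ...
#
#     Bar.__bases__            # (Foo,)       -- parameterization is gone
#     get_original_bases(Bar)  # (Foo[int],)  -- recovered from __orig_bases__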

3460 

3461 

3462# NewType is a class on Python 3.10+, making it pickleable 

3463# The error message for subclassing instances of NewType was improved on 3.11+ 

3464if sys.version_info >= (3, 11): 

3465 NewType = typing.NewType 

3466else: 

3467 class NewType: 

3468 """NewType creates simple unique types with almost zero 

3469 runtime overhead. NewType(name, tp) is considered a subtype of tp 

3470 by static type checkers. At runtime, NewType(name, tp) returns 

3471 a dummy callable that simply returns its argument. Usage:: 

3472 UserId = NewType('UserId', int) 

3473 def name_by_id(user_id: UserId) -> str: 

3474 ... 

3475 UserId('user') # Fails type check 

3476 name_by_id(42) # Fails type check 

3477 name_by_id(UserId(42)) # OK 

3478 num = UserId(5) + 1 # type: int 

3479 """ 

3480 

3481 def __call__(self, obj, /): 

3482 return obj 

3483 

3484 def __init__(self, name, tp): 

3485 self.__qualname__ = name 

3486 if '.' in name: 

3487 name = name.rpartition('.')[-1] 

3488 self.__name__ = name 

3489 self.__supertype__ = tp 

3490 def_mod = _caller() 

3491 if def_mod != 'typing_extensions': 

3492 self.__module__ = def_mod 

3493 

3494 def __mro_entries__(self, bases): 

3495 # We defined __mro_entries__ to get a better error message 

3496 # if a user attempts to subclass a NewType instance. bpo-46170 

3497 supercls_name = self.__name__ 

3498 

3499 class Dummy: 

3500 def __init_subclass__(cls): 

3501 subcls_name = cls.__name__ 

3502 raise TypeError( 

3503 f"Cannot subclass an instance of NewType. " 

3504 f"Perhaps you were looking for: " 

3505 f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`" 

3506 ) 

3507 

3508 return (Dummy,) 

3509 

3510 def __repr__(self): 

3511 return f'{self.__module__}.{self.__qualname__}' 

3512 

3513 def __reduce__(self): 

3514 return self.__qualname__ 

3515 

3516 if sys.version_info >= (3, 10): 

3517 # PEP 604 methods 

3518 # It doesn't make sense to have these methods on Python <3.10 

3519 

3520 def __or__(self, other): 

3521 return typing.Union[self, other] 

3522 

3523 def __ror__(self, other): 

3524 return typing.Union[other, self] 
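
# A quick sketch of what this backport buys on older Pythons: the class-based
# NewType can be pickled (by qualified name) and, on 3.10+, combined with `|`.
# `UserId` is assumed to be defined at module level so pickling by name works.
#
#     import pickle
#     from typing import Optional
#     from typing_extensions import NewType
#
#     UserId = NewType("UserId", int)
#
#     pickle.loads(pickle.dumps(UserId)) is UserId   # True
#     MaybeUserId = Optional[UserId]                 # works on every version
#     # On Python 3.10+ the PEP 604 spelling is also accepted:
#     #     MaybeUserId = UserId | None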

3525 

3526 

3527if sys.version_info >= (3, 14): 

3528 TypeAliasType = typing.TypeAliasType 

3529# <=3.13 

3530else: 

3531 if sys.version_info >= (3, 12): 

3532 # 3.12-3.13 

3533 def _is_unionable(obj): 

3534 """Corresponds to is_unionable() in unionobject.c in CPython.""" 

3535 return obj is None or isinstance(obj, ( 

3536 type, 

3537 _types.GenericAlias, 

3538 _types.UnionType, 

3539 typing.TypeAliasType, 

3540 TypeAliasType, 

3541 )) 

3542 else: 

3543 # <=3.11 

3544 def _is_unionable(obj): 

3545 """Corresponds to is_unionable() in unionobject.c in CPython.""" 

3546 return obj is None or isinstance(obj, ( 

3547 type, 

3548 _types.GenericAlias, 

3549 _types.UnionType, 

3550 TypeAliasType, 

3551 )) 

3552 

3553 if sys.version_info < (3, 10): 

3554 # Copied and pasted from https://github.com/python/cpython/blob/986a4e1b6fcae7fe7a1d0a26aea446107dd58dd2/Objects/genericaliasobject.c#L568-L582, 

3555 # so that we emulate the behaviour of `types.GenericAlias` 

3556 # on the latest versions of CPython 

3557 _ATTRIBUTE_DELEGATION_EXCLUSIONS = frozenset({ 

3558 "__class__", 

3559 "__bases__", 

3560 "__origin__", 

3561 "__args__", 

3562 "__unpacked__", 

3563 "__parameters__", 

3564 "__typing_unpacked_tuple_args__", 

3565 "__mro_entries__", 

3566 "__reduce_ex__", 

3567 "__reduce__", 

3568 "__copy__", 

3569 "__deepcopy__", 

3570 }) 

3571 

3572 class _TypeAliasGenericAlias(typing._GenericAlias, _root=True): 

3573 def __getattr__(self, attr): 

3574 if attr in _ATTRIBUTE_DELEGATION_EXCLUSIONS: 

3575 return object.__getattr__(self, attr) 

3576 return getattr(self.__origin__, attr) 

3577 

3578 

3579 class TypeAliasType: 

3580 """Create named, parameterized type aliases. 

3581 

3582 This provides a backport of the new `type` statement in Python 3.12: 

3583 

3584 type ListOrSet[T] = list[T] | set[T] 

3585 

3586 is equivalent to: 

3587 

3588 T = TypeVar("T") 

3589 ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,)) 

3590 

3591 The name ListOrSet can then be used as an alias for the type it refers to. 

3592 

3593 The type_params argument should contain all the type parameters used 

3594 in the value of the type alias. If the alias is not generic, this 

3595 argument is omitted. 

3596 

3597 Static type checkers should only support type aliases declared using 

3598 TypeAliasType that follow these rules: 

3599 

3600 - The first argument (the name) must be a string literal. 

3601 - The TypeAliasType instance must be immediately assigned to a variable 

3602 of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid, 

3603 as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)'). 

3604 

3605 """ 

3606 

3607 def __init__(self, name: str, value, *, type_params=()): 

3608 if not isinstance(name, str): 

3609 raise TypeError("TypeAliasType name must be a string") 

3610 if not isinstance(type_params, tuple): 

3611 raise TypeError("type_params must be a tuple") 

3612 self.__value__ = value 

3613 self.__type_params__ = type_params 

3614 

3615 default_value_encountered = False 

3616 parameters = [] 

3617 for type_param in type_params: 

3618 if ( 

3619 not isinstance(type_param, (TypeVar, TypeVarTuple, ParamSpec)) 

3620 # <=3.11 

3621 # the Unpack backport passes isinstance(type_param, TypeVar) 

3622 or _is_unpack(type_param) 

3623 ): 

3624 raise TypeError(f"Expected a type param, got {type_param!r}") 

3625 has_default = ( 

3626 getattr(type_param, '__default__', NoDefault) is not NoDefault 

3627 ) 

3628 if default_value_encountered and not has_default: 

3629 raise TypeError(f"non-default type parameter '{type_param!r}'" 

3630 " follows default type parameter") 

3631 if has_default: 

3632 default_value_encountered = True 

3633 if isinstance(type_param, TypeVarTuple): 

3634 parameters.extend(type_param) 

3635 else: 

3636 parameters.append(type_param) 

3637 self.__parameters__ = tuple(parameters) 

3638 def_mod = _caller() 

3639 if def_mod != 'typing_extensions': 

3640 self.__module__ = def_mod 

3641 # Setting this attribute prevents any further modification of the TypeAliasType 

3642 self.__name__ = name 

3643 

3644 def __setattr__(self, name: str, value: object, /) -> None: 

3645 if hasattr(self, "__name__"): 

3646 self._raise_attribute_error(name) 

3647 super().__setattr__(name, value) 

3648 

3649 def __delattr__(self, name: str, /) -> Never: 

3650 self._raise_attribute_error(name) 

3651 

3652 def _raise_attribute_error(self, name: str) -> Never: 

3653 # Match the Python 3.12 error messages exactly 

3654 if name == "__name__": 

3655 raise AttributeError("readonly attribute") 

3656 elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}: 

3657 raise AttributeError( 

3658 f"attribute '{name}' of 'typing.TypeAliasType' objects " 

3659 "is not writable" 

3660 ) 

3661 else: 

3662 raise AttributeError( 

3663 f"'typing.TypeAliasType' object has no attribute '{name}'" 

3664 ) 

3665 

3666 def __repr__(self) -> str: 

3667 return self.__name__ 

3668 

3669 if sys.version_info < (3, 11): 

3670 def _check_single_param(self, param, recursion=0): 

3671 # Allow [], [int], [int, str], [int, ...], [int, T] 

3672 if param is ...: 

3673 return ... 

3674 if param is None: 

3675 return None 

3676 # Note in <= 3.9 _ConcatenateGenericAlias inherits from list 

3677 if isinstance(param, list) and recursion == 0: 

3678 return [self._check_single_param(arg, recursion+1) 

3679 for arg in param] 

3680 return typing._type_check( 

3681 param, f'Subscripting {self.__name__} requires a type.' 

3682 ) 

3683 

3684 def _check_parameters(self, parameters): 

3685 if sys.version_info < (3, 11): 

3686 return tuple( 

3687 self._check_single_param(item) 

3688 for item in parameters 

3689 ) 

3690 return tuple(typing._type_check( 

3691 item, f'Subscripting {self.__name__} requires a type.' 

3692 ) 

3693 for item in parameters 

3694 ) 

3695 

3696 def __getitem__(self, parameters): 

3697 if not self.__type_params__: 

3698 raise TypeError("Only generic type aliases are subscriptable") 

3699 if not isinstance(parameters, tuple): 

3700 parameters = (parameters,) 

3701 # Using types.GenericAlias on 3.9 would create problems with Concatenate 

3702 if sys.version_info >= (3, 10): 

3703 return _types.GenericAlias(self, parameters) 

3704 type_vars = _collect_type_vars(parameters) 

3705 parameters = self._check_parameters(parameters) 

3706 alias = _TypeAliasGenericAlias(self, parameters) 

3707 # alias.__parameters__ is not complete if Concatenate is present 

3708 # as it is converted to a list from which no parameters are extracted. 

3709 if alias.__parameters__ != type_vars: 

3710 alias.__parameters__ = type_vars 

3711 return alias 

3712 

3713 def __reduce__(self): 

3714 return self.__name__ 

3715 

3716 def __init_subclass__(cls, *args, **kwargs): 

3717 raise TypeError( 

3718 "type 'typing_extensions.TypeAliasType' is not an acceptable base type" 

3719 ) 

3720 

3721 # The presence of this method convinces typing._type_check 

3722 # that TypeAliasTypes are types. 

3723 def __call__(self): 

3724 raise TypeError("Type alias is not callable") 

3725 

3726 if sys.version_info >= (3, 10): 

3727 def __or__(self, right): 

3728 # For forward compatibility with 3.12, reject Unions 

3729 # that are not accepted by the built-in Union. 

3730 if not _is_unionable(right): 

3731 return NotImplemented 

3732 return typing.Union[self, right] 

3733 

3734 def __ror__(self, left): 

3735 if not _is_unionable(left): 

3736 return NotImplemented 

3737 return typing.Union[left, self] 
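
# A minimal usage sketch for the backport above; `ListOrSet` mirrors the
# docstring example and is not a name exported by the library.
#
#     from typing import List, Set, Union
#     from typing_extensions import TypeAliasType, TypeVar
#
#     T = TypeVar("T")
#     ListOrSet = TypeAliasType("ListOrSet", Union[List[T], Set[T]], type_params=(T,))
#
#     ListOrSet.__value__         # Union[List[T], Set[T]]
#     ListOrSet.__type_params__   # (T,)
#     ListOrSet[int]              # a parameterized alias usable in annotations
#     # On Python 3.10+, `ListOrSet | None` also works, subject to the
#     # _is_unionable() check above.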

3738 

3739 

3740if hasattr(typing, "is_protocol"): 

3741 is_protocol = typing.is_protocol 

3742 get_protocol_members = typing.get_protocol_members 

3743else: 

3744 def is_protocol(tp: type, /) -> bool: 

3745 """Return True if the given type is a Protocol. 

3746 

3747 Example:: 

3748 

3749 >>> from typing_extensions import Protocol, is_protocol 

3750 >>> class P(Protocol): 

3751 ... def a(self) -> str: ... 

3752 ... b: int 

3753 >>> is_protocol(P) 

3754 True 

3755 >>> is_protocol(int) 

3756 False 

3757 """ 

3758 return ( 

3759 isinstance(tp, type) 

3760 and getattr(tp, '_is_protocol', False) 

3761 and tp is not Protocol 

3762 and tp is not typing.Protocol 

3763 ) 

3764 

3765 def get_protocol_members(tp: type, /) -> typing.FrozenSet[str]: 

3766 """Return the set of members defined in a Protocol. 

3767 

3768 Example:: 

3769 

3770 >>> from typing_extensions import Protocol, get_protocol_members 

3771 >>> class P(Protocol): 

3772 ... def a(self) -> str: ... 

3773 ... b: int 

3774 >>> get_protocol_members(P) 

3775 frozenset({'a', 'b'}) 

3776 

3777 Raise a TypeError for arguments that are not Protocols. 

3778 """ 

3779 if not is_protocol(tp): 

3780 raise TypeError(f'{tp!r} is not a Protocol') 

3781 if hasattr(tp, '__protocol_attrs__'): 

3782 return frozenset(tp.__protocol_attrs__) 

3783 return frozenset(_get_protocol_attrs(tp)) 

3784 

3785 

3786if hasattr(typing, "Doc"): 

3787 Doc = typing.Doc 

3788else: 

3789 class Doc: 

3790 """Define the documentation of a type annotation using ``Annotated``, to be 

3791 used in class attributes, function and method parameters, return values, 

3792 and variables. 

3793 

3794 The value should be a positional-only string literal to allow static tools 

3795 like editors and documentation generators to use it. 

3796 

3797 This complements docstrings. 

3798 

3799 The string value passed is available in the attribute ``documentation``. 

3800 

3801 Example:: 

3802 

3803 >>> from typing_extensions import Annotated, Doc 

3804 >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ... 

3805 """ 

3806 def __init__(self, documentation: str, /) -> None: 

3807 self.documentation = documentation 

3808 

3809 def __repr__(self) -> str: 

3810 return f"Doc({self.documentation!r})" 

3811 

3812 def __hash__(self) -> int: 

3813 return hash(self.documentation) 

3814 

3815 def __eq__(self, other: object) -> bool: 

3816 if not isinstance(other, Doc): 

3817 return NotImplemented 

3818 return self.documentation == other.documentation 
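
# A short sketch showing how Doc metadata can be read back at runtime via
# Annotated introspection:
#
#     from typing_extensions import Annotated, Doc, get_type_hints
#
#     def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ...
#
#     hints = get_type_hints(hi, include_extras=True)
#     hints["to"].__metadata__[0].documentation   # 'Who to say hi to'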

3819 

3820 

3821_CapsuleType = getattr(_types, "CapsuleType", None) 

3822 

3823if _CapsuleType is None: 

3824 try: 

3825 import _socket 

3826 except ImportError: 

3827 pass 

3828 else: 

3829 _CAPI = getattr(_socket, "CAPI", None) 

3830 if _CAPI is not None: 

3831 _CapsuleType = type(_CAPI) 

3832 

3833if _CapsuleType is not None: 

3834 CapsuleType = _CapsuleType 

3835 __all__.append("CapsuleType") 

3836 

3837 

3838if sys.version_info >= (3, 14): 

3839 from annotationlib import Format, get_annotations 

3840else: 

3841 class Format(enum.IntEnum): 

3842 VALUE = 1 

3843 VALUE_WITH_FAKE_GLOBALS = 2 

3844 FORWARDREF = 3 

3845 STRING = 4 

3846 

3847 def get_annotations(obj, *, globals=None, locals=None, eval_str=False, 

3848 format=Format.VALUE): 

3849 """Compute the annotations dict for an object. 

3850 

3851 obj may be a callable, class, or module. 

3852 Passing in an object of any other type raises TypeError. 

3853 

3854 Returns a dict. get_annotations() returns a new dict every time 

3855 it's called; calling it twice on the same object will return two 

3856 different but equivalent dicts. 

3857 

3858 This is a backport of `inspect.get_annotations`, which has been 

3859 in the standard library since Python 3.10. See the standard library 

3860 documentation for more: 

3861 

3862 https://docs.python.org/3/library/inspect.html#inspect.get_annotations 

3863 

3864 This backport adds the *format* argument introduced by PEP 649. The 

3865 three formats supported are: 

3866 * VALUE: the annotations are returned as-is. This is the default and 

3867 it is compatible with the behavior on previous Python versions. 

3868 * FORWARDREF: return annotations as-is if possible, but replace any 

3869 undefined names with ForwardRef objects. The implementation proposed by 

3870 PEP 649 relies on language changes that cannot be backported; the 

3871 typing-extensions implementation simply returns the same result as VALUE. 

3872 * STRING: return annotations as strings, in a format close to the original 

3873 source. Again, this behavior cannot be replicated directly in a backport. 

3874 As an approximation, typing-extensions retrieves the annotations under 

3875 VALUE semantics and then stringifies them. 

3876 

3877 The purpose of this backport is to allow users who would like to use 

3878 FORWARDREF or STRING semantics once PEP 649 is implemented, but who also 

3879 want to support earlier Python versions, to simply write: 

3880 

3881 typing_extensions.get_annotations(obj, format=Format.FORWARDREF) 

3882 

3883 """ 

3884 format = Format(format) 

3885 if format is Format.VALUE_WITH_FAKE_GLOBALS: 

3886 raise ValueError( 

3887 "The VALUE_WITH_FAKE_GLOBALS format is for internal use only" 

3888 ) 

3889 

3890 if eval_str and format is not Format.VALUE: 

3891 raise ValueError("eval_str=True is only supported with format=Format.VALUE") 

3892 

3893 if isinstance(obj, type): 

3894 # class 

3895 obj_dict = getattr(obj, '__dict__', None) 

3896 if obj_dict and hasattr(obj_dict, 'get'): 

3897 ann = obj_dict.get('__annotations__', None) 

3898 if isinstance(ann, _types.GetSetDescriptorType): 

3899 ann = None 

3900 else: 

3901 ann = None 

3902 

3903 obj_globals = None 

3904 module_name = getattr(obj, '__module__', None) 

3905 if module_name: 

3906 module = sys.modules.get(module_name, None) 

3907 if module: 

3908 obj_globals = getattr(module, '__dict__', None) 

3909 obj_locals = dict(vars(obj)) 

3910 unwrap = obj 

3911 elif isinstance(obj, _types.ModuleType): 

3912 # module 

3913 ann = getattr(obj, '__annotations__', None) 

3914 obj_globals = obj.__dict__ 

3915 obj_locals = None 

3916 unwrap = None 

3917 elif callable(obj): 

3918 # this includes types.Function, types.BuiltinFunctionType, 

3919 # types.BuiltinMethodType, functools.partial, functools.singledispatch, 

3920 # "class funclike" from Lib/test/test_inspect... on and on it goes. 

3921 ann = getattr(obj, '__annotations__', None) 

3922 obj_globals = getattr(obj, '__globals__', None) 

3923 obj_locals = None 

3924 unwrap = obj 

3925 elif hasattr(obj, '__annotations__'): 

3926 ann = obj.__annotations__ 

3927 obj_globals = obj_locals = unwrap = None 

3928 else: 

3929 raise TypeError(f"{obj!r} is not a module, class, or callable.") 

3930 

3931 if ann is None: 

3932 return {} 

3933 

3934 if not isinstance(ann, dict): 

3935 raise ValueError(f"{obj!r}.__annotations__ is neither a dict nor None") 

3936 

3937 if not ann: 

3938 return {} 

3939 

3940 if not eval_str: 

3941 if format is Format.STRING: 

3942 return { 

3943 key: value if isinstance(value, str) else typing._type_repr(value) 

3944 for key, value in ann.items() 

3945 } 

3946 return dict(ann) 

3947 

3948 if unwrap is not None: 

3949 while True: 

3950 if hasattr(unwrap, '__wrapped__'): 

3951 unwrap = unwrap.__wrapped__ 

3952 continue 

3953 if isinstance(unwrap, functools.partial): 

3954 unwrap = unwrap.func 

3955 continue 

3956 break 

3957 if hasattr(unwrap, "__globals__"): 

3958 obj_globals = unwrap.__globals__ 

3959 

3960 if globals is None: 

3961 globals = obj_globals 

3962 if locals is None: 

3963 locals = obj_locals or {} 

3964 

3965 # "Inject" type parameters into the local namespace 

3966 # (unless they are shadowed by assignments *in* the local namespace), 

3967 # as a way of emulating annotation scopes when calling `eval()` 

3968 if type_params := getattr(obj, "__type_params__", ()): 

3969 locals = {param.__name__: param for param in type_params} | locals 

3970 

3971 return_value = {key: 

3972 value if not isinstance(value, str) else eval(value, globals, locals) 

3973 for key, value in ann.items() } 

3974 return return_value 
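
# A minimal sketch of the *format* argument handled above. `Point` is a
# deliberately undefined name, used to show that string annotations are left
# untouched unless eval_str=True.
#
#     from typing_extensions import Format, get_annotations
#
#     def move(x: int, y: "Point") -> None: ...
#
#     get_annotations(move)
#     # {'x': <class 'int'>, 'y': 'Point', 'return': None}
#
#     get_annotations(move, format=Format.STRING)
#     # {'x': 'int', 'y': 'Point', 'return': 'None'}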

3975 

3976 

3977if hasattr(typing, "evaluate_forward_ref"): 

3978 evaluate_forward_ref = typing.evaluate_forward_ref 

3979else: 

3980 # Implements annotationlib.ForwardRef.evaluate 

3981 def _eval_with_owner( 

3982 forward_ref, *, owner=None, globals=None, locals=None, type_params=None 

3983 ): 

3984 if forward_ref.__forward_evaluated__: 

3985 return forward_ref.__forward_value__ 

3986 if getattr(forward_ref, "__cell__", None) is not None: 

3987 try: 

3988 value = forward_ref.__cell__.cell_contents 

3989 except ValueError: 

3990 pass 

3991 else: 

3992 forward_ref.__forward_evaluated__ = True 

3993 forward_ref.__forward_value__ = value 

3994 return value 

3995 if owner is None: 

3996 owner = getattr(forward_ref, "__owner__", None) 

3997 

3998 if ( 

3999 globals is None 

4000 and getattr(forward_ref, "__forward_module__", None) is not None 

4001 ): 

4002 globals = getattr( 

4003 sys.modules.get(forward_ref.__forward_module__, None), "__dict__", None 

4004 ) 

4005 if globals is None: 

4006 globals = getattr(forward_ref, "__globals__", None) 

4007 if globals is None: 

4008 if isinstance(owner, type): 

4009 module_name = getattr(owner, "__module__", None) 

4010 if module_name: 

4011 module = sys.modules.get(module_name, None) 

4012 if module: 

4013 globals = getattr(module, "__dict__", None) 

4014 elif isinstance(owner, _types.ModuleType): 

4015 globals = getattr(owner, "__dict__", None) 

4016 elif callable(owner): 

4017 globals = getattr(owner, "__globals__", None) 

4018 

4019 # If we pass None to eval() below, the globals of this module are used. 

4020 if globals is None: 

4021 globals = {} 

4022 

4023 if locals is None: 

4024 locals = {} 

4025 if isinstance(owner, type): 

4026 locals.update(vars(owner)) 

4027 

4028 if type_params is None and owner is not None: 

4029 # "Inject" type parameters into the local namespace 

4030 # (unless they are shadowed by assignments *in* the local namespace), 

4031 # as a way of emulating annotation scopes when calling `eval()` 

4032 type_params = getattr(owner, "__type_params__", None) 

4033 

4034 # type parameters require some special handling, 

4035 # as they exist in their own scope 

4036 # but `eval()` does not have a dedicated parameter for that scope. 

4037 # For classes, names in type parameter scopes should override 

4038 # names in the global scope (which here are called `localns`!), 

4039 # but should in turn be overridden by names in the class scope 

4040 # (which here are called `globalns`!) 

4041 if type_params is not None: 

4042 globals = dict(globals) 

4043 locals = dict(locals) 

4044 for param in type_params: 

4045 param_name = param.__name__ 

4046 if ( 

4047 _FORWARD_REF_HAS_CLASS and not forward_ref.__forward_is_class__ 

4048 ) or param_name not in globals: 

4049 globals[param_name] = param 

4050 locals.pop(param_name, None) 

4051 

4052 arg = forward_ref.__forward_arg__ 

4053 if arg.isidentifier() and not keyword.iskeyword(arg): 

4054 if arg in locals: 

4055 value = locals[arg] 

4056 elif arg in globals: 

4057 value = globals[arg] 

4058 elif hasattr(builtins, arg): 

4059 return getattr(builtins, arg) 

4060 else: 

4061 raise NameError(arg) 

4062 else: 

4063 code = forward_ref.__forward_code__ 

4064 value = eval(code, globals, locals) 

4065 forward_ref.__forward_evaluated__ = True 

4066 forward_ref.__forward_value__ = value 

4067 return value 

4068 

4069 def evaluate_forward_ref( 

4070 forward_ref, 

4071 *, 

4072 owner=None, 

4073 globals=None, 

4074 locals=None, 

4075 type_params=None, 

4076 format=None, 

4077 _recursive_guard=frozenset(), 

4078 ): 

4079 """Evaluate a forward reference as a type hint. 

4080 

4081 This is similar to calling the ForwardRef.evaluate() method, 

4082 but unlike that method, evaluate_forward_ref() also: 

4083 

4084 * Recursively evaluates forward references nested within the type hint. 

4085 * Rejects certain objects that are not valid type hints. 

4086 * Replaces type hints that evaluate to None with types.NoneType. 

4087 * Supports the *FORWARDREF* and *STRING* formats. 

4088 

4089 *forward_ref* must be an instance of ForwardRef. *owner*, if given, 

4090 should be the object that holds the annotations that the forward reference 

4091 was derived from, such as a module, class object, or function. It is used to 

4092 infer the namespaces to use for looking up names. *globals* and *locals* 

4093 can also be explicitly given to provide the global and local namespaces. 

4094 *type_params* is a tuple of type parameters that are in scope when 

4095 evaluating the forward reference. This parameter must be provided (though 

4096 it may be an empty tuple) if *owner* is not given and the forward reference 

4097 does not already have an owner set. *format* specifies the format of the 

4098 annotation and is a member of the annotationlib.Format enum. 

4099 

4100 """ 

4101 if format == Format.STRING: 

4102 return forward_ref.__forward_arg__ 

4103 if forward_ref.__forward_arg__ in _recursive_guard: 

4104 return forward_ref 

4105 

4106 # Evaluate the forward reference 

4107 try: 

4108 value = _eval_with_owner( 

4109 forward_ref, 

4110 owner=owner, 

4111 globals=globals, 

4112 locals=locals, 

4113 type_params=type_params, 

4114 ) 

4115 except NameError: 

4116 if format == Format.FORWARDREF: 

4117 return forward_ref 

4118 else: 

4119 raise 

4120 

4121 if isinstance(value, str): 

4122 value = ForwardRef(value) 

4123 

4124 # Recursively evaluate the type 

4125 if isinstance(value, ForwardRef): 

4126 if getattr(value, "__forward_module__", True) is not None: 

4127 globals = None 

4128 return evaluate_forward_ref( 

4129 value, 

4130 globals=globals, 

4131 locals=locals, 

4132 type_params=type_params, owner=owner, 

4133 _recursive_guard=_recursive_guard, format=format 

4134 ) 

4135 if sys.version_info < (3, 12, 5) and type_params: 

4136 # Make use of type_params 

4137 locals = dict(locals) if locals else {} 

4138 for tvar in type_params: 

4139 if tvar.__name__ not in locals: # let's not overwrite something already present 

4140 locals[tvar.__name__] = tvar 

4141 if sys.version_info < (3, 12, 5): 

4142 return typing._eval_type( 

4143 value, 

4144 globals, 

4145 locals, 

4146 recursive_guard=_recursive_guard | {forward_ref.__forward_arg__}, 

4147 ) 

4148 else: 

4149 return typing._eval_type( 

4150 value, 

4151 globals, 

4152 locals, 

4153 type_params, 

4154 recursive_guard=_recursive_guard | {forward_ref.__forward_arg__}, 

4155 ) 
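
# A small usage sketch; `MissingName` is intentionally undefined to show the
# FORWARDREF fallback implemented above.
#
#     from typing import ForwardRef
#     from typing_extensions import Format, evaluate_forward_ref
#
#     evaluate_forward_ref(ForwardRef("int"))                        # <class 'int'>
#     evaluate_forward_ref(ForwardRef("int"), format=Format.STRING)  # 'int'
#
#     ref = ForwardRef("list[MissingName]")
#     evaluate_forward_ref(ref, format=Format.FORWARDREF)  # returns `ref` unevaluated
#                                                          # instead of raising NameError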

4156 

4157 

4158class Sentinel: 

4159 """Create a unique sentinel object. 

4160 

4161 *name* should be the name of the variable to which the return value shall be assigned. 

4162 

4163 *repr*, if supplied, will be used for the repr of the sentinel object. 

4164 If not provided, "<name>" will be used. 

4165 """ 

4166 

4167 def __init__( 

4168 self, 

4169 name: str, 

4170 repr: typing.Optional[str] = None, 

4171 ): 

4172 self._name = name 

4173 self._repr = repr if repr is not None else f'<{name}>' 

4174 

4175 def __repr__(self): 

4176 return self._repr 

4177 

4178 if sys.version_info < (3, 11): 

4179 # The presence of this method convinces typing._type_check 

4180 # that Sentinels are types. 

4181 def __call__(self, *args, **kwargs): 

4182 raise TypeError(f"{type(self).__name__!r} object is not callable") 

4183 

4184 if sys.version_info >= (3, 10): 

4185 def __or__(self, other): 

4186 return typing.Union[self, other] 

4187 

4188 def __ror__(self, other): 

4189 return typing.Union[other, self] 

4190 

4191 def __getstate__(self): 

4192 raise TypeError(f"Cannot pickle {type(self).__name__!r} object") 
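
# A short sketch of the intended use: a sentinel default that is distinct from
# every ordinary value, including None. `MISSING` and `lookup` are illustrative.
#
#     from typing_extensions import Sentinel
#
#     MISSING = Sentinel("MISSING")
#
#     def lookup(key, default=MISSING):
#         if default is MISSING:
#             raise KeyError(key)
#         return default
#
#     repr(MISSING)   # '<MISSING>'
#     # On Python 3.10+, sentinels can also appear in annotations, e.g.
#     #     def f(x: int | MISSING = MISSING) -> None: ...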

4193 

4194 

4195# Aliases for items that are in typing in all supported versions. 

4196# We use hasattr() checks so this library will continue to import on 

4197# future versions of Python that may remove these names. 

4198_typing_names = [ 

4199 "AbstractSet", 

4200 "AnyStr", 

4201 "BinaryIO", 

4202 "Callable", 

4203 "Collection", 

4204 "Container", 

4205 "Dict", 

4206 "FrozenSet", 

4207 "Hashable", 

4208 "IO", 

4209 "ItemsView", 

4210 "Iterable", 

4211 "Iterator", 

4212 "KeysView", 

4213 "List", 

4214 "Mapping", 

4215 "MappingView", 

4216 "Match", 

4217 "MutableMapping", 

4218 "MutableSequence", 

4219 "MutableSet", 

4220 "Optional", 

4221 "Pattern", 

4222 "Reversible", 

4223 "Sequence", 

4224 "Set", 

4225 "Sized", 

4226 "TextIO", 

4227 "Tuple", 

4228 "Union", 

4229 "ValuesView", 

4230 "cast", 

4231 "no_type_check", 

4232 "no_type_check_decorator", 

4233 # This is private, but it was defined by typing_extensions for a long time 

4234 # and some users rely on it. 

4235 "_AnnotatedAlias", 

4236] 

4237globals().update( 

4238 {name: getattr(typing, name) for name in _typing_names if hasattr(typing, name)} 

4239) 

4240# These are defined unconditionally because they are used in 

4241# typing-extensions itself. 

4242Generic = typing.Generic 

4243ForwardRef = typing.ForwardRef 

4244Annotated = typing.Annotated