Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/typing_extensions.py: 28%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1777 statements  

1import abc 

2import builtins 

3import collections 

4import collections.abc 

5import contextlib 

6import enum 

7import functools 

8import inspect 

9import io 

10import keyword 

11import operator 

12import sys 

13import types as _types 

14import typing 

15import warnings 

16 

17# Breakpoint: https://github.com/python/cpython/pull/119891 

18if sys.version_info >= (3, 14): 

19 import annotationlib 

20 

21__all__ = [ 

22 # Super-special typing primitives. 

23 'Any', 

24 'ClassVar', 

25 'Concatenate', 

26 'Final', 

27 'LiteralString', 

28 'ParamSpec', 

29 'ParamSpecArgs', 

30 'ParamSpecKwargs', 

31 'Self', 

32 'Type', 

33 'TypeVar', 

34 'TypeVarTuple', 

35 'Unpack', 

36 

37 # ABCs (from collections.abc). 

38 'Awaitable', 

39 'AsyncIterator', 

40 'AsyncIterable', 

41 'Coroutine', 

42 'AsyncGenerator', 

43 'AsyncContextManager', 

44 'Buffer', 

45 'ChainMap', 

46 

47 # Concrete collection types. 

48 'ContextManager', 

49 'Counter', 

50 'Deque', 

51 'DefaultDict', 

52 'NamedTuple', 

53 'OrderedDict', 

54 'TypedDict', 

55 

56 # Structural checks, a.k.a. protocols. 

57 'SupportsAbs', 

58 'SupportsBytes', 

59 'SupportsComplex', 

60 'SupportsFloat', 

61 'SupportsIndex', 

62 'SupportsInt', 

63 'SupportsRound', 

64 'Reader', 

65 'Writer', 

66 

67 # One-off things. 

68 'Annotated', 

69 'assert_never', 

70 'assert_type', 

71 'clear_overloads', 

72 'dataclass_transform', 

73 'deprecated', 

74 'disjoint_base', 

75 'Doc', 

76 'evaluate_forward_ref', 

77 'get_overloads', 

78 'final', 

79 'Format', 

80 'get_annotations', 

81 'get_args', 

82 'get_origin', 

83 'get_original_bases', 

84 'get_protocol_members', 

85 'get_type_hints', 

86 'IntVar', 

87 'is_protocol', 

88 'is_typeddict', 

89 'Literal', 

90 'NewType', 

91 'overload', 

92 'override', 

93 'Protocol', 

94 'Sentinel', 

95 'reveal_type', 

96 'runtime', 

97 'runtime_checkable', 

98 'Text', 

99 'TypeAlias', 

100 'TypeAliasType', 

101 'TypeForm', 

102 'TypeGuard', 

103 'TypeIs', 

104 'TYPE_CHECKING', 

105 'type_repr', 

106 'Never', 

107 'NoReturn', 

108 'ReadOnly', 

109 'Required', 

110 'NotRequired', 

111 'NoDefault', 

112 'NoExtraItems', 

113 

114 # Pure aliases, have always been in typing 

115 'AbstractSet', 

116 'AnyStr', 

117 'BinaryIO', 

118 'Callable', 

119 'Collection', 

120 'Container', 

121 'Dict', 

122 'ForwardRef', 

123 'FrozenSet', 

124 'Generator', 

125 'Generic', 

126 'Hashable', 

127 'IO', 

128 'ItemsView', 

129 'Iterable', 

130 'Iterator', 

131 'KeysView', 

132 'List', 

133 'Mapping', 

134 'MappingView', 

135 'Match', 

136 'MutableMapping', 

137 'MutableSequence', 

138 'MutableSet', 

139 'Optional', 

140 'Pattern', 

141 'Reversible', 

142 'Sequence', 

143 'Set', 

144 'Sized', 

145 'TextIO', 

146 'Tuple', 

147 'Union', 

148 'ValuesView', 

149 'cast', 

150 'no_type_check', 

151] 

152 

153# for backward compatibility 

154PEP_560 = True 

155GenericMeta = type 

156# Breakpoint: https://github.com/python/cpython/pull/116129 

157_PEP_696_IMPLEMENTED = sys.version_info >= (3, 13, 0, "beta") 

158 

159# Added with bpo-45166 to 3.10.1+ and some 3.9 versions 

160_FORWARD_REF_HAS_CLASS = "__forward_is_class__" in typing.ForwardRef.__slots__ 

161 

162class Sentinel: 

163 """Create a unique sentinel object. 

164 

165 *name* should be the name of the variable to which the return value shall be assigned. 

166 

167 *repr*, if supplied, will be used for the repr of the sentinel object. 

168 If not provided, "<name>" will be used. 

169 """ 

170 

171 def __init__( 

172 self, 

173 name: str, 

174 repr: typing.Optional[str] = None, 

175 ): 

176 self._name = name 

177 self._repr = repr if repr is not None else f'<{name}>' 

178 

179 def __repr__(self): 

180 return self._repr 

181 

182 if sys.version_info < (3, 11): 

183 # The presence of this method convinces typing._type_check 

184 # that Sentinels are types. 

185 def __call__(self, *args, **kwargs): 

186 raise TypeError(f"{type(self).__name__!r} object is not callable") 

187 

188 # Breakpoint: https://github.com/python/cpython/pull/21515 

189 if sys.version_info >= (3, 10): 

190 def __or__(self, other): 

191 return typing.Union[self, other] 

192 

193 def __ror__(self, other): 

194 return typing.Union[other, self] 

195 

196 def __getstate__(self): 

197 raise TypeError(f"Cannot pickle {type(self).__name__!r} object") 

198 

199 

200_marker = Sentinel("sentinel") 

201 

202# The functions below are modified copies of typing internal helpers. 

203# They are needed by _ProtocolMeta and they provide support for PEP 646. 

204 

205# Breakpoint: https://github.com/python/cpython/pull/27342 

206if sys.version_info >= (3, 10): 

207 def _should_collect_from_parameters(t): 

208 return isinstance( 

209 t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType) 

210 ) 

211else: 

212 def _should_collect_from_parameters(t): 

213 return isinstance(t, (typing._GenericAlias, _types.GenericAlias)) 

214 

215 

216NoReturn = typing.NoReturn 

217 

218# Some unconstrained type variables. These are used by the container types. 

219# (These are not for export.) 

220T = typing.TypeVar('T') # Any type. 

221KT = typing.TypeVar('KT') # Key type. 

222VT = typing.TypeVar('VT') # Value type. 

223T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers. 

224T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant. 

225 

226 

227# Breakpoint: https://github.com/python/cpython/pull/31841 

228if sys.version_info >= (3, 11): 

229 from typing import Any 

230else: 

231 

232 class _AnyMeta(type): 

233 def __instancecheck__(self, obj): 

234 if self is Any: 

235 raise TypeError("typing_extensions.Any cannot be used with isinstance()") 

236 return super().__instancecheck__(obj) 

237 

238 def __repr__(self): 

239 if self is Any: 

240 return "typing_extensions.Any" 

241 return super().__repr__() 

242 

243 class Any(metaclass=_AnyMeta): 

244 """Special type indicating an unconstrained type. 

245 - Any is compatible with every type. 

246 - Any assumed to have all methods. 

247 - All values assumed to be instances of Any. 

248 Note that all the above statements are true from the point of view of 

249 static type checkers. At runtime, Any should not be used with instance 

250 checks. 

251 """ 

252 def __new__(cls, *args, **kwargs): 

253 if cls is Any: 

254 raise TypeError("Any cannot be instantiated") 

255 return super().__new__(cls, *args, **kwargs) 

256 

257 

258ClassVar = typing.ClassVar 

259 

260# Vendored from cpython typing._SpecialFrom 

261# Having a separate class means that instances will not be rejected by 

262# typing._type_check. 

263class _SpecialForm(typing._Final, _root=True): 

264 __slots__ = ('_name', '__doc__', '_getitem') 

265 

266 def __init__(self, getitem): 

267 self._getitem = getitem 

268 self._name = getitem.__name__ 

269 self.__doc__ = getitem.__doc__ 

270 

271 def __getattr__(self, item): 

272 if item in {'__name__', '__qualname__'}: 

273 return self._name 

274 

275 raise AttributeError(item) 

276 

277 def __mro_entries__(self, bases): 

278 raise TypeError(f"Cannot subclass {self!r}") 

279 

280 def __repr__(self): 

281 return f'typing_extensions.{self._name}' 

282 

283 def __reduce__(self): 

284 return self._name 

285 

286 def __call__(self, *args, **kwds): 

287 raise TypeError(f"Cannot instantiate {self!r}") 

288 

289 def __or__(self, other): 

290 return typing.Union[self, other] 

291 

292 def __ror__(self, other): 

293 return typing.Union[other, self] 

294 

295 def __instancecheck__(self, obj): 

296 raise TypeError(f"{self} cannot be used with isinstance()") 

297 

298 def __subclasscheck__(self, cls): 

299 raise TypeError(f"{self} cannot be used with issubclass()") 

300 

301 @typing._tp_cache 

302 def __getitem__(self, parameters): 

303 return self._getitem(self, parameters) 

304 

305 

306# Note that inheriting from this class means that the object will be 

307# rejected by typing._type_check, so do not use it if the special form 

308# is arguably valid as a type by itself. 

309class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): 

310 def __repr__(self): 

311 return 'typing_extensions.' + self._name 

312 

313 

314Final = typing.Final 

315 

316# Breakpoint: https://github.com/python/cpython/pull/30530 

317if sys.version_info >= (3, 11): 

318 final = typing.final 

319else: 

320 # @final exists in 3.8+, but we backport it for all versions 

321 # before 3.11 to keep support for the __final__ attribute. 

322 # See https://bugs.python.org/issue46342 

323 def final(f): 

324 """This decorator can be used to indicate to type checkers that 

325 the decorated method cannot be overridden, and decorated class 

326 cannot be subclassed. For example: 

327 

328 class Base: 

329 @final 

330 def done(self) -> None: 

331 ... 

332 class Sub(Base): 

333 def done(self) -> None: # Error reported by type checker 

334 ... 

335 @final 

336 class Leaf: 

337 ... 

338 class Other(Leaf): # Error reported by type checker 

339 ... 

340 

341 There is no runtime checking of these properties. The decorator 

342 sets the ``__final__`` attribute to ``True`` on the decorated object 

343 to allow runtime introspection. 

344 """ 

345 try: 

346 f.__final__ = True 

347 except (AttributeError, TypeError): 

348 # Skip the attribute silently if it is not writable. 

349 # AttributeError happens if the object has __slots__ or a 

350 # read-only property, TypeError if it's a builtin class. 

351 pass 

352 return f 

353 

354 

355if hasattr(typing, "disjoint_base"): # 3.15 

356 disjoint_base = typing.disjoint_base 

357else: 

358 def disjoint_base(cls): 

359 """This decorator marks a class as a disjoint base. 

360 

361 Child classes of a disjoint base cannot inherit from other disjoint bases that are 

362 not parent classes of the disjoint base. 

363 

364 For example: 

365 

366 @disjoint_base 

367 class Disjoint1: pass 

368 

369 @disjoint_base 

370 class Disjoint2: pass 

371 

372 class Disjoint3(Disjoint1, Disjoint2): pass # Type checker error 

373 

374 Type checkers can use knowledge of disjoint bases to detect unreachable code 

375 and determine when two types can overlap. 

376 

377 See PEP 800.""" 

378 cls.__disjoint_base__ = True 

379 return cls 

380 

381 

382def IntVar(name): 

383 return typing.TypeVar(name) 

384 

385 

386# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8 

387# Breakpoint: https://github.com/python/cpython/pull/29334 

388if sys.version_info >= (3, 10, 1): 

389 Literal = typing.Literal 

390else: 

391 def _flatten_literal_params(parameters): 

392 """An internal helper for Literal creation: flatten Literals among parameters""" 

393 params = [] 

394 for p in parameters: 

395 if isinstance(p, _LiteralGenericAlias): 

396 params.extend(p.__args__) 

397 else: 

398 params.append(p) 

399 return tuple(params) 

400 

401 def _value_and_type_iter(params): 

402 for p in params: 

403 yield p, type(p) 

404 

405 class _LiteralGenericAlias(typing._GenericAlias, _root=True): 

406 def __eq__(self, other): 

407 if not isinstance(other, _LiteralGenericAlias): 

408 return NotImplemented 

409 these_args_deduped = set(_value_and_type_iter(self.__args__)) 

410 other_args_deduped = set(_value_and_type_iter(other.__args__)) 

411 return these_args_deduped == other_args_deduped 

412 

413 def __hash__(self): 

414 return hash(frozenset(_value_and_type_iter(self.__args__))) 

415 

416 class _LiteralForm(_ExtensionsSpecialForm, _root=True): 

417 def __init__(self, doc: str): 

418 self._name = 'Literal' 

419 self._doc = self.__doc__ = doc 

420 

421 def __getitem__(self, parameters): 

422 if not isinstance(parameters, tuple): 

423 parameters = (parameters,) 

424 

425 parameters = _flatten_literal_params(parameters) 

426 

427 val_type_pairs = list(_value_and_type_iter(parameters)) 

428 try: 

429 deduped_pairs = set(val_type_pairs) 

430 except TypeError: 

431 # unhashable parameters 

432 pass 

433 else: 

434 # similar logic to typing._deduplicate on Python 3.9+ 

435 if len(deduped_pairs) < len(val_type_pairs): 

436 new_parameters = [] 

437 for pair in val_type_pairs: 

438 if pair in deduped_pairs: 

439 new_parameters.append(pair[0]) 

440 deduped_pairs.remove(pair) 

441 assert not deduped_pairs, deduped_pairs 

442 parameters = tuple(new_parameters) 

443 

444 return _LiteralGenericAlias(self, parameters) 

445 

446 Literal = _LiteralForm(doc="""\ 

447 A type that can be used to indicate to type checkers 

448 that the corresponding value has a value literally equivalent 

449 to the provided parameter. For example: 

450 

451 var: Literal[4] = 4 

452 

453 The type checker understands that 'var' is literally equal to 

454 the value 4 and no other value. 

455 

456 Literal[...] cannot be subclassed. There is no runtime 

457 checking verifying that the parameter is actually a value 

458 instead of a type.""") 

459 

460 

461_overload_dummy = typing._overload_dummy 

462 

463 

464if hasattr(typing, "get_overloads"): # 3.11+ 

465 overload = typing.overload 

466 get_overloads = typing.get_overloads 

467 clear_overloads = typing.clear_overloads 

468else: 

469 # {module: {qualname: {firstlineno: func}}} 

470 _overload_registry = collections.defaultdict( 

471 functools.partial(collections.defaultdict, dict) 

472 ) 

473 

474 def overload(func): 

475 """Decorator for overloaded functions/methods. 

476 

477 In a stub file, place two or more stub definitions for the same 

478 function in a row, each decorated with @overload. For example: 

479 

480 @overload 

481 def utf8(value: None) -> None: ... 

482 @overload 

483 def utf8(value: bytes) -> bytes: ... 

484 @overload 

485 def utf8(value: str) -> bytes: ... 

486 

487 In a non-stub file (i.e. a regular .py file), do the same but 

488 follow it with an implementation. The implementation should *not* 

489 be decorated with @overload. For example: 

490 

491 @overload 

492 def utf8(value: None) -> None: ... 

493 @overload 

494 def utf8(value: bytes) -> bytes: ... 

495 @overload 

496 def utf8(value: str) -> bytes: ... 

497 def utf8(value): 

498 # implementation goes here 

499 

500 The overloads for a function can be retrieved at runtime using the 

501 get_overloads() function. 

502 """ 

503 # classmethod and staticmethod 

504 f = getattr(func, "__func__", func) 

505 try: 

506 _overload_registry[f.__module__][f.__qualname__][ 

507 f.__code__.co_firstlineno 

508 ] = func 

509 except AttributeError: 

510 # Not a normal function; ignore. 

511 pass 

512 return _overload_dummy 

513 

514 def get_overloads(func): 

515 """Return all defined overloads for *func* as a sequence.""" 

516 # classmethod and staticmethod 

517 f = getattr(func, "__func__", func) 

518 if f.__module__ not in _overload_registry: 

519 return [] 

520 mod_dict = _overload_registry[f.__module__] 

521 if f.__qualname__ not in mod_dict: 

522 return [] 

523 return list(mod_dict[f.__qualname__].values()) 

524 

525 def clear_overloads(): 

526 """Clear all overloads in the registry.""" 

527 _overload_registry.clear() 

528 

529 

530# This is not a real generic class. Don't use outside annotations. 

531Type = typing.Type 

532 

533# Various ABCs mimicking those in collections.abc. 

534# A few are simply re-exported for completeness. 

535Awaitable = typing.Awaitable 

536Coroutine = typing.Coroutine 

537AsyncIterable = typing.AsyncIterable 

538AsyncIterator = typing.AsyncIterator 

539Deque = typing.Deque 

540DefaultDict = typing.DefaultDict 

541OrderedDict = typing.OrderedDict 

542Counter = typing.Counter 

543ChainMap = typing.ChainMap 

544Text = typing.Text 

545TYPE_CHECKING = typing.TYPE_CHECKING 

546 

547 

548# Breakpoint: https://github.com/python/cpython/pull/118681 

549if sys.version_info >= (3, 13, 0, "beta"): 

550 from typing import AsyncContextManager, AsyncGenerator, ContextManager, Generator 

551else: 

552 def _is_dunder(attr): 

553 return attr.startswith('__') and attr.endswith('__') 

554 

555 

556 class _SpecialGenericAlias(typing._SpecialGenericAlias, _root=True): 

557 def __init__(self, origin, nparams, *, defaults, inst=True, name=None): 

558 assert nparams > 0, "`nparams` must be a positive integer" 

559 assert defaults, "Must always specify a non-empty sequence for `defaults`" 

560 super().__init__(origin, nparams, inst=inst, name=name) 

561 self._defaults = defaults 

562 

563 def __setattr__(self, attr, val): 

564 allowed_attrs = {'_name', '_inst', '_nparams', '_defaults'} 

565 if _is_dunder(attr) or attr in allowed_attrs: 

566 object.__setattr__(self, attr, val) 

567 else: 

568 setattr(self.__origin__, attr, val) 

569 

570 @typing._tp_cache 

571 def __getitem__(self, params): 

572 if not isinstance(params, tuple): 

573 params = (params,) 

574 msg = "Parameters to generic types must be types." 

575 params = tuple(typing._type_check(p, msg) for p in params) 

576 if ( 

577 len(params) < self._nparams 

578 and len(params) + len(self._defaults) >= self._nparams 

579 ): 

580 params = (*params, *self._defaults[len(params) - self._nparams:]) 

581 actual_len = len(params) 

582 

583 if actual_len != self._nparams: 

584 expected = f"at least {self._nparams - len(self._defaults)}" 

585 raise TypeError( 

586 f"Too {'many' if actual_len > self._nparams else 'few'}" 

587 f" arguments for {self};" 

588 f" actual {actual_len}, expected {expected}" 

589 ) 

590 return self.copy_with(params) 

591 

592 _NoneType = type(None) 

593 Generator = _SpecialGenericAlias( 

594 collections.abc.Generator, 3, defaults=(_NoneType, _NoneType) 

595 ) 

596 AsyncGenerator = _SpecialGenericAlias( 

597 collections.abc.AsyncGenerator, 2, defaults=(_NoneType,) 

598 ) 

599 ContextManager = _SpecialGenericAlias( 

600 contextlib.AbstractContextManager, 

601 2, 

602 name="ContextManager", 

603 defaults=(typing.Optional[bool],) 

604 ) 

605 AsyncContextManager = _SpecialGenericAlias( 

606 contextlib.AbstractAsyncContextManager, 

607 2, 

608 name="AsyncContextManager", 

609 defaults=(typing.Optional[bool],) 

610 ) 

611 

612 

613_PROTO_ALLOWLIST = { 

614 'collections.abc': [ 

615 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable', 

616 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer', 

617 ], 

618 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'], 

619 'typing_extensions': ['Buffer'], 

620} 

621 

622 

623_EXCLUDED_ATTRS = frozenset(typing.EXCLUDED_ATTRIBUTES) | { 

624 "__match_args__", "__protocol_attrs__", "__non_callable_proto_members__", 

625 "__final__", 

626} 

627 

628 

629def _get_protocol_attrs(cls): 

630 attrs = set() 

631 for base in cls.__mro__[:-1]: # without object 

632 if base.__name__ in {'Protocol', 'Generic'}: 

633 continue 

634 annotations = getattr(base, '__annotations__', {}) 

635 for attr in (*base.__dict__, *annotations): 

636 if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS): 

637 attrs.add(attr) 

638 return attrs 

639 

640 

641def _caller(depth=1, default='__main__'): 

642 try: 

643 return sys._getframemodulename(depth + 1) or default 

644 except AttributeError: # For platforms without _getframemodulename() 

645 pass 

646 try: 

647 return sys._getframe(depth + 1).f_globals.get('__name__', default) 

648 except (AttributeError, ValueError): # For platforms without _getframe() 

649 pass 

650 return None 

651 

652 

653# `__match_args__` attribute was removed from protocol members in 3.13, 

654# we want to backport this change to older Python versions. 

655# Breakpoint: https://github.com/python/cpython/pull/110683 

656if sys.version_info >= (3, 13): 

657 Protocol = typing.Protocol 

658else: 

659 def _allow_reckless_class_checks(depth=2): 

660 """Allow instance and class checks for special stdlib modules. 

661 The abc and functools modules indiscriminately call isinstance() and 

662 issubclass() on the whole MRO of a user class, which may contain protocols. 

663 """ 

664 return _caller(depth) in {'abc', 'functools', None} 

665 

666 def _no_init(self, *args, **kwargs): 

667 if type(self)._is_protocol: 

668 raise TypeError('Protocols cannot be instantiated') 

669 

670 def _type_check_issubclass_arg_1(arg): 

671 """Raise TypeError if `arg` is not an instance of `type` 

672 in `issubclass(arg, <protocol>)`. 

673 

674 In most cases, this is verified by type.__subclasscheck__. 

675 Checking it again unnecessarily would slow down issubclass() checks, 

676 so, we don't perform this check unless we absolutely have to. 

677 

678 For various error paths, however, 

679 we want to ensure that *this* error message is shown to the user 

680 where relevant, rather than a typing.py-specific error message. 

681 """ 

682 if not isinstance(arg, type): 

683 # Same error message as for issubclass(1, int). 

684 raise TypeError('issubclass() arg 1 must be a class') 

685 

686 # Inheriting from typing._ProtocolMeta isn't actually desirable, 

687 # but is necessary to allow typing.Protocol and typing_extensions.Protocol 

688 # to mix without getting TypeErrors about "metaclass conflict" 

689 class _ProtocolMeta(type(typing.Protocol)): 

690 # This metaclass is somewhat unfortunate, 

691 # but is necessary for several reasons... 

692 # 

693 # NOTE: DO NOT call super() in any methods in this class 

694 # That would call the methods on typing._ProtocolMeta on Python <=3.11 

695 # and those are slow 

696 def __new__(mcls, name, bases, namespace, **kwargs): 

697 if name == "Protocol" and len(bases) < 2: 

698 pass 

699 elif {Protocol, typing.Protocol} & set(bases): 

700 for base in bases: 

701 if not ( 

702 base in {object, typing.Generic, Protocol, typing.Protocol} 

703 or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, []) 

704 or is_protocol(base) 

705 ): 

706 raise TypeError( 

707 f"Protocols can only inherit from other protocols, " 

708 f"got {base!r}" 

709 ) 

710 return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs) 

711 

712 def __init__(cls, *args, **kwargs): 

713 abc.ABCMeta.__init__(cls, *args, **kwargs) 

714 if getattr(cls, "_is_protocol", False): 

715 cls.__protocol_attrs__ = _get_protocol_attrs(cls) 

716 

717 def __subclasscheck__(cls, other): 

718 if cls is Protocol: 

719 return type.__subclasscheck__(cls, other) 

720 if ( 

721 getattr(cls, '_is_protocol', False) 

722 and not _allow_reckless_class_checks() 

723 ): 

724 if not getattr(cls, '_is_runtime_protocol', False): 

725 _type_check_issubclass_arg_1(other) 

726 raise TypeError( 

727 "Instance and class checks can only be used with " 

728 "@runtime_checkable protocols" 

729 ) 

730 if ( 

731 # this attribute is set by @runtime_checkable: 

732 cls.__non_callable_proto_members__ 

733 and cls.__dict__.get("__subclasshook__") is _proto_hook 

734 ): 

735 _type_check_issubclass_arg_1(other) 

736 non_method_attrs = sorted(cls.__non_callable_proto_members__) 

737 raise TypeError( 

738 "Protocols with non-method members don't support issubclass()." 

739 f" Non-method members: {str(non_method_attrs)[1:-1]}." 

740 ) 

741 return abc.ABCMeta.__subclasscheck__(cls, other) 

742 

743 def __instancecheck__(cls, instance): 

744 # We need this method for situations where attributes are 

745 # assigned in __init__. 

746 if cls is Protocol: 

747 return type.__instancecheck__(cls, instance) 

748 if not getattr(cls, "_is_protocol", False): 

749 # i.e., it's a concrete subclass of a protocol 

750 return abc.ABCMeta.__instancecheck__(cls, instance) 

751 

752 if ( 

753 not getattr(cls, '_is_runtime_protocol', False) and 

754 not _allow_reckless_class_checks() 

755 ): 

756 raise TypeError("Instance and class checks can only be used with" 

757 " @runtime_checkable protocols") 

758 

759 if abc.ABCMeta.__instancecheck__(cls, instance): 

760 return True 

761 

762 for attr in cls.__protocol_attrs__: 

763 try: 

764 val = inspect.getattr_static(instance, attr) 

765 except AttributeError: 

766 break 

767 # this attribute is set by @runtime_checkable: 

768 if val is None and attr not in cls.__non_callable_proto_members__: 

769 break 

770 else: 

771 return True 

772 

773 return False 

774 

775 def __eq__(cls, other): 

776 # Hack so that typing.Generic.__class_getitem__ 

777 # treats typing_extensions.Protocol 

778 # as equivalent to typing.Protocol 

779 if abc.ABCMeta.__eq__(cls, other) is True: 

780 return True 

781 return cls is Protocol and other is typing.Protocol 

782 

783 # This has to be defined, or the abc-module cache 

784 # complains about classes with this metaclass being unhashable, 

785 # if we define only __eq__! 

786 def __hash__(cls) -> int: 

787 return type.__hash__(cls) 

788 

789 @classmethod 

790 def _proto_hook(cls, other): 

791 if not cls.__dict__.get('_is_protocol', False): 

792 return NotImplemented 

793 

794 for attr in cls.__protocol_attrs__: 

795 for base in other.__mro__: 

796 # Check if the members appears in the class dictionary... 

797 if attr in base.__dict__: 

798 if base.__dict__[attr] is None: 

799 return NotImplemented 

800 break 

801 

802 # ...or in annotations, if it is a sub-protocol. 

803 annotations = getattr(base, '__annotations__', {}) 

804 if ( 

805 isinstance(annotations, collections.abc.Mapping) 

806 and attr in annotations 

807 and is_protocol(other) 

808 ): 

809 break 

810 else: 

811 return NotImplemented 

812 return True 

813 

814 class Protocol(typing.Generic, metaclass=_ProtocolMeta): 

815 __doc__ = typing.Protocol.__doc__ 

816 __slots__ = () 

817 _is_protocol = True 

818 _is_runtime_protocol = False 

819 

820 def __init_subclass__(cls, *args, **kwargs): 

821 super().__init_subclass__(*args, **kwargs) 

822 

823 # Determine if this is a protocol or a concrete subclass. 

824 if not cls.__dict__.get('_is_protocol', False): 

825 cls._is_protocol = any(b is Protocol for b in cls.__bases__) 

826 

827 # Set (or override) the protocol subclass hook. 

828 if '__subclasshook__' not in cls.__dict__: 

829 cls.__subclasshook__ = _proto_hook 

830 

831 # Prohibit instantiation for protocol classes 

832 if cls._is_protocol and cls.__init__ is Protocol.__init__: 

833 cls.__init__ = _no_init 

834 

835 

836# Breakpoint: https://github.com/python/cpython/pull/113401 

837if sys.version_info >= (3, 13): 

838 runtime_checkable = typing.runtime_checkable 

839else: 

840 def runtime_checkable(cls): 

841 """Mark a protocol class as a runtime protocol. 

842 

843 Such protocol can be used with isinstance() and issubclass(). 

844 Raise TypeError if applied to a non-protocol class. 

845 This allows a simple-minded structural check very similar to 

846 one trick ponies in collections.abc such as Iterable. 

847 

848 For example:: 

849 

850 @runtime_checkable 

851 class Closable(Protocol): 

852 def close(self): ... 

853 

854 assert isinstance(open('/some/file'), Closable) 

855 

856 Warning: this will check only the presence of the required methods, 

857 not their type signatures! 

858 """ 

859 if not issubclass(cls, typing.Generic) or not getattr(cls, '_is_protocol', False): 

860 raise TypeError(f'@runtime_checkable can be only applied to protocol classes,' 

861 f' got {cls!r}') 

862 cls._is_runtime_protocol = True 

863 

864 # typing.Protocol classes on <=3.11 break if we execute this block, 

865 # because typing.Protocol classes on <=3.11 don't have a 

866 # `__protocol_attrs__` attribute, and this block relies on the 

867 # `__protocol_attrs__` attribute. Meanwhile, typing.Protocol classes on 3.12.2+ 

868 # break if we *don't* execute this block, because *they* assume that all 

869 # protocol classes have a `__non_callable_proto_members__` attribute 

870 # (which this block sets) 

871 if isinstance(cls, _ProtocolMeta) or sys.version_info >= (3, 12, 2): 

872 # PEP 544 prohibits using issubclass() 

873 # with protocols that have non-method members. 

874 # See gh-113320 for why we compute this attribute here, 

875 # rather than in `_ProtocolMeta.__init__` 

876 cls.__non_callable_proto_members__ = set() 

877 for attr in cls.__protocol_attrs__: 

878 try: 

879 is_callable = callable(getattr(cls, attr, None)) 

880 except Exception as e: 

881 raise TypeError( 

882 f"Failed to determine whether protocol member {attr!r} " 

883 "is a method member" 

884 ) from e 

885 else: 

886 if not is_callable: 

887 cls.__non_callable_proto_members__.add(attr) 

888 

889 return cls 

890 

891 

892# The "runtime" alias exists for backwards compatibility. 

893runtime = runtime_checkable 

894 

895 

896# Our version of runtime-checkable protocols is faster on Python <=3.11 

897# Breakpoint: https://github.com/python/cpython/pull/112717 

898if sys.version_info >= (3, 12): 

899 SupportsInt = typing.SupportsInt 

900 SupportsFloat = typing.SupportsFloat 

901 SupportsComplex = typing.SupportsComplex 

902 SupportsBytes = typing.SupportsBytes 

903 SupportsIndex = typing.SupportsIndex 

904 SupportsAbs = typing.SupportsAbs 

905 SupportsRound = typing.SupportsRound 

906else: 

907 @runtime_checkable 

908 class SupportsInt(Protocol): 

909 """An ABC with one abstract method __int__.""" 

910 __slots__ = () 

911 

912 @abc.abstractmethod 

913 def __int__(self) -> int: 

914 pass 

915 

916 @runtime_checkable 

917 class SupportsFloat(Protocol): 

918 """An ABC with one abstract method __float__.""" 

919 __slots__ = () 

920 

921 @abc.abstractmethod 

922 def __float__(self) -> float: 

923 pass 

924 

925 @runtime_checkable 

926 class SupportsComplex(Protocol): 

927 """An ABC with one abstract method __complex__.""" 

928 __slots__ = () 

929 

930 @abc.abstractmethod 

931 def __complex__(self) -> complex: 

932 pass 

933 

934 @runtime_checkable 

935 class SupportsBytes(Protocol): 

936 """An ABC with one abstract method __bytes__.""" 

937 __slots__ = () 

938 

939 @abc.abstractmethod 

940 def __bytes__(self) -> bytes: 

941 pass 

942 

943 @runtime_checkable 

944 class SupportsIndex(Protocol): 

945 __slots__ = () 

946 

947 @abc.abstractmethod 

948 def __index__(self) -> int: 

949 pass 

950 

951 @runtime_checkable 

952 class SupportsAbs(Protocol[T_co]): 

953 """ 

954 An ABC with one abstract method __abs__ that is covariant in its return type. 

955 """ 

956 __slots__ = () 

957 

958 @abc.abstractmethod 

959 def __abs__(self) -> T_co: 

960 pass 

961 

962 @runtime_checkable 

963 class SupportsRound(Protocol[T_co]): 

964 """ 

965 An ABC with one abstract method __round__ that is covariant in its return type. 

966 """ 

967 __slots__ = () 

968 

969 @abc.abstractmethod 

970 def __round__(self, ndigits: int = 0) -> T_co: 

971 pass 

972 

973 

974if hasattr(io, "Reader") and hasattr(io, "Writer"): 

975 Reader = io.Reader 

976 Writer = io.Writer 

977else: 

978 @runtime_checkable 

979 class Reader(Protocol[T_co]): 

980 """Protocol for simple I/O reader instances. 

981 

982 This protocol only supports blocking I/O. 

983 """ 

984 

985 __slots__ = () 

986 

987 @abc.abstractmethod 

988 def read(self, size: int = ..., /) -> T_co: 

989 """Read data from the input stream and return it. 

990 

991 If *size* is specified, at most *size* items (bytes/characters) will be 

992 read. 

993 """ 

994 

995 @runtime_checkable 

996 class Writer(Protocol[T_contra]): 

997 """Protocol for simple I/O writer instances. 

998 

999 This protocol only supports blocking I/O. 

1000 """ 

1001 

1002 __slots__ = () 

1003 

1004 @abc.abstractmethod 

1005 def write(self, data: T_contra, /) -> int: 

1006 """Write *data* to the output stream and return the number of items written.""" # noqa: E501 

1007 

1008 

1009_NEEDS_SINGLETONMETA = ( 

1010 not hasattr(typing, "NoDefault") or not hasattr(typing, "NoExtraItems") 

1011) 

1012 

1013if _NEEDS_SINGLETONMETA: 

1014 class SingletonMeta(type): 

1015 def __setattr__(cls, attr, value): 

1016 # TypeError is consistent with the behavior of NoneType 

1017 raise TypeError( 

1018 f"cannot set {attr!r} attribute of immutable type {cls.__name__!r}" 

1019 ) 

1020 

1021 

1022if hasattr(typing, "NoDefault"): 

1023 NoDefault = typing.NoDefault 

1024else: 

1025 class NoDefaultType(metaclass=SingletonMeta): 

1026 """The type of the NoDefault singleton.""" 

1027 

1028 __slots__ = () 

1029 

1030 def __new__(cls): 

1031 return globals().get("NoDefault") or object.__new__(cls) 

1032 

1033 def __repr__(self): 

1034 return "typing_extensions.NoDefault" 

1035 

1036 def __reduce__(self): 

1037 return "NoDefault" 

1038 

1039 NoDefault = NoDefaultType() 

1040 del NoDefaultType 

1041 

1042if hasattr(typing, "NoExtraItems"): 

1043 NoExtraItems = typing.NoExtraItems 

1044else: 

1045 class NoExtraItemsType(metaclass=SingletonMeta): 

1046 """The type of the NoExtraItems singleton.""" 

1047 

1048 __slots__ = () 

1049 

1050 def __new__(cls): 

1051 return globals().get("NoExtraItems") or object.__new__(cls) 

1052 

1053 def __repr__(self): 

1054 return "typing_extensions.NoExtraItems" 

1055 

1056 def __reduce__(self): 

1057 return "NoExtraItems" 

1058 

1059 NoExtraItems = NoExtraItemsType() 

1060 del NoExtraItemsType 

1061 

1062if _NEEDS_SINGLETONMETA: 

1063 del SingletonMeta 

1064 

1065 

1066# Update this to something like >=3.13.0b1 if and when 

1067# PEP 728 is implemented in CPython 

1068_PEP_728_IMPLEMENTED = False 

1069 

1070if _PEP_728_IMPLEMENTED: 

1071 # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" 

1072 # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059 

1073 # The standard library TypedDict below Python 3.11 does not store runtime 

1074 # information about optional and required keys when using Required or NotRequired. 

1075 # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11. 

1076 # Aaaand on 3.12 we add __orig_bases__ to TypedDict 

1077 # to enable better runtime introspection. 

1078 # On 3.13 we deprecate some odd ways of creating TypedDicts. 

1079 # Also on 3.13, PEP 705 adds the ReadOnly[] qualifier. 

1080 # PEP 728 (still pending) makes more changes. 

1081 TypedDict = typing.TypedDict 

1082 _TypedDictMeta = typing._TypedDictMeta 

1083 is_typeddict = typing.is_typeddict 

1084else: 

1085 # 3.10.0 and later 

1086 _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters 

1087 

1088 def _get_typeddict_qualifiers(annotation_type): 

1089 while True: 

1090 annotation_origin = get_origin(annotation_type) 

1091 if annotation_origin is Annotated: 

1092 annotation_args = get_args(annotation_type) 

1093 if annotation_args: 

1094 annotation_type = annotation_args[0] 

1095 else: 

1096 break 

1097 elif annotation_origin is Required: 

1098 yield Required 

1099 annotation_type, = get_args(annotation_type) 

1100 elif annotation_origin is NotRequired: 

1101 yield NotRequired 

1102 annotation_type, = get_args(annotation_type) 

1103 elif annotation_origin is ReadOnly: 

1104 yield ReadOnly 

1105 annotation_type, = get_args(annotation_type) 

1106 else: 

1107 break 

1108 

1109 class _TypedDictMeta(type): 

1110 

1111 def __new__(cls, name, bases, ns, *, total=True, closed=None, 

1112 extra_items=NoExtraItems): 

1113 """Create new typed dict class object. 

1114 

1115 This method is called when TypedDict is subclassed, 

1116 or when TypedDict is instantiated. This way 

1117 TypedDict supports all three syntax forms described in its docstring. 

1118 Subclasses and instances of TypedDict return actual dictionaries. 

1119 """ 

1120 for base in bases: 

1121 if type(base) is not _TypedDictMeta and base is not typing.Generic: 

1122 raise TypeError('cannot inherit from both a TypedDict type ' 

1123 'and a non-TypedDict base class') 

1124 if closed is not None and extra_items is not NoExtraItems: 

1125 raise TypeError(f"Cannot combine closed={closed!r} and extra_items") 

1126 

1127 if any(issubclass(b, typing.Generic) for b in bases): 

1128 generic_base = (typing.Generic,) 

1129 else: 

1130 generic_base = () 

1131 

1132 ns_annotations = ns.pop('__annotations__', None) 

1133 

1134 # typing.py generally doesn't let you inherit from plain Generic, unless 

1135 # the name of the class happens to be "Protocol" 

1136 tp_dict = type.__new__(_TypedDictMeta, "Protocol", (*generic_base, dict), ns) 

1137 tp_dict.__name__ = name 

1138 if tp_dict.__qualname__ == "Protocol": 

1139 tp_dict.__qualname__ = name 

1140 

1141 if not hasattr(tp_dict, '__orig_bases__'): 

1142 tp_dict.__orig_bases__ = bases 

1143 

1144 annotations = {} 

1145 own_annotate = None 

1146 if ns_annotations is not None: 

1147 own_annotations = ns_annotations 

1148 elif sys.version_info >= (3, 14): 

1149 if hasattr(annotationlib, "get_annotate_from_class_namespace"): 

1150 own_annotate = annotationlib.get_annotate_from_class_namespace(ns) 

1151 else: 

1152 # 3.14.0a7 and earlier 

1153 own_annotate = ns.get("__annotate__") 

1154 if own_annotate is not None: 

1155 own_annotations = annotationlib.call_annotate_function( 

1156 own_annotate, Format.FORWARDREF, owner=tp_dict 

1157 ) 

1158 else: 

1159 own_annotations = {} 

1160 else: 

1161 own_annotations = {} 

1162 msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" 

1163 if _TAKES_MODULE: 

1164 own_checked_annotations = { 

1165 n: typing._type_check(tp, msg, module=tp_dict.__module__) 

1166 for n, tp in own_annotations.items() 

1167 } 

1168 else: 

1169 own_checked_annotations = { 

1170 n: typing._type_check(tp, msg) 

1171 for n, tp in own_annotations.items() 

1172 } 

1173 required_keys = set() 

1174 optional_keys = set() 

1175 readonly_keys = set() 

1176 mutable_keys = set() 

1177 extra_items_type = extra_items 

1178 

1179 for base in bases: 

1180 base_dict = base.__dict__ 

1181 

1182 if sys.version_info <= (3, 14): 

1183 annotations.update(base_dict.get('__annotations__', {})) 

1184 required_keys.update(base_dict.get('__required_keys__', ())) 

1185 optional_keys.update(base_dict.get('__optional_keys__', ())) 

1186 readonly_keys.update(base_dict.get('__readonly_keys__', ())) 

1187 mutable_keys.update(base_dict.get('__mutable_keys__', ())) 

1188 

1189 # This was specified in an earlier version of PEP 728. Support 

1190 # is retained for backwards compatibility, but only for Python 

1191 # 3.13 and lower. 

1192 if (closed and sys.version_info < (3, 14) 

1193 and "__extra_items__" in own_checked_annotations): 

1194 annotation_type = own_checked_annotations.pop("__extra_items__") 

1195 qualifiers = set(_get_typeddict_qualifiers(annotation_type)) 

1196 if Required in qualifiers: 

1197 raise TypeError( 

1198 "Special key __extra_items__ does not support " 

1199 "Required" 

1200 ) 

1201 if NotRequired in qualifiers: 

1202 raise TypeError( 

1203 "Special key __extra_items__ does not support " 

1204 "NotRequired" 

1205 ) 

1206 extra_items_type = annotation_type 

1207 

1208 annotations.update(own_checked_annotations) 

1209 for annotation_key, annotation_type in own_checked_annotations.items(): 

1210 qualifiers = set(_get_typeddict_qualifiers(annotation_type)) 

1211 

1212 if Required in qualifiers: 

1213 required_keys.add(annotation_key) 

1214 elif NotRequired in qualifiers: 

1215 optional_keys.add(annotation_key) 

1216 elif total: 

1217 required_keys.add(annotation_key) 

1218 else: 

1219 optional_keys.add(annotation_key) 

1220 if ReadOnly in qualifiers: 

1221 mutable_keys.discard(annotation_key) 

1222 readonly_keys.add(annotation_key) 

1223 else: 

1224 mutable_keys.add(annotation_key) 

1225 readonly_keys.discard(annotation_key) 

1226 

1227 # Breakpoint: https://github.com/python/cpython/pull/119891 

1228 if sys.version_info >= (3, 14): 

1229 def __annotate__(format): 

1230 annos = {} 

1231 for base in bases: 

1232 if base is Generic: 

1233 continue 

1234 base_annotate = base.__annotate__ 

1235 if base_annotate is None: 

1236 continue 

1237 base_annos = annotationlib.call_annotate_function( 

1238 base_annotate, format, owner=base) 

1239 annos.update(base_annos) 

1240 if own_annotate is not None: 

1241 own = annotationlib.call_annotate_function( 

1242 own_annotate, format, owner=tp_dict) 

1243 if format != Format.STRING: 

1244 own = { 

1245 n: typing._type_check(tp, msg, module=tp_dict.__module__) 

1246 for n, tp in own.items() 

1247 } 

1248 elif format == Format.STRING: 

1249 own = annotationlib.annotations_to_string(own_annotations) 

1250 elif format in (Format.FORWARDREF, Format.VALUE): 

1251 own = own_checked_annotations 

1252 else: 

1253 raise NotImplementedError(format) 

1254 annos.update(own) 

1255 return annos 

1256 

1257 tp_dict.__annotate__ = __annotate__ 

1258 else: 

1259 tp_dict.__annotations__ = annotations 

1260 tp_dict.__required_keys__ = frozenset(required_keys) 

1261 tp_dict.__optional_keys__ = frozenset(optional_keys) 

1262 tp_dict.__readonly_keys__ = frozenset(readonly_keys) 

1263 tp_dict.__mutable_keys__ = frozenset(mutable_keys) 

1264 tp_dict.__total__ = total 

1265 tp_dict.__closed__ = closed 

1266 tp_dict.__extra_items__ = extra_items_type 

1267 return tp_dict 

1268 

1269 __call__ = dict # static method 

1270 

1271 def __subclasscheck__(cls, other): 

1272 # Typed dicts are only for static structural subtyping. 

1273 raise TypeError('TypedDict does not support instance and class checks') 

1274 

1275 __instancecheck__ = __subclasscheck__ 

1276 

1277 _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {}) 

1278 

1279 def _create_typeddict( 

1280 typename, 

1281 fields, 

1282 /, 

1283 *, 

1284 typing_is_inline, 

1285 total, 

1286 closed, 

1287 extra_items, 

1288 **kwargs, 

1289 ): 

1290 if fields is _marker or fields is None: 

1291 if fields is _marker: 

1292 deprecated_thing = ( 

1293 "Failing to pass a value for the 'fields' parameter" 

1294 ) 

1295 else: 

1296 deprecated_thing = "Passing `None` as the 'fields' parameter" 

1297 

1298 example = f"`{typename} = TypedDict({typename!r}, {{}})`" 

1299 deprecation_msg = ( 

1300 f"{deprecated_thing} is deprecated and will be disallowed in " 

1301 "Python 3.15. To create a TypedDict class with 0 fields " 

1302 "using the functional syntax, pass an empty dictionary, e.g. " 

1303 ) + example + "." 

1304 warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2) 

1305 # Support a field called "closed" 

1306 if closed is not False and closed is not True and closed is not None: 

1307 kwargs["closed"] = closed 

1308 closed = None 

1309 # Or "extra_items" 

1310 if extra_items is not NoExtraItems: 

1311 kwargs["extra_items"] = extra_items 

1312 extra_items = NoExtraItems 

1313 fields = kwargs 

1314 elif kwargs: 

1315 raise TypeError("TypedDict takes either a dict or keyword arguments," 

1316 " but not both") 

1317 if kwargs: 

1318 # Breakpoint: https://github.com/python/cpython/pull/104891 

1319 if sys.version_info >= (3, 13): 

1320 raise TypeError("TypedDict takes no keyword arguments") 

1321 warnings.warn( 

1322 "The kwargs-based syntax for TypedDict definitions is deprecated " 

1323 "in Python 3.11, will be removed in Python 3.13, and may not be " 

1324 "understood by third-party type checkers.", 

1325 DeprecationWarning, 

1326 stacklevel=2, 

1327 ) 

1328 

1329 ns = {'__annotations__': dict(fields)} 

1330 module = _caller(depth=4 if typing_is_inline else 2) 

1331 if module is not None: 

1332 # Setting correct module is necessary to make typed dict classes 

1333 # pickleable. 

1334 ns['__module__'] = module 

1335 

1336 td = _TypedDictMeta(typename, (), ns, total=total, closed=closed, 

1337 extra_items=extra_items) 

1338 td.__orig_bases__ = (TypedDict,) 

1339 return td 

1340 

1341 class _TypedDictSpecialForm(_SpecialForm, _root=True): 

1342 def __call__( 

1343 self, 

1344 typename, 

1345 fields=_marker, 

1346 /, 

1347 *, 

1348 total=True, 

1349 closed=None, 

1350 extra_items=NoExtraItems, 

1351 **kwargs 

1352 ): 

1353 return _create_typeddict( 

1354 typename, 

1355 fields, 

1356 typing_is_inline=False, 

1357 total=total, 

1358 closed=closed, 

1359 extra_items=extra_items, 

1360 **kwargs, 

1361 ) 

1362 

1363 def __mro_entries__(self, bases): 

1364 return (_TypedDict,) 

1365 

1366 @_TypedDictSpecialForm 

1367 def TypedDict(self, args): 

1368 """A simple typed namespace. At runtime it is equivalent to a plain dict. 

1369 

1370 TypedDict creates a dictionary type such that a type checker will expect all 

1371 instances to have a certain set of keys, where each key is 

1372 associated with a value of a consistent type. This expectation 

1373 is not checked at runtime. 

1374 

1375 Usage:: 

1376 

1377 class Point2D(TypedDict): 

1378 x: int 

1379 y: int 

1380 label: str 

1381 

1382 a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK 

1383 b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check 

1384 

1385 assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first') 

1386 

1387 The type info can be accessed via the Point2D.__annotations__ dict, and 

1388 the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets. 

1389 TypedDict supports an additional equivalent form:: 

1390 

1391 Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) 

1392 

1393 By default, all keys must be present in a TypedDict. It is possible 

1394 to override this by specifying totality:: 

1395 

1396 class Point2D(TypedDict, total=False): 

1397 x: int 

1398 y: int 

1399 

1400 This means that a Point2D TypedDict can have any of the keys omitted. A type 

1401 checker is only expected to support a literal False or True as the value of 

1402 the total argument. True is the default, and makes all items defined in the 

1403 class body be required. 

1404 

1405 The Required and NotRequired special forms can also be used to mark 

1406 individual keys as being required or not required:: 

1407 

1408 class Point2D(TypedDict): 

1409 x: int # the "x" key must always be present (Required is the default) 

1410 y: NotRequired[int] # the "y" key can be omitted 

1411 

1412 See PEP 655 for more details on Required and NotRequired. 

1413 """ 

1414 # This runs when creating inline TypedDicts: 

1415 if not isinstance(args, dict): 

1416 raise TypeError( 

1417 "TypedDict[...] should be used with a single dict argument" 

1418 ) 

1419 

1420 return _create_typeddict( 

1421 "<inline TypedDict>", 

1422 args, 

1423 typing_is_inline=True, 

1424 total=True, 

1425 closed=True, 

1426 extra_items=NoExtraItems, 

1427 ) 

1428 

1429 _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta) 

1430 

1431 def is_typeddict(tp): 

1432 """Check if an annotation is a TypedDict class 

1433 

1434 For example:: 

1435 class Film(TypedDict): 

1436 title: str 

1437 year: int 

1438 

1439 is_typeddict(Film) # => True 

1440 is_typeddict(Union[list, str]) # => False 

1441 """ 

1442 return isinstance(tp, _TYPEDDICT_TYPES) 

1443 

1444 

1445if hasattr(typing, "assert_type"): 

1446 assert_type = typing.assert_type 

1447 

1448else: 

1449 def assert_type(val, typ, /): 

1450 """Assert (to the type checker) that the value is of the given type. 

1451 

1452 When the type checker encounters a call to assert_type(), it 

1453 emits an error if the value is not of the specified type:: 

1454 

1455 def greet(name: str) -> None: 

1456 assert_type(name, str) # ok 

1457 assert_type(name, int) # type checker error 

1458 

1459 At runtime this returns the first argument unchanged and otherwise 

1460 does nothing. 

1461 """ 

1462 return val 

1463 

1464 

1465if hasattr(typing, "ReadOnly"): # 3.13+ 

1466 get_type_hints = typing.get_type_hints 

1467else: # <=3.13 

1468 # replaces _strip_annotations() 

1469 def _strip_extras(t): 

1470 """Strips Annotated, Required and NotRequired from a given type.""" 

1471 if isinstance(t, typing._AnnotatedAlias): 

1472 return _strip_extras(t.__origin__) 

1473 if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired, ReadOnly): 

1474 return _strip_extras(t.__args__[0]) 

1475 if isinstance(t, typing._GenericAlias): 

1476 stripped_args = tuple(_strip_extras(a) for a in t.__args__) 

1477 if stripped_args == t.__args__: 

1478 return t 

1479 return t.copy_with(stripped_args) 

1480 if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias): 

1481 stripped_args = tuple(_strip_extras(a) for a in t.__args__) 

1482 if stripped_args == t.__args__: 

1483 return t 

1484 return _types.GenericAlias(t.__origin__, stripped_args) 

1485 if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType): 

1486 stripped_args = tuple(_strip_extras(a) for a in t.__args__) 

1487 if stripped_args == t.__args__: 

1488 return t 

1489 return functools.reduce(operator.or_, stripped_args) 

1490 

1491 return t 

1492 

1493 def get_type_hints(obj, globalns=None, localns=None, include_extras=False): 

1494 """Return type hints for an object. 

1495 

1496 This is often the same as obj.__annotations__, but it handles 

1497 forward references encoded as string literals, adds Optional[t] if a 

1498 default value equal to None is set and recursively replaces all 

1499 'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T' 

1500 (unless 'include_extras=True'). 

1501 

1502 The argument may be a module, class, method, or function. The annotations 

1503 are returned as a dictionary. For classes, annotations include also 

1504 inherited members. 

1505 

1506 TypeError is raised if the argument is not of a type that can contain 

1507 annotations, and an empty dictionary is returned if no annotations are 

1508 present. 

1509 

1510 BEWARE -- the behavior of globalns and localns is counterintuitive 

1511 (unless you are familiar with how eval() and exec() work). The 

1512 search order is locals first, then globals. 

1513 

1514 - If no dict arguments are passed, an attempt is made to use the 

1515 globals from obj (or the respective module's globals for classes), 

1516 and these are also used as the locals. If the object does not appear 

1517 to have globals, an empty dictionary is used. 

1518 

1519 - If one dict argument is passed, it is used for both globals and 

1520 locals. 

1521 

1522 - If two dict arguments are passed, they specify globals and 

1523 locals, respectively. 

1524 """ 

1525 hint = typing.get_type_hints( 

1526 obj, globalns=globalns, localns=localns, include_extras=True 

1527 ) 

1528 # Breakpoint: https://github.com/python/cpython/pull/30304 

1529 if sys.version_info < (3, 11): 

1530 _clean_optional(obj, hint, globalns, localns) 

1531 if include_extras: 

1532 return hint 

1533 return {k: _strip_extras(t) for k, t in hint.items()} 

1534 

1535 _NoneType = type(None) 

1536 

1537 def _could_be_inserted_optional(t): 

1538 """detects Union[..., None] pattern""" 

1539 if not isinstance(t, typing._UnionGenericAlias): 

1540 return False 

1541 # Assume if last argument is not None they are user defined 

1542 if t.__args__[-1] is not _NoneType: 

1543 return False 

1544 return True 

1545 

1546 # < 3.11 

1547 def _clean_optional(obj, hints, globalns=None, localns=None): 

1548 # reverts injected Union[..., None] cases from typing.get_type_hints 

1549 # when a None default value is used. 

1550 # see https://github.com/python/typing_extensions/issues/310 

1551 if not hints or isinstance(obj, type): 

1552 return 

1553 defaults = typing._get_defaults(obj) # avoid accessing __annotations___ 

1554 if not defaults: 

1555 return 

1556 original_hints = obj.__annotations__ 

1557 for name, value in hints.items(): 

1558 # Not a Union[..., None] or replacement conditions not fullfilled 

1559 if (not _could_be_inserted_optional(value) 

1560 or name not in defaults 

1561 or defaults[name] is not None 

1562 ): 

1563 continue 

1564 original_value = original_hints[name] 

1565 # value=NoneType should have caused a skip above but check for safety 

1566 if original_value is None: 

1567 original_value = _NoneType 

1568 # Forward reference 

1569 if isinstance(original_value, str): 

1570 if globalns is None: 

1571 if isinstance(obj, _types.ModuleType): 

1572 globalns = obj.__dict__ 

1573 else: 

1574 nsobj = obj 

1575 # Find globalns for the unwrapped object. 

1576 while hasattr(nsobj, '__wrapped__'): 

1577 nsobj = nsobj.__wrapped__ 

1578 globalns = getattr(nsobj, '__globals__', {}) 

1579 if localns is None: 

1580 localns = globalns 

1581 elif localns is None: 

1582 localns = globalns 

1583 

1584 original_value = ForwardRef( 

1585 original_value, 

1586 is_argument=not isinstance(obj, _types.ModuleType) 

1587 ) 

1588 original_evaluated = typing._eval_type(original_value, globalns, localns) 

1589 # Compare if values differ. Note that even if equal 

1590 # value might be cached by typing._tp_cache contrary to original_evaluated 

1591 if original_evaluated != value or ( 

1592 # 3.10: ForwardRefs of UnionType might be turned into _UnionGenericAlias 

1593 hasattr(_types, "UnionType") 

1594 and isinstance(original_evaluated, _types.UnionType) 

1595 and not isinstance(value, _types.UnionType) 

1596 ): 

1597 hints[name] = original_evaluated 

1598 

1599# Python 3.9 has get_origin() and get_args() but those implementations don't support 

1600# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do. 

1601# Breakpoint: https://github.com/python/cpython/pull/25298 

1602if sys.version_info >= (3, 10): 

1603 get_origin = typing.get_origin 

1604 get_args = typing.get_args 

1605# 3.9 

1606else: 

1607 def get_origin(tp): 

1608 """Get the unsubscripted version of a type. 

1609 

1610 This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar 

1611 and Annotated. Return None for unsupported types. Examples:: 

1612 

1613 get_origin(Literal[42]) is Literal 

1614 get_origin(int) is None 

1615 get_origin(ClassVar[int]) is ClassVar 

1616 get_origin(Generic) is Generic 

1617 get_origin(Generic[T]) is Generic 

1618 get_origin(Union[T, int]) is Union 

1619 get_origin(List[Tuple[T, T]][int]) == list 

1620 get_origin(P.args) is P 

1621 """ 

1622 if isinstance(tp, typing._AnnotatedAlias): 

1623 return Annotated 

1624 if isinstance(tp, (typing._BaseGenericAlias, _types.GenericAlias, 

1625 ParamSpecArgs, ParamSpecKwargs)): 

1626 return tp.__origin__ 

1627 if tp is typing.Generic: 

1628 return typing.Generic 

1629 return None 

1630 

1631 def get_args(tp): 

1632 """Get type arguments with all substitutions performed. 

1633 

1634 For unions, basic simplifications used by Union constructor are performed. 

1635 Examples:: 

1636 get_args(Dict[str, int]) == (str, int) 

1637 get_args(int) == () 

1638 get_args(Union[int, Union[T, int], str][int]) == (int, str) 

1639 get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) 

1640 get_args(Callable[[], T][int]) == ([], int) 

1641 """ 

1642 if isinstance(tp, typing._AnnotatedAlias): 

1643 return (tp.__origin__, *tp.__metadata__) 

1644 if isinstance(tp, (typing._GenericAlias, _types.GenericAlias)): 

1645 res = tp.__args__ 

1646 if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis: 

1647 res = (list(res[:-1]), res[-1]) 

1648 return res 

1649 return () 

1650 

1651 

1652# 3.10+ 

1653if hasattr(typing, 'TypeAlias'): 

1654 TypeAlias = typing.TypeAlias 

1655# 3.9 

1656else: 

1657 @_ExtensionsSpecialForm 

1658 def TypeAlias(self, parameters): 

1659 """Special marker indicating that an assignment should 

1660 be recognized as a proper type alias definition by type 

1661 checkers. 

1662 

1663 For example:: 

1664 

1665 Predicate: TypeAlias = Callable[..., bool] 

1666 

1667 It's invalid when used anywhere except as in the example above. 

1668 """ 

1669 raise TypeError(f"{self} is not subscriptable") 

1670 

1671 

1672def _set_default(type_param, default): 

1673 type_param.has_default = lambda: default is not NoDefault 

1674 type_param.__default__ = default 

1675 

1676 

1677def _set_module(typevarlike): 

1678 # for pickling: 

1679 def_mod = _caller(depth=2) 

1680 if def_mod != 'typing_extensions': 

1681 typevarlike.__module__ = def_mod 

1682 

1683 

1684class _DefaultMixin: 

1685 """Mixin for TypeVarLike defaults.""" 

1686 

1687 __slots__ = () 

1688 __init__ = _set_default 

1689 

1690 

1691# Classes using this metaclass must provide a _backported_typevarlike ClassVar 

1692class _TypeVarLikeMeta(type): 

1693 def __instancecheck__(cls, __instance: Any) -> bool: 

1694 return isinstance(__instance, cls._backported_typevarlike) 

1695 

1696 

1697if _PEP_696_IMPLEMENTED: 

1698 from typing import TypeVar 

1699else: 

1700 # Add default and infer_variance parameters from PEP 696 and 695 

1701 class TypeVar(metaclass=_TypeVarLikeMeta): 

1702 """Type variable.""" 

1703 

1704 _backported_typevarlike = typing.TypeVar 

1705 

1706 def __new__(cls, name, *constraints, bound=None, 

1707 covariant=False, contravariant=False, 

1708 default=NoDefault, infer_variance=False): 

1709 if hasattr(typing, "TypeAliasType"): 

1710 # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar 

1711 typevar = typing.TypeVar(name, *constraints, bound=bound, 

1712 covariant=covariant, contravariant=contravariant, 

1713 infer_variance=infer_variance) 

1714 else: 

1715 typevar = typing.TypeVar(name, *constraints, bound=bound, 

1716 covariant=covariant, contravariant=contravariant) 

1717 if infer_variance and (covariant or contravariant): 

1718 raise ValueError("Variance cannot be specified with infer_variance.") 

1719 typevar.__infer_variance__ = infer_variance 

1720 

1721 _set_default(typevar, default) 

1722 _set_module(typevar) 

1723 

1724 def _tvar_prepare_subst(alias, args): 

1725 if ( 

1726 typevar.has_default() 

1727 and alias.__parameters__.index(typevar) == len(args) 

1728 ): 

1729 args += (typevar.__default__,) 

1730 return args 

1731 

1732 typevar.__typing_prepare_subst__ = _tvar_prepare_subst 

1733 return typevar 

1734 

1735 def __init_subclass__(cls) -> None: 

1736 raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type") 

1737 

1738 

1739# Python 3.10+ has PEP 612 

1740if hasattr(typing, 'ParamSpecArgs'): 

1741 ParamSpecArgs = typing.ParamSpecArgs 

1742 ParamSpecKwargs = typing.ParamSpecKwargs 

1743# 3.9 

1744else: 

1745 class _Immutable: 

1746 """Mixin to indicate that object should not be copied.""" 

1747 __slots__ = () 

1748 

1749 def __copy__(self): 

1750 return self 

1751 

1752 def __deepcopy__(self, memo): 

1753 return self 

1754 

1755 class ParamSpecArgs(_Immutable): 

1756 """The args for a ParamSpec object. 

1757 

1758 Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. 

1759 

1760 ParamSpecArgs objects have a reference back to their ParamSpec: 

1761 

1762 P.args.__origin__ is P 

1763 

1764 This type is meant for runtime introspection and has no special meaning to 

1765 static type checkers. 

1766 """ 

1767 def __init__(self, origin): 

1768 self.__origin__ = origin 

1769 

1770 def __repr__(self): 

1771 return f"{self.__origin__.__name__}.args" 

1772 

1773 def __eq__(self, other): 

1774 if not isinstance(other, ParamSpecArgs): 

1775 return NotImplemented 

1776 return self.__origin__ == other.__origin__ 

1777 

1778 class ParamSpecKwargs(_Immutable): 

1779 """The kwargs for a ParamSpec object. 

1780 

1781 Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs. 

1782 

1783 ParamSpecKwargs objects have a reference back to their ParamSpec: 

1784 

1785 P.kwargs.__origin__ is P 

1786 

1787 This type is meant for runtime introspection and has no special meaning to 

1788 static type checkers. 

1789 """ 

1790 def __init__(self, origin): 

1791 self.__origin__ = origin 

1792 

1793 def __repr__(self): 

1794 return f"{self.__origin__.__name__}.kwargs" 

1795 

1796 def __eq__(self, other): 

1797 if not isinstance(other, ParamSpecKwargs): 

1798 return NotImplemented 

1799 return self.__origin__ == other.__origin__ 

1800 

1801 

1802if _PEP_696_IMPLEMENTED: 

1803 from typing import ParamSpec 

1804 

1805# 3.10+ 

1806elif hasattr(typing, 'ParamSpec'): 

1807 

1808 # Add default parameter - PEP 696 

1809 class ParamSpec(metaclass=_TypeVarLikeMeta): 

1810 """Parameter specification.""" 

1811 

1812 _backported_typevarlike = typing.ParamSpec 

1813 

1814 def __new__(cls, name, *, bound=None, 

1815 covariant=False, contravariant=False, 

1816 infer_variance=False, default=NoDefault): 

1817 if hasattr(typing, "TypeAliasType"): 

1818 # PEP 695 implemented, can pass infer_variance to typing.TypeVar 

1819 paramspec = typing.ParamSpec(name, bound=bound, 

1820 covariant=covariant, 

1821 contravariant=contravariant, 

1822 infer_variance=infer_variance) 

1823 else: 

1824 paramspec = typing.ParamSpec(name, bound=bound, 

1825 covariant=covariant, 

1826 contravariant=contravariant) 

1827 paramspec.__infer_variance__ = infer_variance 

1828 

1829 _set_default(paramspec, default) 

1830 _set_module(paramspec) 

1831 

1832 def _paramspec_prepare_subst(alias, args): 

1833 params = alias.__parameters__ 

1834 i = params.index(paramspec) 

1835 if i == len(args) and paramspec.has_default(): 

1836 args = [*args, paramspec.__default__] 

1837 if i >= len(args): 

1838 raise TypeError(f"Too few arguments for {alias}") 

1839 # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612. 

1840 if len(params) == 1 and not typing._is_param_expr(args[0]): 

1841 assert i == 0 

1842 args = (args,) 

1843 # Convert lists to tuples to help other libraries cache the results. 

1844 elif isinstance(args[i], list): 

1845 args = (*args[:i], tuple(args[i]), *args[i + 1:]) 

1846 return args 

1847 

1848 paramspec.__typing_prepare_subst__ = _paramspec_prepare_subst 

1849 return paramspec 

1850 

1851 def __init_subclass__(cls) -> None: 

1852 raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type") 

1853 

1854# 3.9 

1855else: 

1856 

1857 # Inherits from list as a workaround for Callable checks in Python < 3.9.2. 

1858 class ParamSpec(list, _DefaultMixin): 

1859 """Parameter specification variable. 

1860 

1861 Usage:: 

1862 

1863 P = ParamSpec('P') 

1864 

1865 Parameter specification variables exist primarily for the benefit of static 

1866 type checkers. They are used to forward the parameter types of one 

1867 callable to another callable, a pattern commonly found in higher order 

1868 functions and decorators. They are only valid when used in ``Concatenate``, 

1869 or s the first argument to ``Callable``. In Python 3.10 and higher, 

1870 they are also supported in user-defined Generics at runtime. 

1871 See class Generic for more information on generic types. An 

1872 example for annotating a decorator:: 

1873 

1874 T = TypeVar('T') 

1875 P = ParamSpec('P') 

1876 

1877 def add_logging(f: Callable[P, T]) -> Callable[P, T]: 

1878 '''A type-safe decorator to add logging to a function.''' 

1879 def inner(*args: P.args, **kwargs: P.kwargs) -> T: 

1880 logging.info(f'{f.__name__} was called') 

1881 return f(*args, **kwargs) 

1882 return inner 

1883 

1884 @add_logging 

1885 def add_two(x: float, y: float) -> float: 

1886 '''Add two numbers together.''' 

1887 return x + y 

1888 

1889 Parameter specification variables defined with covariant=True or 

1890 contravariant=True can be used to declare covariant or contravariant 

1891 generic types. These keyword arguments are valid, but their actual semantics 

1892 are yet to be decided. See PEP 612 for details. 

1893 

1894 Parameter specification variables can be introspected. e.g.: 

1895 

1896 P.__name__ == 'T' 

1897 P.__bound__ == None 

1898 P.__covariant__ == False 

1899 P.__contravariant__ == False 

1900 

1901 Note that only parameter specification variables defined in global scope can 

1902 be pickled. 

1903 """ 

1904 

1905 # Trick Generic __parameters__. 

1906 __class__ = typing.TypeVar 

1907 

1908 @property 

1909 def args(self): 

1910 return ParamSpecArgs(self) 

1911 

1912 @property 

1913 def kwargs(self): 

1914 return ParamSpecKwargs(self) 

1915 

1916 def __init__(self, name, *, bound=None, covariant=False, contravariant=False, 

1917 infer_variance=False, default=NoDefault): 

1918 list.__init__(self, [self]) 

1919 self.__name__ = name 

1920 self.__covariant__ = bool(covariant) 

1921 self.__contravariant__ = bool(contravariant) 

1922 self.__infer_variance__ = bool(infer_variance) 

1923 if bound: 

1924 self.__bound__ = typing._type_check(bound, 'Bound must be a type.') 

1925 else: 

1926 self.__bound__ = None 

1927 _DefaultMixin.__init__(self, default) 

1928 

1929 # for pickling: 

1930 def_mod = _caller() 

1931 if def_mod != 'typing_extensions': 

1932 self.__module__ = def_mod 

1933 

1934 def __repr__(self): 

1935 if self.__infer_variance__: 

1936 prefix = '' 

1937 elif self.__covariant__: 

1938 prefix = '+' 

1939 elif self.__contravariant__: 

1940 prefix = '-' 

1941 else: 

1942 prefix = '~' 

1943 return prefix + self.__name__ 

1944 

1945 def __hash__(self): 

1946 return object.__hash__(self) 

1947 

1948 def __eq__(self, other): 

1949 return self is other 

1950 

1951 def __reduce__(self): 

1952 return self.__name__ 

1953 

1954 # Hack to get typing._type_check to pass. 

1955 def __call__(self, *args, **kwargs): 

1956 pass 

1957 

1958 def __init_subclass__(cls) -> None: 

1959 raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type") 

1960 

1961 

1962# 3.9 

1963if not hasattr(typing, 'Concatenate'): 

1964 # Inherits from list as a workaround for Callable checks in Python < 3.9.2. 

1965 

1966 # 3.9.0-1 

1967 if not hasattr(typing, '_type_convert'): 

1968 def _type_convert(arg, module=None, *, allow_special_forms=False): 

1969 """For converting None to type(None), and strings to ForwardRef.""" 

1970 if arg is None: 

1971 return type(None) 

1972 if isinstance(arg, str): 

1973 if sys.version_info <= (3, 9, 6): 

1974 return ForwardRef(arg) 

1975 if sys.version_info <= (3, 9, 7): 

1976 return ForwardRef(arg, module=module) 

1977 return ForwardRef(arg, module=module, is_class=allow_special_forms) 

1978 return arg 

1979 else: 

1980 _type_convert = typing._type_convert 

1981 

1982 class _ConcatenateGenericAlias(list): 

1983 

1984 # Trick Generic into looking into this for __parameters__. 

1985 __class__ = typing._GenericAlias 

1986 

1987 def __init__(self, origin, args): 

1988 # Cannot use `super().__init__` here because of the `__class__` assignment 

1989 # in the class body (https://github.com/python/typing_extensions/issues/661) 

1990 list.__init__(self, args) 

1991 self.__origin__ = origin 

1992 self.__args__ = args 

1993 

1994 def __repr__(self): 

1995 _type_repr = typing._type_repr 

1996 return (f'{_type_repr(self.__origin__)}' 

1997 f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]') 

1998 

1999 def __hash__(self): 

2000 return hash((self.__origin__, self.__args__)) 

2001 

2002 # Hack to get typing._type_check to pass in Generic. 

2003 def __call__(self, *args, **kwargs): 

2004 pass 

2005 

2006 @property 

2007 def __parameters__(self): 

2008 return tuple( 

2009 tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec)) 

2010 ) 

2011 

2012 # 3.9 used by __getitem__ below 

2013 def copy_with(self, params): 

2014 if isinstance(params[-1], _ConcatenateGenericAlias): 

2015 params = (*params[:-1], *params[-1].__args__) 

2016 elif isinstance(params[-1], (list, tuple)): 

2017 return (*params[:-1], *params[-1]) 

2018 elif (not (params[-1] is ... or isinstance(params[-1], ParamSpec))): 

2019 raise TypeError("The last parameter to Concatenate should be a " 

2020 "ParamSpec variable or ellipsis.") 

2021 return self.__class__(self.__origin__, params) 

2022 

2023 # 3.9; accessed during GenericAlias.__getitem__ when substituting 

2024 def __getitem__(self, args): 

2025 if self.__origin__ in (Generic, Protocol): 

2026 # Can't subscript Generic[...] or Protocol[...]. 

2027 raise TypeError(f"Cannot subscript already-subscripted {self}") 

2028 if not self.__parameters__: 

2029 raise TypeError(f"{self} is not a generic class") 

2030 

2031 if not isinstance(args, tuple): 

2032 args = (args,) 

2033 args = _unpack_args(*(_type_convert(p) for p in args)) 

2034 params = self.__parameters__ 

2035 for param in params: 

2036 prepare = getattr(param, "__typing_prepare_subst__", None) 

2037 if prepare is not None: 

2038 args = prepare(self, args) 

2039 # 3.9 & typing.ParamSpec 

2040 elif isinstance(param, ParamSpec): 

2041 i = params.index(param) 

2042 if ( 

2043 i == len(args) 

2044 and getattr(param, '__default__', NoDefault) is not NoDefault 

2045 ): 

2046 args = [*args, param.__default__] 

2047 if i >= len(args): 

2048 raise TypeError(f"Too few arguments for {self}") 

2049 # Special case for Z[[int, str, bool]] == Z[int, str, bool] 

2050 if len(params) == 1 and not _is_param_expr(args[0]): 

2051 assert i == 0 

2052 args = (args,) 

2053 elif ( 

2054 isinstance(args[i], list) 

2055 # 3.9 

2056 # This class inherits from list do not convert 

2057 and not isinstance(args[i], _ConcatenateGenericAlias) 

2058 ): 

2059 args = (*args[:i], tuple(args[i]), *args[i + 1:]) 

2060 

2061 alen = len(args) 

2062 plen = len(params) 

2063 if alen != plen: 

2064 raise TypeError( 

2065 f"Too {'many' if alen > plen else 'few'} arguments for {self};" 

2066 f" actual {alen}, expected {plen}" 

2067 ) 

2068 

2069 subst = dict(zip(self.__parameters__, args)) 

2070 # determine new args 

2071 new_args = [] 

2072 for arg in self.__args__: 

2073 if isinstance(arg, type): 

2074 new_args.append(arg) 

2075 continue 

2076 if isinstance(arg, TypeVar): 

2077 arg = subst[arg] 

2078 if ( 

2079 (isinstance(arg, typing._GenericAlias) and _is_unpack(arg)) 

2080 or ( 

2081 hasattr(_types, "GenericAlias") 

2082 and isinstance(arg, _types.GenericAlias) 

2083 and getattr(arg, "__unpacked__", False) 

2084 ) 

2085 ): 

2086 raise TypeError(f"{arg} is not valid as type argument") 

2087 

2088 elif isinstance(arg, 

2089 typing._GenericAlias 

2090 if not hasattr(_types, "GenericAlias") else 

2091 (typing._GenericAlias, _types.GenericAlias) 

2092 ): 

2093 subparams = arg.__parameters__ 

2094 if subparams: 

2095 subargs = tuple(subst[x] for x in subparams) 

2096 arg = arg[subargs] 

2097 new_args.append(arg) 

2098 return self.copy_with(tuple(new_args)) 

2099 

2100# 3.10+ 

2101else: 

2102 _ConcatenateGenericAlias = typing._ConcatenateGenericAlias 

2103 

2104 # 3.10 

2105 if sys.version_info < (3, 11): 

2106 

2107 class _ConcatenateGenericAlias(typing._ConcatenateGenericAlias, _root=True): 

2108 # needed for checks in collections.abc.Callable to accept this class 

2109 __module__ = "typing" 

2110 

2111 def copy_with(self, params): 

2112 if isinstance(params[-1], (list, tuple)): 

2113 return (*params[:-1], *params[-1]) 

2114 if isinstance(params[-1], typing._ConcatenateGenericAlias): 

2115 params = (*params[:-1], *params[-1].__args__) 

2116 elif not (params[-1] is ... or isinstance(params[-1], ParamSpec)): 

2117 raise TypeError("The last parameter to Concatenate should be a " 

2118 "ParamSpec variable or ellipsis.") 

2119 return super(typing._ConcatenateGenericAlias, self).copy_with(params) 

2120 

2121 def __getitem__(self, args): 

2122 value = super().__getitem__(args) 

2123 if isinstance(value, tuple) and any(_is_unpack(t) for t in value): 

2124 return tuple(_unpack_args(*(n for n in value))) 

2125 return value 

2126 

2127 

2128# 3.9.2 

2129class _EllipsisDummy: ... 

2130 

2131 

2132# <=3.10 

2133def _create_concatenate_alias(origin, parameters): 

2134 if parameters[-1] is ... and sys.version_info < (3, 9, 2): 

2135 # Hack: Arguments must be types, replace it with one. 

2136 parameters = (*parameters[:-1], _EllipsisDummy) 

2137 if sys.version_info >= (3, 10, 3): 

2138 concatenate = _ConcatenateGenericAlias(origin, parameters, 

2139 _typevar_types=(TypeVar, ParamSpec), 

2140 _paramspec_tvars=True) 

2141 else: 

2142 concatenate = _ConcatenateGenericAlias(origin, parameters) 

2143 if parameters[-1] is not _EllipsisDummy: 

2144 return concatenate 

2145 # Remove dummy again 

2146 concatenate.__args__ = tuple(p if p is not _EllipsisDummy else ... 

2147 for p in concatenate.__args__) 

2148 if sys.version_info < (3, 10): 

2149 # backport needs __args__ adjustment only 

2150 return concatenate 

2151 concatenate.__parameters__ = tuple(p for p in concatenate.__parameters__ 

2152 if p is not _EllipsisDummy) 

2153 return concatenate 

2154 

2155 

2156# <=3.10 

2157@typing._tp_cache 

2158def _concatenate_getitem(self, parameters): 

2159 if parameters == (): 

2160 raise TypeError("Cannot take a Concatenate of no types.") 

2161 if not isinstance(parameters, tuple): 

2162 parameters = (parameters,) 

2163 if not (parameters[-1] is ... or isinstance(parameters[-1], ParamSpec)): 

2164 raise TypeError("The last parameter to Concatenate should be a " 

2165 "ParamSpec variable or ellipsis.") 

2166 msg = "Concatenate[arg, ...]: each arg must be a type." 

2167 parameters = (*(typing._type_check(p, msg) for p in parameters[:-1]), 

2168 parameters[-1]) 

2169 return _create_concatenate_alias(self, parameters) 

2170 

2171 

2172# 3.11+; Concatenate does not accept ellipsis in 3.10 

2173# Breakpoint: https://github.com/python/cpython/pull/30969 

2174if sys.version_info >= (3, 11): 

2175 Concatenate = typing.Concatenate 

2176# <=3.10 

2177else: 

2178 @_ExtensionsSpecialForm 

2179 def Concatenate(self, parameters): 

2180 """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a 

2181 higher order function which adds, removes or transforms parameters of a 

2182 callable. 

2183 

2184 For example:: 

2185 

2186 Callable[Concatenate[int, P], int] 

2187 

2188 See PEP 612 for detailed information. 

2189 """ 

2190 return _concatenate_getitem(self, parameters) 

2191 

2192 

2193# 3.10+ 

2194if hasattr(typing, 'TypeGuard'): 

2195 TypeGuard = typing.TypeGuard 

2196# 3.9 

2197else: 

2198 @_ExtensionsSpecialForm 

2199 def TypeGuard(self, parameters): 

2200 """Special typing form used to annotate the return type of a user-defined 

2201 type guard function. ``TypeGuard`` only accepts a single type argument. 

2202 At runtime, functions marked this way should return a boolean. 

2203 

2204 ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static 

2205 type checkers to determine a more precise type of an expression within a 

2206 program's code flow. Usually type narrowing is done by analyzing 

2207 conditional code flow and applying the narrowing to a block of code. The 

2208 conditional expression here is sometimes referred to as a "type guard". 

2209 

2210 Sometimes it would be convenient to use a user-defined boolean function 

2211 as a type guard. Such a function should use ``TypeGuard[...]`` as its 

2212 return type to alert static type checkers to this intention. 

2213 

2214 Using ``-> TypeGuard`` tells the static type checker that for a given 

2215 function: 

2216 

2217 1. The return value is a boolean. 

2218 2. If the return value is ``True``, the type of its argument 

2219 is the type inside ``TypeGuard``. 

2220 

2221 For example:: 

2222 

2223 def is_str(val: Union[str, float]): 

2224 # "isinstance" type guard 

2225 if isinstance(val, str): 

2226 # Type of ``val`` is narrowed to ``str`` 

2227 ... 

2228 else: 

2229 # Else, type of ``val`` is narrowed to ``float``. 

2230 ... 

2231 

2232 Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower 

2233 form of ``TypeA`` (it can even be a wider form) and this may lead to 

2234 type-unsafe results. The main reason is to allow for things like 

2235 narrowing ``List[object]`` to ``List[str]`` even though the latter is not 

2236 a subtype of the former, since ``List`` is invariant. The responsibility of 

2237 writing type-safe type guards is left to the user. 

2238 

2239 ``TypeGuard`` also works with type variables. For more information, see 

2240 PEP 647 (User-Defined Type Guards). 

2241 """ 

2242 item = typing._type_check(parameters, f'{self} accepts only a single type.') 

2243 return typing._GenericAlias(self, (item,)) 

2244 

2245 

2246# 3.13+ 

2247if hasattr(typing, 'TypeIs'): 

2248 TypeIs = typing.TypeIs 

2249# <=3.12 

2250else: 

2251 @_ExtensionsSpecialForm 

2252 def TypeIs(self, parameters): 

2253 """Special typing form used to annotate the return type of a user-defined 

2254 type narrower function. ``TypeIs`` only accepts a single type argument. 

2255 At runtime, functions marked this way should return a boolean. 

2256 

2257 ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static 

2258 type checkers to determine a more precise type of an expression within a 

2259 program's code flow. Usually type narrowing is done by analyzing 

2260 conditional code flow and applying the narrowing to a block of code. The 

2261 conditional expression here is sometimes referred to as a "type guard". 

2262 

2263 Sometimes it would be convenient to use a user-defined boolean function 

2264 as a type guard. Such a function should use ``TypeIs[...]`` as its 

2265 return type to alert static type checkers to this intention. 

2266 

2267 Using ``-> TypeIs`` tells the static type checker that for a given 

2268 function: 

2269 

2270 1. The return value is a boolean. 

2271 2. If the return value is ``True``, the type of its argument 

2272 is the intersection of the type inside ``TypeIs`` and the argument's 

2273 previously known type. 

2274 

2275 For example:: 

2276 

2277 def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]: 

2278 return hasattr(val, '__await__') 

2279 

2280 def f(val: Union[int, Awaitable[int]]) -> int: 

2281 if is_awaitable(val): 

2282 assert_type(val, Awaitable[int]) 

2283 else: 

2284 assert_type(val, int) 

2285 

2286 ``TypeIs`` also works with type variables. For more information, see 

2287 PEP 742 (Narrowing types with TypeIs). 

2288 """ 

2289 item = typing._type_check(parameters, f'{self} accepts only a single type.') 

2290 return typing._GenericAlias(self, (item,)) 

2291 

2292 

2293# 3.15+? 

2294if hasattr(typing, 'TypeForm'): 

2295 TypeForm = typing.TypeForm 

2296# <=3.14 

2297else: 

2298 class _TypeFormForm(_ExtensionsSpecialForm, _root=True): 

2299 # TypeForm(X) is equivalent to X but indicates to the type checker 

2300 # that the object is a TypeForm. 

2301 def __call__(self, obj, /): 

2302 return obj 

2303 

2304 @_TypeFormForm 

2305 def TypeForm(self, parameters): 

2306 """A special form representing the value that results from the evaluation 

2307 of a type expression. This value encodes the information supplied in the 

2308 type expression, and it represents the type described by that type expression. 

2309 

2310 When used in a type expression, TypeForm describes a set of type form objects. 

2311 It accepts a single type argument, which must be a valid type expression. 

2312 ``TypeForm[T]`` describes the set of all type form objects that represent 

2313 the type T or types that are assignable to T. 

2314 

2315 Usage: 

2316 

2317 def cast[T](typ: TypeForm[T], value: Any) -> T: ... 

2318 

2319 reveal_type(cast(int, "x")) # int 

2320 

2321 See PEP 747 for more information. 

2322 """ 

2323 item = typing._type_check(parameters, f'{self} accepts only a single type.') 

2324 return typing._GenericAlias(self, (item,)) 

2325 

2326 

2327 

2328 

2329if hasattr(typing, "LiteralString"): # 3.11+ 

2330 LiteralString = typing.LiteralString 

2331else: 

2332 @_SpecialForm 

2333 def LiteralString(self, params): 

2334 """Represents an arbitrary literal string. 

2335 

2336 Example:: 

2337 

2338 from typing_extensions import LiteralString 

2339 

2340 def query(sql: LiteralString) -> ...: 

2341 ... 

2342 

2343 query("SELECT * FROM table") # ok 

2344 query(f"SELECT * FROM {input()}") # not ok 

2345 

2346 See PEP 675 for details. 

2347 

2348 """ 

2349 raise TypeError(f"{self} is not subscriptable") 

2350 

2351 

2352if hasattr(typing, "Self"): # 3.11+ 

2353 Self = typing.Self 

2354else: 

2355 @_SpecialForm 

2356 def Self(self, params): 

2357 """Used to spell the type of "self" in classes. 

2358 

2359 Example:: 

2360 

2361 from typing import Self 

2362 

2363 class ReturnsSelf: 

2364 def parse(self, data: bytes) -> Self: 

2365 ... 

2366 return self 

2367 

2368 """ 

2369 

2370 raise TypeError(f"{self} is not subscriptable") 

2371 

2372 

2373if hasattr(typing, "Never"): # 3.11+ 

2374 Never = typing.Never 

2375else: 

2376 @_SpecialForm 

2377 def Never(self, params): 

2378 """The bottom type, a type that has no members. 

2379 

2380 This can be used to define a function that should never be 

2381 called, or a function that never returns:: 

2382 

2383 from typing_extensions import Never 

2384 

2385 def never_call_me(arg: Never) -> None: 

2386 pass 

2387 

2388 def int_or_str(arg: int | str) -> None: 

2389 never_call_me(arg) # type checker error 

2390 match arg: 

2391 case int(): 

2392 print("It's an int") 

2393 case str(): 

2394 print("It's a str") 

2395 case _: 

2396 never_call_me(arg) # ok, arg is of type Never 

2397 

2398 """ 

2399 

2400 raise TypeError(f"{self} is not subscriptable") 

2401 

2402 

2403if hasattr(typing, 'Required'): # 3.11+ 

2404 Required = typing.Required 

2405 NotRequired = typing.NotRequired 

2406else: # <=3.10 

2407 @_ExtensionsSpecialForm 

2408 def Required(self, parameters): 

2409 """A special typing construct to mark a key of a total=False TypedDict 

2410 as required. For example: 

2411 

2412 class Movie(TypedDict, total=False): 

2413 title: Required[str] 

2414 year: int 

2415 

2416 m = Movie( 

2417 title='The Matrix', # typechecker error if key is omitted 

2418 year=1999, 

2419 ) 

2420 

2421 There is no runtime checking that a required key is actually provided 

2422 when instantiating a related TypedDict. 

2423 """ 

2424 item = typing._type_check(parameters, f'{self._name} accepts only a single type.') 

2425 return typing._GenericAlias(self, (item,)) 

2426 

2427 @_ExtensionsSpecialForm 

2428 def NotRequired(self, parameters): 

2429 """A special typing construct to mark a key of a TypedDict as 

2430 potentially missing. For example: 

2431 

2432 class Movie(TypedDict): 

2433 title: str 

2434 year: NotRequired[int] 

2435 

2436 m = Movie( 

2437 title='The Matrix', # typechecker error if key is omitted 

2438 year=1999, 

2439 ) 

2440 """ 

2441 item = typing._type_check(parameters, f'{self._name} accepts only a single type.') 

2442 return typing._GenericAlias(self, (item,)) 

2443 

2444 

2445if hasattr(typing, 'ReadOnly'): 

2446 ReadOnly = typing.ReadOnly 

2447else: # <=3.12 

2448 @_ExtensionsSpecialForm 

2449 def ReadOnly(self, parameters): 

2450 """A special typing construct to mark an item of a TypedDict as read-only. 

2451 

2452 For example: 

2453 

2454 class Movie(TypedDict): 

2455 title: ReadOnly[str] 

2456 year: int 

2457 

2458 def mutate_movie(m: Movie) -> None: 

2459 m["year"] = 1992 # allowed 

2460 m["title"] = "The Matrix" # typechecker error 

2461 

2462 There is no runtime checking for this property. 

2463 """ 

2464 item = typing._type_check(parameters, f'{self._name} accepts only a single type.') 

2465 return typing._GenericAlias(self, (item,)) 

2466 

2467 

2468_UNPACK_DOC = """\ 

2469Type unpack operator. 

2470 

2471The type unpack operator takes the child types from some container type, 

2472such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For 

2473example: 

2474 

2475 # For some generic class `Foo`: 

2476 Foo[Unpack[tuple[int, str]]] # Equivalent to Foo[int, str] 

2477 

2478 Ts = TypeVarTuple('Ts') 

2479 # Specifies that `Bar` is generic in an arbitrary number of types. 

2480 # (Think of `Ts` as a tuple of an arbitrary number of individual 

2481 # `TypeVar`s, which the `Unpack` is 'pulling out' directly into the 

2482 # `Generic[]`.) 

2483 class Bar(Generic[Unpack[Ts]]): ... 

2484 Bar[int] # Valid 

2485 Bar[int, str] # Also valid 

2486 

2487From Python 3.11, this can also be done using the `*` operator: 

2488 

2489 Foo[*tuple[int, str]] 

2490 class Bar(Generic[*Ts]): ... 

2491 

2492The operator can also be used along with a `TypedDict` to annotate 

2493`**kwargs` in a function signature. For instance: 

2494 

2495 class Movie(TypedDict): 

2496 name: str 

2497 year: int 

2498 

2499 # This function expects two keyword arguments - *name* of type `str` and 

2500 # *year* of type `int`. 

2501 def foo(**kwargs: Unpack[Movie]): ... 

2502 

2503Note that there is only some runtime checking of this operator. Not 

2504everything the runtime allows may be accepted by static type checkers. 

2505 

2506For more information, see PEP 646 and PEP 692. 

2507""" 

2508 

2509 

2510# PEP 692 changed the repr of Unpack[] 

2511# Breakpoint: https://github.com/python/cpython/pull/104048 

2512if sys.version_info >= (3, 12): 

2513 Unpack = typing.Unpack 

2514 

2515 def _is_unpack(obj): 

2516 return get_origin(obj) is Unpack 

2517 

2518else: # <=3.11 

2519 class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True): 

2520 def __init__(self, getitem): 

2521 super().__init__(getitem) 

2522 self.__doc__ = _UNPACK_DOC 

2523 

2524 class _UnpackAlias(typing._GenericAlias, _root=True): 

2525 if sys.version_info < (3, 11): 

2526 # needed for compatibility with Generic[Unpack[Ts]] 

2527 __class__ = typing.TypeVar 

2528 

2529 @property 

2530 def __typing_unpacked_tuple_args__(self): 

2531 assert self.__origin__ is Unpack 

2532 assert len(self.__args__) == 1 

2533 arg, = self.__args__ 

2534 if isinstance(arg, (typing._GenericAlias, _types.GenericAlias)): 

2535 if arg.__origin__ is not tuple: 

2536 raise TypeError("Unpack[...] must be used with a tuple type") 

2537 return arg.__args__ 

2538 return None 

2539 

2540 @property 

2541 def __typing_is_unpacked_typevartuple__(self): 

2542 assert self.__origin__ is Unpack 

2543 assert len(self.__args__) == 1 

2544 return isinstance(self.__args__[0], TypeVarTuple) 

2545 

2546 def __getitem__(self, args): 

2547 if self.__typing_is_unpacked_typevartuple__: 

2548 return args 

2549 # Cannot use `super().__getitem__` here because of the `__class__` assignment 

2550 # in the class body on Python <=3.11 

2551 # (https://github.com/python/typing_extensions/issues/661) 

2552 return typing._GenericAlias.__getitem__(self, args) 

2553 

2554 @_UnpackSpecialForm 

2555 def Unpack(self, parameters): 

2556 item = typing._type_check(parameters, f'{self._name} accepts only a single type.') 

2557 return _UnpackAlias(self, (item,)) 

2558 

2559 def _is_unpack(obj): 

2560 return isinstance(obj, _UnpackAlias) 

2561 

2562 

2563def _unpack_args(*args): 

2564 newargs = [] 

2565 for arg in args: 

2566 subargs = getattr(arg, '__typing_unpacked_tuple_args__', None) 

2567 if subargs is not None and (not (subargs and subargs[-1] is ...)): 

2568 newargs.extend(subargs) 

2569 else: 

2570 newargs.append(arg) 

2571 return newargs 

2572 

2573 

2574if _PEP_696_IMPLEMENTED: 

2575 from typing import TypeVarTuple 

2576 

2577elif hasattr(typing, "TypeVarTuple"): # 3.11+ 

2578 

2579 # Add default parameter - PEP 696 

2580 class TypeVarTuple(metaclass=_TypeVarLikeMeta): 

2581 """Type variable tuple.""" 

2582 

2583 _backported_typevarlike = typing.TypeVarTuple 

2584 

2585 def __new__(cls, name, *, default=NoDefault): 

2586 tvt = typing.TypeVarTuple(name) 

2587 _set_default(tvt, default) 

2588 _set_module(tvt) 

2589 

2590 def _typevartuple_prepare_subst(alias, args): 

2591 params = alias.__parameters__ 

2592 typevartuple_index = params.index(tvt) 

2593 for param in params[typevartuple_index + 1:]: 

2594 if isinstance(param, TypeVarTuple): 

2595 raise TypeError( 

2596 f"More than one TypeVarTuple parameter in {alias}" 

2597 ) 

2598 

2599 alen = len(args) 

2600 plen = len(params) 

2601 left = typevartuple_index 

2602 right = plen - typevartuple_index - 1 

2603 var_tuple_index = None 

2604 fillarg = None 

2605 for k, arg in enumerate(args): 

2606 if not isinstance(arg, type): 

2607 subargs = getattr(arg, '__typing_unpacked_tuple_args__', None) 

2608 if subargs and len(subargs) == 2 and subargs[-1] is ...: 

2609 if var_tuple_index is not None: 

2610 raise TypeError( 

2611 "More than one unpacked " 

2612 "arbitrary-length tuple argument" 

2613 ) 

2614 var_tuple_index = k 

2615 fillarg = subargs[0] 

2616 if var_tuple_index is not None: 

2617 left = min(left, var_tuple_index) 

2618 right = min(right, alen - var_tuple_index - 1) 

2619 elif left + right > alen: 

2620 raise TypeError(f"Too few arguments for {alias};" 

2621 f" actual {alen}, expected at least {plen - 1}") 

2622 if left == alen - right and tvt.has_default(): 

2623 replacement = _unpack_args(tvt.__default__) 

2624 else: 

2625 replacement = args[left: alen - right] 

2626 

2627 return ( 

2628 *args[:left], 

2629 *([fillarg] * (typevartuple_index - left)), 

2630 replacement, 

2631 *([fillarg] * (plen - right - left - typevartuple_index - 1)), 

2632 *args[alen - right:], 

2633 ) 

2634 

2635 tvt.__typing_prepare_subst__ = _typevartuple_prepare_subst 

2636 return tvt 

2637 

2638 def __init_subclass__(self, *args, **kwds): 

2639 raise TypeError("Cannot subclass special typing classes") 

2640 

2641else: # <=3.10 

2642 class TypeVarTuple(_DefaultMixin): 

2643 """Type variable tuple. 

2644 

2645 Usage:: 

2646 

2647 Ts = TypeVarTuple('Ts') 

2648 

2649 In the same way that a normal type variable is a stand-in for a single 

2650 type such as ``int``, a type variable *tuple* is a stand-in for a *tuple* 

2651 type such as ``Tuple[int, str]``. 

2652 

2653 Type variable tuples can be used in ``Generic`` declarations. 

2654 Consider the following example:: 

2655 

2656 class Array(Generic[*Ts]): ... 

2657 

2658 The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``, 

2659 where ``T1`` and ``T2`` are type variables. To use these type variables 

2660 as type parameters of ``Array``, we must *unpack* the type variable tuple using 

2661 the star operator: ``*Ts``. The signature of ``Array`` then behaves 

2662 as if we had simply written ``class Array(Generic[T1, T2]): ...``. 

2663 In contrast to ``Generic[T1, T2]``, however, ``Generic[*Shape]`` allows 

2664 us to parameterise the class with an *arbitrary* number of type parameters. 

2665 

2666 Type variable tuples can be used anywhere a normal ``TypeVar`` can. 

2667 This includes class definitions, as shown above, as well as function 

2668 signatures and variable annotations:: 

2669 

2670 class Array(Generic[*Ts]): 

2671 

2672 def __init__(self, shape: Tuple[*Ts]): 

2673 self._shape: Tuple[*Ts] = shape 

2674 

2675 def get_shape(self) -> Tuple[*Ts]: 

2676 return self._shape 

2677 

2678 shape = (Height(480), Width(640)) 

2679 x: Array[Height, Width] = Array(shape) 

2680 y = abs(x) # Inferred type is Array[Height, Width] 

2681 z = x + x # ... is Array[Height, Width] 

2682 x.get_shape() # ... is tuple[Height, Width] 

2683 

2684 """ 

2685 

2686 # Trick Generic __parameters__. 

2687 __class__ = typing.TypeVar 

2688 

2689 def __iter__(self): 

2690 yield self.__unpacked__ 

2691 

2692 def __init__(self, name, *, default=NoDefault): 

2693 self.__name__ = name 

2694 _DefaultMixin.__init__(self, default) 

2695 

2696 # for pickling: 

2697 def_mod = _caller() 

2698 if def_mod != 'typing_extensions': 

2699 self.__module__ = def_mod 

2700 

2701 self.__unpacked__ = Unpack[self] 

2702 

2703 def __repr__(self): 

2704 return self.__name__ 

2705 

2706 def __hash__(self): 

2707 return object.__hash__(self) 

2708 

2709 def __eq__(self, other): 

2710 return self is other 

2711 

2712 def __reduce__(self): 

2713 return self.__name__ 

2714 

2715 def __init_subclass__(self, *args, **kwds): 

2716 if '_root' not in kwds: 

2717 raise TypeError("Cannot subclass special typing classes") 

2718 

2719 

2720if hasattr(typing, "reveal_type"): # 3.11+ 

2721 reveal_type = typing.reveal_type 

2722else: # <=3.10 

2723 def reveal_type(obj: T, /) -> T: 

2724 """Reveal the inferred type of a variable. 

2725 

2726 When a static type checker encounters a call to ``reveal_type()``, 

2727 it will emit the inferred type of the argument:: 

2728 

2729 x: int = 1 

2730 reveal_type(x) 

2731 

2732 Running a static type checker (e.g., ``mypy``) on this example 

2733 will produce output similar to 'Revealed type is "builtins.int"'. 

2734 

2735 At runtime, the function prints the runtime type of the 

2736 argument and returns it unchanged. 

2737 

2738 """ 

2739 print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr) 

2740 return obj 

2741 

2742 

2743if hasattr(typing, "_ASSERT_NEVER_REPR_MAX_LENGTH"): # 3.11+ 

2744 _ASSERT_NEVER_REPR_MAX_LENGTH = typing._ASSERT_NEVER_REPR_MAX_LENGTH 

2745else: # <=3.10 

2746 _ASSERT_NEVER_REPR_MAX_LENGTH = 100 

2747 

2748 

2749if hasattr(typing, "assert_never"): # 3.11+ 

2750 assert_never = typing.assert_never 

2751else: # <=3.10 

2752 def assert_never(arg: Never, /) -> Never: 

2753 """Assert to the type checker that a line of code is unreachable. 

2754 

2755 Example:: 

2756 

2757 def int_or_str(arg: int | str) -> None: 

2758 match arg: 

2759 case int(): 

2760 print("It's an int") 

2761 case str(): 

2762 print("It's a str") 

2763 case _: 

2764 assert_never(arg) 

2765 

2766 If a type checker finds that a call to assert_never() is 

2767 reachable, it will emit an error. 

2768 

2769 At runtime, this throws an exception when called. 

2770 

2771 """ 

2772 value = repr(arg) 

2773 if len(value) > _ASSERT_NEVER_REPR_MAX_LENGTH: 

2774 value = value[:_ASSERT_NEVER_REPR_MAX_LENGTH] + '...' 

2775 raise AssertionError(f"Expected code to be unreachable, but got: {value}") 

2776 

2777 

2778# dataclass_transform exists in 3.11 but lacks the frozen_default parameter 

2779# Breakpoint: https://github.com/python/cpython/pull/99958 

2780if sys.version_info >= (3, 12): # 3.12+ 

2781 dataclass_transform = typing.dataclass_transform 

2782else: # <=3.11 

2783 def dataclass_transform( 

2784 *, 

2785 eq_default: bool = True, 

2786 order_default: bool = False, 

2787 kw_only_default: bool = False, 

2788 frozen_default: bool = False, 

2789 field_specifiers: typing.Tuple[ 

2790 typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]], 

2791 ... 

2792 ] = (), 

2793 **kwargs: typing.Any, 

2794 ) -> typing.Callable[[T], T]: 

2795 """Decorator that marks a function, class, or metaclass as providing 

2796 dataclass-like behavior. 

2797 

2798 Example: 

2799 

2800 from typing_extensions import dataclass_transform 

2801 

2802 _T = TypeVar("_T") 

2803 

2804 # Used on a decorator function 

2805 @dataclass_transform() 

2806 def create_model(cls: type[_T]) -> type[_T]: 

2807 ... 

2808 return cls 

2809 

2810 @create_model 

2811 class CustomerModel: 

2812 id: int 

2813 name: str 

2814 

2815 # Used on a base class 

2816 @dataclass_transform() 

2817 class ModelBase: ... 

2818 

2819 class CustomerModel(ModelBase): 

2820 id: int 

2821 name: str 

2822 

2823 # Used on a metaclass 

2824 @dataclass_transform() 

2825 class ModelMeta(type): ... 

2826 

2827 class ModelBase(metaclass=ModelMeta): ... 

2828 

2829 class CustomerModel(ModelBase): 

2830 id: int 

2831 name: str 

2832 

2833 Each of the ``CustomerModel`` classes defined in this example will now 

2834 behave similarly to a dataclass created with the ``@dataclasses.dataclass`` 

2835 decorator. For example, the type checker will synthesize an ``__init__`` 

2836 method. 

2837 

2838 The arguments to this decorator can be used to customize this behavior: 

2839 - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be 

2840 True or False if it is omitted by the caller. 

2841 - ``order_default`` indicates whether the ``order`` parameter is 

2842 assumed to be True or False if it is omitted by the caller. 

2843 - ``kw_only_default`` indicates whether the ``kw_only`` parameter is 

2844 assumed to be True or False if it is omitted by the caller. 

2845 - ``frozen_default`` indicates whether the ``frozen`` parameter is 

2846 assumed to be True or False if it is omitted by the caller. 

2847 - ``field_specifiers`` specifies a static list of supported classes 

2848 or functions that describe fields, similar to ``dataclasses.field()``. 

2849 

2850 At runtime, this decorator records its arguments in the 

2851 ``__dataclass_transform__`` attribute on the decorated object. 

2852 

2853 See PEP 681 for details. 

2854 

2855 """ 

2856 def decorator(cls_or_fn): 

2857 cls_or_fn.__dataclass_transform__ = { 

2858 "eq_default": eq_default, 

2859 "order_default": order_default, 

2860 "kw_only_default": kw_only_default, 

2861 "frozen_default": frozen_default, 

2862 "field_specifiers": field_specifiers, 

2863 "kwargs": kwargs, 

2864 } 

2865 return cls_or_fn 

2866 return decorator 

2867 

2868 

2869if hasattr(typing, "override"): # 3.12+ 

2870 override = typing.override 

2871else: # <=3.11 

2872 _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any]) 

2873 

2874 def override(arg: _F, /) -> _F: 

2875 """Indicate that a method is intended to override a method in a base class. 

2876 

2877 Usage: 

2878 

2879 class Base: 

2880 def method(self) -> None: 

2881 pass 

2882 

2883 class Child(Base): 

2884 @override 

2885 def method(self) -> None: 

2886 super().method() 

2887 

2888 When this decorator is applied to a method, the type checker will 

2889 validate that it overrides a method with the same name on a base class. 

2890 This helps prevent bugs that may occur when a base class is changed 

2891 without an equivalent change to a child class. 

2892 

2893 There is no runtime checking of these properties. The decorator 

2894 sets the ``__override__`` attribute to ``True`` on the decorated object 

2895 to allow runtime introspection. 

2896 

2897 See PEP 698 for details. 

2898 

2899 """ 

2900 try: 

2901 arg.__override__ = True 

2902 except (AttributeError, TypeError): 

2903 # Skip the attribute silently if it is not writable. 

2904 # AttributeError happens if the object has __slots__ or a 

2905 # read-only property, TypeError if it's a builtin class. 

2906 pass 

2907 return arg 

2908 

2909 

2910# Python 3.13.8+ and 3.14.1+ contain a fix for the wrapped __init_subclass__ 

2911# Breakpoint: https://github.com/python/cpython/pull/138210 

2912if ((3, 13, 8) <= sys.version_info < (3, 14)) or sys.version_info >= (3, 14, 1): 

2913 deprecated = warnings.deprecated 

2914else: 

2915 _T = typing.TypeVar("_T") 

2916 

2917 class deprecated: 

2918 """Indicate that a class, function or overload is deprecated. 

2919 

2920 When this decorator is applied to an object, the type checker 

2921 will generate a diagnostic on usage of the deprecated object. 

2922 

2923 Usage: 

2924 

2925 @deprecated("Use B instead") 

2926 class A: 

2927 pass 

2928 

2929 @deprecated("Use g instead") 

2930 def f(): 

2931 pass 

2932 

2933 @overload 

2934 @deprecated("int support is deprecated") 

2935 def g(x: int) -> int: ... 

2936 @overload 

2937 def g(x: str) -> int: ... 

2938 

2939 The warning specified by *category* will be emitted at runtime 

2940 on use of deprecated objects. For functions, that happens on calls; 

2941 for classes, on instantiation and on creation of subclasses. 

2942 If the *category* is ``None``, no warning is emitted at runtime. 

2943 The *stacklevel* determines where the 

2944 warning is emitted. If it is ``1`` (the default), the warning 

2945 is emitted at the direct caller of the deprecated object; if it 

2946 is higher, it is emitted further up the stack. 

2947 Static type checker behavior is not affected by the *category* 

2948 and *stacklevel* arguments. 

2949 

2950 The deprecation message passed to the decorator is saved in the 

2951 ``__deprecated__`` attribute on the decorated object. 

2952 If applied to an overload, the decorator 

2953 must be after the ``@overload`` decorator for the attribute to 

2954 exist on the overload as returned by ``get_overloads()``. 

2955 

2956 See PEP 702 for details. 

2957 

2958 """ 

2959 def __init__( 

2960 self, 

2961 message: str, 

2962 /, 

2963 *, 

2964 category: typing.Optional[typing.Type[Warning]] = DeprecationWarning, 

2965 stacklevel: int = 1, 

2966 ) -> None: 

2967 if not isinstance(message, str): 

2968 raise TypeError( 

2969 "Expected an object of type str for 'message', not " 

2970 f"{type(message).__name__!r}" 

2971 ) 

2972 self.message = message 

2973 self.category = category 

2974 self.stacklevel = stacklevel 

2975 

2976 def __call__(self, arg: _T, /) -> _T: 

2977 # Make sure the inner functions created below don't 

2978 # retain a reference to self. 

2979 msg = self.message 

2980 category = self.category 

2981 stacklevel = self.stacklevel 

2982 if category is None: 

2983 arg.__deprecated__ = msg 

2984 return arg 

2985 elif isinstance(arg, type): 

2986 import functools 

2987 from types import MethodType 

2988 

2989 original_new = arg.__new__ 

2990 

2991 @functools.wraps(original_new) 

2992 def __new__(cls, /, *args, **kwargs): 

2993 if cls is arg: 

2994 warnings.warn(msg, category=category, stacklevel=stacklevel + 1) 

2995 if original_new is not object.__new__: 

2996 return original_new(cls, *args, **kwargs) 

2997 # Mirrors a similar check in object.__new__. 

2998 elif cls.__init__ is object.__init__ and (args or kwargs): 

2999 raise TypeError(f"{cls.__name__}() takes no arguments") 

3000 else: 

3001 return original_new(cls) 

3002 

3003 arg.__new__ = staticmethod(__new__) 

3004 

3005 if "__init_subclass__" in arg.__dict__: 

3006 # __init_subclass__ is directly present on the decorated class. 

3007 # Synthesize a wrapper that calls this method directly. 

3008 original_init_subclass = arg.__init_subclass__ 

3009 # We need slightly different behavior if __init_subclass__ 

3010 # is a bound method (likely if it was implemented in Python). 

3011 # Otherwise, it likely means it's a builtin such as 

3012 # object's implementation of __init_subclass__. 

3013 if isinstance(original_init_subclass, MethodType): 

3014 original_init_subclass = original_init_subclass.__func__ 

3015 

3016 @functools.wraps(original_init_subclass) 

3017 def __init_subclass__(*args, **kwargs): 

3018 warnings.warn(msg, category=category, stacklevel=stacklevel + 1) 

3019 return original_init_subclass(*args, **kwargs) 

3020 else: 

3021 def __init_subclass__(cls, *args, **kwargs): 

3022 warnings.warn(msg, category=category, stacklevel=stacklevel + 1) 

3023 return super(arg, cls).__init_subclass__(*args, **kwargs) 

3024 

3025 arg.__init_subclass__ = classmethod(__init_subclass__) 

3026 

3027 arg.__deprecated__ = __new__.__deprecated__ = msg 

3028 __init_subclass__.__deprecated__ = msg 

3029 return arg 

3030 elif callable(arg): 

3031 import asyncio.coroutines 

3032 import functools 

3033 import inspect 

3034 

3035 @functools.wraps(arg) 

3036 def wrapper(*args, **kwargs): 

3037 warnings.warn(msg, category=category, stacklevel=stacklevel + 1) 

3038 return arg(*args, **kwargs) 

3039 

3040 if asyncio.coroutines.iscoroutinefunction(arg): 

3041 # Breakpoint: https://github.com/python/cpython/pull/99247 

3042 if sys.version_info >= (3, 12): 

3043 wrapper = inspect.markcoroutinefunction(wrapper) 

3044 else: 

3045 wrapper._is_coroutine = asyncio.coroutines._is_coroutine 

3046 

3047 arg.__deprecated__ = wrapper.__deprecated__ = msg 

3048 return wrapper 

3049 else: 

3050 raise TypeError( 

3051 "@deprecated decorator with non-None category must be applied to " 

3052 f"a class or callable, not {arg!r}" 

3053 ) 

3054 

3055# Breakpoint: https://github.com/python/cpython/pull/23702 

3056if sys.version_info < (3, 10): 

3057 def _is_param_expr(arg): 

3058 return arg is ... or isinstance( 

3059 arg, (tuple, list, ParamSpec, _ConcatenateGenericAlias) 

3060 ) 

3061else: 

3062 def _is_param_expr(arg): 

3063 return arg is ... or isinstance( 

3064 arg, 

3065 ( 

3066 tuple, 

3067 list, 

3068 ParamSpec, 

3069 _ConcatenateGenericAlias, 

3070 typing._ConcatenateGenericAlias, 

3071 ), 

3072 ) 

3073 

3074 

3075# We have to do some monkey patching to deal with the dual nature of 

3076# Unpack/TypeVarTuple: 

3077# - We want Unpack to be a kind of TypeVar so it gets accepted in 

3078# Generic[Unpack[Ts]] 

3079# - We want it to *not* be treated as a TypeVar for the purposes of 

3080# counting generic parameters, so that when we subscript a generic, 

3081# the runtime doesn't try to substitute the Unpack with the subscripted type. 

3082if not hasattr(typing, "TypeVarTuple"): 

3083 def _check_generic(cls, parameters, elen=_marker): 

3084 """Check correct count for parameters of a generic cls (internal helper). 

3085 

3086 This gives a nice error message in case of count mismatch. 

3087 """ 

3088 # If substituting a single ParamSpec with multiple arguments 

3089 # we do not check the count 

3090 if (inspect.isclass(cls) and issubclass(cls, typing.Generic) 

3091 and len(cls.__parameters__) == 1 

3092 and isinstance(cls.__parameters__[0], ParamSpec) 

3093 and parameters 

3094 and not _is_param_expr(parameters[0]) 

3095 ): 

3096 # Generic modifies parameters variable, but here we cannot do this 

3097 return 

3098 

3099 if not elen: 

3100 raise TypeError(f"{cls} is not a generic class") 

3101 if elen is _marker: 

3102 if not hasattr(cls, "__parameters__") or not cls.__parameters__: 

3103 raise TypeError(f"{cls} is not a generic class") 

3104 elen = len(cls.__parameters__) 

3105 alen = len(parameters) 

3106 if alen != elen: 

3107 expect_val = elen 

3108 if hasattr(cls, "__parameters__"): 

3109 parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] 

3110 num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters) 

3111 if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples): 

3112 return 

3113 

3114 # deal with TypeVarLike defaults 

3115 # required TypeVarLikes cannot appear after a defaulted one. 

3116 if alen < elen: 

3117 # since we validate TypeVarLike default in _collect_type_vars 

3118 # or _collect_parameters we can safely check parameters[alen] 

3119 if ( 

3120 getattr(parameters[alen], '__default__', NoDefault) 

3121 is not NoDefault 

3122 ): 

3123 return 

3124 

3125 num_default_tv = sum(getattr(p, '__default__', NoDefault) 

3126 is not NoDefault for p in parameters) 

3127 

3128 elen -= num_default_tv 

3129 

3130 expect_val = f"at least {elen}" 

3131 

3132 # Breakpoint: https://github.com/python/cpython/pull/27515 

3133 things = "arguments" if sys.version_info >= (3, 10) else "parameters" 

3134 raise TypeError(f"Too {'many' if alen > elen else 'few'} {things}" 

3135 f" for {cls}; actual {alen}, expected {expect_val}") 

3136else: 

3137 # Python 3.11+ 

3138 

3139 def _check_generic(cls, parameters, elen): 

3140 """Check correct count for parameters of a generic cls (internal helper). 

3141 

3142 This gives a nice error message in case of count mismatch. 

3143 """ 

3144 if not elen: 

3145 raise TypeError(f"{cls} is not a generic class") 

3146 alen = len(parameters) 

3147 if alen != elen: 

3148 expect_val = elen 

3149 if hasattr(cls, "__parameters__"): 

3150 parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] 

3151 

3152 # deal with TypeVarLike defaults 

3153 # required TypeVarLikes cannot appear after a defaulted one. 

3154 if alen < elen: 

3155 # since we validate TypeVarLike default in _collect_type_vars 

3156 # or _collect_parameters we can safely check parameters[alen] 

3157 if ( 

3158 getattr(parameters[alen], '__default__', NoDefault) 

3159 is not NoDefault 

3160 ): 

3161 return 

3162 

3163 num_default_tv = sum(getattr(p, '__default__', NoDefault) 

3164 is not NoDefault for p in parameters) 

3165 

3166 elen -= num_default_tv 

3167 

3168 expect_val = f"at least {elen}" 

3169 

3170 raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments" 

3171 f" for {cls}; actual {alen}, expected {expect_val}") 

3172 

3173if not _PEP_696_IMPLEMENTED: 

3174 typing._check_generic = _check_generic 

3175 

3176 

3177def _has_generic_or_protocol_as_origin() -> bool: 

3178 try: 

3179 frame = sys._getframe(2) 

3180 # - Catch AttributeError: not all Python implementations have sys._getframe() 

3181 # - Catch ValueError: maybe we're called from an unexpected module 

3182 # and the call stack isn't deep enough 

3183 except (AttributeError, ValueError): 

3184 return False # err on the side of leniency 

3185 else: 

3186 # If we somehow get invoked from outside typing.py, 

3187 # also err on the side of leniency 

3188 if frame.f_globals.get("__name__") != "typing": 

3189 return False 

3190 origin = frame.f_locals.get("origin") 

3191 # Cannot use "in" because origin may be an object with a buggy __eq__ that 

3192 # throws an error. 

3193 return origin is typing.Generic or origin is Protocol or origin is typing.Protocol 

3194 

3195 

3196_TYPEVARTUPLE_TYPES = {TypeVarTuple, getattr(typing, "TypeVarTuple", None)} 

3197 

3198 

3199def _is_unpacked_typevartuple(x) -> bool: 

3200 if get_origin(x) is not Unpack: 

3201 return False 

3202 args = get_args(x) 

3203 return ( 

3204 bool(args) 

3205 and len(args) == 1 

3206 and type(args[0]) in _TYPEVARTUPLE_TYPES 

3207 ) 

3208 

3209 

3210# Python 3.11+ _collect_type_vars was renamed to _collect_parameters 

3211if hasattr(typing, '_collect_type_vars'): 

3212 def _collect_type_vars(types, typevar_types=None): 

3213 """Collect all type variable contained in types in order of 

3214 first appearance (lexicographic order). For example:: 

3215 

3216 _collect_type_vars((T, List[S, T])) == (T, S) 

3217 """ 

3218 if typevar_types is None: 

3219 typevar_types = typing.TypeVar 

3220 tvars = [] 

3221 

3222 # A required TypeVarLike cannot appear after a TypeVarLike with a default 

3223 # if it was a direct call to `Generic[]` or `Protocol[]` 

3224 enforce_default_ordering = _has_generic_or_protocol_as_origin() 

3225 default_encountered = False 

3226 

3227 # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple 

3228 type_var_tuple_encountered = False 

3229 

3230 for t in types: 

3231 if _is_unpacked_typevartuple(t): 

3232 type_var_tuple_encountered = True 

3233 elif ( 

3234 isinstance(t, typevar_types) and not isinstance(t, _UnpackAlias) 

3235 and t not in tvars 

3236 ): 

3237 if enforce_default_ordering: 

3238 has_default = getattr(t, '__default__', NoDefault) is not NoDefault 

3239 if has_default: 

3240 if type_var_tuple_encountered: 

3241 raise TypeError('Type parameter with a default' 

3242 ' follows TypeVarTuple') 

3243 default_encountered = True 

3244 elif default_encountered: 

3245 raise TypeError(f'Type parameter {t!r} without a default' 

3246 ' follows type parameter with a default') 

3247 

3248 tvars.append(t) 

3249 if _should_collect_from_parameters(t): 

3250 tvars.extend([t for t in t.__parameters__ if t not in tvars]) 

3251 elif isinstance(t, tuple): 

3252 # Collect nested type_vars 

3253 # tuple wrapped by _prepare_paramspec_params(cls, params) 

3254 for x in t: 

3255 for collected in _collect_type_vars([x]): 

3256 if collected not in tvars: 

3257 tvars.append(collected) 

3258 return tuple(tvars) 

3259 

3260 typing._collect_type_vars = _collect_type_vars 

3261else: 

3262 def _collect_parameters(args): 

3263 """Collect all type variables and parameter specifications in args 

3264 in order of first appearance (lexicographic order). 

3265 

3266 For example:: 

3267 

3268 assert _collect_parameters((T, Callable[P, T])) == (T, P) 

3269 """ 

3270 parameters = [] 

3271 

3272 # A required TypeVarLike cannot appear after a TypeVarLike with default 

3273 # if it was a direct call to `Generic[]` or `Protocol[]` 

3274 enforce_default_ordering = _has_generic_or_protocol_as_origin() 

3275 default_encountered = False 

3276 

3277 # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple 

3278 type_var_tuple_encountered = False 

3279 

3280 for t in args: 

3281 if isinstance(t, type): 

3282 # We don't want __parameters__ descriptor of a bare Python class. 

3283 pass 

3284 elif isinstance(t, tuple): 

3285 # `t` might be a tuple, when `ParamSpec` is substituted with 

3286 # `[T, int]`, or `[int, *Ts]`, etc. 

3287 for x in t: 

3288 for collected in _collect_parameters([x]): 

3289 if collected not in parameters: 

3290 parameters.append(collected) 

3291 elif hasattr(t, '__typing_subst__'): 

3292 if t not in parameters: 

3293 if enforce_default_ordering: 

3294 has_default = ( 

3295 getattr(t, '__default__', NoDefault) is not NoDefault 

3296 ) 

3297 

3298 if type_var_tuple_encountered and has_default: 

3299 raise TypeError('Type parameter with a default' 

3300 ' follows TypeVarTuple') 

3301 

3302 if has_default: 

3303 default_encountered = True 

3304 elif default_encountered: 

3305 raise TypeError(f'Type parameter {t!r} without a default' 

3306 ' follows type parameter with a default') 

3307 

3308 parameters.append(t) 

3309 else: 

3310 if _is_unpacked_typevartuple(t): 

3311 type_var_tuple_encountered = True 

3312 for x in getattr(t, '__parameters__', ()): 

3313 if x not in parameters: 

3314 parameters.append(x) 

3315 

3316 return tuple(parameters) 

3317 

3318 if not _PEP_696_IMPLEMENTED: 

3319 typing._collect_parameters = _collect_parameters 

3320 

3321# Backport typing.NamedTuple as it exists in Python 3.13. 

3322# In 3.11, the ability to define generic `NamedTuple`s was supported. 

3323# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8. 

3324# On 3.12, we added __orig_bases__ to call-based NamedTuples 

3325# On 3.13, we deprecated kwargs-based NamedTuples 

3326# Breakpoint: https://github.com/python/cpython/pull/105609 

3327if sys.version_info >= (3, 13): 

3328 NamedTuple = typing.NamedTuple 

3329else: 

3330 def _make_nmtuple(name, types, module, defaults=()): 

3331 fields = [n for n, t in types] 

3332 annotations = {n: typing._type_check(t, f"field {n} annotation must be a type") 

3333 for n, t in types} 

3334 nm_tpl = collections.namedtuple(name, fields, 

3335 defaults=defaults, module=module) 

3336 nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations 

3337 return nm_tpl 

3338 

3339 _prohibited_namedtuple_fields = typing._prohibited 

3340 _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'}) 

3341 

3342 class _NamedTupleMeta(type): 

3343 def __new__(cls, typename, bases, ns): 

3344 assert _NamedTuple in bases 

3345 for base in bases: 

3346 if base is not _NamedTuple and base is not typing.Generic: 

3347 raise TypeError( 

3348 'can only inherit from a NamedTuple type and Generic') 

3349 bases = tuple(tuple if base is _NamedTuple else base for base in bases) 

3350 if "__annotations__" in ns: 

3351 types = ns["__annotations__"] 

3352 elif "__annotate__" in ns: 

3353 # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated 

3354 types = ns["__annotate__"](1) 

3355 else: 

3356 types = {} 

3357 default_names = [] 

3358 for field_name in types: 

3359 if field_name in ns: 

3360 default_names.append(field_name) 

3361 elif default_names: 

3362 raise TypeError(f"Non-default namedtuple field {field_name} " 

3363 f"cannot follow default field" 

3364 f"{'s' if len(default_names) > 1 else ''} " 

3365 f"{', '.join(default_names)}") 

3366 nm_tpl = _make_nmtuple( 

3367 typename, types.items(), 

3368 defaults=[ns[n] for n in default_names], 

3369 module=ns['__module__'] 

3370 ) 

3371 nm_tpl.__bases__ = bases 

3372 if typing.Generic in bases: 

3373 if hasattr(typing, '_generic_class_getitem'): # 3.12+ 

3374 nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem) 

3375 else: 

3376 class_getitem = typing.Generic.__class_getitem__.__func__ 

3377 nm_tpl.__class_getitem__ = classmethod(class_getitem) 

3378 # update from user namespace without overriding special namedtuple attributes 

3379 for key, val in ns.items(): 

3380 if key in _prohibited_namedtuple_fields: 

3381 raise AttributeError("Cannot overwrite NamedTuple attribute " + key) 

3382 elif key not in _special_namedtuple_fields: 

3383 if key not in nm_tpl._fields: 

3384 setattr(nm_tpl, key, ns[key]) 

3385 try: 

3386 set_name = type(val).__set_name__ 

3387 except AttributeError: 

3388 pass 

3389 else: 

3390 try: 

3391 set_name(val, nm_tpl, key) 

3392 except BaseException as e: 

3393 msg = ( 

3394 f"Error calling __set_name__ on {type(val).__name__!r} " 

3395 f"instance {key!r} in {typename!r}" 

3396 ) 

3397 # BaseException.add_note() existed on py311, 

3398 # but the __set_name__ machinery didn't start 

3399 # using add_note() until py312. 

3400 # Making sure exceptions are raised in the same way 

3401 # as in "normal" classes seems most important here. 

3402 # Breakpoint: https://github.com/python/cpython/pull/95915 

3403 if sys.version_info >= (3, 12): 

3404 e.add_note(msg) 

3405 raise 

3406 else: 

3407 raise RuntimeError(msg) from e 

3408 

3409 if typing.Generic in bases: 

3410 nm_tpl.__init_subclass__() 

3411 return nm_tpl 

3412 

3413 _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {}) 

3414 

3415 def _namedtuple_mro_entries(bases): 

3416 assert NamedTuple in bases 

3417 return (_NamedTuple,) 

3418 

3419 def NamedTuple(typename, fields=_marker, /, **kwargs): 

3420 """Typed version of namedtuple. 

3421 

3422 Usage:: 

3423 

3424 class Employee(NamedTuple): 

3425 name: str 

3426 id: int 

3427 

3428 This is equivalent to:: 

3429 

3430 Employee = collections.namedtuple('Employee', ['name', 'id']) 

3431 

3432 The resulting class has an extra __annotations__ attribute, giving a 

3433 dict that maps field names to types. (The field names are also in 

3434 the _fields attribute, which is part of the namedtuple API.) 

3435 An alternative equivalent functional syntax is also accepted:: 

3436 

3437 Employee = NamedTuple('Employee', [('name', str), ('id', int)]) 

3438 """ 

3439 if fields is _marker: 

3440 if kwargs: 

3441 deprecated_thing = "Creating NamedTuple classes using keyword arguments" 

3442 deprecation_msg = ( 

3443 "{name} is deprecated and will be disallowed in Python {remove}. " 

3444 "Use the class-based or functional syntax instead." 

3445 ) 

3446 else: 

3447 deprecated_thing = "Failing to pass a value for the 'fields' parameter" 

3448 example = f"`{typename} = NamedTuple({typename!r}, [])`" 

3449 deprecation_msg = ( 

3450 "{name} is deprecated and will be disallowed in Python {remove}. " 

3451 "To create a NamedTuple class with 0 fields " 

3452 "using the functional syntax, " 

3453 "pass an empty list, e.g. " 

3454 ) + example + "." 

3455 elif fields is None: 

3456 if kwargs: 

3457 raise TypeError( 

3458 "Cannot pass `None` as the 'fields' parameter " 

3459 "and also specify fields using keyword arguments" 

3460 ) 

3461 else: 

3462 deprecated_thing = "Passing `None` as the 'fields' parameter" 

3463 example = f"`{typename} = NamedTuple({typename!r}, [])`" 

3464 deprecation_msg = ( 

3465 "{name} is deprecated and will be disallowed in Python {remove}. " 

3466 "To create a NamedTuple class with 0 fields " 

3467 "using the functional syntax, " 

3468 "pass an empty list, e.g. " 

3469 ) + example + "." 

3470 elif kwargs: 

3471 raise TypeError("Either list of fields or keywords" 

3472 " can be provided to NamedTuple, not both") 

3473 if fields is _marker or fields is None: 

3474 warnings.warn( 

3475 deprecation_msg.format(name=deprecated_thing, remove="3.15"), 

3476 DeprecationWarning, 

3477 stacklevel=2, 

3478 ) 

3479 fields = kwargs.items() 

3480 nt = _make_nmtuple(typename, fields, module=_caller()) 

3481 nt.__orig_bases__ = (NamedTuple,) 

3482 return nt 

3483 

3484 NamedTuple.__mro_entries__ = _namedtuple_mro_entries 

3485 

3486 

3487if hasattr(collections.abc, "Buffer"): 

3488 Buffer = collections.abc.Buffer 

3489else: 

3490 class Buffer(abc.ABC): # noqa: B024 

3491 """Base class for classes that implement the buffer protocol. 

3492 

3493 The buffer protocol allows Python objects to expose a low-level 

3494 memory buffer interface. Before Python 3.12, it is not possible 

3495 to implement the buffer protocol in pure Python code, or even 

3496 to check whether a class implements the buffer protocol. In 

3497 Python 3.12 and higher, the ``__buffer__`` method allows access 

3498 to the buffer protocol from Python code, and the 

3499 ``collections.abc.Buffer`` ABC allows checking whether a class 

3500 implements the buffer protocol. 

3501 

3502 To indicate support for the buffer protocol in earlier versions, 

3503 inherit from this ABC, either in a stub file or at runtime, 

3504 or use ABC registration. This ABC provides no methods, because 

3505 there is no Python-accessible methods shared by pre-3.12 buffer 

3506 classes. It is useful primarily for static checks. 

3507 

3508 """ 

3509 

3510 # As a courtesy, register the most common stdlib buffer classes. 

3511 Buffer.register(memoryview) 

3512 Buffer.register(bytearray) 

3513 Buffer.register(bytes) 

3514 

3515 

3516# Backport of types.get_original_bases, available on 3.12+ in CPython 

3517if hasattr(_types, "get_original_bases"): 

3518 get_original_bases = _types.get_original_bases 

3519else: 

3520 def get_original_bases(cls, /): 

3521 """Return the class's "original" bases prior to modification by `__mro_entries__`. 

3522 

3523 Examples:: 

3524 

3525 from typing import TypeVar, Generic 

3526 from typing_extensions import NamedTuple, TypedDict 

3527 

3528 T = TypeVar("T") 

3529 class Foo(Generic[T]): ... 

3530 class Bar(Foo[int], float): ... 

3531 class Baz(list[str]): ... 

3532 Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) 

3533 Spam = TypedDict("Spam", {"a": int, "b": str}) 

3534 

3535 assert get_original_bases(Bar) == (Foo[int], float) 

3536 assert get_original_bases(Baz) == (list[str],) 

3537 assert get_original_bases(Eggs) == (NamedTuple,) 

3538 assert get_original_bases(Spam) == (TypedDict,) 

3539 assert get_original_bases(int) == (object,) 

3540 """ 

3541 try: 

3542 return cls.__dict__.get("__orig_bases__", cls.__bases__) 

3543 except AttributeError: 

3544 raise TypeError( 

3545 f'Expected an instance of type, not {type(cls).__name__!r}' 

3546 ) from None 

3547 

3548 

3549# NewType is a class on Python 3.10+, making it pickleable 

3550# The error message for subclassing instances of NewType was improved on 3.11+ 

3551# Breakpoint: https://github.com/python/cpython/pull/30268 

3552if sys.version_info >= (3, 11): 

3553 NewType = typing.NewType 

3554else: 

3555 class NewType: 

3556 """NewType creates simple unique types with almost zero 

3557 runtime overhead. NewType(name, tp) is considered a subtype of tp 

3558 by static type checkers. At runtime, NewType(name, tp) returns 

3559 a dummy callable that simply returns its argument. Usage:: 

3560 UserId = NewType('UserId', int) 

3561 def name_by_id(user_id: UserId) -> str: 

3562 ... 

3563 UserId('user') # Fails type check 

3564 name_by_id(42) # Fails type check 

3565 name_by_id(UserId(42)) # OK 

3566 num = UserId(5) + 1 # type: int 

3567 """ 

3568 

3569 def __call__(self, obj, /): 

3570 return obj 

3571 

3572 def __init__(self, name, tp): 

3573 self.__qualname__ = name 

3574 if '.' in name: 

3575 name = name.rpartition('.')[-1] 

3576 self.__name__ = name 

3577 self.__supertype__ = tp 

3578 def_mod = _caller() 

3579 if def_mod != 'typing_extensions': 

3580 self.__module__ = def_mod 

3581 

3582 def __mro_entries__(self, bases): 

3583 # We defined __mro_entries__ to get a better error message 

3584 # if a user attempts to subclass a NewType instance. bpo-46170 

3585 supercls_name = self.__name__ 

3586 

3587 class Dummy: 

3588 def __init_subclass__(cls): 

3589 subcls_name = cls.__name__ 

3590 raise TypeError( 

3591 f"Cannot subclass an instance of NewType. " 

3592 f"Perhaps you were looking for: " 

3593 f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`" 

3594 ) 

3595 

3596 return (Dummy,) 

3597 

3598 def __repr__(self): 

3599 return f'{self.__module__}.{self.__qualname__}' 

3600 

3601 def __reduce__(self): 

3602 return self.__qualname__ 

3603 

3604 # Breakpoint: https://github.com/python/cpython/pull/21515 

3605 if sys.version_info >= (3, 10): 

3606 # PEP 604 methods 

3607 # It doesn't make sense to have these methods on Python <3.10 

3608 

3609 def __or__(self, other): 

3610 return typing.Union[self, other] 

3611 

3612 def __ror__(self, other): 

3613 return typing.Union[other, self] 

3614 

3615 

3616# Breakpoint: https://github.com/python/cpython/pull/124795 

3617if sys.version_info >= (3, 14): 

3618 TypeAliasType = typing.TypeAliasType 

3619# <=3.13 

3620else: 

3621 # Breakpoint: https://github.com/python/cpython/pull/103764 

3622 if sys.version_info >= (3, 12): 

3623 # 3.12-3.13 

3624 def _is_unionable(obj): 

3625 """Corresponds to is_unionable() in unionobject.c in CPython.""" 

3626 return obj is None or isinstance(obj, ( 

3627 type, 

3628 _types.GenericAlias, 

3629 _types.UnionType, 

3630 typing.TypeAliasType, 

3631 TypeAliasType, 

3632 )) 

3633 else: 

3634 # <=3.11 

3635 def _is_unionable(obj): 

3636 """Corresponds to is_unionable() in unionobject.c in CPython.""" 

3637 return obj is None or isinstance(obj, ( 

3638 type, 

3639 _types.GenericAlias, 

3640 _types.UnionType, 

3641 TypeAliasType, 

3642 )) 

3643 

3644 if sys.version_info < (3, 10): 

3645 # Copied and pasted from https://github.com/python/cpython/blob/986a4e1b6fcae7fe7a1d0a26aea446107dd58dd2/Objects/genericaliasobject.c#L568-L582, 

3646 # so that we emulate the behaviour of `types.GenericAlias` 

3647 # on the latest versions of CPython 

3648 _ATTRIBUTE_DELEGATION_EXCLUSIONS = frozenset({ 

3649 "__class__", 

3650 "__bases__", 

3651 "__origin__", 

3652 "__args__", 

3653 "__unpacked__", 

3654 "__parameters__", 

3655 "__typing_unpacked_tuple_args__", 

3656 "__mro_entries__", 

3657 "__reduce_ex__", 

3658 "__reduce__", 

3659 "__copy__", 

3660 "__deepcopy__", 

3661 }) 

3662 

3663 class _TypeAliasGenericAlias(typing._GenericAlias, _root=True): 

3664 def __getattr__(self, attr): 

3665 if attr in _ATTRIBUTE_DELEGATION_EXCLUSIONS: 

3666 return object.__getattr__(self, attr) 

3667 return getattr(self.__origin__, attr) 

3668 

3669 

3670 class TypeAliasType: 

3671 """Create named, parameterized type aliases. 

3672 

3673 This provides a backport of the new `type` statement in Python 3.12: 

3674 

3675 type ListOrSet[T] = list[T] | set[T] 

3676 

3677 is equivalent to: 

3678 

3679 T = TypeVar("T") 

3680 ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,)) 

3681 

3682 The name ListOrSet can then be used as an alias for the type it refers to. 

3683 

3684 The type_params argument should contain all the type parameters used 

3685 in the value of the type alias. If the alias is not generic, this 

3686 argument is omitted. 

3687 

3688 Static type checkers should only support type aliases declared using 

3689 TypeAliasType that follow these rules: 

3690 

3691 - The first argument (the name) must be a string literal. 

3692 - The TypeAliasType instance must be immediately assigned to a variable 

3693 of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid, 

3694 as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)'). 

3695 

3696 """ 

3697 

3698 def __init__(self, name: str, value, *, type_params=()): 

3699 if not isinstance(name, str): 

3700 raise TypeError("TypeAliasType name must be a string") 

3701 if not isinstance(type_params, tuple): 

3702 raise TypeError("type_params must be a tuple") 

3703 self.__value__ = value 

3704 self.__type_params__ = type_params 

3705 

3706 default_value_encountered = False 

3707 parameters = [] 

3708 for type_param in type_params: 

3709 if ( 

3710 not isinstance(type_param, (TypeVar, TypeVarTuple, ParamSpec)) 

3711 # <=3.11 

3712 # Unpack Backport passes isinstance(type_param, TypeVar) 

3713 or _is_unpack(type_param) 

3714 ): 

3715 raise TypeError(f"Expected a type param, got {type_param!r}") 

3716 has_default = ( 

3717 getattr(type_param, '__default__', NoDefault) is not NoDefault 

3718 ) 

3719 if default_value_encountered and not has_default: 

3720 raise TypeError(f"non-default type parameter '{type_param!r}'" 

3721 " follows default type parameter") 

3722 if has_default: 

3723 default_value_encountered = True 

3724 if isinstance(type_param, TypeVarTuple): 

3725 parameters.extend(type_param) 

3726 else: 

3727 parameters.append(type_param) 

3728 self.__parameters__ = tuple(parameters) 

3729 def_mod = _caller() 

3730 if def_mod != 'typing_extensions': 

3731 self.__module__ = def_mod 

3732 # Setting this attribute closes the TypeAliasType from further modification 

3733 self.__name__ = name 

3734 

3735 def __setattr__(self, name: str, value: object, /) -> None: 

3736 if hasattr(self, "__name__"): 

3737 self._raise_attribute_error(name) 

3738 super().__setattr__(name, value) 

3739 

3740 def __delattr__(self, name: str, /) -> Never: 

3741 self._raise_attribute_error(name) 

3742 

3743 def _raise_attribute_error(self, name: str) -> Never: 

3744 # Match the Python 3.12 error messages exactly 

3745 if name == "__name__": 

3746 raise AttributeError("readonly attribute") 

3747 elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}: 

3748 raise AttributeError( 

3749 f"attribute '{name}' of 'typing.TypeAliasType' objects " 

3750 "is not writable" 

3751 ) 

3752 else: 

3753 raise AttributeError( 

3754 f"'typing.TypeAliasType' object has no attribute '{name}'" 

3755 ) 

3756 

3757 def __repr__(self) -> str: 

3758 return self.__name__ 

3759 

3760 if sys.version_info < (3, 11): 

3761 def _check_single_param(self, param, recursion=0): 

3762 # Allow [], [int], [int, str], [int, ...], [int, T] 

3763 if param is ...: 

3764 return ... 

3765 if param is None: 

3766 return None 

3767 # Note in <= 3.9 _ConcatenateGenericAlias inherits from list 

3768 if isinstance(param, list) and recursion == 0: 

3769 return [self._check_single_param(arg, recursion+1) 

3770 for arg in param] 

3771 return typing._type_check( 

3772 param, f'Subscripting {self.__name__} requires a type.' 

3773 ) 

3774 

3775 def _check_parameters(self, parameters): 

3776 if sys.version_info < (3, 11): 

3777 return tuple( 

3778 self._check_single_param(item) 

3779 for item in parameters 

3780 ) 

3781 return tuple(typing._type_check( 

3782 item, f'Subscripting {self.__name__} requires a type.' 

3783 ) 

3784 for item in parameters 

3785 ) 

3786 

3787 def __getitem__(self, parameters): 

3788 if not self.__type_params__: 

3789 raise TypeError("Only generic type aliases are subscriptable") 

3790 if not isinstance(parameters, tuple): 

3791 parameters = (parameters,) 

3792 # Using 3.9 here will create problems with Concatenate 

3793 if sys.version_info >= (3, 10): 

3794 return _types.GenericAlias(self, parameters) 

3795 type_vars = _collect_type_vars(parameters) 

3796 parameters = self._check_parameters(parameters) 

3797 alias = _TypeAliasGenericAlias(self, parameters) 

3798 # alias.__parameters__ is not complete if Concatenate is present 

3799 # as it is converted to a list from which no parameters are extracted. 

3800 if alias.__parameters__ != type_vars: 

3801 alias.__parameters__ = type_vars 

3802 return alias 

3803 

3804 def __reduce__(self): 

3805 return self.__name__ 

3806 

3807 def __init_subclass__(cls, *args, **kwargs): 

3808 raise TypeError( 

3809 "type 'typing_extensions.TypeAliasType' is not an acceptable base type" 

3810 ) 

3811 

3812 # The presence of this method convinces typing._type_check 

3813 # that TypeAliasTypes are types. 

3814 def __call__(self): 

3815 raise TypeError("Type alias is not callable") 

3816 

3817 # Breakpoint: https://github.com/python/cpython/pull/21515 

3818 if sys.version_info >= (3, 10): 

3819 def __or__(self, right): 

3820 # For forward compatibility with 3.12, reject Unions 

3821 # that are not accepted by the built-in Union. 

3822 if not _is_unionable(right): 

3823 return NotImplemented 

3824 return typing.Union[self, right] 

3825 

3826 def __ror__(self, left): 

3827 if not _is_unionable(left): 

3828 return NotImplemented 

3829 return typing.Union[left, self] 

3830 

3831 

3832if hasattr(typing, "is_protocol"): 

3833 is_protocol = typing.is_protocol 

3834 get_protocol_members = typing.get_protocol_members 

3835else: 

3836 def is_protocol(tp: type, /) -> bool: 

3837 """Return True if the given type is a Protocol. 

3838 

3839 Example:: 

3840 

3841 >>> from typing_extensions import Protocol, is_protocol 

3842 >>> class P(Protocol): 

3843 ... def a(self) -> str: ... 

3844 ... b: int 

3845 >>> is_protocol(P) 

3846 True 

3847 >>> is_protocol(int) 

3848 False 

3849 """ 

3850 return ( 

3851 isinstance(tp, type) 

3852 and getattr(tp, '_is_protocol', False) 

3853 and tp is not Protocol 

3854 and tp is not typing.Protocol 

3855 ) 

3856 

3857 def get_protocol_members(tp: type, /) -> typing.FrozenSet[str]: 

3858 """Return the set of members defined in a Protocol. 

3859 

3860 Example:: 

3861 

3862 >>> from typing_extensions import Protocol, get_protocol_members 

3863 >>> class P(Protocol): 

3864 ... def a(self) -> str: ... 

3865 ... b: int 

3866 >>> get_protocol_members(P) == frozenset({'a', 'b'}) 

3867 True 

3868 

3869 Raise a TypeError for arguments that are not Protocols. 

3870 """ 

3871 if not is_protocol(tp): 

3872 raise TypeError(f'{tp!r} is not a Protocol') 

3873 if hasattr(tp, '__protocol_attrs__'): 

3874 return frozenset(tp.__protocol_attrs__) 

3875 return frozenset(_get_protocol_attrs(tp)) 

3876 

3877 

3878if hasattr(typing, "Doc"): 

3879 Doc = typing.Doc 

3880else: 

3881 class Doc: 

3882 """Define the documentation of a type annotation using ``Annotated``, to be 

3883 used in class attributes, function and method parameters, return values, 

3884 and variables. 

3885 

3886 The value should be a positional-only string literal to allow static tools 

3887 like editors and documentation generators to use it. 

3888 

3889 This complements docstrings. 

3890 

3891 The string value passed is available in the attribute ``documentation``. 

3892 

3893 Example:: 

3894 

3895 >>> from typing_extensions import Annotated, Doc 

3896 >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ... 

3897 """ 

3898 def __init__(self, documentation: str, /) -> None: 

3899 self.documentation = documentation 

3900 

3901 def __repr__(self) -> str: 

3902 return f"Doc({self.documentation!r})" 

3903 

3904 def __hash__(self) -> int: 

3905 return hash(self.documentation) 

3906 

3907 def __eq__(self, other: object) -> bool: 

3908 if not isinstance(other, Doc): 

3909 return NotImplemented 

3910 return self.documentation == other.documentation 

3911 

3912 

3913_CapsuleType = getattr(_types, "CapsuleType", None) 

3914 

3915if _CapsuleType is None: 

3916 try: 

3917 import _socket 

3918 except ImportError: 

3919 pass 

3920 else: 

3921 _CAPI = getattr(_socket, "CAPI", None) 

3922 if _CAPI is not None: 

3923 _CapsuleType = type(_CAPI) 

3924 

3925if _CapsuleType is not None: 

3926 CapsuleType = _CapsuleType 

3927 __all__.append("CapsuleType") 

3928 

3929 

3930if sys.version_info >= (3, 14): 

3931 from annotationlib import Format, get_annotations 

3932else: 

3933 # Available since Python 3.14.0a3 

3934 # PR: https://github.com/python/cpython/pull/124415 

3935 class Format(enum.IntEnum): 

3936 VALUE = 1 

3937 VALUE_WITH_FAKE_GLOBALS = 2 

3938 FORWARDREF = 3 

3939 STRING = 4 

3940 

3941 # Available since Python 3.14.0a1 

3942 # PR: https://github.com/python/cpython/pull/119891 

3943 def get_annotations(obj, *, globals=None, locals=None, eval_str=False, 

3944 format=Format.VALUE): 

3945 """Compute the annotations dict for an object. 

3946 

3947 obj may be a callable, class, or module. 

3948 Passing in an object of any other type raises TypeError. 

3949 

3950 Returns a dict. get_annotations() returns a new dict every time 

3951 it's called; calling it twice on the same object will return two 

3952 different but equivalent dicts. 

3953 

3954 This is a backport of `inspect.get_annotations`, which has been 

3955 in the standard library since Python 3.10. See the standard library 

3956 documentation for more: 

3957 

3958 https://docs.python.org/3/library/inspect.html#inspect.get_annotations 

3959 

3960 This backport adds the *format* argument introduced by PEP 649. The 

3961 three formats supported are: 

3962 * VALUE: the annotations are returned as-is. This is the default and 

3963 it is compatible with the behavior on previous Python versions. 

3964 * FORWARDREF: return annotations as-is if possible, but replace any 

3965 undefined names with ForwardRef objects. The implementation proposed by 

3966 PEP 649 relies on language changes that cannot be backported; the 

3967 typing-extensions implementation simply returns the same result as VALUE. 

3968 * STRING: return annotations as strings, in a format close to the original 

3969 source. Again, this behavior cannot be replicated directly in a backport. 

3970 As an approximation, typing-extensions retrieves the annotations under 

3971 VALUE semantics and then stringifies them. 

3972 

3973 The purpose of this backport is to allow users who would like to use 

3974 FORWARDREF or STRING semantics once PEP 649 is implemented, but who also 

3975 want to support earlier Python versions, to simply write: 

3976 

3977 typing_extensions.get_annotations(obj, format=Format.FORWARDREF) 

3978 

3979 """ 

3980 format = Format(format) 

3981 if format is Format.VALUE_WITH_FAKE_GLOBALS: 

3982 raise ValueError( 

3983 "The VALUE_WITH_FAKE_GLOBALS format is for internal use only" 

3984 ) 

3985 

3986 if eval_str and format is not Format.VALUE: 

3987 raise ValueError("eval_str=True is only supported with format=Format.VALUE") 

3988 

3989 if isinstance(obj, type): 

3990 # class 

3991 obj_dict = getattr(obj, '__dict__', None) 

3992 if obj_dict and hasattr(obj_dict, 'get'): 

3993 ann = obj_dict.get('__annotations__', None) 

3994 if isinstance(ann, _types.GetSetDescriptorType): 

3995 ann = None 

3996 else: 

3997 ann = None 

3998 

3999 obj_globals = None 

4000 module_name = getattr(obj, '__module__', None) 

4001 if module_name: 

4002 module = sys.modules.get(module_name, None) 

4003 if module: 

4004 obj_globals = getattr(module, '__dict__', None) 

4005 obj_locals = dict(vars(obj)) 

4006 unwrap = obj 

4007 elif isinstance(obj, _types.ModuleType): 

4008 # module 

4009 ann = getattr(obj, '__annotations__', None) 

4010 obj_globals = obj.__dict__ 

4011 obj_locals = None 

4012 unwrap = None 

4013 elif callable(obj): 

4014 # this includes types.Function, types.BuiltinFunctionType, 

4015 # types.BuiltinMethodType, functools.partial, functools.singledispatch, 

4016 # "class funclike" from Lib/test/test_inspect... on and on it goes. 

4017 ann = getattr(obj, '__annotations__', None) 

4018 obj_globals = getattr(obj, '__globals__', None) 

4019 obj_locals = None 

4020 unwrap = obj 

4021 elif hasattr(obj, '__annotations__'): 

4022 ann = obj.__annotations__ 

4023 obj_globals = obj_locals = unwrap = None 

4024 else: 

4025 raise TypeError(f"{obj!r} is not a module, class, or callable.") 

4026 

4027 if ann is None: 

4028 return {} 

4029 

4030 if not isinstance(ann, dict): 

4031 raise ValueError(f"{obj!r}.__annotations__ is neither a dict nor None") 

4032 

4033 if not ann: 

4034 return {} 

4035 

4036 if not eval_str: 

4037 if format is Format.STRING: 

4038 return { 

4039 key: value if isinstance(value, str) else typing._type_repr(value) 

4040 for key, value in ann.items() 

4041 } 

4042 return dict(ann) 

4043 

4044 if unwrap is not None: 

4045 while True: 

4046 if hasattr(unwrap, '__wrapped__'): 

4047 unwrap = unwrap.__wrapped__ 

4048 continue 

4049 if isinstance(unwrap, functools.partial): 

4050 unwrap = unwrap.func 

4051 continue 

4052 break 

4053 if hasattr(unwrap, "__globals__"): 

4054 obj_globals = unwrap.__globals__ 

4055 

4056 if globals is None: 

4057 globals = obj_globals 

4058 if locals is None: 

4059 locals = obj_locals or {} 

4060 

4061 # "Inject" type parameters into the local namespace 

4062 # (unless they are shadowed by assignments *in* the local namespace), 

4063 # as a way of emulating annotation scopes when calling `eval()` 

4064 if type_params := getattr(obj, "__type_params__", ()): 

4065 locals = {param.__name__: param for param in type_params} | locals 

4066 

4067 return_value = {key: 

4068 value if not isinstance(value, str) else eval(value, globals, locals) 

4069 for key, value in ann.items() } 

4070 return return_value 

4071 

4072 

4073if hasattr(typing, "evaluate_forward_ref"): 

4074 evaluate_forward_ref = typing.evaluate_forward_ref 

4075else: 

4076 # Implements annotationlib.ForwardRef.evaluate 

4077 def _eval_with_owner( 

4078 forward_ref, *, owner=None, globals=None, locals=None, type_params=None 

4079 ): 

4080 if forward_ref.__forward_evaluated__: 

4081 return forward_ref.__forward_value__ 

4082 if getattr(forward_ref, "__cell__", None) is not None: 

4083 try: 

4084 value = forward_ref.__cell__.cell_contents 

4085 except ValueError: 

4086 pass 

4087 else: 

4088 forward_ref.__forward_evaluated__ = True 

4089 forward_ref.__forward_value__ = value 

4090 return value 

4091 if owner is None: 

4092 owner = getattr(forward_ref, "__owner__", None) 

4093 

4094 if ( 

4095 globals is None 

4096 and getattr(forward_ref, "__forward_module__", None) is not None 

4097 ): 

4098 globals = getattr( 

4099 sys.modules.get(forward_ref.__forward_module__, None), "__dict__", None 

4100 ) 

4101 if globals is None: 

4102 globals = getattr(forward_ref, "__globals__", None) 

4103 if globals is None: 

4104 if isinstance(owner, type): 

4105 module_name = getattr(owner, "__module__", None) 

4106 if module_name: 

4107 module = sys.modules.get(module_name, None) 

4108 if module: 

4109 globals = getattr(module, "__dict__", None) 

4110 elif isinstance(owner, _types.ModuleType): 

4111 globals = getattr(owner, "__dict__", None) 

4112 elif callable(owner): 

4113 globals = getattr(owner, "__globals__", None) 

4114 

4115 # If we pass None to eval() below, the globals of this module are used. 

4116 if globals is None: 

4117 globals = {} 

4118 

4119 if locals is None: 

4120 locals = {} 

4121 if isinstance(owner, type): 

4122 locals.update(vars(owner)) 

4123 

4124 if type_params is None and owner is not None: 

4125 # "Inject" type parameters into the local namespace 

4126 # (unless they are shadowed by assignments *in* the local namespace), 

4127 # as a way of emulating annotation scopes when calling `eval()` 

4128 type_params = getattr(owner, "__type_params__", None) 

4129 

4130 # Type parameters exist in their own scope, which is logically 

4131 # between the locals and the globals. We simulate this by adding 

4132 # them to the globals. 

4133 if type_params is not None: 

4134 globals = dict(globals) 

4135 for param in type_params: 

4136 globals[param.__name__] = param 

4137 

4138 arg = forward_ref.__forward_arg__ 

4139 if arg.isidentifier() and not keyword.iskeyword(arg): 

4140 if arg in locals: 

4141 value = locals[arg] 

4142 elif arg in globals: 

4143 value = globals[arg] 

4144 elif hasattr(builtins, arg): 

4145 return getattr(builtins, arg) 

4146 else: 

4147 raise NameError(arg) 

4148 else: 

4149 code = forward_ref.__forward_code__ 

4150 value = eval(code, globals, locals) 

4151 forward_ref.__forward_evaluated__ = True 

4152 forward_ref.__forward_value__ = value 

4153 return value 

4154 

4155 def evaluate_forward_ref( 

4156 forward_ref, 

4157 *, 

4158 owner=None, 

4159 globals=None, 

4160 locals=None, 

4161 type_params=None, 

4162 format=None, 

4163 _recursive_guard=frozenset(), 

4164 ): 

4165 """Evaluate a forward reference as a type hint. 

4166 

4167 This is similar to calling the ForwardRef.evaluate() method, 

4168 but unlike that method, evaluate_forward_ref() also: 

4169 

4170 * Recursively evaluates forward references nested within the type hint. 

4171 * Rejects certain objects that are not valid type hints. 

4172 * Replaces type hints that evaluate to None with types.NoneType. 

4173 * Supports the *FORWARDREF* and *STRING* formats. 

4174 

4175 *forward_ref* must be an instance of ForwardRef. *owner*, if given, 

4176 should be the object that holds the annotations that the forward reference 

4177 derived from, such as a module, class object, or function. It is used to 

4178 infer the namespaces to use for looking up names. *globals* and *locals* 

4179 can also be explicitly given to provide the global and local namespaces. 

4180 *type_params* is a tuple of type parameters that are in scope when 

4181 evaluating the forward reference. This parameter must be provided (though 

4182 it may be an empty tuple) if *owner* is not given and the forward reference 

4183 does not already have an owner set. *format* specifies the format of the 

4184 annotation and is a member of the annotationlib.Format enum. 

4185 

4186 """ 

4187 if format == Format.STRING: 

4188 return forward_ref.__forward_arg__ 

4189 if forward_ref.__forward_arg__ in _recursive_guard: 

4190 return forward_ref 

4191 

4192 # Evaluate the forward reference 

4193 try: 

4194 value = _eval_with_owner( 

4195 forward_ref, 

4196 owner=owner, 

4197 globals=globals, 

4198 locals=locals, 

4199 type_params=type_params, 

4200 ) 

4201 except NameError: 

4202 if format == Format.FORWARDREF: 

4203 return forward_ref 

4204 else: 

4205 raise 

4206 

4207 if isinstance(value, str): 

4208 value = ForwardRef(value) 

4209 

4210 # Recursively evaluate the type 

4211 if isinstance(value, ForwardRef): 

4212 if getattr(value, "__forward_module__", True) is not None: 

4213 globals = None 

4214 return evaluate_forward_ref( 

4215 value, 

4216 globals=globals, 

4217 locals=locals, 

4218 type_params=type_params, owner=owner, 

4219 _recursive_guard=_recursive_guard, format=format 

4220 ) 

4221 if sys.version_info < (3, 12, 5) and type_params: 

4222 # Make use of type_params 

4223 locals = dict(locals) if locals else {} 

4224 for tvar in type_params: 

4225 if tvar.__name__ not in locals: # lets not overwrite something present 

4226 locals[tvar.__name__] = tvar 

4227 if sys.version_info < (3, 12, 5): 

4228 return typing._eval_type( 

4229 value, 

4230 globals, 

4231 locals, 

4232 recursive_guard=_recursive_guard | {forward_ref.__forward_arg__}, 

4233 ) 

4234 else: 

4235 return typing._eval_type( 

4236 value, 

4237 globals, 

4238 locals, 

4239 type_params, 

4240 recursive_guard=_recursive_guard | {forward_ref.__forward_arg__}, 

4241 ) 

4242 

4243 

4244if sys.version_info >= (3, 14, 0, "beta"): 

4245 type_repr = annotationlib.type_repr 

4246else: 

4247 def type_repr(value): 

4248 """Convert a Python value to a format suitable for use with the STRING format. 

4249 

4250 This is intended as a helper for tools that support the STRING format but do 

4251 not have access to the code that originally produced the annotations. It uses 

4252 repr() for most objects. 

4253 

4254 """ 

4255 if isinstance(value, (type, _types.FunctionType, _types.BuiltinFunctionType)): 

4256 if value.__module__ == "builtins": 

4257 return value.__qualname__ 

4258 return f"{value.__module__}.{value.__qualname__}" 

4259 if value is ...: 

4260 return "..." 

4261 return repr(value) 

4262 

4263 

4264# Aliases for items that are in typing in all supported versions. 

4265# We use hasattr() checks so this library will continue to import on 

4266# future versions of Python that may remove these names. 

4267_typing_names = [ 

4268 "AbstractSet", 

4269 "AnyStr", 

4270 "BinaryIO", 

4271 "Callable", 

4272 "Collection", 

4273 "Container", 

4274 "Dict", 

4275 "FrozenSet", 

4276 "Hashable", 

4277 "IO", 

4278 "ItemsView", 

4279 "Iterable", 

4280 "Iterator", 

4281 "KeysView", 

4282 "List", 

4283 "Mapping", 

4284 "MappingView", 

4285 "Match", 

4286 "MutableMapping", 

4287 "MutableSequence", 

4288 "MutableSet", 

4289 "Optional", 

4290 "Pattern", 

4291 "Reversible", 

4292 "Sequence", 

4293 "Set", 

4294 "Sized", 

4295 "TextIO", 

4296 "Tuple", 

4297 "Union", 

4298 "ValuesView", 

4299 "cast", 

4300 "no_type_check", 

4301 "no_type_check_decorator", 

4302 # This is private, but it was defined by typing_extensions for a long time 

4303 # and some users rely on it. 

4304 "_AnnotatedAlias", 

4305] 

4306globals().update( 

4307 {name: getattr(typing, name) for name in _typing_names if hasattr(typing, name)} 

4308) 

4309# These are defined unconditionally because they are used in 

4310# typing-extensions itself. 

4311Generic = typing.Generic 

4312ForwardRef = typing.ForwardRef 

4313Annotated = typing.Annotated 

4314 

4315# Breakpoint: https://github.com/python/cpython/pull/133602 

4316if sys.version_info < (3, 15, 0): 

4317 __all__.append("no_type_check_decorator")