1import abc
2import builtins
3import collections
4import collections.abc
5import contextlib
6import enum
7import functools
8import inspect
9import io
10import keyword
11import operator
12import sys
13import types as _types
14import typing
15import warnings
16
17# Breakpoint: https://github.com/python/cpython/pull/119891
18if sys.version_info >= (3, 14):
19 import annotationlib
20
21__all__ = [
22 # Super-special typing primitives.
23 'Any',
24 'ClassVar',
25 'Concatenate',
26 'Final',
27 'LiteralString',
28 'ParamSpec',
29 'ParamSpecArgs',
30 'ParamSpecKwargs',
31 'Self',
32 'Type',
33 'TypeVar',
34 'TypeVarTuple',
35 'Unpack',
36
37 # ABCs (from collections.abc).
38 'Awaitable',
39 'AsyncIterator',
40 'AsyncIterable',
41 'Coroutine',
42 'AsyncGenerator',
43 'AsyncContextManager',
44 'Buffer',
45 'ChainMap',
46
47 # Concrete collection types.
48 'ContextManager',
49 'Counter',
50 'Deque',
51 'DefaultDict',
52 'NamedTuple',
53 'OrderedDict',
54 'TypedDict',
55
56 # Structural checks, a.k.a. protocols.
57 'SupportsAbs',
58 'SupportsBytes',
59 'SupportsComplex',
60 'SupportsFloat',
61 'SupportsIndex',
62 'SupportsInt',
63 'SupportsRound',
64 'Reader',
65 'Writer',
66
67 # One-off things.
68 'Annotated',
69 'assert_never',
70 'assert_type',
71 'clear_overloads',
72 'dataclass_transform',
73 'deprecated',
74 'disjoint_base',
75 'Doc',
76 'evaluate_forward_ref',
77 'get_overloads',
78 'final',
79 'Format',
80 'get_annotations',
81 'get_args',
82 'get_origin',
83 'get_original_bases',
84 'get_protocol_members',
85 'get_type_hints',
86 'IntVar',
87 'is_protocol',
88 'is_typeddict',
89 'Literal',
90 'NewType',
91 'overload',
92 'override',
93 'Protocol',
94 'Sentinel',
95 'reveal_type',
96 'runtime',
97 'runtime_checkable',
98 'Text',
99 'TypeAlias',
100 'TypeAliasType',
101 'TypeForm',
102 'TypeGuard',
103 'TypeIs',
104 'TYPE_CHECKING',
105 'type_repr',
106 'Never',
107 'NoReturn',
108 'ReadOnly',
109 'Required',
110 'NotRequired',
111 'NoDefault',
112 'NoExtraItems',
113
114 # Pure aliases, have always been in typing
115 'AbstractSet',
116 'AnyStr',
117 'BinaryIO',
118 'Callable',
119 'Collection',
120 'Container',
121 'Dict',
122 'ForwardRef',
123 'FrozenSet',
124 'Generator',
125 'Generic',
126 'Hashable',
127 'IO',
128 'ItemsView',
129 'Iterable',
130 'Iterator',
131 'KeysView',
132 'List',
133 'Mapping',
134 'MappingView',
135 'Match',
136 'MutableMapping',
137 'MutableSequence',
138 'MutableSet',
139 'Optional',
140 'Pattern',
141 'Reversible',
142 'Sequence',
143 'Set',
144 'Sized',
145 'TextIO',
146 'Tuple',
147 'Union',
148 'ValuesView',
149 'cast',
150 'no_type_check',
151 'no_type_check_decorator',
152]
153
154# for backward compatibility
155PEP_560 = True
156GenericMeta = type
157# Breakpoint: https://github.com/python/cpython/pull/116129
158_PEP_696_IMPLEMENTED = sys.version_info >= (3, 13, 0, "beta")
159
160# Added with bpo-45166 to 3.10.1+ and some 3.9 versions
161_FORWARD_REF_HAS_CLASS = "__forward_is_class__" in typing.ForwardRef.__slots__
162
163# The functions below are modified copies of typing internal helpers.
164# They are needed by _ProtocolMeta and they provide support for PEP 646.
165
166
167class _Sentinel:
168 def __repr__(self):
169 return "<sentinel>"
170
171
172_marker = _Sentinel()
173
174
175# Breakpoint: https://github.com/python/cpython/pull/27342
176if sys.version_info >= (3, 10):
177 def _should_collect_from_parameters(t):
178 return isinstance(
179 t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType)
180 )
181else:
182 def _should_collect_from_parameters(t):
183 return isinstance(t, (typing._GenericAlias, _types.GenericAlias))
184
185
186NoReturn = typing.NoReturn
187
188# Some unconstrained type variables. These are used by the container types.
189# (These are not for export.)
190T = typing.TypeVar('T') # Any type.
191KT = typing.TypeVar('KT') # Key type.
192VT = typing.TypeVar('VT') # Value type.
193T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers.
194T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant.
195
196
197# Breakpoint: https://github.com/python/cpython/pull/31841
198if sys.version_info >= (3, 11):
199 from typing import Any
200else:
201
202 class _AnyMeta(type):
203 def __instancecheck__(self, obj):
204 if self is Any:
205 raise TypeError("typing_extensions.Any cannot be used with isinstance()")
206 return super().__instancecheck__(obj)
207
208 def __repr__(self):
209 if self is Any:
210 return "typing_extensions.Any"
211 return super().__repr__()
212
213 class Any(metaclass=_AnyMeta):
214 """Special type indicating an unconstrained type.
215 - Any is compatible with every type.
        - Any is assumed to have all methods.
        - All values are assumed to be instances of Any.
218 Note that all the above statements are true from the point of view of
219 static type checkers. At runtime, Any should not be used with instance
220 checks.
221 """
222 def __new__(cls, *args, **kwargs):
223 if cls is Any:
224 raise TypeError("Any cannot be instantiated")
225 return super().__new__(cls, *args, **kwargs)
226
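
# A minimal usage sketch (comment only; ``Mock`` is an illustrative name):
# mirroring Python 3.11+, the backported Any can be used as a base class,
# while isinstance() checks against it are rejected::
#
#     class Mock(Any):
#         pass
#
#     isinstance(object(), Any)   # raises TypeError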
227
228ClassVar = typing.ClassVar
229
# Vendored from cpython typing._SpecialForm
231# Having a separate class means that instances will not be rejected by
232# typing._type_check.
233class _SpecialForm(typing._Final, _root=True):
234 __slots__ = ('_name', '__doc__', '_getitem')
235
236 def __init__(self, getitem):
237 self._getitem = getitem
238 self._name = getitem.__name__
239 self.__doc__ = getitem.__doc__
240
241 def __getattr__(self, item):
242 if item in {'__name__', '__qualname__'}:
243 return self._name
244
245 raise AttributeError(item)
246
247 def __mro_entries__(self, bases):
248 raise TypeError(f"Cannot subclass {self!r}")
249
250 def __repr__(self):
251 return f'typing_extensions.{self._name}'
252
253 def __reduce__(self):
254 return self._name
255
256 def __call__(self, *args, **kwds):
257 raise TypeError(f"Cannot instantiate {self!r}")
258
259 def __or__(self, other):
260 return typing.Union[self, other]
261
262 def __ror__(self, other):
263 return typing.Union[other, self]
264
265 def __instancecheck__(self, obj):
266 raise TypeError(f"{self} cannot be used with isinstance()")
267
268 def __subclasscheck__(self, cls):
269 raise TypeError(f"{self} cannot be used with issubclass()")
270
271 @typing._tp_cache
272 def __getitem__(self, parameters):
273 return self._getitem(self, parameters)
274
275
276# Note that inheriting from this class means that the object will be
277# rejected by typing._type_check, so do not use it if the special form
278# is arguably valid as a type by itself.
279class _ExtensionsSpecialForm(typing._SpecialForm, _root=True):
280 def __repr__(self):
281 return 'typing_extensions.' + self._name
282
283
284Final = typing.Final
285
286# Breakpoint: https://github.com/python/cpython/pull/30530
287if sys.version_info >= (3, 11):
288 final = typing.final
289else:
290 # @final exists in 3.8+, but we backport it for all versions
291 # before 3.11 to keep support for the __final__ attribute.
292 # See https://bugs.python.org/issue46342
293 def final(f):
294 """This decorator can be used to indicate to type checkers that
295 the decorated method cannot be overridden, and decorated class
296 cannot be subclassed. For example:
297
298 class Base:
299 @final
300 def done(self) -> None:
301 ...
302 class Sub(Base):
303 def done(self) -> None: # Error reported by type checker
304 ...
305 @final
306 class Leaf:
307 ...
308 class Other(Leaf): # Error reported by type checker
309 ...
310
311 There is no runtime checking of these properties. The decorator
312 sets the ``__final__`` attribute to ``True`` on the decorated object
313 to allow runtime introspection.
314 """
315 try:
316 f.__final__ = True
317 except (AttributeError, TypeError):
318 # Skip the attribute silently if it is not writable.
319 # AttributeError happens if the object has __slots__ or a
320 # read-only property, TypeError if it's a builtin class.
321 pass
322 return f
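
# A small sketch of the runtime effect (comment only; ``done`` is an
# illustrative name): the backport merely records the marker for
# introspection, as typing.final does on 3.11+::
#
#     @final
#     def done() -> None: ...
#
#     assert done.__final__ is True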
323
324
325if hasattr(typing, "disjoint_base"): # 3.15
326 disjoint_base = typing.disjoint_base
327else:
328 def disjoint_base(cls):
329 """This decorator marks a class as a disjoint base.
330
331 Child classes of a disjoint base cannot inherit from other disjoint bases that are
332 not parent classes of the disjoint base.
333
334 For example:
335
336 @disjoint_base
337 class Disjoint1: pass
338
339 @disjoint_base
340 class Disjoint2: pass
341
342 class Disjoint3(Disjoint1, Disjoint2): pass # Type checker error
343
344 Type checkers can use knowledge of disjoint bases to detect unreachable code
345 and determine when two types can overlap.
346
347 See PEP 800."""
348 cls.__disjoint_base__ = True
349 return cls
350
351
352def IntVar(name):
353 return typing.TypeVar(name)
354
355
356# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8
357# Breakpoint: https://github.com/python/cpython/pull/29334
358if sys.version_info >= (3, 10, 1):
359 Literal = typing.Literal
360else:
361 def _flatten_literal_params(parameters):
362 """An internal helper for Literal creation: flatten Literals among parameters"""
363 params = []
364 for p in parameters:
365 if isinstance(p, _LiteralGenericAlias):
366 params.extend(p.__args__)
367 else:
368 params.append(p)
369 return tuple(params)
370
371 def _value_and_type_iter(params):
372 for p in params:
373 yield p, type(p)
374
375 class _LiteralGenericAlias(typing._GenericAlias, _root=True):
376 def __eq__(self, other):
377 if not isinstance(other, _LiteralGenericAlias):
378 return NotImplemented
379 these_args_deduped = set(_value_and_type_iter(self.__args__))
380 other_args_deduped = set(_value_and_type_iter(other.__args__))
381 return these_args_deduped == other_args_deduped
382
383 def __hash__(self):
384 return hash(frozenset(_value_and_type_iter(self.__args__)))
385
386 class _LiteralForm(_ExtensionsSpecialForm, _root=True):
387 def __init__(self, doc: str):
388 self._name = 'Literal'
389 self._doc = self.__doc__ = doc
390
391 def __getitem__(self, parameters):
392 if not isinstance(parameters, tuple):
393 parameters = (parameters,)
394
395 parameters = _flatten_literal_params(parameters)
396
397 val_type_pairs = list(_value_and_type_iter(parameters))
398 try:
399 deduped_pairs = set(val_type_pairs)
400 except TypeError:
401 # unhashable parameters
402 pass
403 else:
404 # similar logic to typing._deduplicate on Python 3.9+
405 if len(deduped_pairs) < len(val_type_pairs):
406 new_parameters = []
407 for pair in val_type_pairs:
408 if pair in deduped_pairs:
409 new_parameters.append(pair[0])
410 deduped_pairs.remove(pair)
411 assert not deduped_pairs, deduped_pairs
412 parameters = tuple(new_parameters)
413
414 return _LiteralGenericAlias(self, parameters)
415
416 Literal = _LiteralForm(doc="""\
417 A type that can be used to indicate to type checkers
418 that the corresponding value has a value literally equivalent
419 to the provided parameter. For example:
420
421 var: Literal[4] = 4
422
423 The type checker understands that 'var' is literally equal to
424 the value 4 and no other value.
425
426 Literal[...] cannot be subclassed. There is no runtime
427 checking verifying that the parameter is actually a value
428 instead of a type.""")
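
# A brief sketch of the behaviour this backport provides (comment only):
# arguments are de-duplicated, flattened, and compared by value *and* type,
# matching recent versions of typing.Literal::
#
#     assert Literal[1, 2, 1] == Literal[1, 2]
#     assert Literal[Literal[1, 2], 3] == Literal[1, 2, 3]
#     assert Literal[1] != Literal[True]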
429
430
431_overload_dummy = typing._overload_dummy
432
433
434if hasattr(typing, "get_overloads"): # 3.11+
435 overload = typing.overload
436 get_overloads = typing.get_overloads
437 clear_overloads = typing.clear_overloads
438else:
439 # {module: {qualname: {firstlineno: func}}}
440 _overload_registry = collections.defaultdict(
441 functools.partial(collections.defaultdict, dict)
442 )
443
444 def overload(func):
445 """Decorator for overloaded functions/methods.
446
447 In a stub file, place two or more stub definitions for the same
448 function in a row, each decorated with @overload. For example:
449
450 @overload
451 def utf8(value: None) -> None: ...
452 @overload
453 def utf8(value: bytes) -> bytes: ...
454 @overload
455 def utf8(value: str) -> bytes: ...
456
457 In a non-stub file (i.e. a regular .py file), do the same but
458 follow it with an implementation. The implementation should *not*
459 be decorated with @overload. For example:
460
461 @overload
462 def utf8(value: None) -> None: ...
463 @overload
464 def utf8(value: bytes) -> bytes: ...
465 @overload
466 def utf8(value: str) -> bytes: ...
467 def utf8(value):
468 # implementation goes here
469
470 The overloads for a function can be retrieved at runtime using the
471 get_overloads() function.
472 """
473 # classmethod and staticmethod
474 f = getattr(func, "__func__", func)
475 try:
476 _overload_registry[f.__module__][f.__qualname__][
477 f.__code__.co_firstlineno
478 ] = func
479 except AttributeError:
480 # Not a normal function; ignore.
481 pass
482 return _overload_dummy
483
484 def get_overloads(func):
485 """Return all defined overloads for *func* as a sequence."""
486 # classmethod and staticmethod
487 f = getattr(func, "__func__", func)
488 if f.__module__ not in _overload_registry:
489 return []
490 mod_dict = _overload_registry[f.__module__]
491 if f.__qualname__ not in mod_dict:
492 return []
493 return list(mod_dict[f.__qualname__].values())
494
495 def clear_overloads():
496 """Clear all overloads in the registry."""
497 _overload_registry.clear()
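
# A minimal usage sketch (comment only; ``utf8`` is an illustrative name):
# overloads recorded in the registry above can be introspected and cleared
# at runtime::
#
#     @overload
#     def utf8(value: None) -> None: ...
#     @overload
#     def utf8(value: str) -> bytes: ...
#     def utf8(value): ...
#
#     assert len(get_overloads(utf8)) == 2
#     clear_overloads()
#     assert get_overloads(utf8) == []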
498
499
500# This is not a real generic class. Don't use outside annotations.
501Type = typing.Type
502
503# Various ABCs mimicking those in collections.abc.
504# A few are simply re-exported for completeness.
505Awaitable = typing.Awaitable
506Coroutine = typing.Coroutine
507AsyncIterable = typing.AsyncIterable
508AsyncIterator = typing.AsyncIterator
509Deque = typing.Deque
510DefaultDict = typing.DefaultDict
511OrderedDict = typing.OrderedDict
512Counter = typing.Counter
513ChainMap = typing.ChainMap
514Text = typing.Text
515TYPE_CHECKING = typing.TYPE_CHECKING
516
517
518# Breakpoint: https://github.com/python/cpython/pull/118681
519if sys.version_info >= (3, 13, 0, "beta"):
520 from typing import AsyncContextManager, AsyncGenerator, ContextManager, Generator
521else:
522 def _is_dunder(attr):
523 return attr.startswith('__') and attr.endswith('__')
524
525
526 class _SpecialGenericAlias(typing._SpecialGenericAlias, _root=True):
527 def __init__(self, origin, nparams, *, inst=True, name=None, defaults=()):
528 super().__init__(origin, nparams, inst=inst, name=name)
529 self._defaults = defaults
530
531 def __setattr__(self, attr, val):
532 allowed_attrs = {'_name', '_inst', '_nparams', '_defaults'}
533 if _is_dunder(attr) or attr in allowed_attrs:
534 object.__setattr__(self, attr, val)
535 else:
536 setattr(self.__origin__, attr, val)
537
538 @typing._tp_cache
539 def __getitem__(self, params):
540 if not isinstance(params, tuple):
541 params = (params,)
542 msg = "Parameters to generic types must be types."
543 params = tuple(typing._type_check(p, msg) for p in params)
544 if (
545 self._defaults
546 and len(params) < self._nparams
547 and len(params) + len(self._defaults) >= self._nparams
548 ):
549 params = (*params, *self._defaults[len(params) - self._nparams:])
550 actual_len = len(params)
551
552 if actual_len != self._nparams:
553 if self._defaults:
554 expected = f"at least {self._nparams - len(self._defaults)}"
555 else:
556 expected = str(self._nparams)
557 if not self._nparams:
558 raise TypeError(f"{self} is not a generic class")
559 raise TypeError(
560 f"Too {'many' if actual_len > self._nparams else 'few'}"
561 f" arguments for {self};"
562 f" actual {actual_len}, expected {expected}"
563 )
564 return self.copy_with(params)
565
566 _NoneType = type(None)
567 Generator = _SpecialGenericAlias(
568 collections.abc.Generator, 3, defaults=(_NoneType, _NoneType)
569 )
570 AsyncGenerator = _SpecialGenericAlias(
571 collections.abc.AsyncGenerator, 2, defaults=(_NoneType,)
572 )
573 ContextManager = _SpecialGenericAlias(
574 contextlib.AbstractContextManager,
575 2,
576 name="ContextManager",
577 defaults=(typing.Optional[bool],)
578 )
579 AsyncContextManager = _SpecialGenericAlias(
580 contextlib.AbstractAsyncContextManager,
581 2,
582 name="AsyncContextManager",
583 defaults=(typing.Optional[bool],)
584 )
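
# A quick sketch of what the defaults above provide (comment only): omitted
# trailing parameters are filled in, mirroring the 3.13+ behaviour::
#
#     assert get_args(Generator[int]) == (int, type(None), type(None))
#     assert get_args(AsyncGenerator[int]) == (int, type(None))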
585
586
587_PROTO_ALLOWLIST = {
588 'collections.abc': [
589 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
590 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer',
591 ],
592 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'],
593 'typing_extensions': ['Buffer'],
594}
595
596
597_EXCLUDED_ATTRS = frozenset(typing.EXCLUDED_ATTRIBUTES) | {
598 "__match_args__", "__protocol_attrs__", "__non_callable_proto_members__",
599 "__final__",
600}
601
602
603def _get_protocol_attrs(cls):
604 attrs = set()
605 for base in cls.__mro__[:-1]: # without object
606 if base.__name__ in {'Protocol', 'Generic'}:
607 continue
608 annotations = getattr(base, '__annotations__', {})
609 for attr in (*base.__dict__, *annotations):
610 if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS):
611 attrs.add(attr)
612 return attrs
613
614
615def _caller(depth=1, default='__main__'):
616 try:
617 return sys._getframemodulename(depth + 1) or default
618 except AttributeError: # For platforms without _getframemodulename()
619 pass
620 try:
621 return sys._getframe(depth + 1).f_globals.get('__name__', default)
622 except (AttributeError, ValueError): # For platforms without _getframe()
623 pass
624 return None
625
626
627# `__match_args__` attribute was removed from protocol members in 3.13,
628# we want to backport this change to older Python versions.
629# Breakpoint: https://github.com/python/cpython/pull/110683
630if sys.version_info >= (3, 13):
631 Protocol = typing.Protocol
632else:
633 def _allow_reckless_class_checks(depth=2):
634 """Allow instance and class checks for special stdlib modules.
635 The abc and functools modules indiscriminately call isinstance() and
636 issubclass() on the whole MRO of a user class, which may contain protocols.
637 """
638 return _caller(depth) in {'abc', 'functools', None}
639
640 def _no_init(self, *args, **kwargs):
641 if type(self)._is_protocol:
642 raise TypeError('Protocols cannot be instantiated')
643
644 def _type_check_issubclass_arg_1(arg):
645 """Raise TypeError if `arg` is not an instance of `type`
646 in `issubclass(arg, <protocol>)`.
647
648 In most cases, this is verified by type.__subclasscheck__.
649 Checking it again unnecessarily would slow down issubclass() checks,
650 so, we don't perform this check unless we absolutely have to.
651
652 For various error paths, however,
653 we want to ensure that *this* error message is shown to the user
654 where relevant, rather than a typing.py-specific error message.
655 """
656 if not isinstance(arg, type):
657 # Same error message as for issubclass(1, int).
658 raise TypeError('issubclass() arg 1 must be a class')
659
660 # Inheriting from typing._ProtocolMeta isn't actually desirable,
661 # but is necessary to allow typing.Protocol and typing_extensions.Protocol
662 # to mix without getting TypeErrors about "metaclass conflict"
663 class _ProtocolMeta(type(typing.Protocol)):
664 # This metaclass is somewhat unfortunate,
665 # but is necessary for several reasons...
666 #
667 # NOTE: DO NOT call super() in any methods in this class
668 # That would call the methods on typing._ProtocolMeta on Python <=3.11
669 # and those are slow
670 def __new__(mcls, name, bases, namespace, **kwargs):
671 if name == "Protocol" and len(bases) < 2:
672 pass
673 elif {Protocol, typing.Protocol} & set(bases):
674 for base in bases:
675 if not (
676 base in {object, typing.Generic, Protocol, typing.Protocol}
677 or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, [])
678 or is_protocol(base)
679 ):
680 raise TypeError(
681 f"Protocols can only inherit from other protocols, "
682 f"got {base!r}"
683 )
684 return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs)
685
686 def __init__(cls, *args, **kwargs):
687 abc.ABCMeta.__init__(cls, *args, **kwargs)
688 if getattr(cls, "_is_protocol", False):
689 cls.__protocol_attrs__ = _get_protocol_attrs(cls)
690
691 def __subclasscheck__(cls, other):
692 if cls is Protocol:
693 return type.__subclasscheck__(cls, other)
694 if (
695 getattr(cls, '_is_protocol', False)
696 and not _allow_reckless_class_checks()
697 ):
698 if not getattr(cls, '_is_runtime_protocol', False):
699 _type_check_issubclass_arg_1(other)
700 raise TypeError(
701 "Instance and class checks can only be used with "
702 "@runtime_checkable protocols"
703 )
704 if (
705 # this attribute is set by @runtime_checkable:
706 cls.__non_callable_proto_members__
707 and cls.__dict__.get("__subclasshook__") is _proto_hook
708 ):
709 _type_check_issubclass_arg_1(other)
710 non_method_attrs = sorted(cls.__non_callable_proto_members__)
711 raise TypeError(
712 "Protocols with non-method members don't support issubclass()."
713 f" Non-method members: {str(non_method_attrs)[1:-1]}."
714 )
715 return abc.ABCMeta.__subclasscheck__(cls, other)
716
717 def __instancecheck__(cls, instance):
718 # We need this method for situations where attributes are
719 # assigned in __init__.
720 if cls is Protocol:
721 return type.__instancecheck__(cls, instance)
722 if not getattr(cls, "_is_protocol", False):
723 # i.e., it's a concrete subclass of a protocol
724 return abc.ABCMeta.__instancecheck__(cls, instance)
725
726 if (
727 not getattr(cls, '_is_runtime_protocol', False) and
728 not _allow_reckless_class_checks()
729 ):
730 raise TypeError("Instance and class checks can only be used with"
731 " @runtime_checkable protocols")
732
733 if abc.ABCMeta.__instancecheck__(cls, instance):
734 return True
735
736 for attr in cls.__protocol_attrs__:
737 try:
738 val = inspect.getattr_static(instance, attr)
739 except AttributeError:
740 break
741 # this attribute is set by @runtime_checkable:
742 if val is None and attr not in cls.__non_callable_proto_members__:
743 break
744 else:
745 return True
746
747 return False
748
749 def __eq__(cls, other):
750 # Hack so that typing.Generic.__class_getitem__
751 # treats typing_extensions.Protocol
752 # as equivalent to typing.Protocol
753 if abc.ABCMeta.__eq__(cls, other) is True:
754 return True
755 return cls is Protocol and other is typing.Protocol
756
757 # This has to be defined, or the abc-module cache
758 # complains about classes with this metaclass being unhashable,
759 # if we define only __eq__!
760 def __hash__(cls) -> int:
761 return type.__hash__(cls)
762
763 @classmethod
764 def _proto_hook(cls, other):
765 if not cls.__dict__.get('_is_protocol', False):
766 return NotImplemented
767
768 for attr in cls.__protocol_attrs__:
769 for base in other.__mro__:
                # Check if the member appears in the class dictionary...
771 if attr in base.__dict__:
772 if base.__dict__[attr] is None:
773 return NotImplemented
774 break
775
776 # ...or in annotations, if it is a sub-protocol.
777 annotations = getattr(base, '__annotations__', {})
778 if (
779 isinstance(annotations, collections.abc.Mapping)
780 and attr in annotations
781 and is_protocol(other)
782 ):
783 break
784 else:
785 return NotImplemented
786 return True
787
788 class Protocol(typing.Generic, metaclass=_ProtocolMeta):
789 __doc__ = typing.Protocol.__doc__
790 __slots__ = ()
791 _is_protocol = True
792 _is_runtime_protocol = False
793
794 def __init_subclass__(cls, *args, **kwargs):
795 super().__init_subclass__(*args, **kwargs)
796
797 # Determine if this is a protocol or a concrete subclass.
798 if not cls.__dict__.get('_is_protocol', False):
799 cls._is_protocol = any(b is Protocol for b in cls.__bases__)
800
801 # Set (or override) the protocol subclass hook.
802 if '__subclasshook__' not in cls.__dict__:
803 cls.__subclasshook__ = _proto_hook
804
805 # Prohibit instantiation for protocol classes
806 if cls._is_protocol and cls.__init__ is Protocol.__init__:
807 cls.__init__ = _no_init
808
809
810# Breakpoint: https://github.com/python/cpython/pull/113401
811if sys.version_info >= (3, 13):
812 runtime_checkable = typing.runtime_checkable
813else:
814 def runtime_checkable(cls):
815 """Mark a protocol class as a runtime protocol.
816
817 Such protocol can be used with isinstance() and issubclass().
818 Raise TypeError if applied to a non-protocol class.
819 This allows a simple-minded structural check very similar to
        one-trick ponies in collections.abc such as Iterable.
821
822 For example::
823
824 @runtime_checkable
825 class Closable(Protocol):
826 def close(self): ...
827
828 assert isinstance(open('/some/file'), Closable)
829
830 Warning: this will check only the presence of the required methods,
831 not their type signatures!
832 """
833 if not issubclass(cls, typing.Generic) or not getattr(cls, '_is_protocol', False):
            raise TypeError(f'@runtime_checkable can only be applied to protocol classes,'
835 f' got {cls!r}')
836 cls._is_runtime_protocol = True
837
838 # typing.Protocol classes on <=3.11 break if we execute this block,
839 # because typing.Protocol classes on <=3.11 don't have a
840 # `__protocol_attrs__` attribute, and this block relies on the
841 # `__protocol_attrs__` attribute. Meanwhile, typing.Protocol classes on 3.12.2+
842 # break if we *don't* execute this block, because *they* assume that all
843 # protocol classes have a `__non_callable_proto_members__` attribute
844 # (which this block sets)
845 if isinstance(cls, _ProtocolMeta) or sys.version_info >= (3, 12, 2):
846 # PEP 544 prohibits using issubclass()
847 # with protocols that have non-method members.
848 # See gh-113320 for why we compute this attribute here,
849 # rather than in `_ProtocolMeta.__init__`
850 cls.__non_callable_proto_members__ = set()
851 for attr in cls.__protocol_attrs__:
852 try:
853 is_callable = callable(getattr(cls, attr, None))
854 except Exception as e:
855 raise TypeError(
856 f"Failed to determine whether protocol member {attr!r} "
857 "is a method member"
858 ) from e
859 else:
860 if not is_callable:
861 cls.__non_callable_proto_members__.add(attr)
862
863 return cls
864
865
866# The "runtime" alias exists for backwards compatibility.
867runtime = runtime_checkable
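
# A minimal usage sketch (comment only; ``HasName``/``Named`` are illustrative
# names): runtime-checkable protocols support structural isinstance() checks,
# while issubclass() is rejected for protocols with non-method members::
#
#     @runtime_checkable
#     class HasName(Protocol):
#         name: str
#
#     class Named:
#         name = "spam"
#
#     assert isinstance(Named(), HasName)
#     issubclass(Named, HasName)   # raises TypeError (non-method member)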
868
869
870# Our version of runtime-checkable protocols is faster on Python <=3.11
871# Breakpoint: https://github.com/python/cpython/pull/112717
872if sys.version_info >= (3, 12):
873 SupportsInt = typing.SupportsInt
874 SupportsFloat = typing.SupportsFloat
875 SupportsComplex = typing.SupportsComplex
876 SupportsBytes = typing.SupportsBytes
877 SupportsIndex = typing.SupportsIndex
878 SupportsAbs = typing.SupportsAbs
879 SupportsRound = typing.SupportsRound
880else:
881 @runtime_checkable
882 class SupportsInt(Protocol):
883 """An ABC with one abstract method __int__."""
884 __slots__ = ()
885
886 @abc.abstractmethod
887 def __int__(self) -> int:
888 pass
889
890 @runtime_checkable
891 class SupportsFloat(Protocol):
892 """An ABC with one abstract method __float__."""
893 __slots__ = ()
894
895 @abc.abstractmethod
896 def __float__(self) -> float:
897 pass
898
899 @runtime_checkable
900 class SupportsComplex(Protocol):
901 """An ABC with one abstract method __complex__."""
902 __slots__ = ()
903
904 @abc.abstractmethod
905 def __complex__(self) -> complex:
906 pass
907
908 @runtime_checkable
909 class SupportsBytes(Protocol):
910 """An ABC with one abstract method __bytes__."""
911 __slots__ = ()
912
913 @abc.abstractmethod
914 def __bytes__(self) -> bytes:
915 pass
916
917 @runtime_checkable
918 class SupportsIndex(Protocol):
919 __slots__ = ()
920
921 @abc.abstractmethod
922 def __index__(self) -> int:
923 pass
924
925 @runtime_checkable
926 class SupportsAbs(Protocol[T_co]):
927 """
928 An ABC with one abstract method __abs__ that is covariant in its return type.
929 """
930 __slots__ = ()
931
932 @abc.abstractmethod
933 def __abs__(self) -> T_co:
934 pass
935
936 @runtime_checkable
937 class SupportsRound(Protocol[T_co]):
938 """
939 An ABC with one abstract method __round__ that is covariant in its return type.
940 """
941 __slots__ = ()
942
943 @abc.abstractmethod
944 def __round__(self, ndigits: int = 0) -> T_co:
945 pass
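
# A small sketch (comment only): these protocols are runtime-checkable, so
# the checks below are purely structural::
#
#     assert isinstance(3.14, SupportsInt)        # float defines __int__
#     assert not isinstance("3.14", SupportsInt)  # str does not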
946
947
948if hasattr(io, "Reader") and hasattr(io, "Writer"):
949 Reader = io.Reader
950 Writer = io.Writer
951else:
952 @runtime_checkable
953 class Reader(Protocol[T_co]):
954 """Protocol for simple I/O reader instances.
955
956 This protocol only supports blocking I/O.
957 """
958
959 __slots__ = ()
960
961 @abc.abstractmethod
962 def read(self, size: int = ..., /) -> T_co:
963 """Read data from the input stream and return it.
964
965 If *size* is specified, at most *size* items (bytes/characters) will be
966 read.
967 """
968
969 @runtime_checkable
970 class Writer(Protocol[T_contra]):
971 """Protocol for simple I/O writer instances.
972
973 This protocol only supports blocking I/O.
974 """
975
976 __slots__ = ()
977
978 @abc.abstractmethod
979 def write(self, data: T_contra, /) -> int:
980 """Write *data* to the output stream and return the number of items written.""" # noqa: E501
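
# A small sketch (comment only): standard binary streams satisfy the
# protocols defined above structurally::
#
#     import io
#     buf = io.BytesIO()
#     assert isinstance(buf, Reader)
#     assert isinstance(buf, Writer)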
981
982
983_NEEDS_SINGLETONMETA = (
984 not hasattr(typing, "NoDefault") or not hasattr(typing, "NoExtraItems")
985)
986
987if _NEEDS_SINGLETONMETA:
988 class SingletonMeta(type):
989 def __setattr__(cls, attr, value):
990 # TypeError is consistent with the behavior of NoneType
991 raise TypeError(
992 f"cannot set {attr!r} attribute of immutable type {cls.__name__!r}"
993 )
994
995
996if hasattr(typing, "NoDefault"):
997 NoDefault = typing.NoDefault
998else:
999 class NoDefaultType(metaclass=SingletonMeta):
1000 """The type of the NoDefault singleton."""
1001
1002 __slots__ = ()
1003
1004 def __new__(cls):
1005 return globals().get("NoDefault") or object.__new__(cls)
1006
1007 def __repr__(self):
1008 return "typing_extensions.NoDefault"
1009
1010 def __reduce__(self):
1011 return "NoDefault"
1012
1013 NoDefault = NoDefaultType()
1014 del NoDefaultType
1015
1016if hasattr(typing, "NoExtraItems"):
1017 NoExtraItems = typing.NoExtraItems
1018else:
1019 class NoExtraItemsType(metaclass=SingletonMeta):
1020 """The type of the NoExtraItems singleton."""
1021
1022 __slots__ = ()
1023
1024 def __new__(cls):
1025 return globals().get("NoExtraItems") or object.__new__(cls)
1026
1027 def __repr__(self):
1028 return "typing_extensions.NoExtraItems"
1029
1030 def __reduce__(self):
1031 return "NoExtraItems"
1032
1033 NoExtraItems = NoExtraItemsType()
1034 del NoExtraItemsType
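
# A quick sketch (comment only): the backported sentinels are singletons whose
# copies and pickles resolve back to the same object via __reduce__ above::
#
#     import copy, pickle
#     assert copy.copy(NoDefault) is NoDefault
#     assert pickle.loads(pickle.dumps(NoExtraItems)) is NoExtraItems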
1035
1036if _NEEDS_SINGLETONMETA:
1037 del SingletonMeta
1038
1039
1040# Update this to something like >=3.13.0b1 if and when
1041# PEP 728 is implemented in CPython
1042_PEP_728_IMPLEMENTED = False
1043
1044if _PEP_728_IMPLEMENTED:
1045 # The standard library TypedDict in Python 3.9.0/1 does not honour the "total"
1046 # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059
1047 # The standard library TypedDict below Python 3.11 does not store runtime
1048 # information about optional and required keys when using Required or NotRequired.
1049 # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11.
1050 # Aaaand on 3.12 we add __orig_bases__ to TypedDict
1051 # to enable better runtime introspection.
1052 # On 3.13 we deprecate some odd ways of creating TypedDicts.
1053 # Also on 3.13, PEP 705 adds the ReadOnly[] qualifier.
1054 # PEP 728 (still pending) makes more changes.
1055 TypedDict = typing.TypedDict
1056 _TypedDictMeta = typing._TypedDictMeta
1057 is_typeddict = typing.is_typeddict
1058else:
1059 # 3.10.0 and later
1060 _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters
1061
1062 def _get_typeddict_qualifiers(annotation_type):
1063 while True:
1064 annotation_origin = get_origin(annotation_type)
1065 if annotation_origin is Annotated:
1066 annotation_args = get_args(annotation_type)
1067 if annotation_args:
1068 annotation_type = annotation_args[0]
1069 else:
1070 break
1071 elif annotation_origin is Required:
1072 yield Required
1073 annotation_type, = get_args(annotation_type)
1074 elif annotation_origin is NotRequired:
1075 yield NotRequired
1076 annotation_type, = get_args(annotation_type)
1077 elif annotation_origin is ReadOnly:
1078 yield ReadOnly
1079 annotation_type, = get_args(annotation_type)
1080 else:
1081 break
1082
1083 class _TypedDictMeta(type):
1084
1085 def __new__(cls, name, bases, ns, *, total=True, closed=None,
1086 extra_items=NoExtraItems):
1087 """Create new typed dict class object.
1088
1089 This method is called when TypedDict is subclassed,
1090 or when TypedDict is instantiated. This way
1091 TypedDict supports all three syntax forms described in its docstring.
1092 Subclasses and instances of TypedDict return actual dictionaries.
1093 """
1094 for base in bases:
1095 if type(base) is not _TypedDictMeta and base is not typing.Generic:
1096 raise TypeError('cannot inherit from both a TypedDict type '
1097 'and a non-TypedDict base class')
1098 if closed is not None and extra_items is not NoExtraItems:
1099 raise TypeError(f"Cannot combine closed={closed!r} and extra_items")
1100
1101 if any(issubclass(b, typing.Generic) for b in bases):
1102 generic_base = (typing.Generic,)
1103 else:
1104 generic_base = ()
1105
1106 ns_annotations = ns.pop('__annotations__', None)
1107
1108 # typing.py generally doesn't let you inherit from plain Generic, unless
1109 # the name of the class happens to be "Protocol"
1110 tp_dict = type.__new__(_TypedDictMeta, "Protocol", (*generic_base, dict), ns)
1111 tp_dict.__name__ = name
1112 if tp_dict.__qualname__ == "Protocol":
1113 tp_dict.__qualname__ = name
1114
1115 if not hasattr(tp_dict, '__orig_bases__'):
1116 tp_dict.__orig_bases__ = bases
1117
1118 annotations = {}
1119 own_annotate = None
1120 if ns_annotations is not None:
1121 own_annotations = ns_annotations
1122 elif sys.version_info >= (3, 14):
1123 if hasattr(annotationlib, "get_annotate_from_class_namespace"):
1124 own_annotate = annotationlib.get_annotate_from_class_namespace(ns)
1125 else:
1126 # 3.14.0a7 and earlier
1127 own_annotate = ns.get("__annotate__")
1128 if own_annotate is not None:
1129 own_annotations = annotationlib.call_annotate_function(
1130 own_annotate, Format.FORWARDREF, owner=tp_dict
1131 )
1132 else:
1133 own_annotations = {}
1134 else:
1135 own_annotations = {}
1136 msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
1137 if _TAKES_MODULE:
1138 own_checked_annotations = {
1139 n: typing._type_check(tp, msg, module=tp_dict.__module__)
1140 for n, tp in own_annotations.items()
1141 }
1142 else:
1143 own_checked_annotations = {
1144 n: typing._type_check(tp, msg)
1145 for n, tp in own_annotations.items()
1146 }
1147 required_keys = set()
1148 optional_keys = set()
1149 readonly_keys = set()
1150 mutable_keys = set()
1151 extra_items_type = extra_items
1152
1153 for base in bases:
1154 base_dict = base.__dict__
1155
1156 if sys.version_info <= (3, 14):
1157 annotations.update(base_dict.get('__annotations__', {}))
1158 required_keys.update(base_dict.get('__required_keys__', ()))
1159 optional_keys.update(base_dict.get('__optional_keys__', ()))
1160 readonly_keys.update(base_dict.get('__readonly_keys__', ()))
1161 mutable_keys.update(base_dict.get('__mutable_keys__', ()))
1162
1163 # This was specified in an earlier version of PEP 728. Support
1164 # is retained for backwards compatibility, but only for Python
1165 # 3.13 and lower.
1166 if (closed and sys.version_info < (3, 14)
1167 and "__extra_items__" in own_checked_annotations):
1168 annotation_type = own_checked_annotations.pop("__extra_items__")
1169 qualifiers = set(_get_typeddict_qualifiers(annotation_type))
1170 if Required in qualifiers:
1171 raise TypeError(
1172 "Special key __extra_items__ does not support "
1173 "Required"
1174 )
1175 if NotRequired in qualifiers:
1176 raise TypeError(
1177 "Special key __extra_items__ does not support "
1178 "NotRequired"
1179 )
1180 extra_items_type = annotation_type
1181
1182 annotations.update(own_checked_annotations)
1183 for annotation_key, annotation_type in own_checked_annotations.items():
1184 qualifiers = set(_get_typeddict_qualifiers(annotation_type))
1185
1186 if Required in qualifiers:
1187 required_keys.add(annotation_key)
1188 elif NotRequired in qualifiers:
1189 optional_keys.add(annotation_key)
1190 elif total:
1191 required_keys.add(annotation_key)
1192 else:
1193 optional_keys.add(annotation_key)
1194 if ReadOnly in qualifiers:
1195 mutable_keys.discard(annotation_key)
1196 readonly_keys.add(annotation_key)
1197 else:
1198 mutable_keys.add(annotation_key)
1199 readonly_keys.discard(annotation_key)
1200
1201 # Breakpoint: https://github.com/python/cpython/pull/119891
1202 if sys.version_info >= (3, 14):
1203 def __annotate__(format):
1204 annos = {}
1205 for base in bases:
1206 if base is Generic:
1207 continue
1208 base_annotate = base.__annotate__
1209 if base_annotate is None:
1210 continue
1211 base_annos = annotationlib.call_annotate_function(
1212 base_annotate, format, owner=base)
1213 annos.update(base_annos)
1214 if own_annotate is not None:
1215 own = annotationlib.call_annotate_function(
1216 own_annotate, format, owner=tp_dict)
1217 if format != Format.STRING:
1218 own = {
1219 n: typing._type_check(tp, msg, module=tp_dict.__module__)
1220 for n, tp in own.items()
1221 }
1222 elif format == Format.STRING:
1223 own = annotationlib.annotations_to_string(own_annotations)
1224 elif format in (Format.FORWARDREF, Format.VALUE):
1225 own = own_checked_annotations
1226 else:
1227 raise NotImplementedError(format)
1228 annos.update(own)
1229 return annos
1230
1231 tp_dict.__annotate__ = __annotate__
1232 else:
1233 tp_dict.__annotations__ = annotations
1234 tp_dict.__required_keys__ = frozenset(required_keys)
1235 tp_dict.__optional_keys__ = frozenset(optional_keys)
1236 tp_dict.__readonly_keys__ = frozenset(readonly_keys)
1237 tp_dict.__mutable_keys__ = frozenset(mutable_keys)
1238 tp_dict.__total__ = total
1239 tp_dict.__closed__ = closed
1240 tp_dict.__extra_items__ = extra_items_type
1241 return tp_dict
1242
1243 __call__ = dict # static method
1244
1245 def __subclasscheck__(cls, other):
1246 # Typed dicts are only for static structural subtyping.
1247 raise TypeError('TypedDict does not support instance and class checks')
1248
1249 __instancecheck__ = __subclasscheck__
1250
1251 _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
1252
1253 def _create_typeddict(
1254 typename,
1255 fields,
1256 /,
1257 *,
1258 typing_is_inline,
1259 total,
1260 closed,
1261 extra_items,
1262 **kwargs,
1263 ):
1264 if fields is _marker or fields is None:
1265 if fields is _marker:
1266 deprecated_thing = (
1267 "Failing to pass a value for the 'fields' parameter"
1268 )
1269 else:
1270 deprecated_thing = "Passing `None` as the 'fields' parameter"
1271
1272 example = f"`{typename} = TypedDict({typename!r}, {{}})`"
1273 deprecation_msg = (
1274 f"{deprecated_thing} is deprecated and will be disallowed in "
1275 "Python 3.15. To create a TypedDict class with 0 fields "
1276 "using the functional syntax, pass an empty dictionary, e.g. "
1277 ) + example + "."
1278 warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2)
1279 # Support a field called "closed"
1280 if closed is not False and closed is not True and closed is not None:
1281 kwargs["closed"] = closed
1282 closed = None
1283 # Or "extra_items"
1284 if extra_items is not NoExtraItems:
1285 kwargs["extra_items"] = extra_items
1286 extra_items = NoExtraItems
1287 fields = kwargs
1288 elif kwargs:
1289 raise TypeError("TypedDict takes either a dict or keyword arguments,"
1290 " but not both")
1291 if kwargs:
1292 # Breakpoint: https://github.com/python/cpython/pull/104891
1293 if sys.version_info >= (3, 13):
1294 raise TypeError("TypedDict takes no keyword arguments")
1295 warnings.warn(
1296 "The kwargs-based syntax for TypedDict definitions is deprecated "
1297 "in Python 3.11, will be removed in Python 3.13, and may not be "
1298 "understood by third-party type checkers.",
1299 DeprecationWarning,
1300 stacklevel=2,
1301 )
1302
1303 ns = {'__annotations__': dict(fields)}
1304 module = _caller(depth=4 if typing_is_inline else 2)
1305 if module is not None:
1306 # Setting correct module is necessary to make typed dict classes
1307 # pickleable.
1308 ns['__module__'] = module
1309
1310 td = _TypedDictMeta(typename, (), ns, total=total, closed=closed,
1311 extra_items=extra_items)
1312 td.__orig_bases__ = (TypedDict,)
1313 return td
1314
1315 class _TypedDictSpecialForm(_SpecialForm, _root=True):
1316 def __call__(
1317 self,
1318 typename,
1319 fields=_marker,
1320 /,
1321 *,
1322 total=True,
1323 closed=None,
1324 extra_items=NoExtraItems,
1325 **kwargs
1326 ):
1327 return _create_typeddict(
1328 typename,
1329 fields,
1330 typing_is_inline=False,
1331 total=total,
1332 closed=closed,
1333 extra_items=extra_items,
1334 **kwargs,
1335 )
1336
1337 def __mro_entries__(self, bases):
1338 return (_TypedDict,)
1339
1340 @_TypedDictSpecialForm
1341 def TypedDict(self, args):
1342 """A simple typed namespace. At runtime it is equivalent to a plain dict.
1343
1344 TypedDict creates a dictionary type such that a type checker will expect all
1345 instances to have a certain set of keys, where each key is
1346 associated with a value of a consistent type. This expectation
1347 is not checked at runtime.
1348
1349 Usage::
1350
1351 class Point2D(TypedDict):
1352 x: int
1353 y: int
1354 label: str
1355
1356 a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK
1357 b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check
1358
1359 assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
1360
1361 The type info can be accessed via the Point2D.__annotations__ dict, and
1362 the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
1363 TypedDict supports an additional equivalent form::
1364
1365 Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
1366
1367 By default, all keys must be present in a TypedDict. It is possible
1368 to override this by specifying totality::
1369
1370 class Point2D(TypedDict, total=False):
1371 x: int
1372 y: int
1373
1374 This means that a Point2D TypedDict can have any of the keys omitted. A type
1375 checker is only expected to support a literal False or True as the value of
1376 the total argument. True is the default, and makes all items defined in the
1377 class body be required.
1378
1379 The Required and NotRequired special forms can also be used to mark
1380 individual keys as being required or not required::
1381
1382 class Point2D(TypedDict):
1383 x: int # the "x" key must always be present (Required is the default)
1384 y: NotRequired[int] # the "y" key can be omitted
1385
1386 See PEP 655 for more details on Required and NotRequired.
1387 """
1388 # This runs when creating inline TypedDicts:
1389 if not isinstance(args, dict):
1390 raise TypeError(
1391 "TypedDict[...] should be used with a single dict argument"
1392 )
1393
1394 return _create_typeddict(
1395 "<inline TypedDict>",
1396 args,
1397 typing_is_inline=True,
1398 total=True,
1399 closed=True,
1400 extra_items=NoExtraItems,
1401 )
1402
1403 _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta)
1404
1405 def is_typeddict(tp):
1406 """Check if an annotation is a TypedDict class
1407
1408 For example::
1409 class Film(TypedDict):
1410 title: str
1411 year: int
1412
1413 is_typeddict(Film) # => True
1414 is_typeddict(Union[list, str]) # => False
1415 """
1416 return isinstance(tp, _TYPEDDICT_TYPES)
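
# A minimal usage sketch (comment only; ``Movie`` is an illustrative name):
# the backport records key categories for runtime introspection, combining
# ``total=False`` with the Required qualifier::
#
#     class Movie(TypedDict, total=False):
#         title: Required[str]
#         year: int
#
#     assert Movie.__required_keys__ == frozenset({'title'})
#     assert Movie.__optional_keys__ == frozenset({'year'})
#     assert is_typeddict(Movie) and not is_typeddict(dict)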
1417
1418
1419if hasattr(typing, "assert_type"):
1420 assert_type = typing.assert_type
1421
1422else:
1423 def assert_type(val, typ, /):
1424 """Assert (to the type checker) that the value is of the given type.
1425
1426 When the type checker encounters a call to assert_type(), it
1427 emits an error if the value is not of the specified type::
1428
1429 def greet(name: str) -> None:
1430 assert_type(name, str) # ok
1431 assert_type(name, int) # type checker error
1432
1433 At runtime this returns the first argument unchanged and otherwise
1434 does nothing.
1435 """
1436 return val
1437
1438
1439if hasattr(typing, "ReadOnly"): # 3.13+
1440 get_type_hints = typing.get_type_hints
else:  # <=3.12
1442 # replaces _strip_annotations()
1443 def _strip_extras(t):
1444 """Strips Annotated, Required and NotRequired from a given type."""
1445 if isinstance(t, typing._AnnotatedAlias):
1446 return _strip_extras(t.__origin__)
1447 if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired, ReadOnly):
1448 return _strip_extras(t.__args__[0])
1449 if isinstance(t, typing._GenericAlias):
1450 stripped_args = tuple(_strip_extras(a) for a in t.__args__)
1451 if stripped_args == t.__args__:
1452 return t
1453 return t.copy_with(stripped_args)
1454 if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias):
1455 stripped_args = tuple(_strip_extras(a) for a in t.__args__)
1456 if stripped_args == t.__args__:
1457 return t
1458 return _types.GenericAlias(t.__origin__, stripped_args)
1459 if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType):
1460 stripped_args = tuple(_strip_extras(a) for a in t.__args__)
1461 if stripped_args == t.__args__:
1462 return t
1463 return functools.reduce(operator.or_, stripped_args)
1464
1465 return t
1466
1467 def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
1468 """Return type hints for an object.
1469
1470 This is often the same as obj.__annotations__, but it handles
1471 forward references encoded as string literals, adds Optional[t] if a
1472 default value equal to None is set and recursively replaces all
1473 'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T'
1474 (unless 'include_extras=True').
1475
1476 The argument may be a module, class, method, or function. The annotations
1477 are returned as a dictionary. For classes, annotations include also
1478 inherited members.
1479
1480 TypeError is raised if the argument is not of a type that can contain
1481 annotations, and an empty dictionary is returned if no annotations are
1482 present.
1483
1484 BEWARE -- the behavior of globalns and localns is counterintuitive
1485 (unless you are familiar with how eval() and exec() work). The
1486 search order is locals first, then globals.
1487
1488 - If no dict arguments are passed, an attempt is made to use the
1489 globals from obj (or the respective module's globals for classes),
1490 and these are also used as the locals. If the object does not appear
1491 to have globals, an empty dictionary is used.
1492
1493 - If one dict argument is passed, it is used for both globals and
1494 locals.
1495
1496 - If two dict arguments are passed, they specify globals and
1497 locals, respectively.
1498 """
1499 hint = typing.get_type_hints(
1500 obj, globalns=globalns, localns=localns, include_extras=True
1501 )
1502 # Breakpoint: https://github.com/python/cpython/pull/30304
1503 if sys.version_info < (3, 11):
1504 _clean_optional(obj, hint, globalns, localns)
1505 if include_extras:
1506 return hint
1507 return {k: _strip_extras(t) for k, t in hint.items()}
1508
1509 _NoneType = type(None)
1510
1511 def _could_be_inserted_optional(t):
1512 """detects Union[..., None] pattern"""
1513 if not isinstance(t, typing._UnionGenericAlias):
1514 return False
        # Assume that if the last argument is not None, the Union is user-defined
1516 if t.__args__[-1] is not _NoneType:
1517 return False
1518 return True
1519
1520 # < 3.11
1521 def _clean_optional(obj, hints, globalns=None, localns=None):
1522 # reverts injected Union[..., None] cases from typing.get_type_hints
1523 # when a None default value is used.
1524 # see https://github.com/python/typing_extensions/issues/310
1525 if not hints or isinstance(obj, type):
1526 return
        defaults = typing._get_defaults(obj)  # avoid accessing __annotations__
1528 if not defaults:
1529 return
1530 original_hints = obj.__annotations__
1531 for name, value in hints.items():
            # Not a Union[..., None], or the replacement conditions are not fulfilled
1533 if (not _could_be_inserted_optional(value)
1534 or name not in defaults
1535 or defaults[name] is not None
1536 ):
1537 continue
1538 original_value = original_hints[name]
1539 # value=NoneType should have caused a skip above but check for safety
1540 if original_value is None:
1541 original_value = _NoneType
1542 # Forward reference
1543 if isinstance(original_value, str):
1544 if globalns is None:
1545 if isinstance(obj, _types.ModuleType):
1546 globalns = obj.__dict__
1547 else:
1548 nsobj = obj
1549 # Find globalns for the unwrapped object.
1550 while hasattr(nsobj, '__wrapped__'):
1551 nsobj = nsobj.__wrapped__
1552 globalns = getattr(nsobj, '__globals__', {})
1553 if localns is None:
1554 localns = globalns
1555 elif localns is None:
1556 localns = globalns
1557
1558 original_value = ForwardRef(
1559 original_value,
1560 is_argument=not isinstance(obj, _types.ModuleType)
1561 )
1562 original_evaluated = typing._eval_type(original_value, globalns, localns)
            # Compare whether the values differ. Note that even if they are equal,
            # value may be cached by typing._tp_cache, unlike original_evaluated.
1565 if original_evaluated != value or (
1566 # 3.10: ForwardRefs of UnionType might be turned into _UnionGenericAlias
1567 hasattr(_types, "UnionType")
1568 and isinstance(original_evaluated, _types.UnionType)
1569 and not isinstance(value, _types.UnionType)
1570 ):
1571 hints[name] = original_evaluated
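
# A short sketch (comment only; ``Movie`` is an illustrative name): qualifiers
# and Annotated metadata are stripped unless include_extras=True is passed::
#
#     class Movie(TypedDict):
#         year: NotRequired[Annotated[int, "release year"]]
#
#     assert get_type_hints(Movie) == {'year': int}
#     hints = get_type_hints(Movie, include_extras=True)
#     # hints['year'] is NotRequired[Annotated[int, "release year"]]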
1572
1573# Python 3.9 has get_origin() and get_args() but those implementations don't support
1574# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do.
1575# Breakpoint: https://github.com/python/cpython/pull/25298
1576if sys.version_info >= (3, 10):
1577 get_origin = typing.get_origin
1578 get_args = typing.get_args
1579# 3.9
1580else:
1581 def get_origin(tp):
1582 """Get the unsubscripted version of a type.
1583
1584 This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
1585 and Annotated. Return None for unsupported types. Examples::
1586
1587 get_origin(Literal[42]) is Literal
1588 get_origin(int) is None
1589 get_origin(ClassVar[int]) is ClassVar
1590 get_origin(Generic) is Generic
1591 get_origin(Generic[T]) is Generic
1592 get_origin(Union[T, int]) is Union
1593 get_origin(List[Tuple[T, T]][int]) == list
1594 get_origin(P.args) is P
1595 """
1596 if isinstance(tp, typing._AnnotatedAlias):
1597 return Annotated
1598 if isinstance(tp, (typing._BaseGenericAlias, _types.GenericAlias,
1599 ParamSpecArgs, ParamSpecKwargs)):
1600 return tp.__origin__
1601 if tp is typing.Generic:
1602 return typing.Generic
1603 return None
1604
1605 def get_args(tp):
1606 """Get type arguments with all substitutions performed.
1607
1608 For unions, basic simplifications used by Union constructor are performed.
1609 Examples::
1610 get_args(Dict[str, int]) == (str, int)
1611 get_args(int) == ()
1612 get_args(Union[int, Union[T, int], str][int]) == (int, str)
1613 get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
1614 get_args(Callable[[], T][int]) == ([], int)
1615 """
1616 if isinstance(tp, typing._AnnotatedAlias):
1617 return (tp.__origin__, *tp.__metadata__)
1618 if isinstance(tp, (typing._GenericAlias, _types.GenericAlias)):
1619 res = tp.__args__
1620 if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
1621 res = (list(res[:-1]), res[-1])
1622 return res
1623 return ()
1624
1625
1626# 3.10+
1627if hasattr(typing, 'TypeAlias'):
1628 TypeAlias = typing.TypeAlias
1629# 3.9
1630else:
1631 @_ExtensionsSpecialForm
1632 def TypeAlias(self, parameters):
1633 """Special marker indicating that an assignment should
1634 be recognized as a proper type alias definition by type
1635 checkers.
1636
1637 For example::
1638
1639 Predicate: TypeAlias = Callable[..., bool]
1640
1641 It's invalid when used anywhere except as in the example above.
1642 """
1643 raise TypeError(f"{self} is not subscriptable")
1644
1645
1646def _set_default(type_param, default):
1647 type_param.has_default = lambda: default is not NoDefault
1648 type_param.__default__ = default
1649
1650
1651def _set_module(typevarlike):
1652 # for pickling:
1653 def_mod = _caller(depth=2)
1654 if def_mod != 'typing_extensions':
1655 typevarlike.__module__ = def_mod
1656
1657
1658class _DefaultMixin:
1659 """Mixin for TypeVarLike defaults."""
1660
1661 __slots__ = ()
1662 __init__ = _set_default
1663
1664
1665# Classes using this metaclass must provide a _backported_typevarlike ClassVar
1666class _TypeVarLikeMeta(type):
1667 def __instancecheck__(cls, __instance: Any) -> bool:
1668 return isinstance(__instance, cls._backported_typevarlike)
1669
1670
1671if _PEP_696_IMPLEMENTED:
1672 from typing import TypeVar
1673else:
1674 # Add default and infer_variance parameters from PEP 696 and 695
1675 class TypeVar(metaclass=_TypeVarLikeMeta):
1676 """Type variable."""
1677
1678 _backported_typevarlike = typing.TypeVar
1679
1680 def __new__(cls, name, *constraints, bound=None,
1681 covariant=False, contravariant=False,
1682 default=NoDefault, infer_variance=False):
1683 if hasattr(typing, "TypeAliasType"):
1684 # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar
1685 typevar = typing.TypeVar(name, *constraints, bound=bound,
1686 covariant=covariant, contravariant=contravariant,
1687 infer_variance=infer_variance)
1688 else:
1689 typevar = typing.TypeVar(name, *constraints, bound=bound,
1690 covariant=covariant, contravariant=contravariant)
1691 if infer_variance and (covariant or contravariant):
1692 raise ValueError("Variance cannot be specified with infer_variance.")
1693 typevar.__infer_variance__ = infer_variance
1694
1695 _set_default(typevar, default)
1696 _set_module(typevar)
1697
1698 def _tvar_prepare_subst(alias, args):
1699 if (
1700 typevar.has_default()
1701 and alias.__parameters__.index(typevar) == len(args)
1702 ):
1703 args += (typevar.__default__,)
1704 return args
1705
1706 typevar.__typing_prepare_subst__ = _tvar_prepare_subst
1707 return typevar
1708
1709 def __init_subclass__(cls) -> None:
1710 raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type")
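
# A minimal sketch (comment only; the TypeVar names are illustrative): PEP 696
# defaults are attached for runtime introspection, both by this backport and
# by typing.TypeVar on versions that implement PEP 696::
#
#     T_def = TypeVar("T_def", covariant=True, default=bool)
#     assert T_def.has_default()
#     assert T_def.__default__ is bool
#
#     T_plain = TypeVar("T_plain")
#     assert not T_plain.has_default()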
1711
1712
1713# Python 3.10+ has PEP 612
1714if hasattr(typing, 'ParamSpecArgs'):
1715 ParamSpecArgs = typing.ParamSpecArgs
1716 ParamSpecKwargs = typing.ParamSpecKwargs
1717# 3.9
1718else:
1719 class _Immutable:
1720 """Mixin to indicate that object should not be copied."""
1721 __slots__ = ()
1722
1723 def __copy__(self):
1724 return self
1725
1726 def __deepcopy__(self, memo):
1727 return self
1728
1729 class ParamSpecArgs(_Immutable):
1730 """The args for a ParamSpec object.
1731
1732 Given a ParamSpec object P, P.args is an instance of ParamSpecArgs.
1733
1734 ParamSpecArgs objects have a reference back to their ParamSpec:
1735
1736 P.args.__origin__ is P
1737
1738 This type is meant for runtime introspection and has no special meaning to
1739 static type checkers.
1740 """
1741 def __init__(self, origin):
1742 self.__origin__ = origin
1743
1744 def __repr__(self):
1745 return f"{self.__origin__.__name__}.args"
1746
1747 def __eq__(self, other):
1748 if not isinstance(other, ParamSpecArgs):
1749 return NotImplemented
1750 return self.__origin__ == other.__origin__
1751
1752 class ParamSpecKwargs(_Immutable):
1753 """The kwargs for a ParamSpec object.
1754
1755 Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs.
1756
1757 ParamSpecKwargs objects have a reference back to their ParamSpec:
1758
1759 P.kwargs.__origin__ is P
1760
1761 This type is meant for runtime introspection and has no special meaning to
1762 static type checkers.
1763 """
1764 def __init__(self, origin):
1765 self.__origin__ = origin
1766
1767 def __repr__(self):
1768 return f"{self.__origin__.__name__}.kwargs"
1769
1770 def __eq__(self, other):
1771 if not isinstance(other, ParamSpecKwargs):
1772 return NotImplemented
1773 return self.__origin__ == other.__origin__
1774
1775
1776if _PEP_696_IMPLEMENTED:
1777 from typing import ParamSpec
1778
1779# 3.10+
1780elif hasattr(typing, 'ParamSpec'):
1781
1782 # Add default parameter - PEP 696
1783 class ParamSpec(metaclass=_TypeVarLikeMeta):
1784 """Parameter specification."""
1785
1786 _backported_typevarlike = typing.ParamSpec
1787
1788 def __new__(cls, name, *, bound=None,
1789 covariant=False, contravariant=False,
1790 infer_variance=False, default=NoDefault):
1791 if hasattr(typing, "TypeAliasType"):
                # PEP 695 implemented, can pass infer_variance to typing.ParamSpec
1793 paramspec = typing.ParamSpec(name, bound=bound,
1794 covariant=covariant,
1795 contravariant=contravariant,
1796 infer_variance=infer_variance)
1797 else:
1798 paramspec = typing.ParamSpec(name, bound=bound,
1799 covariant=covariant,
1800 contravariant=contravariant)
1801 paramspec.__infer_variance__ = infer_variance
1802
1803 _set_default(paramspec, default)
1804 _set_module(paramspec)
1805
1806 def _paramspec_prepare_subst(alias, args):
1807 params = alias.__parameters__
1808 i = params.index(paramspec)
1809 if i == len(args) and paramspec.has_default():
1810 args = [*args, paramspec.__default__]
1811 if i >= len(args):
1812 raise TypeError(f"Too few arguments for {alias}")
1813 # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612.
1814 if len(params) == 1 and not typing._is_param_expr(args[0]):
1815 assert i == 0
1816 args = (args,)
1817 # Convert lists to tuples to help other libraries cache the results.
1818 elif isinstance(args[i], list):
1819 args = (*args[:i], tuple(args[i]), *args[i + 1:])
1820 return args
1821
1822 paramspec.__typing_prepare_subst__ = _paramspec_prepare_subst
1823 return paramspec
1824
1825 def __init_subclass__(cls) -> None:
1826 raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type")
1827
1828# 3.9
1829else:
1830
1831 # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
1832 class ParamSpec(list, _DefaultMixin):
1833 """Parameter specification variable.
1834
1835 Usage::
1836
1837 P = ParamSpec('P')
1838
1839 Parameter specification variables exist primarily for the benefit of static
1840 type checkers. They are used to forward the parameter types of one
1841 callable to another callable, a pattern commonly found in higher order
1842 functions and decorators. They are only valid when used in ``Concatenate``,
        or as the first argument to ``Callable``. In Python 3.10 and higher,
1844 they are also supported in user-defined Generics at runtime.
1845 See class Generic for more information on generic types. An
1846 example for annotating a decorator::
1847
1848 T = TypeVar('T')
1849 P = ParamSpec('P')
1850
1851 def add_logging(f: Callable[P, T]) -> Callable[P, T]:
1852 '''A type-safe decorator to add logging to a function.'''
1853 def inner(*args: P.args, **kwargs: P.kwargs) -> T:
1854 logging.info(f'{f.__name__} was called')
1855 return f(*args, **kwargs)
1856 return inner
1857
1858 @add_logging
1859 def add_two(x: float, y: float) -> float:
1860 '''Add two numbers together.'''
1861 return x + y
1862
1863 Parameter specification variables defined with covariant=True or
1864 contravariant=True can be used to declare covariant or contravariant
1865 generic types. These keyword arguments are valid, but their actual semantics
1866 are yet to be decided. See PEP 612 for details.
1867
1868 Parameter specification variables can be introspected. e.g.:
1869
           P.__name__ == 'P'
1871 P.__bound__ == None
1872 P.__covariant__ == False
1873 P.__contravariant__ == False
1874
1875 Note that only parameter specification variables defined in global scope can
1876 be pickled.
1877 """
1878
1879 # Trick Generic __parameters__.
1880 __class__ = typing.TypeVar
1881
1882 @property
1883 def args(self):
1884 return ParamSpecArgs(self)
1885
1886 @property
1887 def kwargs(self):
1888 return ParamSpecKwargs(self)
1889
1890 def __init__(self, name, *, bound=None, covariant=False, contravariant=False,
1891 infer_variance=False, default=NoDefault):
1892 list.__init__(self, [self])
1893 self.__name__ = name
1894 self.__covariant__ = bool(covariant)
1895 self.__contravariant__ = bool(contravariant)
1896 self.__infer_variance__ = bool(infer_variance)
1897 if bound:
1898 self.__bound__ = typing._type_check(bound, 'Bound must be a type.')
1899 else:
1900 self.__bound__ = None
1901 _DefaultMixin.__init__(self, default)
1902
1903 # for pickling:
1904 def_mod = _caller()
1905 if def_mod != 'typing_extensions':
1906 self.__module__ = def_mod
1907
1908 def __repr__(self):
1909 if self.__infer_variance__:
1910 prefix = ''
1911 elif self.__covariant__:
1912 prefix = '+'
1913 elif self.__contravariant__:
1914 prefix = '-'
1915 else:
1916 prefix = '~'
1917 return prefix + self.__name__
1918
1919 def __hash__(self):
1920 return object.__hash__(self)
1921
1922 def __eq__(self, other):
1923 return self is other
1924
1925 def __reduce__(self):
1926 return self.__name__
1927
1928 # Hack to get typing._type_check to pass.
1929 def __call__(self, *args, **kwargs):
1930 pass
1931
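# A rough sketch of the PEP 696 ``default=`` support added by the ParamSpec
# backports above; ``P`` and ``Handler`` are illustrative names only:
#
#     P = ParamSpec("P", default=[int, str])
#
#     class Handler(Generic[P]):
#         ...
#
#     Handler()           # type checkers treat this as Handler[[int, str]]
#     Handler[[bytes]]    # the default is overridden explicitly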
1932
1933# 3.9
1934if not hasattr(typing, 'Concatenate'):
1935 # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
1936
1937 # 3.9.0-1
1938 if not hasattr(typing, '_type_convert'):
1939 def _type_convert(arg, module=None, *, allow_special_forms=False):
1940 """For converting None to type(None), and strings to ForwardRef."""
1941 if arg is None:
1942 return type(None)
1943 if isinstance(arg, str):
1944 if sys.version_info <= (3, 9, 6):
1945 return ForwardRef(arg)
1946 if sys.version_info <= (3, 9, 7):
1947 return ForwardRef(arg, module=module)
1948 return ForwardRef(arg, module=module, is_class=allow_special_forms)
1949 return arg
1950 else:
1951 _type_convert = typing._type_convert
1952
1953 class _ConcatenateGenericAlias(list):
1954
1955 # Trick Generic into looking into this for __parameters__.
1956 __class__ = typing._GenericAlias
1957
1958 def __init__(self, origin, args):
1959 super().__init__(args)
1960 self.__origin__ = origin
1961 self.__args__ = args
1962
1963 def __repr__(self):
1964 _type_repr = typing._type_repr
1965 return (f'{_type_repr(self.__origin__)}'
1966 f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]')
1967
1968 def __hash__(self):
1969 return hash((self.__origin__, self.__args__))
1970
1971 # Hack to get typing._type_check to pass in Generic.
1972 def __call__(self, *args, **kwargs):
1973 pass
1974
1975 @property
1976 def __parameters__(self):
1977 return tuple(
1978 tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec))
1979 )
1980
1981 # 3.9 used by __getitem__ below
1982 def copy_with(self, params):
1983 if isinstance(params[-1], _ConcatenateGenericAlias):
1984 params = (*params[:-1], *params[-1].__args__)
1985 elif isinstance(params[-1], (list, tuple)):
1986 return (*params[:-1], *params[-1])
1987 elif (not (params[-1] is ... or isinstance(params[-1], ParamSpec))):
1988 raise TypeError("The last parameter to Concatenate should be a "
1989 "ParamSpec variable or ellipsis.")
1990 return self.__class__(self.__origin__, params)
1991
1992 # 3.9; accessed during GenericAlias.__getitem__ when substituting
1993 def __getitem__(self, args):
1994 if self.__origin__ in (Generic, Protocol):
1995 # Can't subscript Generic[...] or Protocol[...].
1996 raise TypeError(f"Cannot subscript already-subscripted {self}")
1997 if not self.__parameters__:
1998 raise TypeError(f"{self} is not a generic class")
1999
2000 if not isinstance(args, tuple):
2001 args = (args,)
2002 args = _unpack_args(*(_type_convert(p) for p in args))
2003 params = self.__parameters__
2004 for param in params:
2005 prepare = getattr(param, "__typing_prepare_subst__", None)
2006 if prepare is not None:
2007 args = prepare(self, args)
2008 # 3.9 & typing.ParamSpec
2009 elif isinstance(param, ParamSpec):
2010 i = params.index(param)
2011 if (
2012 i == len(args)
2013 and getattr(param, '__default__', NoDefault) is not NoDefault
2014 ):
2015 args = [*args, param.__default__]
2016 if i >= len(args):
2017 raise TypeError(f"Too few arguments for {self}")
2018 # Special case for Z[[int, str, bool]] == Z[int, str, bool]
2019 if len(params) == 1 and not _is_param_expr(args[0]):
2020 assert i == 0
2021 args = (args,)
2022 elif (
2023 isinstance(args[i], list)
2024 # 3.9
                        # This class inherits from list; do not convert
2026 and not isinstance(args[i], _ConcatenateGenericAlias)
2027 ):
2028 args = (*args[:i], tuple(args[i]), *args[i + 1:])
2029
2030 alen = len(args)
2031 plen = len(params)
2032 if alen != plen:
2033 raise TypeError(
2034 f"Too {'many' if alen > plen else 'few'} arguments for {self};"
2035 f" actual {alen}, expected {plen}"
2036 )
2037
2038 subst = dict(zip(self.__parameters__, args))
2039 # determine new args
2040 new_args = []
2041 for arg in self.__args__:
2042 if isinstance(arg, type):
2043 new_args.append(arg)
2044 continue
2045 if isinstance(arg, TypeVar):
2046 arg = subst[arg]
2047 if (
2048 (isinstance(arg, typing._GenericAlias) and _is_unpack(arg))
2049 or (
2050 hasattr(_types, "GenericAlias")
2051 and isinstance(arg, _types.GenericAlias)
2052 and getattr(arg, "__unpacked__", False)
2053 )
2054 ):
2055 raise TypeError(f"{arg} is not valid as type argument")
2056
2057 elif isinstance(arg,
2058 typing._GenericAlias
2059 if not hasattr(_types, "GenericAlias") else
2060 (typing._GenericAlias, _types.GenericAlias)
2061 ):
2062 subparams = arg.__parameters__
2063 if subparams:
2064 subargs = tuple(subst[x] for x in subparams)
2065 arg = arg[subargs]
2066 new_args.append(arg)
2067 return self.copy_with(tuple(new_args))
2068
2069# 3.10+
2070else:
2071 _ConcatenateGenericAlias = typing._ConcatenateGenericAlias
2072
2073 # 3.10
2074 if sys.version_info < (3, 11):
2075
2076 class _ConcatenateGenericAlias(typing._ConcatenateGenericAlias, _root=True):
2077 # needed for checks in collections.abc.Callable to accept this class
2078 __module__ = "typing"
2079
2080 def copy_with(self, params):
2081 if isinstance(params[-1], (list, tuple)):
2082 return (*params[:-1], *params[-1])
2083 if isinstance(params[-1], typing._ConcatenateGenericAlias):
2084 params = (*params[:-1], *params[-1].__args__)
2085 elif not (params[-1] is ... or isinstance(params[-1], ParamSpec)):
2086 raise TypeError("The last parameter to Concatenate should be a "
2087 "ParamSpec variable or ellipsis.")
2088 return super(typing._ConcatenateGenericAlias, self).copy_with(params)
2089
2090 def __getitem__(self, args):
2091 value = super().__getitem__(args)
2092 if isinstance(value, tuple) and any(_is_unpack(t) for t in value):
2093 return tuple(_unpack_args(*(n for n in value)))
2094 return value
2095
2096
# <3.9.2
2098class _EllipsisDummy: ...
2099
2100
2101# <=3.10
2102def _create_concatenate_alias(origin, parameters):
2103 if parameters[-1] is ... and sys.version_info < (3, 9, 2):
        # Hack: Arguments must be types, so replace the ellipsis with a dummy type.
2105 parameters = (*parameters[:-1], _EllipsisDummy)
2106 if sys.version_info >= (3, 10, 3):
2107 concatenate = _ConcatenateGenericAlias(origin, parameters,
2108 _typevar_types=(TypeVar, ParamSpec),
2109 _paramspec_tvars=True)
2110 else:
2111 concatenate = _ConcatenateGenericAlias(origin, parameters)
2112 if parameters[-1] is not _EllipsisDummy:
2113 return concatenate
2114 # Remove dummy again
2115 concatenate.__args__ = tuple(p if p is not _EllipsisDummy else ...
2116 for p in concatenate.__args__)
2117 if sys.version_info < (3, 10):
2118 # backport needs __args__ adjustment only
2119 return concatenate
2120 concatenate.__parameters__ = tuple(p for p in concatenate.__parameters__
2121 if p is not _EllipsisDummy)
2122 return concatenate
2123
2124
2125# <=3.10
2126@typing._tp_cache
2127def _concatenate_getitem(self, parameters):
2128 if parameters == ():
2129 raise TypeError("Cannot take a Concatenate of no types.")
2130 if not isinstance(parameters, tuple):
2131 parameters = (parameters,)
2132 if not (parameters[-1] is ... or isinstance(parameters[-1], ParamSpec)):
2133 raise TypeError("The last parameter to Concatenate should be a "
2134 "ParamSpec variable or ellipsis.")
2135 msg = "Concatenate[arg, ...]: each arg must be a type."
2136 parameters = (*(typing._type_check(p, msg) for p in parameters[:-1]),
2137 parameters[-1])
2138 return _create_concatenate_alias(self, parameters)
2139
2140
2141# 3.11+; Concatenate does not accept ellipsis in 3.10
2142# Breakpoint: https://github.com/python/cpython/pull/30969
2143if sys.version_info >= (3, 11):
2144 Concatenate = typing.Concatenate
2145# <=3.10
2146else:
2147 @_ExtensionsSpecialForm
2148 def Concatenate(self, parameters):
2149 """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
2150 higher order function which adds, removes or transforms parameters of a
2151 callable.
2152
2153 For example::
2154
2155 Callable[Concatenate[int, P], int]
2156
2157 See PEP 612 for detailed information.
2158 """
2159 return _concatenate_getitem(self, parameters)
2160
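# An illustrative use of Concatenate together with ParamSpec, along the lines
# of the docstring above. ``with_lock`` is a hypothetical decorator and an
# ``import threading`` is assumed:
#
#     P = ParamSpec("P")
#     T = TypeVar("T")
#     lock = threading.Lock()
#
#     def with_lock(f: Callable[Concatenate[threading.Lock, P], T]) -> Callable[P, T]:
#         def inner(*args: P.args, **kwargs: P.kwargs) -> T:
#             return f(lock, *args, **kwargs)
#         return inner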
2161
2162# 3.10+
2163if hasattr(typing, 'TypeGuard'):
2164 TypeGuard = typing.TypeGuard
2165# 3.9
2166else:
2167 @_ExtensionsSpecialForm
2168 def TypeGuard(self, parameters):
2169 """Special typing form used to annotate the return type of a user-defined
2170 type guard function. ``TypeGuard`` only accepts a single type argument.
2171 At runtime, functions marked this way should return a boolean.
2172
2173 ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
2174 type checkers to determine a more precise type of an expression within a
2175 program's code flow. Usually type narrowing is done by analyzing
2176 conditional code flow and applying the narrowing to a block of code. The
2177 conditional expression here is sometimes referred to as a "type guard".
2178
2179 Sometimes it would be convenient to use a user-defined boolean function
2180 as a type guard. Such a function should use ``TypeGuard[...]`` as its
2181 return type to alert static type checkers to this intention.
2182
2183 Using ``-> TypeGuard`` tells the static type checker that for a given
2184 function:
2185
2186 1. The return value is a boolean.
2187 2. If the return value is ``True``, the type of its argument
2188 is the type inside ``TypeGuard``.
2189
2190 For example::
2191
2192 def is_str(val: Union[str, float]):
2193 # "isinstance" type guard
2194 if isinstance(val, str):
2195 # Type of ``val`` is narrowed to ``str``
2196 ...
2197 else:
2198 # Else, type of ``val`` is narrowed to ``float``.
2199 ...
2200
        Strict type narrowing is not enforced -- for a function declared as
        ``def f(arg: TypeA) -> TypeGuard[TypeB]``, ``TypeB`` need not be a
        narrower form of ``TypeA`` (it can even be a wider form), and this may
        lead to type-unsafe results. The main reason is to allow for things like
2204 narrowing ``List[object]`` to ``List[str]`` even though the latter is not
2205 a subtype of the former, since ``List`` is invariant. The responsibility of
2206 writing type-safe type guards is left to the user.
2207
2208 ``TypeGuard`` also works with type variables. For more information, see
2209 PEP 647 (User-Defined Type Guards).
2210 """
2211 item = typing._type_check(parameters, f'{self} accepts only a single type.')
2212 return typing._GenericAlias(self, (item,))
2213
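# A user-defined type guard of the kind the docstring above describes
# (a sketch; ``is_str_list`` is an illustrative name):
#
#     def is_str_list(val: List[object]) -> TypeGuard[List[str]]:
#         """Determine whether all objects in the list are strings."""
#         return all(isinstance(x, str) for x in val)
#
#     def func(val: List[object]) -> None:
#         if is_str_list(val):
#             ...  # type checkers narrow ``val`` to List[str] here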
2214
2215# 3.13+
2216if hasattr(typing, 'TypeIs'):
2217 TypeIs = typing.TypeIs
2218# <=3.12
2219else:
2220 @_ExtensionsSpecialForm
2221 def TypeIs(self, parameters):
2222 """Special typing form used to annotate the return type of a user-defined
2223 type narrower function. ``TypeIs`` only accepts a single type argument.
2224 At runtime, functions marked this way should return a boolean.
2225
2226 ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static
2227 type checkers to determine a more precise type of an expression within a
2228 program's code flow. Usually type narrowing is done by analyzing
2229 conditional code flow and applying the narrowing to a block of code. The
2230 conditional expression here is sometimes referred to as a "type guard".
2231
2232 Sometimes it would be convenient to use a user-defined boolean function
2233 as a type guard. Such a function should use ``TypeIs[...]`` as its
2234 return type to alert static type checkers to this intention.
2235
2236 Using ``-> TypeIs`` tells the static type checker that for a given
2237 function:
2238
2239 1. The return value is a boolean.
2240 2. If the return value is ``True``, the type of its argument
2241 is the intersection of the type inside ``TypeIs`` and the argument's
2242 previously known type.
2243
2244 For example::
2245
2246 def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]:
2247 return hasattr(val, '__await__')
2248
2249 def f(val: Union[int, Awaitable[int]]) -> int:
2250 if is_awaitable(val):
2251 assert_type(val, Awaitable[int])
2252 else:
2253 assert_type(val, int)
2254
2255 ``TypeIs`` also works with type variables. For more information, see
2256 PEP 742 (Narrowing types with TypeIs).
2257 """
2258 item = typing._type_check(parameters, f'{self} accepts only a single type.')
2259 return typing._GenericAlias(self, (item,))
2260
2261
2262# 3.14+?
2263if hasattr(typing, 'TypeForm'):
2264 TypeForm = typing.TypeForm
2265# <=3.13
2266else:
2267 class _TypeFormForm(_ExtensionsSpecialForm, _root=True):
2268 # TypeForm(X) is equivalent to X but indicates to the type checker
2269 # that the object is a TypeForm.
2270 def __call__(self, obj, /):
2271 return obj
2272
2273 @_TypeFormForm
2274 def TypeForm(self, parameters):
2275 """A special form representing the value that results from the evaluation
2276 of a type expression. This value encodes the information supplied in the
2277 type expression, and it represents the type described by that type expression.
2278
2279 When used in a type expression, TypeForm describes a set of type form objects.
2280 It accepts a single type argument, which must be a valid type expression.
2281 ``TypeForm[T]`` describes the set of all type form objects that represent
2282 the type T or types that are assignable to T.
2283
2284 Usage:
2285
2286 def cast[T](typ: TypeForm[T], value: Any) -> T: ...
2287
2288 reveal_type(cast(int, "x")) # int
2289
2290 See PEP 747 for more information.
2291 """
2292 item = typing._type_check(parameters, f'{self} accepts only a single type.')
2293 return typing._GenericAlias(self, (item,))
2294
2295
2298if hasattr(typing, "LiteralString"): # 3.11+
2299 LiteralString = typing.LiteralString
2300else:
2301 @_SpecialForm
2302 def LiteralString(self, params):
2303 """Represents an arbitrary literal string.
2304
2305 Example::
2306
2307 from typing_extensions import LiteralString
2308
2309 def query(sql: LiteralString) -> ...:
2310 ...
2311
2312 query("SELECT * FROM table") # ok
2313 query(f"SELECT * FROM {input()}") # not ok
2314
2315 See PEP 675 for details.
2316
2317 """
2318 raise TypeError(f"{self} is not subscriptable")
2319
2320
2321if hasattr(typing, "Self"): # 3.11+
2322 Self = typing.Self
2323else:
2324 @_SpecialForm
2325 def Self(self, params):
2326 """Used to spell the type of "self" in classes.
2327
2328 Example::
2329
2330 from typing import Self
2331
2332 class ReturnsSelf:
2333 def parse(self, data: bytes) -> Self:
2334 ...
2335 return self
2336
2337 """
2338
2339 raise TypeError(f"{self} is not subscriptable")
2340
2341
2342if hasattr(typing, "Never"): # 3.11+
2343 Never = typing.Never
2344else:
2345 @_SpecialForm
2346 def Never(self, params):
2347 """The bottom type, a type that has no members.
2348
2349 This can be used to define a function that should never be
2350 called, or a function that never returns::
2351
2352 from typing_extensions import Never
2353
2354 def never_call_me(arg: Never) -> None:
2355 pass
2356
2357 def int_or_str(arg: int | str) -> None:
2358 never_call_me(arg) # type checker error
2359 match arg:
2360 case int():
2361 print("It's an int")
2362 case str():
2363 print("It's a str")
2364 case _:
2365 never_call_me(arg) # ok, arg is of type Never
2366
2367 """
2368
2369 raise TypeError(f"{self} is not subscriptable")
2370
2371
2372if hasattr(typing, 'Required'): # 3.11+
2373 Required = typing.Required
2374 NotRequired = typing.NotRequired
2375else: # <=3.10
2376 @_ExtensionsSpecialForm
2377 def Required(self, parameters):
2378 """A special typing construct to mark a key of a total=False TypedDict
2379 as required. For example:
2380
2381 class Movie(TypedDict, total=False):
2382 title: Required[str]
2383 year: int
2384
2385 m = Movie(
2386 title='The Matrix', # typechecker error if key is omitted
2387 year=1999,
2388 )
2389
2390 There is no runtime checking that a required key is actually provided
2391 when instantiating a related TypedDict.
2392 """
2393 item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
2394 return typing._GenericAlias(self, (item,))
2395
2396 @_ExtensionsSpecialForm
2397 def NotRequired(self, parameters):
2398 """A special typing construct to mark a key of a TypedDict as
2399 potentially missing. For example:
2400
2401 class Movie(TypedDict):
2402 title: str
2403 year: NotRequired[int]
2404
2405 m = Movie(
2406 title='The Matrix', # typechecker error if key is omitted
2407 year=1999,
2408 )
2409 """
2410 item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
2411 return typing._GenericAlias(self, (item,))
2412
2413
2414if hasattr(typing, 'ReadOnly'):
2415 ReadOnly = typing.ReadOnly
2416else: # <=3.12
2417 @_ExtensionsSpecialForm
2418 def ReadOnly(self, parameters):
2419 """A special typing construct to mark an item of a TypedDict as read-only.
2420
2421 For example:
2422
2423 class Movie(TypedDict):
2424 title: ReadOnly[str]
2425 year: int
2426
2427 def mutate_movie(m: Movie) -> None:
2428 m["year"] = 1992 # allowed
2429 m["title"] = "The Matrix" # typechecker error
2430
2431 There is no runtime checking for this property.
2432 """
2433 item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
2434 return typing._GenericAlias(self, (item,))
2435
2436
2437_UNPACK_DOC = """\
2438Type unpack operator.
2439
2440The type unpack operator takes the child types from some container type,
2441such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For
2442example:
2443
2444 # For some generic class `Foo`:
2445 Foo[Unpack[tuple[int, str]]] # Equivalent to Foo[int, str]
2446
2447 Ts = TypeVarTuple('Ts')
2448 # Specifies that `Bar` is generic in an arbitrary number of types.
2449 # (Think of `Ts` as a tuple of an arbitrary number of individual
2450 # `TypeVar`s, which the `Unpack` is 'pulling out' directly into the
2451 # `Generic[]`.)
2452 class Bar(Generic[Unpack[Ts]]): ...
2453 Bar[int] # Valid
2454 Bar[int, str] # Also valid
2455
2456From Python 3.11, this can also be done using the `*` operator:
2457
2458 Foo[*tuple[int, str]]
2459 class Bar(Generic[*Ts]): ...
2460
2461The operator can also be used along with a `TypedDict` to annotate
2462`**kwargs` in a function signature. For instance:
2463
2464 class Movie(TypedDict):
2465 name: str
2466 year: int
2467
2468 # This function expects two keyword arguments - *name* of type `str` and
2469 # *year* of type `int`.
2470 def foo(**kwargs: Unpack[Movie]): ...
2471
2472Note that there is only some runtime checking of this operator. Not
2473everything the runtime allows may be accepted by static type checkers.
2474
2475For more information, see PEP 646 and PEP 692.
2476"""
2477
2478
2479# PEP 692 changed the repr of Unpack[]
2480# Breakpoint: https://github.com/python/cpython/pull/104048
2481if sys.version_info >= (3, 12):
2482 Unpack = typing.Unpack
2483
2484 def _is_unpack(obj):
2485 return get_origin(obj) is Unpack
2486
2487else: # <=3.11
2488 class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True):
2489 def __init__(self, getitem):
2490 super().__init__(getitem)
2491 self.__doc__ = _UNPACK_DOC
2492
2493 class _UnpackAlias(typing._GenericAlias, _root=True):
2494 if sys.version_info < (3, 11):
2495 # needed for compatibility with Generic[Unpack[Ts]]
2496 __class__ = typing.TypeVar
2497
2498 @property
2499 def __typing_unpacked_tuple_args__(self):
2500 assert self.__origin__ is Unpack
2501 assert len(self.__args__) == 1
2502 arg, = self.__args__
2503 if isinstance(arg, (typing._GenericAlias, _types.GenericAlias)):
2504 if arg.__origin__ is not tuple:
2505 raise TypeError("Unpack[...] must be used with a tuple type")
2506 return arg.__args__
2507 return None
2508
2509 @property
2510 def __typing_is_unpacked_typevartuple__(self):
2511 assert self.__origin__ is Unpack
2512 assert len(self.__args__) == 1
2513 return isinstance(self.__args__[0], TypeVarTuple)
2514
2515 def __getitem__(self, args):
2516 if self.__typing_is_unpacked_typevartuple__:
2517 return args
2518 return super().__getitem__(args)
2519
2520 @_UnpackSpecialForm
2521 def Unpack(self, parameters):
2522 item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
2523 return _UnpackAlias(self, (item,))
2524
2525 def _is_unpack(obj):
2526 return isinstance(obj, _UnpackAlias)
2527
2528
2529def _unpack_args(*args):
2530 newargs = []
2531 for arg in args:
2532 subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
2533 if subargs is not None and (not (subargs and subargs[-1] is ...)):
2534 newargs.extend(subargs)
2535 else:
2536 newargs.append(arg)
2537 return newargs
2538
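# Rough illustration of what _unpack_args does: unpacked fixed-length tuples
# are flattened, everything else (including unbounded tuples) is left alone.
#
#     _unpack_args(int, Unpack[Tuple[str, float]])  # -> [int, str, float]
#     _unpack_args(int, Unpack[Tuple[str, ...]])    # -> [int, Unpack[Tuple[str, ...]]]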
2539
2540if _PEP_696_IMPLEMENTED:
2541 from typing import TypeVarTuple
2542
2543elif hasattr(typing, "TypeVarTuple"): # 3.11+
2544
2545 # Add default parameter - PEP 696
2546 class TypeVarTuple(metaclass=_TypeVarLikeMeta):
2547 """Type variable tuple."""
2548
2549 _backported_typevarlike = typing.TypeVarTuple
2550
2551 def __new__(cls, name, *, default=NoDefault):
2552 tvt = typing.TypeVarTuple(name)
2553 _set_default(tvt, default)
2554 _set_module(tvt)
2555
2556 def _typevartuple_prepare_subst(alias, args):
2557 params = alias.__parameters__
2558 typevartuple_index = params.index(tvt)
2559 for param in params[typevartuple_index + 1:]:
2560 if isinstance(param, TypeVarTuple):
2561 raise TypeError(
2562 f"More than one TypeVarTuple parameter in {alias}"
2563 )
2564
2565 alen = len(args)
2566 plen = len(params)
2567 left = typevartuple_index
2568 right = plen - typevartuple_index - 1
2569 var_tuple_index = None
2570 fillarg = None
2571 for k, arg in enumerate(args):
2572 if not isinstance(arg, type):
2573 subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
2574 if subargs and len(subargs) == 2 and subargs[-1] is ...:
2575 if var_tuple_index is not None:
2576 raise TypeError(
2577 "More than one unpacked "
2578 "arbitrary-length tuple argument"
2579 )
2580 var_tuple_index = k
2581 fillarg = subargs[0]
2582 if var_tuple_index is not None:
2583 left = min(left, var_tuple_index)
2584 right = min(right, alen - var_tuple_index - 1)
2585 elif left + right > alen:
2586 raise TypeError(f"Too few arguments for {alias};"
2587 f" actual {alen}, expected at least {plen - 1}")
2588 if left == alen - right and tvt.has_default():
2589 replacement = _unpack_args(tvt.__default__)
2590 else:
2591 replacement = args[left: alen - right]
2592
2593 return (
2594 *args[:left],
2595 *([fillarg] * (typevartuple_index - left)),
2596 replacement,
2597 *([fillarg] * (plen - right - left - typevartuple_index - 1)),
2598 *args[alen - right:],
2599 )
2600
2601 tvt.__typing_prepare_subst__ = _typevartuple_prepare_subst
2602 return tvt
2603
2604 def __init_subclass__(self, *args, **kwds):
2605 raise TypeError("Cannot subclass special typing classes")
2606
2607else: # <=3.10
2608 class TypeVarTuple(_DefaultMixin):
2609 """Type variable tuple.
2610
2611 Usage::
2612
2613 Ts = TypeVarTuple('Ts')
2614
2615 In the same way that a normal type variable is a stand-in for a single
2616 type such as ``int``, a type variable *tuple* is a stand-in for a *tuple*
2617 type such as ``Tuple[int, str]``.
2618
2619 Type variable tuples can be used in ``Generic`` declarations.
2620 Consider the following example::
2621
2622 class Array(Generic[*Ts]): ...
2623
2624 The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``,
2625 where ``T1`` and ``T2`` are type variables. To use these type variables
2626 as type parameters of ``Array``, we must *unpack* the type variable tuple using
2627 the star operator: ``*Ts``. The signature of ``Array`` then behaves
2628 as if we had simply written ``class Array(Generic[T1, T2]): ...``.
        In contrast to ``Generic[T1, T2]``, however, ``Generic[*Ts]`` allows
2630 us to parameterise the class with an *arbitrary* number of type parameters.
2631
2632 Type variable tuples can be used anywhere a normal ``TypeVar`` can.
2633 This includes class definitions, as shown above, as well as function
2634 signatures and variable annotations::
2635
2636 class Array(Generic[*Ts]):
2637
2638 def __init__(self, shape: Tuple[*Ts]):
2639 self._shape: Tuple[*Ts] = shape
2640
2641 def get_shape(self) -> Tuple[*Ts]:
2642 return self._shape
2643
2644 shape = (Height(480), Width(640))
2645 x: Array[Height, Width] = Array(shape)
2646 y = abs(x) # Inferred type is Array[Height, Width]
2647 z = x + x # ... is Array[Height, Width]
2648 x.get_shape() # ... is tuple[Height, Width]
2649
2650 """
2651
2652 # Trick Generic __parameters__.
2653 __class__ = typing.TypeVar
2654
2655 def __iter__(self):
2656 yield self.__unpacked__
2657
2658 def __init__(self, name, *, default=NoDefault):
2659 self.__name__ = name
2660 _DefaultMixin.__init__(self, default)
2661
2662 # for pickling:
2663 def_mod = _caller()
2664 if def_mod != 'typing_extensions':
2665 self.__module__ = def_mod
2666
2667 self.__unpacked__ = Unpack[self]
2668
2669 def __repr__(self):
2670 return self.__name__
2671
2672 def __hash__(self):
2673 return object.__hash__(self)
2674
2675 def __eq__(self, other):
2676 return self is other
2677
2678 def __reduce__(self):
2679 return self.__name__
2680
2681 def __init_subclass__(self, *args, **kwds):
2682 if '_root' not in kwds:
2683 raise TypeError("Cannot subclass special typing classes")
2684
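# A small sketch of the PEP 696 ``default=`` support added by the TypeVarTuple
# backports above; ``Ts`` and ``Array`` are illustrative names only:
#
#     Ts = TypeVarTuple("Ts", default=Unpack[Tuple[int, int]])
#
#     class Array(Generic[Unpack[Ts]]): ...
#
#     Array()            # type checkers treat this as Array[int, int]
#     Array[float, str]  # the default is overridden explicitly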
2685
2686if hasattr(typing, "reveal_type"): # 3.11+
2687 reveal_type = typing.reveal_type
2688else: # <=3.10
2689 def reveal_type(obj: T, /) -> T:
2690 """Reveal the inferred type of a variable.
2691
2692 When a static type checker encounters a call to ``reveal_type()``,
2693 it will emit the inferred type of the argument::
2694
2695 x: int = 1
2696 reveal_type(x)
2697
2698 Running a static type checker (e.g., ``mypy``) on this example
2699 will produce output similar to 'Revealed type is "builtins.int"'.
2700
2701 At runtime, the function prints the runtime type of the
2702 argument and returns it unchanged.
2703
2704 """
2705 print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr)
2706 return obj
2707
2708
2709if hasattr(typing, "_ASSERT_NEVER_REPR_MAX_LENGTH"): # 3.11+
2710 _ASSERT_NEVER_REPR_MAX_LENGTH = typing._ASSERT_NEVER_REPR_MAX_LENGTH
2711else: # <=3.10
2712 _ASSERT_NEVER_REPR_MAX_LENGTH = 100
2713
2714
2715if hasattr(typing, "assert_never"): # 3.11+
2716 assert_never = typing.assert_never
2717else: # <=3.10
2718 def assert_never(arg: Never, /) -> Never:
2719 """Assert to the type checker that a line of code is unreachable.
2720
2721 Example::
2722
2723 def int_or_str(arg: int | str) -> None:
2724 match arg:
2725 case int():
2726 print("It's an int")
2727 case str():
2728 print("It's a str")
2729 case _:
2730 assert_never(arg)
2731
2732 If a type checker finds that a call to assert_never() is
2733 reachable, it will emit an error.
2734
2735 At runtime, this throws an exception when called.
2736
2737 """
2738 value = repr(arg)
2739 if len(value) > _ASSERT_NEVER_REPR_MAX_LENGTH:
2740 value = value[:_ASSERT_NEVER_REPR_MAX_LENGTH] + '...'
2741 raise AssertionError(f"Expected code to be unreachable, but got: {value}")
2742
2743
2744# dataclass_transform exists in 3.11 but lacks the frozen_default parameter
2745# Breakpoint: https://github.com/python/cpython/pull/99958
2746if sys.version_info >= (3, 12): # 3.12+
2747 dataclass_transform = typing.dataclass_transform
2748else: # <=3.11
2749 def dataclass_transform(
2750 *,
2751 eq_default: bool = True,
2752 order_default: bool = False,
2753 kw_only_default: bool = False,
2754 frozen_default: bool = False,
2755 field_specifiers: typing.Tuple[
2756 typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]],
2757 ...
2758 ] = (),
2759 **kwargs: typing.Any,
2760 ) -> typing.Callable[[T], T]:
2761 """Decorator that marks a function, class, or metaclass as providing
2762 dataclass-like behavior.
2763
2764 Example:
2765
2766 from typing_extensions import dataclass_transform
2767
2768 _T = TypeVar("_T")
2769
2770 # Used on a decorator function
2771 @dataclass_transform()
2772 def create_model(cls: type[_T]) -> type[_T]:
2773 ...
2774 return cls
2775
2776 @create_model
2777 class CustomerModel:
2778 id: int
2779 name: str
2780
2781 # Used on a base class
2782 @dataclass_transform()
2783 class ModelBase: ...
2784
2785 class CustomerModel(ModelBase):
2786 id: int
2787 name: str
2788
2789 # Used on a metaclass
2790 @dataclass_transform()
2791 class ModelMeta(type): ...
2792
2793 class ModelBase(metaclass=ModelMeta): ...
2794
2795 class CustomerModel(ModelBase):
2796 id: int
2797 name: str
2798
2799 Each of the ``CustomerModel`` classes defined in this example will now
2800 behave similarly to a dataclass created with the ``@dataclasses.dataclass``
2801 decorator. For example, the type checker will synthesize an ``__init__``
2802 method.
2803
2804 The arguments to this decorator can be used to customize this behavior:
2805 - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be
2806 True or False if it is omitted by the caller.
2807 - ``order_default`` indicates whether the ``order`` parameter is
2808 assumed to be True or False if it is omitted by the caller.
2809 - ``kw_only_default`` indicates whether the ``kw_only`` parameter is
2810 assumed to be True or False if it is omitted by the caller.
2811 - ``frozen_default`` indicates whether the ``frozen`` parameter is
2812 assumed to be True or False if it is omitted by the caller.
2813 - ``field_specifiers`` specifies a static list of supported classes
2814 or functions that describe fields, similar to ``dataclasses.field()``.
2815
2816 At runtime, this decorator records its arguments in the
2817 ``__dataclass_transform__`` attribute on the decorated object.
2818
2819 See PEP 681 for details.
2820
2821 """
2822 def decorator(cls_or_fn):
2823 cls_or_fn.__dataclass_transform__ = {
2824 "eq_default": eq_default,
2825 "order_default": order_default,
2826 "kw_only_default": kw_only_default,
2827 "frozen_default": frozen_default,
2828 "field_specifiers": field_specifiers,
2829 "kwargs": kwargs,
2830 }
2831 return cls_or_fn
2832 return decorator
2833
2834
2835if hasattr(typing, "override"): # 3.12+
2836 override = typing.override
2837else: # <=3.11
2838 _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any])
2839
2840 def override(arg: _F, /) -> _F:
2841 """Indicate that a method is intended to override a method in a base class.
2842
2843 Usage:
2844
2845 class Base:
2846 def method(self) -> None:
2847 pass
2848
2849 class Child(Base):
2850 @override
2851 def method(self) -> None:
2852 super().method()
2853
2854 When this decorator is applied to a method, the type checker will
2855 validate that it overrides a method with the same name on a base class.
2856 This helps prevent bugs that may occur when a base class is changed
2857 without an equivalent change to a child class.
2858
2859 There is no runtime checking of these properties. The decorator
2860 sets the ``__override__`` attribute to ``True`` on the decorated object
2861 to allow runtime introspection.
2862
2863 See PEP 698 for details.
2864
2865 """
2866 try:
2867 arg.__override__ = True
2868 except (AttributeError, TypeError):
2869 # Skip the attribute silently if it is not writable.
2870 # AttributeError happens if the object has __slots__ or a
2871 # read-only property, TypeError if it's a builtin class.
2872 pass
2873 return arg
2874
2875
2876# Python 3.13.3+ contains a fix for the wrapped __new__
2877# Breakpoint: https://github.com/python/cpython/pull/132160
2878if sys.version_info >= (3, 13, 3):
2879 deprecated = warnings.deprecated
2880else:
2881 _T = typing.TypeVar("_T")
2882
2883 class deprecated:
2884 """Indicate that a class, function or overload is deprecated.
2885
2886 When this decorator is applied to an object, the type checker
2887 will generate a diagnostic on usage of the deprecated object.
2888
2889 Usage:
2890
2891 @deprecated("Use B instead")
2892 class A:
2893 pass
2894
2895 @deprecated("Use g instead")
2896 def f():
2897 pass
2898
2899 @overload
2900 @deprecated("int support is deprecated")
2901 def g(x: int) -> int: ...
2902 @overload
2903 def g(x: str) -> int: ...
2904
2905 The warning specified by *category* will be emitted at runtime
2906 on use of deprecated objects. For functions, that happens on calls;
2907 for classes, on instantiation and on creation of subclasses.
2908 If the *category* is ``None``, no warning is emitted at runtime.
2909 The *stacklevel* determines where the
2910 warning is emitted. If it is ``1`` (the default), the warning
2911 is emitted at the direct caller of the deprecated object; if it
2912 is higher, it is emitted further up the stack.
2913 Static type checker behavior is not affected by the *category*
2914 and *stacklevel* arguments.
2915
2916 The deprecation message passed to the decorator is saved in the
2917 ``__deprecated__`` attribute on the decorated object.
2918 If applied to an overload, the decorator
2919 must be after the ``@overload`` decorator for the attribute to
2920 exist on the overload as returned by ``get_overloads()``.
2921
2922 See PEP 702 for details.
2923
2924 """
2925 def __init__(
2926 self,
2927 message: str,
2928 /,
2929 *,
2930 category: typing.Optional[typing.Type[Warning]] = DeprecationWarning,
2931 stacklevel: int = 1,
2932 ) -> None:
2933 if not isinstance(message, str):
2934 raise TypeError(
2935 "Expected an object of type str for 'message', not "
2936 f"{type(message).__name__!r}"
2937 )
2938 self.message = message
2939 self.category = category
2940 self.stacklevel = stacklevel
2941
2942 def __call__(self, arg: _T, /) -> _T:
2943 # Make sure the inner functions created below don't
2944 # retain a reference to self.
2945 msg = self.message
2946 category = self.category
2947 stacklevel = self.stacklevel
2948 if category is None:
2949 arg.__deprecated__ = msg
2950 return arg
2951 elif isinstance(arg, type):
2952 import functools
2953 from types import MethodType
2954
2955 original_new = arg.__new__
2956
2957 @functools.wraps(original_new)
2958 def __new__(cls, /, *args, **kwargs):
2959 if cls is arg:
2960 warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
2961 if original_new is not object.__new__:
2962 return original_new(cls, *args, **kwargs)
2963 # Mirrors a similar check in object.__new__.
2964 elif cls.__init__ is object.__init__ and (args or kwargs):
2965 raise TypeError(f"{cls.__name__}() takes no arguments")
2966 else:
2967 return original_new(cls)
2968
2969 arg.__new__ = staticmethod(__new__)
2970
2971 original_init_subclass = arg.__init_subclass__
2972 # We need slightly different behavior if __init_subclass__
2973 # is a bound method (likely if it was implemented in Python)
2974 if isinstance(original_init_subclass, MethodType):
2975 original_init_subclass = original_init_subclass.__func__
2976
2977 @functools.wraps(original_init_subclass)
2978 def __init_subclass__(*args, **kwargs):
2979 warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
2980 return original_init_subclass(*args, **kwargs)
2981
2982 arg.__init_subclass__ = classmethod(__init_subclass__)
2983 # Or otherwise, which likely means it's a builtin such as
2984 # object's implementation of __init_subclass__.
2985 else:
2986 @functools.wraps(original_init_subclass)
2987 def __init_subclass__(*args, **kwargs):
2988 warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
2989 return original_init_subclass(*args, **kwargs)
2990
2991 arg.__init_subclass__ = __init_subclass__
2992
2993 arg.__deprecated__ = __new__.__deprecated__ = msg
2994 __init_subclass__.__deprecated__ = msg
2995 return arg
2996 elif callable(arg):
2997 import asyncio.coroutines
2998 import functools
2999 import inspect
3000
3001 @functools.wraps(arg)
3002 def wrapper(*args, **kwargs):
3003 warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
3004 return arg(*args, **kwargs)
3005
3006 if asyncio.coroutines.iscoroutinefunction(arg):
3007 # Breakpoint: https://github.com/python/cpython/pull/99247
3008 if sys.version_info >= (3, 12):
3009 wrapper = inspect.markcoroutinefunction(wrapper)
3010 else:
3011 wrapper._is_coroutine = asyncio.coroutines._is_coroutine
3012
3013 arg.__deprecated__ = wrapper.__deprecated__ = msg
3014 return wrapper
3015 else:
3016 raise TypeError(
3017 "@deprecated decorator with non-None category must be applied to "
3018 f"a class or callable, not {arg!r}"
3019 )
3020
3021# Breakpoint: https://github.com/python/cpython/pull/23702
3022if sys.version_info < (3, 10):
3023 def _is_param_expr(arg):
3024 return arg is ... or isinstance(
3025 arg, (tuple, list, ParamSpec, _ConcatenateGenericAlias)
3026 )
3027else:
3028 def _is_param_expr(arg):
3029 return arg is ... or isinstance(
3030 arg,
3031 (
3032 tuple,
3033 list,
3034 ParamSpec,
3035 _ConcatenateGenericAlias,
3036 typing._ConcatenateGenericAlias,
3037 ),
3038 )
3039
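# Roughly, _is_param_expr answers "could this argument stand in for a whole
# ParamSpec parameter list?". For example:
#
#     _is_param_expr([int, str])  # True  (an explicit parameter list)
#     _is_param_expr(...)         # True  (ellipsis)
#     _is_param_expr(int)         # False (a single type)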
3040
3041# We have to do some monkey patching to deal with the dual nature of
3042# Unpack/TypeVarTuple:
3043# - We want Unpack to be a kind of TypeVar so it gets accepted in
3044# Generic[Unpack[Ts]]
3045# - We want it to *not* be treated as a TypeVar for the purposes of
3046# counting generic parameters, so that when we subscript a generic,
3047# the runtime doesn't try to substitute the Unpack with the subscripted type.
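# For example (illustrative names; nothing here is executed):
#
#     Ts = TypeVarTuple('Ts')
#
#     class Bar(Generic[Unpack[Ts]]): ...   # Unpack is accepted like a TypeVar
#
#     Bar[int]        # valid: Ts binds to (int,)
#     Bar[int, str]   # valid: Ts binds to (int, str) rather than raising a
#                     # "too many arguments" error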
3048if not hasattr(typing, "TypeVarTuple"):
3049 def _check_generic(cls, parameters, elen=_marker):
3050 """Check correct count for parameters of a generic cls (internal helper).
3051
3052 This gives a nice error message in case of count mismatch.
3053 """
3054 # If substituting a single ParamSpec with multiple arguments
3055 # we do not check the count
3056 if (inspect.isclass(cls) and issubclass(cls, typing.Generic)
3057 and len(cls.__parameters__) == 1
3058 and isinstance(cls.__parameters__[0], ParamSpec)
3059 and parameters
3060 and not _is_param_expr(parameters[0])
3061 ):
3062 # Generic modifies parameters variable, but here we cannot do this
3063 return
3064
3065 if not elen:
3066 raise TypeError(f"{cls} is not a generic class")
3067 if elen is _marker:
3068 if not hasattr(cls, "__parameters__") or not cls.__parameters__:
3069 raise TypeError(f"{cls} is not a generic class")
3070 elen = len(cls.__parameters__)
3071 alen = len(parameters)
3072 if alen != elen:
3073 expect_val = elen
3074 if hasattr(cls, "__parameters__"):
3075 parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
3076 num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters)
3077 if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples):
3078 return
3079
3080 # deal with TypeVarLike defaults
3081 # required TypeVarLikes cannot appear after a defaulted one.
3082 if alen < elen:
3083 # since we validate TypeVarLike default in _collect_type_vars
3084 # or _collect_parameters we can safely check parameters[alen]
3085 if (
3086 getattr(parameters[alen], '__default__', NoDefault)
3087 is not NoDefault
3088 ):
3089 return
3090
3091 num_default_tv = sum(getattr(p, '__default__', NoDefault)
3092 is not NoDefault for p in parameters)
3093
3094 elen -= num_default_tv
3095
3096 expect_val = f"at least {elen}"
3097
3098 # Breakpoint: https://github.com/python/cpython/pull/27515
3099 things = "arguments" if sys.version_info >= (3, 10) else "parameters"
3100 raise TypeError(f"Too {'many' if alen > elen else 'few'} {things}"
3101 f" for {cls}; actual {alen}, expected {expect_val}")
3102else:
3103 # Python 3.11+
3104
3105 def _check_generic(cls, parameters, elen):
3106 """Check correct count for parameters of a generic cls (internal helper).
3107
3108 This gives a nice error message in case of count mismatch.
3109 """
3110 if not elen:
3111 raise TypeError(f"{cls} is not a generic class")
3112 alen = len(parameters)
3113 if alen != elen:
3114 expect_val = elen
3115 if hasattr(cls, "__parameters__"):
3116 parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
3117
3118 # deal with TypeVarLike defaults
3119 # required TypeVarLikes cannot appear after a defaulted one.
3120 if alen < elen:
3121 # since we validate TypeVarLike default in _collect_type_vars
3122 # or _collect_parameters we can safely check parameters[alen]
3123 if (
3124 getattr(parameters[alen], '__default__', NoDefault)
3125 is not NoDefault
3126 ):
3127 return
3128
3129 num_default_tv = sum(getattr(p, '__default__', NoDefault)
3130 is not NoDefault for p in parameters)
3131
3132 elen -= num_default_tv
3133
3134 expect_val = f"at least {elen}"
3135
3136 raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments"
3137 f" for {cls}; actual {alen}, expected {expect_val}")
3138
3139if not _PEP_696_IMPLEMENTED:
3140 typing._check_generic = _check_generic
3141
3142
3143def _has_generic_or_protocol_as_origin() -> bool:
3144 try:
3145 frame = sys._getframe(2)
3146 # - Catch AttributeError: not all Python implementations have sys._getframe()
3147 # - Catch ValueError: maybe we're called from an unexpected module
3148 # and the call stack isn't deep enough
3149 except (AttributeError, ValueError):
3150 return False # err on the side of leniency
3151 else:
3152 # If we somehow get invoked from outside typing.py,
3153 # also err on the side of leniency
3154 if frame.f_globals.get("__name__") != "typing":
3155 return False
3156 origin = frame.f_locals.get("origin")
3157 # Cannot use "in" because origin may be an object with a buggy __eq__ that
3158 # throws an error.
3159 return origin is typing.Generic or origin is Protocol or origin is typing.Protocol
3160
3161
3162_TYPEVARTUPLE_TYPES = {TypeVarTuple, getattr(typing, "TypeVarTuple", None)}
3163
3164
3165def _is_unpacked_typevartuple(x) -> bool:
3166 if get_origin(x) is not Unpack:
3167 return False
3168 args = get_args(x)
3169 return (
3170 bool(args)
3171 and len(args) == 1
3172 and type(args[0]) in _TYPEVARTUPLE_TYPES
3173 )
3174
3175
# In Python 3.11+, _collect_type_vars was renamed to _collect_parameters
3177if hasattr(typing, '_collect_type_vars'):
3178 def _collect_type_vars(types, typevar_types=None):
        """Collect all type variables contained in types in order of
3180 first appearance (lexicographic order). For example::
3181
3182 _collect_type_vars((T, List[S, T])) == (T, S)
3183 """
3184 if typevar_types is None:
3185 typevar_types = typing.TypeVar
3186 tvars = []
3187
3188 # A required TypeVarLike cannot appear after a TypeVarLike with a default
3189 # if it was a direct call to `Generic[]` or `Protocol[]`
3190 enforce_default_ordering = _has_generic_or_protocol_as_origin()
3191 default_encountered = False
3192
3193 # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
3194 type_var_tuple_encountered = False
3195
3196 for t in types:
3197 if _is_unpacked_typevartuple(t):
3198 type_var_tuple_encountered = True
3199 elif (
3200 isinstance(t, typevar_types) and not isinstance(t, _UnpackAlias)
3201 and t not in tvars
3202 ):
3203 if enforce_default_ordering:
3204 has_default = getattr(t, '__default__', NoDefault) is not NoDefault
3205 if has_default:
3206 if type_var_tuple_encountered:
3207 raise TypeError('Type parameter with a default'
3208 ' follows TypeVarTuple')
3209 default_encountered = True
3210 elif default_encountered:
3211 raise TypeError(f'Type parameter {t!r} without a default'
3212 ' follows type parameter with a default')
3213
3214 tvars.append(t)
3215 if _should_collect_from_parameters(t):
3216 tvars.extend([t for t in t.__parameters__ if t not in tvars])
3217 elif isinstance(t, tuple):
3218 # Collect nested type_vars
3219 # tuple wrapped by _prepare_paramspec_params(cls, params)
3220 for x in t:
3221 for collected in _collect_type_vars([x]):
3222 if collected not in tvars:
3223 tvars.append(collected)
3224 return tuple(tvars)
3225
3226 typing._collect_type_vars = _collect_type_vars
3227else:
3228 def _collect_parameters(args):
3229 """Collect all type variables and parameter specifications in args
3230 in order of first appearance (lexicographic order).
3231
3232 For example::
3233
3234 assert _collect_parameters((T, Callable[P, T])) == (T, P)
3235 """
3236 parameters = []
3237
3238 # A required TypeVarLike cannot appear after a TypeVarLike with default
3239 # if it was a direct call to `Generic[]` or `Protocol[]`
3240 enforce_default_ordering = _has_generic_or_protocol_as_origin()
3241 default_encountered = False
3242
3243 # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
3244 type_var_tuple_encountered = False
3245
3246 for t in args:
3247 if isinstance(t, type):
3248 # We don't want __parameters__ descriptor of a bare Python class.
3249 pass
3250 elif isinstance(t, tuple):
3251 # `t` might be a tuple, when `ParamSpec` is substituted with
3252 # `[T, int]`, or `[int, *Ts]`, etc.
3253 for x in t:
3254 for collected in _collect_parameters([x]):
3255 if collected not in parameters:
3256 parameters.append(collected)
3257 elif hasattr(t, '__typing_subst__'):
3258 if t not in parameters:
3259 if enforce_default_ordering:
3260 has_default = (
3261 getattr(t, '__default__', NoDefault) is not NoDefault
3262 )
3263
3264 if type_var_tuple_encountered and has_default:
3265 raise TypeError('Type parameter with a default'
3266 ' follows TypeVarTuple')
3267
3268 if has_default:
3269 default_encountered = True
3270 elif default_encountered:
3271 raise TypeError(f'Type parameter {t!r} without a default'
3272 ' follows type parameter with a default')
3273
3274 parameters.append(t)
3275 else:
3276 if _is_unpacked_typevartuple(t):
3277 type_var_tuple_encountered = True
3278 for x in getattr(t, '__parameters__', ()):
3279 if x not in parameters:
3280 parameters.append(x)
3281
3282 return tuple(parameters)
3283
3284 if not _PEP_696_IMPLEMENTED:
3285 typing._collect_parameters = _collect_parameters
3286
3287# Backport typing.NamedTuple as it exists in Python 3.13.
# In 3.11, the ability to define generic `NamedTuple`s was added.
# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8.
# In 3.12, __orig_bases__ was added to call-based NamedTuples.
# In 3.13, kwargs-based NamedTuples were deprecated.
3292# Breakpoint: https://github.com/python/cpython/pull/105609
3293if sys.version_info >= (3, 13):
3294 NamedTuple = typing.NamedTuple
3295else:
3296 def _make_nmtuple(name, types, module, defaults=()):
3297 fields = [n for n, t in types]
3298 annotations = {n: typing._type_check(t, f"field {n} annotation must be a type")
3299 for n, t in types}
3300 nm_tpl = collections.namedtuple(name, fields,
3301 defaults=defaults, module=module)
3302 nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations
3303 return nm_tpl
3304
3305 _prohibited_namedtuple_fields = typing._prohibited
3306 _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'})
3307
3308 class _NamedTupleMeta(type):
3309 def __new__(cls, typename, bases, ns):
3310 assert _NamedTuple in bases
3311 for base in bases:
3312 if base is not _NamedTuple and base is not typing.Generic:
3313 raise TypeError(
3314 'can only inherit from a NamedTuple type and Generic')
3315 bases = tuple(tuple if base is _NamedTuple else base for base in bases)
3316 if "__annotations__" in ns:
3317 types = ns["__annotations__"]
3318 elif "__annotate__" in ns:
3319 # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated
3320 types = ns["__annotate__"](1)
3321 else:
3322 types = {}
3323 default_names = []
3324 for field_name in types:
3325 if field_name in ns:
3326 default_names.append(field_name)
3327 elif default_names:
3328 raise TypeError(f"Non-default namedtuple field {field_name} "
3329 f"cannot follow default field"
3330 f"{'s' if len(default_names) > 1 else ''} "
3331 f"{', '.join(default_names)}")
3332 nm_tpl = _make_nmtuple(
3333 typename, types.items(),
3334 defaults=[ns[n] for n in default_names],
3335 module=ns['__module__']
3336 )
3337 nm_tpl.__bases__ = bases
3338 if typing.Generic in bases:
3339 if hasattr(typing, '_generic_class_getitem'): # 3.12+
3340 nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem)
3341 else:
3342 class_getitem = typing.Generic.__class_getitem__.__func__
3343 nm_tpl.__class_getitem__ = classmethod(class_getitem)
3344 # update from user namespace without overriding special namedtuple attributes
3345 for key, val in ns.items():
3346 if key in _prohibited_namedtuple_fields:
3347 raise AttributeError("Cannot overwrite NamedTuple attribute " + key)
3348 elif key not in _special_namedtuple_fields:
3349 if key not in nm_tpl._fields:
3350 setattr(nm_tpl, key, ns[key])
3351 try:
3352 set_name = type(val).__set_name__
3353 except AttributeError:
3354 pass
3355 else:
3356 try:
3357 set_name(val, nm_tpl, key)
3358 except BaseException as e:
3359 msg = (
3360 f"Error calling __set_name__ on {type(val).__name__!r} "
3361 f"instance {key!r} in {typename!r}"
3362 )
3363 # BaseException.add_note() existed on py311,
3364 # but the __set_name__ machinery didn't start
3365 # using add_note() until py312.
3366 # Making sure exceptions are raised in the same way
3367 # as in "normal" classes seems most important here.
3368 # Breakpoint: https://github.com/python/cpython/pull/95915
3369 if sys.version_info >= (3, 12):
3370 e.add_note(msg)
3371 raise
3372 else:
3373 raise RuntimeError(msg) from e
3374
3375 if typing.Generic in bases:
3376 nm_tpl.__init_subclass__()
3377 return nm_tpl
3378
3379 _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {})
3380
3381 def _namedtuple_mro_entries(bases):
3382 assert NamedTuple in bases
3383 return (_NamedTuple,)
3384
3385 def NamedTuple(typename, fields=_marker, /, **kwargs):
3386 """Typed version of namedtuple.
3387
3388 Usage::
3389
3390 class Employee(NamedTuple):
3391 name: str
3392 id: int
3393
3394 This is equivalent to::
3395
3396 Employee = collections.namedtuple('Employee', ['name', 'id'])
3397
3398 The resulting class has an extra __annotations__ attribute, giving a
3399 dict that maps field names to types. (The field names are also in
3400 the _fields attribute, which is part of the namedtuple API.)
3401 An alternative equivalent functional syntax is also accepted::
3402
3403 Employee = NamedTuple('Employee', [('name', str), ('id', int)])
3404 """
3405 if fields is _marker:
3406 if kwargs:
3407 deprecated_thing = "Creating NamedTuple classes using keyword arguments"
3408 deprecation_msg = (
3409 "{name} is deprecated and will be disallowed in Python {remove}. "
3410 "Use the class-based or functional syntax instead."
3411 )
3412 else:
3413 deprecated_thing = "Failing to pass a value for the 'fields' parameter"
3414 example = f"`{typename} = NamedTuple({typename!r}, [])`"
3415 deprecation_msg = (
3416 "{name} is deprecated and will be disallowed in Python {remove}. "
3417 "To create a NamedTuple class with 0 fields "
3418 "using the functional syntax, "
3419 "pass an empty list, e.g. "
3420 ) + example + "."
3421 elif fields is None:
3422 if kwargs:
3423 raise TypeError(
3424 "Cannot pass `None` as the 'fields' parameter "
3425 "and also specify fields using keyword arguments"
3426 )
3427 else:
3428 deprecated_thing = "Passing `None` as the 'fields' parameter"
3429 example = f"`{typename} = NamedTuple({typename!r}, [])`"
3430 deprecation_msg = (
3431 "{name} is deprecated and will be disallowed in Python {remove}. "
3432 "To create a NamedTuple class with 0 fields "
3433 "using the functional syntax, "
3434 "pass an empty list, e.g. "
3435 ) + example + "."
3436 elif kwargs:
3437 raise TypeError("Either list of fields or keywords"
3438 " can be provided to NamedTuple, not both")
3439 if fields is _marker or fields is None:
3440 warnings.warn(
3441 deprecation_msg.format(name=deprecated_thing, remove="3.15"),
3442 DeprecationWarning,
3443 stacklevel=2,
3444 )
3445 fields = kwargs.items()
3446 nt = _make_nmtuple(typename, fields, module=_caller())
3447 nt.__orig_bases__ = (NamedTuple,)
3448 return nt
3449
3450 NamedTuple.__mro_entries__ = _namedtuple_mro_entries
3451
3452
3453if hasattr(collections.abc, "Buffer"):
3454 Buffer = collections.abc.Buffer
3455else:
3456 class Buffer(abc.ABC): # noqa: B024
3457 """Base class for classes that implement the buffer protocol.
3458
3459 The buffer protocol allows Python objects to expose a low-level
3460 memory buffer interface. Before Python 3.12, it is not possible
3461 to implement the buffer protocol in pure Python code, or even
3462 to check whether a class implements the buffer protocol. In
3463 Python 3.12 and higher, the ``__buffer__`` method allows access
3464 to the buffer protocol from Python code, and the
3465 ``collections.abc.Buffer`` ABC allows checking whether a class
3466 implements the buffer protocol.
3467
3468 To indicate support for the buffer protocol in earlier versions,
3469 inherit from this ABC, either in a stub file or at runtime,
3470 or use ABC registration. This ABC provides no methods, because
        there are no Python-accessible methods shared by pre-3.12 buffer
3472 classes. It is useful primarily for static checks.
3473
3474 """
3475
3476 # As a courtesy, register the most common stdlib buffer classes.
3477 Buffer.register(memoryview)
3478 Buffer.register(bytearray)
3479 Buffer.register(bytes)
3480
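# Illustrative runtime checks against the ABC (backported or stdlib):
#
#     isinstance(b"data", Buffer)              # True
#     isinstance(memoryview(b"data"), Buffer)  # True
#     isinstance("text", Buffer)               # False; str is not a buffer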
3481
3482# Backport of types.get_original_bases, available on 3.12+ in CPython
3483if hasattr(_types, "get_original_bases"):
3484 get_original_bases = _types.get_original_bases
3485else:
3486 def get_original_bases(cls, /):
3487 """Return the class's "original" bases prior to modification by `__mro_entries__`.
3488
3489 Examples::
3490
3491 from typing import TypeVar, Generic
3492 from typing_extensions import NamedTuple, TypedDict
3493
3494 T = TypeVar("T")
3495 class Foo(Generic[T]): ...
3496 class Bar(Foo[int], float): ...
3497 class Baz(list[str]): ...
3498 Eggs = NamedTuple("Eggs", [("a", int), ("b", str)])
3499 Spam = TypedDict("Spam", {"a": int, "b": str})
3500
3501 assert get_original_bases(Bar) == (Foo[int], float)
3502 assert get_original_bases(Baz) == (list[str],)
3503 assert get_original_bases(Eggs) == (NamedTuple,)
3504 assert get_original_bases(Spam) == (TypedDict,)
3505 assert get_original_bases(int) == (object,)
3506 """
3507 try:
3508 return cls.__dict__.get("__orig_bases__", cls.__bases__)
3509 except AttributeError:
3510 raise TypeError(
3511 f'Expected an instance of type, not {type(cls).__name__!r}'
3512 ) from None
3513
3514
3515# NewType is a class on Python 3.10+, making it pickleable
3516# The error message for subclassing instances of NewType was improved on 3.11+
3517# Breakpoint: https://github.com/python/cpython/pull/30268
3518if sys.version_info >= (3, 11):
3519 NewType = typing.NewType
3520else:
3521 class NewType:
3522 """NewType creates simple unique types with almost zero
3523 runtime overhead. NewType(name, tp) is considered a subtype of tp
3524 by static type checkers. At runtime, NewType(name, tp) returns
        a dummy callable that simply returns its argument. Usage::

3526 UserId = NewType('UserId', int)
3527 def name_by_id(user_id: UserId) -> str:
3528 ...
3529 UserId('user') # Fails type check
3530 name_by_id(42) # Fails type check
3531 name_by_id(UserId(42)) # OK
3532 num = UserId(5) + 1 # type: int
3533 """
3534
3535 def __call__(self, obj, /):
3536 return obj
3537
3538 def __init__(self, name, tp):
3539 self.__qualname__ = name
3540 if '.' in name:
3541 name = name.rpartition('.')[-1]
3542 self.__name__ = name
3543 self.__supertype__ = tp
3544 def_mod = _caller()
3545 if def_mod != 'typing_extensions':
3546 self.__module__ = def_mod
3547
3548 def __mro_entries__(self, bases):
3549 # We defined __mro_entries__ to get a better error message
3550 # if a user attempts to subclass a NewType instance. bpo-46170
3551 supercls_name = self.__name__
3552
3553 class Dummy:
3554 def __init_subclass__(cls):
3555 subcls_name = cls.__name__
3556 raise TypeError(
3557 f"Cannot subclass an instance of NewType. "
3558 f"Perhaps you were looking for: "
3559 f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`"
3560 )
3561
3562 return (Dummy,)
3563
3564 def __repr__(self):
3565 return f'{self.__module__}.{self.__qualname__}'
3566
3567 def __reduce__(self):
3568 return self.__qualname__
3569
3570 # Breakpoint: https://github.com/python/cpython/pull/21515
3571 if sys.version_info >= (3, 10):
3572 # PEP 604 methods
3573 # It doesn't make sense to have these methods on Python <3.10
3574
3575 def __or__(self, other):
3576 return typing.Union[self, other]
3577
3578 def __ror__(self, other):
3579 return typing.Union[other, self]
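
            # Illustration: with `UserId = NewType('UserId', int)`, the expression
            # `UserId | None` evaluates to typing.Optional[UserId] on 3.10+.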
3580
3581
3582# Breakpoint: https://github.com/python/cpython/pull/124795
3583if sys.version_info >= (3, 14):
3584 TypeAliasType = typing.TypeAliasType
3585# <=3.13
3586else:
3587 # Breakpoint: https://github.com/python/cpython/pull/103764
3588 if sys.version_info >= (3, 12):
3589 # 3.12-3.13
3590 def _is_unionable(obj):
3591 """Corresponds to is_unionable() in unionobject.c in CPython."""
3592 return obj is None or isinstance(obj, (
3593 type,
3594 _types.GenericAlias,
3595 _types.UnionType,
3596 typing.TypeAliasType,
3597 TypeAliasType,
3598 ))
3599 else:
3600 # <=3.11
3601 def _is_unionable(obj):
3602 """Corresponds to is_unionable() in unionobject.c in CPython."""
3603 return obj is None or isinstance(obj, (
3604 type,
3605 _types.GenericAlias,
3606 _types.UnionType,
3607 TypeAliasType,
3608 ))
3609
3610 if sys.version_info < (3, 10):
3611 # Copied and pasted from https://github.com/python/cpython/blob/986a4e1b6fcae7fe7a1d0a26aea446107dd58dd2/Objects/genericaliasobject.c#L568-L582,
3612 # so that we emulate the behaviour of `types.GenericAlias`
3613 # on the latest versions of CPython
3614 _ATTRIBUTE_DELEGATION_EXCLUSIONS = frozenset({
3615 "__class__",
3616 "__bases__",
3617 "__origin__",
3618 "__args__",
3619 "__unpacked__",
3620 "__parameters__",
3621 "__typing_unpacked_tuple_args__",
3622 "__mro_entries__",
3623 "__reduce_ex__",
3624 "__reduce__",
3625 "__copy__",
3626 "__deepcopy__",
3627 })
3628
3629 class _TypeAliasGenericAlias(typing._GenericAlias, _root=True):
3630 def __getattr__(self, attr):
3631 if attr in _ATTRIBUTE_DELEGATION_EXCLUSIONS:
                    return object.__getattribute__(self, attr)
3633 return getattr(self.__origin__, attr)
3634
3635
3636 class TypeAliasType:
3637 """Create named, parameterized type aliases.
3638
3639 This provides a backport of the new `type` statement in Python 3.12:
3640
3641 type ListOrSet[T] = list[T] | set[T]
3642
3643 is equivalent to:
3644
3645 T = TypeVar("T")
3646 ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,))
3647
3648 The name ListOrSet can then be used as an alias for the type it refers to.
3649
3650 The type_params argument should contain all the type parameters used
3651 in the value of the type alias. If the alias is not generic, this
3652 argument is omitted.
3653
3654 Static type checkers should only support type aliases declared using
3655 TypeAliasType that follow these rules:
3656
3657 - The first argument (the name) must be a string literal.
3658 - The TypeAliasType instance must be immediately assigned to a variable
3659 of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid,
3660 as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)').
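
        A generic alias created this way can itself be parameterized, for example::

            IntListOrSet = ListOrSet[int]

        Subscripting returns a generic alias; the alias value is not substituted
        eagerly.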
3661
3662 """
3663
3664 def __init__(self, name: str, value, *, type_params=()):
3665 if not isinstance(name, str):
3666 raise TypeError("TypeAliasType name must be a string")
3667 if not isinstance(type_params, tuple):
3668 raise TypeError("type_params must be a tuple")
3669 self.__value__ = value
3670 self.__type_params__ = type_params
3671
3672 default_value_encountered = False
3673 parameters = []
3674 for type_param in type_params:
3675 if (
3676 not isinstance(type_param, (TypeVar, TypeVarTuple, ParamSpec))
                    # <=3.11: the Unpack backport passes isinstance(type_param, TypeVar),
                    # so unpacked forms must be rejected explicitly here
3679 or _is_unpack(type_param)
3680 ):
3681 raise TypeError(f"Expected a type param, got {type_param!r}")
3682 has_default = (
3683 getattr(type_param, '__default__', NoDefault) is not NoDefault
3684 )
3685 if default_value_encountered and not has_default:
3686 raise TypeError(f"non-default type parameter '{type_param!r}'"
3687 " follows default type parameter")
3688 if has_default:
3689 default_value_encountered = True
3690 if isinstance(type_param, TypeVarTuple):
3691 parameters.extend(type_param)
3692 else:
3693 parameters.append(type_param)
3694 self.__parameters__ = tuple(parameters)
3695 def_mod = _caller()
3696 if def_mod != 'typing_extensions':
3697 self.__module__ = def_mod
            # Setting this attribute last seals the instance against further
            # modification (see __setattr__ below)
3699 self.__name__ = name
3700
3701 def __setattr__(self, name: str, value: object, /) -> None:
3702 if hasattr(self, "__name__"):
3703 self._raise_attribute_error(name)
3704 super().__setattr__(name, value)
3705
3706 def __delattr__(self, name: str, /) -> Never:
3707 self._raise_attribute_error(name)
3708
3709 def _raise_attribute_error(self, name: str) -> Never:
3710 # Match the Python 3.12 error messages exactly
3711 if name == "__name__":
3712 raise AttributeError("readonly attribute")
3713 elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}:
3714 raise AttributeError(
3715 f"attribute '{name}' of 'typing.TypeAliasType' objects "
3716 "is not writable"
3717 )
3718 else:
3719 raise AttributeError(
3720 f"'typing.TypeAliasType' object has no attribute '{name}'"
3721 )
3722
3723 def __repr__(self) -> str:
3724 return self.__name__
3725
3726 if sys.version_info < (3, 11):
3727 def _check_single_param(self, param, recursion=0):
3728 # Allow [], [int], [int, str], [int, ...], [int, T]
3729 if param is ...:
3730 return ...
3731 if param is None:
3732 return None
                # Note: on <=3.9, _ConcatenateGenericAlias inherits from list
3734 if isinstance(param, list) and recursion == 0:
3735 return [self._check_single_param(arg, recursion+1)
3736 for arg in param]
3737 return typing._type_check(
3738 param, f'Subscripting {self.__name__} requires a type.'
3739 )
3740
3741 def _check_parameters(self, parameters):
3742 if sys.version_info < (3, 11):
3743 return tuple(
3744 self._check_single_param(item)
3745 for item in parameters
3746 )
3747 return tuple(typing._type_check(
3748 item, f'Subscripting {self.__name__} requires a type.'
3749 )
3750 for item in parameters
3751 )
3752
3753 def __getitem__(self, parameters):
3754 if not self.__type_params__:
3755 raise TypeError("Only generic type aliases are subscriptable")
3756 if not isinstance(parameters, tuple):
3757 parameters = (parameters,)
            # On 3.9, using types.GenericAlias here would create problems with Concatenate
3759 if sys.version_info >= (3, 10):
3760 return _types.GenericAlias(self, parameters)
3761 type_vars = _collect_type_vars(parameters)
3762 parameters = self._check_parameters(parameters)
3763 alias = _TypeAliasGenericAlias(self, parameters)
            # alias.__parameters__ is incomplete when Concatenate is present,
            # because the Concatenate part is converted to a plain list, from
            # which no parameters are extracted.
3766 if alias.__parameters__ != type_vars:
3767 alias.__parameters__ = type_vars
3768 return alias
3769
3770 def __reduce__(self):
3771 return self.__name__
3772
3773 def __init_subclass__(cls, *args, **kwargs):
3774 raise TypeError(
3775 "type 'typing_extensions.TypeAliasType' is not an acceptable base type"
3776 )
3777
3778 # The presence of this method convinces typing._type_check
3779 # that TypeAliasTypes are types.
3780 def __call__(self):
3781 raise TypeError("Type alias is not callable")
3782
3783 # Breakpoint: https://github.com/python/cpython/pull/21515
3784 if sys.version_info >= (3, 10):
3785 def __or__(self, right):
3786 # For forward compatibility with 3.12, reject Unions
3787 # that are not accepted by the built-in Union.
3788 if not _is_unionable(right):
3789 return NotImplemented
3790 return typing.Union[self, right]
3791
3792 def __ror__(self, left):
3793 if not _is_unionable(left):
3794 return NotImplemented
3795 return typing.Union[left, self]
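
            # For instance, `Alias | int` works, but `Alias | "int"` returns
            # NotImplemented (and so raises TypeError), matching the behaviour
            # of the built-in TypeAliasType on 3.12.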
3796
3797
3798if hasattr(typing, "is_protocol"):
3799 is_protocol = typing.is_protocol
3800 get_protocol_members = typing.get_protocol_members
3801else:
3802 def is_protocol(tp: type, /) -> bool:
3803 """Return True if the given type is a Protocol.
3804
3805 Example::
3806
3807 >>> from typing_extensions import Protocol, is_protocol
3808 >>> class P(Protocol):
3809 ... def a(self) -> str: ...
3810 ... b: int
3811 >>> is_protocol(P)
3812 True
3813 >>> is_protocol(int)
3814 False
3815 """
3816 return (
3817 isinstance(tp, type)
3818 and getattr(tp, '_is_protocol', False)
3819 and tp is not Protocol
3820 and tp is not typing.Protocol
3821 )
3822
3823 def get_protocol_members(tp: type, /) -> typing.FrozenSet[str]:
3824 """Return the set of members defined in a Protocol.
3825
3826 Example::
3827
3828 >>> from typing_extensions import Protocol, get_protocol_members
3829 >>> class P(Protocol):
3830 ... def a(self) -> str: ...
3831 ... b: int
3832 >>> get_protocol_members(P)
3833 frozenset({'a', 'b'})
3834
3835 Raise a TypeError for arguments that are not Protocols.
3836 """
3837 if not is_protocol(tp):
3838 raise TypeError(f'{tp!r} is not a Protocol')
3839 if hasattr(tp, '__protocol_attrs__'):
3840 return frozenset(tp.__protocol_attrs__)
3841 return frozenset(_get_protocol_attrs(tp))
3842
3843
3844if hasattr(typing, "Doc"):
3845 Doc = typing.Doc
3846else:
3847 class Doc:
3848 """Define the documentation of a type annotation using ``Annotated``, to be
3849 used in class attributes, function and method parameters, return values,
3850 and variables.
3851
3852 The value should be a positional-only string literal to allow static tools
3853 like editors and documentation generators to use it.
3854
3855 This complements docstrings.
3856
3857 The string value passed is available in the attribute ``documentation``.
3858
3859 Example::
3860
3861 >>> from typing_extensions import Annotated, Doc
3862 >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ...
3863 """
3864 def __init__(self, documentation: str, /) -> None:
3865 self.documentation = documentation
3866
3867 def __repr__(self) -> str:
3868 return f"Doc({self.documentation!r})"
3869
3870 def __hash__(self) -> int:
3871 return hash(self.documentation)
3872
3873 def __eq__(self, other: object) -> bool:
3874 if not isinstance(other, Doc):
3875 return NotImplemented
3876 return self.documentation == other.documentation
3877
3878
3879_CapsuleType = getattr(_types, "CapsuleType", None)
3880
3881if _CapsuleType is None:
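    # types.CapsuleType only exists on Python 3.13+; on older versions, recover
    # the capsule type from a known capsule object (_socket's C API capsule).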
3882 try:
3883 import _socket
3884 except ImportError:
3885 pass
3886 else:
3887 _CAPI = getattr(_socket, "CAPI", None)
3888 if _CAPI is not None:
3889 _CapsuleType = type(_CAPI)
3890
3891if _CapsuleType is not None:
3892 CapsuleType = _CapsuleType
3893 __all__.append("CapsuleType")
3894
3895
3896if sys.version_info >= (3, 14):
3897 from annotationlib import Format, get_annotations
3898else:
3899 # Available since Python 3.14.0a3
3900 # PR: https://github.com/python/cpython/pull/124415
3901 class Format(enum.IntEnum):
3902 VALUE = 1
3903 VALUE_WITH_FAKE_GLOBALS = 2
3904 FORWARDREF = 3
3905 STRING = 4
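
    # VALUE, FORWARDREF and STRING correspond to the semantics described in the
    # get_annotations() docstring below; VALUE_WITH_FAKE_GLOBALS is internal-only.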
3906
3907 # Available since Python 3.14.0a1
3908 # PR: https://github.com/python/cpython/pull/119891
3909 def get_annotations(obj, *, globals=None, locals=None, eval_str=False,
3910 format=Format.VALUE):
3911 """Compute the annotations dict for an object.
3912
3913 obj may be a callable, class, or module.
3914 Passing in an object of any other type raises TypeError.
3915
3916 Returns a dict. get_annotations() returns a new dict every time
3917 it's called; calling it twice on the same object will return two
3918 different but equivalent dicts.
3919
3920 This is a backport of `inspect.get_annotations`, which has been
3921 in the standard library since Python 3.10. See the standard library
3922 documentation for more:
3923
3924 https://docs.python.org/3/library/inspect.html#inspect.get_annotations
3925
3926 This backport adds the *format* argument introduced by PEP 649. The
3927 three formats supported are:
3928 * VALUE: the annotations are returned as-is. This is the default and
3929 it is compatible with the behavior on previous Python versions.
3930 * FORWARDREF: return annotations as-is if possible, but replace any
3931 undefined names with ForwardRef objects. The implementation proposed by
3932 PEP 649 relies on language changes that cannot be backported; the
3933 typing-extensions implementation simply returns the same result as VALUE.
3934 * STRING: return annotations as strings, in a format close to the original
3935 source. Again, this behavior cannot be replicated directly in a backport.
3936 As an approximation, typing-extensions retrieves the annotations under
3937 VALUE semantics and then stringifies them.
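
        As an illustration: for ``def f(x: int) -> None: ...``, requesting the
        STRING format from this backport yields ``{'x': 'int', 'return': 'None'}``.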
3938
3939 The purpose of this backport is to allow users who would like to use
3940 FORWARDREF or STRING semantics once PEP 649 is implemented, but who also
3941 want to support earlier Python versions, to simply write:
3942
3943 typing_extensions.get_annotations(obj, format=Format.FORWARDREF)
3944
3945 """
3946 format = Format(format)
3947 if format is Format.VALUE_WITH_FAKE_GLOBALS:
3948 raise ValueError(
3949 "The VALUE_WITH_FAKE_GLOBALS format is for internal use only"
3950 )
3951
3952 if eval_str and format is not Format.VALUE:
3953 raise ValueError("eval_str=True is only supported with format=Format.VALUE")
3954
3955 if isinstance(obj, type):
3956 # class
3957 obj_dict = getattr(obj, '__dict__', None)
3958 if obj_dict and hasattr(obj_dict, 'get'):
3959 ann = obj_dict.get('__annotations__', None)
3960 if isinstance(ann, _types.GetSetDescriptorType):
3961 ann = None
3962 else:
3963 ann = None
3964
3965 obj_globals = None
3966 module_name = getattr(obj, '__module__', None)
3967 if module_name:
3968 module = sys.modules.get(module_name, None)
3969 if module:
3970 obj_globals = getattr(module, '__dict__', None)
3971 obj_locals = dict(vars(obj))
3972 unwrap = obj
3973 elif isinstance(obj, _types.ModuleType):
3974 # module
3975 ann = getattr(obj, '__annotations__', None)
3976 obj_globals = obj.__dict__
3977 obj_locals = None
3978 unwrap = None
3979 elif callable(obj):
            # this includes types.FunctionType, types.BuiltinFunctionType,
3981 # types.BuiltinMethodType, functools.partial, functools.singledispatch,
3982 # "class funclike" from Lib/test/test_inspect... on and on it goes.
3983 ann = getattr(obj, '__annotations__', None)
3984 obj_globals = getattr(obj, '__globals__', None)
3985 obj_locals = None
3986 unwrap = obj
3987 elif hasattr(obj, '__annotations__'):
3988 ann = obj.__annotations__
3989 obj_globals = obj_locals = unwrap = None
3990 else:
3991 raise TypeError(f"{obj!r} is not a module, class, or callable.")
3992
3993 if ann is None:
3994 return {}
3995
3996 if not isinstance(ann, dict):
3997 raise ValueError(f"{obj!r}.__annotations__ is neither a dict nor None")
3998
3999 if not ann:
4000 return {}
4001
4002 if not eval_str:
4003 if format is Format.STRING:
4004 return {
4005 key: value if isinstance(value, str) else typing._type_repr(value)
4006 for key, value in ann.items()
4007 }
4008 return dict(ann)
4009
4010 if unwrap is not None:
4011 while True:
4012 if hasattr(unwrap, '__wrapped__'):
4013 unwrap = unwrap.__wrapped__
4014 continue
4015 if isinstance(unwrap, functools.partial):
4016 unwrap = unwrap.func
4017 continue
4018 break
4019 if hasattr(unwrap, "__globals__"):
4020 obj_globals = unwrap.__globals__
4021
4022 if globals is None:
4023 globals = obj_globals
4024 if locals is None:
4025 locals = obj_locals or {}
4026
4027 # "Inject" type parameters into the local namespace
4028 # (unless they are shadowed by assignments *in* the local namespace),
4029 # as a way of emulating annotation scopes when calling `eval()`
4030 if type_params := getattr(obj, "__type_params__", ()):
4031 locals = {param.__name__: param for param in type_params} | locals
4032
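        # eval_str is True here: evaluate string annotations in the namespaces
        # computed above; non-string values pass through unchanged.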
        return_value = {
            key: value if not isinstance(value, str) else eval(value, globals, locals)
            for key, value in ann.items()
        }
4036 return return_value
4037
4038
4039if hasattr(typing, "evaluate_forward_ref"):
4040 evaluate_forward_ref = typing.evaluate_forward_ref
4041else:
4042 # Implements annotationlib.ForwardRef.evaluate
4043 def _eval_with_owner(
4044 forward_ref, *, owner=None, globals=None, locals=None, type_params=None
4045 ):
4046 if forward_ref.__forward_evaluated__:
4047 return forward_ref.__forward_value__
4048 if getattr(forward_ref, "__cell__", None) is not None:
4049 try:
4050 value = forward_ref.__cell__.cell_contents
4051 except ValueError:
4052 pass
4053 else:
4054 forward_ref.__forward_evaluated__ = True
4055 forward_ref.__forward_value__ = value
4056 return value
4057 if owner is None:
4058 owner = getattr(forward_ref, "__owner__", None)
4059
4060 if (
4061 globals is None
4062 and getattr(forward_ref, "__forward_module__", None) is not None
4063 ):
4064 globals = getattr(
4065 sys.modules.get(forward_ref.__forward_module__, None), "__dict__", None
4066 )
4067 if globals is None:
4068 globals = getattr(forward_ref, "__globals__", None)
4069 if globals is None:
4070 if isinstance(owner, type):
4071 module_name = getattr(owner, "__module__", None)
4072 if module_name:
4073 module = sys.modules.get(module_name, None)
4074 if module:
4075 globals = getattr(module, "__dict__", None)
4076 elif isinstance(owner, _types.ModuleType):
4077 globals = getattr(owner, "__dict__", None)
4078 elif callable(owner):
4079 globals = getattr(owner, "__globals__", None)
4080
4081 # If we pass None to eval() below, the globals of this module are used.
4082 if globals is None:
4083 globals = {}
4084
4085 if locals is None:
4086 locals = {}
4087 if isinstance(owner, type):
4088 locals.update(vars(owner))
4089
4090 if type_params is None and owner is not None:
4091 # "Inject" type parameters into the local namespace
4092 # (unless they are shadowed by assignments *in* the local namespace),
4093 # as a way of emulating annotation scopes when calling `eval()`
4094 type_params = getattr(owner, "__type_params__", None)
4095
4096 # Type parameters exist in their own scope, which is logically
4097 # between the locals and the globals. We simulate this by adding
4098 # them to the globals.
4099 if type_params is not None:
4100 globals = dict(globals)
4101 for param in type_params:
4102 globals[param.__name__] = param
4103
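        # Bare identifiers are resolved by direct namespace lookup (locals, then
        # globals, then builtins); anything more complex is compiled and eval()-ed.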
4104 arg = forward_ref.__forward_arg__
4105 if arg.isidentifier() and not keyword.iskeyword(arg):
4106 if arg in locals:
4107 value = locals[arg]
4108 elif arg in globals:
4109 value = globals[arg]
4110 elif hasattr(builtins, arg):
4111 return getattr(builtins, arg)
4112 else:
4113 raise NameError(arg)
4114 else:
4115 code = forward_ref.__forward_code__
4116 value = eval(code, globals, locals)
4117 forward_ref.__forward_evaluated__ = True
4118 forward_ref.__forward_value__ = value
4119 return value
4120
4121 def evaluate_forward_ref(
4122 forward_ref,
4123 *,
4124 owner=None,
4125 globals=None,
4126 locals=None,
4127 type_params=None,
4128 format=None,
4129 _recursive_guard=frozenset(),
4130 ):
4131 """Evaluate a forward reference as a type hint.
4132
4133 This is similar to calling the ForwardRef.evaluate() method,
4134 but unlike that method, evaluate_forward_ref() also:
4135
4136 * Recursively evaluates forward references nested within the type hint.
4137 * Rejects certain objects that are not valid type hints.
4138 * Replaces type hints that evaluate to None with types.NoneType.
4139 * Supports the *FORWARDREF* and *STRING* formats.
4140
4141 *forward_ref* must be an instance of ForwardRef. *owner*, if given,
4142 should be the object that holds the annotations that the forward reference
4143 derived from, such as a module, class object, or function. It is used to
4144 infer the namespaces to use for looking up names. *globals* and *locals*
4145 can also be explicitly given to provide the global and local namespaces.
4146 *type_params* is a tuple of type parameters that are in scope when
4147 evaluating the forward reference. This parameter must be provided (though
4148 it may be an empty tuple) if *owner* is not given and the forward reference
4149 does not already have an owner set. *format* specifies the format of the
4150 annotation and is a member of the annotationlib.Format enum.
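
        A minimal illustration::

            from typing_extensions import ForwardRef, evaluate_forward_ref

            assert evaluate_forward_ref(ForwardRef("int"), type_params=()) is int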
4151
4152 """
4153 if format == Format.STRING:
4154 return forward_ref.__forward_arg__
4155 if forward_ref.__forward_arg__ in _recursive_guard:
4156 return forward_ref
4157
4158 # Evaluate the forward reference
4159 try:
4160 value = _eval_with_owner(
4161 forward_ref,
4162 owner=owner,
4163 globals=globals,
4164 locals=locals,
4165 type_params=type_params,
4166 )
4167 except NameError:
4168 if format == Format.FORWARDREF:
4169 return forward_ref
4170 else:
4171 raise
4172
4173 if isinstance(value, str):
4174 value = ForwardRef(value)
4175
4176 # Recursively evaluate the type
4177 if isinstance(value, ForwardRef):
4178 if getattr(value, "__forward_module__", True) is not None:
4179 globals = None
4180 return evaluate_forward_ref(
4181 value,
4182 globals=globals,
4183 locals=locals,
4184 type_params=type_params, owner=owner,
4185 _recursive_guard=_recursive_guard, format=format
4186 )
4187 if sys.version_info < (3, 12, 5) and type_params:
            # typing._eval_type on these versions does not accept type_params,
            # so inject them into the local namespace instead.
            locals = dict(locals) if locals else {}
            for tvar in type_params:
                if tvar.__name__ not in locals:  # don't overwrite anything already present
4192 locals[tvar.__name__] = tvar
4193 if sys.version_info < (3, 12, 5):
4194 return typing._eval_type(
4195 value,
4196 globals,
4197 locals,
4198 recursive_guard=_recursive_guard | {forward_ref.__forward_arg__},
4199 )
4200 else:
4201 return typing._eval_type(
4202 value,
4203 globals,
4204 locals,
4205 type_params,
4206 recursive_guard=_recursive_guard | {forward_ref.__forward_arg__},
4207 )
4208
4209
4210class Sentinel:
4211 """Create a unique sentinel object.
4212
4213 *name* should be the name of the variable to which the return value shall be assigned.
4214
4215 *repr*, if supplied, will be used for the repr of the sentinel object.
4216 If not provided, "<name>" will be used.
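
    Example::

        MISSING = Sentinel('MISSING')

        def func(arg=MISSING):
            if arg is MISSING:
                ...  # no argument was passed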
4217 """
4218
4219 def __init__(
4220 self,
4221 name: str,
4222 repr: typing.Optional[str] = None,
4223 ):
4224 self._name = name
4225 self._repr = repr if repr is not None else f'<{name}>'
4226
4227 def __repr__(self):
4228 return self._repr
4229
4230 if sys.version_info < (3, 11):
4231 # The presence of this method convinces typing._type_check
4232 # that Sentinels are types.
4233 def __call__(self, *args, **kwargs):
4234 raise TypeError(f"{type(self).__name__!r} object is not callable")
4235
4236 # Breakpoint: https://github.com/python/cpython/pull/21515
4237 if sys.version_info >= (3, 10):
4238 def __or__(self, other):
4239 return typing.Union[self, other]
4240
4241 def __ror__(self, other):
4242 return typing.Union[other, self]
4243
4244 def __getstate__(self):
4245 raise TypeError(f"Cannot pickle {type(self).__name__!r} object")
4246
4247
4248if sys.version_info >= (3, 14, 0, "beta"):
4249 type_repr = annotationlib.type_repr
4250else:
4251 def type_repr(value):
4252 """Convert a Python value to a format suitable for use with the STRING format.
4253
4254 This is intended as a helper for tools that support the STRING format but do
4255 not have access to the code that originally produced the annotations. It uses
4256 repr() for most objects.
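
        For example::

            >>> type_repr(int)
            'int'
            >>> type_repr(len)
            'len'
            >>> type_repr(...)
            '...'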
4257
4258 """
4259 if isinstance(value, (type, _types.FunctionType, _types.BuiltinFunctionType)):
4260 if value.__module__ == "builtins":
4261 return value.__qualname__
4262 return f"{value.__module__}.{value.__qualname__}"
4263 if value is ...:
4264 return "..."
4265 return repr(value)
4266
4267
4268# Aliases for items that are in typing in all supported versions.
4269# We use hasattr() checks so this library will continue to import on
4270# future versions of Python that may remove these names.
4271_typing_names = [
4272 "AbstractSet",
4273 "AnyStr",
4274 "BinaryIO",
4275 "Callable",
4276 "Collection",
4277 "Container",
4278 "Dict",
4279 "FrozenSet",
4280 "Hashable",
4281 "IO",
4282 "ItemsView",
4283 "Iterable",
4284 "Iterator",
4285 "KeysView",
4286 "List",
4287 "Mapping",
4288 "MappingView",
4289 "Match",
4290 "MutableMapping",
4291 "MutableSequence",
4292 "MutableSet",
4293 "Optional",
4294 "Pattern",
4295 "Reversible",
4296 "Sequence",
4297 "Set",
4298 "Sized",
4299 "TextIO",
4300 "Tuple",
4301 "Union",
4302 "ValuesView",
4303 "cast",
4304 "no_type_check",
4305 "no_type_check_decorator",
4306 # This is private, but it was defined by typing_extensions for a long time
4307 # and some users rely on it.
4308 "_AnnotatedAlias",
4309]
4310globals().update(
4311 {name: getattr(typing, name) for name in _typing_names if hasattr(typing, name)}
4312)
4313# These are defined unconditionally because they are used in
4314# typing-extensions itself.
4315Generic = typing.Generic
4316ForwardRef = typing.ForwardRef
4317Annotated = typing.Annotated