import abc
import builtins
import collections
import collections.abc
import contextlib
import enum
import functools
import inspect
import io
import keyword
import operator
import sys
import types as _types
import typing
import warnings

# Breakpoint: https://github.com/python/cpython/pull/119891
if sys.version_info >= (3, 14):
    import annotationlib

__all__ = [
    # Super-special typing primitives.
    'Any',
    'ClassVar',
    'Concatenate',
    'Final',
    'LiteralString',
    'ParamSpec',
    'ParamSpecArgs',
    'ParamSpecKwargs',
    'Self',
    'Type',
    'TypeVar',
    'TypeVarTuple',
    'Unpack',

    # ABCs (from collections.abc).
    'Awaitable',
    'AsyncIterator',
    'AsyncIterable',
    'Coroutine',
    'AsyncGenerator',
    'AsyncContextManager',
    'Buffer',
    'ChainMap',

    # Concrete collection types.
    'ContextManager',
    'Counter',
    'Deque',
    'DefaultDict',
    'NamedTuple',
    'OrderedDict',
    'TypedDict',

    # Structural checks, a.k.a. protocols.
    'SupportsAbs',
    'SupportsBytes',
    'SupportsComplex',
    'SupportsFloat',
    'SupportsIndex',
    'SupportsInt',
    'SupportsRound',
    'Reader',
    'Writer',

    # One-off things.
    'Annotated',
    'assert_never',
    'assert_type',
    'clear_overloads',
    'dataclass_transform',
    'deprecated',
    'disjoint_base',
    'Doc',
    'evaluate_forward_ref',
    'get_overloads',
    'final',
    'Format',
    'get_annotations',
    'get_args',
    'get_origin',
    'get_original_bases',
    'get_protocol_members',
    'get_type_hints',
    'IntVar',
    'is_protocol',
    'is_typeddict',
    'Literal',
    'NewType',
    'overload',
    'override',
    'Protocol',
    'Sentinel',
    'reveal_type',
    'runtime',
    'runtime_checkable',
    'Text',
    'TypeAlias',
    'TypeAliasType',
    'TypeForm',
    'TypeGuard',
    'TypeIs',
    'TYPE_CHECKING',
    'type_repr',
    'Never',
    'NoReturn',
    'ReadOnly',
    'Required',
    'NotRequired',
    'NoDefault',
    'NoExtraItems',

    # Pure aliases, have always been in typing
    'AbstractSet',
    'AnyStr',
    'BinaryIO',
    'Callable',
    'Collection',
    'Container',
    'Dict',
    'ForwardRef',
    'FrozenSet',
    'Generator',
    'Generic',
    'Hashable',
    'IO',
    'ItemsView',
    'Iterable',
    'Iterator',
    'KeysView',
    'List',
    'Mapping',
    'MappingView',
    'Match',
    'MutableMapping',
    'MutableSequence',
    'MutableSet',
    'Optional',
    'Pattern',
    'Reversible',
    'Sequence',
    'Set',
    'Sized',
    'TextIO',
    'Tuple',
    'Union',
    'ValuesView',
    'cast',
    'no_type_check',
    'no_type_check_decorator',
]

# for backward compatibility
PEP_560 = True
GenericMeta = type
# Breakpoint: https://github.com/python/cpython/pull/116129
_PEP_696_IMPLEMENTED = sys.version_info >= (3, 13, 0, "beta")

# Added with bpo-45166 to 3.10.1+ and some 3.9 versions
_FORWARD_REF_HAS_CLASS = "__forward_is_class__" in typing.ForwardRef.__slots__

class Sentinel:
    """Create a unique sentinel object.

    *name* should be the name of the variable to which the return value shall be assigned.

    *repr*, if supplied, will be used for the repr of the sentinel object.
    If not provided, "<name>" will be used.
    """

    def __init__(
        self,
        name: str,
        repr: typing.Optional[str] = None,
    ):
        self._name = name
        self._repr = repr if repr is not None else f'<{name}>'

    def __repr__(self):
        return self._repr

    if sys.version_info < (3, 11):
        # The presence of this method convinces typing._type_check
        # that Sentinels are types.
        def __call__(self, *args, **kwargs):
            raise TypeError(f"{type(self).__name__!r} object is not callable")

    # Breakpoint: https://github.com/python/cpython/pull/21515
    if sys.version_info >= (3, 10):
        def __or__(self, other):
            return typing.Union[self, other]

        def __ror__(self, other):
            return typing.Union[other, self]

    def __getstate__(self):
        raise TypeError(f"Cannot pickle {type(self).__name__!r} object")


_marker = Sentinel("sentinel")
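
# Usage sketch (illustrative only; ``MISSING`` is a hypothetical name, not part
# of this module's API):
#
#     MISSING = Sentinel("MISSING")
#
#     def get_setting(name, default=MISSING):
#         if default is MISSING:
#             raise KeyError(name)
#         return default
#
# repr(MISSING) is then "<MISSING>", which distinguishes the marker from None
# in error messages and debugger output.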

# The functions below are modified copies of typing internal helpers.
# They are needed by _ProtocolMeta and they provide support for PEP 646.

# Breakpoint: https://github.com/python/cpython/pull/27342
if sys.version_info >= (3, 10):
    def _should_collect_from_parameters(t):
        return isinstance(
            t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType)
        )
else:
    def _should_collect_from_parameters(t):
        return isinstance(t, (typing._GenericAlias, _types.GenericAlias))


NoReturn = typing.NoReturn

# Some unconstrained type variables. These are used by the container types.
# (These are not for export.)
T = typing.TypeVar('T')  # Any type.
KT = typing.TypeVar('KT')  # Key type.
VT = typing.TypeVar('VT')  # Value type.
T_co = typing.TypeVar('T_co', covariant=True)  # Any type covariant containers.
T_contra = typing.TypeVar('T_contra', contravariant=True)  # Ditto contravariant.


# Breakpoint: https://github.com/python/cpython/pull/31841
if sys.version_info >= (3, 11):
    from typing import Any
else:

    class _AnyMeta(type):
        def __instancecheck__(self, obj):
            if self is Any:
                raise TypeError("typing_extensions.Any cannot be used with isinstance()")
            return super().__instancecheck__(obj)

        def __repr__(self):
            if self is Any:
                return "typing_extensions.Any"
            return super().__repr__()

    class Any(metaclass=_AnyMeta):
        """Special type indicating an unconstrained type.
        - Any is compatible with every type.
        - Any assumed to have all methods.
        - All values assumed to be instances of Any.
        Note that all the above statements are true from the point of view of
        static type checkers. At runtime, Any should not be used with instance
        checks.
        """
        def __new__(cls, *args, **kwargs):
            if cls is Any:
                raise TypeError("Any cannot be instantiated")
            return super().__new__(cls, *args, **kwargs)


ClassVar = typing.ClassVar

# Vendored from cpython typing._SpecialForm
# Having a separate class means that instances will not be rejected by
# typing._type_check.
class _SpecialForm(typing._Final, _root=True):
    __slots__ = ('_name', '__doc__', '_getitem')

    def __init__(self, getitem):
        self._getitem = getitem
        self._name = getitem.__name__
        self.__doc__ = getitem.__doc__

    def __getattr__(self, item):
        if item in {'__name__', '__qualname__'}:
            return self._name

        raise AttributeError(item)

    def __mro_entries__(self, bases):
        raise TypeError(f"Cannot subclass {self!r}")

    def __repr__(self):
        return f'typing_extensions.{self._name}'

    def __reduce__(self):
        return self._name

    def __call__(self, *args, **kwds):
        raise TypeError(f"Cannot instantiate {self!r}")

    def __or__(self, other):
        return typing.Union[self, other]

    def __ror__(self, other):
        return typing.Union[other, self]

    def __instancecheck__(self, obj):
        raise TypeError(f"{self} cannot be used with isinstance()")

    def __subclasscheck__(self, cls):
        raise TypeError(f"{self} cannot be used with issubclass()")

    @typing._tp_cache
    def __getitem__(self, parameters):
        return self._getitem(self, parameters)


# Note that inheriting from this class means that the object will be
# rejected by typing._type_check, so do not use it if the special form
# is arguably valid as a type by itself.
class _ExtensionsSpecialForm(typing._SpecialForm, _root=True):
    def __repr__(self):
        return 'typing_extensions.' + self._name


Final = typing.Final

# Breakpoint: https://github.com/python/cpython/pull/30530
if sys.version_info >= (3, 11):
    final = typing.final
else:
    # @final exists in 3.8+, but we backport it for all versions
    # before 3.11 to keep support for the __final__ attribute.
    # See https://bugs.python.org/issue46342
    def final(f):
        """This decorator can be used to indicate to type checkers that
        the decorated method cannot be overridden, and decorated class
        cannot be subclassed. For example:

            class Base:
                @final
                def done(self) -> None:
                    ...
            class Sub(Base):
                def done(self) -> None:  # Error reported by type checker
                    ...
            @final
            class Leaf:
                ...
            class Other(Leaf):  # Error reported by type checker
                ...

        There is no runtime checking of these properties. The decorator
        sets the ``__final__`` attribute to ``True`` on the decorated object
        to allow runtime introspection.
        """
        try:
            f.__final__ = True
        except (AttributeError, TypeError):
            # Skip the attribute silently if it is not writable.
            # AttributeError happens if the object has __slots__ or a
            # read-only property, TypeError if it's a builtin class.
            pass
        return f


if hasattr(typing, "disjoint_base"):  # 3.15
    disjoint_base = typing.disjoint_base
else:
    def disjoint_base(cls):
        """This decorator marks a class as a disjoint base.

        Child classes of a disjoint base cannot inherit from other disjoint bases that are
        not parent classes of the disjoint base.

        For example:

            @disjoint_base
            class Disjoint1: pass

            @disjoint_base
            class Disjoint2: pass

            class Disjoint3(Disjoint1, Disjoint2): pass  # Type checker error

        Type checkers can use knowledge of disjoint bases to detect unreachable code
        and determine when two types can overlap.

        See PEP 800."""
        cls.__disjoint_base__ = True
        return cls


def IntVar(name):
    return typing.TypeVar(name)


# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8
# Breakpoint: https://github.com/python/cpython/pull/29334
if sys.version_info >= (3, 10, 1):
    Literal = typing.Literal
else:
    def _flatten_literal_params(parameters):
        """An internal helper for Literal creation: flatten Literals among parameters"""
        params = []
        for p in parameters:
            if isinstance(p, _LiteralGenericAlias):
                params.extend(p.__args__)
            else:
                params.append(p)
        return tuple(params)

    def _value_and_type_iter(params):
        for p in params:
            yield p, type(p)

    class _LiteralGenericAlias(typing._GenericAlias, _root=True):
        def __eq__(self, other):
            if not isinstance(other, _LiteralGenericAlias):
                return NotImplemented
            these_args_deduped = set(_value_and_type_iter(self.__args__))
            other_args_deduped = set(_value_and_type_iter(other.__args__))
            return these_args_deduped == other_args_deduped

        def __hash__(self):
            return hash(frozenset(_value_and_type_iter(self.__args__)))

    class _LiteralForm(_ExtensionsSpecialForm, _root=True):
        def __init__(self, doc: str):
            self._name = 'Literal'
            self._doc = self.__doc__ = doc

        def __getitem__(self, parameters):
            if not isinstance(parameters, tuple):
                parameters = (parameters,)

            parameters = _flatten_literal_params(parameters)

            val_type_pairs = list(_value_and_type_iter(parameters))
            try:
                deduped_pairs = set(val_type_pairs)
            except TypeError:
                # unhashable parameters
                pass
            else:
                # similar logic to typing._deduplicate on Python 3.9+
                if len(deduped_pairs) < len(val_type_pairs):
                    new_parameters = []
                    for pair in val_type_pairs:
                        if pair in deduped_pairs:
                            new_parameters.append(pair[0])
                            deduped_pairs.remove(pair)
                    assert not deduped_pairs, deduped_pairs
                    parameters = tuple(new_parameters)

            return _LiteralGenericAlias(self, parameters)

    Literal = _LiteralForm(doc="""\
                           A type that can be used to indicate to type checkers
                           that the corresponding value has a value literally equivalent
                           to the provided parameter. For example:

                               var: Literal[4] = 4

                           The type checker understands that 'var' is literally equal to
                           the value 4 and no other value.

                           Literal[...] cannot be subclassed. There is no runtime
                           checking verifying that the parameter is actually a value
                           instead of a type.""")


_overload_dummy = typing._overload_dummy


if hasattr(typing, "get_overloads"):  # 3.11+
    overload = typing.overload
    get_overloads = typing.get_overloads
    clear_overloads = typing.clear_overloads
else:
    # {module: {qualname: {firstlineno: func}}}
    _overload_registry = collections.defaultdict(
        functools.partial(collections.defaultdict, dict)
    )

    def overload(func):
        """Decorator for overloaded functions/methods.

        In a stub file, place two or more stub definitions for the same
        function in a row, each decorated with @overload. For example:

            @overload
            def utf8(value: None) -> None: ...
            @overload
            def utf8(value: bytes) -> bytes: ...
            @overload
            def utf8(value: str) -> bytes: ...

        In a non-stub file (i.e. a regular .py file), do the same but
        follow it with an implementation. The implementation should *not*
        be decorated with @overload. For example:

            @overload
            def utf8(value: None) -> None: ...
            @overload
            def utf8(value: bytes) -> bytes: ...
            @overload
            def utf8(value: str) -> bytes: ...
            def utf8(value):
                # implementation goes here

        The overloads for a function can be retrieved at runtime using the
        get_overloads() function.
        """
        # classmethod and staticmethod
        f = getattr(func, "__func__", func)
        try:
            _overload_registry[f.__module__][f.__qualname__][
                f.__code__.co_firstlineno
            ] = func
        except AttributeError:
            # Not a normal function; ignore.
            pass
        return _overload_dummy

    def get_overloads(func):
        """Return all defined overloads for *func* as a sequence."""
        # classmethod and staticmethod
        f = getattr(func, "__func__", func)
        if f.__module__ not in _overload_registry:
            return []
        mod_dict = _overload_registry[f.__module__]
        if f.__qualname__ not in mod_dict:
            return []
        return list(mod_dict[f.__qualname__].values())

    def clear_overloads():
        """Clear all overloads in the registry."""
        _overload_registry.clear()
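
    # Usage sketch (illustrative; ``process`` is a hypothetical function): the
    # registry above lets overloads be recovered and discarded at runtime, e.g.
    #
    #     @overload
    #     def process(x: int) -> int: ...
    #     @overload
    #     def process(x: str) -> str: ...
    #     def process(x):
    #         return x
    #
    #     len(get_overloads(process))  # 2
    #     clear_overloads()            # the registry is empty again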


# This is not a real generic class. Don't use outside annotations.
Type = typing.Type

# Various ABCs mimicking those in collections.abc.
# A few are simply re-exported for completeness.
Awaitable = typing.Awaitable
Coroutine = typing.Coroutine
AsyncIterable = typing.AsyncIterable
AsyncIterator = typing.AsyncIterator
Deque = typing.Deque
DefaultDict = typing.DefaultDict
OrderedDict = typing.OrderedDict
Counter = typing.Counter
ChainMap = typing.ChainMap
Text = typing.Text
TYPE_CHECKING = typing.TYPE_CHECKING


# Breakpoint: https://github.com/python/cpython/pull/118681
if sys.version_info >= (3, 13, 0, "beta"):
    from typing import AsyncContextManager, AsyncGenerator, ContextManager, Generator
else:
    def _is_dunder(attr):
        return attr.startswith('__') and attr.endswith('__')


    class _SpecialGenericAlias(typing._SpecialGenericAlias, _root=True):
        def __init__(self, origin, nparams, *, defaults, inst=True, name=None):
            assert nparams > 0, "`nparams` must be a positive integer"
            assert defaults, "Must always specify a non-empty sequence for `defaults`"
            super().__init__(origin, nparams, inst=inst, name=name)
            self._defaults = defaults

        def __setattr__(self, attr, val):
            allowed_attrs = {'_name', '_inst', '_nparams', '_defaults'}
            if _is_dunder(attr) or attr in allowed_attrs:
                object.__setattr__(self, attr, val)
            else:
                setattr(self.__origin__, attr, val)

        @typing._tp_cache
        def __getitem__(self, params):
            if not isinstance(params, tuple):
                params = (params,)
            msg = "Parameters to generic types must be types."
            params = tuple(typing._type_check(p, msg) for p in params)
            if (
                len(params) < self._nparams
                and len(params) + len(self._defaults) >= self._nparams
            ):
                params = (*params, *self._defaults[len(params) - self._nparams:])
            actual_len = len(params)

            if actual_len != self._nparams:
                expected = f"at least {self._nparams - len(self._defaults)}"
                raise TypeError(
                    f"Too {'many' if actual_len > self._nparams else 'few'}"
                    f" arguments for {self};"
                    f" actual {actual_len}, expected {expected}"
                )
            return self.copy_with(params)

    _NoneType = type(None)
    Generator = _SpecialGenericAlias(
        collections.abc.Generator, 3, defaults=(_NoneType, _NoneType)
    )
    AsyncGenerator = _SpecialGenericAlias(
        collections.abc.AsyncGenerator, 2, defaults=(_NoneType,)
    )
    ContextManager = _SpecialGenericAlias(
        contextlib.AbstractContextManager,
        2,
        name="ContextManager",
        defaults=(typing.Optional[bool],)
    )
    AsyncContextManager = _SpecialGenericAlias(
        contextlib.AbstractAsyncContextManager,
        2,
        name="AsyncContextManager",
        defaults=(typing.Optional[bool],)
    )
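
    # Effect sketch (illustrative), given the defaults above: trailing type
    # arguments may be omitted and are filled in automatically, e.g.
    #
    #     Generator[int] == Generator[int, None, None]
    #     AsyncGenerator[int] == AsyncGenerator[int, None]
    #     ContextManager[str] == ContextManager[str, typing.Optional[bool]]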


_PROTO_ALLOWLIST = {
    'collections.abc': [
        'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
        'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer',
    ],
    'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'],
    'typing_extensions': ['Buffer'],
}


_EXCLUDED_ATTRS = frozenset(typing.EXCLUDED_ATTRIBUTES) | {
    "__match_args__", "__protocol_attrs__", "__non_callable_proto_members__",
    "__final__",
}


def _get_protocol_attrs(cls):
    attrs = set()
    for base in cls.__mro__[:-1]:  # without object
        if base.__name__ in {'Protocol', 'Generic'}:
            continue
        annotations = getattr(base, '__annotations__', {})
        for attr in (*base.__dict__, *annotations):
            if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS):
                attrs.add(attr)
    return attrs


def _caller(depth=1, default='__main__'):
    try:
        return sys._getframemodulename(depth + 1) or default
    except AttributeError:  # For platforms without _getframemodulename()
        pass
    try:
        return sys._getframe(depth + 1).f_globals.get('__name__', default)
    except (AttributeError, ValueError):  # For platforms without _getframe()
        pass
    return None


# `__match_args__` attribute was removed from protocol members in 3.13,
# we want to backport this change to older Python versions.
# Breakpoint: https://github.com/python/cpython/pull/110683
if sys.version_info >= (3, 13):
    Protocol = typing.Protocol
else:
    def _allow_reckless_class_checks(depth=2):
        """Allow instance and class checks for special stdlib modules.
        The abc and functools modules indiscriminately call isinstance() and
        issubclass() on the whole MRO of a user class, which may contain protocols.
        """
        return _caller(depth) in {'abc', 'functools', None}

    def _no_init(self, *args, **kwargs):
        if type(self)._is_protocol:
            raise TypeError('Protocols cannot be instantiated')

    def _type_check_issubclass_arg_1(arg):
        """Raise TypeError if `arg` is not an instance of `type`
        in `issubclass(arg, <protocol>)`.

        In most cases, this is verified by type.__subclasscheck__.
        Checking it again unnecessarily would slow down issubclass() checks,
        so, we don't perform this check unless we absolutely have to.

        For various error paths, however,
        we want to ensure that *this* error message is shown to the user
        where relevant, rather than a typing.py-specific error message.
        """
        if not isinstance(arg, type):
            # Same error message as for issubclass(1, int).
            raise TypeError('issubclass() arg 1 must be a class')

    # Inheriting from typing._ProtocolMeta isn't actually desirable,
    # but is necessary to allow typing.Protocol and typing_extensions.Protocol
    # to mix without getting TypeErrors about "metaclass conflict"
    class _ProtocolMeta(type(typing.Protocol)):
        # This metaclass is somewhat unfortunate,
        # but is necessary for several reasons...
        #
        # NOTE: DO NOT call super() in any methods in this class
        # That would call the methods on typing._ProtocolMeta on Python <=3.11
        # and those are slow
        def __new__(mcls, name, bases, namespace, **kwargs):
            if name == "Protocol" and len(bases) < 2:
                pass
            elif {Protocol, typing.Protocol} & set(bases):
                for base in bases:
                    if not (
                        base in {object, typing.Generic, Protocol, typing.Protocol}
                        or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, [])
                        or is_protocol(base)
                    ):
                        raise TypeError(
                            f"Protocols can only inherit from other protocols, "
                            f"got {base!r}"
                        )
            return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs)

        def __init__(cls, *args, **kwargs):
            abc.ABCMeta.__init__(cls, *args, **kwargs)
            if getattr(cls, "_is_protocol", False):
                cls.__protocol_attrs__ = _get_protocol_attrs(cls)

        def __subclasscheck__(cls, other):
            if cls is Protocol:
                return type.__subclasscheck__(cls, other)
            if (
                getattr(cls, '_is_protocol', False)
                and not _allow_reckless_class_checks()
            ):
                if not getattr(cls, '_is_runtime_protocol', False):
                    _type_check_issubclass_arg_1(other)
                    raise TypeError(
                        "Instance and class checks can only be used with "
                        "@runtime_checkable protocols"
                    )
                if (
                    # this attribute is set by @runtime_checkable:
                    cls.__non_callable_proto_members__
                    and cls.__dict__.get("__subclasshook__") is _proto_hook
                ):
                    _type_check_issubclass_arg_1(other)
                    non_method_attrs = sorted(cls.__non_callable_proto_members__)
                    raise TypeError(
                        "Protocols with non-method members don't support issubclass()."
                        f" Non-method members: {str(non_method_attrs)[1:-1]}."
                    )
            return abc.ABCMeta.__subclasscheck__(cls, other)

        def __instancecheck__(cls, instance):
            # We need this method for situations where attributes are
            # assigned in __init__.
            if cls is Protocol:
                return type.__instancecheck__(cls, instance)
            if not getattr(cls, "_is_protocol", False):
                # i.e., it's a concrete subclass of a protocol
                return abc.ABCMeta.__instancecheck__(cls, instance)

            if (
                not getattr(cls, '_is_runtime_protocol', False) and
                not _allow_reckless_class_checks()
            ):
                raise TypeError("Instance and class checks can only be used with"
                                " @runtime_checkable protocols")

            if abc.ABCMeta.__instancecheck__(cls, instance):
                return True

            for attr in cls.__protocol_attrs__:
                try:
                    val = inspect.getattr_static(instance, attr)
                except AttributeError:
                    break
                # this attribute is set by @runtime_checkable:
                if val is None and attr not in cls.__non_callable_proto_members__:
                    break
            else:
                return True

            return False

        def __eq__(cls, other):
            # Hack so that typing.Generic.__class_getitem__
            # treats typing_extensions.Protocol
            # as equivalent to typing.Protocol
            if abc.ABCMeta.__eq__(cls, other) is True:
                return True
            return cls is Protocol and other is typing.Protocol

        # This has to be defined, or the abc-module cache
        # complains about classes with this metaclass being unhashable,
        # if we define only __eq__!
        def __hash__(cls) -> int:
            return type.__hash__(cls)

    @classmethod
    def _proto_hook(cls, other):
        if not cls.__dict__.get('_is_protocol', False):
            return NotImplemented

        for attr in cls.__protocol_attrs__:
            for base in other.__mro__:
                # Check if the member appears in the class dictionary...
                if attr in base.__dict__:
                    if base.__dict__[attr] is None:
                        return NotImplemented
                    break

                # ...or in annotations, if it is a sub-protocol.
                annotations = getattr(base, '__annotations__', {})
                if (
                    isinstance(annotations, collections.abc.Mapping)
                    and attr in annotations
                    and is_protocol(other)
                ):
                    break
            else:
                return NotImplemented
        return True

    class Protocol(typing.Generic, metaclass=_ProtocolMeta):
        __doc__ = typing.Protocol.__doc__
        __slots__ = ()
        _is_protocol = True
        _is_runtime_protocol = False

        def __init_subclass__(cls, *args, **kwargs):
            super().__init_subclass__(*args, **kwargs)

            # Determine if this is a protocol or a concrete subclass.
            if not cls.__dict__.get('_is_protocol', False):
                cls._is_protocol = any(b is Protocol for b in cls.__bases__)

            # Set (or override) the protocol subclass hook.
            if '__subclasshook__' not in cls.__dict__:
                cls.__subclasshook__ = _proto_hook

            # Prohibit instantiation for protocol classes
            if cls._is_protocol and cls.__init__ is Protocol.__init__:
                cls.__init__ = _no_init


# Breakpoint: https://github.com/python/cpython/pull/113401
if sys.version_info >= (3, 13):
    runtime_checkable = typing.runtime_checkable
else:
    def runtime_checkable(cls):
        """Mark a protocol class as a runtime protocol.

        Such protocol can be used with isinstance() and issubclass().
        Raise TypeError if applied to a non-protocol class.
        This allows a simple-minded structural check very similar to
        one trick ponies in collections.abc such as Iterable.

        For example::

            @runtime_checkable
            class Closable(Protocol):
                def close(self): ...

            assert isinstance(open('/some/file'), Closable)

        Warning: this will check only the presence of the required methods,
        not their type signatures!
        """
        if not issubclass(cls, typing.Generic) or not getattr(cls, '_is_protocol', False):
            raise TypeError(f'@runtime_checkable can be only applied to protocol classes,'
                            f' got {cls!r}')
        cls._is_runtime_protocol = True

        # typing.Protocol classes on <=3.11 break if we execute this block,
        # because typing.Protocol classes on <=3.11 don't have a
        # `__protocol_attrs__` attribute, and this block relies on the
        # `__protocol_attrs__` attribute. Meanwhile, typing.Protocol classes on 3.12.2+
        # break if we *don't* execute this block, because *they* assume that all
        # protocol classes have a `__non_callable_proto_members__` attribute
        # (which this block sets)
        if isinstance(cls, _ProtocolMeta) or sys.version_info >= (3, 12, 2):
            # PEP 544 prohibits using issubclass()
            # with protocols that have non-method members.
            # See gh-113320 for why we compute this attribute here,
            # rather than in `_ProtocolMeta.__init__`
            cls.__non_callable_proto_members__ = set()
            for attr in cls.__protocol_attrs__:
                try:
                    is_callable = callable(getattr(cls, attr, None))
                except Exception as e:
                    raise TypeError(
                        f"Failed to determine whether protocol member {attr!r} "
                        "is a method member"
                    ) from e
                else:
                    if not is_callable:
                        cls.__non_callable_proto_members__.add(attr)

        return cls


# The "runtime" alias exists for backwards compatibility.
runtime = runtime_checkable


# Our version of runtime-checkable protocols is faster on Python <=3.11
# Breakpoint: https://github.com/python/cpython/pull/112717
if sys.version_info >= (3, 12):
    SupportsInt = typing.SupportsInt
    SupportsFloat = typing.SupportsFloat
    SupportsComplex = typing.SupportsComplex
    SupportsBytes = typing.SupportsBytes
    SupportsIndex = typing.SupportsIndex
    SupportsAbs = typing.SupportsAbs
    SupportsRound = typing.SupportsRound
else:
    @runtime_checkable
    class SupportsInt(Protocol):
        """An ABC with one abstract method __int__."""
        __slots__ = ()

        @abc.abstractmethod
        def __int__(self) -> int:
            pass

    @runtime_checkable
    class SupportsFloat(Protocol):
        """An ABC with one abstract method __float__."""
        __slots__ = ()

        @abc.abstractmethod
        def __float__(self) -> float:
            pass

    @runtime_checkable
    class SupportsComplex(Protocol):
        """An ABC with one abstract method __complex__."""
        __slots__ = ()

        @abc.abstractmethod
        def __complex__(self) -> complex:
            pass

    @runtime_checkable
    class SupportsBytes(Protocol):
        """An ABC with one abstract method __bytes__."""
        __slots__ = ()

        @abc.abstractmethod
        def __bytes__(self) -> bytes:
            pass

    @runtime_checkable
    class SupportsIndex(Protocol):
        __slots__ = ()

        @abc.abstractmethod
        def __index__(self) -> int:
            pass

    @runtime_checkable
    class SupportsAbs(Protocol[T_co]):
        """
        An ABC with one abstract method __abs__ that is covariant in its return type.
        """
        __slots__ = ()

        @abc.abstractmethod
        def __abs__(self) -> T_co:
            pass

    @runtime_checkable
    class SupportsRound(Protocol[T_co]):
        """
        An ABC with one abstract method __round__ that is covariant in its return type.
        """
        __slots__ = ()

        @abc.abstractmethod
        def __round__(self, ndigits: int = 0) -> T_co:
            pass


if hasattr(io, "Reader") and hasattr(io, "Writer"):
    Reader = io.Reader
    Writer = io.Writer
else:
    @runtime_checkable
    class Reader(Protocol[T_co]):
        """Protocol for simple I/O reader instances.

        This protocol only supports blocking I/O.
        """

        __slots__ = ()

        @abc.abstractmethod
        def read(self, size: int = ..., /) -> T_co:
            """Read data from the input stream and return it.

            If *size* is specified, at most *size* items (bytes/characters) will be
            read.
            """

    @runtime_checkable
    class Writer(Protocol[T_contra]):
        """Protocol for simple I/O writer instances.

        This protocol only supports blocking I/O.
        """

        __slots__ = ()

        @abc.abstractmethod
        def write(self, data: T_contra, /) -> int:
            """Write *data* to the output stream and return the number of items written."""  # noqa: E501

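# Usage sketch (illustrative): both protocols are runtime-checkable, so a
# structural isinstance() check only requires the relevant method, e.g.
#
#     isinstance(io.BytesIO(b"data"), Reader)  # True, BytesIO has .read()
#     isinstance(io.StringIO(), Writer)        # True, StringIO has .write()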

_NEEDS_SINGLETONMETA = (
    not hasattr(typing, "NoDefault") or not hasattr(typing, "NoExtraItems")
)

if _NEEDS_SINGLETONMETA:
    class SingletonMeta(type):
        def __setattr__(cls, attr, value):
            # TypeError is consistent with the behavior of NoneType
            raise TypeError(
                f"cannot set {attr!r} attribute of immutable type {cls.__name__!r}"
            )


if hasattr(typing, "NoDefault"):
    NoDefault = typing.NoDefault
else:
    class NoDefaultType(metaclass=SingletonMeta):
        """The type of the NoDefault singleton."""

        __slots__ = ()

        def __new__(cls):
            return globals().get("NoDefault") or object.__new__(cls)

        def __repr__(self):
            return "typing_extensions.NoDefault"

        def __reduce__(self):
            return "NoDefault"

    NoDefault = NoDefaultType()
    del NoDefaultType

if hasattr(typing, "NoExtraItems"):
    NoExtraItems = typing.NoExtraItems
else:
    class NoExtraItemsType(metaclass=SingletonMeta):
        """The type of the NoExtraItems singleton."""

        __slots__ = ()

        def __new__(cls):
            return globals().get("NoExtraItems") or object.__new__(cls)

        def __repr__(self):
            return "typing_extensions.NoExtraItems"

        def __reduce__(self):
            return "NoExtraItems"

    NoExtraItems = NoExtraItemsType()
    del NoExtraItemsType

if _NEEDS_SINGLETONMETA:
    del SingletonMeta


# Update this to something like >=3.13.0b1 if and when
# PEP 728 is implemented in CPython
_PEP_728_IMPLEMENTED = False

if _PEP_728_IMPLEMENTED:
    # The standard library TypedDict in Python 3.9.0/1 does not honour the "total"
    # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059
    # The standard library TypedDict below Python 3.11 does not store runtime
    # information about optional and required keys when using Required or NotRequired.
    # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11.
    # Aaaand on 3.12 we add __orig_bases__ to TypedDict
    # to enable better runtime introspection.
    # On 3.13 we deprecate some odd ways of creating TypedDicts.
    # Also on 3.13, PEP 705 adds the ReadOnly[] qualifier.
    # PEP 728 (still pending) makes more changes.
    TypedDict = typing.TypedDict
    _TypedDictMeta = typing._TypedDictMeta
    is_typeddict = typing.is_typeddict
else:
    # 3.10.0 and later
    _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters

    def _get_typeddict_qualifiers(annotation_type):
        while True:
            annotation_origin = get_origin(annotation_type)
            if annotation_origin is Annotated:
                annotation_args = get_args(annotation_type)
                if annotation_args:
                    annotation_type = annotation_args[0]
                else:
                    break
            elif annotation_origin is Required:
                yield Required
                annotation_type, = get_args(annotation_type)
            elif annotation_origin is NotRequired:
                yield NotRequired
                annotation_type, = get_args(annotation_type)
            elif annotation_origin is ReadOnly:
                yield ReadOnly
                annotation_type, = get_args(annotation_type)
            else:
                break

    class _TypedDictMeta(type):

        def __new__(cls, name, bases, ns, *, total=True, closed=None,
                    extra_items=NoExtraItems):
            """Create new typed dict class object.

            This method is called when TypedDict is subclassed,
            or when TypedDict is instantiated. This way
            TypedDict supports all three syntax forms described in its docstring.
            Subclasses and instances of TypedDict return actual dictionaries.
            """
            for base in bases:
                if type(base) is not _TypedDictMeta and base is not typing.Generic:
                    raise TypeError('cannot inherit from both a TypedDict type '
                                    'and a non-TypedDict base class')
            if closed is not None and extra_items is not NoExtraItems:
                raise TypeError(f"Cannot combine closed={closed!r} and extra_items")

            if any(issubclass(b, typing.Generic) for b in bases):
                generic_base = (typing.Generic,)
            else:
                generic_base = ()

            ns_annotations = ns.pop('__annotations__', None)

            # typing.py generally doesn't let you inherit from plain Generic, unless
            # the name of the class happens to be "Protocol"
            tp_dict = type.__new__(_TypedDictMeta, "Protocol", (*generic_base, dict), ns)
            tp_dict.__name__ = name
            if tp_dict.__qualname__ == "Protocol":
                tp_dict.__qualname__ = name

            if not hasattr(tp_dict, '__orig_bases__'):
                tp_dict.__orig_bases__ = bases

            annotations = {}
            own_annotate = None
            if ns_annotations is not None:
                own_annotations = ns_annotations
            elif sys.version_info >= (3, 14):
                if hasattr(annotationlib, "get_annotate_from_class_namespace"):
                    own_annotate = annotationlib.get_annotate_from_class_namespace(ns)
                else:
                    # 3.14.0a7 and earlier
                    own_annotate = ns.get("__annotate__")
                if own_annotate is not None:
                    own_annotations = annotationlib.call_annotate_function(
                        own_annotate, Format.FORWARDREF, owner=tp_dict
                    )
                else:
                    own_annotations = {}
            else:
                own_annotations = {}
            msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
            if _TAKES_MODULE:
                own_checked_annotations = {
                    n: typing._type_check(tp, msg, module=tp_dict.__module__)
                    for n, tp in own_annotations.items()
                }
            else:
                own_checked_annotations = {
                    n: typing._type_check(tp, msg)
                    for n, tp in own_annotations.items()
                }
            required_keys = set()
            optional_keys = set()
            readonly_keys = set()
            mutable_keys = set()
            extra_items_type = extra_items

            for base in bases:
                base_dict = base.__dict__

                if sys.version_info <= (3, 14):
                    annotations.update(base_dict.get('__annotations__', {}))
                required_keys.update(base_dict.get('__required_keys__', ()))
                optional_keys.update(base_dict.get('__optional_keys__', ()))
                readonly_keys.update(base_dict.get('__readonly_keys__', ()))
                mutable_keys.update(base_dict.get('__mutable_keys__', ()))

            # This was specified in an earlier version of PEP 728. Support
            # is retained for backwards compatibility, but only for Python
            # 3.13 and lower.
            if (closed and sys.version_info < (3, 14)
                    and "__extra_items__" in own_checked_annotations):
                annotation_type = own_checked_annotations.pop("__extra_items__")
                qualifiers = set(_get_typeddict_qualifiers(annotation_type))
                if Required in qualifiers:
                    raise TypeError(
                        "Special key __extra_items__ does not support "
                        "Required"
                    )
                if NotRequired in qualifiers:
                    raise TypeError(
                        "Special key __extra_items__ does not support "
                        "NotRequired"
                    )
                extra_items_type = annotation_type

            annotations.update(own_checked_annotations)
            for annotation_key, annotation_type in own_checked_annotations.items():
                qualifiers = set(_get_typeddict_qualifiers(annotation_type))

                if Required in qualifiers:
                    required_keys.add(annotation_key)
                elif NotRequired in qualifiers:
                    optional_keys.add(annotation_key)
                elif total:
                    required_keys.add(annotation_key)
                else:
                    optional_keys.add(annotation_key)
                if ReadOnly in qualifiers:
                    mutable_keys.discard(annotation_key)
                    readonly_keys.add(annotation_key)
                else:
                    mutable_keys.add(annotation_key)
                    readonly_keys.discard(annotation_key)

            # Breakpoint: https://github.com/python/cpython/pull/119891
            if sys.version_info >= (3, 14):
                def __annotate__(format):
                    annos = {}
                    for base in bases:
                        if base is Generic:
                            continue
                        base_annotate = base.__annotate__
                        if base_annotate is None:
                            continue
                        base_annos = annotationlib.call_annotate_function(
                            base_annotate, format, owner=base)
                        annos.update(base_annos)
                    if own_annotate is not None:
                        own = annotationlib.call_annotate_function(
                            own_annotate, format, owner=tp_dict)
                        if format != Format.STRING:
                            own = {
                                n: typing._type_check(tp, msg, module=tp_dict.__module__)
                                for n, tp in own.items()
                            }
                    elif format == Format.STRING:
                        own = annotationlib.annotations_to_string(own_annotations)
                    elif format in (Format.FORWARDREF, Format.VALUE):
                        own = own_checked_annotations
                    else:
                        raise NotImplementedError(format)
                    annos.update(own)
                    return annos

                tp_dict.__annotate__ = __annotate__
            else:
                tp_dict.__annotations__ = annotations
            tp_dict.__required_keys__ = frozenset(required_keys)
            tp_dict.__optional_keys__ = frozenset(optional_keys)
            tp_dict.__readonly_keys__ = frozenset(readonly_keys)
            tp_dict.__mutable_keys__ = frozenset(mutable_keys)
            tp_dict.__total__ = total
            tp_dict.__closed__ = closed
            tp_dict.__extra_items__ = extra_items_type
            return tp_dict

        __call__ = dict  # static method

        def __subclasscheck__(cls, other):
            # Typed dicts are only for static structural subtyping.
            raise TypeError('TypedDict does not support instance and class checks')

        __instancecheck__ = __subclasscheck__

    _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})

    def _create_typeddict(
        typename,
        fields,
        /,
        *,
        typing_is_inline,
        total,
        closed,
        extra_items,
        **kwargs,
    ):
        if fields is _marker or fields is None:
            if fields is _marker:
                deprecated_thing = (
                    "Failing to pass a value for the 'fields' parameter"
                )
            else:
                deprecated_thing = "Passing `None` as the 'fields' parameter"

            example = f"`{typename} = TypedDict({typename!r}, {{}})`"
            deprecation_msg = (
                f"{deprecated_thing} is deprecated and will be disallowed in "
                "Python 3.15. To create a TypedDict class with 0 fields "
                "using the functional syntax, pass an empty dictionary, e.g. "
            ) + example + "."
            warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2)
            # Support a field called "closed"
            if closed is not False and closed is not True and closed is not None:
                kwargs["closed"] = closed
                closed = None
            # Or "extra_items"
            if extra_items is not NoExtraItems:
                kwargs["extra_items"] = extra_items
                extra_items = NoExtraItems
            fields = kwargs
        elif kwargs:
            raise TypeError("TypedDict takes either a dict or keyword arguments,"
                            " but not both")
        if kwargs:
            # Breakpoint: https://github.com/python/cpython/pull/104891
            if sys.version_info >= (3, 13):
                raise TypeError("TypedDict takes no keyword arguments")
            warnings.warn(
                "The kwargs-based syntax for TypedDict definitions is deprecated "
                "in Python 3.11, will be removed in Python 3.13, and may not be "
                "understood by third-party type checkers.",
                DeprecationWarning,
                stacklevel=2,
            )

        ns = {'__annotations__': dict(fields)}
        module = _caller(depth=4 if typing_is_inline else 2)
        if module is not None:
            # Setting correct module is necessary to make typed dict classes
            # pickleable.
            ns['__module__'] = module

        td = _TypedDictMeta(typename, (), ns, total=total, closed=closed,
                            extra_items=extra_items)
        td.__orig_bases__ = (TypedDict,)
        return td

    class _TypedDictSpecialForm(_SpecialForm, _root=True):
        def __call__(
            self,
            typename,
            fields=_marker,
            /,
            *,
            total=True,
            closed=None,
            extra_items=NoExtraItems,
            **kwargs
        ):
            return _create_typeddict(
                typename,
                fields,
                typing_is_inline=False,
                total=total,
                closed=closed,
                extra_items=extra_items,
                **kwargs,
            )

        def __mro_entries__(self, bases):
            return (_TypedDict,)

    @_TypedDictSpecialForm
    def TypedDict(self, args):
        """A simple typed namespace. At runtime it is equivalent to a plain dict.

        TypedDict creates a dictionary type such that a type checker will expect all
        instances to have a certain set of keys, where each key is
        associated with a value of a consistent type. This expectation
        is not checked at runtime.

        Usage::

            class Point2D(TypedDict):
                x: int
                y: int
                label: str

            a: Point2D = {'x': 1, 'y': 2, 'label': 'good'}  # OK
            b: Point2D = {'z': 3, 'label': 'bad'}           # Fails type check

            assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')

        The type info can be accessed via the Point2D.__annotations__ dict, and
        the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
        TypedDict supports an additional equivalent form::

            Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})

        By default, all keys must be present in a TypedDict. It is possible
        to override this by specifying totality::

            class Point2D(TypedDict, total=False):
                x: int
                y: int

        This means that a Point2D TypedDict can have any of the keys omitted. A type
        checker is only expected to support a literal False or True as the value of
        the total argument. True is the default, and makes all items defined in the
        class body be required.

        The Required and NotRequired special forms can also be used to mark
        individual keys as being required or not required::

            class Point2D(TypedDict):
                x: int  # the "x" key must always be present (Required is the default)
                y: NotRequired[int]  # the "y" key can be omitted

        See PEP 655 for more details on Required and NotRequired.
        """
        # This runs when creating inline TypedDicts:
        if not isinstance(args, dict):
            raise TypeError(
                "TypedDict[...] should be used with a single dict argument"
            )

        return _create_typeddict(
            "<inline TypedDict>",
            args,
            typing_is_inline=True,
            total=True,
            closed=True,
            extra_items=NoExtraItems,
        )

    _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta)

    def is_typeddict(tp):
        """Check if an annotation is a TypedDict class

        For example::
            class Film(TypedDict):
                title: str
                year: int

            is_typeddict(Film)  # => True
            is_typeddict(Union[list, str])  # => False
        """
        return isinstance(tp, _TYPEDDICT_TYPES)
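
    # Usage sketch (illustrative): the backported class arguments and the
    # introspection attributes set in _TypedDictMeta.__new__ above, e.g.
    #
    #     class Movie(TypedDict, closed=True):
    #         name: str
    #         year: NotRequired[int]
    #
    #     Movie.__closed__          # True
    #     Movie.__required_keys__   # frozenset({'name'})
    #     Movie.__optional_keys__   # frozenset({'year'})
    #     is_typeddict(Movie)       # True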


if hasattr(typing, "assert_type"):
    assert_type = typing.assert_type

else:
    def assert_type(val, typ, /):
        """Assert (to the type checker) that the value is of the given type.

        When the type checker encounters a call to assert_type(), it
        emits an error if the value is not of the specified type::

            def greet(name: str) -> None:
                assert_type(name, str)  # ok
                assert_type(name, int)  # type checker error

        At runtime this returns the first argument unchanged and otherwise
        does nothing.
        """
        return val


if hasattr(typing, "ReadOnly"):  # 3.13+
    get_type_hints = typing.get_type_hints
else:  # <=3.13
    # replaces _strip_annotations()
    def _strip_extras(t):
        """Strips Annotated, Required and NotRequired from a given type."""
        if isinstance(t, typing._AnnotatedAlias):
            return _strip_extras(t.__origin__)
        if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired, ReadOnly):
            return _strip_extras(t.__args__[0])
        if isinstance(t, typing._GenericAlias):
            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
            if stripped_args == t.__args__:
                return t
            return t.copy_with(stripped_args)
        if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias):
            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
            if stripped_args == t.__args__:
                return t
            return _types.GenericAlias(t.__origin__, stripped_args)
        if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType):
            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
            if stripped_args == t.__args__:
                return t
            return functools.reduce(operator.or_, stripped_args)

        return t

    def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
        """Return type hints for an object.

        This is often the same as obj.__annotations__, but it handles
        forward references encoded as string literals, adds Optional[t] if a
        default value equal to None is set and recursively replaces all
        'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T'
        (unless 'include_extras=True').

        The argument may be a module, class, method, or function. The annotations
        are returned as a dictionary. For classes, annotations include also
        inherited members.

        TypeError is raised if the argument is not of a type that can contain
        annotations, and an empty dictionary is returned if no annotations are
        present.

        BEWARE -- the behavior of globalns and localns is counterintuitive
        (unless you are familiar with how eval() and exec() work). The
        search order is locals first, then globals.

        - If no dict arguments are passed, an attempt is made to use the
          globals from obj (or the respective module's globals for classes),
          and these are also used as the locals. If the object does not appear
          to have globals, an empty dictionary is used.

        - If one dict argument is passed, it is used for both globals and
          locals.

        - If two dict arguments are passed, they specify globals and
          locals, respectively.
        """
        hint = typing.get_type_hints(
            obj, globalns=globalns, localns=localns, include_extras=True
        )
        # Breakpoint: https://github.com/python/cpython/pull/30304
        if sys.version_info < (3, 11):
            _clean_optional(obj, hint, globalns, localns)
        if include_extras:
            return hint
        return {k: _strip_extras(t) for k, t in hint.items()}
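
    # Behaviour sketch (illustrative; ``tag`` is a hypothetical function):
    # include_extras controls whether Annotated metadata and the
    # Required/NotRequired/ReadOnly qualifiers are stripped, e.g.
    #
    #     def tag(x: Annotated[int, "units"]) -> None: ...
    #
    #     get_type_hints(tag)["x"]                       # int
    #     get_type_hints(tag, include_extras=True)["x"]  # Annotated[int, 'units']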

    _NoneType = type(None)

    def _could_be_inserted_optional(t):
        """detects Union[..., None] pattern"""
        if not isinstance(t, typing._UnionGenericAlias):
            return False
        # Assume if last argument is not None they are user defined
        if t.__args__[-1] is not _NoneType:
            return False
        return True

    # < 3.11
    def _clean_optional(obj, hints, globalns=None, localns=None):
        # reverts injected Union[..., None] cases from typing.get_type_hints
        # when a None default value is used.
        # see https://github.com/python/typing_extensions/issues/310
        if not hints or isinstance(obj, type):
            return
        defaults = typing._get_defaults(obj)  # avoid accessing __annotations__
        if not defaults:
            return
        original_hints = obj.__annotations__
        for name, value in hints.items():
            # Not a Union[..., None] or replacement conditions not fulfilled
            if (not _could_be_inserted_optional(value)
                or name not in defaults
                or defaults[name] is not None
            ):
                continue
            original_value = original_hints[name]
            # value=NoneType should have caused a skip above but check for safety
            if original_value is None:
                original_value = _NoneType
            # Forward reference
            if isinstance(original_value, str):
                if globalns is None:
                    if isinstance(obj, _types.ModuleType):
                        globalns = obj.__dict__
                    else:
                        nsobj = obj
                        # Find globalns for the unwrapped object.
                        while hasattr(nsobj, '__wrapped__'):
                            nsobj = nsobj.__wrapped__
                        globalns = getattr(nsobj, '__globals__', {})
                    if localns is None:
                        localns = globalns
                elif localns is None:
                    localns = globalns

                original_value = ForwardRef(
                    original_value,
                    is_argument=not isinstance(obj, _types.ModuleType)
                )
            original_evaluated = typing._eval_type(original_value, globalns, localns)
            # Compare if values differ. Note that even if equal
            # value might be cached by typing._tp_cache contrary to original_evaluated
            if original_evaluated != value or (
                # 3.10: ForwardRefs of UnionType might be turned into _UnionGenericAlias
                hasattr(_types, "UnionType")
                and isinstance(original_evaluated, _types.UnionType)
                and not isinstance(value, _types.UnionType)
            ):
                hints[name] = original_evaluated

# Python 3.9 has get_origin() and get_args() but those implementations don't support
# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do.
# Breakpoint: https://github.com/python/cpython/pull/25298
if sys.version_info >= (3, 10):
    get_origin = typing.get_origin
    get_args = typing.get_args
# 3.9
else:
    def get_origin(tp):
        """Get the unsubscripted version of a type.

        This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
        and Annotated. Return None for unsupported types. Examples::

            get_origin(Literal[42]) is Literal
            get_origin(int) is None
            get_origin(ClassVar[int]) is ClassVar
            get_origin(Generic) is Generic
            get_origin(Generic[T]) is Generic
            get_origin(Union[T, int]) is Union
            get_origin(List[Tuple[T, T]][int]) == list
            get_origin(P.args) is P
        """
        if isinstance(tp, typing._AnnotatedAlias):
            return Annotated
        if isinstance(tp, (typing._BaseGenericAlias, _types.GenericAlias,
                           ParamSpecArgs, ParamSpecKwargs)):
            return tp.__origin__
        if tp is typing.Generic:
            return typing.Generic
        return None

    def get_args(tp):
        """Get type arguments with all substitutions performed.

        For unions, basic simplifications used by Union constructor are performed.
        Examples::
            get_args(Dict[str, int]) == (str, int)
            get_args(int) == ()
            get_args(Union[int, Union[T, int], str][int]) == (int, str)
            get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
            get_args(Callable[[], T][int]) == ([], int)
        """
        if isinstance(tp, typing._AnnotatedAlias):
            return (tp.__origin__, *tp.__metadata__)
        if isinstance(tp, (typing._GenericAlias, _types.GenericAlias)):
            res = tp.__args__
            if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
                res = (list(res[:-1]), res[-1])
            return res
        return ()


# 3.10+
if hasattr(typing, 'TypeAlias'):
    TypeAlias = typing.TypeAlias
# 3.9
else:
    @_ExtensionsSpecialForm
    def TypeAlias(self, parameters):
        """Special marker indicating that an assignment should
        be recognized as a proper type alias definition by type
        checkers.

        For example::

            Predicate: TypeAlias = Callable[..., bool]

        It's invalid when used anywhere except as in the example above.
        """
        raise TypeError(f"{self} is not subscriptable")


def _set_default(type_param, default):
    type_param.has_default = lambda: default is not NoDefault
    type_param.__default__ = default


def _set_module(typevarlike):
    # for pickling:
    def_mod = _caller(depth=2)
    if def_mod != 'typing_extensions':
        typevarlike.__module__ = def_mod


class _DefaultMixin:
    """Mixin for TypeVarLike defaults."""

    __slots__ = ()
    __init__ = _set_default


# Classes using this metaclass must provide a _backported_typevarlike ClassVar
class _TypeVarLikeMeta(type):
    def __instancecheck__(cls, __instance: Any) -> bool:
        return isinstance(__instance, cls._backported_typevarlike)


if _PEP_696_IMPLEMENTED:
    from typing import TypeVar
else:
    # Add default and infer_variance parameters from PEP 696 and 695
    class TypeVar(metaclass=_TypeVarLikeMeta):
        """Type variable."""

        _backported_typevarlike = typing.TypeVar

        def __new__(cls, name, *constraints, bound=None,
                    covariant=False, contravariant=False,
                    default=NoDefault, infer_variance=False):
            if hasattr(typing, "TypeAliasType"):
                # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar
                typevar = typing.TypeVar(name, *constraints, bound=bound,
                                         covariant=covariant, contravariant=contravariant,
                                         infer_variance=infer_variance)
            else:
                typevar = typing.TypeVar(name, *constraints, bound=bound,
                                         covariant=covariant, contravariant=contravariant)
                if infer_variance and (covariant or contravariant):
                    raise ValueError("Variance cannot be specified with infer_variance.")
                typevar.__infer_variance__ = infer_variance

            _set_default(typevar, default)
            _set_module(typevar)

            def _tvar_prepare_subst(alias, args):
                if (
                    typevar.has_default()
                    and alias.__parameters__.index(typevar) == len(args)
                ):
                    args += (typevar.__default__,)
                return args

            typevar.__typing_prepare_subst__ = _tvar_prepare_subst
            return typevar

        def __init_subclass__(cls) -> None:
            raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type")

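# Usage sketch (illustrative): the PEP 696 ``default`` argument works the same
# whether TypeVar comes from typing (3.13+) or from the backport above, e.g.
#
#     T = TypeVar("T", default=int)
#     T.has_default()   # True
#     T.__default__     # int
#
#     U = TypeVar("U")
#     U.has_default()   # False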
1739
1740# Python 3.10+ has PEP 612
1741if hasattr(typing, 'ParamSpecArgs'):
1742 ParamSpecArgs = typing.ParamSpecArgs
1743 ParamSpecKwargs = typing.ParamSpecKwargs
1744# 3.9
1745else:
1746 class _Immutable:
1747 """Mixin to indicate that object should not be copied."""
1748 __slots__ = ()
1749
1750 def __copy__(self):
1751 return self
1752
1753 def __deepcopy__(self, memo):
1754 return self
1755
1756 class ParamSpecArgs(_Immutable):
1757 """The args for a ParamSpec object.
1758
1759 Given a ParamSpec object P, P.args is an instance of ParamSpecArgs.
1760
1761 ParamSpecArgs objects have a reference back to their ParamSpec:
1762
1763 P.args.__origin__ is P
1764
1765 This type is meant for runtime introspection and has no special meaning to
1766 static type checkers.
1767 """
1768 def __init__(self, origin):
1769 self.__origin__ = origin
1770
1771 def __repr__(self):
1772 return f"{self.__origin__.__name__}.args"
1773
1774 def __eq__(self, other):
1775 if not isinstance(other, ParamSpecArgs):
1776 return NotImplemented
1777 return self.__origin__ == other.__origin__
1778
1779 class ParamSpecKwargs(_Immutable):
1780 """The kwargs for a ParamSpec object.
1781
1782 Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs.
1783
1784 ParamSpecKwargs objects have a reference back to their ParamSpec:
1785
1786 P.kwargs.__origin__ is P
1787
1788 This type is meant for runtime introspection and has no special meaning to
1789 static type checkers.
1790 """
1791 def __init__(self, origin):
1792 self.__origin__ = origin
1793
1794 def __repr__(self):
1795 return f"{self.__origin__.__name__}.kwargs"
1796
1797 def __eq__(self, other):
1798 if not isinstance(other, ParamSpecKwargs):
1799 return NotImplemented
1800 return self.__origin__ == other.__origin__
1801
1802
1803if _PEP_696_IMPLEMENTED:
1804 from typing import ParamSpec
1805
1806# 3.10+
1807elif hasattr(typing, 'ParamSpec'):
1808
1809 # Add default parameter - PEP 696
1810 class ParamSpec(metaclass=_TypeVarLikeMeta):
1811 """Parameter specification."""
1812
1813 _backported_typevarlike = typing.ParamSpec
1814
1815 def __new__(cls, name, *, bound=None,
1816 covariant=False, contravariant=False,
1817 infer_variance=False, default=NoDefault):
1818 if hasattr(typing, "TypeAliasType"):
                # PEP 695 implemented (3.12+), can pass infer_variance to typing.ParamSpec
1820 paramspec = typing.ParamSpec(name, bound=bound,
1821 covariant=covariant,
1822 contravariant=contravariant,
1823 infer_variance=infer_variance)
1824 else:
1825 paramspec = typing.ParamSpec(name, bound=bound,
1826 covariant=covariant,
1827 contravariant=contravariant)
1828 paramspec.__infer_variance__ = infer_variance
1829
1830 _set_default(paramspec, default)
1831 _set_module(paramspec)
1832
1833 def _paramspec_prepare_subst(alias, args):
1834 params = alias.__parameters__
1835 i = params.index(paramspec)
1836 if i == len(args) and paramspec.has_default():
1837 args = [*args, paramspec.__default__]
1838 if i >= len(args):
1839 raise TypeError(f"Too few arguments for {alias}")
1840 # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612.
1841 if len(params) == 1 and not typing._is_param_expr(args[0]):
1842 assert i == 0
1843 args = (args,)
1844 # Convert lists to tuples to help other libraries cache the results.
1845 elif isinstance(args[i], list):
1846 args = (*args[:i], tuple(args[i]), *args[i + 1:])
1847 return args
1848
1849 paramspec.__typing_prepare_subst__ = _paramspec_prepare_subst
1850 return paramspec
1851
1852 def __init_subclass__(cls) -> None:
1853 raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type")
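    # Illustrative sketch (editor's example) of the added ``default``:
    #
    #     T = TypeVar("T")
    #     P = ParamSpec("P", default=[int, str])
    #
    #     class Router(Generic[T, P]): ...
    #
    # Subscribing Router[bool] leaves P unspecified, so _paramspec_prepare_subst
    # above fills in P's default on interpreters that consult
    # __typing_prepare_subst__ (3.11/3.12).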
1854
1855# 3.9
1856else:
1857
1858 # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
1859 class ParamSpec(list, _DefaultMixin):
1860 """Parameter specification variable.
1861
1862 Usage::
1863
1864 P = ParamSpec('P')
1865
1866 Parameter specification variables exist primarily for the benefit of static
1867 type checkers. They are used to forward the parameter types of one
1868 callable to another callable, a pattern commonly found in higher order
1869 functions and decorators. They are only valid when used in ``Concatenate``,
        or as the first argument to ``Callable``. In Python 3.10 and higher,
1871 they are also supported in user-defined Generics at runtime.
1872 See class Generic for more information on generic types. An
1873 example for annotating a decorator::
1874
1875 T = TypeVar('T')
1876 P = ParamSpec('P')
1877
1878 def add_logging(f: Callable[P, T]) -> Callable[P, T]:
1879 '''A type-safe decorator to add logging to a function.'''
1880 def inner(*args: P.args, **kwargs: P.kwargs) -> T:
1881 logging.info(f'{f.__name__} was called')
1882 return f(*args, **kwargs)
1883 return inner
1884
1885 @add_logging
1886 def add_two(x: float, y: float) -> float:
1887 '''Add two numbers together.'''
1888 return x + y
1889
1890 Parameter specification variables defined with covariant=True or
1891 contravariant=True can be used to declare covariant or contravariant
1892 generic types. These keyword arguments are valid, but their actual semantics
1893 are yet to be decided. See PEP 612 for details.
1894
1895 Parameter specification variables can be introspected. e.g.:
1896
            P.__name__ == 'P'
1898 P.__bound__ == None
1899 P.__covariant__ == False
1900 P.__contravariant__ == False
1901
1902 Note that only parameter specification variables defined in global scope can
1903 be pickled.
1904 """
1905
1906 # Trick Generic __parameters__.
1907 __class__ = typing.TypeVar
1908
1909 @property
1910 def args(self):
1911 return ParamSpecArgs(self)
1912
1913 @property
1914 def kwargs(self):
1915 return ParamSpecKwargs(self)
1916
1917 def __init__(self, name, *, bound=None, covariant=False, contravariant=False,
1918 infer_variance=False, default=NoDefault):
1919 list.__init__(self, [self])
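            # Editor's note: the list payload is [self], presumably so that when
            # 3.9's Callable machinery treats this object as a parameter list,
            # the ParamSpec itself is the single entry it finds.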
1920 self.__name__ = name
1921 self.__covariant__ = bool(covariant)
1922 self.__contravariant__ = bool(contravariant)
1923 self.__infer_variance__ = bool(infer_variance)
1924 if bound:
1925 self.__bound__ = typing._type_check(bound, 'Bound must be a type.')
1926 else:
1927 self.__bound__ = None
1928 _DefaultMixin.__init__(self, default)
1929
1930 # for pickling:
1931 def_mod = _caller()
1932 if def_mod != 'typing_extensions':
1933 self.__module__ = def_mod
1934
1935 def __repr__(self):
1936 if self.__infer_variance__:
1937 prefix = ''
1938 elif self.__covariant__:
1939 prefix = '+'
1940 elif self.__contravariant__:
1941 prefix = '-'
1942 else:
1943 prefix = '~'
1944 return prefix + self.__name__
1945
1946 def __hash__(self):
1947 return object.__hash__(self)
1948
1949 def __eq__(self, other):
1950 return self is other
1951
1952 def __reduce__(self):
1953 return self.__name__
1954
1955 # Hack to get typing._type_check to pass.
1956 def __call__(self, *args, **kwargs):
1957 pass
1958
1959 def __init_subclass__(cls) -> None:
1960 raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type")
1961
1962
1963# 3.9
1964if not hasattr(typing, 'Concatenate'):
1965 # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
1966
1967 # 3.9.0-1
1968 if not hasattr(typing, '_type_convert'):
1969 def _type_convert(arg, module=None, *, allow_special_forms=False):
1970 """For converting None to type(None), and strings to ForwardRef."""
1971 if arg is None:
1972 return type(None)
1973 if isinstance(arg, str):
1974 if sys.version_info <= (3, 9, 6):
1975 return ForwardRef(arg)
1976 if sys.version_info <= (3, 9, 7):
1977 return ForwardRef(arg, module=module)
1978 return ForwardRef(arg, module=module, is_class=allow_special_forms)
1979 return arg
1980 else:
1981 _type_convert = typing._type_convert
1982
1983 class _ConcatenateGenericAlias(list):
1984
1985 # Trick Generic into looking into this for __parameters__.
1986 __class__ = typing._GenericAlias
1987
1988 def __init__(self, origin, args):
1989 # Cannot use `super().__init__` here because of the `__class__` assignment
1990 # in the class body (https://github.com/python/typing_extensions/issues/661)
1991 list.__init__(self, args)
1992 self.__origin__ = origin
1993 self.__args__ = args
1994
1995 def __repr__(self):
1996 _type_repr = typing._type_repr
1997 return (f'{_type_repr(self.__origin__)}'
1998 f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]')
1999
2000 def __hash__(self):
2001 return hash((self.__origin__, self.__args__))
2002
2003 # Hack to get typing._type_check to pass in Generic.
2004 def __call__(self, *args, **kwargs):
2005 pass
2006
2007 @property
2008 def __parameters__(self):
2009 return tuple(
2010 tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec))
2011 )
2012
        # 3.9; used by __getitem__ below
2014 def copy_with(self, params):
2015 if isinstance(params[-1], _ConcatenateGenericAlias):
2016 params = (*params[:-1], *params[-1].__args__)
2017 elif isinstance(params[-1], (list, tuple)):
2018 return (*params[:-1], *params[-1])
2019 elif (not (params[-1] is ... or isinstance(params[-1], ParamSpec))):
2020 raise TypeError("The last parameter to Concatenate should be a "
2021 "ParamSpec variable or ellipsis.")
2022 return self.__class__(self.__origin__, params)
2023
2024 # 3.9; accessed during GenericAlias.__getitem__ when substituting
2025 def __getitem__(self, args):
2026 if self.__origin__ in (Generic, Protocol):
2027 # Can't subscript Generic[...] or Protocol[...].
2028 raise TypeError(f"Cannot subscript already-subscripted {self}")
2029 if not self.__parameters__:
2030 raise TypeError(f"{self} is not a generic class")
2031
2032 if not isinstance(args, tuple):
2033 args = (args,)
2034 args = _unpack_args(*(_type_convert(p) for p in args))
2035 params = self.__parameters__
2036 for param in params:
2037 prepare = getattr(param, "__typing_prepare_subst__", None)
2038 if prepare is not None:
2039 args = prepare(self, args)
2040 # 3.9 & typing.ParamSpec
2041 elif isinstance(param, ParamSpec):
2042 i = params.index(param)
2043 if (
2044 i == len(args)
2045 and getattr(param, '__default__', NoDefault) is not NoDefault
2046 ):
2047 args = [*args, param.__default__]
2048 if i >= len(args):
2049 raise TypeError(f"Too few arguments for {self}")
2050 # Special case for Z[[int, str, bool]] == Z[int, str, bool]
2051 if len(params) == 1 and not _is_param_expr(args[0]):
2052 assert i == 0
2053 args = (args,)
2054 elif (
2055 isinstance(args[i], list)
2056 # 3.9
                        # This class inherits from list; do not convert it
2058 and not isinstance(args[i], _ConcatenateGenericAlias)
2059 ):
2060 args = (*args[:i], tuple(args[i]), *args[i + 1:])
2061
2062 alen = len(args)
2063 plen = len(params)
2064 if alen != plen:
2065 raise TypeError(
2066 f"Too {'many' if alen > plen else 'few'} arguments for {self};"
2067 f" actual {alen}, expected {plen}"
2068 )
2069
2070 subst = dict(zip(self.__parameters__, args))
2071 # determine new args
2072 new_args = []
2073 for arg in self.__args__:
2074 if isinstance(arg, type):
2075 new_args.append(arg)
2076 continue
2077 if isinstance(arg, TypeVar):
2078 arg = subst[arg]
2079 if (
2080 (isinstance(arg, typing._GenericAlias) and _is_unpack(arg))
2081 or (
2082 hasattr(_types, "GenericAlias")
2083 and isinstance(arg, _types.GenericAlias)
2084 and getattr(arg, "__unpacked__", False)
2085 )
2086 ):
2087 raise TypeError(f"{arg} is not valid as type argument")
2088
2089 elif isinstance(arg,
2090 typing._GenericAlias
2091 if not hasattr(_types, "GenericAlias") else
2092 (typing._GenericAlias, _types.GenericAlias)
2093 ):
2094 subparams = arg.__parameters__
2095 if subparams:
2096 subargs = tuple(subst[x] for x in subparams)
2097 arg = arg[subargs]
2098 new_args.append(arg)
2099 return self.copy_with(tuple(new_args))
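        # Editor's note: 3.9's typing never calls __typing_prepare_subst__, so this
        # __getitem__ re-implements the substitution steps itself (ParamSpec
        # defaults, list-to-tuple conversion, rejection of unpacked arguments)
        # before handing the new arguments to copy_with() above.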
2100
2101# 3.10+
2102else:
2103 _ConcatenateGenericAlias = typing._ConcatenateGenericAlias
2104
2105 # 3.10
2106 if sys.version_info < (3, 11):
2107
2108 class _ConcatenateGenericAlias(typing._ConcatenateGenericAlias, _root=True):
2109 # needed for checks in collections.abc.Callable to accept this class
2110 __module__ = "typing"
2111
2112 def copy_with(self, params):
2113 if isinstance(params[-1], (list, tuple)):
2114 return (*params[:-1], *params[-1])
2115 if isinstance(params[-1], typing._ConcatenateGenericAlias):
2116 params = (*params[:-1], *params[-1].__args__)
2117 elif not (params[-1] is ... or isinstance(params[-1], ParamSpec)):
2118 raise TypeError("The last parameter to Concatenate should be a "
2119 "ParamSpec variable or ellipsis.")
2120 return super(typing._ConcatenateGenericAlias, self).copy_with(params)
2121
2122 def __getitem__(self, args):
2123 value = super().__getitem__(args)
2124 if isinstance(value, tuple) and any(_is_unpack(t) for t in value):
2125 return tuple(_unpack_args(*(n for n in value)))
2126 return value
2127
2128
# <3.9.2
2130class _EllipsisDummy: ...
2131
2132
2133# <=3.10
2134def _create_concatenate_alias(origin, parameters):
2135 if parameters[-1] is ... and sys.version_info < (3, 9, 2):
2136 # Hack: Arguments must be types, replace it with one.
2137 parameters = (*parameters[:-1], _EllipsisDummy)
2138 if sys.version_info >= (3, 10, 3):
2139 concatenate = _ConcatenateGenericAlias(origin, parameters,
2140 _typevar_types=(TypeVar, ParamSpec),
2141 _paramspec_tvars=True)
2142 else:
2143 concatenate = _ConcatenateGenericAlias(origin, parameters)
2144 if parameters[-1] is not _EllipsisDummy:
2145 return concatenate
2146 # Remove dummy again
2147 concatenate.__args__ = tuple(p if p is not _EllipsisDummy else ...
2148 for p in concatenate.__args__)
2149 if sys.version_info < (3, 10):
2150 # backport needs __args__ adjustment only
2151 return concatenate
2152 concatenate.__parameters__ = tuple(p for p in concatenate.__parameters__
2153 if p is not _EllipsisDummy)
2154 return concatenate
2155
2156
2157# <=3.10
2158@typing._tp_cache
2159def _concatenate_getitem(self, parameters):
2160 if parameters == ():
2161 raise TypeError("Cannot take a Concatenate of no types.")
2162 if not isinstance(parameters, tuple):
2163 parameters = (parameters,)
2164 if not (parameters[-1] is ... or isinstance(parameters[-1], ParamSpec)):
2165 raise TypeError("The last parameter to Concatenate should be a "
2166 "ParamSpec variable or ellipsis.")
2167 msg = "Concatenate[arg, ...]: each arg must be a type."
2168 parameters = (*(typing._type_check(p, msg) for p in parameters[:-1]),
2169 parameters[-1])
2170 return _create_concatenate_alias(self, parameters)
2171
2172
2173# 3.11+; Concatenate does not accept ellipsis in 3.10
2174# Breakpoint: https://github.com/python/cpython/pull/30969
2175if sys.version_info >= (3, 11):
2176 Concatenate = typing.Concatenate
2177# <=3.10
2178else:
2179 @_ExtensionsSpecialForm
2180 def Concatenate(self, parameters):
2181 """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
2182 higher order function which adds, removes or transforms parameters of a
2183 callable.
2184
2185 For example::
2186
2187 Callable[Concatenate[int, P], int]
2188
2189 See PEP 612 for detailed information.
2190 """
2191 return _concatenate_getitem(self, parameters)
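    # Illustrative sketch (editor's example; ``Request`` and ``R`` are placeholder
    # names) of the usual PEP 612 decorator pattern:
    #
    #     P = ParamSpec("P")
    #     R = TypeVar("R")
    #
    #     def with_request(f: Callable[Concatenate[Request, P], R]) -> Callable[P, R]:
    #         def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
    #             return f(Request(), *args, **kwargs)
    #         return wrapper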
2192
2193
2194# 3.10+
2195if hasattr(typing, 'TypeGuard'):
2196 TypeGuard = typing.TypeGuard
2197# 3.9
2198else:
2199 @_ExtensionsSpecialForm
2200 def TypeGuard(self, parameters):
2201 """Special typing form used to annotate the return type of a user-defined
2202 type guard function. ``TypeGuard`` only accepts a single type argument.
2203 At runtime, functions marked this way should return a boolean.
2204
2205 ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
2206 type checkers to determine a more precise type of an expression within a
2207 program's code flow. Usually type narrowing is done by analyzing
2208 conditional code flow and applying the narrowing to a block of code. The
2209 conditional expression here is sometimes referred to as a "type guard".
2210
2211 Sometimes it would be convenient to use a user-defined boolean function
2212 as a type guard. Such a function should use ``TypeGuard[...]`` as its
2213 return type to alert static type checkers to this intention.
2214
2215 Using ``-> TypeGuard`` tells the static type checker that for a given
2216 function:
2217
2218 1. The return value is a boolean.
2219 2. If the return value is ``True``, the type of its argument
2220 is the type inside ``TypeGuard``.
2221
2222 For example::
2223
2224 def is_str(val: Union[str, float]):
2225 # "isinstance" type guard
2226 if isinstance(val, str):
2227 # Type of ``val`` is narrowed to ``str``
2228 ...
2229 else:
2230 # Else, type of ``val`` is narrowed to ``float``.
2231 ...
2232
        Strict type narrowing is not enforced -- for a function annotated as
        ``def f(val: TypeA) -> TypeGuard[TypeB]``, ``TypeB`` need not be a narrower
        form of ``TypeA`` (it can even be a wider form) and this may lead to
2235 type-unsafe results. The main reason is to allow for things like
2236 narrowing ``List[object]`` to ``List[str]`` even though the latter is not
2237 a subtype of the former, since ``List`` is invariant. The responsibility of
2238 writing type-safe type guards is left to the user.
2239
2240 ``TypeGuard`` also works with type variables. For more information, see
2241 PEP 647 (User-Defined Type Guards).
2242 """
2243 item = typing._type_check(parameters, f'{self} accepts only a single type.')
2244 return typing._GenericAlias(self, (item,))
2245
2246
2247# 3.13+
2248if hasattr(typing, 'TypeIs'):
2249 TypeIs = typing.TypeIs
2250# <=3.12
2251else:
2252 @_ExtensionsSpecialForm
2253 def TypeIs(self, parameters):
2254 """Special typing form used to annotate the return type of a user-defined
2255 type narrower function. ``TypeIs`` only accepts a single type argument.
2256 At runtime, functions marked this way should return a boolean.
2257
2258 ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static
2259 type checkers to determine a more precise type of an expression within a
2260 program's code flow. Usually type narrowing is done by analyzing
2261 conditional code flow and applying the narrowing to a block of code. The
2262 conditional expression here is sometimes referred to as a "type guard".
2263
2264 Sometimes it would be convenient to use a user-defined boolean function
2265 as a type guard. Such a function should use ``TypeIs[...]`` as its
2266 return type to alert static type checkers to this intention.
2267
2268 Using ``-> TypeIs`` tells the static type checker that for a given
2269 function:
2270
2271 1. The return value is a boolean.
2272 2. If the return value is ``True``, the type of its argument
2273 is the intersection of the type inside ``TypeIs`` and the argument's
2274 previously known type.
2275
2276 For example::
2277
2278 def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]:
2279 return hasattr(val, '__await__')
2280
2281 def f(val: Union[int, Awaitable[int]]) -> int:
2282 if is_awaitable(val):
2283 assert_type(val, Awaitable[int])
2284 else:
2285 assert_type(val, int)
2286
2287 ``TypeIs`` also works with type variables. For more information, see
2288 PEP 742 (Narrowing types with TypeIs).
2289 """
2290 item = typing._type_check(parameters, f'{self} accepts only a single type.')
2291 return typing._GenericAlias(self, (item,))
2292
2293
2294# 3.15+?
2295if hasattr(typing, 'TypeForm'):
2296 TypeForm = typing.TypeForm
2297# <=3.14
2298else:
2299 class _TypeFormForm(_ExtensionsSpecialForm, _root=True):
2300 # TypeForm(X) is equivalent to X but indicates to the type checker
2301 # that the object is a TypeForm.
2302 def __call__(self, obj, /):
2303 return obj
2304
2305 @_TypeFormForm
2306 def TypeForm(self, parameters):
2307 """A special form representing the value that results from the evaluation
2308 of a type expression. This value encodes the information supplied in the
2309 type expression, and it represents the type described by that type expression.
2310
2311 When used in a type expression, TypeForm describes a set of type form objects.
2312 It accepts a single type argument, which must be a valid type expression.
2313 ``TypeForm[T]`` describes the set of all type form objects that represent
2314 the type T or types that are assignable to T.
2315
2316 Usage:
2317
2318 def cast[T](typ: TypeForm[T], value: Any) -> T: ...
2319
2320 reveal_type(cast(int, "x")) # int
2321
2322 See PEP 747 for more information.
2323 """
2324 item = typing._type_check(parameters, f'{self} accepts only a single type.')
        return typing._GenericAlias(self, (item,))


if hasattr(typing, "LiteralString"):  # 3.11+
2331 LiteralString = typing.LiteralString
2332else:
2333 @_SpecialForm
2334 def LiteralString(self, params):
2335 """Represents an arbitrary literal string.
2336
2337 Example::
2338
2339 from typing_extensions import LiteralString
2340
2341 def query(sql: LiteralString) -> ...:
2342 ...
2343
2344 query("SELECT * FROM table") # ok
2345 query(f"SELECT * FROM {input()}") # not ok
2346
2347 See PEP 675 for details.
2348
2349 """
2350 raise TypeError(f"{self} is not subscriptable")
2351
2352
2353if hasattr(typing, "Self"): # 3.11+
2354 Self = typing.Self
2355else:
2356 @_SpecialForm
2357 def Self(self, params):
2358 """Used to spell the type of "self" in classes.
2359
2360 Example::
2361
            from typing_extensions import Self
2363
2364 class ReturnsSelf:
2365 def parse(self, data: bytes) -> Self:
2366 ...
2367 return self
2368
2369 """
2370
2371 raise TypeError(f"{self} is not subscriptable")
2372
2373
2374if hasattr(typing, "Never"): # 3.11+
2375 Never = typing.Never
2376else:
2377 @_SpecialForm
2378 def Never(self, params):
2379 """The bottom type, a type that has no members.
2380
2381 This can be used to define a function that should never be
2382 called, or a function that never returns::
2383
2384 from typing_extensions import Never
2385
2386 def never_call_me(arg: Never) -> None:
2387 pass
2388
2389 def int_or_str(arg: int | str) -> None:
2390 never_call_me(arg) # type checker error
2391 match arg:
2392 case int():
2393 print("It's an int")
2394 case str():
2395 print("It's a str")
2396 case _:
2397 never_call_me(arg) # ok, arg is of type Never
2398
2399 """
2400
2401 raise TypeError(f"{self} is not subscriptable")
2402
2403
2404if hasattr(typing, 'Required'): # 3.11+
2405 Required = typing.Required
2406 NotRequired = typing.NotRequired
2407else: # <=3.10
2408 @_ExtensionsSpecialForm
2409 def Required(self, parameters):
2410 """A special typing construct to mark a key of a total=False TypedDict
2411 as required. For example:
2412
2413 class Movie(TypedDict, total=False):
2414 title: Required[str]
2415 year: int
2416
2417 m = Movie(
2418 title='The Matrix', # typechecker error if key is omitted
2419 year=1999,
2420 )
2421
2422 There is no runtime checking that a required key is actually provided
2423 when instantiating a related TypedDict.
2424 """
2425 item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
2426 return typing._GenericAlias(self, (item,))
2427
2428 @_ExtensionsSpecialForm
2429 def NotRequired(self, parameters):
2430 """A special typing construct to mark a key of a TypedDict as
2431 potentially missing. For example:
2432
2433 class Movie(TypedDict):
2434 title: str
2435 year: NotRequired[int]
2436
2437 m = Movie(
2438 title='The Matrix', # typechecker error if key is omitted
2439 year=1999,
2440 )
2441 """
2442 item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
2443 return typing._GenericAlias(self, (item,))
2444
2445
2446if hasattr(typing, 'ReadOnly'):
2447 ReadOnly = typing.ReadOnly
2448else: # <=3.12
2449 @_ExtensionsSpecialForm
2450 def ReadOnly(self, parameters):
2451 """A special typing construct to mark an item of a TypedDict as read-only.
2452
2453 For example:
2454
2455 class Movie(TypedDict):
2456 title: ReadOnly[str]
2457 year: int
2458
2459 def mutate_movie(m: Movie) -> None:
2460 m["year"] = 1992 # allowed
2461 m["title"] = "The Matrix" # typechecker error
2462
2463 There is no runtime checking for this property.
2464 """
2465 item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
2466 return typing._GenericAlias(self, (item,))
2467
2468
2469_UNPACK_DOC = """\
2470Type unpack operator.
2471
2472The type unpack operator takes the child types from some container type,
2473such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For
2474example:
2475
2476 # For some generic class `Foo`:
2477 Foo[Unpack[tuple[int, str]]] # Equivalent to Foo[int, str]
2478
2479 Ts = TypeVarTuple('Ts')
2480 # Specifies that `Bar` is generic in an arbitrary number of types.
2481 # (Think of `Ts` as a tuple of an arbitrary number of individual
2482 # `TypeVar`s, which the `Unpack` is 'pulling out' directly into the
2483 # `Generic[]`.)
2484 class Bar(Generic[Unpack[Ts]]): ...
2485 Bar[int] # Valid
2486 Bar[int, str] # Also valid
2487
2488From Python 3.11, this can also be done using the `*` operator:
2489
2490 Foo[*tuple[int, str]]
2491 class Bar(Generic[*Ts]): ...
2492
2493The operator can also be used along with a `TypedDict` to annotate
2494`**kwargs` in a function signature. For instance:
2495
2496 class Movie(TypedDict):
2497 name: str
2498 year: int
2499
2500 # This function expects two keyword arguments - *name* of type `str` and
2501 # *year* of type `int`.
2502 def foo(**kwargs: Unpack[Movie]): ...
2503
2504Note that there is only some runtime checking of this operator. Not
2505everything the runtime allows may be accepted by static type checkers.
2506
2507For more information, see PEP 646 and PEP 692.
2508"""
2509
2510
2511# PEP 692 changed the repr of Unpack[]
2512# Breakpoint: https://github.com/python/cpython/pull/104048
2513if sys.version_info >= (3, 12):
2514 Unpack = typing.Unpack
2515
2516 def _is_unpack(obj):
2517 return get_origin(obj) is Unpack
2518
2519else: # <=3.11
2520 class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True):
2521 def __init__(self, getitem):
2522 super().__init__(getitem)
2523 self.__doc__ = _UNPACK_DOC
2524
2525 class _UnpackAlias(typing._GenericAlias, _root=True):
2526 if sys.version_info < (3, 11):
2527 # needed for compatibility with Generic[Unpack[Ts]]
2528 __class__ = typing.TypeVar
2529
2530 @property
2531 def __typing_unpacked_tuple_args__(self):
2532 assert self.__origin__ is Unpack
2533 assert len(self.__args__) == 1
2534 arg, = self.__args__
2535 if isinstance(arg, (typing._GenericAlias, _types.GenericAlias)):
2536 if arg.__origin__ is not tuple:
2537 raise TypeError("Unpack[...] must be used with a tuple type")
2538 return arg.__args__
2539 return None
2540
2541 @property
2542 def __typing_is_unpacked_typevartuple__(self):
2543 assert self.__origin__ is Unpack
2544 assert len(self.__args__) == 1
2545 return isinstance(self.__args__[0], TypeVarTuple)
2546
2547 def __getitem__(self, args):
2548 if self.__typing_is_unpacked_typevartuple__:
2549 return args
2550 # Cannot use `super().__getitem__` here because of the `__class__` assignment
2551 # in the class body on Python <=3.11
2552 # (https://github.com/python/typing_extensions/issues/661)
2553 return typing._GenericAlias.__getitem__(self, args)
2554
2555 @_UnpackSpecialForm
2556 def Unpack(self, parameters):
2557 item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
2558 return _UnpackAlias(self, (item,))
2559
2560 def _is_unpack(obj):
2561 return isinstance(obj, _UnpackAlias)
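    # For example, _is_unpack(Unpack[Ts]) is True, while a bare TypeVarTuple or a
    # plain type such as int is not treated as an unpack form.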
2562
2563
2564def _unpack_args(*args):
2565 newargs = []
2566 for arg in args:
2567 subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
2568 if subargs is not None and (not (subargs and subargs[-1] is ...)):
2569 newargs.extend(subargs)
2570 else:
2571 newargs.append(arg)
2572 return newargs
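# For example (illustrative): _unpack_args(int, Unpack[Tuple[str, bytes]]) returns
# [int, str, bytes], whereas an arbitrary-length Unpack[Tuple[int, ...]] or an
# unpacked TypeVarTuple is kept as a single element so later code can treat it as
# variadic.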
2573
2574
2575if _PEP_696_IMPLEMENTED:
2576 from typing import TypeVarTuple
2577
2578elif hasattr(typing, "TypeVarTuple"): # 3.11+
2579
2580 # Add default parameter - PEP 696
2581 class TypeVarTuple(metaclass=_TypeVarLikeMeta):
2582 """Type variable tuple."""
2583
2584 _backported_typevarlike = typing.TypeVarTuple
2585
2586 def __new__(cls, name, *, default=NoDefault):
2587 tvt = typing.TypeVarTuple(name)
2588 _set_default(tvt, default)
2589 _set_module(tvt)
2590
2591 def _typevartuple_prepare_subst(alias, args):
2592 params = alias.__parameters__
2593 typevartuple_index = params.index(tvt)
2594 for param in params[typevartuple_index + 1:]:
2595 if isinstance(param, TypeVarTuple):
2596 raise TypeError(
2597 f"More than one TypeVarTuple parameter in {alias}"
2598 )
2599
2600 alen = len(args)
2601 plen = len(params)
2602 left = typevartuple_index
2603 right = plen - typevartuple_index - 1
2604 var_tuple_index = None
2605 fillarg = None
2606 for k, arg in enumerate(args):
2607 if not isinstance(arg, type):
2608 subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
2609 if subargs and len(subargs) == 2 and subargs[-1] is ...:
2610 if var_tuple_index is not None:
2611 raise TypeError(
2612 "More than one unpacked "
2613 "arbitrary-length tuple argument"
2614 )
2615 var_tuple_index = k
2616 fillarg = subargs[0]
2617 if var_tuple_index is not None:
2618 left = min(left, var_tuple_index)
2619 right = min(right, alen - var_tuple_index - 1)
2620 elif left + right > alen:
2621 raise TypeError(f"Too few arguments for {alias};"
2622 f" actual {alen}, expected at least {plen - 1}")
2623 if left == alen - right and tvt.has_default():
2624 replacement = _unpack_args(tvt.__default__)
2625 else:
2626 replacement = args[left: alen - right]
2627
2628 return (
2629 *args[:left],
2630 *([fillarg] * (typevartuple_index - left)),
2631 replacement,
2632 *([fillarg] * (plen - right - left - typevartuple_index - 1)),
2633 *args[alen - right:],
2634 )
2635
2636 tvt.__typing_prepare_subst__ = _typevartuple_prepare_subst
2637 return tvt
2638
2639 def __init_subclass__(self, *args, **kwds):
2640 raise TypeError("Cannot subclass special typing classes")
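    # Illustrative sketch (editor's example) of the added ``default``:
    #
    #     Ts = TypeVarTuple("Ts", default=Unpack[Tuple[str, int]])
    #
    # When a subscription supplies no arguments for Ts,
    # _typevartuple_prepare_subst above splices the unpacked default
    # (here: str, int) into the argument list in its place.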
2641
2642else: # <=3.10
2643 class TypeVarTuple(_DefaultMixin):
2644 """Type variable tuple.
2645
2646 Usage::
2647
2648 Ts = TypeVarTuple('Ts')
2649
2650 In the same way that a normal type variable is a stand-in for a single
2651 type such as ``int``, a type variable *tuple* is a stand-in for a *tuple*
2652 type such as ``Tuple[int, str]``.
2653
2654 Type variable tuples can be used in ``Generic`` declarations.
2655 Consider the following example::
2656
2657 class Array(Generic[*Ts]): ...
2658
2659 The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``,
2660 where ``T1`` and ``T2`` are type variables. To use these type variables
2661 as type parameters of ``Array``, we must *unpack* the type variable tuple using
2662 the star operator: ``*Ts``. The signature of ``Array`` then behaves
2663 as if we had simply written ``class Array(Generic[T1, T2]): ...``.
        In contrast to ``Generic[T1, T2]``, however, ``Generic[*Ts]`` allows
2665 us to parameterise the class with an *arbitrary* number of type parameters.
2666
2667 Type variable tuples can be used anywhere a normal ``TypeVar`` can.
2668 This includes class definitions, as shown above, as well as function
2669 signatures and variable annotations::
2670
2671 class Array(Generic[*Ts]):
2672
2673 def __init__(self, shape: Tuple[*Ts]):
2674 self._shape: Tuple[*Ts] = shape
2675
2676 def get_shape(self) -> Tuple[*Ts]:
2677 return self._shape
2678
2679 shape = (Height(480), Width(640))
2680 x: Array[Height, Width] = Array(shape)
2681 y = abs(x) # Inferred type is Array[Height, Width]
2682 z = x + x # ... is Array[Height, Width]
2683 x.get_shape() # ... is tuple[Height, Width]
2684
2685 """
2686
2687 # Trick Generic __parameters__.
2688 __class__ = typing.TypeVar
2689
2690 def __iter__(self):
2691 yield self.__unpacked__
2692
2693 def __init__(self, name, *, default=NoDefault):
2694 self.__name__ = name
2695 _DefaultMixin.__init__(self, default)
2696
2697 # for pickling:
2698 def_mod = _caller()
2699 if def_mod != 'typing_extensions':
2700 self.__module__ = def_mod
2701
2702 self.__unpacked__ = Unpack[self]
2703
2704 def __repr__(self):
2705 return self.__name__
2706
2707 def __hash__(self):
2708 return object.__hash__(self)
2709
2710 def __eq__(self, other):
2711 return self is other
2712
2713 def __reduce__(self):
2714 return self.__name__
2715
2716 def __init_subclass__(self, *args, **kwds):
2717 if '_root' not in kwds:
2718 raise TypeError("Cannot subclass special typing classes")
2719
2720
2721if hasattr(typing, "reveal_type"): # 3.11+
2722 reveal_type = typing.reveal_type
2723else: # <=3.10
2724 def reveal_type(obj: T, /) -> T:
2725 """Reveal the inferred type of a variable.
2726
2727 When a static type checker encounters a call to ``reveal_type()``,
2728 it will emit the inferred type of the argument::
2729
2730 x: int = 1
2731 reveal_type(x)
2732
2733 Running a static type checker (e.g., ``mypy``) on this example
2734 will produce output similar to 'Revealed type is "builtins.int"'.
2735
2736 At runtime, the function prints the runtime type of the
2737 argument and returns it unchanged.
2738
2739 """
2740 print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr)
2741 return obj
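    # At runtime this backport is purely informational: for example,
    # reveal_type([1]) prints "Runtime type is 'list'" to stderr and returns the
    # list unchanged.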
2742
2743
2744if hasattr(typing, "_ASSERT_NEVER_REPR_MAX_LENGTH"): # 3.11+
2745 _ASSERT_NEVER_REPR_MAX_LENGTH = typing._ASSERT_NEVER_REPR_MAX_LENGTH
2746else: # <=3.10
2747 _ASSERT_NEVER_REPR_MAX_LENGTH = 100
2748
2749
2750if hasattr(typing, "assert_never"): # 3.11+
2751 assert_never = typing.assert_never
2752else: # <=3.10
2753 def assert_never(arg: Never, /) -> Never:
2754 """Assert to the type checker that a line of code is unreachable.
2755
2756 Example::
2757
2758 def int_or_str(arg: int | str) -> None:
2759 match arg:
2760 case int():
2761 print("It's an int")
2762 case str():
2763 print("It's a str")
2764 case _:
2765 assert_never(arg)
2766
2767 If a type checker finds that a call to assert_never() is
2768 reachable, it will emit an error.
2769
2770 At runtime, this throws an exception when called.
2771
2772 """
2773 value = repr(arg)
2774 if len(value) > _ASSERT_NEVER_REPR_MAX_LENGTH:
2775 value = value[:_ASSERT_NEVER_REPR_MAX_LENGTH] + '...'
2776 raise AssertionError(f"Expected code to be unreachable, but got: {value}")
2777
2778
2779# dataclass_transform exists in 3.11 but lacks the frozen_default parameter
2780# Breakpoint: https://github.com/python/cpython/pull/99958
2781if sys.version_info >= (3, 12): # 3.12+
2782 dataclass_transform = typing.dataclass_transform
2783else: # <=3.11
2784 def dataclass_transform(
2785 *,
2786 eq_default: bool = True,
2787 order_default: bool = False,
2788 kw_only_default: bool = False,
2789 frozen_default: bool = False,
2790 field_specifiers: typing.Tuple[
2791 typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]],
2792 ...
2793 ] = (),
2794 **kwargs: typing.Any,
2795 ) -> typing.Callable[[T], T]:
2796 """Decorator that marks a function, class, or metaclass as providing
2797 dataclass-like behavior.
2798
2799 Example:
2800
2801 from typing_extensions import dataclass_transform
2802
2803 _T = TypeVar("_T")
2804
2805 # Used on a decorator function
2806 @dataclass_transform()
2807 def create_model(cls: type[_T]) -> type[_T]:
2808 ...
2809 return cls
2810
2811 @create_model
2812 class CustomerModel:
2813 id: int
2814 name: str
2815
2816 # Used on a base class
2817 @dataclass_transform()
2818 class ModelBase: ...
2819
2820 class CustomerModel(ModelBase):
2821 id: int
2822 name: str
2823
2824 # Used on a metaclass
2825 @dataclass_transform()
2826 class ModelMeta(type): ...
2827
2828 class ModelBase(metaclass=ModelMeta): ...
2829
2830 class CustomerModel(ModelBase):
2831 id: int
2832 name: str
2833
2834 Each of the ``CustomerModel`` classes defined in this example will now
2835 behave similarly to a dataclass created with the ``@dataclasses.dataclass``
2836 decorator. For example, the type checker will synthesize an ``__init__``
2837 method.
2838
2839 The arguments to this decorator can be used to customize this behavior:
2840 - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be
2841 True or False if it is omitted by the caller.
2842 - ``order_default`` indicates whether the ``order`` parameter is
2843 assumed to be True or False if it is omitted by the caller.
2844 - ``kw_only_default`` indicates whether the ``kw_only`` parameter is
2845 assumed to be True or False if it is omitted by the caller.
2846 - ``frozen_default`` indicates whether the ``frozen`` parameter is
2847 assumed to be True or False if it is omitted by the caller.
2848 - ``field_specifiers`` specifies a static list of supported classes
2849 or functions that describe fields, similar to ``dataclasses.field()``.
2850
2851 At runtime, this decorator records its arguments in the
2852 ``__dataclass_transform__`` attribute on the decorated object.
2853
2854 See PEP 681 for details.
2855
2856 """
2857 def decorator(cls_or_fn):
2858 cls_or_fn.__dataclass_transform__ = {
2859 "eq_default": eq_default,
2860 "order_default": order_default,
2861 "kw_only_default": kw_only_default,
2862 "frozen_default": frozen_default,
2863 "field_specifiers": field_specifiers,
2864 "kwargs": kwargs,
2865 }
2866 return cls_or_fn
2867 return decorator
2868
2869
2870if hasattr(typing, "override"): # 3.12+
2871 override = typing.override
2872else: # <=3.11
2873 _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any])
2874
2875 def override(arg: _F, /) -> _F:
2876 """Indicate that a method is intended to override a method in a base class.
2877
2878 Usage:
2879
2880 class Base:
2881 def method(self) -> None:
2882 pass
2883
2884 class Child(Base):
2885 @override
2886 def method(self) -> None:
2887 super().method()
2888
2889 When this decorator is applied to a method, the type checker will
2890 validate that it overrides a method with the same name on a base class.
2891 This helps prevent bugs that may occur when a base class is changed
2892 without an equivalent change to a child class.
2893
2894 There is no runtime checking of these properties. The decorator
2895 sets the ``__override__`` attribute to ``True`` on the decorated object
2896 to allow runtime introspection.
2897
2898 See PEP 698 for details.
2899
2900 """
2901 try:
2902 arg.__override__ = True
2903 except (AttributeError, TypeError):
2904 # Skip the attribute silently if it is not writable.
2905 # AttributeError happens if the object has __slots__ or a
2906 # read-only property, TypeError if it's a builtin class.
2907 pass
2908 return arg
2909
2910
2911# Python 3.13.8+ and 3.14.1+ contain a fix for the wrapped __init_subclass__
2912# Breakpoint: https://github.com/python/cpython/pull/138210
2913if ((3, 13, 8) <= sys.version_info < (3, 14)) or sys.version_info >= (3, 14, 1):
2914 deprecated = warnings.deprecated
2915else:
2916 _T = typing.TypeVar("_T")
2917
2918 class deprecated:
2919 """Indicate that a class, function or overload is deprecated.
2920
2921 When this decorator is applied to an object, the type checker
2922 will generate a diagnostic on usage of the deprecated object.
2923
2924 Usage:
2925
2926 @deprecated("Use B instead")
2927 class A:
2928 pass
2929
2930 @deprecated("Use g instead")
2931 def f():
2932 pass
2933
2934 @overload
2935 @deprecated("int support is deprecated")
2936 def g(x: int) -> int: ...
2937 @overload
2938 def g(x: str) -> int: ...
2939
2940 The warning specified by *category* will be emitted at runtime
2941 on use of deprecated objects. For functions, that happens on calls;
2942 for classes, on instantiation and on creation of subclasses.
2943 If the *category* is ``None``, no warning is emitted at runtime.
2944 The *stacklevel* determines where the
2945 warning is emitted. If it is ``1`` (the default), the warning
2946 is emitted at the direct caller of the deprecated object; if it
2947 is higher, it is emitted further up the stack.
2948 Static type checker behavior is not affected by the *category*
2949 and *stacklevel* arguments.
2950
2951 The deprecation message passed to the decorator is saved in the
2952 ``__deprecated__`` attribute on the decorated object.
2953 If applied to an overload, the decorator
2954 must be after the ``@overload`` decorator for the attribute to
2955 exist on the overload as returned by ``get_overloads()``.
2956
2957 See PEP 702 for details.
2958
2959 """
2960 def __init__(
2961 self,
2962 message: str,
2963 /,
2964 *,
2965 category: typing.Optional[typing.Type[Warning]] = DeprecationWarning,
2966 stacklevel: int = 1,
2967 ) -> None:
2968 if not isinstance(message, str):
2969 raise TypeError(
2970 "Expected an object of type str for 'message', not "
2971 f"{type(message).__name__!r}"
2972 )
2973 self.message = message
2974 self.category = category
2975 self.stacklevel = stacklevel
2976
2977 def __call__(self, arg: _T, /) -> _T:
2978 # Make sure the inner functions created below don't
2979 # retain a reference to self.
2980 msg = self.message
2981 category = self.category
2982 stacklevel = self.stacklevel
2983 if category is None:
2984 arg.__deprecated__ = msg
2985 return arg
2986 elif isinstance(arg, type):
2987 import functools
2988 from types import MethodType
2989
2990 original_new = arg.__new__
2991
2992 @functools.wraps(original_new)
2993 def __new__(cls, /, *args, **kwargs):
2994 if cls is arg:
2995 warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
2996 if original_new is not object.__new__:
2997 return original_new(cls, *args, **kwargs)
2998 # Mirrors a similar check in object.__new__.
2999 elif cls.__init__ is object.__init__ and (args or kwargs):
3000 raise TypeError(f"{cls.__name__}() takes no arguments")
3001 else:
3002 return original_new(cls)
3003
3004 arg.__new__ = staticmethod(__new__)
3005
3006 if "__init_subclass__" in arg.__dict__:
3007 # __init_subclass__ is directly present on the decorated class.
3008 # Synthesize a wrapper that calls this method directly.
3009 original_init_subclass = arg.__init_subclass__
3010 # We need slightly different behavior if __init_subclass__
3011 # is a bound method (likely if it was implemented in Python).
3012 # Otherwise, it likely means it's a builtin such as
3013 # object's implementation of __init_subclass__.
3014 if isinstance(original_init_subclass, MethodType):
3015 original_init_subclass = original_init_subclass.__func__
3016
3017 @functools.wraps(original_init_subclass)
3018 def __init_subclass__(*args, **kwargs):
3019 warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
3020 return original_init_subclass(*args, **kwargs)
3021 else:
3022 def __init_subclass__(cls, *args, **kwargs):
3023 warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
3024 return super(arg, cls).__init_subclass__(*args, **kwargs)
3025
3026 arg.__init_subclass__ = classmethod(__init_subclass__)
3027
3028 arg.__deprecated__ = __new__.__deprecated__ = msg
3029 __init_subclass__.__deprecated__ = msg
3030 return arg
3031 elif callable(arg):
3032 import asyncio.coroutines
3033 import functools
3034 import inspect
3035
3036 @functools.wraps(arg)
3037 def wrapper(*args, **kwargs):
3038 warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
3039 return arg(*args, **kwargs)
3040
3041 if asyncio.coroutines.iscoroutinefunction(arg):
3042 # Breakpoint: https://github.com/python/cpython/pull/99247
3043 if sys.version_info >= (3, 12):
3044 wrapper = inspect.markcoroutinefunction(wrapper)
3045 else:
3046 wrapper._is_coroutine = asyncio.coroutines._is_coroutine
3047
3048 arg.__deprecated__ = wrapper.__deprecated__ = msg
3049 return wrapper
3050 else:
3051 raise TypeError(
3052 "@deprecated decorator with non-None category must be applied to "
3053 f"a class or callable, not {arg!r}"
3054 )
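        # Editor's note: the wrappers above warn with ``stacklevel + 1`` so that
        # the warning is attributed to the code calling the deprecated object,
        # not to the wrapper frame itself.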
3055
3056# Breakpoint: https://github.com/python/cpython/pull/23702
3057if sys.version_info < (3, 10):
3058 def _is_param_expr(arg):
3059 return arg is ... or isinstance(
3060 arg, (tuple, list, ParamSpec, _ConcatenateGenericAlias)
3061 )
3062else:
3063 def _is_param_expr(arg):
3064 return arg is ... or isinstance(
3065 arg,
3066 (
3067 tuple,
3068 list,
3069 ParamSpec,
3070 _ConcatenateGenericAlias,
3071 typing._ConcatenateGenericAlias,
3072 ),
3073 )
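# For example, a list such as [int, str], a ParamSpec, a Concatenate alias, or an
# ellipsis counts as a parameter expression here, while a single type such as int
# does not.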
3074
3075
3076# We have to do some monkey patching to deal with the dual nature of
3077# Unpack/TypeVarTuple:
3078# - We want Unpack to be a kind of TypeVar so it gets accepted in
3079# Generic[Unpack[Ts]]
3080# - We want it to *not* be treated as a TypeVar for the purposes of
3081# counting generic parameters, so that when we subscript a generic,
3082# the runtime doesn't try to substitute the Unpack with the subscripted type.
3083if not hasattr(typing, "TypeVarTuple"):
3084 def _check_generic(cls, parameters, elen=_marker):
3085 """Check correct count for parameters of a generic cls (internal helper).
3086
3087 This gives a nice error message in case of count mismatch.
3088 """
3089 # If substituting a single ParamSpec with multiple arguments
3090 # we do not check the count
3091 if (inspect.isclass(cls) and issubclass(cls, typing.Generic)
3092 and len(cls.__parameters__) == 1
3093 and isinstance(cls.__parameters__[0], ParamSpec)
3094 and parameters
3095 and not _is_param_expr(parameters[0])
3096 ):
3097 # Generic modifies parameters variable, but here we cannot do this
3098 return
3099
3100 if not elen:
3101 raise TypeError(f"{cls} is not a generic class")
3102 if elen is _marker:
3103 if not hasattr(cls, "__parameters__") or not cls.__parameters__:
3104 raise TypeError(f"{cls} is not a generic class")
3105 elen = len(cls.__parameters__)
3106 alen = len(parameters)
3107 if alen != elen:
3108 expect_val = elen
3109 if hasattr(cls, "__parameters__"):
3110 parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
3111 num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters)
3112 if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples):
3113 return
3114
3115 # deal with TypeVarLike defaults
3116 # required TypeVarLikes cannot appear after a defaulted one.
3117 if alen < elen:
3118 # since we validate TypeVarLike default in _collect_type_vars
3119 # or _collect_parameters we can safely check parameters[alen]
3120 if (
3121 getattr(parameters[alen], '__default__', NoDefault)
3122 is not NoDefault
3123 ):
3124 return
3125
3126 num_default_tv = sum(getattr(p, '__default__', NoDefault)
3127 is not NoDefault for p in parameters)
3128
3129 elen -= num_default_tv
3130
3131 expect_val = f"at least {elen}"
3132
3133 # Breakpoint: https://github.com/python/cpython/pull/27515
3134 things = "arguments" if sys.version_info >= (3, 10) else "parameters"
3135 raise TypeError(f"Too {'many' if alen > elen else 'few'} {things}"
3136 f" for {cls}; actual {alen}, expected {expect_val}")
3137else:
3138 # Python 3.11+
3139
3140 def _check_generic(cls, parameters, elen):
3141 """Check correct count for parameters of a generic cls (internal helper).
3142
3143 This gives a nice error message in case of count mismatch.
3144 """
3145 if not elen:
3146 raise TypeError(f"{cls} is not a generic class")
3147 alen = len(parameters)
3148 if alen != elen:
3149 expect_val = elen
3150 if hasattr(cls, "__parameters__"):
3151 parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
3152
3153 # deal with TypeVarLike defaults
3154 # required TypeVarLikes cannot appear after a defaulted one.
3155 if alen < elen:
3156 # since we validate TypeVarLike default in _collect_type_vars
3157 # or _collect_parameters we can safely check parameters[alen]
3158 if (
3159 getattr(parameters[alen], '__default__', NoDefault)
3160 is not NoDefault
3161 ):
3162 return
3163
3164 num_default_tv = sum(getattr(p, '__default__', NoDefault)
3165 is not NoDefault for p in parameters)
3166
3167 elen -= num_default_tv
3168
3169 expect_val = f"at least {elen}"
3170
3171 raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments"
3172 f" for {cls}; actual {alen}, expected {expect_val}")
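# Editor's note: the defaults handling in both branches above is what lets, e.g.,
#
#     T = TypeVar("T")
#     U = TypeVar("U", default=str)
#     class C(Generic[T, U]): ...
#
# accept C[int] at runtime on versions where the stdlib does not fill in the
# default itself: alen (1) is less than elen (2), but the first missing parameter
# carries a __default__, so the count check returns early instead of raising.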
3173
3174if not _PEP_696_IMPLEMENTED:
3175 typing._check_generic = _check_generic
3176
3177
3178def _has_generic_or_protocol_as_origin() -> bool:
3179 try:
3180 frame = sys._getframe(2)
3181 # - Catch AttributeError: not all Python implementations have sys._getframe()
3182 # - Catch ValueError: maybe we're called from an unexpected module
3183 # and the call stack isn't deep enough
3184 except (AttributeError, ValueError):
3185 return False # err on the side of leniency
3186 else:
3187 # If we somehow get invoked from outside typing.py,
3188 # also err on the side of leniency
3189 if frame.f_globals.get("__name__") != "typing":
3190 return False
3191 origin = frame.f_locals.get("origin")
3192 # Cannot use "in" because origin may be an object with a buggy __eq__ that
3193 # throws an error.
3194 return origin is typing.Generic or origin is Protocol or origin is typing.Protocol
3195
3196
3197_TYPEVARTUPLE_TYPES = {TypeVarTuple, getattr(typing, "TypeVarTuple", None)}
3198
3199
3200def _is_unpacked_typevartuple(x) -> bool:
3201 if get_origin(x) is not Unpack:
3202 return False
3203 args = get_args(x)
3204 return (
3205 bool(args)
3206 and len(args) == 1
3207 and type(args[0]) in _TYPEVARTUPLE_TYPES
3208 )
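# For example, this is True for Unpack[Ts] where Ts is a TypeVarTuple, but False
# for Unpack[Tuple[int, str]] and for a bare, un-unpacked Ts.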
3209
3210
3211# Python 3.11+ _collect_type_vars was renamed to _collect_parameters
3212if hasattr(typing, '_collect_type_vars'):
3213 def _collect_type_vars(types, typevar_types=None):
        """Collect all type variables contained in types in order of
3215 first appearance (lexicographic order). For example::
3216
3217 _collect_type_vars((T, List[S, T])) == (T, S)
3218 """
3219 if typevar_types is None:
3220 typevar_types = typing.TypeVar
3221 tvars = []
3222
3223 # A required TypeVarLike cannot appear after a TypeVarLike with a default
3224 # if it was a direct call to `Generic[]` or `Protocol[]`
3225 enforce_default_ordering = _has_generic_or_protocol_as_origin()
3226 default_encountered = False
3227
3228 # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
3229 type_var_tuple_encountered = False
3230
3231 for t in types:
3232 if _is_unpacked_typevartuple(t):
3233 type_var_tuple_encountered = True
3234 elif (
3235 isinstance(t, typevar_types) and not isinstance(t, _UnpackAlias)
3236 and t not in tvars
3237 ):
3238 if enforce_default_ordering:
3239 has_default = getattr(t, '__default__', NoDefault) is not NoDefault
3240 if has_default:
3241 if type_var_tuple_encountered:
3242 raise TypeError('Type parameter with a default'
3243 ' follows TypeVarTuple')
3244 default_encountered = True
3245 elif default_encountered:
3246 raise TypeError(f'Type parameter {t!r} without a default'
3247 ' follows type parameter with a default')
3248
3249 tvars.append(t)
3250 if _should_collect_from_parameters(t):
3251 tvars.extend([t for t in t.__parameters__ if t not in tvars])
3252 elif isinstance(t, tuple):
3253 # Collect nested type_vars
3254 # tuple wrapped by _prepare_paramspec_params(cls, params)
3255 for x in t:
3256 for collected in _collect_type_vars([x]):
3257 if collected not in tvars:
3258 tvars.append(collected)
3259 return tuple(tvars)
3260
3261 typing._collect_type_vars = _collect_type_vars
3262else:
3263 def _collect_parameters(args):
3264 """Collect all type variables and parameter specifications in args
3265 in order of first appearance (lexicographic order).
3266
3267 For example::
3268
3269 assert _collect_parameters((T, Callable[P, T])) == (T, P)
3270 """
3271 parameters = []
3272
3273 # A required TypeVarLike cannot appear after a TypeVarLike with default
3274 # if it was a direct call to `Generic[]` or `Protocol[]`
3275 enforce_default_ordering = _has_generic_or_protocol_as_origin()
3276 default_encountered = False
3277
3278 # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
3279 type_var_tuple_encountered = False
3280
3281 for t in args:
3282 if isinstance(t, type):
3283 # We don't want __parameters__ descriptor of a bare Python class.
3284 pass
3285 elif isinstance(t, tuple):
3286 # `t` might be a tuple, when `ParamSpec` is substituted with
3287 # `[T, int]`, or `[int, *Ts]`, etc.
3288 for x in t:
3289 for collected in _collect_parameters([x]):
3290 if collected not in parameters:
3291 parameters.append(collected)
3292 elif hasattr(t, '__typing_subst__'):
3293 if t not in parameters:
3294 if enforce_default_ordering:
3295 has_default = (
3296 getattr(t, '__default__', NoDefault) is not NoDefault
3297 )
3298
3299 if type_var_tuple_encountered and has_default:
3300 raise TypeError('Type parameter with a default'
3301 ' follows TypeVarTuple')
3302
3303 if has_default:
3304 default_encountered = True
3305 elif default_encountered:
3306 raise TypeError(f'Type parameter {t!r} without a default'
3307 ' follows type parameter with a default')
3308
3309 parameters.append(t)
3310 else:
3311 if _is_unpacked_typevartuple(t):
3312 type_var_tuple_encountered = True
3313 for x in getattr(t, '__parameters__', ()):
3314 if x not in parameters:
3315 parameters.append(x)
3316
3317 return tuple(parameters)
3318
3319 if not _PEP_696_IMPLEMENTED:
3320 typing._collect_parameters = _collect_parameters
3321
3322# Backport typing.NamedTuple as it exists in Python 3.13.
# In 3.11, support for defining generic `NamedTuple`s was added.
3324# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8.
3325# On 3.12, we added __orig_bases__ to call-based NamedTuples
3326# On 3.13, we deprecated kwargs-based NamedTuples
3327# Breakpoint: https://github.com/python/cpython/pull/105609
3328if sys.version_info >= (3, 13):
3329 NamedTuple = typing.NamedTuple
3330else:
3331 def _make_nmtuple(name, types, module, defaults=()):
3332 fields = [n for n, t in types]
3333 annotations = {n: typing._type_check(t, f"field {n} annotation must be a type")
3334 for n, t in types}
3335 nm_tpl = collections.namedtuple(name, fields,
3336 defaults=defaults, module=module)
3337 nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations
3338 return nm_tpl
3339
3340 _prohibited_namedtuple_fields = typing._prohibited
3341 _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'})
3342
3343 class _NamedTupleMeta(type):
3344 def __new__(cls, typename, bases, ns):
3345 assert _NamedTuple in bases
3346 for base in bases:
3347 if base is not _NamedTuple and base is not typing.Generic:
3348 raise TypeError(
3349 'can only inherit from a NamedTuple type and Generic')
3350 bases = tuple(tuple if base is _NamedTuple else base for base in bases)
3351 if "__annotations__" in ns:
3352 types = ns["__annotations__"]
3353 elif "__annotate__" in ns:
3354 # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated
3355 types = ns["__annotate__"](1)
3356 else:
3357 types = {}
3358 default_names = []
3359 for field_name in types:
3360 if field_name in ns:
3361 default_names.append(field_name)
3362 elif default_names:
3363 raise TypeError(f"Non-default namedtuple field {field_name} "
3364 f"cannot follow default field"
3365 f"{'s' if len(default_names) > 1 else ''} "
3366 f"{', '.join(default_names)}")
3367 nm_tpl = _make_nmtuple(
3368 typename, types.items(),
3369 defaults=[ns[n] for n in default_names],
3370 module=ns['__module__']
3371 )
3372 nm_tpl.__bases__ = bases
3373 if typing.Generic in bases:
3374 if hasattr(typing, '_generic_class_getitem'): # 3.12+
3375 nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem)
3376 else:
3377 class_getitem = typing.Generic.__class_getitem__.__func__
3378 nm_tpl.__class_getitem__ = classmethod(class_getitem)
3379 # update from user namespace without overriding special namedtuple attributes
3380 for key, val in ns.items():
3381 if key in _prohibited_namedtuple_fields:
3382 raise AttributeError("Cannot overwrite NamedTuple attribute " + key)
3383 elif key not in _special_namedtuple_fields:
3384 if key not in nm_tpl._fields:
3385 setattr(nm_tpl, key, ns[key])
3386 try:
3387 set_name = type(val).__set_name__
3388 except AttributeError:
3389 pass
3390 else:
3391 try:
3392 set_name(val, nm_tpl, key)
3393 except BaseException as e:
3394 msg = (
3395 f"Error calling __set_name__ on {type(val).__name__!r} "
3396 f"instance {key!r} in {typename!r}"
3397 )
3398 # BaseException.add_note() existed on py311,
3399 # but the __set_name__ machinery didn't start
3400 # using add_note() until py312.
3401 # Making sure exceptions are raised in the same way
3402 # as in "normal" classes seems most important here.
3403 # Breakpoint: https://github.com/python/cpython/pull/95915
3404 if sys.version_info >= (3, 12):
3405 e.add_note(msg)
3406 raise
3407 else:
3408 raise RuntimeError(msg) from e
3409
3410 if typing.Generic in bases:
3411 nm_tpl.__init_subclass__()
3412 return nm_tpl
3413
3414 _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {})
3415
3416 def _namedtuple_mro_entries(bases):
3417 assert NamedTuple in bases
3418 return (_NamedTuple,)
3419
3420 def NamedTuple(typename, fields=_marker, /, **kwargs):
3421 """Typed version of namedtuple.
3422
3423 Usage::
3424
3425 class Employee(NamedTuple):
3426 name: str
3427 id: int
3428
3429 This is equivalent to::
3430
3431 Employee = collections.namedtuple('Employee', ['name', 'id'])
3432
3433 The resulting class has an extra __annotations__ attribute, giving a
3434 dict that maps field names to types. (The field names are also in
3435 the _fields attribute, which is part of the namedtuple API.)
3436 An alternative equivalent functional syntax is also accepted::
3437
3438 Employee = NamedTuple('Employee', [('name', str), ('id', int)])
3439 """
3440 if fields is _marker:
3441 if kwargs:
3442 deprecated_thing = "Creating NamedTuple classes using keyword arguments"
3443 deprecation_msg = (
3444 "{name} is deprecated and will be disallowed in Python {remove}. "
3445 "Use the class-based or functional syntax instead."
3446 )
3447 else:
3448 deprecated_thing = "Failing to pass a value for the 'fields' parameter"
3449 example = f"`{typename} = NamedTuple({typename!r}, [])`"
3450 deprecation_msg = (
3451 "{name} is deprecated and will be disallowed in Python {remove}. "
3452 "To create a NamedTuple class with 0 fields "
3453 "using the functional syntax, "
3454 "pass an empty list, e.g. "
3455 ) + example + "."
3456 elif fields is None:
3457 if kwargs:
3458 raise TypeError(
3459 "Cannot pass `None` as the 'fields' parameter "
3460 "and also specify fields using keyword arguments"
3461 )
3462 else:
3463 deprecated_thing = "Passing `None` as the 'fields' parameter"
3464 example = f"`{typename} = NamedTuple({typename!r}, [])`"
3465 deprecation_msg = (
3466 "{name} is deprecated and will be disallowed in Python {remove}. "
3467 "To create a NamedTuple class with 0 fields "
3468 "using the functional syntax, "
3469 "pass an empty list, e.g. "
3470 ) + example + "."
3471 elif kwargs:
3472 raise TypeError("Either list of fields or keywords"
3473 " can be provided to NamedTuple, not both")
3474 if fields is _marker or fields is None:
3475 warnings.warn(
3476 deprecation_msg.format(name=deprecated_thing, remove="3.15"),
3477 DeprecationWarning,
3478 stacklevel=2,
3479 )
3480 fields = kwargs.items()
3481 nt = _make_nmtuple(typename, fields, module=_caller())
3482 nt.__orig_bases__ = (NamedTuple,)
3483 return nt
3484
3485 NamedTuple.__mro_entries__ = _namedtuple_mro_entries
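    # A minimal illustrative sketch (names are examples only, not executed here):
    # the backport also supports generic NamedTuple classes on older Pythons,
    # mirroring the Python 3.11+ behaviour handled by _NamedTupleMeta above.
    #
    #     T = TypeVar("T")
    #
    #     class Box(NamedTuple, Generic[T]):
    #         content: T
    #
    #     Box[int](content=1)    # Box(content=1)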
3486
3487
3488if hasattr(collections.abc, "Buffer"):
3489 Buffer = collections.abc.Buffer
3490else:
3491 class Buffer(abc.ABC): # noqa: B024
3492 """Base class for classes that implement the buffer protocol.
3493
3494 The buffer protocol allows Python objects to expose a low-level
3495 memory buffer interface. Before Python 3.12, it is not possible
3496 to implement the buffer protocol in pure Python code, or even
3497 to check whether a class implements the buffer protocol. In
3498 Python 3.12 and higher, the ``__buffer__`` method allows access
3499 to the buffer protocol from Python code, and the
3500 ``collections.abc.Buffer`` ABC allows checking whether a class
3501 implements the buffer protocol.
3502
3503 To indicate support for the buffer protocol in earlier versions,
3504 inherit from this ABC, either in a stub file or at runtime,
3505 or use ABC registration. This ABC provides no methods, because
        there are no Python-accessible methods shared by pre-3.12 buffer
3507 classes. It is useful primarily for static checks.
3508
3509 """
3510
3511 # As a courtesy, register the most common stdlib buffer classes.
3512 Buffer.register(memoryview)
3513 Buffer.register(bytearray)
3514 Buffer.register(bytes)
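    # Illustrative sketch (hypothetical class, not executed here): on pre-3.12
    # Pythons, a user-defined class can advertise buffer support via ABC
    # registration, after which isinstance() checks against Buffer succeed:
    #
    #     class MyBufferWrapper:
    #         ...  # wraps an object implementing the C-level buffer protocol
    #
    #     Buffer.register(MyBufferWrapper)
    #     isinstance(MyBufferWrapper(), Buffer)   # True
    #     isinstance(memoryview(b""), Buffer)     # True, registered above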
3515
3516
3517# Backport of types.get_original_bases, available on 3.12+ in CPython
3518if hasattr(_types, "get_original_bases"):
3519 get_original_bases = _types.get_original_bases
3520else:
3521 def get_original_bases(cls, /):
3522 """Return the class's "original" bases prior to modification by `__mro_entries__`.
3523
3524 Examples::
3525
3526 from typing import TypeVar, Generic
3527 from typing_extensions import NamedTuple, TypedDict
3528
3529 T = TypeVar("T")
3530 class Foo(Generic[T]): ...
3531 class Bar(Foo[int], float): ...
3532 class Baz(list[str]): ...
3533 Eggs = NamedTuple("Eggs", [("a", int), ("b", str)])
3534 Spam = TypedDict("Spam", {"a": int, "b": str})
3535
3536 assert get_original_bases(Bar) == (Foo[int], float)
3537 assert get_original_bases(Baz) == (list[str],)
3538 assert get_original_bases(Eggs) == (NamedTuple,)
3539 assert get_original_bases(Spam) == (TypedDict,)
3540 assert get_original_bases(int) == (object,)
3541 """
3542 try:
3543 return cls.__dict__.get("__orig_bases__", cls.__bases__)
3544 except AttributeError:
3545 raise TypeError(
3546 f'Expected an instance of type, not {type(cls).__name__!r}'
3547 ) from None
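    # Illustrative contrast with the resolved __bases__ attribute, continuing
    # the docstring example above (not executed here):
    #
    #     Bar.__bases__              # (Foo, float)       -- Foo[int] collapsed by __mro_entries__
    #     get_original_bases(Bar)    # (Foo[int], float)  -- the bases as written in the source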
3548
3549
3550# NewType is a class on Python 3.10+, making it pickleable
3551# The error message for subclassing instances of NewType was improved on 3.11+
3552# Breakpoint: https://github.com/python/cpython/pull/30268
3553if sys.version_info >= (3, 11):
3554 NewType = typing.NewType
3555else:
3556 class NewType:
3557 """NewType creates simple unique types with almost zero
3558 runtime overhead. NewType(name, tp) is considered a subtype of tp
3559 by static type checkers. At runtime, NewType(name, tp) returns
        a dummy callable that simply returns its argument. Usage::

            UserId = NewType('UserId', int)

            def name_by_id(user_id: UserId) -> str:
                ...

            UserId('user')          # Fails type check
            name_by_id(42)          # Fails type check
            name_by_id(UserId(42))  # OK
            num = UserId(5) + 1     # type: int
3568 """
3569
3570 def __call__(self, obj, /):
3571 return obj
3572
3573 def __init__(self, name, tp):
3574 self.__qualname__ = name
3575 if '.' in name:
3576 name = name.rpartition('.')[-1]
3577 self.__name__ = name
3578 self.__supertype__ = tp
3579 def_mod = _caller()
3580 if def_mod != 'typing_extensions':
3581 self.__module__ = def_mod
3582
3583 def __mro_entries__(self, bases):
3584 # We defined __mro_entries__ to get a better error message
3585 # if a user attempts to subclass a NewType instance. bpo-46170
3586 supercls_name = self.__name__
3587
3588 class Dummy:
3589 def __init_subclass__(cls):
3590 subcls_name = cls.__name__
3591 raise TypeError(
3592 f"Cannot subclass an instance of NewType. "
3593 f"Perhaps you were looking for: "
3594 f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`"
3595 )
3596
3597 return (Dummy,)
3598
3599 def __repr__(self):
3600 return f'{self.__module__}.{self.__qualname__}'
3601
3602 def __reduce__(self):
3603 return self.__qualname__
3604
3605 # Breakpoint: https://github.com/python/cpython/pull/21515
3606 if sys.version_info >= (3, 10):
3607 # PEP 604 methods
3608 # It doesn't make sense to have these methods on Python <3.10
3609
3610 def __or__(self, other):
3611 return typing.Union[self, other]
3612
3613 def __ror__(self, other):
3614 return typing.Union[other, self]
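    # Illustrative sketch (not executed here): with the PEP 604 methods above,
    # a backported NewType can participate in `X | Y` unions on Python 3.10+:
    #
    #     UserId = NewType('UserId', int)
    #     UserId | None      # typing.Optional[UserId]
    #
    # On 3.9, `|` is unavailable and typing.Optional[UserId] must be spelled out.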
3615
3616
3617# Breakpoint: https://github.com/python/cpython/pull/124795
3618if sys.version_info >= (3, 14):
3619 TypeAliasType = typing.TypeAliasType
3620# <=3.13
3621else:
3622 # Breakpoint: https://github.com/python/cpython/pull/103764
3623 if sys.version_info >= (3, 12):
3624 # 3.12-3.13
3625 def _is_unionable(obj):
3626 """Corresponds to is_unionable() in unionobject.c in CPython."""
3627 return obj is None or isinstance(obj, (
3628 type,
3629 _types.GenericAlias,
3630 _types.UnionType,
3631 typing.TypeAliasType,
3632 TypeAliasType,
3633 ))
3634 else:
3635 # <=3.11
3636 def _is_unionable(obj):
3637 """Corresponds to is_unionable() in unionobject.c in CPython."""
3638 return obj is None or isinstance(obj, (
3639 type,
3640 _types.GenericAlias,
3641 _types.UnionType,
3642 TypeAliasType,
3643 ))
3644
3645 if sys.version_info < (3, 10):
3646 # Copied and pasted from https://github.com/python/cpython/blob/986a4e1b6fcae7fe7a1d0a26aea446107dd58dd2/Objects/genericaliasobject.c#L568-L582,
3647 # so that we emulate the behaviour of `types.GenericAlias`
3648 # on the latest versions of CPython
3649 _ATTRIBUTE_DELEGATION_EXCLUSIONS = frozenset({
3650 "__class__",
3651 "__bases__",
3652 "__origin__",
3653 "__args__",
3654 "__unpacked__",
3655 "__parameters__",
3656 "__typing_unpacked_tuple_args__",
3657 "__mro_entries__",
3658 "__reduce_ex__",
3659 "__reduce__",
3660 "__copy__",
3661 "__deepcopy__",
3662 })
3663
3664 class _TypeAliasGenericAlias(typing._GenericAlias, _root=True):
3665 def __getattr__(self, attr):
3666 if attr in _ATTRIBUTE_DELEGATION_EXCLUSIONS:
3667 return object.__getattr__(self, attr)
3668 return getattr(self.__origin__, attr)
3669
3670
3671 class TypeAliasType:
3672 """Create named, parameterized type aliases.
3673
3674 This provides a backport of the new `type` statement in Python 3.12:
3675
3676 type ListOrSet[T] = list[T] | set[T]
3677
3678 is equivalent to:
3679
3680 T = TypeVar("T")
3681 ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,))
3682
3683 The name ListOrSet can then be used as an alias for the type it refers to.
3684
3685 The type_params argument should contain all the type parameters used
3686 in the value of the type alias. If the alias is not generic, this
3687 argument is omitted.
3688
3689 Static type checkers should only support type aliases declared using
3690 TypeAliasType that follow these rules:
3691
3692 - The first argument (the name) must be a string literal.
3693 - The TypeAliasType instance must be immediately assigned to a variable
3694 of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid,
3695 as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)').
3696
3697 """
3698
3699 def __init__(self, name: str, value, *, type_params=()):
3700 if not isinstance(name, str):
3701 raise TypeError("TypeAliasType name must be a string")
3702 if not isinstance(type_params, tuple):
3703 raise TypeError("type_params must be a tuple")
3704 self.__value__ = value
3705 self.__type_params__ = type_params
3706
3707 default_value_encountered = False
3708 parameters = []
3709 for type_param in type_params:
3710 if (
3711 not isinstance(type_param, (TypeVar, TypeVarTuple, ParamSpec))
3712 # <=3.11
                    # The Unpack backport passes isinstance(type_param, TypeVar),
                    # so unpacked objects must be rejected explicitly here
3714 or _is_unpack(type_param)
3715 ):
3716 raise TypeError(f"Expected a type param, got {type_param!r}")
3717 has_default = (
3718 getattr(type_param, '__default__', NoDefault) is not NoDefault
3719 )
3720 if default_value_encountered and not has_default:
3721 raise TypeError(f"non-default type parameter '{type_param!r}'"
3722 " follows default type parameter")
3723 if has_default:
3724 default_value_encountered = True
3725 if isinstance(type_param, TypeVarTuple):
3726 parameters.extend(type_param)
3727 else:
3728 parameters.append(type_param)
3729 self.__parameters__ = tuple(parameters)
3730 def_mod = _caller()
3731 if def_mod != 'typing_extensions':
3732 self.__module__ = def_mod
            # Setting __name__ must come last: once it exists, __setattr__ below
            # rejects all further attribute assignment, sealing the alias
3734 self.__name__ = name
3735
3736 def __setattr__(self, name: str, value: object, /) -> None:
3737 if hasattr(self, "__name__"):
3738 self._raise_attribute_error(name)
3739 super().__setattr__(name, value)
3740
3741 def __delattr__(self, name: str, /) -> Never:
3742 self._raise_attribute_error(name)
3743
3744 def _raise_attribute_error(self, name: str) -> Never:
3745 # Match the Python 3.12 error messages exactly
3746 if name == "__name__":
3747 raise AttributeError("readonly attribute")
3748 elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}:
3749 raise AttributeError(
3750 f"attribute '{name}' of 'typing.TypeAliasType' objects "
3751 "is not writable"
3752 )
3753 else:
3754 raise AttributeError(
3755 f"'typing.TypeAliasType' object has no attribute '{name}'"
3756 )
3757
3758 def __repr__(self) -> str:
3759 return self.__name__
3760
3761 if sys.version_info < (3, 11):
3762 def _check_single_param(self, param, recursion=0):
3763 # Allow [], [int], [int, str], [int, ...], [int, T]
3764 if param is ...:
3765 return ...
3766 if param is None:
3767 return None
                # Note: on <=3.9, _ConcatenateGenericAlias inherits from list
3769 if isinstance(param, list) and recursion == 0:
3770 return [self._check_single_param(arg, recursion+1)
3771 for arg in param]
3772 return typing._type_check(
3773 param, f'Subscripting {self.__name__} requires a type.'
3774 )
3775
3776 def _check_parameters(self, parameters):
3777 if sys.version_info < (3, 11):
3778 return tuple(
3779 self._check_single_param(item)
3780 for item in parameters
3781 )
3782 return tuple(typing._type_check(
3783 item, f'Subscripting {self.__name__} requires a type.'
3784 )
3785 for item in parameters
3786 )
3787
3788 def __getitem__(self, parameters):
3789 if not self.__type_params__:
3790 raise TypeError("Only generic type aliases are subscriptable")
3791 if not isinstance(parameters, tuple):
3792 parameters = (parameters,)
            # types.GenericAlias also exists on 3.9, but using it there
            # creates problems with Concatenate
3794 if sys.version_info >= (3, 10):
3795 return _types.GenericAlias(self, parameters)
3796 type_vars = _collect_type_vars(parameters)
3797 parameters = self._check_parameters(parameters)
3798 alias = _TypeAliasGenericAlias(self, parameters)
3799 # alias.__parameters__ is not complete if Concatenate is present
3800 # as it is converted to a list from which no parameters are extracted.
3801 if alias.__parameters__ != type_vars:
3802 alias.__parameters__ = type_vars
3803 return alias
3804
3805 def __reduce__(self):
3806 return self.__name__
3807
3808 def __init_subclass__(cls, *args, **kwargs):
3809 raise TypeError(
3810 "type 'typing_extensions.TypeAliasType' is not an acceptable base type"
3811 )
3812
3813 # The presence of this method convinces typing._type_check
3814 # that TypeAliasTypes are types.
3815 def __call__(self):
3816 raise TypeError("Type alias is not callable")
3817
3818 # Breakpoint: https://github.com/python/cpython/pull/21515
3819 if sys.version_info >= (3, 10):
3820 def __or__(self, right):
3821 # For forward compatibility with 3.12, reject Unions
3822 # that are not accepted by the built-in Union.
3823 if not _is_unionable(right):
3824 return NotImplemented
3825 return typing.Union[self, right]
3826
3827 def __ror__(self, left):
3828 if not _is_unionable(left):
3829 return NotImplemented
3830 return typing.Union[left, self]
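    # Illustrative sketch of the backport's runtime behaviour (not executed here):
    #
    #     T = TypeVar("T")
    #     ListOrSet = TypeAliasType("ListOrSet", typing.List[T], type_params=(T,))
    #
    #     ListOrSet.__value__            # typing.List[T]
    #     ListOrSet[int]                 # subscriptable because the alias is generic
    #     ListOrSet | None               # allowed on 3.10+; equivalent to Optional[ListOrSet]
    #     ListOrSet.__name__ = "Other"   # AttributeError: readonly attribute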
3831
3832
3833if hasattr(typing, "is_protocol"):
3834 is_protocol = typing.is_protocol
3835 get_protocol_members = typing.get_protocol_members
3836else:
3837 def is_protocol(tp: type, /) -> bool:
3838 """Return True if the given type is a Protocol.
3839
3840 Example::
3841
3842 >>> from typing_extensions import Protocol, is_protocol
3843 >>> class P(Protocol):
3844 ... def a(self) -> str: ...
3845 ... b: int
3846 >>> is_protocol(P)
3847 True
3848 >>> is_protocol(int)
3849 False
3850 """
3851 return (
3852 isinstance(tp, type)
3853 and getattr(tp, '_is_protocol', False)
3854 and tp is not Protocol
3855 and tp is not typing.Protocol
3856 )
3857
3858 def get_protocol_members(tp: type, /) -> typing.FrozenSet[str]:
3859 """Return the set of members defined in a Protocol.
3860
3861 Example::
3862
3863 >>> from typing_extensions import Protocol, get_protocol_members
3864 >>> class P(Protocol):
3865 ... def a(self) -> str: ...
3866 ... b: int
3867 >>> get_protocol_members(P) == frozenset({'a', 'b'})
3868 True
3869
3870 Raise a TypeError for arguments that are not Protocols.
3871 """
3872 if not is_protocol(tp):
3873 raise TypeError(f'{tp!r} is not a Protocol')
3874 if hasattr(tp, '__protocol_attrs__'):
3875 return frozenset(tp.__protocol_attrs__)
3876 return frozenset(_get_protocol_attrs(tp))
3877
3878
3879if hasattr(typing, "Doc"):
3880 Doc = typing.Doc
3881else:
3882 class Doc:
3883 """Define the documentation of a type annotation using ``Annotated``, to be
3884 used in class attributes, function and method parameters, return values,
3885 and variables.
3886
3887 The value should be a positional-only string literal to allow static tools
3888 like editors and documentation generators to use it.
3889
3890 This complements docstrings.
3891
3892 The string value passed is available in the attribute ``documentation``.
3893
3894 Example::
3895
3896 >>> from typing_extensions import Annotated, Doc
3897 >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ...
3898 """
3899 def __init__(self, documentation: str, /) -> None:
3900 self.documentation = documentation
3901
3902 def __repr__(self) -> str:
3903 return f"Doc({self.documentation!r})"
3904
3905 def __hash__(self) -> int:
3906 return hash(self.documentation)
3907
3908 def __eq__(self, other: object) -> bool:
3909 if not isinstance(other, Doc):
3910 return NotImplemented
3911 return self.documentation == other.documentation
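    # Illustrative sketch (not executed here): tools can recover the Doc
    # metadata from an Annotated type at runtime using get_args():
    #
    #     hint = Annotated[str, Doc("Who to say hi to")]
    #     get_args(hint)[1].documentation   # 'Who to say hi to'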
3912
3913
3914_CapsuleType = getattr(_types, "CapsuleType", None)
3915
3916if _CapsuleType is None:
3917 try:
3918 import _socket
3919 except ImportError:
3920 pass
3921 else:
3922 _CAPI = getattr(_socket, "CAPI", None)
3923 if _CAPI is not None:
3924 _CapsuleType = type(_CAPI)
3925
3926if _CapsuleType is not None:
3927 CapsuleType = _CapsuleType
3928 __all__.append("CapsuleType")
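# Illustrative sketch (not executed here): when available, CapsuleType is the
# type of C-API capsule objects such as datetime.datetime_CAPI, and is mainly
# useful for annotating them:
#
#     def load_capi(capsule: CapsuleType) -> None: ...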
3929
3930
3931if sys.version_info >= (3, 14):
3932 from annotationlib import Format, get_annotations
3933else:
3934 # Available since Python 3.14.0a3
3935 # PR: https://github.com/python/cpython/pull/124415
3936 class Format(enum.IntEnum):
3937 VALUE = 1
3938 VALUE_WITH_FAKE_GLOBALS = 2
3939 FORWARDREF = 3
3940 STRING = 4
3941
3942 # Available since Python 3.14.0a1
3943 # PR: https://github.com/python/cpython/pull/119891
3944 def get_annotations(obj, *, globals=None, locals=None, eval_str=False,
3945 format=Format.VALUE):
3946 """Compute the annotations dict for an object.
3947
3948 obj may be a callable, class, or module.
3949 Passing in an object of any other type raises TypeError.
3950
3951 Returns a dict. get_annotations() returns a new dict every time
3952 it's called; calling it twice on the same object will return two
3953 different but equivalent dicts.
3954
3955 This is a backport of `inspect.get_annotations`, which has been
3956 in the standard library since Python 3.10. See the standard library
3957 documentation for more:
3958
3959 https://docs.python.org/3/library/inspect.html#inspect.get_annotations
3960
3961 This backport adds the *format* argument introduced by PEP 649. The
3962 three formats supported are:
3963 * VALUE: the annotations are returned as-is. This is the default and
3964 it is compatible with the behavior on previous Python versions.
3965 * FORWARDREF: return annotations as-is if possible, but replace any
3966 undefined names with ForwardRef objects. The implementation proposed by
3967 PEP 649 relies on language changes that cannot be backported; the
3968 typing-extensions implementation simply returns the same result as VALUE.
3969 * STRING: return annotations as strings, in a format close to the original
3970 source. Again, this behavior cannot be replicated directly in a backport.
3971 As an approximation, typing-extensions retrieves the annotations under
3972 VALUE semantics and then stringifies them.
3973
3974 The purpose of this backport is to allow users who would like to use
3975 FORWARDREF or STRING semantics once PEP 649 is implemented, but who also
3976 want to support earlier Python versions, to simply write:
3977
3978 typing_extensions.get_annotations(obj, format=Format.FORWARDREF)
3979
3980 """
3981 format = Format(format)
3982 if format is Format.VALUE_WITH_FAKE_GLOBALS:
3983 raise ValueError(
3984 "The VALUE_WITH_FAKE_GLOBALS format is for internal use only"
3985 )
3986
3987 if eval_str and format is not Format.VALUE:
3988 raise ValueError("eval_str=True is only supported with format=Format.VALUE")
3989
3990 if isinstance(obj, type):
3991 # class
3992 obj_dict = getattr(obj, '__dict__', None)
3993 if obj_dict and hasattr(obj_dict, 'get'):
3994 ann = obj_dict.get('__annotations__', None)
3995 if isinstance(ann, _types.GetSetDescriptorType):
3996 ann = None
3997 else:
3998 ann = None
3999
4000 obj_globals = None
4001 module_name = getattr(obj, '__module__', None)
4002 if module_name:
4003 module = sys.modules.get(module_name, None)
4004 if module:
4005 obj_globals = getattr(module, '__dict__', None)
4006 obj_locals = dict(vars(obj))
4007 unwrap = obj
4008 elif isinstance(obj, _types.ModuleType):
4009 # module
4010 ann = getattr(obj, '__annotations__', None)
4011 obj_globals = obj.__dict__
4012 obj_locals = None
4013 unwrap = None
4014 elif callable(obj):
4015 # this includes types.Function, types.BuiltinFunctionType,
4016 # types.BuiltinMethodType, functools.partial, functools.singledispatch,
4017 # "class funclike" from Lib/test/test_inspect... on and on it goes.
4018 ann = getattr(obj, '__annotations__', None)
4019 obj_globals = getattr(obj, '__globals__', None)
4020 obj_locals = None
4021 unwrap = obj
4022 elif hasattr(obj, '__annotations__'):
4023 ann = obj.__annotations__
4024 obj_globals = obj_locals = unwrap = None
4025 else:
4026 raise TypeError(f"{obj!r} is not a module, class, or callable.")
4027
4028 if ann is None:
4029 return {}
4030
4031 if not isinstance(ann, dict):
4032 raise ValueError(f"{obj!r}.__annotations__ is neither a dict nor None")
4033
4034 if not ann:
4035 return {}
4036
4037 if not eval_str:
4038 if format is Format.STRING:
4039 return {
4040 key: value if isinstance(value, str) else typing._type_repr(value)
4041 for key, value in ann.items()
4042 }
4043 return dict(ann)
4044
4045 if unwrap is not None:
4046 while True:
4047 if hasattr(unwrap, '__wrapped__'):
4048 unwrap = unwrap.__wrapped__
4049 continue
4050 if isinstance(unwrap, functools.partial):
4051 unwrap = unwrap.func
4052 continue
4053 break
4054 if hasattr(unwrap, "__globals__"):
4055 obj_globals = unwrap.__globals__
4056
4057 if globals is None:
4058 globals = obj_globals
4059 if locals is None:
4060 locals = obj_locals or {}
4061
4062 # "Inject" type parameters into the local namespace
4063 # (unless they are shadowed by assignments *in* the local namespace),
4064 # as a way of emulating annotation scopes when calling `eval()`
4065 if type_params := getattr(obj, "__type_params__", ()):
4066 locals = {param.__name__: param for param in type_params} | locals
4067
        return_value = {
            key: value if not isinstance(value, str) else eval(value, globals, locals)
            for key, value in ann.items()
        }
4071 return return_value
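    # Illustrative sketch of the backported semantics (example function only,
    # not executed here):
    #
    #     def f(x: "int", y: list) -> None: ...
    #
    #     get_annotations(f)                        # {'x': 'int', 'y': list, 'return': None}
    #     get_annotations(f, eval_str=True)         # {'x': int, 'y': list, 'return': None}
    #     get_annotations(f, format=Format.STRING)  # {'x': 'int', 'y': 'list', 'return': 'None'}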
4072
4073
4074if hasattr(typing, "evaluate_forward_ref"):
4075 evaluate_forward_ref = typing.evaluate_forward_ref
4076else:
4077 # Implements annotationlib.ForwardRef.evaluate
4078 def _eval_with_owner(
4079 forward_ref, *, owner=None, globals=None, locals=None, type_params=None
4080 ):
4081 if forward_ref.__forward_evaluated__:
4082 return forward_ref.__forward_value__
4083 if getattr(forward_ref, "__cell__", None) is not None:
4084 try:
4085 value = forward_ref.__cell__.cell_contents
4086 except ValueError:
4087 pass
4088 else:
4089 forward_ref.__forward_evaluated__ = True
4090 forward_ref.__forward_value__ = value
4091 return value
4092 if owner is None:
4093 owner = getattr(forward_ref, "__owner__", None)
4094
4095 if (
4096 globals is None
4097 and getattr(forward_ref, "__forward_module__", None) is not None
4098 ):
4099 globals = getattr(
4100 sys.modules.get(forward_ref.__forward_module__, None), "__dict__", None
4101 )
4102 if globals is None:
4103 globals = getattr(forward_ref, "__globals__", None)
4104 if globals is None:
4105 if isinstance(owner, type):
4106 module_name = getattr(owner, "__module__", None)
4107 if module_name:
4108 module = sys.modules.get(module_name, None)
4109 if module:
4110 globals = getattr(module, "__dict__", None)
4111 elif isinstance(owner, _types.ModuleType):
4112 globals = getattr(owner, "__dict__", None)
4113 elif callable(owner):
4114 globals = getattr(owner, "__globals__", None)
4115
4116 # If we pass None to eval() below, the globals of this module are used.
4117 if globals is None:
4118 globals = {}
4119
4120 if locals is None:
4121 locals = {}
4122 if isinstance(owner, type):
4123 locals.update(vars(owner))
4124
4125 if type_params is None and owner is not None:
4126 # "Inject" type parameters into the local namespace
4127 # (unless they are shadowed by assignments *in* the local namespace),
4128 # as a way of emulating annotation scopes when calling `eval()`
4129 type_params = getattr(owner, "__type_params__", None)
4130
4131 # Type parameters exist in their own scope, which is logically
4132 # between the locals and the globals. We simulate this by adding
4133 # them to the globals.
4134 if type_params is not None:
4135 globals = dict(globals)
4136 for param in type_params:
4137 globals[param.__name__] = param
4138
4139 arg = forward_ref.__forward_arg__
4140 if arg.isidentifier() and not keyword.iskeyword(arg):
4141 if arg in locals:
4142 value = locals[arg]
4143 elif arg in globals:
4144 value = globals[arg]
4145 elif hasattr(builtins, arg):
4146 return getattr(builtins, arg)
4147 else:
4148 raise NameError(arg)
4149 else:
4150 code = forward_ref.__forward_code__
4151 value = eval(code, globals, locals)
4152 forward_ref.__forward_evaluated__ = True
4153 forward_ref.__forward_value__ = value
4154 return value
4155
4156 def evaluate_forward_ref(
4157 forward_ref,
4158 *,
4159 owner=None,
4160 globals=None,
4161 locals=None,
4162 type_params=None,
4163 format=None,
4164 _recursive_guard=frozenset(),
4165 ):
4166 """Evaluate a forward reference as a type hint.
4167
4168 This is similar to calling the ForwardRef.evaluate() method,
4169 but unlike that method, evaluate_forward_ref() also:
4170
4171 * Recursively evaluates forward references nested within the type hint.
4172 * Rejects certain objects that are not valid type hints.
4173 * Replaces type hints that evaluate to None with types.NoneType.
4174 * Supports the *FORWARDREF* and *STRING* formats.
4175
4176 *forward_ref* must be an instance of ForwardRef. *owner*, if given,
4177 should be the object that holds the annotations that the forward reference
4178 derived from, such as a module, class object, or function. It is used to
4179 infer the namespaces to use for looking up names. *globals* and *locals*
4180 can also be explicitly given to provide the global and local namespaces.
4181 *type_params* is a tuple of type parameters that are in scope when
4182 evaluating the forward reference. This parameter must be provided (though
4183 it may be an empty tuple) if *owner* is not given and the forward reference
4184 does not already have an owner set. *format* specifies the format of the
4185 annotation and is a member of the annotationlib.Format enum.
4186
4187 """
4188 if format == Format.STRING:
4189 return forward_ref.__forward_arg__
4190 if forward_ref.__forward_arg__ in _recursive_guard:
4191 return forward_ref
4192
4193 # Evaluate the forward reference
4194 try:
4195 value = _eval_with_owner(
4196 forward_ref,
4197 owner=owner,
4198 globals=globals,
4199 locals=locals,
4200 type_params=type_params,
4201 )
4202 except NameError:
4203 if format == Format.FORWARDREF:
4204 return forward_ref
4205 else:
4206 raise
4207
4208 if isinstance(value, str):
4209 value = ForwardRef(value)
4210
4211 # Recursively evaluate the type
4212 if isinstance(value, ForwardRef):
4213 if getattr(value, "__forward_module__", True) is not None:
4214 globals = None
4215 return evaluate_forward_ref(
4216 value,
4217 globals=globals,
4218 locals=locals,
4219 type_params=type_params, owner=owner,
4220 _recursive_guard=_recursive_guard, format=format
4221 )
4222 if sys.version_info < (3, 12, 5) and type_params:
4223 # Make use of type_params
4224 locals = dict(locals) if locals else {}
4225 for tvar in type_params:
                if tvar.__name__ not in locals:  # let's not overwrite something already present
4227 locals[tvar.__name__] = tvar
4228 if sys.version_info < (3, 12, 5):
4229 return typing._eval_type(
4230 value,
4231 globals,
4232 locals,
4233 recursive_guard=_recursive_guard | {forward_ref.__forward_arg__},
4234 )
4235 else:
4236 return typing._eval_type(
4237 value,
4238 globals,
4239 locals,
4240 type_params,
4241 recursive_guard=_recursive_guard | {forward_ref.__forward_arg__},
4242 )
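    # Illustrative sketch (not executed here): resolvable names evaluate to the
    # referenced object, while undefined names either raise NameError or, under
    # the FORWARDREF format, are returned as the unevaluated ForwardRef:
    #
    #     evaluate_forward_ref(ForwardRef("int"))                  # <class 'int'>
    #     evaluate_forward_ref(ForwardRef("Missing"))              # raises NameError
    #     evaluate_forward_ref(ForwardRef("Missing"),
    #                          format=Format.FORWARDREF)           # ForwardRef('Missing')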
4243
4244
4245if sys.version_info >= (3, 14, 0, "beta"):
4246 type_repr = annotationlib.type_repr
4247else:
4248 def type_repr(value):
4249 """Convert a Python value to a format suitable for use with the STRING format.
4250
4251 This is intended as a helper for tools that support the STRING format but do
4252 not have access to the code that originally produced the annotations. It uses
4253 repr() for most objects.
4254
4255 """
4256 if isinstance(value, (type, _types.FunctionType, _types.BuiltinFunctionType)):
4257 if value.__module__ == "builtins":
4258 return value.__qualname__
4259 return f"{value.__module__}.{value.__qualname__}"
4260 if value is ...:
4261 return "..."
4262 return repr(value)
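    # Illustrative sketch of the backport's output (not executed here):
    #
    #     type_repr(int)                    # 'int'
    #     type_repr(collections.abc.Sized)  # 'collections.abc.Sized'
    #     type_repr(...)                    # '...'
    #     type_repr("some text")            # "'some text'"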
4263
4264
4265# Aliases for items that are in typing in all supported versions.
4266# We use hasattr() checks so this library will continue to import on
4267# future versions of Python that may remove these names.
4268_typing_names = [
4269 "AbstractSet",
4270 "AnyStr",
4271 "BinaryIO",
4272 "Callable",
4273 "Collection",
4274 "Container",
4275 "Dict",
4276 "FrozenSet",
4277 "Hashable",
4278 "IO",
4279 "ItemsView",
4280 "Iterable",
4281 "Iterator",
4282 "KeysView",
4283 "List",
4284 "Mapping",
4285 "MappingView",
4286 "Match",
4287 "MutableMapping",
4288 "MutableSequence",
4289 "MutableSet",
4290 "Optional",
4291 "Pattern",
4292 "Reversible",
4293 "Sequence",
4294 "Set",
4295 "Sized",
4296 "TextIO",
4297 "Tuple",
4298 "Union",
4299 "ValuesView",
4300 "cast",
4301 "no_type_check",
4302 "no_type_check_decorator",
4303 # This is private, but it was defined by typing_extensions for a long time
4304 # and some users rely on it.
4305 "_AnnotatedAlias",
4306]
4307globals().update(
4308 {name: getattr(typing, name) for name in _typing_names if hasattr(typing, name)}
4309)
4310# These are defined unconditionally because they are used in
4311# typing-extensions itself.
4312Generic = typing.Generic
4313ForwardRef = typing.ForwardRef
4314Annotated = typing.Annotated