1import abc
2import builtins
3import collections
4import collections.abc
5import contextlib
6import enum
7import functools
8import inspect
9import io
10import keyword
11import operator
12import sys
13import types as _types
14import typing
15import warnings
16
17# Breakpoint: https://github.com/python/cpython/pull/119891
18if sys.version_info >= (3, 14):
19 import annotationlib
20
21__all__ = [
22 # Super-special typing primitives.
23 'Any',
24 'ClassVar',
25 'Concatenate',
26 'Final',
27 'LiteralString',
28 'ParamSpec',
29 'ParamSpecArgs',
30 'ParamSpecKwargs',
31 'Self',
32 'Type',
33 'TypeVar',
34 'TypeVarTuple',
35 'Unpack',
36
37 # ABCs (from collections.abc).
38 'Awaitable',
39 'AsyncIterator',
40 'AsyncIterable',
41 'Coroutine',
42 'AsyncGenerator',
43 'AsyncContextManager',
44 'Buffer',
45 'ChainMap',
46
47 # Concrete collection types.
48 'ContextManager',
49 'Counter',
50 'Deque',
51 'DefaultDict',
52 'NamedTuple',
53 'OrderedDict',
54 'TypedDict',
55
56 # Structural checks, a.k.a. protocols.
57 'SupportsAbs',
58 'SupportsBytes',
59 'SupportsComplex',
60 'SupportsFloat',
61 'SupportsIndex',
62 'SupportsInt',
63 'SupportsRound',
64 'Reader',
65 'Writer',
66
67 # One-off things.
68 'Annotated',
69 'assert_never',
70 'assert_type',
71 'clear_overloads',
72 'dataclass_transform',
73 'deprecated',
74 'disjoint_base',
75 'Doc',
76 'evaluate_forward_ref',
77 'get_overloads',
78 'final',
79 'Format',
80 'get_annotations',
81 'get_args',
82 'get_origin',
83 'get_original_bases',
84 'get_protocol_members',
85 'get_type_hints',
86 'IntVar',
87 'is_protocol',
88 'is_typeddict',
89 'Literal',
90 'NewType',
91 'overload',
92 'override',
93 'Protocol',
94 'Sentinel',
95 'reveal_type',
96 'runtime',
97 'runtime_checkable',
98 'Text',
99 'TypeAlias',
100 'TypeAliasType',
101 'TypeForm',
102 'TypeGuard',
103 'TypeIs',
104 'TYPE_CHECKING',
105 'type_repr',
106 'Never',
107 'NoReturn',
108 'ReadOnly',
109 'Required',
110 'NotRequired',
111 'NoDefault',
112 'NoExtraItems',
113
114 # Pure aliases, have always been in typing
115 'AbstractSet',
116 'AnyStr',
117 'BinaryIO',
118 'Callable',
119 'Collection',
120 'Container',
121 'Dict',
122 'ForwardRef',
123 'FrozenSet',
124 'Generator',
125 'Generic',
126 'Hashable',
127 'IO',
128 'ItemsView',
129 'Iterable',
130 'Iterator',
131 'KeysView',
132 'List',
133 'Mapping',
134 'MappingView',
135 'Match',
136 'MutableMapping',
137 'MutableSequence',
138 'MutableSet',
139 'Optional',
140 'Pattern',
141 'Reversible',
142 'Sequence',
143 'Set',
144 'Sized',
145 'TextIO',
146 'Tuple',
147 'Union',
148 'ValuesView',
149 'cast',
150 'no_type_check',
151 'no_type_check_decorator',
152]
153
154# for backward compatibility
155PEP_560 = True
156GenericMeta = type
157# Breakpoint: https://github.com/python/cpython/pull/116129
158_PEP_696_IMPLEMENTED = sys.version_info >= (3, 13, 0, "beta")
159
160# Added with bpo-45166 to 3.10.1+ and some 3.9 versions
161_FORWARD_REF_HAS_CLASS = "__forward_is_class__" in typing.ForwardRef.__slots__
162
163class Sentinel:
164 """Create a unique sentinel object.
165
166 *name* should be the name of the variable to which the return value shall be assigned.
167
168 *repr*, if supplied, will be used for the repr of the sentinel object.
169 If not provided, "<name>" will be used.
170 """
171
172 def __init__(
173 self,
174 name: str,
175 repr: typing.Optional[str] = None,
176 ):
177 self._name = name
178 self._repr = repr if repr is not None else f'<{name}>'
179
180 def __repr__(self):
181 return self._repr
182
183 if sys.version_info < (3, 11):
184 # The presence of this method convinces typing._type_check
185 # that Sentinels are types.
186 def __call__(self, *args, **kwargs):
187 raise TypeError(f"{type(self).__name__!r} object is not callable")
188
189 # Breakpoint: https://github.com/python/cpython/pull/21515
190 if sys.version_info >= (3, 10):
191 def __or__(self, other):
192 return typing.Union[self, other]
193
194 def __ror__(self, other):
195 return typing.Union[other, self]
196
197 def __getstate__(self):
198 raise TypeError(f"Cannot pickle {type(self).__name__!r} object")
199
200
201_marker = Sentinel("sentinel")
202
203# The functions below are modified copies of typing internal helpers.
204# They are needed by _ProtocolMeta and they provide support for PEP 646.
205
206# Breakpoint: https://github.com/python/cpython/pull/27342
207if sys.version_info >= (3, 10):
208 def _should_collect_from_parameters(t):
209 return isinstance(
210 t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType)
211 )
212else:
213 def _should_collect_from_parameters(t):
214 return isinstance(t, (typing._GenericAlias, _types.GenericAlias))
215
216
217NoReturn = typing.NoReturn
218
219# Some unconstrained type variables. These are used by the container types.
220# (These are not for export.)
221T = typing.TypeVar('T') # Any type.
222KT = typing.TypeVar('KT') # Key type.
223VT = typing.TypeVar('VT') # Value type.
224T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers.
225T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant.
226
227
228# Breakpoint: https://github.com/python/cpython/pull/31841
229if sys.version_info >= (3, 11):
230 from typing import Any
231else:
232
233 class _AnyMeta(type):
234 def __instancecheck__(self, obj):
235 if self is Any:
236 raise TypeError("typing_extensions.Any cannot be used with isinstance()")
237 return super().__instancecheck__(obj)
238
239 def __repr__(self):
240 if self is Any:
241 return "typing_extensions.Any"
242 return super().__repr__()
243
244 class Any(metaclass=_AnyMeta):
245 """Special type indicating an unconstrained type.
246 - Any is compatible with every type.
247 - Any assumed to have all methods.
248 - All values assumed to be instances of Any.
249 Note that all the above statements are true from the point of view of
250 static type checkers. At runtime, Any should not be used with instance
251 checks.
252 """
253 def __new__(cls, *args, **kwargs):
254 if cls is Any:
255 raise TypeError("Any cannot be instantiated")
256 return super().__new__(cls, *args, **kwargs)
257
258
259ClassVar = typing.ClassVar
260
261# Vendored from cpython typing._SpecialFrom
262# Having a separate class means that instances will not be rejected by
263# typing._type_check.
264class _SpecialForm(typing._Final, _root=True):
265 __slots__ = ('_name', '__doc__', '_getitem')
266
267 def __init__(self, getitem):
268 self._getitem = getitem
269 self._name = getitem.__name__
270 self.__doc__ = getitem.__doc__
271
272 def __getattr__(self, item):
273 if item in {'__name__', '__qualname__'}:
274 return self._name
275
276 raise AttributeError(item)
277
278 def __mro_entries__(self, bases):
279 raise TypeError(f"Cannot subclass {self!r}")
280
281 def __repr__(self):
282 return f'typing_extensions.{self._name}'
283
284 def __reduce__(self):
285 return self._name
286
287 def __call__(self, *args, **kwds):
288 raise TypeError(f"Cannot instantiate {self!r}")
289
290 def __or__(self, other):
291 return typing.Union[self, other]
292
293 def __ror__(self, other):
294 return typing.Union[other, self]
295
296 def __instancecheck__(self, obj):
297 raise TypeError(f"{self} cannot be used with isinstance()")
298
299 def __subclasscheck__(self, cls):
300 raise TypeError(f"{self} cannot be used with issubclass()")
301
302 @typing._tp_cache
303 def __getitem__(self, parameters):
304 return self._getitem(self, parameters)
305
306
307# Note that inheriting from this class means that the object will be
308# rejected by typing._type_check, so do not use it if the special form
309# is arguably valid as a type by itself.
310class _ExtensionsSpecialForm(typing._SpecialForm, _root=True):
311 def __repr__(self):
312 return 'typing_extensions.' + self._name
313
314
315Final = typing.Final
316
317# Breakpoint: https://github.com/python/cpython/pull/30530
318if sys.version_info >= (3, 11):
319 final = typing.final
320else:
321 # @final exists in 3.8+, but we backport it for all versions
322 # before 3.11 to keep support for the __final__ attribute.
323 # See https://bugs.python.org/issue46342
324 def final(f):
325 """This decorator can be used to indicate to type checkers that
326 the decorated method cannot be overridden, and decorated class
327 cannot be subclassed. For example:
328
329 class Base:
330 @final
331 def done(self) -> None:
332 ...
333 class Sub(Base):
334 def done(self) -> None: # Error reported by type checker
335 ...
336 @final
337 class Leaf:
338 ...
339 class Other(Leaf): # Error reported by type checker
340 ...
341
342 There is no runtime checking of these properties. The decorator
343 sets the ``__final__`` attribute to ``True`` on the decorated object
344 to allow runtime introspection.
345 """
346 try:
347 f.__final__ = True
348 except (AttributeError, TypeError):
349 # Skip the attribute silently if it is not writable.
350 # AttributeError happens if the object has __slots__ or a
351 # read-only property, TypeError if it's a builtin class.
352 pass
353 return f
354
355
356if hasattr(typing, "disjoint_base"): # 3.15
357 disjoint_base = typing.disjoint_base
358else:
359 def disjoint_base(cls):
360 """This decorator marks a class as a disjoint base.
361
362 Child classes of a disjoint base cannot inherit from other disjoint bases that are
363 not parent classes of the disjoint base.
364
365 For example:
366
367 @disjoint_base
368 class Disjoint1: pass
369
370 @disjoint_base
371 class Disjoint2: pass
372
373 class Disjoint3(Disjoint1, Disjoint2): pass # Type checker error
374
375 Type checkers can use knowledge of disjoint bases to detect unreachable code
376 and determine when two types can overlap.
377
378 See PEP 800."""
379 cls.__disjoint_base__ = True
380 return cls
381
382
383def IntVar(name):
384 return typing.TypeVar(name)
385
386
387# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8
388# Breakpoint: https://github.com/python/cpython/pull/29334
389if sys.version_info >= (3, 10, 1):
390 Literal = typing.Literal
391else:
392 def _flatten_literal_params(parameters):
393 """An internal helper for Literal creation: flatten Literals among parameters"""
394 params = []
395 for p in parameters:
396 if isinstance(p, _LiteralGenericAlias):
397 params.extend(p.__args__)
398 else:
399 params.append(p)
400 return tuple(params)
401
402 def _value_and_type_iter(params):
403 for p in params:
404 yield p, type(p)
405
406 class _LiteralGenericAlias(typing._GenericAlias, _root=True):
407 def __eq__(self, other):
408 if not isinstance(other, _LiteralGenericAlias):
409 return NotImplemented
410 these_args_deduped = set(_value_and_type_iter(self.__args__))
411 other_args_deduped = set(_value_and_type_iter(other.__args__))
412 return these_args_deduped == other_args_deduped
413
414 def __hash__(self):
415 return hash(frozenset(_value_and_type_iter(self.__args__)))
416
417 class _LiteralForm(_ExtensionsSpecialForm, _root=True):
418 def __init__(self, doc: str):
419 self._name = 'Literal'
420 self._doc = self.__doc__ = doc
421
422 def __getitem__(self, parameters):
423 if not isinstance(parameters, tuple):
424 parameters = (parameters,)
425
426 parameters = _flatten_literal_params(parameters)
427
428 val_type_pairs = list(_value_and_type_iter(parameters))
429 try:
430 deduped_pairs = set(val_type_pairs)
431 except TypeError:
432 # unhashable parameters
433 pass
434 else:
435 # similar logic to typing._deduplicate on Python 3.9+
436 if len(deduped_pairs) < len(val_type_pairs):
437 new_parameters = []
438 for pair in val_type_pairs:
439 if pair in deduped_pairs:
440 new_parameters.append(pair[0])
441 deduped_pairs.remove(pair)
442 assert not deduped_pairs, deduped_pairs
443 parameters = tuple(new_parameters)
444
445 return _LiteralGenericAlias(self, parameters)
446
447 Literal = _LiteralForm(doc="""\
448 A type that can be used to indicate to type checkers
449 that the corresponding value has a value literally equivalent
450 to the provided parameter. For example:
451
452 var: Literal[4] = 4
453
454 The type checker understands that 'var' is literally equal to
455 the value 4 and no other value.
456
457 Literal[...] cannot be subclassed. There is no runtime
458 checking verifying that the parameter is actually a value
459 instead of a type.""")
460
461
462_overload_dummy = typing._overload_dummy
463
464
465if hasattr(typing, "get_overloads"): # 3.11+
466 overload = typing.overload
467 get_overloads = typing.get_overloads
468 clear_overloads = typing.clear_overloads
469else:
470 # {module: {qualname: {firstlineno: func}}}
471 _overload_registry = collections.defaultdict(
472 functools.partial(collections.defaultdict, dict)
473 )
474
475 def overload(func):
476 """Decorator for overloaded functions/methods.
477
478 In a stub file, place two or more stub definitions for the same
479 function in a row, each decorated with @overload. For example:
480
481 @overload
482 def utf8(value: None) -> None: ...
483 @overload
484 def utf8(value: bytes) -> bytes: ...
485 @overload
486 def utf8(value: str) -> bytes: ...
487
488 In a non-stub file (i.e. a regular .py file), do the same but
489 follow it with an implementation. The implementation should *not*
490 be decorated with @overload. For example:
491
492 @overload
493 def utf8(value: None) -> None: ...
494 @overload
495 def utf8(value: bytes) -> bytes: ...
496 @overload
497 def utf8(value: str) -> bytes: ...
498 def utf8(value):
499 # implementation goes here
500
501 The overloads for a function can be retrieved at runtime using the
502 get_overloads() function.
503 """
504 # classmethod and staticmethod
505 f = getattr(func, "__func__", func)
506 try:
507 _overload_registry[f.__module__][f.__qualname__][
508 f.__code__.co_firstlineno
509 ] = func
510 except AttributeError:
511 # Not a normal function; ignore.
512 pass
513 return _overload_dummy
514
515 def get_overloads(func):
516 """Return all defined overloads for *func* as a sequence."""
517 # classmethod and staticmethod
518 f = getattr(func, "__func__", func)
519 if f.__module__ not in _overload_registry:
520 return []
521 mod_dict = _overload_registry[f.__module__]
522 if f.__qualname__ not in mod_dict:
523 return []
524 return list(mod_dict[f.__qualname__].values())
525
526 def clear_overloads():
527 """Clear all overloads in the registry."""
528 _overload_registry.clear()
529
530
531# This is not a real generic class. Don't use outside annotations.
532Type = typing.Type
533
534# Various ABCs mimicking those in collections.abc.
535# A few are simply re-exported for completeness.
536Awaitable = typing.Awaitable
537Coroutine = typing.Coroutine
538AsyncIterable = typing.AsyncIterable
539AsyncIterator = typing.AsyncIterator
540Deque = typing.Deque
541DefaultDict = typing.DefaultDict
542OrderedDict = typing.OrderedDict
543Counter = typing.Counter
544ChainMap = typing.ChainMap
545Text = typing.Text
546TYPE_CHECKING = typing.TYPE_CHECKING
547
548
549# Breakpoint: https://github.com/python/cpython/pull/118681
550if sys.version_info >= (3, 13, 0, "beta"):
551 from typing import AsyncContextManager, AsyncGenerator, ContextManager, Generator
552else:
553 def _is_dunder(attr):
554 return attr.startswith('__') and attr.endswith('__')
555
556
557 class _SpecialGenericAlias(typing._SpecialGenericAlias, _root=True):
558 def __init__(self, origin, nparams, *, inst=True, name=None, defaults=()):
559 super().__init__(origin, nparams, inst=inst, name=name)
560 self._defaults = defaults
561
562 def __setattr__(self, attr, val):
563 allowed_attrs = {'_name', '_inst', '_nparams', '_defaults'}
564 if _is_dunder(attr) or attr in allowed_attrs:
565 object.__setattr__(self, attr, val)
566 else:
567 setattr(self.__origin__, attr, val)
568
569 @typing._tp_cache
570 def __getitem__(self, params):
571 if not isinstance(params, tuple):
572 params = (params,)
573 msg = "Parameters to generic types must be types."
574 params = tuple(typing._type_check(p, msg) for p in params)
575 if (
576 self._defaults
577 and len(params) < self._nparams
578 and len(params) + len(self._defaults) >= self._nparams
579 ):
580 params = (*params, *self._defaults[len(params) - self._nparams:])
581 actual_len = len(params)
582
583 if actual_len != self._nparams:
584 if self._defaults:
585 expected = f"at least {self._nparams - len(self._defaults)}"
586 else:
587 expected = str(self._nparams)
588 if not self._nparams:
589 raise TypeError(f"{self} is not a generic class")
590 raise TypeError(
591 f"Too {'many' if actual_len > self._nparams else 'few'}"
592 f" arguments for {self};"
593 f" actual {actual_len}, expected {expected}"
594 )
595 return self.copy_with(params)
596
597 _NoneType = type(None)
598 Generator = _SpecialGenericAlias(
599 collections.abc.Generator, 3, defaults=(_NoneType, _NoneType)
600 )
601 AsyncGenerator = _SpecialGenericAlias(
602 collections.abc.AsyncGenerator, 2, defaults=(_NoneType,)
603 )
604 ContextManager = _SpecialGenericAlias(
605 contextlib.AbstractContextManager,
606 2,
607 name="ContextManager",
608 defaults=(typing.Optional[bool],)
609 )
610 AsyncContextManager = _SpecialGenericAlias(
611 contextlib.AbstractAsyncContextManager,
612 2,
613 name="AsyncContextManager",
614 defaults=(typing.Optional[bool],)
615 )
616
617
618_PROTO_ALLOWLIST = {
619 'collections.abc': [
620 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
621 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer',
622 ],
623 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'],
624 'typing_extensions': ['Buffer'],
625}
626
627
628_EXCLUDED_ATTRS = frozenset(typing.EXCLUDED_ATTRIBUTES) | {
629 "__match_args__", "__protocol_attrs__", "__non_callable_proto_members__",
630 "__final__",
631}
632
633
634def _get_protocol_attrs(cls):
635 attrs = set()
636 for base in cls.__mro__[:-1]: # without object
637 if base.__name__ in {'Protocol', 'Generic'}:
638 continue
639 annotations = getattr(base, '__annotations__', {})
640 for attr in (*base.__dict__, *annotations):
641 if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS):
642 attrs.add(attr)
643 return attrs
644
645
646def _caller(depth=1, default='__main__'):
647 try:
648 return sys._getframemodulename(depth + 1) or default
649 except AttributeError: # For platforms without _getframemodulename()
650 pass
651 try:
652 return sys._getframe(depth + 1).f_globals.get('__name__', default)
653 except (AttributeError, ValueError): # For platforms without _getframe()
654 pass
655 return None
656
657
658# `__match_args__` attribute was removed from protocol members in 3.13,
659# we want to backport this change to older Python versions.
660# Breakpoint: https://github.com/python/cpython/pull/110683
661if sys.version_info >= (3, 13):
662 Protocol = typing.Protocol
663else:
664 def _allow_reckless_class_checks(depth=2):
665 """Allow instance and class checks for special stdlib modules.
666 The abc and functools modules indiscriminately call isinstance() and
667 issubclass() on the whole MRO of a user class, which may contain protocols.
668 """
669 return _caller(depth) in {'abc', 'functools', None}
670
671 def _no_init(self, *args, **kwargs):
672 if type(self)._is_protocol:
673 raise TypeError('Protocols cannot be instantiated')
674
675 def _type_check_issubclass_arg_1(arg):
676 """Raise TypeError if `arg` is not an instance of `type`
677 in `issubclass(arg, <protocol>)`.
678
679 In most cases, this is verified by type.__subclasscheck__.
680 Checking it again unnecessarily would slow down issubclass() checks,
681 so, we don't perform this check unless we absolutely have to.
682
683 For various error paths, however,
684 we want to ensure that *this* error message is shown to the user
685 where relevant, rather than a typing.py-specific error message.
686 """
687 if not isinstance(arg, type):
688 # Same error message as for issubclass(1, int).
689 raise TypeError('issubclass() arg 1 must be a class')
690
691 # Inheriting from typing._ProtocolMeta isn't actually desirable,
692 # but is necessary to allow typing.Protocol and typing_extensions.Protocol
693 # to mix without getting TypeErrors about "metaclass conflict"
694 class _ProtocolMeta(type(typing.Protocol)):
695 # This metaclass is somewhat unfortunate,
696 # but is necessary for several reasons...
697 #
698 # NOTE: DO NOT call super() in any methods in this class
699 # That would call the methods on typing._ProtocolMeta on Python <=3.11
700 # and those are slow
701 def __new__(mcls, name, bases, namespace, **kwargs):
702 if name == "Protocol" and len(bases) < 2:
703 pass
704 elif {Protocol, typing.Protocol} & set(bases):
705 for base in bases:
706 if not (
707 base in {object, typing.Generic, Protocol, typing.Protocol}
708 or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, [])
709 or is_protocol(base)
710 ):
711 raise TypeError(
712 f"Protocols can only inherit from other protocols, "
713 f"got {base!r}"
714 )
715 return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs)
716
717 def __init__(cls, *args, **kwargs):
718 abc.ABCMeta.__init__(cls, *args, **kwargs)
719 if getattr(cls, "_is_protocol", False):
720 cls.__protocol_attrs__ = _get_protocol_attrs(cls)
721
722 def __subclasscheck__(cls, other):
723 if cls is Protocol:
724 return type.__subclasscheck__(cls, other)
725 if (
726 getattr(cls, '_is_protocol', False)
727 and not _allow_reckless_class_checks()
728 ):
729 if not getattr(cls, '_is_runtime_protocol', False):
730 _type_check_issubclass_arg_1(other)
731 raise TypeError(
732 "Instance and class checks can only be used with "
733 "@runtime_checkable protocols"
734 )
735 if (
736 # this attribute is set by @runtime_checkable:
737 cls.__non_callable_proto_members__
738 and cls.__dict__.get("__subclasshook__") is _proto_hook
739 ):
740 _type_check_issubclass_arg_1(other)
741 non_method_attrs = sorted(cls.__non_callable_proto_members__)
742 raise TypeError(
743 "Protocols with non-method members don't support issubclass()."
744 f" Non-method members: {str(non_method_attrs)[1:-1]}."
745 )
746 return abc.ABCMeta.__subclasscheck__(cls, other)
747
748 def __instancecheck__(cls, instance):
749 # We need this method for situations where attributes are
750 # assigned in __init__.
751 if cls is Protocol:
752 return type.__instancecheck__(cls, instance)
753 if not getattr(cls, "_is_protocol", False):
754 # i.e., it's a concrete subclass of a protocol
755 return abc.ABCMeta.__instancecheck__(cls, instance)
756
757 if (
758 not getattr(cls, '_is_runtime_protocol', False) and
759 not _allow_reckless_class_checks()
760 ):
761 raise TypeError("Instance and class checks can only be used with"
762 " @runtime_checkable protocols")
763
764 if abc.ABCMeta.__instancecheck__(cls, instance):
765 return True
766
767 for attr in cls.__protocol_attrs__:
768 try:
769 val = inspect.getattr_static(instance, attr)
770 except AttributeError:
771 break
772 # this attribute is set by @runtime_checkable:
773 if val is None and attr not in cls.__non_callable_proto_members__:
774 break
775 else:
776 return True
777
778 return False
779
780 def __eq__(cls, other):
781 # Hack so that typing.Generic.__class_getitem__
782 # treats typing_extensions.Protocol
783 # as equivalent to typing.Protocol
784 if abc.ABCMeta.__eq__(cls, other) is True:
785 return True
786 return cls is Protocol and other is typing.Protocol
787
788 # This has to be defined, or the abc-module cache
789 # complains about classes with this metaclass being unhashable,
790 # if we define only __eq__!
791 def __hash__(cls) -> int:
792 return type.__hash__(cls)
793
794 @classmethod
795 def _proto_hook(cls, other):
796 if not cls.__dict__.get('_is_protocol', False):
797 return NotImplemented
798
799 for attr in cls.__protocol_attrs__:
800 for base in other.__mro__:
801 # Check if the members appears in the class dictionary...
802 if attr in base.__dict__:
803 if base.__dict__[attr] is None:
804 return NotImplemented
805 break
806
807 # ...or in annotations, if it is a sub-protocol.
808 annotations = getattr(base, '__annotations__', {})
809 if (
810 isinstance(annotations, collections.abc.Mapping)
811 and attr in annotations
812 and is_protocol(other)
813 ):
814 break
815 else:
816 return NotImplemented
817 return True
818
819 class Protocol(typing.Generic, metaclass=_ProtocolMeta):
820 __doc__ = typing.Protocol.__doc__
821 __slots__ = ()
822 _is_protocol = True
823 _is_runtime_protocol = False
824
825 def __init_subclass__(cls, *args, **kwargs):
826 super().__init_subclass__(*args, **kwargs)
827
828 # Determine if this is a protocol or a concrete subclass.
829 if not cls.__dict__.get('_is_protocol', False):
830 cls._is_protocol = any(b is Protocol for b in cls.__bases__)
831
832 # Set (or override) the protocol subclass hook.
833 if '__subclasshook__' not in cls.__dict__:
834 cls.__subclasshook__ = _proto_hook
835
836 # Prohibit instantiation for protocol classes
837 if cls._is_protocol and cls.__init__ is Protocol.__init__:
838 cls.__init__ = _no_init
839
840
841# Breakpoint: https://github.com/python/cpython/pull/113401
842if sys.version_info >= (3, 13):
843 runtime_checkable = typing.runtime_checkable
844else:
845 def runtime_checkable(cls):
846 """Mark a protocol class as a runtime protocol.
847
848 Such protocol can be used with isinstance() and issubclass().
849 Raise TypeError if applied to a non-protocol class.
850 This allows a simple-minded structural check very similar to
851 one trick ponies in collections.abc such as Iterable.
852
853 For example::
854
855 @runtime_checkable
856 class Closable(Protocol):
857 def close(self): ...
858
859 assert isinstance(open('/some/file'), Closable)
860
861 Warning: this will check only the presence of the required methods,
862 not their type signatures!
863 """
864 if not issubclass(cls, typing.Generic) or not getattr(cls, '_is_protocol', False):
865 raise TypeError(f'@runtime_checkable can be only applied to protocol classes,'
866 f' got {cls!r}')
867 cls._is_runtime_protocol = True
868
869 # typing.Protocol classes on <=3.11 break if we execute this block,
870 # because typing.Protocol classes on <=3.11 don't have a
871 # `__protocol_attrs__` attribute, and this block relies on the
872 # `__protocol_attrs__` attribute. Meanwhile, typing.Protocol classes on 3.12.2+
873 # break if we *don't* execute this block, because *they* assume that all
874 # protocol classes have a `__non_callable_proto_members__` attribute
875 # (which this block sets)
876 if isinstance(cls, _ProtocolMeta) or sys.version_info >= (3, 12, 2):
877 # PEP 544 prohibits using issubclass()
878 # with protocols that have non-method members.
879 # See gh-113320 for why we compute this attribute here,
880 # rather than in `_ProtocolMeta.__init__`
881 cls.__non_callable_proto_members__ = set()
882 for attr in cls.__protocol_attrs__:
883 try:
884 is_callable = callable(getattr(cls, attr, None))
885 except Exception as e:
886 raise TypeError(
887 f"Failed to determine whether protocol member {attr!r} "
888 "is a method member"
889 ) from e
890 else:
891 if not is_callable:
892 cls.__non_callable_proto_members__.add(attr)
893
894 return cls
895
896
897# The "runtime" alias exists for backwards compatibility.
898runtime = runtime_checkable
899
900
901# Our version of runtime-checkable protocols is faster on Python <=3.11
902# Breakpoint: https://github.com/python/cpython/pull/112717
903if sys.version_info >= (3, 12):
904 SupportsInt = typing.SupportsInt
905 SupportsFloat = typing.SupportsFloat
906 SupportsComplex = typing.SupportsComplex
907 SupportsBytes = typing.SupportsBytes
908 SupportsIndex = typing.SupportsIndex
909 SupportsAbs = typing.SupportsAbs
910 SupportsRound = typing.SupportsRound
911else:
912 @runtime_checkable
913 class SupportsInt(Protocol):
914 """An ABC with one abstract method __int__."""
915 __slots__ = ()
916
917 @abc.abstractmethod
918 def __int__(self) -> int:
919 pass
920
921 @runtime_checkable
922 class SupportsFloat(Protocol):
923 """An ABC with one abstract method __float__."""
924 __slots__ = ()
925
926 @abc.abstractmethod
927 def __float__(self) -> float:
928 pass
929
930 @runtime_checkable
931 class SupportsComplex(Protocol):
932 """An ABC with one abstract method __complex__."""
933 __slots__ = ()
934
935 @abc.abstractmethod
936 def __complex__(self) -> complex:
937 pass
938
939 @runtime_checkable
940 class SupportsBytes(Protocol):
941 """An ABC with one abstract method __bytes__."""
942 __slots__ = ()
943
944 @abc.abstractmethod
945 def __bytes__(self) -> bytes:
946 pass
947
948 @runtime_checkable
949 class SupportsIndex(Protocol):
950 __slots__ = ()
951
952 @abc.abstractmethod
953 def __index__(self) -> int:
954 pass
955
956 @runtime_checkable
957 class SupportsAbs(Protocol[T_co]):
958 """
959 An ABC with one abstract method __abs__ that is covariant in its return type.
960 """
961 __slots__ = ()
962
963 @abc.abstractmethod
964 def __abs__(self) -> T_co:
965 pass
966
967 @runtime_checkable
968 class SupportsRound(Protocol[T_co]):
969 """
970 An ABC with one abstract method __round__ that is covariant in its return type.
971 """
972 __slots__ = ()
973
974 @abc.abstractmethod
975 def __round__(self, ndigits: int = 0) -> T_co:
976 pass
977
978
979if hasattr(io, "Reader") and hasattr(io, "Writer"):
980 Reader = io.Reader
981 Writer = io.Writer
982else:
983 @runtime_checkable
984 class Reader(Protocol[T_co]):
985 """Protocol for simple I/O reader instances.
986
987 This protocol only supports blocking I/O.
988 """
989
990 __slots__ = ()
991
992 @abc.abstractmethod
993 def read(self, size: int = ..., /) -> T_co:
994 """Read data from the input stream and return it.
995
996 If *size* is specified, at most *size* items (bytes/characters) will be
997 read.
998 """
999
1000 @runtime_checkable
1001 class Writer(Protocol[T_contra]):
1002 """Protocol for simple I/O writer instances.
1003
1004 This protocol only supports blocking I/O.
1005 """
1006
1007 __slots__ = ()
1008
1009 @abc.abstractmethod
1010 def write(self, data: T_contra, /) -> int:
1011 """Write *data* to the output stream and return the number of items written.""" # noqa: E501
1012
1013
1014_NEEDS_SINGLETONMETA = (
1015 not hasattr(typing, "NoDefault") or not hasattr(typing, "NoExtraItems")
1016)
1017
1018if _NEEDS_SINGLETONMETA:
1019 class SingletonMeta(type):
1020 def __setattr__(cls, attr, value):
1021 # TypeError is consistent with the behavior of NoneType
1022 raise TypeError(
1023 f"cannot set {attr!r} attribute of immutable type {cls.__name__!r}"
1024 )
1025
1026
1027if hasattr(typing, "NoDefault"):
1028 NoDefault = typing.NoDefault
1029else:
1030 class NoDefaultType(metaclass=SingletonMeta):
1031 """The type of the NoDefault singleton."""
1032
1033 __slots__ = ()
1034
1035 def __new__(cls):
1036 return globals().get("NoDefault") or object.__new__(cls)
1037
1038 def __repr__(self):
1039 return "typing_extensions.NoDefault"
1040
1041 def __reduce__(self):
1042 return "NoDefault"
1043
1044 NoDefault = NoDefaultType()
1045 del NoDefaultType
1046
1047if hasattr(typing, "NoExtraItems"):
1048 NoExtraItems = typing.NoExtraItems
1049else:
1050 class NoExtraItemsType(metaclass=SingletonMeta):
1051 """The type of the NoExtraItems singleton."""
1052
1053 __slots__ = ()
1054
1055 def __new__(cls):
1056 return globals().get("NoExtraItems") or object.__new__(cls)
1057
1058 def __repr__(self):
1059 return "typing_extensions.NoExtraItems"
1060
1061 def __reduce__(self):
1062 return "NoExtraItems"
1063
1064 NoExtraItems = NoExtraItemsType()
1065 del NoExtraItemsType
1066
1067if _NEEDS_SINGLETONMETA:
1068 del SingletonMeta
1069
1070
1071# Update this to something like >=3.13.0b1 if and when
1072# PEP 728 is implemented in CPython
1073_PEP_728_IMPLEMENTED = False
1074
1075if _PEP_728_IMPLEMENTED:
1076 # The standard library TypedDict in Python 3.9.0/1 does not honour the "total"
1077 # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059
1078 # The standard library TypedDict below Python 3.11 does not store runtime
1079 # information about optional and required keys when using Required or NotRequired.
1080 # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11.
1081 # Aaaand on 3.12 we add __orig_bases__ to TypedDict
1082 # to enable better runtime introspection.
1083 # On 3.13 we deprecate some odd ways of creating TypedDicts.
1084 # Also on 3.13, PEP 705 adds the ReadOnly[] qualifier.
1085 # PEP 728 (still pending) makes more changes.
1086 TypedDict = typing.TypedDict
1087 _TypedDictMeta = typing._TypedDictMeta
1088 is_typeddict = typing.is_typeddict
1089else:
1090 # 3.10.0 and later
1091 _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters
1092
1093 def _get_typeddict_qualifiers(annotation_type):
1094 while True:
1095 annotation_origin = get_origin(annotation_type)
1096 if annotation_origin is Annotated:
1097 annotation_args = get_args(annotation_type)
1098 if annotation_args:
1099 annotation_type = annotation_args[0]
1100 else:
1101 break
1102 elif annotation_origin is Required:
1103 yield Required
1104 annotation_type, = get_args(annotation_type)
1105 elif annotation_origin is NotRequired:
1106 yield NotRequired
1107 annotation_type, = get_args(annotation_type)
1108 elif annotation_origin is ReadOnly:
1109 yield ReadOnly
1110 annotation_type, = get_args(annotation_type)
1111 else:
1112 break
1113
1114 class _TypedDictMeta(type):
1115
1116 def __new__(cls, name, bases, ns, *, total=True, closed=None,
1117 extra_items=NoExtraItems):
1118 """Create new typed dict class object.
1119
1120 This method is called when TypedDict is subclassed,
1121 or when TypedDict is instantiated. This way
1122 TypedDict supports all three syntax forms described in its docstring.
1123 Subclasses and instances of TypedDict return actual dictionaries.
1124 """
1125 for base in bases:
1126 if type(base) is not _TypedDictMeta and base is not typing.Generic:
1127 raise TypeError('cannot inherit from both a TypedDict type '
1128 'and a non-TypedDict base class')
1129 if closed is not None and extra_items is not NoExtraItems:
1130 raise TypeError(f"Cannot combine closed={closed!r} and extra_items")
1131
1132 if any(issubclass(b, typing.Generic) for b in bases):
1133 generic_base = (typing.Generic,)
1134 else:
1135 generic_base = ()
1136
1137 ns_annotations = ns.pop('__annotations__', None)
1138
1139 # typing.py generally doesn't let you inherit from plain Generic, unless
1140 # the name of the class happens to be "Protocol"
1141 tp_dict = type.__new__(_TypedDictMeta, "Protocol", (*generic_base, dict), ns)
1142 tp_dict.__name__ = name
1143 if tp_dict.__qualname__ == "Protocol":
1144 tp_dict.__qualname__ = name
1145
1146 if not hasattr(tp_dict, '__orig_bases__'):
1147 tp_dict.__orig_bases__ = bases
1148
1149 annotations = {}
1150 own_annotate = None
1151 if ns_annotations is not None:
1152 own_annotations = ns_annotations
1153 elif sys.version_info >= (3, 14):
1154 if hasattr(annotationlib, "get_annotate_from_class_namespace"):
1155 own_annotate = annotationlib.get_annotate_from_class_namespace(ns)
1156 else:
1157 # 3.14.0a7 and earlier
1158 own_annotate = ns.get("__annotate__")
1159 if own_annotate is not None:
1160 own_annotations = annotationlib.call_annotate_function(
1161 own_annotate, Format.FORWARDREF, owner=tp_dict
1162 )
1163 else:
1164 own_annotations = {}
1165 else:
1166 own_annotations = {}
1167 msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
1168 if _TAKES_MODULE:
1169 own_checked_annotations = {
1170 n: typing._type_check(tp, msg, module=tp_dict.__module__)
1171 for n, tp in own_annotations.items()
1172 }
1173 else:
1174 own_checked_annotations = {
1175 n: typing._type_check(tp, msg)
1176 for n, tp in own_annotations.items()
1177 }
1178 required_keys = set()
1179 optional_keys = set()
1180 readonly_keys = set()
1181 mutable_keys = set()
1182 extra_items_type = extra_items
1183
1184 for base in bases:
1185 base_dict = base.__dict__
1186
1187 if sys.version_info <= (3, 14):
1188 annotations.update(base_dict.get('__annotations__', {}))
1189 required_keys.update(base_dict.get('__required_keys__', ()))
1190 optional_keys.update(base_dict.get('__optional_keys__', ()))
1191 readonly_keys.update(base_dict.get('__readonly_keys__', ()))
1192 mutable_keys.update(base_dict.get('__mutable_keys__', ()))
1193
1194 # This was specified in an earlier version of PEP 728. Support
1195 # is retained for backwards compatibility, but only for Python
1196 # 3.13 and lower.
1197 if (closed and sys.version_info < (3, 14)
1198 and "__extra_items__" in own_checked_annotations):
1199 annotation_type = own_checked_annotations.pop("__extra_items__")
1200 qualifiers = set(_get_typeddict_qualifiers(annotation_type))
1201 if Required in qualifiers:
1202 raise TypeError(
1203 "Special key __extra_items__ does not support "
1204 "Required"
1205 )
1206 if NotRequired in qualifiers:
1207 raise TypeError(
1208 "Special key __extra_items__ does not support "
1209 "NotRequired"
1210 )
1211 extra_items_type = annotation_type
1212
1213 annotations.update(own_checked_annotations)
1214 for annotation_key, annotation_type in own_checked_annotations.items():
1215 qualifiers = set(_get_typeddict_qualifiers(annotation_type))
1216
1217 if Required in qualifiers:
1218 required_keys.add(annotation_key)
1219 elif NotRequired in qualifiers:
1220 optional_keys.add(annotation_key)
1221 elif total:
1222 required_keys.add(annotation_key)
1223 else:
1224 optional_keys.add(annotation_key)
1225 if ReadOnly in qualifiers:
1226 mutable_keys.discard(annotation_key)
1227 readonly_keys.add(annotation_key)
1228 else:
1229 mutable_keys.add(annotation_key)
1230 readonly_keys.discard(annotation_key)
1231
1232 # Breakpoint: https://github.com/python/cpython/pull/119891
1233 if sys.version_info >= (3, 14):
1234 def __annotate__(format):
1235 annos = {}
1236 for base in bases:
1237 if base is Generic:
1238 continue
1239 base_annotate = base.__annotate__
1240 if base_annotate is None:
1241 continue
1242 base_annos = annotationlib.call_annotate_function(
1243 base_annotate, format, owner=base)
1244 annos.update(base_annos)
1245 if own_annotate is not None:
1246 own = annotationlib.call_annotate_function(
1247 own_annotate, format, owner=tp_dict)
1248 if format != Format.STRING:
1249 own = {
1250 n: typing._type_check(tp, msg, module=tp_dict.__module__)
1251 for n, tp in own.items()
1252 }
1253 elif format == Format.STRING:
1254 own = annotationlib.annotations_to_string(own_annotations)
1255 elif format in (Format.FORWARDREF, Format.VALUE):
1256 own = own_checked_annotations
1257 else:
1258 raise NotImplementedError(format)
1259 annos.update(own)
1260 return annos
1261
1262 tp_dict.__annotate__ = __annotate__
1263 else:
1264 tp_dict.__annotations__ = annotations
1265 tp_dict.__required_keys__ = frozenset(required_keys)
1266 tp_dict.__optional_keys__ = frozenset(optional_keys)
1267 tp_dict.__readonly_keys__ = frozenset(readonly_keys)
1268 tp_dict.__mutable_keys__ = frozenset(mutable_keys)
1269 tp_dict.__total__ = total
1270 tp_dict.__closed__ = closed
1271 tp_dict.__extra_items__ = extra_items_type
1272 return tp_dict
1273
1274 __call__ = dict # static method
1275
1276 def __subclasscheck__(cls, other):
1277 # Typed dicts are only for static structural subtyping.
1278 raise TypeError('TypedDict does not support instance and class checks')
1279
1280 __instancecheck__ = __subclasscheck__
1281
1282 _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
1283
1284 def _create_typeddict(
1285 typename,
1286 fields,
1287 /,
1288 *,
1289 typing_is_inline,
1290 total,
1291 closed,
1292 extra_items,
1293 **kwargs,
1294 ):
1295 if fields is _marker or fields is None:
1296 if fields is _marker:
1297 deprecated_thing = (
1298 "Failing to pass a value for the 'fields' parameter"
1299 )
1300 else:
1301 deprecated_thing = "Passing `None` as the 'fields' parameter"
1302
1303 example = f"`{typename} = TypedDict({typename!r}, {{}})`"
1304 deprecation_msg = (
1305 f"{deprecated_thing} is deprecated and will be disallowed in "
1306 "Python 3.15. To create a TypedDict class with 0 fields "
1307 "using the functional syntax, pass an empty dictionary, e.g. "
1308 ) + example + "."
1309 warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2)
1310 # Support a field called "closed"
1311 if closed is not False and closed is not True and closed is not None:
1312 kwargs["closed"] = closed
1313 closed = None
1314 # Or "extra_items"
1315 if extra_items is not NoExtraItems:
1316 kwargs["extra_items"] = extra_items
1317 extra_items = NoExtraItems
1318 fields = kwargs
1319 elif kwargs:
1320 raise TypeError("TypedDict takes either a dict or keyword arguments,"
1321 " but not both")
1322 if kwargs:
1323 # Breakpoint: https://github.com/python/cpython/pull/104891
1324 if sys.version_info >= (3, 13):
1325 raise TypeError("TypedDict takes no keyword arguments")
1326 warnings.warn(
1327 "The kwargs-based syntax for TypedDict definitions is deprecated "
1328 "in Python 3.11, will be removed in Python 3.13, and may not be "
1329 "understood by third-party type checkers.",
1330 DeprecationWarning,
1331 stacklevel=2,
1332 )
1333
1334 ns = {'__annotations__': dict(fields)}
1335 module = _caller(depth=4 if typing_is_inline else 2)
1336 if module is not None:
1337 # Setting correct module is necessary to make typed dict classes
1338 # pickleable.
1339 ns['__module__'] = module
1340
1341 td = _TypedDictMeta(typename, (), ns, total=total, closed=closed,
1342 extra_items=extra_items)
1343 td.__orig_bases__ = (TypedDict,)
1344 return td
1345
1346 class _TypedDictSpecialForm(_SpecialForm, _root=True):
1347 def __call__(
1348 self,
1349 typename,
1350 fields=_marker,
1351 /,
1352 *,
1353 total=True,
1354 closed=None,
1355 extra_items=NoExtraItems,
1356 **kwargs
1357 ):
1358 return _create_typeddict(
1359 typename,
1360 fields,
1361 typing_is_inline=False,
1362 total=total,
1363 closed=closed,
1364 extra_items=extra_items,
1365 **kwargs,
1366 )
1367
1368 def __mro_entries__(self, bases):
1369 return (_TypedDict,)
1370
1371 @_TypedDictSpecialForm
1372 def TypedDict(self, args):
1373 """A simple typed namespace. At runtime it is equivalent to a plain dict.
1374
1375 TypedDict creates a dictionary type such that a type checker will expect all
1376 instances to have a certain set of keys, where each key is
1377 associated with a value of a consistent type. This expectation
1378 is not checked at runtime.
1379
1380 Usage::
1381
1382 class Point2D(TypedDict):
1383 x: int
1384 y: int
1385 label: str
1386
1387 a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK
1388 b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check
1389
1390 assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
1391
1392 The type info can be accessed via the Point2D.__annotations__ dict, and
1393 the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
1394 TypedDict supports an additional equivalent form::
1395
1396 Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
1397
1398 By default, all keys must be present in a TypedDict. It is possible
1399 to override this by specifying totality::
1400
1401 class Point2D(TypedDict, total=False):
1402 x: int
1403 y: int
1404
1405 This means that a Point2D TypedDict can have any of the keys omitted. A type
1406 checker is only expected to support a literal False or True as the value of
1407 the total argument. True is the default, and makes all items defined in the
1408 class body be required.
1409
1410 The Required and NotRequired special forms can also be used to mark
1411 individual keys as being required or not required::
1412
1413 class Point2D(TypedDict):
1414 x: int # the "x" key must always be present (Required is the default)
1415 y: NotRequired[int] # the "y" key can be omitted
1416
1417 See PEP 655 for more details on Required and NotRequired.
1418 """
1419 # This runs when creating inline TypedDicts:
1420 if not isinstance(args, dict):
1421 raise TypeError(
1422 "TypedDict[...] should be used with a single dict argument"
1423 )
1424
1425 return _create_typeddict(
1426 "<inline TypedDict>",
1427 args,
1428 typing_is_inline=True,
1429 total=True,
1430 closed=True,
1431 extra_items=NoExtraItems,
1432 )
1433
1434 _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta)
1435
1436 def is_typeddict(tp):
1437 """Check if an annotation is a TypedDict class
1438
1439 For example::
1440 class Film(TypedDict):
1441 title: str
1442 year: int
1443
1444 is_typeddict(Film) # => True
1445 is_typeddict(Union[list, str]) # => False
1446 """
1447 return isinstance(tp, _TYPEDDICT_TYPES)
1448
1449
1450if hasattr(typing, "assert_type"):
1451 assert_type = typing.assert_type
1452
1453else:
1454 def assert_type(val, typ, /):
1455 """Assert (to the type checker) that the value is of the given type.
1456
1457 When the type checker encounters a call to assert_type(), it
1458 emits an error if the value is not of the specified type::
1459
1460 def greet(name: str) -> None:
1461 assert_type(name, str) # ok
1462 assert_type(name, int) # type checker error
1463
1464 At runtime this returns the first argument unchanged and otherwise
1465 does nothing.
1466 """
1467 return val
1468
1469
1470if hasattr(typing, "ReadOnly"): # 3.13+
1471 get_type_hints = typing.get_type_hints
1472else: # <=3.13
1473 # replaces _strip_annotations()
1474 def _strip_extras(t):
1475 """Strips Annotated, Required and NotRequired from a given type."""
1476 if isinstance(t, typing._AnnotatedAlias):
1477 return _strip_extras(t.__origin__)
1478 if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired, ReadOnly):
1479 return _strip_extras(t.__args__[0])
1480 if isinstance(t, typing._GenericAlias):
1481 stripped_args = tuple(_strip_extras(a) for a in t.__args__)
1482 if stripped_args == t.__args__:
1483 return t
1484 return t.copy_with(stripped_args)
1485 if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias):
1486 stripped_args = tuple(_strip_extras(a) for a in t.__args__)
1487 if stripped_args == t.__args__:
1488 return t
1489 return _types.GenericAlias(t.__origin__, stripped_args)
1490 if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType):
1491 stripped_args = tuple(_strip_extras(a) for a in t.__args__)
1492 if stripped_args == t.__args__:
1493 return t
1494 return functools.reduce(operator.or_, stripped_args)
1495
1496 return t
1497
1498 def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
1499 """Return type hints for an object.
1500
1501 This is often the same as obj.__annotations__, but it handles
1502 forward references encoded as string literals, adds Optional[t] if a
1503 default value equal to None is set and recursively replaces all
1504 'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T'
1505 (unless 'include_extras=True').
1506
1507 The argument may be a module, class, method, or function. The annotations
1508 are returned as a dictionary. For classes, annotations include also
1509 inherited members.
1510
1511 TypeError is raised if the argument is not of a type that can contain
1512 annotations, and an empty dictionary is returned if no annotations are
1513 present.
1514
1515 BEWARE -- the behavior of globalns and localns is counterintuitive
1516 (unless you are familiar with how eval() and exec() work). The
1517 search order is locals first, then globals.
1518
1519 - If no dict arguments are passed, an attempt is made to use the
1520 globals from obj (or the respective module's globals for classes),
1521 and these are also used as the locals. If the object does not appear
1522 to have globals, an empty dictionary is used.
1523
1524 - If one dict argument is passed, it is used for both globals and
1525 locals.
1526
1527 - If two dict arguments are passed, they specify globals and
1528 locals, respectively.
1529 """
1530 hint = typing.get_type_hints(
1531 obj, globalns=globalns, localns=localns, include_extras=True
1532 )
1533 # Breakpoint: https://github.com/python/cpython/pull/30304
1534 if sys.version_info < (3, 11):
1535 _clean_optional(obj, hint, globalns, localns)
1536 if include_extras:
1537 return hint
1538 return {k: _strip_extras(t) for k, t in hint.items()}
1539
1540 _NoneType = type(None)
1541
1542 def _could_be_inserted_optional(t):
1543 """detects Union[..., None] pattern"""
1544 if not isinstance(t, typing._UnionGenericAlias):
1545 return False
1546 # Assume if last argument is not None they are user defined
1547 if t.__args__[-1] is not _NoneType:
1548 return False
1549 return True
1550
1551 # < 3.11
1552 def _clean_optional(obj, hints, globalns=None, localns=None):
1553 # reverts injected Union[..., None] cases from typing.get_type_hints
1554 # when a None default value is used.
1555 # see https://github.com/python/typing_extensions/issues/310
1556 if not hints or isinstance(obj, type):
1557 return
1558 defaults = typing._get_defaults(obj) # avoid accessing __annotations___
1559 if not defaults:
1560 return
1561 original_hints = obj.__annotations__
1562 for name, value in hints.items():
1563 # Not a Union[..., None] or replacement conditions not fullfilled
1564 if (not _could_be_inserted_optional(value)
1565 or name not in defaults
1566 or defaults[name] is not None
1567 ):
1568 continue
1569 original_value = original_hints[name]
1570 # value=NoneType should have caused a skip above but check for safety
1571 if original_value is None:
1572 original_value = _NoneType
1573 # Forward reference
1574 if isinstance(original_value, str):
1575 if globalns is None:
1576 if isinstance(obj, _types.ModuleType):
1577 globalns = obj.__dict__
1578 else:
1579 nsobj = obj
1580 # Find globalns for the unwrapped object.
1581 while hasattr(nsobj, '__wrapped__'):
1582 nsobj = nsobj.__wrapped__
1583 globalns = getattr(nsobj, '__globals__', {})
1584 if localns is None:
1585 localns = globalns
1586 elif localns is None:
1587 localns = globalns
1588
1589 original_value = ForwardRef(
1590 original_value,
1591 is_argument=not isinstance(obj, _types.ModuleType)
1592 )
1593 original_evaluated = typing._eval_type(original_value, globalns, localns)
1594 # Compare if values differ. Note that even if equal
1595 # value might be cached by typing._tp_cache contrary to original_evaluated
1596 if original_evaluated != value or (
1597 # 3.10: ForwardRefs of UnionType might be turned into _UnionGenericAlias
1598 hasattr(_types, "UnionType")
1599 and isinstance(original_evaluated, _types.UnionType)
1600 and not isinstance(value, _types.UnionType)
1601 ):
1602 hints[name] = original_evaluated
1603
1604# Python 3.9 has get_origin() and get_args() but those implementations don't support
1605# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do.
1606# Breakpoint: https://github.com/python/cpython/pull/25298
1607if sys.version_info >= (3, 10):
1608 get_origin = typing.get_origin
1609 get_args = typing.get_args
1610# 3.9
1611else:
1612 def get_origin(tp):
1613 """Get the unsubscripted version of a type.
1614
1615 This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
1616 and Annotated. Return None for unsupported types. Examples::
1617
1618 get_origin(Literal[42]) is Literal
1619 get_origin(int) is None
1620 get_origin(ClassVar[int]) is ClassVar
1621 get_origin(Generic) is Generic
1622 get_origin(Generic[T]) is Generic
1623 get_origin(Union[T, int]) is Union
1624 get_origin(List[Tuple[T, T]][int]) == list
1625 get_origin(P.args) is P
1626 """
1627 if isinstance(tp, typing._AnnotatedAlias):
1628 return Annotated
1629 if isinstance(tp, (typing._BaseGenericAlias, _types.GenericAlias,
1630 ParamSpecArgs, ParamSpecKwargs)):
1631 return tp.__origin__
1632 if tp is typing.Generic:
1633 return typing.Generic
1634 return None
1635
1636 def get_args(tp):
1637 """Get type arguments with all substitutions performed.
1638
1639 For unions, basic simplifications used by Union constructor are performed.
1640 Examples::
1641 get_args(Dict[str, int]) == (str, int)
1642 get_args(int) == ()
1643 get_args(Union[int, Union[T, int], str][int]) == (int, str)
1644 get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
1645 get_args(Callable[[], T][int]) == ([], int)
1646 """
1647 if isinstance(tp, typing._AnnotatedAlias):
1648 return (tp.__origin__, *tp.__metadata__)
1649 if isinstance(tp, (typing._GenericAlias, _types.GenericAlias)):
1650 res = tp.__args__
1651 if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
1652 res = (list(res[:-1]), res[-1])
1653 return res
1654 return ()
1655
1656
1657# 3.10+
1658if hasattr(typing, 'TypeAlias'):
1659 TypeAlias = typing.TypeAlias
1660# 3.9
1661else:
1662 @_ExtensionsSpecialForm
1663 def TypeAlias(self, parameters):
1664 """Special marker indicating that an assignment should
1665 be recognized as a proper type alias definition by type
1666 checkers.
1667
1668 For example::
1669
1670 Predicate: TypeAlias = Callable[..., bool]
1671
1672 It's invalid when used anywhere except as in the example above.
1673 """
1674 raise TypeError(f"{self} is not subscriptable")
1675
1676
1677def _set_default(type_param, default):
1678 type_param.has_default = lambda: default is not NoDefault
1679 type_param.__default__ = default
1680
1681
1682def _set_module(typevarlike):
1683 # for pickling:
1684 def_mod = _caller(depth=2)
1685 if def_mod != 'typing_extensions':
1686 typevarlike.__module__ = def_mod
1687
1688
1689class _DefaultMixin:
1690 """Mixin for TypeVarLike defaults."""
1691
1692 __slots__ = ()
1693 __init__ = _set_default
1694
1695
1696# Classes using this metaclass must provide a _backported_typevarlike ClassVar
1697class _TypeVarLikeMeta(type):
1698 def __instancecheck__(cls, __instance: Any) -> bool:
1699 return isinstance(__instance, cls._backported_typevarlike)
1700
1701
1702if _PEP_696_IMPLEMENTED:
1703 from typing import TypeVar
1704else:
1705 # Add default and infer_variance parameters from PEP 696 and 695
1706 class TypeVar(metaclass=_TypeVarLikeMeta):
1707 """Type variable."""
1708
1709 _backported_typevarlike = typing.TypeVar
1710
1711 def __new__(cls, name, *constraints, bound=None,
1712 covariant=False, contravariant=False,
1713 default=NoDefault, infer_variance=False):
1714 if hasattr(typing, "TypeAliasType"):
1715 # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar
1716 typevar = typing.TypeVar(name, *constraints, bound=bound,
1717 covariant=covariant, contravariant=contravariant,
1718 infer_variance=infer_variance)
1719 else:
1720 typevar = typing.TypeVar(name, *constraints, bound=bound,
1721 covariant=covariant, contravariant=contravariant)
1722 if infer_variance and (covariant or contravariant):
1723 raise ValueError("Variance cannot be specified with infer_variance.")
1724 typevar.__infer_variance__ = infer_variance
1725
1726 _set_default(typevar, default)
1727 _set_module(typevar)
1728
1729 def _tvar_prepare_subst(alias, args):
1730 if (
1731 typevar.has_default()
1732 and alias.__parameters__.index(typevar) == len(args)
1733 ):
1734 args += (typevar.__default__,)
1735 return args
1736
1737 typevar.__typing_prepare_subst__ = _tvar_prepare_subst
1738 return typevar
1739
1740 def __init_subclass__(cls) -> None:
1741 raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type")
1742
1743
1744# Python 3.10+ has PEP 612
1745if hasattr(typing, 'ParamSpecArgs'):
1746 ParamSpecArgs = typing.ParamSpecArgs
1747 ParamSpecKwargs = typing.ParamSpecKwargs
1748# 3.9
1749else:
1750 class _Immutable:
1751 """Mixin to indicate that object should not be copied."""
1752 __slots__ = ()
1753
1754 def __copy__(self):
1755 return self
1756
1757 def __deepcopy__(self, memo):
1758 return self
1759
1760 class ParamSpecArgs(_Immutable):
1761 """The args for a ParamSpec object.
1762
1763 Given a ParamSpec object P, P.args is an instance of ParamSpecArgs.
1764
1765 ParamSpecArgs objects have a reference back to their ParamSpec:
1766
1767 P.args.__origin__ is P
1768
1769 This type is meant for runtime introspection and has no special meaning to
1770 static type checkers.
1771 """
1772 def __init__(self, origin):
1773 self.__origin__ = origin
1774
1775 def __repr__(self):
1776 return f"{self.__origin__.__name__}.args"
1777
1778 def __eq__(self, other):
1779 if not isinstance(other, ParamSpecArgs):
1780 return NotImplemented
1781 return self.__origin__ == other.__origin__
1782
1783 class ParamSpecKwargs(_Immutable):
1784 """The kwargs for a ParamSpec object.
1785
1786 Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs.
1787
1788 ParamSpecKwargs objects have a reference back to their ParamSpec:
1789
1790 P.kwargs.__origin__ is P
1791
1792 This type is meant for runtime introspection and has no special meaning to
1793 static type checkers.
1794 """
1795 def __init__(self, origin):
1796 self.__origin__ = origin
1797
1798 def __repr__(self):
1799 return f"{self.__origin__.__name__}.kwargs"
1800
1801 def __eq__(self, other):
1802 if not isinstance(other, ParamSpecKwargs):
1803 return NotImplemented
1804 return self.__origin__ == other.__origin__
1805
1806
1807if _PEP_696_IMPLEMENTED:
1808 from typing import ParamSpec
1809
1810# 3.10+
1811elif hasattr(typing, 'ParamSpec'):
1812
1813 # Add default parameter - PEP 696
1814 class ParamSpec(metaclass=_TypeVarLikeMeta):
1815 """Parameter specification."""
1816
1817 _backported_typevarlike = typing.ParamSpec
1818
1819 def __new__(cls, name, *, bound=None,
1820 covariant=False, contravariant=False,
1821 infer_variance=False, default=NoDefault):
1822 if hasattr(typing, "TypeAliasType"):
                # PEP 695 implemented (3.12+), can pass infer_variance to typing.ParamSpec
1824 paramspec = typing.ParamSpec(name, bound=bound,
1825 covariant=covariant,
1826 contravariant=contravariant,
1827 infer_variance=infer_variance)
1828 else:
1829 paramspec = typing.ParamSpec(name, bound=bound,
1830 covariant=covariant,
1831 contravariant=contravariant)
1832 paramspec.__infer_variance__ = infer_variance
1833
1834 _set_default(paramspec, default)
1835 _set_module(paramspec)
1836
1837 def _paramspec_prepare_subst(alias, args):
1838 params = alias.__parameters__
1839 i = params.index(paramspec)
1840 if i == len(args) and paramspec.has_default():
1841 args = [*args, paramspec.__default__]
1842 if i >= len(args):
1843 raise TypeError(f"Too few arguments for {alias}")
1844 # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612.
1845 if len(params) == 1 and not typing._is_param_expr(args[0]):
1846 assert i == 0
1847 args = (args,)
1848 # Convert lists to tuples to help other libraries cache the results.
1849 elif isinstance(args[i], list):
1850 args = (*args[:i], tuple(args[i]), *args[i + 1:])
1851 return args
1852
1853 paramspec.__typing_prepare_subst__ = _paramspec_prepare_subst
1854 return paramspec
1855
1856 def __init_subclass__(cls) -> None:
1857 raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type")
1858
1859# 3.9
1860else:
1861
1862 # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
1863 class ParamSpec(list, _DefaultMixin):
1864 """Parameter specification variable.
1865
1866 Usage::
1867
1868 P = ParamSpec('P')
1869
1870 Parameter specification variables exist primarily for the benefit of static
1871 type checkers. They are used to forward the parameter types of one
1872 callable to another callable, a pattern commonly found in higher order
1873 functions and decorators. They are only valid when used in ``Concatenate``,
        or as the first argument to ``Callable``. In Python 3.10 and higher,
1875 they are also supported in user-defined Generics at runtime.
1876 See class Generic for more information on generic types. An
1877 example for annotating a decorator::
1878
1879 T = TypeVar('T')
1880 P = ParamSpec('P')
1881
1882 def add_logging(f: Callable[P, T]) -> Callable[P, T]:
1883 '''A type-safe decorator to add logging to a function.'''
1884 def inner(*args: P.args, **kwargs: P.kwargs) -> T:
1885 logging.info(f'{f.__name__} was called')
1886 return f(*args, **kwargs)
1887 return inner
1888
1889 @add_logging
1890 def add_two(x: float, y: float) -> float:
1891 '''Add two numbers together.'''
1892 return x + y
1893
1894 Parameter specification variables defined with covariant=True or
1895 contravariant=True can be used to declare covariant or contravariant
1896 generic types. These keyword arguments are valid, but their actual semantics
1897 are yet to be decided. See PEP 612 for details.
1898
1899 Parameter specification variables can be introspected. e.g.:
1900
           P.__name__ == 'P'
1902 P.__bound__ == None
1903 P.__covariant__ == False
1904 P.__contravariant__ == False
1905
1906 Note that only parameter specification variables defined in global scope can
1907 be pickled.
1908 """
1909
1910 # Trick Generic __parameters__.
1911 __class__ = typing.TypeVar
1912
1913 @property
1914 def args(self):
1915 return ParamSpecArgs(self)
1916
1917 @property
1918 def kwargs(self):
1919 return ParamSpecKwargs(self)
1920
1921 def __init__(self, name, *, bound=None, covariant=False, contravariant=False,
1922 infer_variance=False, default=NoDefault):
1923 list.__init__(self, [self])
1924 self.__name__ = name
1925 self.__covariant__ = bool(covariant)
1926 self.__contravariant__ = bool(contravariant)
1927 self.__infer_variance__ = bool(infer_variance)
1928 if bound:
1929 self.__bound__ = typing._type_check(bound, 'Bound must be a type.')
1930 else:
1931 self.__bound__ = None
1932 _DefaultMixin.__init__(self, default)
1933
1934 # for pickling:
1935 def_mod = _caller()
1936 if def_mod != 'typing_extensions':
1937 self.__module__ = def_mod
1938
1939 def __repr__(self):
1940 if self.__infer_variance__:
1941 prefix = ''
1942 elif self.__covariant__:
1943 prefix = '+'
1944 elif self.__contravariant__:
1945 prefix = '-'
1946 else:
1947 prefix = '~'
1948 return prefix + self.__name__
1949
1950 def __hash__(self):
1951 return object.__hash__(self)
1952
1953 def __eq__(self, other):
1954 return self is other
1955
1956 def __reduce__(self):
1957 return self.__name__
1958
1959 # Hack to get typing._type_check to pass.
1960 def __call__(self, *args, **kwargs):
1961 pass
1962
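# Likewise for the ParamSpec backports above: a PEP 696 default can be attached
# and introspected at runtime. Illustrative only; ``P`` is not defined by this
# module:
#
#     P = ParamSpec("P", default=[int, str])
#     P.has_default()   # True
#     P.__default__     # the object passed as default, here [int, str]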
1963
1964# 3.9
1965if not hasattr(typing, 'Concatenate'):
1966 # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
1967
1968 # 3.9.0-1
1969 if not hasattr(typing, '_type_convert'):
1970 def _type_convert(arg, module=None, *, allow_special_forms=False):
1971 """For converting None to type(None), and strings to ForwardRef."""
1972 if arg is None:
1973 return type(None)
1974 if isinstance(arg, str):
1975 if sys.version_info <= (3, 9, 6):
1976 return ForwardRef(arg)
1977 if sys.version_info <= (3, 9, 7):
1978 return ForwardRef(arg, module=module)
1979 return ForwardRef(arg, module=module, is_class=allow_special_forms)
1980 return arg
1981 else:
1982 _type_convert = typing._type_convert
1983
1984 class _ConcatenateGenericAlias(list):
1985
1986 # Trick Generic into looking into this for __parameters__.
1987 __class__ = typing._GenericAlias
1988
1989 def __init__(self, origin, args):
1990 super().__init__(args)
1991 self.__origin__ = origin
1992 self.__args__ = args
1993
1994 def __repr__(self):
1995 _type_repr = typing._type_repr
1996 return (f'{_type_repr(self.__origin__)}'
1997 f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]')
1998
1999 def __hash__(self):
2000 return hash((self.__origin__, self.__args__))
2001
2002 # Hack to get typing._type_check to pass in Generic.
2003 def __call__(self, *args, **kwargs):
2004 pass
2005
2006 @property
2007 def __parameters__(self):
2008 return tuple(
2009 tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec))
2010 )
2011
        # 3.9; used by __getitem__ below
2013 def copy_with(self, params):
2014 if isinstance(params[-1], _ConcatenateGenericAlias):
2015 params = (*params[:-1], *params[-1].__args__)
2016 elif isinstance(params[-1], (list, tuple)):
2017 return (*params[:-1], *params[-1])
2018 elif (not (params[-1] is ... or isinstance(params[-1], ParamSpec))):
2019 raise TypeError("The last parameter to Concatenate should be a "
2020 "ParamSpec variable or ellipsis.")
2021 return self.__class__(self.__origin__, params)
2022
2023 # 3.9; accessed during GenericAlias.__getitem__ when substituting
2024 def __getitem__(self, args):
2025 if self.__origin__ in (Generic, Protocol):
2026 # Can't subscript Generic[...] or Protocol[...].
2027 raise TypeError(f"Cannot subscript already-subscripted {self}")
2028 if not self.__parameters__:
2029 raise TypeError(f"{self} is not a generic class")
2030
2031 if not isinstance(args, tuple):
2032 args = (args,)
2033 args = _unpack_args(*(_type_convert(p) for p in args))
2034 params = self.__parameters__
2035 for param in params:
2036 prepare = getattr(param, "__typing_prepare_subst__", None)
2037 if prepare is not None:
2038 args = prepare(self, args)
2039 # 3.9 & typing.ParamSpec
2040 elif isinstance(param, ParamSpec):
2041 i = params.index(param)
2042 if (
2043 i == len(args)
2044 and getattr(param, '__default__', NoDefault) is not NoDefault
2045 ):
2046 args = [*args, param.__default__]
2047 if i >= len(args):
2048 raise TypeError(f"Too few arguments for {self}")
2049 # Special case for Z[[int, str, bool]] == Z[int, str, bool]
2050 if len(params) == 1 and not _is_param_expr(args[0]):
2051 assert i == 0
2052 args = (args,)
2053 elif (
2054 isinstance(args[i], list)
2055 # 3.9
                    # This class inherits from list; do not convert it
2057 and not isinstance(args[i], _ConcatenateGenericAlias)
2058 ):
2059 args = (*args[:i], tuple(args[i]), *args[i + 1:])
2060
2061 alen = len(args)
2062 plen = len(params)
2063 if alen != plen:
2064 raise TypeError(
2065 f"Too {'many' if alen > plen else 'few'} arguments for {self};"
2066 f" actual {alen}, expected {plen}"
2067 )
2068
2069 subst = dict(zip(self.__parameters__, args))
2070 # determine new args
2071 new_args = []
2072 for arg in self.__args__:
2073 if isinstance(arg, type):
2074 new_args.append(arg)
2075 continue
2076 if isinstance(arg, TypeVar):
2077 arg = subst[arg]
2078 if (
2079 (isinstance(arg, typing._GenericAlias) and _is_unpack(arg))
2080 or (
2081 hasattr(_types, "GenericAlias")
2082 and isinstance(arg, _types.GenericAlias)
2083 and getattr(arg, "__unpacked__", False)
2084 )
2085 ):
2086 raise TypeError(f"{arg} is not valid as type argument")
2087
2088 elif isinstance(arg,
2089 typing._GenericAlias
2090 if not hasattr(_types, "GenericAlias") else
2091 (typing._GenericAlias, _types.GenericAlias)
2092 ):
2093 subparams = arg.__parameters__
2094 if subparams:
2095 subargs = tuple(subst[x] for x in subparams)
2096 arg = arg[subargs]
2097 new_args.append(arg)
2098 return self.copy_with(tuple(new_args))
2099
2100# 3.10+
2101else:
2102 _ConcatenateGenericAlias = typing._ConcatenateGenericAlias
2103
2104 # 3.10
2105 if sys.version_info < (3, 11):
2106
2107 class _ConcatenateGenericAlias(typing._ConcatenateGenericAlias, _root=True):
2108 # needed for checks in collections.abc.Callable to accept this class
2109 __module__ = "typing"
2110
2111 def copy_with(self, params):
2112 if isinstance(params[-1], (list, tuple)):
2113 return (*params[:-1], *params[-1])
2114 if isinstance(params[-1], typing._ConcatenateGenericAlias):
2115 params = (*params[:-1], *params[-1].__args__)
2116 elif not (params[-1] is ... or isinstance(params[-1], ParamSpec)):
2117 raise TypeError("The last parameter to Concatenate should be a "
2118 "ParamSpec variable or ellipsis.")
2119 return super(typing._ConcatenateGenericAlias, self).copy_with(params)
2120
2121 def __getitem__(self, args):
2122 value = super().__getitem__(args)
2123 if isinstance(value, tuple) and any(_is_unpack(t) for t in value):
2124 return tuple(_unpack_args(*(n for n in value)))
2125 return value
2126
2127
2128# 3.9.2
# 3.9.0-1
2130
2131
2132# <=3.10
2133def _create_concatenate_alias(origin, parameters):
2134 if parameters[-1] is ... and sys.version_info < (3, 9, 2):
        # Hack: arguments must be types, so replace the ellipsis with a dummy type.
2136 parameters = (*parameters[:-1], _EllipsisDummy)
2137 if sys.version_info >= (3, 10, 3):
2138 concatenate = _ConcatenateGenericAlias(origin, parameters,
2139 _typevar_types=(TypeVar, ParamSpec),
2140 _paramspec_tvars=True)
2141 else:
2142 concatenate = _ConcatenateGenericAlias(origin, parameters)
2143 if parameters[-1] is not _EllipsisDummy:
2144 return concatenate
2145 # Remove dummy again
2146 concatenate.__args__ = tuple(p if p is not _EllipsisDummy else ...
2147 for p in concatenate.__args__)
2148 if sys.version_info < (3, 10):
2149 # backport needs __args__ adjustment only
2150 return concatenate
2151 concatenate.__parameters__ = tuple(p for p in concatenate.__parameters__
2152 if p is not _EllipsisDummy)
2153 return concatenate
2154
2155
2156# <=3.10
2157@typing._tp_cache
2158def _concatenate_getitem(self, parameters):
2159 if parameters == ():
2160 raise TypeError("Cannot take a Concatenate of no types.")
2161 if not isinstance(parameters, tuple):
2162 parameters = (parameters,)
2163 if not (parameters[-1] is ... or isinstance(parameters[-1], ParamSpec)):
2164 raise TypeError("The last parameter to Concatenate should be a "
2165 "ParamSpec variable or ellipsis.")
2166 msg = "Concatenate[arg, ...]: each arg must be a type."
2167 parameters = (*(typing._type_check(p, msg) for p in parameters[:-1]),
2168 parameters[-1])
2169 return _create_concatenate_alias(self, parameters)
2170
2171
2172# 3.11+; Concatenate does not accept ellipsis in 3.10
2173# Breakpoint: https://github.com/python/cpython/pull/30969
2174if sys.version_info >= (3, 11):
2175 Concatenate = typing.Concatenate
2176# <=3.10
2177else:
2178 @_ExtensionsSpecialForm
2179 def Concatenate(self, parameters):
2180 """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
2181 higher order function which adds, removes or transforms parameters of a
2182 callable.
2183
2184 For example::
2185
2186 Callable[Concatenate[int, P], int]
2187
2188 See PEP 612 for detailed information.
2189 """
2190 return _concatenate_getitem(self, parameters)
2191
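# An illustrative sketch of the decorator pattern that Concatenate enables
# (comments only; ``with_request`` and ``Request`` are example names, with
# ``Request`` standing in for any concrete first-argument type):
#
#     P = ParamSpec("P")
#     R = TypeVar("R")
#
#     def with_request(f: Callable[Concatenate[Request, P], R]) -> Callable[P, R]:
#         def inner(*args: P.args, **kwargs: P.kwargs) -> R:
#             return f(Request(), *args, **kwargs)
#         return inner
#
# The wrapped callable requires a leading ``Request`` argument that the
# returned callable no longer exposes.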
2192
2193# 3.10+
2194if hasattr(typing, 'TypeGuard'):
2195 TypeGuard = typing.TypeGuard
2196# 3.9
2197else:
2198 @_ExtensionsSpecialForm
2199 def TypeGuard(self, parameters):
2200 """Special typing form used to annotate the return type of a user-defined
2201 type guard function. ``TypeGuard`` only accepts a single type argument.
2202 At runtime, functions marked this way should return a boolean.
2203
2204 ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
2205 type checkers to determine a more precise type of an expression within a
2206 program's code flow. Usually type narrowing is done by analyzing
2207 conditional code flow and applying the narrowing to a block of code. The
2208 conditional expression here is sometimes referred to as a "type guard".
2209
2210 Sometimes it would be convenient to use a user-defined boolean function
2211 as a type guard. Such a function should use ``TypeGuard[...]`` as its
2212 return type to alert static type checkers to this intention.
2213
2214 Using ``-> TypeGuard`` tells the static type checker that for a given
2215 function:
2216
2217 1. The return value is a boolean.
2218 2. If the return value is ``True``, the type of its argument
2219 is the type inside ``TypeGuard``.
2220
2221 For example::
2222
2223 def is_str(val: Union[str, float]):
2224 # "isinstance" type guard
2225 if isinstance(val, str):
2226 # Type of ``val`` is narrowed to ``str``
2227 ...
2228 else:
2229 # Else, type of ``val`` is narrowed to ``float``.
2230 ...
2231
        Strict type narrowing is not enforced -- the type inside ``TypeGuard``
        need not be a narrower form of the argument's declared type (it can
        even be a wider form) and this may lead to type-unsafe results.
        The main reason is to allow for things like
2235 narrowing ``List[object]`` to ``List[str]`` even though the latter is not
2236 a subtype of the former, since ``List`` is invariant. The responsibility of
2237 writing type-safe type guards is left to the user.
2238
2239 ``TypeGuard`` also works with type variables. For more information, see
2240 PEP 647 (User-Defined Type Guards).
2241 """
2242 item = typing._type_check(parameters, f'{self} accepts only a single type.')
2243 return typing._GenericAlias(self, (item,))
2244
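# An illustrative type-guard function actually spelled with ``TypeGuard`` in
# its signature (comments only; ``is_str_list`` and ``greet_all`` are example
# names, per PEP 647):
#
#     def is_str_list(val: List[object]) -> TypeGuard[List[str]]:
#         return all(isinstance(x, str) for x in val)
#
#     def greet_all(val: List[object]) -> None:
#         if is_str_list(val):
#             print(", ".join(val))   # ``val`` is narrowed to List[str] here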
2245
2246# 3.13+
2247if hasattr(typing, 'TypeIs'):
2248 TypeIs = typing.TypeIs
2249# <=3.12
2250else:
2251 @_ExtensionsSpecialForm
2252 def TypeIs(self, parameters):
2253 """Special typing form used to annotate the return type of a user-defined
2254 type narrower function. ``TypeIs`` only accepts a single type argument.
2255 At runtime, functions marked this way should return a boolean.
2256
2257 ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static
2258 type checkers to determine a more precise type of an expression within a
2259 program's code flow. Usually type narrowing is done by analyzing
2260 conditional code flow and applying the narrowing to a block of code. The
2261 conditional expression here is sometimes referred to as a "type guard".
2262
2263 Sometimes it would be convenient to use a user-defined boolean function
2264 as a type guard. Such a function should use ``TypeIs[...]`` as its
2265 return type to alert static type checkers to this intention.
2266
2267 Using ``-> TypeIs`` tells the static type checker that for a given
2268 function:
2269
2270 1. The return value is a boolean.
2271 2. If the return value is ``True``, the type of its argument
2272 is the intersection of the type inside ``TypeIs`` and the argument's
2273 previously known type.
2274
2275 For example::
2276
2277 def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]:
2278 return hasattr(val, '__await__')
2279
2280 def f(val: Union[int, Awaitable[int]]) -> int:
2281 if is_awaitable(val):
2282 assert_type(val, Awaitable[int])
2283 else:
2284 assert_type(val, int)
2285
2286 ``TypeIs`` also works with type variables. For more information, see
2287 PEP 742 (Narrowing types with TypeIs).
2288 """
2289 item = typing._type_check(parameters, f'{self} accepts only a single type.')
2290 return typing._GenericAlias(self, (item,))
2291
2292
2293# 3.14+?
2294if hasattr(typing, 'TypeForm'):
2295 TypeForm = typing.TypeForm
2296# <=3.13
2297else:
2298 class _TypeFormForm(_ExtensionsSpecialForm, _root=True):
2299 # TypeForm(X) is equivalent to X but indicates to the type checker
2300 # that the object is a TypeForm.
2301 def __call__(self, obj, /):
2302 return obj
2303
2304 @_TypeFormForm
2305 def TypeForm(self, parameters):
2306 """A special form representing the value that results from the evaluation
2307 of a type expression. This value encodes the information supplied in the
2308 type expression, and it represents the type described by that type expression.
2309
2310 When used in a type expression, TypeForm describes a set of type form objects.
2311 It accepts a single type argument, which must be a valid type expression.
2312 ``TypeForm[T]`` describes the set of all type form objects that represent
2313 the type T or types that are assignable to T.
2314
2315 Usage:
2316
2317 def cast[T](typ: TypeForm[T], value: Any) -> T: ...
2318
2319 reveal_type(cast(int, "x")) # int
2320
2321 See PEP 747 for more information.
2322 """
2323 item = typing._type_check(parameters, f'{self} accepts only a single type.')
2324 return typing._GenericAlias(self, (item,))
2325
2326
2329if hasattr(typing, "LiteralString"): # 3.11+
2330 LiteralString = typing.LiteralString
2331else:
2332 @_SpecialForm
2333 def LiteralString(self, params):
2334 """Represents an arbitrary literal string.
2335
2336 Example::
2337
2338 from typing_extensions import LiteralString
2339
2340 def query(sql: LiteralString) -> ...:
2341 ...
2342
2343 query("SELECT * FROM table") # ok
2344 query(f"SELECT * FROM {input()}") # not ok
2345
2346 See PEP 675 for details.
2347
2348 """
2349 raise TypeError(f"{self} is not subscriptable")
2350
2351
2352if hasattr(typing, "Self"): # 3.11+
2353 Self = typing.Self
2354else:
2355 @_SpecialForm
2356 def Self(self, params):
2357 """Used to spell the type of "self" in classes.
2358
2359 Example::
2360
2361 from typing import Self
2362
2363 class ReturnsSelf:
2364 def parse(self, data: bytes) -> Self:
2365 ...
2366 return self
2367
2368 """
2369
2370 raise TypeError(f"{self} is not subscriptable")
2371
2372
2373if hasattr(typing, "Never"): # 3.11+
2374 Never = typing.Never
2375else:
2376 @_SpecialForm
2377 def Never(self, params):
2378 """The bottom type, a type that has no members.
2379
2380 This can be used to define a function that should never be
2381 called, or a function that never returns::
2382
2383 from typing_extensions import Never
2384
2385 def never_call_me(arg: Never) -> None:
2386 pass
2387
2388 def int_or_str(arg: int | str) -> None:
2389 never_call_me(arg) # type checker error
2390 match arg:
2391 case int():
2392 print("It's an int")
2393 case str():
2394 print("It's a str")
2395 case _:
2396 never_call_me(arg) # ok, arg is of type Never
2397
2398 """
2399
2400 raise TypeError(f"{self} is not subscriptable")
2401
2402
2403if hasattr(typing, 'Required'): # 3.11+
2404 Required = typing.Required
2405 NotRequired = typing.NotRequired
2406else: # <=3.10
2407 @_ExtensionsSpecialForm
2408 def Required(self, parameters):
2409 """A special typing construct to mark a key of a total=False TypedDict
2410 as required. For example:
2411
2412 class Movie(TypedDict, total=False):
2413 title: Required[str]
2414 year: int
2415
2416 m = Movie(
2417 title='The Matrix', # typechecker error if key is omitted
2418 year=1999,
2419 )
2420
2421 There is no runtime checking that a required key is actually provided
2422 when instantiating a related TypedDict.
2423 """
2424 item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
2425 return typing._GenericAlias(self, (item,))
2426
2427 @_ExtensionsSpecialForm
2428 def NotRequired(self, parameters):
2429 """A special typing construct to mark a key of a TypedDict as
2430 potentially missing. For example:
2431
2432 class Movie(TypedDict):
2433 title: str
2434 year: NotRequired[int]
2435
2436 m = Movie(
2437 title='The Matrix', # typechecker error if key is omitted
2438 year=1999,
2439 )
2440 """
2441 item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
2442 return typing._GenericAlias(self, (item,))
2443
2444
2445if hasattr(typing, 'ReadOnly'):
2446 ReadOnly = typing.ReadOnly
2447else: # <=3.12
2448 @_ExtensionsSpecialForm
2449 def ReadOnly(self, parameters):
2450 """A special typing construct to mark an item of a TypedDict as read-only.
2451
2452 For example:
2453
2454 class Movie(TypedDict):
2455 title: ReadOnly[str]
2456 year: int
2457
2458 def mutate_movie(m: Movie) -> None:
2459 m["year"] = 1992 # allowed
2460 m["title"] = "The Matrix" # typechecker error
2461
2462 There is no runtime checking for this property.
2463 """
2464 item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
2465 return typing._GenericAlias(self, (item,))
2466
2467
2468_UNPACK_DOC = """\
2469Type unpack operator.
2470
2471The type unpack operator takes the child types from some container type,
2472such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For
2473example:
2474
2475 # For some generic class `Foo`:
2476 Foo[Unpack[tuple[int, str]]] # Equivalent to Foo[int, str]
2477
2478 Ts = TypeVarTuple('Ts')
2479 # Specifies that `Bar` is generic in an arbitrary number of types.
2480 # (Think of `Ts` as a tuple of an arbitrary number of individual
2481 # `TypeVar`s, which the `Unpack` is 'pulling out' directly into the
2482 # `Generic[]`.)
2483 class Bar(Generic[Unpack[Ts]]): ...
2484 Bar[int] # Valid
2485 Bar[int, str] # Also valid
2486
2487From Python 3.11, this can also be done using the `*` operator:
2488
2489 Foo[*tuple[int, str]]
2490 class Bar(Generic[*Ts]): ...
2491
2492The operator can also be used along with a `TypedDict` to annotate
2493`**kwargs` in a function signature. For instance:
2494
2495 class Movie(TypedDict):
2496 name: str
2497 year: int
2498
2499 # This function expects two keyword arguments - *name* of type `str` and
2500 # *year* of type `int`.
2501 def foo(**kwargs: Unpack[Movie]): ...
2502
2503Note that there is only some runtime checking of this operator. Not
2504everything the runtime allows may be accepted by static type checkers.
2505
2506For more information, see PEP 646 and PEP 692.
2507"""
2508
2509
2510# PEP 692 changed the repr of Unpack[]
2511# Breakpoint: https://github.com/python/cpython/pull/104048
2512if sys.version_info >= (3, 12):
2513 Unpack = typing.Unpack
2514
2515 def _is_unpack(obj):
2516 return get_origin(obj) is Unpack
2517
2518else: # <=3.11
2519 class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True):
2520 def __init__(self, getitem):
2521 super().__init__(getitem)
2522 self.__doc__ = _UNPACK_DOC
2523
2524 class _UnpackAlias(typing._GenericAlias, _root=True):
2525 if sys.version_info < (3, 11):
2526 # needed for compatibility with Generic[Unpack[Ts]]
2527 __class__ = typing.TypeVar
2528
2529 @property
2530 def __typing_unpacked_tuple_args__(self):
2531 assert self.__origin__ is Unpack
2532 assert len(self.__args__) == 1
2533 arg, = self.__args__
2534 if isinstance(arg, (typing._GenericAlias, _types.GenericAlias)):
2535 if arg.__origin__ is not tuple:
2536 raise TypeError("Unpack[...] must be used with a tuple type")
2537 return arg.__args__
2538 return None
2539
2540 @property
2541 def __typing_is_unpacked_typevartuple__(self):
2542 assert self.__origin__ is Unpack
2543 assert len(self.__args__) == 1
2544 return isinstance(self.__args__[0], TypeVarTuple)
2545
2546 def __getitem__(self, args):
2547 if self.__typing_is_unpacked_typevartuple__:
2548 return args
2549 return super().__getitem__(args)
2550
2551 @_UnpackSpecialForm
2552 def Unpack(self, parameters):
2553 item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
2554 return _UnpackAlias(self, (item,))
2555
2556 def _is_unpack(obj):
2557 return isinstance(obj, _UnpackAlias)
2558
2559
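# Helper shared by the substitution code above and below: expand any
# Unpack[tuple[X, Y]] argument into its component types X, Y, leaving
# unbounded unpacks (ending in ``...``) and ordinary arguments untouched.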
2560def _unpack_args(*args):
2561 newargs = []
2562 for arg in args:
2563 subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
2564 if subargs is not None and (not (subargs and subargs[-1] is ...)):
2565 newargs.extend(subargs)
2566 else:
2567 newargs.append(arg)
2568 return newargs
2569
2570
2571if _PEP_696_IMPLEMENTED:
2572 from typing import TypeVarTuple
2573
2574elif hasattr(typing, "TypeVarTuple"): # 3.11+
2575
2576 # Add default parameter - PEP 696
2577 class TypeVarTuple(metaclass=_TypeVarLikeMeta):
2578 """Type variable tuple."""
2579
2580 _backported_typevarlike = typing.TypeVarTuple
2581
2582 def __new__(cls, name, *, default=NoDefault):
2583 tvt = typing.TypeVarTuple(name)
2584 _set_default(tvt, default)
2585 _set_module(tvt)
2586
2587 def _typevartuple_prepare_subst(alias, args):
2588 params = alias.__parameters__
2589 typevartuple_index = params.index(tvt)
2590 for param in params[typevartuple_index + 1:]:
2591 if isinstance(param, TypeVarTuple):
2592 raise TypeError(
2593 f"More than one TypeVarTuple parameter in {alias}"
2594 )
2595
2596 alen = len(args)
2597 plen = len(params)
2598 left = typevartuple_index
2599 right = plen - typevartuple_index - 1
2600 var_tuple_index = None
2601 fillarg = None
2602 for k, arg in enumerate(args):
2603 if not isinstance(arg, type):
2604 subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
2605 if subargs and len(subargs) == 2 and subargs[-1] is ...:
2606 if var_tuple_index is not None:
2607 raise TypeError(
2608 "More than one unpacked "
2609 "arbitrary-length tuple argument"
2610 )
2611 var_tuple_index = k
2612 fillarg = subargs[0]
2613 if var_tuple_index is not None:
2614 left = min(left, var_tuple_index)
2615 right = min(right, alen - var_tuple_index - 1)
2616 elif left + right > alen:
2617 raise TypeError(f"Too few arguments for {alias};"
2618 f" actual {alen}, expected at least {plen - 1}")
2619 if left == alen - right and tvt.has_default():
2620 replacement = _unpack_args(tvt.__default__)
2621 else:
2622 replacement = args[left: alen - right]
2623
2624 return (
2625 *args[:left],
2626 *([fillarg] * (typevartuple_index - left)),
2627 replacement,
2628 *([fillarg] * (plen - right - left - typevartuple_index - 1)),
2629 *args[alen - right:],
2630 )
2631
2632 tvt.__typing_prepare_subst__ = _typevartuple_prepare_subst
2633 return tvt
2634
2635 def __init_subclass__(self, *args, **kwds):
2636 raise TypeError("Cannot subclass special typing classes")
2637
2638else: # <=3.10
2639 class TypeVarTuple(_DefaultMixin):
2640 """Type variable tuple.
2641
2642 Usage::
2643
2644 Ts = TypeVarTuple('Ts')
2645
2646 In the same way that a normal type variable is a stand-in for a single
2647 type such as ``int``, a type variable *tuple* is a stand-in for a *tuple*
2648 type such as ``Tuple[int, str]``.
2649
2650 Type variable tuples can be used in ``Generic`` declarations.
2651 Consider the following example::
2652
2653 class Array(Generic[*Ts]): ...
2654
2655 The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``,
2656 where ``T1`` and ``T2`` are type variables. To use these type variables
2657 as type parameters of ``Array``, we must *unpack* the type variable tuple using
2658 the star operator: ``*Ts``. The signature of ``Array`` then behaves
2659 as if we had simply written ``class Array(Generic[T1, T2]): ...``.
        In contrast to ``Generic[T1, T2]``, however, ``Generic[*Ts]`` allows
2661 us to parameterise the class with an *arbitrary* number of type parameters.
2662
2663 Type variable tuples can be used anywhere a normal ``TypeVar`` can.
2664 This includes class definitions, as shown above, as well as function
2665 signatures and variable annotations::
2666
2667 class Array(Generic[*Ts]):
2668
2669 def __init__(self, shape: Tuple[*Ts]):
2670 self._shape: Tuple[*Ts] = shape
2671
2672 def get_shape(self) -> Tuple[*Ts]:
2673 return self._shape
2674
2675 shape = (Height(480), Width(640))
2676 x: Array[Height, Width] = Array(shape)
2677 y = abs(x) # Inferred type is Array[Height, Width]
2678 z = x + x # ... is Array[Height, Width]
2679 x.get_shape() # ... is tuple[Height, Width]
2680
2681 """
2682
2683 # Trick Generic __parameters__.
2684 __class__ = typing.TypeVar
2685
2686 def __iter__(self):
2687 yield self.__unpacked__
2688
2689 def __init__(self, name, *, default=NoDefault):
2690 self.__name__ = name
2691 _DefaultMixin.__init__(self, default)
2692
2693 # for pickling:
2694 def_mod = _caller()
2695 if def_mod != 'typing_extensions':
2696 self.__module__ = def_mod
2697
2698 self.__unpacked__ = Unpack[self]
2699
2700 def __repr__(self):
2701 return self.__name__
2702
2703 def __hash__(self):
2704 return object.__hash__(self)
2705
2706 def __eq__(self, other):
2707 return self is other
2708
2709 def __reduce__(self):
2710 return self.__name__
2711
2712 def __init_subclass__(self, *args, **kwds):
2713 if '_root' not in kwds:
2714 raise TypeError("Cannot subclass special typing classes")
2715
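# An illustrative sketch (comments only): the backported TypeVarTuple accepts a
# PEP 696 default spelled as an unpacked tuple, and is otherwise used through
# ``Unpack`` as described above. ``Ts`` and ``Array`` are example names:
#
#     Ts = TypeVarTuple("Ts", default=Unpack[Tuple[int, str]])
#     Ts.has_default()   # True
#
#     class Array(Generic[Unpack[Ts]]): ...
#     Array[float]              # any number of type arguments is accepted
#     Array[float, bool, str]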
2716
2717if hasattr(typing, "reveal_type"): # 3.11+
2718 reveal_type = typing.reveal_type
2719else: # <=3.10
2720 def reveal_type(obj: T, /) -> T:
2721 """Reveal the inferred type of a variable.
2722
2723 When a static type checker encounters a call to ``reveal_type()``,
2724 it will emit the inferred type of the argument::
2725
2726 x: int = 1
2727 reveal_type(x)
2728
2729 Running a static type checker (e.g., ``mypy``) on this example
2730 will produce output similar to 'Revealed type is "builtins.int"'.
2731
2732 At runtime, the function prints the runtime type of the
2733 argument and returns it unchanged.
2734
2735 """
2736 print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr)
2737 return obj
2738
2739
2740if hasattr(typing, "_ASSERT_NEVER_REPR_MAX_LENGTH"): # 3.11+
2741 _ASSERT_NEVER_REPR_MAX_LENGTH = typing._ASSERT_NEVER_REPR_MAX_LENGTH
2742else: # <=3.10
2743 _ASSERT_NEVER_REPR_MAX_LENGTH = 100
2744
2745
2746if hasattr(typing, "assert_never"): # 3.11+
2747 assert_never = typing.assert_never
2748else: # <=3.10
2749 def assert_never(arg: Never, /) -> Never:
2750 """Assert to the type checker that a line of code is unreachable.
2751
2752 Example::
2753
2754 def int_or_str(arg: int | str) -> None:
2755 match arg:
2756 case int():
2757 print("It's an int")
2758 case str():
2759 print("It's a str")
2760 case _:
2761 assert_never(arg)
2762
2763 If a type checker finds that a call to assert_never() is
2764 reachable, it will emit an error.
2765
2766 At runtime, this throws an exception when called.
2767
2768 """
2769 value = repr(arg)
2770 if len(value) > _ASSERT_NEVER_REPR_MAX_LENGTH:
2771 value = value[:_ASSERT_NEVER_REPR_MAX_LENGTH] + '...'
2772 raise AssertionError(f"Expected code to be unreachable, but got: {value}")
2773
2774
2775# dataclass_transform exists in 3.11 but lacks the frozen_default parameter
2776# Breakpoint: https://github.com/python/cpython/pull/99958
2777if sys.version_info >= (3, 12): # 3.12+
2778 dataclass_transform = typing.dataclass_transform
2779else: # <=3.11
2780 def dataclass_transform(
2781 *,
2782 eq_default: bool = True,
2783 order_default: bool = False,
2784 kw_only_default: bool = False,
2785 frozen_default: bool = False,
2786 field_specifiers: typing.Tuple[
2787 typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]],
2788 ...
2789 ] = (),
2790 **kwargs: typing.Any,
2791 ) -> typing.Callable[[T], T]:
2792 """Decorator that marks a function, class, or metaclass as providing
2793 dataclass-like behavior.
2794
2795 Example:
2796
2797 from typing_extensions import dataclass_transform
2798
2799 _T = TypeVar("_T")
2800
2801 # Used on a decorator function
2802 @dataclass_transform()
2803 def create_model(cls: type[_T]) -> type[_T]:
2804 ...
2805 return cls
2806
2807 @create_model
2808 class CustomerModel:
2809 id: int
2810 name: str
2811
2812 # Used on a base class
2813 @dataclass_transform()
2814 class ModelBase: ...
2815
2816 class CustomerModel(ModelBase):
2817 id: int
2818 name: str
2819
2820 # Used on a metaclass
2821 @dataclass_transform()
2822 class ModelMeta(type): ...
2823
2824 class ModelBase(metaclass=ModelMeta): ...
2825
2826 class CustomerModel(ModelBase):
2827 id: int
2828 name: str
2829
2830 Each of the ``CustomerModel`` classes defined in this example will now
2831 behave similarly to a dataclass created with the ``@dataclasses.dataclass``
2832 decorator. For example, the type checker will synthesize an ``__init__``
2833 method.
2834
2835 The arguments to this decorator can be used to customize this behavior:
2836 - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be
2837 True or False if it is omitted by the caller.
2838 - ``order_default`` indicates whether the ``order`` parameter is
2839 assumed to be True or False if it is omitted by the caller.
2840 - ``kw_only_default`` indicates whether the ``kw_only`` parameter is
2841 assumed to be True or False if it is omitted by the caller.
2842 - ``frozen_default`` indicates whether the ``frozen`` parameter is
2843 assumed to be True or False if it is omitted by the caller.
2844 - ``field_specifiers`` specifies a static list of supported classes
2845 or functions that describe fields, similar to ``dataclasses.field()``.
2846
2847 At runtime, this decorator records its arguments in the
2848 ``__dataclass_transform__`` attribute on the decorated object.
2849
2850 See PEP 681 for details.
2851
2852 """
2853 def decorator(cls_or_fn):
2854 cls_or_fn.__dataclass_transform__ = {
2855 "eq_default": eq_default,
2856 "order_default": order_default,
2857 "kw_only_default": kw_only_default,
2858 "frozen_default": frozen_default,
2859 "field_specifiers": field_specifiers,
2860 "kwargs": kwargs,
2861 }
2862 return cls_or_fn
2863 return decorator
2864
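# An illustrative sketch of the runtime behaviour described in the docstring
# (comments only; ``create_model`` is an example name):
#
#     @dataclass_transform(kw_only_default=True)
#     def create_model(cls):
#         return cls
#
#     create_model.__dataclass_transform__["kw_only_default"]   # True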
2865
2866if hasattr(typing, "override"): # 3.12+
2867 override = typing.override
2868else: # <=3.11
2869 _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any])
2870
2871 def override(arg: _F, /) -> _F:
2872 """Indicate that a method is intended to override a method in a base class.
2873
2874 Usage:
2875
2876 class Base:
2877 def method(self) -> None:
2878 pass
2879
2880 class Child(Base):
2881 @override
2882 def method(self) -> None:
2883 super().method()
2884
2885 When this decorator is applied to a method, the type checker will
2886 validate that it overrides a method with the same name on a base class.
2887 This helps prevent bugs that may occur when a base class is changed
2888 without an equivalent change to a child class.
2889
2890 There is no runtime checking of these properties. The decorator
2891 sets the ``__override__`` attribute to ``True`` on the decorated object
2892 to allow runtime introspection.
2893
2894 See PEP 698 for details.
2895
2896 """
2897 try:
2898 arg.__override__ = True
2899 except (AttributeError, TypeError):
2900 # Skip the attribute silently if it is not writable.
2901 # AttributeError happens if the object has __slots__ or a
2902 # read-only property, TypeError if it's a builtin class.
2903 pass
2904 return arg
2905
2906
2907# Python 3.13.3+ contains a fix for the wrapped __new__
2908# Breakpoint: https://github.com/python/cpython/pull/132160
2909if sys.version_info >= (3, 13, 3):
2910 deprecated = warnings.deprecated
2911else:
2912 _T = typing.TypeVar("_T")
2913
2914 class deprecated:
2915 """Indicate that a class, function or overload is deprecated.
2916
2917 When this decorator is applied to an object, the type checker
2918 will generate a diagnostic on usage of the deprecated object.
2919
2920 Usage:
2921
2922 @deprecated("Use B instead")
2923 class A:
2924 pass
2925
2926 @deprecated("Use g instead")
2927 def f():
2928 pass
2929
2930 @overload
2931 @deprecated("int support is deprecated")
2932 def g(x: int) -> int: ...
2933 @overload
2934 def g(x: str) -> int: ...
2935
2936 The warning specified by *category* will be emitted at runtime
2937 on use of deprecated objects. For functions, that happens on calls;
2938 for classes, on instantiation and on creation of subclasses.
2939 If the *category* is ``None``, no warning is emitted at runtime.
2940 The *stacklevel* determines where the
2941 warning is emitted. If it is ``1`` (the default), the warning
2942 is emitted at the direct caller of the deprecated object; if it
2943 is higher, it is emitted further up the stack.
2944 Static type checker behavior is not affected by the *category*
2945 and *stacklevel* arguments.
2946
2947 The deprecation message passed to the decorator is saved in the
2948 ``__deprecated__`` attribute on the decorated object.
2949 If applied to an overload, the decorator
2950 must be after the ``@overload`` decorator for the attribute to
2951 exist on the overload as returned by ``get_overloads()``.
2952
2953 See PEP 702 for details.
2954
2955 """
2956 def __init__(
2957 self,
2958 message: str,
2959 /,
2960 *,
2961 category: typing.Optional[typing.Type[Warning]] = DeprecationWarning,
2962 stacklevel: int = 1,
2963 ) -> None:
2964 if not isinstance(message, str):
2965 raise TypeError(
2966 "Expected an object of type str for 'message', not "
2967 f"{type(message).__name__!r}"
2968 )
2969 self.message = message
2970 self.category = category
2971 self.stacklevel = stacklevel
2972
2973 def __call__(self, arg: _T, /) -> _T:
2974 # Make sure the inner functions created below don't
2975 # retain a reference to self.
2976 msg = self.message
2977 category = self.category
2978 stacklevel = self.stacklevel
2979 if category is None:
2980 arg.__deprecated__ = msg
2981 return arg
2982 elif isinstance(arg, type):
2983 import functools
2984 from types import MethodType
2985
2986 original_new = arg.__new__
2987
2988 @functools.wraps(original_new)
2989 def __new__(cls, /, *args, **kwargs):
2990 if cls is arg:
2991 warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
2992 if original_new is not object.__new__:
2993 return original_new(cls, *args, **kwargs)
2994 # Mirrors a similar check in object.__new__.
2995 elif cls.__init__ is object.__init__ and (args or kwargs):
2996 raise TypeError(f"{cls.__name__}() takes no arguments")
2997 else:
2998 return original_new(cls)
2999
3000 arg.__new__ = staticmethod(__new__)
3001
3002 original_init_subclass = arg.__init_subclass__
3003 # We need slightly different behavior if __init_subclass__
3004 # is a bound method (likely if it was implemented in Python)
3005 if isinstance(original_init_subclass, MethodType):
3006 original_init_subclass = original_init_subclass.__func__
3007
3008 @functools.wraps(original_init_subclass)
3009 def __init_subclass__(*args, **kwargs):
3010 warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
3011 return original_init_subclass(*args, **kwargs)
3012
3013 arg.__init_subclass__ = classmethod(__init_subclass__)
3014 # Or otherwise, which likely means it's a builtin such as
3015 # object's implementation of __init_subclass__.
3016 else:
3017 @functools.wraps(original_init_subclass)
3018 def __init_subclass__(*args, **kwargs):
3019 warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
3020 return original_init_subclass(*args, **kwargs)
3021
3022 arg.__init_subclass__ = __init_subclass__
3023
3024 arg.__deprecated__ = __new__.__deprecated__ = msg
3025 __init_subclass__.__deprecated__ = msg
3026 return arg
3027 elif callable(arg):
3028 import asyncio.coroutines
3029 import functools
3030 import inspect
3031
3032 @functools.wraps(arg)
3033 def wrapper(*args, **kwargs):
3034 warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
3035 return arg(*args, **kwargs)
3036
3037 if asyncio.coroutines.iscoroutinefunction(arg):
3038 # Breakpoint: https://github.com/python/cpython/pull/99247
3039 if sys.version_info >= (3, 12):
3040 wrapper = inspect.markcoroutinefunction(wrapper)
3041 else:
3042 wrapper._is_coroutine = asyncio.coroutines._is_coroutine
3043
3044 arg.__deprecated__ = wrapper.__deprecated__ = msg
3045 return wrapper
3046 else:
3047 raise TypeError(
3048 "@deprecated decorator with non-None category must be applied to "
3049 f"a class or callable, not {arg!r}"
3050 )
3051
3052# Breakpoint: https://github.com/python/cpython/pull/23702
3053if sys.version_info < (3, 10):
3054 def _is_param_expr(arg):
3055 return arg is ... or isinstance(
3056 arg, (tuple, list, ParamSpec, _ConcatenateGenericAlias)
3057 )
3058else:
3059 def _is_param_expr(arg):
3060 return arg is ... or isinstance(
3061 arg,
3062 (
3063 tuple,
3064 list,
3065 ParamSpec,
3066 _ConcatenateGenericAlias,
3067 typing._ConcatenateGenericAlias,
3068 ),
3069 )
3070
3071
3072# We have to do some monkey patching to deal with the dual nature of
3073# Unpack/TypeVarTuple:
3074# - We want Unpack to be a kind of TypeVar so it gets accepted in
3075# Generic[Unpack[Ts]]
3076# - We want it to *not* be treated as a TypeVar for the purposes of
3077# counting generic parameters, so that when we subscript a generic,
3078# the runtime doesn't try to substitute the Unpack with the subscripted type.
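#
# For example, with ``Ts = TypeVarTuple('Ts')``:
#
#     class C(Generic[Unpack[Ts]]): ...   # Unpack is accepted like a TypeVar,
#     C[int]                              # but it absorbs any number of
#     C[int, str]                         # arguments, so the usual parameter
#                                         # count check must not reject these.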
3079if not hasattr(typing, "TypeVarTuple"):
3080 def _check_generic(cls, parameters, elen=_marker):
3081 """Check correct count for parameters of a generic cls (internal helper).
3082
3083 This gives a nice error message in case of count mismatch.
3084 """
3085 # If substituting a single ParamSpec with multiple arguments
3086 # we do not check the count
3087 if (inspect.isclass(cls) and issubclass(cls, typing.Generic)
3088 and len(cls.__parameters__) == 1
3089 and isinstance(cls.__parameters__[0], ParamSpec)
3090 and parameters
3091 and not _is_param_expr(parameters[0])
3092 ):
3093 # Generic modifies parameters variable, but here we cannot do this
3094 return
3095
3096 if not elen:
3097 raise TypeError(f"{cls} is not a generic class")
3098 if elen is _marker:
3099 if not hasattr(cls, "__parameters__") or not cls.__parameters__:
3100 raise TypeError(f"{cls} is not a generic class")
3101 elen = len(cls.__parameters__)
3102 alen = len(parameters)
3103 if alen != elen:
3104 expect_val = elen
3105 if hasattr(cls, "__parameters__"):
3106 parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
3107 num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters)
3108 if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples):
3109 return
3110
3111 # deal with TypeVarLike defaults
3112 # required TypeVarLikes cannot appear after a defaulted one.
3113 if alen < elen:
3114 # since we validate TypeVarLike default in _collect_type_vars
3115 # or _collect_parameters we can safely check parameters[alen]
3116 if (
3117 getattr(parameters[alen], '__default__', NoDefault)
3118 is not NoDefault
3119 ):
3120 return
3121
3122 num_default_tv = sum(getattr(p, '__default__', NoDefault)
3123 is not NoDefault for p in parameters)
3124
3125 elen -= num_default_tv
3126
3127 expect_val = f"at least {elen}"
3128
3129 # Breakpoint: https://github.com/python/cpython/pull/27515
3130 things = "arguments" if sys.version_info >= (3, 10) else "parameters"
3131 raise TypeError(f"Too {'many' if alen > elen else 'few'} {things}"
3132 f" for {cls}; actual {alen}, expected {expect_val}")
3133else:
3134 # Python 3.11+
3135
3136 def _check_generic(cls, parameters, elen):
3137 """Check correct count for parameters of a generic cls (internal helper).
3138
3139 This gives a nice error message in case of count mismatch.
3140 """
3141 if not elen:
3142 raise TypeError(f"{cls} is not a generic class")
3143 alen = len(parameters)
3144 if alen != elen:
3145 expect_val = elen
3146 if hasattr(cls, "__parameters__"):
3147 parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
3148
3149 # deal with TypeVarLike defaults
3150 # required TypeVarLikes cannot appear after a defaulted one.
3151 if alen < elen:
3152 # since we validate TypeVarLike default in _collect_type_vars
3153 # or _collect_parameters we can safely check parameters[alen]
3154 if (
3155 getattr(parameters[alen], '__default__', NoDefault)
3156 is not NoDefault
3157 ):
3158 return
3159
3160 num_default_tv = sum(getattr(p, '__default__', NoDefault)
3161 is not NoDefault for p in parameters)
3162
3163 elen -= num_default_tv
3164
3165 expect_val = f"at least {elen}"
3166
3167 raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments"
3168 f" for {cls}; actual {alen}, expected {expect_val}")
3169
3170if not _PEP_696_IMPLEMENTED:
3171 typing._check_generic = _check_generic
3172
3173
3174def _has_generic_or_protocol_as_origin() -> bool:
3175 try:
3176 frame = sys._getframe(2)
3177 # - Catch AttributeError: not all Python implementations have sys._getframe()
3178 # - Catch ValueError: maybe we're called from an unexpected module
3179 # and the call stack isn't deep enough
3180 except (AttributeError, ValueError):
3181 return False # err on the side of leniency
3182 else:
3183 # If we somehow get invoked from outside typing.py,
3184 # also err on the side of leniency
3185 if frame.f_globals.get("__name__") != "typing":
3186 return False
3187 origin = frame.f_locals.get("origin")
3188 # Cannot use "in" because origin may be an object with a buggy __eq__ that
3189 # throws an error.
3190 return origin is typing.Generic or origin is Protocol or origin is typing.Protocol
3191
3192
3193_TYPEVARTUPLE_TYPES = {TypeVarTuple, getattr(typing, "TypeVarTuple", None)}
3194
3195
3196def _is_unpacked_typevartuple(x) -> bool:
3197 if get_origin(x) is not Unpack:
3198 return False
3199 args = get_args(x)
3200 return (
3201 bool(args)
3202 and len(args) == 1
3203 and type(args[0]) in _TYPEVARTUPLE_TYPES
3204 )
3205
3206
3207# Python 3.11+ _collect_type_vars was renamed to _collect_parameters
3208if hasattr(typing, '_collect_type_vars'):
3209 def _collect_type_vars(types, typevar_types=None):
        """Collect all type variables contained in types in order of
3211 first appearance (lexicographic order). For example::
3212
3213 _collect_type_vars((T, List[S, T])) == (T, S)
3214 """
3215 if typevar_types is None:
3216 typevar_types = typing.TypeVar
3217 tvars = []
3218
3219 # A required TypeVarLike cannot appear after a TypeVarLike with a default
3220 # if it was a direct call to `Generic[]` or `Protocol[]`
3221 enforce_default_ordering = _has_generic_or_protocol_as_origin()
3222 default_encountered = False
3223
3224 # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
3225 type_var_tuple_encountered = False
3226
3227 for t in types:
3228 if _is_unpacked_typevartuple(t):
3229 type_var_tuple_encountered = True
3230 elif (
3231 isinstance(t, typevar_types) and not isinstance(t, _UnpackAlias)
3232 and t not in tvars
3233 ):
3234 if enforce_default_ordering:
3235 has_default = getattr(t, '__default__', NoDefault) is not NoDefault
3236 if has_default:
3237 if type_var_tuple_encountered:
3238 raise TypeError('Type parameter with a default'
3239 ' follows TypeVarTuple')
3240 default_encountered = True
3241 elif default_encountered:
3242 raise TypeError(f'Type parameter {t!r} without a default'
3243 ' follows type parameter with a default')
3244
3245 tvars.append(t)
3246 if _should_collect_from_parameters(t):
3247 tvars.extend([t for t in t.__parameters__ if t not in tvars])
3248 elif isinstance(t, tuple):
3249 # Collect nested type_vars
3250 # tuple wrapped by _prepare_paramspec_params(cls, params)
3251 for x in t:
3252 for collected in _collect_type_vars([x]):
3253 if collected not in tvars:
3254 tvars.append(collected)
3255 return tuple(tvars)
3256
3257 typing._collect_type_vars = _collect_type_vars
3258else:
3259 def _collect_parameters(args):
3260 """Collect all type variables and parameter specifications in args
3261 in order of first appearance (lexicographic order).
3262
3263 For example::
3264
3265 assert _collect_parameters((T, Callable[P, T])) == (T, P)
3266 """
3267 parameters = []
3268
3269 # A required TypeVarLike cannot appear after a TypeVarLike with default
3270 # if it was a direct call to `Generic[]` or `Protocol[]`
3271 enforce_default_ordering = _has_generic_or_protocol_as_origin()
3272 default_encountered = False
3273
3274 # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
3275 type_var_tuple_encountered = False
3276
3277 for t in args:
3278 if isinstance(t, type):
3279 # We don't want __parameters__ descriptor of a bare Python class.
3280 pass
3281 elif isinstance(t, tuple):
3282 # `t` might be a tuple, when `ParamSpec` is substituted with
3283 # `[T, int]`, or `[int, *Ts]`, etc.
3284 for x in t:
3285 for collected in _collect_parameters([x]):
3286 if collected not in parameters:
3287 parameters.append(collected)
3288 elif hasattr(t, '__typing_subst__'):
3289 if t not in parameters:
3290 if enforce_default_ordering:
3291 has_default = (
3292 getattr(t, '__default__', NoDefault) is not NoDefault
3293 )
3294
3295 if type_var_tuple_encountered and has_default:
3296 raise TypeError('Type parameter with a default'
3297 ' follows TypeVarTuple')
3298
3299 if has_default:
3300 default_encountered = True
3301 elif default_encountered:
3302 raise TypeError(f'Type parameter {t!r} without a default'
3303 ' follows type parameter with a default')
3304
3305 parameters.append(t)
3306 else:
3307 if _is_unpacked_typevartuple(t):
3308 type_var_tuple_encountered = True
3309 for x in getattr(t, '__parameters__', ()):
3310 if x not in parameters:
3311 parameters.append(x)
3312
3313 return tuple(parameters)
3314
3315 if not _PEP_696_IMPLEMENTED:
3316 typing._collect_parameters = _collect_parameters
3317
3318# Backport typing.NamedTuple as it exists in Python 3.13.
# Support for defining generic `NamedTuple`s was added in 3.11.
3320# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8.
3321# On 3.12, we added __orig_bases__ to call-based NamedTuples
3322# On 3.13, we deprecated kwargs-based NamedTuples
3323# Breakpoint: https://github.com/python/cpython/pull/105609
3324if sys.version_info >= (3, 13):
3325 NamedTuple = typing.NamedTuple
3326else:
3327 def _make_nmtuple(name, types, module, defaults=()):
3328 fields = [n for n, t in types]
3329 annotations = {n: typing._type_check(t, f"field {n} annotation must be a type")
3330 for n, t in types}
3331 nm_tpl = collections.namedtuple(name, fields,
3332 defaults=defaults, module=module)
3333 nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations
3334 return nm_tpl
3335
3336 _prohibited_namedtuple_fields = typing._prohibited
3337 _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'})
3338
3339 class _NamedTupleMeta(type):
3340 def __new__(cls, typename, bases, ns):
3341 assert _NamedTuple in bases
3342 for base in bases:
3343 if base is not _NamedTuple and base is not typing.Generic:
3344 raise TypeError(
3345 'can only inherit from a NamedTuple type and Generic')
3346 bases = tuple(tuple if base is _NamedTuple else base for base in bases)
3347 if "__annotations__" in ns:
3348 types = ns["__annotations__"]
3349 elif "__annotate__" in ns:
3350 # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated
3351 types = ns["__annotate__"](1)
3352 else:
3353 types = {}
3354 default_names = []
3355 for field_name in types:
3356 if field_name in ns:
3357 default_names.append(field_name)
3358 elif default_names:
3359 raise TypeError(f"Non-default namedtuple field {field_name} "
3360 f"cannot follow default field"
3361 f"{'s' if len(default_names) > 1 else ''} "
3362 f"{', '.join(default_names)}")
3363 nm_tpl = _make_nmtuple(
3364 typename, types.items(),
3365 defaults=[ns[n] for n in default_names],
3366 module=ns['__module__']
3367 )
3368 nm_tpl.__bases__ = bases
3369 if typing.Generic in bases:
3370 if hasattr(typing, '_generic_class_getitem'): # 3.12+
3371 nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem)
3372 else:
3373 class_getitem = typing.Generic.__class_getitem__.__func__
3374 nm_tpl.__class_getitem__ = classmethod(class_getitem)
3375 # update from user namespace without overriding special namedtuple attributes
3376 for key, val in ns.items():
3377 if key in _prohibited_namedtuple_fields:
3378 raise AttributeError("Cannot overwrite NamedTuple attribute " + key)
3379 elif key not in _special_namedtuple_fields:
3380 if key not in nm_tpl._fields:
3381 setattr(nm_tpl, key, ns[key])
3382 try:
3383 set_name = type(val).__set_name__
3384 except AttributeError:
3385 pass
3386 else:
3387 try:
3388 set_name(val, nm_tpl, key)
3389 except BaseException as e:
3390 msg = (
3391 f"Error calling __set_name__ on {type(val).__name__!r} "
3392 f"instance {key!r} in {typename!r}"
3393 )
3394 # BaseException.add_note() existed on py311,
3395 # but the __set_name__ machinery didn't start
3396 # using add_note() until py312.
3397 # Making sure exceptions are raised in the same way
3398 # as in "normal" classes seems most important here.
3399 # Breakpoint: https://github.com/python/cpython/pull/95915
3400 if sys.version_info >= (3, 12):
3401 e.add_note(msg)
3402 raise
3403 else:
3404 raise RuntimeError(msg) from e
3405
3406 if typing.Generic in bases:
3407 nm_tpl.__init_subclass__()
3408 return nm_tpl
3409
3410 _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {})
3411
3412 def _namedtuple_mro_entries(bases):
3413 assert NamedTuple in bases
3414 return (_NamedTuple,)
3415
3416 def NamedTuple(typename, fields=_marker, /, **kwargs):
3417 """Typed version of namedtuple.
3418
3419 Usage::
3420
3421 class Employee(NamedTuple):
3422 name: str
3423 id: int
3424
3425 This is equivalent to::
3426
3427 Employee = collections.namedtuple('Employee', ['name', 'id'])
3428
3429 The resulting class has an extra __annotations__ attribute, giving a
3430 dict that maps field names to types. (The field names are also in
3431 the _fields attribute, which is part of the namedtuple API.)
3432 An alternative equivalent functional syntax is also accepted::
3433
3434 Employee = NamedTuple('Employee', [('name', str), ('id', int)])
3435 """
3436 if fields is _marker:
3437 if kwargs:
3438 deprecated_thing = "Creating NamedTuple classes using keyword arguments"
3439 deprecation_msg = (
3440 "{name} is deprecated and will be disallowed in Python {remove}. "
3441 "Use the class-based or functional syntax instead."
3442 )
3443 else:
3444 deprecated_thing = "Failing to pass a value for the 'fields' parameter"
3445 example = f"`{typename} = NamedTuple({typename!r}, [])`"
3446 deprecation_msg = (
3447 "{name} is deprecated and will be disallowed in Python {remove}. "
3448 "To create a NamedTuple class with 0 fields "
3449 "using the functional syntax, "
3450 "pass an empty list, e.g. "
3451 ) + example + "."
3452 elif fields is None:
3453 if kwargs:
3454 raise TypeError(
3455 "Cannot pass `None` as the 'fields' parameter "
3456 "and also specify fields using keyword arguments"
3457 )
3458 else:
3459 deprecated_thing = "Passing `None` as the 'fields' parameter"
3460 example = f"`{typename} = NamedTuple({typename!r}, [])`"
3461 deprecation_msg = (
3462 "{name} is deprecated and will be disallowed in Python {remove}. "
3463 "To create a NamedTuple class with 0 fields "
3464 "using the functional syntax, "
3465 "pass an empty list, e.g. "
3466 ) + example + "."
3467 elif kwargs:
3468 raise TypeError("Either list of fields or keywords"
3469 " can be provided to NamedTuple, not both")
3470 if fields is _marker or fields is None:
3471 warnings.warn(
3472 deprecation_msg.format(name=deprecated_thing, remove="3.15"),
3473 DeprecationWarning,
3474 stacklevel=2,
3475 )
3476 fields = kwargs.items()
3477 nt = _make_nmtuple(typename, fields, module=_caller())
3478 nt.__orig_bases__ = (NamedTuple,)
3479 return nt
3480
3481 NamedTuple.__mro_entries__ = _namedtuple_mro_entries
3482
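# An illustrative sketch (comments only) of what this backport enables on
# 3.9/3.10, where typing.NamedTuple rejects Generic bases; ``Group`` and ``T``
# are example names:
#
#     T = TypeVar("T")
#
#     class Group(NamedTuple, Generic[T]):
#         key: str
#         members: List[T]
#
#     Group[int]("ints", [1, 2, 3])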
3483
3484if hasattr(collections.abc, "Buffer"):
3485 Buffer = collections.abc.Buffer
3486else:
3487 class Buffer(abc.ABC): # noqa: B024
3488 """Base class for classes that implement the buffer protocol.
3489
3490 The buffer protocol allows Python objects to expose a low-level
3491 memory buffer interface. Before Python 3.12, it is not possible
3492 to implement the buffer protocol in pure Python code, or even
3493 to check whether a class implements the buffer protocol. In
3494 Python 3.12 and higher, the ``__buffer__`` method allows access
3495 to the buffer protocol from Python code, and the
3496 ``collections.abc.Buffer`` ABC allows checking whether a class
3497 implements the buffer protocol.
3498
3499 To indicate support for the buffer protocol in earlier versions,
3500 inherit from this ABC, either in a stub file or at runtime,
3501 or use ABC registration. This ABC provides no methods, because
        there are no Python-accessible methods shared by pre-3.12 buffer
3503 classes. It is useful primarily for static checks.
3504
3505 """
3506
3507 # As a courtesy, register the most common stdlib buffer classes.
3508 Buffer.register(memoryview)
3509 Buffer.register(bytearray)
3510 Buffer.register(bytes)
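    # A minimal usage sketch (comments only; ``MyBuffer`` is a hypothetical
    # class): user code can participate in these runtime checks on older
    # Pythons either by inheriting from Buffer or by registering explicitly.
    #
    #     class MyBuffer(Buffer):
    #         ...
    #
    #     isinstance(memoryview(b"abc"), Buffer)   # True (registered above)
    #     isinstance(MyBuffer(), Buffer)           # True (via inheritance)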
3511
3512
3513# Backport of types.get_original_bases, available on 3.12+ in CPython
3514if hasattr(_types, "get_original_bases"):
3515 get_original_bases = _types.get_original_bases
3516else:
3517 def get_original_bases(cls, /):
3518 """Return the class's "original" bases prior to modification by `__mro_entries__`.
3519
3520 Examples::
3521
3522 from typing import TypeVar, Generic
3523 from typing_extensions import NamedTuple, TypedDict
3524
3525 T = TypeVar("T")
3526 class Foo(Generic[T]): ...
3527 class Bar(Foo[int], float): ...
3528 class Baz(list[str]): ...
3529 Eggs = NamedTuple("Eggs", [("a", int), ("b", str)])
3530 Spam = TypedDict("Spam", {"a": int, "b": str})
3531
3532 assert get_original_bases(Bar) == (Foo[int], float)
3533 assert get_original_bases(Baz) == (list[str],)
3534 assert get_original_bases(Eggs) == (NamedTuple,)
3535 assert get_original_bases(Spam) == (TypedDict,)
3536 assert get_original_bases(int) == (object,)
3537 """
3538 try:
3539 return cls.__dict__.get("__orig_bases__", cls.__bases__)
3540 except AttributeError:
3541 raise TypeError(
3542 f'Expected an instance of type, not {type(cls).__name__!r}'
3543 ) from None
3544
3545
3546# NewType is a class on Python 3.10+, making it pickleable
3547# The error message for subclassing instances of NewType was improved on 3.11+
3548# Breakpoint: https://github.com/python/cpython/pull/30268
3549if sys.version_info >= (3, 11):
3550 NewType = typing.NewType
3551else:
3552 class NewType:
3553 """NewType creates simple unique types with almost zero
3554 runtime overhead. NewType(name, tp) is considered a subtype of tp
3555 by static type checkers. At runtime, NewType(name, tp) returns
        a dummy callable that simply returns its argument. Usage::

            UserId = NewType('UserId', int)
3558 def name_by_id(user_id: UserId) -> str:
3559 ...
3560 UserId('user') # Fails type check
3561 name_by_id(42) # Fails type check
3562 name_by_id(UserId(42)) # OK
3563 num = UserId(5) + 1 # type: int
3564 """
3565
3566 def __call__(self, obj, /):
3567 return obj
3568
3569 def __init__(self, name, tp):
3570 self.__qualname__ = name
3571 if '.' in name:
3572 name = name.rpartition('.')[-1]
3573 self.__name__ = name
3574 self.__supertype__ = tp
3575 def_mod = _caller()
3576 if def_mod != 'typing_extensions':
3577 self.__module__ = def_mod
3578
3579 def __mro_entries__(self, bases):
3580 # We defined __mro_entries__ to get a better error message
3581 # if a user attempts to subclass a NewType instance. bpo-46170
3582 supercls_name = self.__name__
3583
3584 class Dummy:
3585 def __init_subclass__(cls):
3586 subcls_name = cls.__name__
3587 raise TypeError(
3588 f"Cannot subclass an instance of NewType. "
3589 f"Perhaps you were looking for: "
3590 f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`"
3591 )
3592
3593 return (Dummy,)
3594
3595 def __repr__(self):
3596 return f'{self.__module__}.{self.__qualname__}'
3597
3598 def __reduce__(self):
3599 return self.__qualname__
3600
3601 # Breakpoint: https://github.com/python/cpython/pull/21515
3602 if sys.version_info >= (3, 10):
3603 # PEP 604 methods
3604 # It doesn't make sense to have these methods on Python <3.10
3605
3606 def __or__(self, other):
3607 return typing.Union[self, other]
3608
3609 def __ror__(self, other):
3610 return typing.Union[other, self]
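            # A minimal sketch (comments only) of what these methods enable on
            # 3.10+, reusing the hypothetical ``UserId`` alias from the docstring:
            #
            #     UserId = NewType('UserId', int)
            #     UserId | None    # equivalent to Optional[UserId]
            #     int | UserId     # the reflected case goes through __ror__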
3611
3612
3613# Breakpoint: https://github.com/python/cpython/pull/124795
3614if sys.version_info >= (3, 14):
3615 TypeAliasType = typing.TypeAliasType
3616# <=3.13
3617else:
3618 # Breakpoint: https://github.com/python/cpython/pull/103764
3619 if sys.version_info >= (3, 12):
3620 # 3.12-3.13
3621 def _is_unionable(obj):
3622 """Corresponds to is_unionable() in unionobject.c in CPython."""
3623 return obj is None or isinstance(obj, (
3624 type,
3625 _types.GenericAlias,
3626 _types.UnionType,
3627 typing.TypeAliasType,
3628 TypeAliasType,
3629 ))
3630 else:
3631 # <=3.11
3632 def _is_unionable(obj):
3633 """Corresponds to is_unionable() in unionobject.c in CPython."""
3634 return obj is None or isinstance(obj, (
3635 type,
3636 _types.GenericAlias,
3637 _types.UnionType,
3638 TypeAliasType,
3639 ))
3640
3641 if sys.version_info < (3, 10):
3642 # Copied and pasted from https://github.com/python/cpython/blob/986a4e1b6fcae7fe7a1d0a26aea446107dd58dd2/Objects/genericaliasobject.c#L568-L582,
3643 # so that we emulate the behaviour of `types.GenericAlias`
3644 # on the latest versions of CPython
3645 _ATTRIBUTE_DELEGATION_EXCLUSIONS = frozenset({
3646 "__class__",
3647 "__bases__",
3648 "__origin__",
3649 "__args__",
3650 "__unpacked__",
3651 "__parameters__",
3652 "__typing_unpacked_tuple_args__",
3653 "__mro_entries__",
3654 "__reduce_ex__",
3655 "__reduce__",
3656 "__copy__",
3657 "__deepcopy__",
3658 })
3659
3660 class _TypeAliasGenericAlias(typing._GenericAlias, _root=True):
3661 def __getattr__(self, attr):
3662 if attr in _ATTRIBUTE_DELEGATION_EXCLUSIONS:
3663 return object.__getattr__(self, attr)
3664 return getattr(self.__origin__, attr)
3665
3666
3667 class TypeAliasType:
3668 """Create named, parameterized type aliases.
3669
3670 This provides a backport of the new `type` statement in Python 3.12:
3671
3672 type ListOrSet[T] = list[T] | set[T]
3673
3674 is equivalent to:
3675
3676 T = TypeVar("T")
3677 ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,))
3678
3679 The name ListOrSet can then be used as an alias for the type it refers to.
3680
3681 The type_params argument should contain all the type parameters used
3682 in the value of the type alias. If the alias is not generic, this
3683 argument is omitted.
3684
3685 Static type checkers should only support type aliases declared using
3686 TypeAliasType that follow these rules:
3687
3688 - The first argument (the name) must be a string literal.
3689 - The TypeAliasType instance must be immediately assigned to a variable
3690 of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid,
3691 as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)').
3692
3693 """
3694
3695 def __init__(self, name: str, value, *, type_params=()):
3696 if not isinstance(name, str):
3697 raise TypeError("TypeAliasType name must be a string")
3698 if not isinstance(type_params, tuple):
3699 raise TypeError("type_params must be a tuple")
3700 self.__value__ = value
3701 self.__type_params__ = type_params
3702
3703 default_value_encountered = False
3704 parameters = []
3705 for type_param in type_params:
3706 if (
3707 not isinstance(type_param, (TypeVar, TypeVarTuple, ParamSpec))
3708 # <=3.11
3709 # Unpack Backport passes isinstance(type_param, TypeVar)
3710 or _is_unpack(type_param)
3711 ):
3712 raise TypeError(f"Expected a type param, got {type_param!r}")
3713 has_default = (
3714 getattr(type_param, '__default__', NoDefault) is not NoDefault
3715 )
3716 if default_value_encountered and not has_default:
3717 raise TypeError(f"non-default type parameter '{type_param!r}'"
3718 " follows default type parameter")
3719 if has_default:
3720 default_value_encountered = True
3721 if isinstance(type_param, TypeVarTuple):
3722 parameters.extend(type_param)
3723 else:
3724 parameters.append(type_param)
3725 self.__parameters__ = tuple(parameters)
3726 def_mod = _caller()
3727 if def_mod != 'typing_extensions':
3728 self.__module__ = def_mod
3729 # Setting this attribute closes the TypeAliasType from further modification
3730 self.__name__ = name
3731
3732 def __setattr__(self, name: str, value: object, /) -> None:
3733 if hasattr(self, "__name__"):
3734 self._raise_attribute_error(name)
3735 super().__setattr__(name, value)
3736
3737 def __delattr__(self, name: str, /) -> Never:
3738 self._raise_attribute_error(name)
3739
3740 def _raise_attribute_error(self, name: str) -> Never:
3741 # Match the Python 3.12 error messages exactly
3742 if name == "__name__":
3743 raise AttributeError("readonly attribute")
3744 elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}:
3745 raise AttributeError(
3746 f"attribute '{name}' of 'typing.TypeAliasType' objects "
3747 "is not writable"
3748 )
3749 else:
3750 raise AttributeError(
3751 f"'typing.TypeAliasType' object has no attribute '{name}'"
3752 )
3753
3754 def __repr__(self) -> str:
3755 return self.__name__
3756
3757 if sys.version_info < (3, 11):
3758 def _check_single_param(self, param, recursion=0):
3759 # Allow [], [int], [int, str], [int, ...], [int, T]
3760 if param is ...:
3761 return ...
3762 if param is None:
3763 return None
3764 # Note in <= 3.9 _ConcatenateGenericAlias inherits from list
3765 if isinstance(param, list) and recursion == 0:
3766 return [self._check_single_param(arg, recursion+1)
3767 for arg in param]
3768 return typing._type_check(
3769 param, f'Subscripting {self.__name__} requires a type.'
3770 )
3771
3772 def _check_parameters(self, parameters):
3773 if sys.version_info < (3, 11):
3774 return tuple(
3775 self._check_single_param(item)
3776 for item in parameters
3777 )
3778 return tuple(typing._type_check(
3779 item, f'Subscripting {self.__name__} requires a type.'
3780 )
3781 for item in parameters
3782 )
3783
3784 def __getitem__(self, parameters):
3785 if not self.__type_params__:
3786 raise TypeError("Only generic type aliases are subscriptable")
3787 if not isinstance(parameters, tuple):
3788 parameters = (parameters,)
            # _types.GenericAlias also exists on 3.9, but using it there
            # creates problems with Concatenate
3790 if sys.version_info >= (3, 10):
3791 return _types.GenericAlias(self, parameters)
3792 type_vars = _collect_type_vars(parameters)
3793 parameters = self._check_parameters(parameters)
3794 alias = _TypeAliasGenericAlias(self, parameters)
3795 # alias.__parameters__ is not complete if Concatenate is present
3796 # as it is converted to a list from which no parameters are extracted.
3797 if alias.__parameters__ != type_vars:
3798 alias.__parameters__ = type_vars
3799 return alias
3800
3801 def __reduce__(self):
3802 return self.__name__
3803
3804 def __init_subclass__(cls, *args, **kwargs):
3805 raise TypeError(
3806 "type 'typing_extensions.TypeAliasType' is not an acceptable base type"
3807 )
3808
3809 # The presence of this method convinces typing._type_check
3810 # that TypeAliasTypes are types.
3811 def __call__(self):
3812 raise TypeError("Type alias is not callable")
3813
3814 # Breakpoint: https://github.com/python/cpython/pull/21515
3815 if sys.version_info >= (3, 10):
3816 def __or__(self, right):
3817 # For forward compatibility with 3.12, reject Unions
3818 # that are not accepted by the built-in Union.
3819 if not _is_unionable(right):
3820 return NotImplemented
3821 return typing.Union[self, right]
3822
3823 def __ror__(self, left):
3824 if not _is_unionable(left):
3825 return NotImplemented
3826 return typing.Union[left, self]
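    # A minimal usage sketch (comments only; ``ListOrSet`` and ``T`` are
    # hypothetical names) of the backported runtime behaviour described in
    # the docstring above:
    #
    #     T = TypeVar("T")
    #     ListOrSet = TypeAliasType(
    #         "ListOrSet", typing.Union[typing.List[T], typing.Set[T]], type_params=(T,)
    #     )
    #
    #     ListOrSet.__value__    # Union[List[T], Set[T]]
    #     ListOrSet[int]         # subscripting returns a generic alias
    #     ListOrSet | None       # usable in PEP 604 unions on 3.10+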
3827
3828
3829if hasattr(typing, "is_protocol"):
3830 is_protocol = typing.is_protocol
3831 get_protocol_members = typing.get_protocol_members
3832else:
3833 def is_protocol(tp: type, /) -> bool:
3834 """Return True if the given type is a Protocol.
3835
3836 Example::
3837
3838 >>> from typing_extensions import Protocol, is_protocol
3839 >>> class P(Protocol):
3840 ... def a(self) -> str: ...
3841 ... b: int
3842 >>> is_protocol(P)
3843 True
3844 >>> is_protocol(int)
3845 False
3846 """
3847 return (
3848 isinstance(tp, type)
3849 and getattr(tp, '_is_protocol', False)
3850 and tp is not Protocol
3851 and tp is not typing.Protocol
3852 )
3853
3854 def get_protocol_members(tp: type, /) -> typing.FrozenSet[str]:
3855 """Return the set of members defined in a Protocol.
3856
3857 Example::
3858
3859 >>> from typing_extensions import Protocol, get_protocol_members
3860 >>> class P(Protocol):
3861 ... def a(self) -> str: ...
3862 ... b: int
3863 >>> get_protocol_members(P)
3864 frozenset({'a', 'b'})
3865
3866 Raise a TypeError for arguments that are not Protocols.
3867 """
3868 if not is_protocol(tp):
3869 raise TypeError(f'{tp!r} is not a Protocol')
3870 if hasattr(tp, '__protocol_attrs__'):
3871 return frozenset(tp.__protocol_attrs__)
3872 return frozenset(_get_protocol_attrs(tp))
3873
3874
3875if hasattr(typing, "Doc"):
3876 Doc = typing.Doc
3877else:
3878 class Doc:
3879 """Define the documentation of a type annotation using ``Annotated``, to be
3880 used in class attributes, function and method parameters, return values,
3881 and variables.
3882
3883 The value should be a positional-only string literal to allow static tools
3884 like editors and documentation generators to use it.
3885
3886 This complements docstrings.
3887
3888 The string value passed is available in the attribute ``documentation``.
3889
3890 Example::
3891
3892 >>> from typing_extensions import Annotated, Doc
3893 >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ...
3894 """
3895 def __init__(self, documentation: str, /) -> None:
3896 self.documentation = documentation
3897
3898 def __repr__(self) -> str:
3899 return f"Doc({self.documentation!r})"
3900
3901 def __hash__(self) -> int:
3902 return hash(self.documentation)
3903
3904 def __eq__(self, other: object) -> bool:
3905 if not isinstance(other, Doc):
3906 return NotImplemented
3907 return self.documentation == other.documentation
3908
3909
3910_CapsuleType = getattr(_types, "CapsuleType", None)
3911
3912if _CapsuleType is None:
3913 try:
3914 import _socket
3915 except ImportError:
3916 pass
3917 else:
3918 _CAPI = getattr(_socket, "CAPI", None)
3919 if _CAPI is not None:
3920 _CapsuleType = type(_CAPI)
3921
3922if _CapsuleType is not None:
3923 CapsuleType = _CapsuleType
3924 __all__.append("CapsuleType")
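    # A minimal sketch (comments only; ``get_capi`` is a hypothetical function):
    # CapsuleType is mainly useful for annotating capsule objects handed out by
    # C extension modules.
    #
    #     def get_capi() -> CapsuleType: ...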
3925
3926
3927if sys.version_info >= (3, 14):
3928 from annotationlib import Format, get_annotations
3929else:
3930 # Available since Python 3.14.0a3
3931 # PR: https://github.com/python/cpython/pull/124415
3932 class Format(enum.IntEnum):
3933 VALUE = 1
3934 VALUE_WITH_FAKE_GLOBALS = 2
3935 FORWARDREF = 3
3936 STRING = 4
3937
3938 # Available since Python 3.14.0a1
3939 # PR: https://github.com/python/cpython/pull/119891
3940 def get_annotations(obj, *, globals=None, locals=None, eval_str=False,
3941 format=Format.VALUE):
3942 """Compute the annotations dict for an object.
3943
3944 obj may be a callable, class, or module.
3945 Passing in an object of any other type raises TypeError.
3946
3947 Returns a dict. get_annotations() returns a new dict every time
3948 it's called; calling it twice on the same object will return two
3949 different but equivalent dicts.
3950
3951 This is a backport of `inspect.get_annotations`, which has been
3952 in the standard library since Python 3.10. See the standard library
3953 documentation for more:
3954
3955 https://docs.python.org/3/library/inspect.html#inspect.get_annotations
3956
3957 This backport adds the *format* argument introduced by PEP 649. The
3958 three formats supported are:
3959 * VALUE: the annotations are returned as-is. This is the default and
3960 it is compatible with the behavior on previous Python versions.
3961 * FORWARDREF: return annotations as-is if possible, but replace any
3962 undefined names with ForwardRef objects. The implementation proposed by
3963 PEP 649 relies on language changes that cannot be backported; the
3964 typing-extensions implementation simply returns the same result as VALUE.
3965 * STRING: return annotations as strings, in a format close to the original
3966 source. Again, this behavior cannot be replicated directly in a backport.
3967 As an approximation, typing-extensions retrieves the annotations under
3968 VALUE semantics and then stringifies them.
3969
3970 The purpose of this backport is to allow users who would like to use
3971 FORWARDREF or STRING semantics once PEP 649 is implemented, but who also
3972 want to support earlier Python versions, to simply write:
3973
3974 typing_extensions.get_annotations(obj, format=Format.FORWARDREF)
3975
3976 """
3977 format = Format(format)
3978 if format is Format.VALUE_WITH_FAKE_GLOBALS:
3979 raise ValueError(
3980 "The VALUE_WITH_FAKE_GLOBALS format is for internal use only"
3981 )
3982
3983 if eval_str and format is not Format.VALUE:
3984 raise ValueError("eval_str=True is only supported with format=Format.VALUE")
3985
3986 if isinstance(obj, type):
3987 # class
3988 obj_dict = getattr(obj, '__dict__', None)
3989 if obj_dict and hasattr(obj_dict, 'get'):
3990 ann = obj_dict.get('__annotations__', None)
3991 if isinstance(ann, _types.GetSetDescriptorType):
3992 ann = None
3993 else:
3994 ann = None
3995
3996 obj_globals = None
3997 module_name = getattr(obj, '__module__', None)
3998 if module_name:
3999 module = sys.modules.get(module_name, None)
4000 if module:
4001 obj_globals = getattr(module, '__dict__', None)
4002 obj_locals = dict(vars(obj))
4003 unwrap = obj
4004 elif isinstance(obj, _types.ModuleType):
4005 # module
4006 ann = getattr(obj, '__annotations__', None)
4007 obj_globals = obj.__dict__
4008 obj_locals = None
4009 unwrap = None
4010 elif callable(obj):
            # this includes types.FunctionType, types.BuiltinFunctionType,
4012 # types.BuiltinMethodType, functools.partial, functools.singledispatch,
4013 # "class funclike" from Lib/test/test_inspect... on and on it goes.
4014 ann = getattr(obj, '__annotations__', None)
4015 obj_globals = getattr(obj, '__globals__', None)
4016 obj_locals = None
4017 unwrap = obj
4018 elif hasattr(obj, '__annotations__'):
4019 ann = obj.__annotations__
4020 obj_globals = obj_locals = unwrap = None
4021 else:
4022 raise TypeError(f"{obj!r} is not a module, class, or callable.")
4023
4024 if ann is None:
4025 return {}
4026
4027 if not isinstance(ann, dict):
4028 raise ValueError(f"{obj!r}.__annotations__ is neither a dict nor None")
4029
4030 if not ann:
4031 return {}
4032
4033 if not eval_str:
4034 if format is Format.STRING:
4035 return {
4036 key: value if isinstance(value, str) else typing._type_repr(value)
4037 for key, value in ann.items()
4038 }
4039 return dict(ann)
4040
4041 if unwrap is not None:
4042 while True:
4043 if hasattr(unwrap, '__wrapped__'):
4044 unwrap = unwrap.__wrapped__
4045 continue
4046 if isinstance(unwrap, functools.partial):
4047 unwrap = unwrap.func
4048 continue
4049 break
4050 if hasattr(unwrap, "__globals__"):
4051 obj_globals = unwrap.__globals__
4052
4053 if globals is None:
4054 globals = obj_globals
4055 if locals is None:
4056 locals = obj_locals or {}
4057
4058 # "Inject" type parameters into the local namespace
4059 # (unless they are shadowed by assignments *in* the local namespace),
4060 # as a way of emulating annotation scopes when calling `eval()`
4061 if type_params := getattr(obj, "__type_params__", ()):
4062 locals = {param.__name__: param for param in type_params} | locals
4063
4064 return_value = {key:
4065 value if not isinstance(value, str) else eval(value, globals, locals)
4066 for key, value in ann.items() }
4067 return return_value
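    # A minimal sketch (comments only; ``f`` and ``Undefined`` are hypothetical)
    # of how the backported *format* and *eval_str* parameters interact:
    #
    #     def f(x: "int", y: "Undefined") -> None: ...
    #
    #     get_annotations(f)                            # strings returned as-is (VALUE)
    #     get_annotations(f, format=Format.FORWARDREF)  # same result as VALUE in this backport
    #     get_annotations(f, format=Format.STRING)      # non-string values are stringified
    #     get_annotations(f, eval_str=True)             # raises NameError for 'Undefined'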
4068
4069
4070if hasattr(typing, "evaluate_forward_ref"):
4071 evaluate_forward_ref = typing.evaluate_forward_ref
4072else:
4073 # Implements annotationlib.ForwardRef.evaluate
4074 def _eval_with_owner(
4075 forward_ref, *, owner=None, globals=None, locals=None, type_params=None
4076 ):
4077 if forward_ref.__forward_evaluated__:
4078 return forward_ref.__forward_value__
4079 if getattr(forward_ref, "__cell__", None) is not None:
4080 try:
4081 value = forward_ref.__cell__.cell_contents
4082 except ValueError:
4083 pass
4084 else:
4085 forward_ref.__forward_evaluated__ = True
4086 forward_ref.__forward_value__ = value
4087 return value
4088 if owner is None:
4089 owner = getattr(forward_ref, "__owner__", None)
4090
4091 if (
4092 globals is None
4093 and getattr(forward_ref, "__forward_module__", None) is not None
4094 ):
4095 globals = getattr(
4096 sys.modules.get(forward_ref.__forward_module__, None), "__dict__", None
4097 )
4098 if globals is None:
4099 globals = getattr(forward_ref, "__globals__", None)
4100 if globals is None:
4101 if isinstance(owner, type):
4102 module_name = getattr(owner, "__module__", None)
4103 if module_name:
4104 module = sys.modules.get(module_name, None)
4105 if module:
4106 globals = getattr(module, "__dict__", None)
4107 elif isinstance(owner, _types.ModuleType):
4108 globals = getattr(owner, "__dict__", None)
4109 elif callable(owner):
4110 globals = getattr(owner, "__globals__", None)
4111
4112 # If we pass None to eval() below, the globals of this module are used.
4113 if globals is None:
4114 globals = {}
4115
4116 if locals is None:
4117 locals = {}
4118 if isinstance(owner, type):
4119 locals.update(vars(owner))
4120
4121 if type_params is None and owner is not None:
4122 # "Inject" type parameters into the local namespace
4123 # (unless they are shadowed by assignments *in* the local namespace),
4124 # as a way of emulating annotation scopes when calling `eval()`
4125 type_params = getattr(owner, "__type_params__", None)
4126
4127 # Type parameters exist in their own scope, which is logically
4128 # between the locals and the globals. We simulate this by adding
4129 # them to the globals.
4130 if type_params is not None:
4131 globals = dict(globals)
4132 for param in type_params:
4133 globals[param.__name__] = param
4134
4135 arg = forward_ref.__forward_arg__
4136 if arg.isidentifier() and not keyword.iskeyword(arg):
4137 if arg in locals:
4138 value = locals[arg]
4139 elif arg in globals:
4140 value = globals[arg]
4141 elif hasattr(builtins, arg):
4142 return getattr(builtins, arg)
4143 else:
4144 raise NameError(arg)
4145 else:
4146 code = forward_ref.__forward_code__
4147 value = eval(code, globals, locals)
4148 forward_ref.__forward_evaluated__ = True
4149 forward_ref.__forward_value__ = value
4150 return value
4151
4152 def evaluate_forward_ref(
4153 forward_ref,
4154 *,
4155 owner=None,
4156 globals=None,
4157 locals=None,
4158 type_params=None,
4159 format=None,
4160 _recursive_guard=frozenset(),
4161 ):
4162 """Evaluate a forward reference as a type hint.
4163
4164 This is similar to calling the ForwardRef.evaluate() method,
4165 but unlike that method, evaluate_forward_ref() also:
4166
4167 * Recursively evaluates forward references nested within the type hint.
4168 * Rejects certain objects that are not valid type hints.
4169 * Replaces type hints that evaluate to None with types.NoneType.
4170 * Supports the *FORWARDREF* and *STRING* formats.
4171
4172 *forward_ref* must be an instance of ForwardRef. *owner*, if given,
        should be the object that holds the annotations that the forward reference
        was derived from, such as a module, class object, or function. It is used to
4175 infer the namespaces to use for looking up names. *globals* and *locals*
4176 can also be explicitly given to provide the global and local namespaces.
4177 *type_params* is a tuple of type parameters that are in scope when
4178 evaluating the forward reference. This parameter must be provided (though
4179 it may be an empty tuple) if *owner* is not given and the forward reference
4180 does not already have an owner set. *format* specifies the format of the
4181 annotation and is a member of the annotationlib.Format enum.
4182
4183 """
4184 if format == Format.STRING:
4185 return forward_ref.__forward_arg__
4186 if forward_ref.__forward_arg__ in _recursive_guard:
4187 return forward_ref
4188
4189 # Evaluate the forward reference
4190 try:
4191 value = _eval_with_owner(
4192 forward_ref,
4193 owner=owner,
4194 globals=globals,
4195 locals=locals,
4196 type_params=type_params,
4197 )
4198 except NameError:
4199 if format == Format.FORWARDREF:
4200 return forward_ref
4201 else:
4202 raise
4203
4204 if isinstance(value, str):
4205 value = ForwardRef(value)
4206
4207 # Recursively evaluate the type
4208 if isinstance(value, ForwardRef):
4209 if getattr(value, "__forward_module__", True) is not None:
4210 globals = None
4211 return evaluate_forward_ref(
4212 value,
4213 globals=globals,
4214 locals=locals,
4215 type_params=type_params, owner=owner,
4216 _recursive_guard=_recursive_guard, format=format
4217 )
4218 if sys.version_info < (3, 12, 5) and type_params:
4219 # Make use of type_params
4220 locals = dict(locals) if locals else {}
4221 for tvar in type_params:
                if tvar.__name__ not in locals:  # let's not overwrite something present
4223 locals[tvar.__name__] = tvar
4224 if sys.version_info < (3, 12, 5):
4225 return typing._eval_type(
4226 value,
4227 globals,
4228 locals,
4229 recursive_guard=_recursive_guard | {forward_ref.__forward_arg__},
4230 )
4231 else:
4232 return typing._eval_type(
4233 value,
4234 globals,
4235 locals,
4236 type_params,
4237 recursive_guard=_recursive_guard | {forward_ref.__forward_arg__},
4238 )
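    # A minimal sketch (comments only; ``Node`` is a hypothetical module-level
    # class) of resolving a forward reference against an owner's namespaces:
    #
    #     class Node:
    #         child: "Node"
    #
    #     evaluate_forward_ref(ForwardRef("Node"), owner=Node)
    #     # -> <class 'Node'>
    #
    #     evaluate_forward_ref(ForwardRef("NotDefinedYet"), owner=Node,
    #                          format=Format.FORWARDREF)
    #     # -> the ForwardRef is returned unchanged instead of raising NameError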
4239
4240
4241if sys.version_info >= (3, 14, 0, "beta"):
4242 type_repr = annotationlib.type_repr
4243else:
4244 def type_repr(value):
4245 """Convert a Python value to a format suitable for use with the STRING format.
4246
4247 This is intended as a helper for tools that support the STRING format but do
4248 not have access to the code that originally produced the annotations. It uses
4249 repr() for most objects.
4250
4251 """
4252 if isinstance(value, (type, _types.FunctionType, _types.BuiltinFunctionType)):
4253 if value.__module__ == "builtins":
4254 return value.__qualname__
4255 return f"{value.__module__}.{value.__qualname__}"
4256 if value is ...:
4257 return "..."
4258 return repr(value)
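    # A minimal sketch (comments only) of the rendering rules above:
    #
    #     type_repr(int)                       # 'int'
    #     type_repr(collections.OrderedDict)   # 'collections.OrderedDict'
    #     type_repr(len)                       # 'len'
    #     type_repr(...)                       # '...'
    #     type_repr("int")                     # "'int'"  (plain repr for strings)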
4259
4260
4261# Aliases for items that are in typing in all supported versions.
4262# We use hasattr() checks so this library will continue to import on
4263# future versions of Python that may remove these names.
4264_typing_names = [
4265 "AbstractSet",
4266 "AnyStr",
4267 "BinaryIO",
4268 "Callable",
4269 "Collection",
4270 "Container",
4271 "Dict",
4272 "FrozenSet",
4273 "Hashable",
4274 "IO",
4275 "ItemsView",
4276 "Iterable",
4277 "Iterator",
4278 "KeysView",
4279 "List",
4280 "Mapping",
4281 "MappingView",
4282 "Match",
4283 "MutableMapping",
4284 "MutableSequence",
4285 "MutableSet",
4286 "Optional",
4287 "Pattern",
4288 "Reversible",
4289 "Sequence",
4290 "Set",
4291 "Sized",
4292 "TextIO",
4293 "Tuple",
4294 "Union",
4295 "ValuesView",
4296 "cast",
4297 "no_type_check",
4298 "no_type_check_decorator",
4299 # This is private, but it was defined by typing_extensions for a long time
4300 # and some users rely on it.
4301 "_AnnotatedAlias",
4302]
4303globals().update(
4304 {name: getattr(typing, name) for name in _typing_names if hasattr(typing, name)}
4305)
4306# These are defined unconditionally because they are used in
4307# typing-extensions itself.
4308Generic = typing.Generic
4309ForwardRef = typing.ForwardRef
4310Annotated = typing.Annotated