Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/pydantic/main.py: 42%
Shortcuts on this page
r m x toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
Shortcuts on this page
r m x toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
1"""Logic for creating models."""
3# Because `dict` is in the local namespace of the `BaseModel` class, we use `Dict` for annotations.
4# TODO v3 fallback to `dict` when the deprecated `dict` method gets removed.
5# ruff: noqa: UP035
7from __future__ import annotations as _annotations
9import operator
10import sys
11import types
12import warnings
13from collections.abc import Generator, Mapping
14from copy import copy, deepcopy
15from functools import cached_property
16from typing import (
17 TYPE_CHECKING,
18 Any,
19 Callable,
20 ClassVar,
21 Dict,
22 Generic,
23 Literal,
24 TypeVar,
25 Union,
26 cast,
27 overload,
28)
30import pydantic_core
31import typing_extensions
32from pydantic_core import PydanticUndefined, ValidationError
33from typing_extensions import Self, TypeAlias, Unpack
35from . import PydanticDeprecatedSince20, PydanticDeprecatedSince211
36from ._internal import (
37 _config,
38 _decorators,
39 _fields,
40 _forward_ref,
41 _generics,
42 _mock_val_ser,
43 _model_construction,
44 _namespace_utils,
45 _repr,
46 _typing_extra,
47 _utils,
48)
49from ._migration import getattr_migration
50from .aliases import AliasChoices, AliasPath
51from .annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler
52from .config import ConfigDict, ExtraValues
53from .errors import PydanticUndefinedAnnotation, PydanticUserError
54from .json_schema import DEFAULT_REF_TEMPLATE, GenerateJsonSchema, JsonSchemaMode, JsonSchemaValue, model_json_schema
55from .plugin._schema_validator import PluggableSchemaValidator
57if TYPE_CHECKING:
58 from inspect import Signature
59 from pathlib import Path
61 from pydantic_core import CoreSchema, SchemaSerializer, SchemaValidator
63 from ._internal._fields import PydanticExtraInfo
64 from ._internal._namespace_utils import MappingNamespace
65 from ._internal._utils import AbstractSetIntStr, MappingIntStrAny
66 from .deprecated.parse import Protocol as DeprecatedParseProtocol
67 from .fields import ComputedFieldInfo, FieldInfo, ModelPrivateAttr
__all__ = 'BaseModel', 'create_model'

# Keep these type aliases available at runtime:
TupleGenerator: TypeAlias = Generator[tuple[str, Any], None, None]
# NOTE: In reality, `bool` should be replaced by `Literal[True]` but mypy fails to correctly apply bidirectional
# type inference (e.g. when using `{'a': {'b': True}}`):
# NOTE: Keep this type alias in sync with the stub definition in `pydantic-core`:
IncEx: TypeAlias = Union[set[int], set[str], Mapping[int, Union['IncEx', bool]], Mapping[str, Union['IncEx', bool]]]

# Shortcut used to set instance attributes directly, without going through the model's
# own `__setattr__` logic (used e.g. by `model_construct`).
_object_setattr = _model_construction.object_setattr
def _check_frozen(model_cls: type[BaseModel], name: str, value: Any) -> None:
    """Raise a `ValidationError` if assigning `name` on `model_cls` is forbidden.

    Assignment is forbidden either when the whole model is configured as frozen
    (`frozen_instance` error) or when the individual field is frozen (`frozen_field` error).
    Returns silently when the assignment is allowed.
    """
    if model_cls.model_config.get('frozen'):
        kind = 'frozen_instance'
    else:
        field = model_cls.__pydantic_fields__.get(name)
        if not getattr(field, 'frozen', False):
            # Neither the model nor the field is frozen — assignment is fine.
            return
        kind = 'frozen_field'

    raise ValidationError.from_exception_data(
        model_cls.__name__, [{'type': kind, 'loc': (name,), 'input': value}]
    )
95def _model_field_setattr_handler(model: BaseModel, name: str, val: Any) -> None:
96 model.__dict__[name] = val
97 model.__pydantic_fields_set__.add(name)
100def _private_setattr_handler(model: BaseModel, name: str, val: Any) -> None:
101 if getattr(model, '__pydantic_private__', None) is None:
102 # While the attribute should be present at this point, this may not be the case if
103 # users do unusual stuff with `model_post_init()` (which is where the `__pydantic_private__`
104 # is initialized, by wrapping the user-defined `model_post_init()`), e.g. if they mock
105 # the `model_post_init()` call. Ideally we should find a better way to init private attrs.
106 object.__setattr__(model, '__pydantic_private__', {})
107 model.__pydantic_private__[name] = val # pyright: ignore[reportOptionalSubscript]
# Simple `__setattr__` handlers, keyed by the kind of attribute being assigned.
# Handlers are memoized per model class (see `BaseModel.__pydantic_setattr_handlers__`)
# to avoid re-deciding the assignment strategy on every `__setattr__` call.
_SIMPLE_SETATTR_HANDLERS: Mapping[str, Callable[[BaseModel, str, Any], None]] = {
    'model_field': _model_field_setattr_handler,
    # Delegates to the core validator's `validate_assignment`:
    'validate_assignment': lambda model, name, val: model.__pydantic_validator__.validate_assignment(model, name, val),  # pyright: ignore[reportAssignmentType]
    'private': _private_setattr_handler,
    # A `cached_property` value is stored directly in the instance `__dict__`:
    'cached_property': lambda model, name, val: model.__dict__.__setitem__(name, val),
    'extra_known': lambda model, name, val: _object_setattr(model, name, val),
}
class BaseModel(metaclass=_model_construction.ModelMetaclass):
    """!!! abstract "Usage Documentation"
        [Models](../concepts/models.md)

    A base class for creating Pydantic models.

    Attributes:
        __class_vars__: The names of the class variables defined on the model.
        __private_attributes__: Metadata about the private attributes of the model.
        __signature__: The synthesized `__init__` [`Signature`][inspect.Signature] of the model.

        __pydantic_complete__: Whether model building is completed, or if there are still undefined fields.
        __pydantic_core_schema__: The core schema of the model.
        __pydantic_custom_init__: Whether the model has a custom `__init__` function.
        __pydantic_decorators__: Metadata containing the decorators defined on the model.
            This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1.
        __pydantic_generic_metadata__: A dictionary containing metadata about generic Pydantic models.
            The `origin` and `args` items map to the [`__origin__`][genericalias.__origin__]
            and [`__args__`][genericalias.__args__] attributes of [generic aliases][types-genericalias],
            and the `parameter` item maps to the `__parameter__` attribute of generic classes.
        __pydantic_parent_namespace__: Parent namespace of the model, used for automatic rebuilding of models.
        __pydantic_post_init__: The name of the post-init method for the model, if defined.
        __pydantic_root_model__: Whether the model is a [`RootModel`][pydantic.root_model.RootModel].
        __pydantic_serializer__: The `pydantic-core` `SchemaSerializer` used to dump instances of the model.
        __pydantic_validator__: The `pydantic-core` `SchemaValidator` used to validate instances of the model.

        __pydantic_fields__: A dictionary of field names and their corresponding [`FieldInfo`][pydantic.fields.FieldInfo] objects.
        __pydantic_computed_fields__: A dictionary of computed field names and their corresponding [`ComputedFieldInfo`][pydantic.fields.ComputedFieldInfo] objects.

        __pydantic_extra__: A dictionary containing extra values, if [`extra`][pydantic.config.ConfigDict.extra]
            is set to `'allow'`.
        __pydantic_fields_set__: The names of fields explicitly set during instantiation.
        __pydantic_private__: Values of private attributes set on the model instance.
    """

    # Note: Many of the below class vars are defined in the metaclass, but we define them here for type checking purposes.

    model_config: ClassVar[ConfigDict] = ConfigDict()
    """
    Configuration for the model, should be a dictionary conforming to [`ConfigDict`][pydantic.config.ConfigDict].
    """

    __class_vars__: ClassVar[set[str]]
    """The names of the class variables defined on the model."""

    __private_attributes__: ClassVar[Dict[str, ModelPrivateAttr]]  # noqa: UP006
    """Metadata about the private attributes of the model."""

    __signature__: ClassVar[Signature]
    """The synthesized `__init__` [`Signature`][inspect.Signature] of the model."""

    __pydantic_complete__: ClassVar[bool] = False
    """Whether model building is completed, or if there are still undefined fields."""

    __pydantic_core_schema__: ClassVar[CoreSchema]
    """The core schema of the model."""

    __pydantic_custom_init__: ClassVar[bool]
    """Whether the model has a custom `__init__` method."""

    # Must be set for `GenerateSchema.model_schema` to work for a plain `BaseModel` annotation.
    __pydantic_decorators__: ClassVar[_decorators.DecoratorInfos] = _decorators.DecoratorInfos()
    """Metadata containing the decorators defined on the model.
    This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1."""

    __pydantic_generic_metadata__: ClassVar[_generics.PydanticGenericMetadata]
    """A dictionary containing metadata about generic Pydantic models.

    The `origin` and `args` items map to the [`__origin__`][genericalias.__origin__]
    and [`__args__`][genericalias.__args__] attributes of [generic aliases][types-genericalias],
    and the `parameter` item maps to the `__parameter__` attribute of generic classes.
    """

    __pydantic_parent_namespace__: ClassVar[Dict[str, Any] | None] = None  # noqa: UP006
    """Parent namespace of the model, used for automatic rebuilding of models."""

    __pydantic_post_init__: ClassVar[None | Literal['model_post_init']]
    """The name of the post-init method for the model, if defined."""

    __pydantic_root_model__: ClassVar[bool] = False
    """Whether the model is a [`RootModel`][pydantic.root_model.RootModel]."""

    __pydantic_serializer__: ClassVar[SchemaSerializer]
    """The `pydantic-core` `SchemaSerializer` used to dump instances of the model."""

    __pydantic_validator__: ClassVar[SchemaValidator | PluggableSchemaValidator]
    """The `pydantic-core` `SchemaValidator` used to validate instances of the model."""

    __pydantic_fields__: ClassVar[Dict[str, FieldInfo]]  # noqa: UP006
    """A dictionary of field names and their corresponding [`FieldInfo`][pydantic.fields.FieldInfo] objects.
    This replaces `Model.__fields__` from Pydantic V1.
    """

    __pydantic_setattr_handlers__: ClassVar[Dict[str, Callable[[BaseModel, str, Any], None]]]  # noqa: UP006
    """`__setattr__` handlers. Memoizing the handlers leads to a dramatic performance improvement in `__setattr__`"""

    __pydantic_computed_fields__: ClassVar[Dict[str, ComputedFieldInfo]]  # noqa: UP006
    """A dictionary of computed field names and their corresponding [`ComputedFieldInfo`][pydantic.fields.ComputedFieldInfo] objects."""

    __pydantic_extra_info__: ClassVar[PydanticExtraInfo | None]
    """A wrapper around the `__pydantic_extra__` annotation, if explicitly annotated on a model.

    This is a private attribute, not meant to be used outside Pydantic.
    """

    __pydantic_extra__: Dict[str, Any] | None = _model_construction.NoInitField(init=False)  # noqa: UP006
    """A dictionary containing extra values, if [`extra`][pydantic.config.ConfigDict.extra] is set to `'allow'`."""

    __pydantic_fields_set__: set[str] = _model_construction.NoInitField(init=False)
    """The names of fields explicitly set during instantiation."""

    __pydantic_private__: Dict[str, Any] | None = _model_construction.NoInitField(init=False)  # noqa: UP006
    """Values of private attributes set on the model instance."""

    if not TYPE_CHECKING:
        # Prevent `BaseModel` from being instantiated directly
        # (defined in an `if not TYPE_CHECKING` block for clarity and to avoid type checking errors):
        __pydantic_core_schema__ = _mock_val_ser.MockCoreSchema(
            'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly',
            code='base-model-instantiated',
        )
        __pydantic_validator__ = _mock_val_ser.MockValSer(
            'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly',
            val_or_ser='validator',
            code='base-model-instantiated',
        )
        __pydantic_serializer__ = _mock_val_ser.MockValSer(
            'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly',
            val_or_ser='serializer',
            code='base-model-instantiated',
        )

    # `__dict__` is kept in `__slots__` so instances still get per-instance attribute storage
    # alongside the dedicated slots below.
    __slots__ = '__dict__', '__pydantic_fields_set__', '__pydantic_extra__', '__pydantic_private__'
253 def __init__(self, /, **data: Any) -> None:
254 """Create a new model by parsing and validating input data from keyword arguments.
256 Raises [`ValidationError`][pydantic_core.ValidationError] if the input data cannot be
257 validated to form a valid model.
259 `self` is explicitly positional-only to allow `self` as a field name.
260 """
261 # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
262 __tracebackhide__ = True
263 validated_self = self.__pydantic_validator__.validate_python(data, self_instance=self)
264 if self is not validated_self:
265 warnings.warn(
266 'A custom validator is returning a value other than `self`.\n'
267 "Returning anything other than `self` from a top level model validator isn't supported when validating via `__init__`.\n"
268 'See the `model_validator` docs (https://docs.pydantic.dev/latest/concepts/validators/#model-validators) for more details.',
269 stacklevel=2,
270 )
272 # The following line sets a flag that we use to determine when `__init__` gets overridden by the user
273 __init__.__pydantic_base_init__ = True # pyright: ignore[reportFunctionMemberAccess]
275 @_utils.deprecated_instance_property
276 @classmethod
277 def model_fields(cls) -> dict[str, FieldInfo]:
278 """A mapping of field names to their respective [`FieldInfo`][pydantic.fields.FieldInfo] instances.
280 !!! warning
281 Accessing this attribute from a model instance is deprecated, and will not work in Pydantic V3.
282 Instead, you should access this attribute from the model class.
283 """
284 return getattr(cls, '__pydantic_fields__', {})
286 @_utils.deprecated_instance_property
287 @classmethod
288 def model_computed_fields(cls) -> dict[str, ComputedFieldInfo]:
289 """A mapping of computed field names to their respective [`ComputedFieldInfo`][pydantic.fields.ComputedFieldInfo] instances.
291 !!! warning
292 Accessing this attribute from a model instance is deprecated, and will not work in Pydantic V3.
293 Instead, you should access this attribute from the model class.
294 """
295 return getattr(cls, '__pydantic_computed_fields__', {})
297 @property
298 def model_extra(self) -> dict[str, Any] | None:
299 """Get extra fields set during validation.
301 Returns:
302 A dictionary of extra fields, or `None` if `config.extra` is not set to `"allow"`.
303 """
304 return self.__pydantic_extra__
306 @property
307 def model_fields_set(self) -> set[str]:
308 """Returns the set of fields that have been explicitly set on this model instance.
310 Returns:
311 A set of strings representing the fields that have been set,
312 i.e. that were not filled from defaults.
313 """
314 return self.__pydantic_fields_set__
    @classmethod
    def model_construct(cls, _fields_set: set[str] | None = None, **values: Any) -> Self:  # noqa: C901
        """Creates a new instance of the `Model` class with validated data.

        Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data.
        Default values are respected, but no other validation is performed.

        !!! note
            `model_construct()` generally respects the `model_config.extra` setting on the provided model.
            That is, if `model_config.extra == 'allow'`, then all extra passed values are added to the model instance's `__dict__`
            and `__pydantic_extra__` fields. If `model_config.extra == 'ignore'` (the default), then all extra passed values are ignored.
            Because no validation is performed with a call to `model_construct()`, having `model_config.extra == 'forbid'` does not result in
            an error if extra values are passed, but they will be ignored.

        Args:
            _fields_set: A set of field names that were originally explicitly set during instantiation. If provided,
                this is directly used for the [`model_fields_set`][pydantic.BaseModel.model_fields_set] attribute.
                Otherwise, the field names from the `values` argument will be used.
            values: Trusted or pre-validated data dictionary.

        Returns:
            A new instance of the `Model` class with validated data.
        """
        # Bypass `__init__` (and therefore validation) entirely:
        m = cls.__new__(cls)
        fields_values: dict[str, Any] = {}
        fields_set = set()

        for name, field in cls.__pydantic_fields__.items():
            # The field's `alias` is checked first. Consumed keys are `pop`ped from `values`,
            # so whatever remains afterwards can be treated as extra data below.
            if field.alias is not None and field.alias in values:
                fields_values[name] = values.pop(field.alias)
                fields_set.add(name)

            if (name not in fields_set) and (field.validation_alias is not None):
                validation_aliases: list[str | AliasPath] = (
                    field.validation_alias.choices
                    if isinstance(field.validation_alias, AliasChoices)
                    else [field.validation_alias]
                )

                for alias in validation_aliases:
                    if isinstance(alias, str) and alias in values:
                        fields_values[name] = values.pop(alias)
                        fields_set.add(name)
                        break
                    elif isinstance(alias, AliasPath):
                        # `AliasPath` values are looked up inside nested structures (not popped):
                        value = alias.search_dict_for_path(values)
                        if value is not PydanticUndefined:
                            fields_values[name] = value
                            fields_set.add(name)
                            break

            if name not in fields_set:
                if name in values:
                    fields_values[name] = values.pop(name)
                    fields_set.add(name)
                elif not field.is_required():
                    # Defaults are applied but do not count as explicitly-set fields:
                    fields_values[name] = field.get_default(call_default_factory=True, validated_data=fields_values)
        if _fields_set is None:
            _fields_set = fields_set

        # Whatever is still in `values` is extra data, kept only when `extra='allow'`:
        _extra: dict[str, Any] | None = values if cls.model_config.get('extra') == 'allow' else None
        _object_setattr(m, '__dict__', fields_values)
        _object_setattr(m, '__pydantic_fields_set__', _fields_set)
        if not cls.__pydantic_root_model__:
            _object_setattr(m, '__pydantic_extra__', _extra)
            _object_setattr(m, '__pydantic_private__', None)

        if cls.__pydantic_post_init__:
            m.model_post_init(None)
            # update private attributes with values set
            if hasattr(m, '__pydantic_private__') and m.__pydantic_private__ is not None:
                for k, v in values.items():
                    if k in m.__private_attributes__:
                        m.__pydantic_private__[k] = v

        return m
393 def model_copy(self, *, update: Mapping[str, Any] | None = None, deep: bool = False) -> Self:
394 """!!! abstract "Usage Documentation"
395 [`model_copy`](../concepts/models.md#model-copy)
397 Returns a copy of the model.
399 !!! note
400 The underlying instance's [`__dict__`][object.__dict__] attribute is copied. This
401 might have unexpected side effects if you store anything in it, on top of the model
402 fields (e.g. the value of [cached properties][functools.cached_property]).
404 Args:
405 update: Values to change/add in the new model. Note: the data is not validated
406 before creating the new model. You should trust this data.
407 deep: Set to `True` to make a deep copy of the model.
409 Returns:
410 New model instance.
411 """
412 copied = self.__deepcopy__() if deep else self.__copy__()
413 if update:
414 if self.model_config.get('extra') == 'allow':
415 for k, v in update.items():
416 if k in self.__pydantic_fields__:
417 copied.__dict__[k] = v
418 else:
419 if copied.__pydantic_extra__ is None:
420 copied.__pydantic_extra__ = {}
421 copied.__pydantic_extra__[k] = v
422 else:
423 copied.__dict__.update(update)
424 copied.__pydantic_fields_set__.update(update.keys())
425 return copied
427 def model_dump(
428 self,
429 *,
430 mode: Literal['json', 'python'] | str = 'python',
431 include: IncEx | None = None,
432 exclude: IncEx | None = None,
433 context: Any | None = None,
434 by_alias: bool | None = None,
435 exclude_unset: bool = False,
436 exclude_defaults: bool = False,
437 exclude_none: bool = False,
438 exclude_computed_fields: bool = False,
439 round_trip: bool = False,
440 warnings: bool | Literal['none', 'warn', 'error'] = True,
441 fallback: Callable[[Any], Any] | None = None,
442 serialize_as_any: bool = False,
443 polymorphic_serialization: bool | None = None,
444 ) -> dict[str, Any]:
445 """!!! abstract "Usage Documentation"
446 [`model_dump`](../concepts/serialization.md#python-mode)
448 Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
450 Args:
451 mode: The mode in which `to_python` should run.
452 If mode is 'json', the output will only contain JSON serializable types.
453 If mode is 'python', the output may contain non-JSON-serializable Python objects.
454 include: A set of fields to include in the output.
455 exclude: A set of fields to exclude from the output.
456 context: Additional context to pass to the serializer.
457 by_alias: Whether to use the field's alias in the dictionary key if defined.
458 exclude_unset: Whether to exclude fields that have not been explicitly set.
459 exclude_defaults: Whether to exclude fields that are set to their default value.
460 exclude_none: Whether to exclude fields that have a value of `None`.
461 exclude_computed_fields: Whether to exclude computed fields.
462 While this can be useful for round-tripping, it is usually recommended to use the dedicated
463 `round_trip` parameter instead.
464 round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
465 warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
466 "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError].
467 fallback: A function to call when an unknown value is encountered. If not provided,
468 a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised.
469 serialize_as_any: Whether to serialize fields with duck-typing serialization behavior.
470 polymorphic_serialization: Whether to use model and dataclass polymorphic serialization for this call.
472 Returns:
473 A dictionary representation of the model.
474 """
475 return self.__pydantic_serializer__.to_python(
476 self,
477 mode=mode,
478 by_alias=by_alias,
479 include=include,
480 exclude=exclude,
481 context=context,
482 exclude_unset=exclude_unset,
483 exclude_defaults=exclude_defaults,
484 exclude_none=exclude_none,
485 exclude_computed_fields=exclude_computed_fields,
486 round_trip=round_trip,
487 warnings=warnings,
488 fallback=fallback,
489 serialize_as_any=serialize_as_any,
490 polymorphic_serialization=polymorphic_serialization,
491 )
493 def model_dump_json(
494 self,
495 *,
496 indent: int | None = None,
497 ensure_ascii: bool = False,
498 include: IncEx | None = None,
499 exclude: IncEx | None = None,
500 context: Any | None = None,
501 by_alias: bool | None = None,
502 exclude_unset: bool = False,
503 exclude_defaults: bool = False,
504 exclude_none: bool = False,
505 exclude_computed_fields: bool = False,
506 round_trip: bool = False,
507 warnings: bool | Literal['none', 'warn', 'error'] = True,
508 fallback: Callable[[Any], Any] | None = None,
509 serialize_as_any: bool = False,
510 polymorphic_serialization: bool | None = None,
511 ) -> str:
512 """!!! abstract "Usage Documentation"
513 [`model_dump_json`](../concepts/serialization.md#json-mode)
515 Generates a JSON representation of the model using Pydantic's `to_json` method.
517 Args:
518 indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
519 ensure_ascii: If `True`, the output is guaranteed to have all incoming non-ASCII characters escaped.
520 If `False` (the default), these characters will be output as-is.
521 include: Field(s) to include in the JSON output.
522 exclude: Field(s) to exclude from the JSON output.
523 context: Additional context to pass to the serializer.
524 by_alias: Whether to serialize using field aliases.
525 exclude_unset: Whether to exclude fields that have not been explicitly set.
526 exclude_defaults: Whether to exclude fields that are set to their default value.
527 exclude_none: Whether to exclude fields that have a value of `None`.
528 exclude_computed_fields: Whether to exclude computed fields.
529 While this can be useful for round-tripping, it is usually recommended to use the dedicated
530 `round_trip` parameter instead.
531 round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
532 warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
533 "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError].
534 fallback: A function to call when an unknown value is encountered. If not provided,
535 a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised.
536 serialize_as_any: Whether to serialize fields with duck-typing serialization behavior.
537 polymorphic_serialization: Whether to use model and dataclass polymorphic serialization for this call.
539 Returns:
540 A JSON string representation of the model.
541 """
542 return self.__pydantic_serializer__.to_json(
543 self,
544 indent=indent,
545 ensure_ascii=ensure_ascii,
546 include=include,
547 exclude=exclude,
548 context=context,
549 by_alias=by_alias,
550 exclude_unset=exclude_unset,
551 exclude_defaults=exclude_defaults,
552 exclude_none=exclude_none,
553 exclude_computed_fields=exclude_computed_fields,
554 round_trip=round_trip,
555 warnings=warnings,
556 fallback=fallback,
557 serialize_as_any=serialize_as_any,
558 polymorphic_serialization=polymorphic_serialization,
559 ).decode()
561 @classmethod
562 def model_json_schema(
563 cls,
564 by_alias: bool = True,
565 ref_template: str = DEFAULT_REF_TEMPLATE,
566 schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
567 mode: JsonSchemaMode = 'validation',
568 *,
569 union_format: Literal['any_of', 'primitive_type_array'] = 'any_of',
570 ) -> dict[str, Any]:
571 """Generates a JSON schema for a model class.
573 Args:
574 by_alias: Whether to use attribute aliases or not.
575 ref_template: The reference template.
576 union_format: The format to use when combining schemas from unions together. Can be one of:
578 - `'any_of'`: Use the [`anyOf`](https://json-schema.org/understanding-json-schema/reference/combining#anyOf)
579 keyword to combine schemas (the default).
580 - `'primitive_type_array'`: Use the [`type`](https://json-schema.org/understanding-json-schema/reference/type)
581 keyword as an array of strings, containing each type of the combination. If any of the schemas is not a primitive
582 type (`string`, `boolean`, `null`, `integer` or `number`) or contains constraints/metadata, falls back to
583 `any_of`.
584 schema_generator: To override the logic used to generate the JSON schema, as a subclass of
585 `GenerateJsonSchema` with your desired modifications
586 mode: The mode in which to generate the schema.
588 Returns:
589 The JSON schema for the given model class.
590 """
591 return model_json_schema(
592 cls,
593 by_alias=by_alias,
594 ref_template=ref_template,
595 union_format=union_format,
596 schema_generator=schema_generator,
597 mode=mode,
598 )
600 @classmethod
601 def model_parametrized_name(cls, params: tuple[type[Any], ...]) -> str:
602 """Compute the class name for parametrizations of generic classes.
604 This method can be overridden to achieve a custom naming scheme for generic BaseModels.
606 Args:
607 params: Tuple of types of the class. Given a generic class
608 `Model` with 2 type variables and a concrete model `Model[str, int]`,
609 the value `(str, int)` would be passed to `params`.
611 Returns:
612 String representing the new class where `params` are passed to `cls` as type variables.
614 Raises:
615 TypeError: Raised when trying to generate concrete names for non-generic models.
616 """
617 if not issubclass(cls, Generic):
618 raise TypeError('Concrete names should only be generated for generic models.')
620 # Any strings received should represent forward references, so we handle them specially below.
621 # If we eventually move toward wrapping them in a ForwardRef in __class_getitem__ in the future,
622 # we may be able to remove this special case.
623 param_names = [param if isinstance(param, str) else _repr.display_as_type(param) for param in params]
624 params_component = ', '.join(param_names)
625 return f'{cls.__name__}[{params_component}]'
    def model_post_init(self, context: Any, /) -> None:
        """Override this method to perform additional initialization after `__init__` and `model_construct`.

        This is useful if you want to do some validation that requires the entire model to be initialized.
        The base implementation is a no-op.

        Args:
            context: The context object supplied by the caller (`model_construct` passes `None`).
        """
    @classmethod
    def model_rebuild(
        cls,
        *,
        force: bool = False,
        raise_errors: bool = True,
        _parent_namespace_depth: int = 2,
        _types_namespace: MappingNamespace | None = None,
    ) -> bool | None:
        """Try to rebuild the pydantic-core schema for the model.

        This may be necessary when one of the annotations is a ForwardRef which could not be resolved during
        the initial attempt to build the schema, and automatic rebuilding fails.

        Args:
            force: Whether to force the rebuilding of the model schema, defaults to `False`.
            raise_errors: Whether to raise errors, defaults to `True`.
            _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
            _types_namespace: The types namespace, defaults to `None`.

        Returns:
            Returns `None` if the schema is already "complete" and rebuilding was not required.
            If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`.
        """
        already_complete = cls.__pydantic_complete__
        if already_complete and not force:
            # Schema was built successfully before and no forced rebuild was requested.
            return None

        cls.__pydantic_complete__ = False

        for attr in ('__pydantic_core_schema__', '__pydantic_validator__', '__pydantic_serializer__'):
            if attr in cls.__dict__ and not isinstance(getattr(cls, attr), _mock_val_ser.MockValSer):
                # Deleting the validator/serializer is necessary as otherwise they can get reused in
                # pydantic-core. We do so only if they aren't mock instances, otherwise — as `model_rebuild()`
                # isn't thread-safe — concurrent model instantiations can lead to the parent validator being used.
                # Same applies for the core schema that can be reused in schema generation.
                delattr(cls, attr)

        # Resolve the namespace used to evaluate forward references: an explicit override wins,
        # else the caller's frame namespace, else nothing.
        if _types_namespace is not None:
            rebuild_ns = _types_namespace
        elif _parent_namespace_depth > 0:
            rebuild_ns = _typing_extra.parent_frame_namespace(parent_depth=_parent_namespace_depth, force=True) or {}
        else:
            rebuild_ns = {}

        parent_ns = _model_construction.unpack_lenient_weakvaluedict(cls.__pydantic_parent_namespace__) or {}

        # `parent_ns` is merged last so the model's recorded parent namespace takes precedence:
        ns_resolver = _namespace_utils.NsResolver(
            parent_namespace={**rebuild_ns, **parent_ns},
        )

        return _model_construction.complete_model_class(
            cls,
            _config.ConfigWrapper(cls.model_config, check=False),
            ns_resolver,
            raise_errors=raise_errors,
            # If the model was already complete, we don't need to call the hook again.
            call_on_complete_hook=not already_complete,
            is_force_rebuild=force,
        )
693 @classmethod
694 def model_validate(
695 cls,
696 obj: Any,
697 *,
698 strict: bool | None = None,
699 extra: ExtraValues | None = None,
700 from_attributes: bool | None = None,
701 context: Any | None = None,
702 by_alias: bool | None = None,
703 by_name: bool | None = None,
704 ) -> Self:
705 """Validate a pydantic model instance.
707 Args:
708 obj: The object to validate.
709 strict: Whether to enforce types strictly.
710 extra: Whether to ignore, allow, or forbid extra data during model validation.
711 See the [`extra` configuration value][pydantic.ConfigDict.extra] for details.
712 from_attributes: Whether to extract data from object attributes.
713 context: Additional context to pass to the validator.
714 by_alias: Whether to use the field's alias when validating against the provided input data.
715 by_name: Whether to use the field's name when validating against the provided input data.
717 Raises:
718 ValidationError: If the object could not be validated.
720 Returns:
721 The validated model instance.
722 """
723 # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
724 __tracebackhide__ = True
726 if by_alias is False and by_name is not True:
727 raise PydanticUserError(
728 'At least one of `by_alias` or `by_name` must be set to True.',
729 code='validate-by-alias-and-name-false',
730 )
732 return cls.__pydantic_validator__.validate_python(
733 obj,
734 strict=strict,
735 extra=extra,
736 from_attributes=from_attributes,
737 context=context,
738 by_alias=by_alias,
739 by_name=by_name,
740 )
742 @classmethod
743 def model_validate_json(
744 cls,
745 json_data: str | bytes | bytearray,
746 *,
747 strict: bool | None = None,
748 extra: ExtraValues | None = None,
749 context: Any | None = None,
750 by_alias: bool | None = None,
751 by_name: bool | None = None,
752 ) -> Self:
753 """!!! abstract "Usage Documentation"
754 [JSON Parsing](../concepts/json.md#json-parsing)
756 Validate the given JSON data against the Pydantic model.
758 Args:
759 json_data: The JSON data to validate.
760 strict: Whether to enforce types strictly.
761 extra: Whether to ignore, allow, or forbid extra data during model validation.
762 See the [`extra` configuration value][pydantic.ConfigDict.extra] for details.
763 context: Extra variables to pass to the validator.
764 by_alias: Whether to use the field's alias when validating against the provided input data.
765 by_name: Whether to use the field's name when validating against the provided input data.
767 Returns:
768 The validated Pydantic model.
770 Raises:
771 ValidationError: If `json_data` is not a JSON string or the object could not be validated.
772 """
773 # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
774 __tracebackhide__ = True
776 if by_alias is False and by_name is not True:
777 raise PydanticUserError(
778 'At least one of `by_alias` or `by_name` must be set to True.',
779 code='validate-by-alias-and-name-false',
780 )
782 return cls.__pydantic_validator__.validate_json(
783 json_data, strict=strict, extra=extra, context=context, by_alias=by_alias, by_name=by_name
784 )
786 @classmethod
787 def model_validate_strings(
788 cls,
789 obj: Any,
790 *,
791 strict: bool | None = None,
792 extra: ExtraValues | None = None,
793 context: Any | None = None,
794 by_alias: bool | None = None,
795 by_name: bool | None = None,
796 ) -> Self:
797 """Validate the given object with string data against the Pydantic model.
799 Args:
800 obj: The object containing string data to validate.
801 strict: Whether to enforce types strictly.
802 extra: Whether to ignore, allow, or forbid extra data during model validation.
803 See the [`extra` configuration value][pydantic.ConfigDict.extra] for details.
804 context: Extra variables to pass to the validator.
805 by_alias: Whether to use the field's alias when validating against the provided input data.
806 by_name: Whether to use the field's name when validating against the provided input data.
808 Returns:
809 The validated Pydantic model.
810 """
811 # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
812 __tracebackhide__ = True
814 if by_alias is False and by_name is not True:
815 raise PydanticUserError(
816 'At least one of `by_alias` or `by_name` must be set to True.',
817 code='validate-by-alias-and-name-false',
818 )
820 return cls.__pydantic_validator__.validate_strings(
821 obj, strict=strict, extra=extra, context=context, by_alias=by_alias, by_name=by_name
822 )
824 @classmethod
825 def __get_pydantic_core_schema__(cls, source: type[BaseModel], handler: GetCoreSchemaHandler, /) -> CoreSchema:
826 # This warning is only emitted when calling `super().__get_pydantic_core_schema__` from a model subclass.
827 # In the generate schema logic, this method (`BaseModel.__get_pydantic_core_schema__`) is special cased to
828 # *not* be called if not overridden.
829 warnings.warn(
830 'The `__get_pydantic_core_schema__` method of the `BaseModel` class is deprecated. If you are calling '
831 '`super().__get_pydantic_core_schema__` when overriding the method on a Pydantic model, consider using '
832 '`handler(source)` instead. However, note that overriding this method on models can lead to unexpected '
833 'side effects.',
834 PydanticDeprecatedSince211,
835 stacklevel=2,
836 )
837 # Logic copied over from `GenerateSchema._model_schema`:
838 schema = cls.__dict__.get('__pydantic_core_schema__')
839 if schema is not None and not isinstance(schema, _mock_val_ser.MockCoreSchema):
840 return cls.__pydantic_core_schema__
842 return handler(source)
844 @classmethod
845 def __get_pydantic_json_schema__(
846 cls,
847 core_schema: CoreSchema,
848 handler: GetJsonSchemaHandler,
849 /,
850 ) -> JsonSchemaValue:
851 """Hook into generating the model's JSON schema.
853 Args:
854 core_schema: A `pydantic-core` CoreSchema.
855 You can ignore this argument and call the handler with a new CoreSchema,
856 wrap this CoreSchema (`{'type': 'nullable', 'schema': current_schema}`),
857 or just call the handler with the original schema.
858 handler: Call into Pydantic's internal JSON schema generation.
859 This will raise a `pydantic.errors.PydanticInvalidForJsonSchema` if JSON schema
860 generation fails.
861 Since this gets called by `BaseModel.model_json_schema` you can override the
862 `schema_generator` argument to that function to change JSON schema generation globally
863 for a type.
865 Returns:
866 A JSON schema, as a Python object.
867 """
868 return handler(core_schema)
    @classmethod
    def __pydantic_init_subclass__(cls, **kwargs: Any) -> None:
        """This is intended to behave just like `__init_subclass__`, but is called by `ModelMetaclass`
        only after basic class initialization is complete. In particular, attributes like `model_fields` will
        be present when this is called, but forward annotations are not guaranteed to be resolved yet,
        meaning that creating an instance of the class may fail.

        This is necessary because `__init_subclass__` will always be called by `type.__new__`,
        and it would require a prohibitively large refactor to the `ModelMetaclass` to ensure that
        `type.__new__` was called in such a manner that the class would already be sufficiently initialized.

        This will receive the same `kwargs` that would be passed to the standard `__init_subclass__`, namely,
        any kwargs passed to the class definition that aren't used internally by Pydantic.

        Args:
            **kwargs: Any keyword arguments passed to the class definition that aren't used internally
                by Pydantic.

        Note:
            You may want to override [`__pydantic_on_complete__()`][pydantic.main.BaseModel.__pydantic_on_complete__]
            instead, which is called once the class and its fields are fully initialized and ready for validation.
        """
    @classmethod
    def __pydantic_on_complete__(cls) -> None:
        """This is called once the class and its fields are fully initialized and ready to be used.

        This typically happens when the class is created (just before
        [`__pydantic_init_subclass__()`][pydantic.main.BaseModel.__pydantic_init_subclass__] is called on the superclass),
        except when forward annotations are used that could not immediately be resolved.
        In that case, it will be called later, when the model is rebuilt automatically or explicitly using
        [`model_rebuild()`][pydantic.main.BaseModel.model_rebuild].
        """
    def __class_getitem__(
        cls, typevar_values: type[Any] | tuple[type[Any], ...]
    ) -> type[BaseModel] | _forward_ref.PydanticRecursiveRef:
        """Parametrize a generic model (e.g. `Model[int]`), creating and caching a concrete submodel."""
        # Fast path: a previously-created parametrization may already be cached.
        cached = _generics.get_cached_generic_type_early(cls, typevar_values)
        if cached is not None:
            return cached

        # Reject classes that cannot be parametrized at all.
        if cls is BaseModel:
            raise TypeError('Type parameters should be placed on typing.Generic, not BaseModel')
        if not hasattr(cls, '__parameters__'):
            raise TypeError(f'{cls} cannot be parametrized because it does not inherit from typing.Generic')
        if not cls.__pydantic_generic_metadata__['parameters'] and Generic not in cls.__bases__:
            raise TypeError(f'{cls} is not a generic class')

        # Normalize `Model[int]` to the tuple form `Model[(int,)]`.
        if not isinstance(typevar_values, tuple):
            typevar_values = (typevar_values,)

        # For a model `class Model[T, U, V = int](BaseModel): ...` parametrized with `(str, bool)`,
        # this gives us `{T: str, U: bool, V: int}`:
        typevars_map = _generics.map_generic_model_arguments(cls, typevar_values)
        # We also update the provided args to use defaults values (`(str, bool)` becomes `(str, bool, int)`):
        typevar_values = tuple(v for v in typevars_map.values())

        if _utils.all_identical(typevars_map.keys(), typevars_map.values()) and typevars_map:
            submodel = cls  # if arguments are equal to parameters it's the same object
            _generics.set_cached_generic_type(cls, typevar_values, submodel)
        else:
            parent_args = cls.__pydantic_generic_metadata__['args']
            if not parent_args:
                args = typevar_values
            else:
                # Partially-parametrized parent: substitute the new values into its recorded args.
                args = tuple(_generics.replace_types(arg, typevars_map) for arg in parent_args)

            origin = cls.__pydantic_generic_metadata__['origin'] or cls
            model_name = origin.model_parametrized_name(args)
            params = tuple(
                dict.fromkeys(_generics.iter_contained_typevars(typevars_map.values()))
            )  # use dict as ordered set

            with _generics.generic_recursion_self_type(origin, args) as maybe_self_type:
                # Re-check the cache under the recursion guard before building anything.
                cached = _generics.get_cached_generic_type_late(cls, typevar_values, origin, args)
                if cached is not None:
                    return cached

                if maybe_self_type is not None:
                    # Recursive self-reference to the type currently being parametrized.
                    return maybe_self_type

                # Attempt to rebuild the origin in case new types have been defined
                try:
                    # depth 2 gets you above this __class_getitem__ call.
                    # Note that we explicitly provide the parent ns, otherwise
                    # `model_rebuild` will use the parent ns no matter if it is the ns of a module.
                    # We don't want this here, as this has unexpected effects when a model
                    # is being parametrized during a forward annotation evaluation.
                    parent_ns = _typing_extra.parent_frame_namespace(parent_depth=2) or {}
                    origin.model_rebuild(_types_namespace=parent_ns)
                except PydanticUndefinedAnnotation:
                    # It's okay if it fails, it just means there are still undefined types
                    # that could be evaluated later.
                    pass

                submodel = _generics.create_generic_submodel(model_name, origin, args, params)

                _generics.set_cached_generic_type(cls, typevar_values, submodel, origin, args)

        return submodel
971 def __copy__(self) -> Self:
972 """Returns a shallow copy of the model."""
973 cls = type(self)
974 m = cls.__new__(cls)
975 _object_setattr(m, '__dict__', copy(self.__dict__))
976 _object_setattr(m, '__pydantic_extra__', copy(self.__pydantic_extra__))
977 _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__))
979 if not hasattr(self, '__pydantic_private__') or self.__pydantic_private__ is None:
980 _object_setattr(m, '__pydantic_private__', None)
981 else:
982 _object_setattr(
983 m,
984 '__pydantic_private__',
985 {k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined},
986 )
988 return m
990 def __deepcopy__(self, memo: dict[int, Any] | None = None) -> Self:
991 """Returns a deep copy of the model."""
992 cls = type(self)
993 m = cls.__new__(cls)
994 _object_setattr(m, '__dict__', deepcopy(self.__dict__, memo=memo))
995 _object_setattr(m, '__pydantic_extra__', deepcopy(self.__pydantic_extra__, memo=memo))
996 # This next line doesn't need a deepcopy because __pydantic_fields_set__ is a set[str],
997 # and attempting a deepcopy would be marginally slower.
998 _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__))
1000 if not hasattr(self, '__pydantic_private__') or self.__pydantic_private__ is None:
1001 _object_setattr(m, '__pydantic_private__', None)
1002 else:
1003 _object_setattr(
1004 m,
1005 '__pydantic_private__',
1006 deepcopy({k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined}, memo=memo),
1007 )
1009 return m
    if not TYPE_CHECKING:
        # We put `__getattr__` in a non-TYPE_CHECKING block because otherwise, mypy allows arbitrary attribute access.
        # The same goes for `__setattr__` and `__delattr__`, see: https://github.com/pydantic/pydantic/issues/8643
        def __getattr__(self, item: str) -> Any:
            """Fallback lookup for private attributes and `extra='allow'` fields.

            Only invoked when normal attribute lookup fails (i.e. `item` is neither in the
            instance `__dict__` nor found on the class).
            """
            private_attributes = object.__getattribute__(self, '__private_attributes__')
            if item in private_attributes:
                attribute = private_attributes[item]
                if hasattr(attribute, '__get__'):
                    # Descriptor-backed private attribute: delegate to its `__get__`.
                    return attribute.__get__(self, type(self))  # type: ignore

                try:
                    # Note: self.__pydantic_private__ cannot be None if self.__private_attributes__ has items
                    return self.__pydantic_private__[item]  # type: ignore
                except KeyError as exc:
                    raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') from exc
            else:
                # `__pydantic_extra__` can fail to be set if the model is not yet fully initialized.
                # See `BaseModel.__repr_args__` for more details
                try:
                    pydantic_extra = object.__getattribute__(self, '__pydantic_extra__')
                except AttributeError:
                    pydantic_extra = None

                if pydantic_extra and item in pydantic_extra:
                    return pydantic_extra[item]
                else:
                    if hasattr(self.__class__, item):
                        return super().__getattribute__(item)  # Raises AttributeError if appropriate
                    else:
                        # this is the current error
                        raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}')
1044 def __setattr__(self, name: str, value: Any) -> None:
1045 if (setattr_handler := self.__pydantic_setattr_handlers__.get(name)) is not None:
1046 setattr_handler(self, name, value)
1047 # if None is returned from _setattr_handler, the attribute was set directly
1048 elif (setattr_handler := self._setattr_handler(name, value)) is not None:
1049 setattr_handler(self, name, value) # call here to not memo on possibly unknown fields
1050 self.__pydantic_setattr_handlers__[name] = setattr_handler # memoize the handler for faster access
        def _setattr_handler(self, name: str, value: Any) -> Callable[[BaseModel, str, Any], None] | None:
            """Get a handler for setting an attribute on the model instance.

            Returns:
                A handler for setting an attribute on the model instance. Used for memoization of the handler.
                Memoizing the handlers leads to a dramatic performance improvement in `__setattr__`
                Returns `None` when memoization is not safe, then the attribute is set directly.
            """
            cls = self.__class__
            if name in cls.__class_vars__:
                raise AttributeError(
                    f'{name!r} is a ClassVar of `{cls.__name__}` and cannot be set on an instance. '
                    f'If you want to set a value on the class, use `{cls.__name__}.{name} = value`.'
                )
            elif not _fields.is_valid_field_name(name):
                # Underscore-prefixed names: either declared private attributes or freeform internals.
                if (attribute := cls.__private_attributes__.get(name)) is not None:
                    if hasattr(attribute, '__set__'):
                        # Descriptor-backed private attribute: delegate to its `__set__`.
                        return lambda model, _name, val: attribute.__set__(model, val)
                    else:
                        return _SIMPLE_SETATTR_HANDLERS['private']
                else:
                    _object_setattr(self, name, value)
                    return None  # Can not return memoized handler with possibly freeform attr names

            attr = getattr(cls, name, None)
            # NOTE: We currently special case properties and `cached_property`, but we might need
            # to generalize this to all data/non-data descriptors at some point. For non-data descriptors
            # (such as `cached_property`), it isn't obvious though. `cached_property` caches the value
            # to the instance's `__dict__`, but other non-data descriptors might do things differently.
            if isinstance(attr, cached_property):
                return _SIMPLE_SETATTR_HANDLERS['cached_property']

            # Raises if the model (or this field) is frozen; checked *after* the cached_property
            # case so cached properties remain settable on frozen models.
            _check_frozen(cls, name, value)

            # We allow properties to be set only on non frozen models for now (to match dataclasses).
            # This can be changed if it ever gets requested.
            if isinstance(attr, property):
                return lambda model, _name, val: attr.__set__(model, val)
            elif cls.model_config.get('validate_assignment'):
                return _SIMPLE_SETATTR_HANDLERS['validate_assignment']
            elif name not in cls.__pydantic_fields__:
                if cls.model_config.get('extra') != 'allow':
                    # TODO - matching error
                    raise ValueError(f'"{cls.__name__}" object has no field "{name}"')
                elif attr is None:
                    # attribute does not exist, so put it in extra
                    self.__pydantic_extra__[name] = value
                    self.__pydantic_fields_set__.add(name)
                    return None  # Can not return memoized handler with possibly freeform attr names
                else:
                    # attribute _does_ exist, and was not in extra, so update it
                    return _SIMPLE_SETATTR_HANDLERS['extra_known']
            else:
                return _SIMPLE_SETATTR_HANDLERS['model_field']
        def __delattr__(self, item: str) -> Any:
            """Delete an attribute, checking private attributes, cached properties, fields and extras in order."""
            cls = self.__class__

            if item in self.__private_attributes__:
                attribute = self.__private_attributes__[item]
                if hasattr(attribute, '__delete__'):
                    # Descriptor-backed private attribute: delegate to its `__delete__`.
                    attribute.__delete__(self)  # type: ignore
                    return

                try:
                    # Note: self.__pydantic_private__ cannot be None if self.__private_attributes__ has items
                    del self.__pydantic_private__[item]  # type: ignore
                    return
                except KeyError as exc:
                    raise AttributeError(f'{cls.__name__!r} object has no attribute {item!r}') from exc

            # Allow cached properties to be deleted (even if the class is frozen):
            attr = getattr(cls, item, None)
            if isinstance(attr, cached_property):
                return object.__delattr__(self, item)

            # Raises if the model (or this field) is frozen.
            _check_frozen(cls, name=item, value=None)

            if item in self.__pydantic_fields__:
                object.__delattr__(self, item)
            elif self.__pydantic_extra__ is not None and item in self.__pydantic_extra__:
                del self.__pydantic_extra__[item]
            else:
                try:
                    object.__delattr__(self, item)
                except AttributeError:
                    raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}')
        # Because we make use of `@dataclass_transform()`, `__replace__` is already synthesized by
        # type checkers, so we define the implementation in this `if not TYPE_CHECKING:` block:
        def __replace__(self, **changes: Any) -> Self:
            """Support `copy.replace()` by delegating to `model_copy`."""
            return self.model_copy(update=changes)
1145 def __getstate__(self) -> dict[Any, Any]:
1146 private = self.__pydantic_private__
1147 if private:
1148 private = {k: v for k, v in private.items() if v is not PydanticUndefined}
1149 return {
1150 '__dict__': self.__dict__,
1151 '__pydantic_extra__': self.__pydantic_extra__,
1152 '__pydantic_fields_set__': self.__pydantic_fields_set__,
1153 '__pydantic_private__': private,
1154 }
1156 def __setstate__(self, state: dict[Any, Any]) -> None:
1157 _object_setattr(self, '__pydantic_fields_set__', state.get('__pydantic_fields_set__', {}))
1158 _object_setattr(self, '__pydantic_extra__', state.get('__pydantic_extra__', {}))
1159 _object_setattr(self, '__pydantic_private__', state.get('__pydantic_private__', {}))
1160 _object_setattr(self, '__dict__', state.get('__dict__', {}))
    if not TYPE_CHECKING:

        def __eq__(self, other: Any) -> bool:
            """Compare two models: same (generic origin) type, equal private/extra state, equal field values."""
            if isinstance(other, BaseModel):
                # When comparing instances of generic types for equality, as long as all field values are equal,
                # only require their generic origin types to be equal, rather than exact type equality.
                # This prevents headaches like MyGeneric(x=1) != MyGeneric[Any](x=1).
                self_type = self.__pydantic_generic_metadata__['origin'] or self.__class__
                other_type = other.__pydantic_generic_metadata__['origin'] or other.__class__

                # Perform common checks first
                if not (
                    self_type is other_type
                    and getattr(self, '__pydantic_private__', None) == getattr(other, '__pydantic_private__', None)
                    # We need to assume `None` and `{}` are equivalent, because extra behavior
                    # can be controlled at validation time:
                    and (self.__pydantic_extra__ or {}) == (other.__pydantic_extra__ or {})
                ):
                    return False

                # We only want to compare pydantic fields but ignoring fields is costly.
                # We'll perform a fast check first, and fallback only when needed
                # See GH-7444 and GH-7825 for rationale and a performance benchmark

                # First, do the fast (and sometimes faulty) __dict__ comparison
                if self.__dict__ == other.__dict__:
                    # If the check above passes, then pydantic fields are equal, we can return early
                    return True

                # We don't want to trigger unnecessary costly filtering of __dict__ on all unequal objects, so we return
                # early if there are no keys to ignore (we would just return False later on anyway)
                model_fields = type(self).__pydantic_fields__.keys()
                if self.__dict__.keys() <= model_fields and other.__dict__.keys() <= model_fields:
                    return False

                # If we reach here, there are non-pydantic-fields keys, mapped to unequal values, that we need to ignore
                # Resort to costly filtering of the __dict__ objects
                # We use operator.itemgetter because it is much faster than dict comprehensions
                # NOTE: Contrary to standard python class and instances, when the Model class has a default value for an
                # attribute and the model instance doesn't have a corresponding attribute, accessing the missing attribute
                # raises an error in BaseModel.__getattr__ instead of returning the class attribute
                # So we can use operator.itemgetter() instead of operator.attrgetter()
                getter = operator.itemgetter(*model_fields) if model_fields else lambda _: _utils._SENTINEL
                try:
                    return getter(self.__dict__) == getter(other.__dict__)
                except KeyError:
                    # In rare cases (such as when using the deprecated BaseModel.copy() method),
                    # the __dict__ may not contain all model fields, which is how we can get here.
                    # getter(self.__dict__) is much faster than any 'safe' method that accounts
                    # for missing keys, and wrapping it in a `try` doesn't slow things down much
                    # in the common case.
                    self_fields_proxy = _utils.SafeGetItemProxy(self.__dict__)
                    other_fields_proxy = _utils.SafeGetItemProxy(other.__dict__)
                    return getter(self_fields_proxy) == getter(other_fields_proxy)

            # other instance is not a BaseModel
            else:
                return NotImplemented  # delegate to the other item in the comparison
    if TYPE_CHECKING:
        # We put `__init_subclass__` in a TYPE_CHECKING block because, even though we want the type-checking benefits
        # described in the signature of `__init_subclass__` below, we don't want to modify the default behavior of
        # subclass initialization.

        def __init_subclass__(cls, **kwargs: Unpack[ConfigDict]):
            """This signature is included purely to help type-checkers check arguments to class declaration, which
            provides a way to conveniently set model_config key/value pairs.

            ```python
            from pydantic import BaseModel

            class MyModel(BaseModel, extra='allow'): ...
            ```

            However, this may be deceiving, since the _actual_ calls to `__init_subclass__` will not receive any
            of the config arguments, and will only receive any keyword arguments passed during class initialization
            that are _not_ expected keys in ConfigDict. (This is due to the way `ModelMetaclass.__new__` works.)

            Args:
                **kwargs: Keyword arguments passed to the class definition, which set model_config

            Note:
                You may want to override `__pydantic_init_subclass__` instead, which behaves similarly but is called
                *after* the class is fully initialized.
            """
1248 def __iter__(self) -> TupleGenerator:
1249 """So `dict(model)` works."""
1250 yield from [(k, v) for (k, v) in self.__dict__.items() if not k.startswith('_')]
1251 extra = self.__pydantic_extra__
1252 if extra:
1253 yield from extra.items()
    def __repr__(self) -> str:
        """Standard representation, e.g. `Model(x=1, y='a')`."""
        return f'{self.__repr_name__()}({self.__repr_str__(", ")})'
    def __repr_args__(self) -> _repr.ReprArgs:
        """Yield `(name, value)` pairs used to build the model's repr: fields, extras, then computed fields."""
        # Eagerly create the repr of computed fields, as this may trigger access of cached properties and as such
        # modify the instance's `__dict__`. If we don't do it now, it could happen when iterating over the `__dict__`
        # below if the instance happens to be referenced in a field, and would modify the `__dict__` size *during* iteration.
        computed_fields_repr_args = [
            (k, getattr(self, k)) for k, v in self.__pydantic_computed_fields__.items() if v.repr
        ]

        for k, v in self.__dict__.items():
            field = self.__pydantic_fields__.get(k)
            if field and field.repr:
                if v is not self:
                    yield k, v
                else:
                    # Self-referencing value: emit a recursion placeholder instead of recursing forever.
                    yield k, self.__repr_recursion__(v)
        # `__pydantic_extra__` can fail to be set if the model is not yet fully initialized.
        # This can happen if a `ValidationError` is raised during initialization and the instance's
        # repr is generated as part of the exception handling. Therefore, we use `getattr` here
        # with a fallback, even though the type hints indicate the attribute will always be present.
        try:
            pydantic_extra = object.__getattribute__(self, '__pydantic_extra__')
        except AttributeError:
            pydantic_extra = None

        if pydantic_extra is not None:
            yield from ((k, v) for k, v in pydantic_extra.items())
        yield from computed_fields_repr_args
    # Take the repr logic from `_repr.Representation` by aliasing its methods directly,
    # without the side effects of inheritance, see #5740
    __repr_name__ = _repr.Representation.__repr_name__
    __repr_recursion__ = _repr.Representation.__repr_recursion__
    __repr_str__ = _repr.Representation.__repr_str__
    __pretty__ = _repr.Representation.__pretty__
    __rich_repr__ = _repr.Representation.__rich_repr__
    def __str__(self) -> str:
        """Space-separated `str` form of the repr args (no class name, unlike `__repr__`)."""
        return self.__repr_str__(' ')
    # ##### Deprecated methods from v1 #####
    @property
    @typing_extensions.deprecated(
        'The `__fields__` attribute is deprecated, use the `model_fields` class property instead.', category=None
    )
    def __fields__(self) -> dict[str, FieldInfo]:
        """Deprecated v1 accessor for the model's fields; use `model_fields` instead."""
        warnings.warn(
            'The `__fields__` attribute is deprecated, use the `model_fields` class property instead.',
            category=PydanticDeprecatedSince20,
            stacklevel=2,
        )
        return getattr(type(self), '__pydantic_fields__', {})
    @property
    @typing_extensions.deprecated(
        'The `__fields_set__` attribute is deprecated, use `model_fields_set` instead.',
        category=None,
    )
    def __fields_set__(self) -> set[str]:
        """Deprecated v1 accessor for the set of explicitly-provided field names; use `model_fields_set` instead."""
        warnings.warn(
            'The `__fields_set__` attribute is deprecated, use `model_fields_set` instead.',
            category=PydanticDeprecatedSince20,
            stacklevel=2,
        )
        return self.__pydantic_fields_set__
1322 @typing_extensions.deprecated('The `dict` method is deprecated; use `model_dump` instead.', category=None)
1323 def dict( # noqa: D102
1324 self,
1325 *,
1326 include: IncEx | None = None,
1327 exclude: IncEx | None = None,
1328 by_alias: bool = False,
1329 exclude_unset: bool = False,
1330 exclude_defaults: bool = False,
1331 exclude_none: bool = False,
1332 ) -> Dict[str, Any]: # noqa UP006
1333 warnings.warn(
1334 'The `dict` method is deprecated; use `model_dump` instead.',
1335 category=PydanticDeprecatedSince20,
1336 stacklevel=2,
1337 )
1338 return self.model_dump(
1339 include=include,
1340 exclude=exclude,
1341 by_alias=by_alias,
1342 exclude_unset=exclude_unset,
1343 exclude_defaults=exclude_defaults,
1344 exclude_none=exclude_none,
1345 )
    @typing_extensions.deprecated('The `json` method is deprecated; use `model_dump_json` instead.', category=None)
    def json(  # noqa: D102
        self,
        *,
        include: IncEx | None = None,
        exclude: IncEx | None = None,
        by_alias: bool = False,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        encoder: Callable[[Any], Any] | None = PydanticUndefined,  # type: ignore[assignment]
        models_as_dict: bool = PydanticUndefined,  # type: ignore[assignment]
        **dumps_kwargs: Any,
    ) -> str:
        """Deprecated v1 API: warn, reject removed v1 options, then forward to `model_dump_json`."""
        warnings.warn(
            'The `json` method is deprecated; use `model_dump_json` instead.',
            category=PydanticDeprecatedSince20,
            stacklevel=2,
        )
        # The v1 customization hooks have no equivalent here; reject them explicitly
        # (`PydanticUndefined` is used as the "not provided" sentinel).
        if encoder is not PydanticUndefined:
            raise TypeError('The `encoder` argument is no longer supported; use field serializers instead.')
        if models_as_dict is not PydanticUndefined:
            raise TypeError('The `models_as_dict` argument is no longer supported; use a model serializer instead.')
        if dumps_kwargs:
            raise TypeError('`dumps_kwargs` keyword arguments are no longer supported.')
        return self.model_dump_json(
            include=include,
            exclude=exclude,
            by_alias=by_alias,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
        )
    @classmethod
    @typing_extensions.deprecated('The `parse_obj` method is deprecated; use `model_validate` instead.', category=None)
    def parse_obj(cls, obj: Any) -> Self:  # noqa: D102
        """Deprecated v1 alias: warn, then delegate to `model_validate`."""
        warnings.warn(
            'The `parse_obj` method is deprecated; use `model_validate` instead.',
            category=PydanticDeprecatedSince20,
            stacklevel=2,
        )
        return cls.model_validate(obj)
    @classmethod
    @typing_extensions.deprecated(
        'The `parse_raw` method is deprecated; if your data is JSON use `model_validate_json`, '
        'otherwise load the data then use `model_validate` instead.',
        category=None,
    )
    def parse_raw(  # noqa: D102
        cls,
        b: str | bytes,
        *,
        content_type: str | None = None,
        encoding: str = 'utf8',
        proto: DeprecatedParseProtocol | None = None,
        allow_pickle: bool = False,
    ) -> Self:  # pragma: no cover
        """Deprecated v1 API: parse raw `str`/`bytes` then validate; use `model_validate_json` instead."""
        warnings.warn(
            'The `parse_raw` method is deprecated; if your data is JSON use `model_validate_json`, '
            'otherwise load the data then use `model_validate` instead.',
            category=PydanticDeprecatedSince20,
            stacklevel=2,
        )
        from .deprecated import parse

        try:
            obj = parse.load_str_bytes(
                b,
                proto=proto,
                content_type=content_type,
                encoding=encoding,
                allow_pickle=allow_pickle,
            )
        except (ValueError, TypeError) as exc:
            import json

            # try to match V1 error types so callers see familiar error codes
            if isinstance(exc, UnicodeDecodeError):
                type_str = 'value_error.unicodedecode'
            elif isinstance(exc, json.JSONDecodeError):
                type_str = 'value_error.jsondecode'
            elif isinstance(exc, ValueError):
                type_str = 'value_error'
            else:
                type_str = 'type_error'

            # ctx is missing here, but since we've added `input` to the error, we're not pretending it's the same
            error: pydantic_core.InitErrorDetails = {
                # The type: ignore on the next line is to ignore the requirement of LiteralString
                'type': pydantic_core.PydanticCustomError(type_str, str(exc)),  # type: ignore
                'loc': ('__root__',),
                'input': b,
            }
            raise pydantic_core.ValidationError.from_exception_data(cls.__name__, [error])
        return cls.model_validate(obj)
1445 @classmethod
1446 @typing_extensions.deprecated(
1447 'The `parse_file` method is deprecated; load the data from file, then if your data is JSON '
1448 'use `model_validate_json`, otherwise `model_validate` instead.',
1449 category=None,
1450 )
1451 def parse_file( # noqa: D102
1452 cls,
1453 path: str | Path,
1454 *,
1455 content_type: str | None = None,
1456 encoding: str = 'utf8',
1457 proto: DeprecatedParseProtocol | None = None,
1458 allow_pickle: bool = False,
1459 ) -> Self:
1460 warnings.warn(
1461 'The `parse_file` method is deprecated; load the data from file, then if your data is JSON '
1462 'use `model_validate_json`, otherwise `model_validate` instead.',
1463 category=PydanticDeprecatedSince20,
1464 stacklevel=2,
1465 )
1466 from .deprecated import parse
1468 obj = parse.load_file(
1469 path,
1470 proto=proto,
1471 content_type=content_type,
1472 encoding=encoding,
1473 allow_pickle=allow_pickle,
1474 )
1475 return cls.parse_obj(obj)
    @classmethod
    @typing_extensions.deprecated(
        'The `from_orm` method is deprecated; set '
        "`model_config['from_attributes']=True` and use `model_validate` instead.",
        category=None,
    )
    def from_orm(cls, obj: Any) -> Self:  # noqa: D102
        """Deprecated v1 API: validate from object attributes; requires `from_attributes=True` in config."""
        warnings.warn(
            'The `from_orm` method is deprecated; set '
            "`model_config['from_attributes']=True` and use `model_validate` instead.",
            category=PydanticDeprecatedSince20,
            stacklevel=2,
        )
        # v1 `from_orm` required `orm_mode`; the v2 equivalent is the `from_attributes` config flag.
        if not cls.model_config.get('from_attributes', None):
            raise PydanticUserError(
                'You must set the config attribute `from_attributes=True` to use from_orm', code=None
            )
        return cls.model_validate(obj)
1496 @classmethod
1497 @typing_extensions.deprecated('The `construct` method is deprecated; use `model_construct` instead.', category=None)
1498 def construct(cls, _fields_set: set[str] | None = None, **values: Any) -> Self: # noqa: D102
1499 warnings.warn(
1500 'The `construct` method is deprecated; use `model_construct` instead.',
1501 category=PydanticDeprecatedSince20,
1502 stacklevel=2,
1503 )
1504 return cls.model_construct(_fields_set=_fields_set, **values)
    @typing_extensions.deprecated(
        'The `copy` method is deprecated; use `model_copy` instead. '
        'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.',
        category=None,
    )
    def copy(
        self,
        *,
        include: AbstractSetIntStr | MappingIntStrAny | None = None,
        exclude: AbstractSetIntStr | MappingIntStrAny | None = None,
        update: Dict[str, Any] | None = None,  # noqa UP006
        deep: bool = False,
    ) -> Self:  # pragma: no cover
        """Returns a copy of the model.

        !!! warning "Deprecated"
            This method is now deprecated; use `model_copy` instead.

        If you need `include` or `exclude`, use:

        ```python {test="skip" lint="skip"}
        data = self.model_dump(include=include, exclude=exclude, round_trip=True)
        data = {**data, **(update or {})}
        copied = self.model_validate(data)
        ```

        Args:
            include: Optional set or mapping specifying which fields to include in the copied model.
            exclude: Optional set or mapping specifying which fields to exclude in the copied model.
            update: Optional dictionary of field-value pairs to override field values in the copied model.
            deep: If True, the values of fields that are Pydantic models will be deep-copied.

        Returns:
            A copy of the model with included, excluded and updated fields as specified.
        """
        warnings.warn(
            'The `copy` method is deprecated; use `model_copy` instead. '
            'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.',
            category=PydanticDeprecatedSince20,
            stacklevel=2,
        )
        from .deprecated import copy_internals

        # Field values after include/exclude filtering, with `update` entries layered
        # on top (an `update` key wins over the iterated value for the same key).
        values = dict(
            copy_internals._iter(
                self, to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False
            ),
            **(update or {}),
        )
        # Private attributes are carried over as-is, dropping any that were never set.
        if self.__pydantic_private__ is None:
            private = None
        else:
            private = {k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined}

        # Extra fields need special handling: they live in `__pydantic_extra__`, not in
        # the regular field values, so they are moved out of `values` and back into
        # `extra` (respecting any exclusion applied above).
        if self.__pydantic_extra__ is None:
            extra: dict[str, Any] | None = None
        else:
            extra = self.__pydantic_extra__.copy()
            for k in list(self.__pydantic_extra__):
                if k not in values:  # k was in the exclude
                    extra.pop(k)
            for k in list(values):
                if k in self.__pydantic_extra__:  # k must have come from extra
                    extra[k] = values.pop(k)

        # new `__pydantic_fields_set__` can have unset optional fields with a set value in `update` kwarg
        if update:
            fields_set = self.__pydantic_fields_set__ | update.keys()
        else:
            fields_set = set(self.__pydantic_fields_set__)

        # removing excluded fields from `__pydantic_fields_set__`
        if exclude:
            fields_set -= set(exclude)

        return copy_internals._copy_and_set_values(self, values, fields_set, extra, private, deep=deep)
1583 @classmethod
1584 @typing_extensions.deprecated('The `schema` method is deprecated; use `model_json_schema` instead.', category=None)
1585 def schema( # noqa: D102
1586 cls, by_alias: bool = True, ref_template: str = DEFAULT_REF_TEMPLATE
1587 ) -> Dict[str, Any]: # noqa UP006
1588 warnings.warn(
1589 'The `schema` method is deprecated; use `model_json_schema` instead.',
1590 category=PydanticDeprecatedSince20,
1591 stacklevel=2,
1592 )
1593 return cls.model_json_schema(by_alias=by_alias, ref_template=ref_template)
1595 @classmethod
1596 @typing_extensions.deprecated(
1597 'The `schema_json` method is deprecated; use `model_json_schema` and json.dumps instead.',
1598 category=None,
1599 )
1600 def schema_json( # noqa: D102
1601 cls, *, by_alias: bool = True, ref_template: str = DEFAULT_REF_TEMPLATE, **dumps_kwargs: Any
1602 ) -> str: # pragma: no cover
1603 warnings.warn(
1604 'The `schema_json` method is deprecated; use `model_json_schema` and json.dumps instead.',
1605 category=PydanticDeprecatedSince20,
1606 stacklevel=2,
1607 )
1608 import json
1610 from .deprecated.json import pydantic_encoder
1612 return json.dumps(
1613 cls.model_json_schema(by_alias=by_alias, ref_template=ref_template),
1614 default=pydantic_encoder,
1615 **dumps_kwargs,
1616 )
1618 @classmethod
1619 @typing_extensions.deprecated('The `validate` method is deprecated; use `model_validate` instead.', category=None)
1620 def validate(cls, value: Any) -> Self: # noqa: D102
1621 warnings.warn(
1622 'The `validate` method is deprecated; use `model_validate` instead.',
1623 category=PydanticDeprecatedSince20,
1624 stacklevel=2,
1625 )
1626 return cls.model_validate(value)
1628 @classmethod
1629 @typing_extensions.deprecated(
1630 'The `update_forward_refs` method is deprecated; use `model_rebuild` instead.',
1631 category=None,
1632 )
1633 def update_forward_refs(cls, **localns: Any) -> None: # noqa: D102
1634 warnings.warn(
1635 'The `update_forward_refs` method is deprecated; use `model_rebuild` instead.',
1636 category=PydanticDeprecatedSince20,
1637 stacklevel=2,
1638 )
1639 if localns: # pragma: no cover
1640 raise TypeError('`localns` arguments are not longer accepted.')
1641 cls.model_rebuild(force=True)
1643 @typing_extensions.deprecated(
1644 'The private method `_iter` will be removed and should no longer be used.', category=None
1645 )
1646 def _iter(self, *args: Any, **kwargs: Any) -> Any:
1647 warnings.warn(
1648 'The private method `_iter` will be removed and should no longer be used.',
1649 category=PydanticDeprecatedSince20,
1650 stacklevel=2,
1651 )
1652 from .deprecated import copy_internals
1654 return copy_internals._iter(self, *args, **kwargs)
1656 @typing_extensions.deprecated(
1657 'The private method `_copy_and_set_values` will be removed and should no longer be used.',
1658 category=None,
1659 )
1660 def _copy_and_set_values(self, *args: Any, **kwargs: Any) -> Any:
1661 warnings.warn(
1662 'The private method `_copy_and_set_values` will be removed and should no longer be used.',
1663 category=PydanticDeprecatedSince20,
1664 stacklevel=2,
1665 )
1666 from .deprecated import copy_internals
1668 return copy_internals._copy_and_set_values(self, *args, **kwargs)
1670 @classmethod
1671 @typing_extensions.deprecated(
1672 'The private method `_get_value` will be removed and should no longer be used.',
1673 category=None,
1674 )
1675 def _get_value(cls, *args: Any, **kwargs: Any) -> Any:
1676 warnings.warn(
1677 'The private method `_get_value` will be removed and should no longer be used.',
1678 category=PydanticDeprecatedSince20,
1679 stacklevel=2,
1680 )
1681 from .deprecated import copy_internals
1683 return copy_internals._get_value(cls, *args, **kwargs)
1685 @typing_extensions.deprecated(
1686 'The private method `_calculate_keys` will be removed and should no longer be used.',
1687 category=None,
1688 )
1689 def _calculate_keys(self, *args: Any, **kwargs: Any) -> Any:
1690 warnings.warn(
1691 'The private method `_calculate_keys` will be removed and should no longer be used.',
1692 category=PydanticDeprecatedSince20,
1693 stacklevel=2,
1694 )
1695 from .deprecated import copy_internals
1697 return copy_internals._calculate_keys(self, *args, **kwargs)
# Type variable bound to `BaseModel`, used by the `create_model` overloads below so
# that passing `__base__=SomeModel` is typed as returning `type[SomeModel]`.
ModelT = TypeVar('ModelT', bound=BaseModel)
# Overload: no explicit `__base__`, so the result is typed as a plain `BaseModel` subclass.
@overload
def create_model(
    model_name: str,
    /,
    *,
    __config__: ConfigDict | None = None,
    __doc__: str | None = None,
    __base__: None = None,
    __module__: str = __name__,
    __validators__: dict[str, Callable[..., Any]] | None = None,
    __cls_kwargs__: dict[str, Any] | None = None,
    __qualname__: str | None = None,
    **field_definitions: Any | tuple[Any, Any],
) -> type[BaseModel]: ...
# Overload: explicit `__base__` (single class or tuple), so the result is typed as a
# subclass of the provided base model type.
@overload
def create_model(
    model_name: str,
    /,
    *,
    __config__: ConfigDict | None = None,
    __doc__: str | None = None,
    __base__: type[ModelT] | tuple[type[ModelT], ...],
    __module__: str = __name__,
    __validators__: dict[str, Callable[..., Any]] | None = None,
    __cls_kwargs__: dict[str, Any] | None = None,
    __qualname__: str | None = None,
    **field_definitions: Any | tuple[Any, Any],
) -> type[ModelT]: ...
def create_model(  # noqa: C901
    model_name: str,
    /,
    *,
    __config__: ConfigDict | None = None,
    __doc__: str | None = None,
    __base__: type[ModelT] | tuple[type[ModelT], ...] | None = None,
    __module__: str | None = None,
    __validators__: dict[str, Callable[..., Any]] | None = None,
    __cls_kwargs__: dict[str, Any] | None = None,
    __qualname__: str | None = None,
    # TODO PEP 747: replace `Any` by the TypeForm:
    **field_definitions: Any | tuple[Any, Any],
) -> type[ModelT]:
    """!!! abstract "Usage Documentation"
        [Dynamic Model Creation](../concepts/models.md#dynamic-model-creation)

    Dynamically creates and returns a new Pydantic model, in other words, `create_model` dynamically creates a
    subclass of [`BaseModel`][pydantic.BaseModel].

    !!! warning
        This function may execute arbitrary code contained in field annotations, if string references need to be evaluated.

        See [Security implications of introspecting annotations](https://docs.python.org/3/library/annotationlib.html#annotationlib-security) for more information.

    Args:
        model_name: The name of the newly created model.
        __config__: The configuration of the new model.
        __doc__: The docstring of the new model.
        __base__: The base class or classes for the new model.
        __module__: The name of the module that the model belongs to;
            if `None`, the value is taken from `sys._getframe(1)`.
        __validators__: A dictionary of methods that validate fields. The keys are the names of the validation methods to
            be added to the model, and the values are the validation methods themselves. You can read more about functional
            validators [here](https://docs.pydantic.dev/2.9/concepts/validators/#field-validators).
        __cls_kwargs__: A dictionary of keyword arguments for class creation, such as `metaclass`.
        __qualname__: The qualified name of the newly created model.
        **field_definitions: Field definitions of the new model. Either:

            - a single element, representing the type annotation of the field.
            - a two-tuple, the first element being the type and the second element the assigned value
              (either a default or the [`Field()`][pydantic.Field] function).

    Returns:
        The new [model][pydantic.BaseModel].

    Raises:
        PydanticUserError: If a field definition tuple does not have exactly two elements.
    """
    # Normalize `__base__` to a tuple of bases, defaulting to `BaseModel`.
    if __base__ is None:
        __base__ = (cast('type[ModelT]', BaseModel),)
    elif not isinstance(__base__, tuple):
        __base__ = (__base__,)

    __cls_kwargs__ = __cls_kwargs__ or {}

    fields: dict[str, Any] = {}  # assigned values (defaults / `Field()` results)
    annotations: dict[str, Any] = {}  # type annotations for the new class namespace

    for f_name, f_def in field_definitions.items():
        if isinstance(f_def, tuple):
            if len(f_def) != 2:
                # Fix: message previously read 'should a single element' (missing 'be').
                raise PydanticUserError(
                    f'Field definition for {f_name!r} should be a single element representing the type or a two-tuple, the first element '
                    'being the type and the second element the assigned value (either a default or the `Field()` function).',
                    code='create-model-field-definitions',
                )

            annotations[f_name] = f_def[0]
            fields[f_name] = f_def[1]
        else:
            annotations[f_name] = f_def

    if __module__ is None:
        # Attribute the new class to the caller's module so reprs/pickling look right.
        f = sys._getframe(1)
        __module__ = f.f_globals['__name__']

    # Build the class namespace the metaclass will consume.
    namespace: dict[str, Any] = {'__annotations__': annotations, '__module__': __module__}
    if __doc__:
        namespace['__doc__'] = __doc__
    if __qualname__ is not None:
        namespace['__qualname__'] = __qualname__
    if __validators__:
        namespace.update(__validators__)
    namespace.update(fields)
    if __config__:
        namespace['model_config'] = __config__
    # `resolve_bases`/`prepare_class` (PEP 560) handle non-class bases and pick the
    # correct metaclass (normally `ModelMetaclass`).
    resolved_bases = types.resolve_bases(__base__)
    meta, ns, kwds = types.prepare_class(model_name, resolved_bases, kwds=__cls_kwargs__)
    if resolved_bases is not __base__:
        ns['__orig_bases__'] = __base__
    namespace.update(ns)

    return meta(
        model_name,
        resolved_bases,
        namespace,
        # Don't capture this frame's namespace for forward-ref resolution.
        __pydantic_reset_parent_namespace__=False,
        _create_model_module=__module__,
        **kwds,
    )
# Module-level `__getattr__` (PEP 562): attribute lookups for names not defined here
# are routed through the V1->V2 migration helper (see `._migration.getattr_migration`).
__getattr__ = getattr_migration(__name__)