Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/pydantic/main.py: 42%
1"""Logic for creating models."""
3# Because `dict` is in the local namespace of the `BaseModel` class, we use `Dict` for annotations.
4# TODO v3 fallback to `dict` when the deprecated `dict` method gets removed.
5# ruff: noqa: UP035
7from __future__ import annotations as _annotations
9import operator
10import sys
11import types
12import warnings
13from collections.abc import Generator, Mapping
14from copy import copy, deepcopy
15from functools import cached_property
16from typing import (
17 TYPE_CHECKING,
18 Any,
19 Callable,
20 ClassVar,
21 Dict,
22 Generic,
23 Literal,
24 TypeVar,
25 Union,
26 cast,
27 overload,
28)
30import pydantic_core
31import typing_extensions
32from pydantic_core import PydanticUndefined, ValidationError
33from typing_extensions import Self, TypeAlias, Unpack
35from . import PydanticDeprecatedSince20, PydanticDeprecatedSince211
36from ._internal import (
37 _config,
38 _decorators,
39 _fields,
40 _forward_ref,
41 _generics,
42 _mock_val_ser,
43 _model_construction,
44 _namespace_utils,
45 _repr,
46 _typing_extra,
47 _utils,
48)
49from ._migration import getattr_migration
50from .aliases import AliasChoices, AliasPath
51from .annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler
52from .config import ConfigDict, ExtraValues
53from .errors import PydanticUndefinedAnnotation, PydanticUserError
54from .json_schema import DEFAULT_REF_TEMPLATE, GenerateJsonSchema, JsonSchemaMode, JsonSchemaValue, model_json_schema
55from .plugin._schema_validator import PluggableSchemaValidator
57if TYPE_CHECKING:
58 from inspect import Signature
59 from pathlib import Path
61 from pydantic_core import CoreSchema, SchemaSerializer, SchemaValidator
63 from ._internal._namespace_utils import MappingNamespace
64 from ._internal._utils import AbstractSetIntStr, MappingIntStrAny
65 from .deprecated.parse import Protocol as DeprecatedParseProtocol
66 from .fields import ComputedFieldInfo, FieldInfo, ModelPrivateAttr
69__all__ = 'BaseModel', 'create_model'
71# Keep these type aliases available at runtime:
72TupleGenerator: TypeAlias = Generator[tuple[str, Any], None, None]
73# NOTE: In reality, `bool` should be replaced by `Literal[True]` but mypy fails to correctly apply bidirectional
74# type inference (e.g. when using `{'a': {'b': True}}`):
75# NOTE: Keep this type alias in sync with the stub definition in `pydantic-core`:
76IncEx: TypeAlias = Union[set[int], set[str], Mapping[int, Union['IncEx', bool]], Mapping[str, Union['IncEx', bool]]]
78_object_setattr = _model_construction.object_setattr
81def _check_frozen(model_cls: type[BaseModel], name: str, value: Any) -> None:
82 if model_cls.model_config.get('frozen'):
83 error_type = 'frozen_instance'
84 elif getattr(model_cls.__pydantic_fields__.get(name), 'frozen', False):
85 error_type = 'frozen_field'
86 else:
87 return
89 raise ValidationError.from_exception_data(
90 model_cls.__name__, [{'type': error_type, 'loc': (name,), 'input': value}]
91 )
94def _model_field_setattr_handler(model: BaseModel, name: str, val: Any) -> None:
95 model.__dict__[name] = val
96 model.__pydantic_fields_set__.add(name)
99def _private_setattr_handler(model: BaseModel, name: str, val: Any) -> None:
100 if getattr(model, '__pydantic_private__', None) is None:
101 # While the attribute should be present at this point, this may not be the case if
102 # users do unusual stuff with `model_post_init()` (which is where the `__pydantic_private__`
103 # is initialized, by wrapping the user-defined `model_post_init()`), e.g. if they mock
104 # the `model_post_init()` call. Ideally we should find a better way to init private attrs.
105 object.__setattr__(model, '__pydantic_private__', {})
106 model.__pydantic_private__[name] = val # pyright: ignore[reportOptionalSubscript]
109_SIMPLE_SETATTR_HANDLERS: Mapping[str, Callable[[BaseModel, str, Any], None]] = {
110 'model_field': _model_field_setattr_handler,
111 'validate_assignment': lambda model, name, val: model.__pydantic_validator__.validate_assignment(model, name, val), # pyright: ignore[reportAssignmentType]
112 'private': _private_setattr_handler,
113 'cached_property': lambda model, name, val: model.__dict__.__setitem__(name, val),
114 'extra_known': lambda model, name, val: _object_setattr(model, name, val),
115}
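# --- Editor's example (not part of pydantic/main.py) ------------------------
# A minimal sketch of how the frozen checks above surface to users, assuming
# the public pydantic v2 API documented in this module; `Point` is a
# hypothetical model.
from pydantic import BaseModel, ConfigDict, ValidationError

class Point(BaseModel):
    model_config = ConfigDict(frozen=True)
    x: int
    y: int

p = Point(x=1, y=2)
try:
    p.x = 3  # routed through BaseModel.__setattr__ -> _check_frozen
except ValidationError as exc:
    assert exc.errors()[0]['type'] == 'frozen_instance'
# -----------------------------------------------------------------------------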
118class BaseModel(metaclass=_model_construction.ModelMetaclass):
119 """!!! abstract "Usage Documentation"
120 [Models](../concepts/models.md)
122 A base class for creating Pydantic models.
124 Attributes:
125 __class_vars__: The names of the class variables defined on the model.
126 __private_attributes__: Metadata about the private attributes of the model.
127 __signature__: The synthesized `__init__` [`Signature`][inspect.Signature] of the model.
129 __pydantic_complete__: Whether model building is completed, or if there are still undefined fields.
130 __pydantic_core_schema__: The core schema of the model.
131 __pydantic_custom_init__: Whether the model has a custom `__init__` function.
132 __pydantic_decorators__: Metadata containing the decorators defined on the model.
133 This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1.
134 __pydantic_generic_metadata__: Metadata for generic models; contains data used for a similar purpose to
135 __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these.
136 __pydantic_parent_namespace__: Parent namespace of the model, used for automatic rebuilding of models.
137 __pydantic_post_init__: The name of the post-init method for the model, if defined.
138 __pydantic_root_model__: Whether the model is a [`RootModel`][pydantic.root_model.RootModel].
139 __pydantic_serializer__: The `pydantic-core` `SchemaSerializer` used to dump instances of the model.
140 __pydantic_validator__: The `pydantic-core` `SchemaValidator` used to validate instances of the model.
142 __pydantic_fields__: A dictionary of field names and their corresponding [`FieldInfo`][pydantic.fields.FieldInfo] objects.
143 __pydantic_computed_fields__: A dictionary of computed field names and their corresponding [`ComputedFieldInfo`][pydantic.fields.ComputedFieldInfo] objects.
145 __pydantic_extra__: A dictionary containing extra values, if [`extra`][pydantic.config.ConfigDict.extra]
146 is set to `'allow'`.
147 __pydantic_fields_set__: The names of fields explicitly set during instantiation.
148 __pydantic_private__: Values of private attributes set on the model instance.
149 """
151 # Note: Many of the below class vars are defined in the metaclass, but we define them here for type checking purposes.
153 model_config: ClassVar[ConfigDict] = ConfigDict()
154 """
155 Configuration for the model, should be a dictionary conforming to [`ConfigDict`][pydantic.config.ConfigDict].
156 """
158 __class_vars__: ClassVar[set[str]]
159 """The names of the class variables defined on the model."""
161 __private_attributes__: ClassVar[Dict[str, ModelPrivateAttr]] # noqa: UP006
162 """Metadata about the private attributes of the model."""
164 __signature__: ClassVar[Signature]
165 """The synthesized `__init__` [`Signature`][inspect.Signature] of the model."""
167 __pydantic_complete__: ClassVar[bool] = False
168 """Whether model building is completed, or if there are still undefined fields."""
170 __pydantic_core_schema__: ClassVar[CoreSchema]
171 """The core schema of the model."""
173 __pydantic_custom_init__: ClassVar[bool]
174 """Whether the model has a custom `__init__` method."""
176 # Must be set for `GenerateSchema.model_schema` to work for a plain `BaseModel` annotation.
177 __pydantic_decorators__: ClassVar[_decorators.DecoratorInfos] = _decorators.DecoratorInfos()
178 """Metadata containing the decorators defined on the model.
179 This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1."""
181 __pydantic_generic_metadata__: ClassVar[_generics.PydanticGenericMetadata]
182 """Metadata for generic models; contains data used for a similar purpose to
183 __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these."""
185 __pydantic_parent_namespace__: ClassVar[Dict[str, Any] | None] = None # noqa: UP006
186 """Parent namespace of the model, used for automatic rebuilding of models."""
188 __pydantic_post_init__: ClassVar[None | Literal['model_post_init']]
189 """The name of the post-init method for the model, if defined."""
191 __pydantic_root_model__: ClassVar[bool] = False
192 """Whether the model is a [`RootModel`][pydantic.root_model.RootModel]."""
194 __pydantic_serializer__: ClassVar[SchemaSerializer]
195 """The `pydantic-core` `SchemaSerializer` used to dump instances of the model."""
197 __pydantic_validator__: ClassVar[SchemaValidator | PluggableSchemaValidator]
198 """The `pydantic-core` `SchemaValidator` used to validate instances of the model."""
200 __pydantic_fields__: ClassVar[Dict[str, FieldInfo]] # noqa: UP006
201 """A dictionary of field names and their corresponding [`FieldInfo`][pydantic.fields.FieldInfo] objects.
202 This replaces `Model.__fields__` from Pydantic V1.
203 """
205 __pydantic_setattr_handlers__: ClassVar[Dict[str, Callable[[BaseModel, str, Any], None]]] # noqa: UP006
206 """`__setattr__` handlers. Memoizing the handlers leads to a dramatic performance improvement in `__setattr__`"""
208 __pydantic_computed_fields__: ClassVar[Dict[str, ComputedFieldInfo]] # noqa: UP006
209 """A dictionary of computed field names and their corresponding [`ComputedFieldInfo`][pydantic.fields.ComputedFieldInfo] objects."""
211 __pydantic_extra__: Dict[str, Any] | None = _model_construction.NoInitField(init=False) # noqa: UP006
212 """A dictionary containing extra values, if [`extra`][pydantic.config.ConfigDict.extra] is set to `'allow'`."""
214 __pydantic_fields_set__: set[str] = _model_construction.NoInitField(init=False)
215 """The names of fields explicitly set during instantiation."""
217 __pydantic_private__: Dict[str, Any] | None = _model_construction.NoInitField(init=False) # noqa: UP006
218 """Values of private attributes set on the model instance."""
220 if not TYPE_CHECKING:
221 # Prevent `BaseModel` from being instantiated directly
222 # (defined in an `if not TYPE_CHECKING` block for clarity and to avoid type checking errors):
223 __pydantic_core_schema__ = _mock_val_ser.MockCoreSchema(
224 'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly',
225 code='base-model-instantiated',
226 )
227 __pydantic_validator__ = _mock_val_ser.MockValSer(
228 'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly',
229 val_or_ser='validator',
230 code='base-model-instantiated',
231 )
232 __pydantic_serializer__ = _mock_val_ser.MockValSer(
233 'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly',
234 val_or_ser='serializer',
235 code='base-model-instantiated',
236 )
238 __slots__ = '__dict__', '__pydantic_fields_set__', '__pydantic_extra__', '__pydantic_private__'
240 def __init__(self, /, **data: Any) -> None:
241 """Create a new model by parsing and validating input data from keyword arguments.
243 Raises [`ValidationError`][pydantic_core.ValidationError] if the input data cannot be
244 validated to form a valid model.
246 `self` is explicitly positional-only to allow `self` as a field name.
247 """
248 # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
249 __tracebackhide__ = True
250 validated_self = self.__pydantic_validator__.validate_python(data, self_instance=self)
251 if self is not validated_self:
252 warnings.warn(
253 'A custom validator is returning a value other than `self`.\n'
254 "Returning anything other than `self` from a top level model validator isn't supported when validating via `__init__`.\n"
255 'See the `model_validator` docs (https://docs.pydantic.dev/latest/concepts/validators/#model-validators) for more details.',
256 stacklevel=2,
257 )
259 # The following line sets a flag that we use to determine when `__init__` gets overridden by the user
260 __init__.__pydantic_base_init__ = True # pyright: ignore[reportFunctionMemberAccess]
262 @_utils.deprecated_instance_property
263 @classmethod
264 def model_fields(cls) -> dict[str, FieldInfo]:
265 """A mapping of field names to their respective [`FieldInfo`][pydantic.fields.FieldInfo] instances.
267 !!! warning
268 Accessing this attribute from a model instance is deprecated, and will not work in Pydantic V3.
269 Instead, you should access this attribute from the model class.
270 """
271 return getattr(cls, '__pydantic_fields__', {})
273 @_utils.deprecated_instance_property
274 @classmethod
275 def model_computed_fields(cls) -> dict[str, ComputedFieldInfo]:
276 """A mapping of computed field names to their respective [`ComputedFieldInfo`][pydantic.fields.ComputedFieldInfo] instances.
278 !!! warning
279 Accessing this attribute from a model instance is deprecated, and will not work in Pydantic V3.
280 Instead, you should access this attribute from the model class.
281 """
282 return getattr(cls, '__pydantic_computed_fields__', {})
284 @property
285 def model_extra(self) -> dict[str, Any] | None:
286 """Get extra fields set during validation.
288 Returns:
289 A dictionary of extra fields, or `None` if `config.extra` is not set to `"allow"`.
290 """
291 return self.__pydantic_extra__
293 @property
294 def model_fields_set(self) -> set[str]:
295 """Returns the set of fields that have been explicitly set on this model instance.
297 Returns:
298 A set of strings representing the fields that have been set,
299 i.e. that were not filled from defaults.
300 """
301 return self.__pydantic_fields_set__
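# --- Editor's example (not part of pydantic/main.py) ------------------------
# A minimal sketch of `model_extra` and `model_fields_set`, assuming the public
# pydantic v2 API; `AppConfig` is a hypothetical model.
from pydantic import BaseModel, ConfigDict

class AppConfig(BaseModel):
    model_config = ConfigDict(extra='allow')
    host: str = 'localhost'
    port: int = 8080

cfg = AppConfig(port=9000, debug=True)
assert 'port' in cfg.model_fields_set          # explicitly set
assert 'host' not in cfg.model_fields_set      # filled from the default
assert cfg.model_extra == {'debug': True}      # extra values under extra='allow'
# -----------------------------------------------------------------------------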
303 @classmethod
304 def model_construct(cls, _fields_set: set[str] | None = None, **values: Any) -> Self: # noqa: C901
305 """Creates a new instance of the `Model` class with validated data.
307 Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data.
308 Default values are respected, but no other validation is performed.
310 !!! note
311 `model_construct()` generally respects the `model_config.extra` setting on the provided model.
312 That is, if `model_config.extra == 'allow'`, then all extra passed values are added to the model instance's `__dict__`
313 and `__pydantic_extra__` fields. If `model_config.extra == 'ignore'` (the default), then all extra passed values are ignored.
314 Because no validation is performed with a call to `model_construct()`, having `model_config.extra == 'forbid'` does not result in
315 an error if extra values are passed, but they will be ignored.
317 Args:
318 _fields_set: A set of field names that were originally explicitly set during instantiation. If provided,
319 this is directly used for the [`model_fields_set`][pydantic.BaseModel.model_fields_set] attribute.
320 Otherwise, the field names from the `values` argument will be used.
321 values: Trusted or pre-validated data dictionary.
323 Returns:
324 A new instance of the `Model` class with validated data.
325 """
326 m = cls.__new__(cls)
327 fields_values: dict[str, Any] = {}
328 fields_set = set()
330 for name, field in cls.__pydantic_fields__.items():
331 if field.alias is not None and field.alias in values:
332 fields_values[name] = values.pop(field.alias)
333 fields_set.add(name)
335 if (name not in fields_set) and (field.validation_alias is not None):
336 validation_aliases: list[str | AliasPath] = (
337 field.validation_alias.choices
338 if isinstance(field.validation_alias, AliasChoices)
339 else [field.validation_alias]
340 )
342 for alias in validation_aliases:
343 if isinstance(alias, str) and alias in values:
344 fields_values[name] = values.pop(alias)
345 fields_set.add(name)
346 break
347 elif isinstance(alias, AliasPath):
348 value = alias.search_dict_for_path(values)
349 if value is not PydanticUndefined:
350 fields_values[name] = value
351 fields_set.add(name)
352 break
354 if name not in fields_set:
355 if name in values:
356 fields_values[name] = values.pop(name)
357 fields_set.add(name)
358 elif not field.is_required():
359 fields_values[name] = field.get_default(call_default_factory=True, validated_data=fields_values)
360 if _fields_set is None:
361 _fields_set = fields_set
363 _extra: dict[str, Any] | None = values if cls.model_config.get('extra') == 'allow' else None
364 _object_setattr(m, '__dict__', fields_values)
365 _object_setattr(m, '__pydantic_fields_set__', _fields_set)
366 if not cls.__pydantic_root_model__:
367 _object_setattr(m, '__pydantic_extra__', _extra)
369 if cls.__pydantic_post_init__:
370 m.model_post_init(None)
371 # update private attributes with values set
372 if hasattr(m, '__pydantic_private__') and m.__pydantic_private__ is not None:
373 for k, v in values.items():
374 if k in m.__private_attributes__:
375 m.__pydantic_private__[k] = v
377 elif not cls.__pydantic_root_model__:
378 # Note: if there are any private attributes, cls.__pydantic_post_init__ would exist
379 # Since it doesn't, that means that `__pydantic_private__` should be set to None
380 _object_setattr(m, '__pydantic_private__', None)
382 return m
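# --- Editor's example (not part of pydantic/main.py) ------------------------
# A minimal sketch of `model_construct()` with trusted data, assuming the public
# pydantic v2 API; `User` is a hypothetical model. No validation is performed,
# but defaults are applied.
from pydantic import BaseModel

class User(BaseModel):
    id: int
    name: str = 'unknown'

u = User.model_construct(id=1)
assert u.name == 'unknown'                   # default respected
assert u.model_fields_set == {'id'}          # only explicitly provided fields
unchecked = User.model_construct(id='oops')  # accepted as-is: no validation
assert unchecked.id == 'oops'
# -----------------------------------------------------------------------------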
384 def model_copy(self, *, update: Mapping[str, Any] | None = None, deep: bool = False) -> Self:
385 """!!! abstract "Usage Documentation"
386 [`model_copy`](../concepts/models.md#model-copy)
388 Returns a copy of the model.
390 !!! note
391 The underlying instance's [`__dict__`][object.__dict__] attribute is copied. This
392 might have unexpected side effects if you store anything in it, on top of the model
393 fields (e.g. the value of [cached properties][functools.cached_property]).
395 Args:
396 update: Values to change/add in the new model. Note: the data is not validated
397 before creating the new model. You should trust this data.
398 deep: Set to `True` to make a deep copy of the model.
400 Returns:
401 New model instance.
402 """
403 copied = self.__deepcopy__() if deep else self.__copy__()
404 if update:
405 if self.model_config.get('extra') == 'allow':
406 for k, v in update.items():
407 if k in self.__pydantic_fields__:
408 copied.__dict__[k] = v
409 else:
410 if copied.__pydantic_extra__ is None:
411 copied.__pydantic_extra__ = {}
412 copied.__pydantic_extra__[k] = v
413 else:
414 copied.__dict__.update(update)
415 copied.__pydantic_fields_set__.update(update.keys())
416 return copied
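# --- Editor's example (not part of pydantic/main.py) ------------------------
# A minimal sketch of shallow vs. deep copies and the unvalidated `update`
# argument, assuming the public pydantic v2 API; `Item` is hypothetical.
from pydantic import BaseModel

class Item(BaseModel):
    name: str
    tags: list[str] = []

item = Item(name='a', tags=['x'])
shallow = item.model_copy(update={'name': 'b'})
assert shallow.name == 'b' and shallow.tags is item.tags   # nested objects shared
deep = item.model_copy(deep=True)
assert deep.tags is not item.tags                          # nested objects copied
# -----------------------------------------------------------------------------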
418 def model_dump(
419 self,
420 *,
421 mode: Literal['json', 'python'] | str = 'python',
422 include: IncEx | None = None,
423 exclude: IncEx | None = None,
424 context: Any | None = None,
425 by_alias: bool | None = None,
426 exclude_unset: bool = False,
427 exclude_defaults: bool = False,
428 exclude_none: bool = False,
429 exclude_computed_fields: bool = False,
430 round_trip: bool = False,
431 warnings: bool | Literal['none', 'warn', 'error'] = True,
432 fallback: Callable[[Any], Any] | None = None,
433 serialize_as_any: bool = False,
434 ) -> dict[str, Any]:
435 """!!! abstract "Usage Documentation"
436 [`model_dump`](../concepts/serialization.md#python-mode)
438 Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
440 Args:
441 mode: The mode in which `to_python` should run.
442 If mode is 'json', the output will only contain JSON serializable types.
443 If mode is 'python', the output may contain non-JSON-serializable Python objects.
444 include: A set of fields to include in the output.
445 exclude: A set of fields to exclude from the output.
446 context: Additional context to pass to the serializer.
447 by_alias: Whether to use the field's alias in the dictionary key if defined.
448 exclude_unset: Whether to exclude fields that have not been explicitly set.
449 exclude_defaults: Whether to exclude fields that are set to their default value.
450 exclude_none: Whether to exclude fields that have a value of `None`.
451 exclude_computed_fields: Whether to exclude computed fields.
452 While this can be useful for round-tripping, it is usually recommended to use the dedicated
453 `round_trip` parameter instead.
454 round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
455 warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
456 "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError].
457 fallback: A function to call when an unknown value is encountered. If not provided,
458 a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised.
459 serialize_as_any: Whether to serialize fields with duck-typing serialization behavior.
461 Returns:
462 A dictionary representation of the model.
463 """
464 return self.__pydantic_serializer__.to_python(
465 self,
466 mode=mode,
467 by_alias=by_alias,
468 include=include,
469 exclude=exclude,
470 context=context,
471 exclude_unset=exclude_unset,
472 exclude_defaults=exclude_defaults,
473 exclude_none=exclude_none,
474 exclude_computed_fields=exclude_computed_fields,
475 round_trip=round_trip,
476 warnings=warnings,
477 fallback=fallback,
478 serialize_as_any=serialize_as_any,
479 )
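# --- Editor's example (not part of pydantic/main.py) ------------------------
# A minimal sketch of `model_dump()` in 'python' vs. 'json' mode, assuming the
# public pydantic v2 API; `Event` is a hypothetical model.
from datetime import datetime
from pydantic import BaseModel

class Event(BaseModel):
    name: str
    when: datetime
    note: str | None = None

e = Event(name='release', when=datetime(2024, 1, 1))
assert isinstance(e.model_dump()['when'], datetime)                  # python mode
assert e.model_dump(mode='json')['when'] == '2024-01-01T00:00:00'    # JSON-safe types
assert 'note' not in e.model_dump(exclude_none=True)
# -----------------------------------------------------------------------------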
481 def model_dump_json(
482 self,
483 *,
484 indent: int | None = None,
485 ensure_ascii: bool = False,
486 include: IncEx | None = None,
487 exclude: IncEx | None = None,
488 context: Any | None = None,
489 by_alias: bool | None = None,
490 exclude_unset: bool = False,
491 exclude_defaults: bool = False,
492 exclude_none: bool = False,
493 exclude_computed_fields: bool = False,
494 round_trip: bool = False,
495 warnings: bool | Literal['none', 'warn', 'error'] = True,
496 fallback: Callable[[Any], Any] | None = None,
497 serialize_as_any: bool = False,
498 ) -> str:
499 """!!! abstract "Usage Documentation"
500 [`model_dump_json`](../concepts/serialization.md#json-mode)
502 Generates a JSON representation of the model using Pydantic's `to_json` method.
504 Args:
505 indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
506 ensure_ascii: If `True`, the output is guaranteed to have all incoming non-ASCII characters escaped.
507 If `False` (the default), these characters will be output as-is.
508 include: Field(s) to include in the JSON output.
509 exclude: Field(s) to exclude from the JSON output.
510 context: Additional context to pass to the serializer.
511 by_alias: Whether to serialize using field aliases.
512 exclude_unset: Whether to exclude fields that have not been explicitly set.
513 exclude_defaults: Whether to exclude fields that are set to their default value.
514 exclude_none: Whether to exclude fields that have a value of `None`.
515 exclude_computed_fields: Whether to exclude computed fields.
516 While this can be useful for round-tripping, it is usually recommended to use the dedicated
517 `round_trip` parameter instead.
518 round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
519 warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
520 "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError].
521 fallback: A function to call when an unknown value is encountered. If not provided,
522 a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised.
523 serialize_as_any: Whether to serialize fields with duck-typing serialization behavior.
525 Returns:
526 A JSON string representation of the model.
527 """
528 return self.__pydantic_serializer__.to_json(
529 self,
530 indent=indent,
531 ensure_ascii=ensure_ascii,
532 include=include,
533 exclude=exclude,
534 context=context,
535 by_alias=by_alias,
536 exclude_unset=exclude_unset,
537 exclude_defaults=exclude_defaults,
538 exclude_none=exclude_none,
539 exclude_computed_fields=exclude_computed_fields,
540 round_trip=round_trip,
541 warnings=warnings,
542 fallback=fallback,
543 serialize_as_any=serialize_as_any,
544 ).decode()
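# --- Editor's example (not part of pydantic/main.py) ------------------------
# A minimal sketch of `model_dump_json()`, assuming the public pydantic v2 API;
# note that the serializer's `to_json()` bytes are decoded to `str` above.
from pydantic import BaseModel

class Point(BaseModel):
    x: int
    y: int

json_str = Point(x=1, y=2).model_dump_json(indent=2)
assert isinstance(json_str, str)
assert '"x": 1' in json_str
# -----------------------------------------------------------------------------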
546 @classmethod
547 def model_json_schema(
548 cls,
549 by_alias: bool = True,
550 ref_template: str = DEFAULT_REF_TEMPLATE,
551 schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
552 mode: JsonSchemaMode = 'validation',
553 *,
554 union_format: Literal['any_of', 'primitive_type_array'] = 'any_of',
555 ) -> dict[str, Any]:
556 """Generates a JSON schema for a model class.
558 Args:
559 by_alias: Whether to use attribute aliases or not.
560 ref_template: The reference template.
561 union_format: The format to use when combining schemas from unions together. Can be one of:
563 - `'any_of'`: Use the [`anyOf`](https://json-schema.org/understanding-json-schema/reference/combining#anyOf)
564 keyword to combine schemas (the default).
565 - `'primitive_type_array'`: Use the [`type`](https://json-schema.org/understanding-json-schema/reference/type)
566 keyword as an array of strings, containing each type of the combination. If any of the schemas is not a primitive
567 type (`string`, `boolean`, `null`, `integer` or `number`) or contains constraints/metadata, falls back to
568 `any_of`.
569 schema_generator: To override the logic used to generate the JSON schema, as a subclass of
570 `GenerateJsonSchema` with your desired modifications
571 mode: The mode in which to generate the schema.
573 Returns:
574 The JSON schema for the given model class.
575 """
576 return model_json_schema(
577 cls,
578 by_alias=by_alias,
579 ref_template=ref_template,
580 union_format=union_format,
581 schema_generator=schema_generator,
582 mode=mode,
583 )
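# --- Editor's example (not part of pydantic/main.py) ------------------------
# A minimal sketch of `model_json_schema()`, assuming the public pydantic v2
# API; `Person` is a hypothetical model. Field metadata lands in `properties`.
from pydantic import BaseModel, Field

class Person(BaseModel):
    name: str = Field(description='Full name')
    age: int = 0

schema = Person.model_json_schema()
assert schema['type'] == 'object'
assert schema['properties']['name']['description'] == 'Full name'
assert schema['required'] == ['name']
# -----------------------------------------------------------------------------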
585 @classmethod
586 def model_parametrized_name(cls, params: tuple[type[Any], ...]) -> str:
587 """Compute the class name for parametrizations of generic classes.
589 This method can be overridden to achieve a custom naming scheme for generic BaseModels.
591 Args:
592 params: Tuple of types of the class. Given a generic class
593 `Model` with 2 type variables and a concrete model `Model[str, int]`,
594 the value `(str, int)` would be passed to `params`.
596 Returns:
597 String representing the new class where `params` are passed to `cls` as type variables.
599 Raises:
600 TypeError: Raised when trying to generate concrete names for non-generic models.
601 """
602 if not issubclass(cls, Generic):
603 raise TypeError('Concrete names should only be generated for generic models.')
605 # Any strings received should represent forward references, so we handle them specially below.
606 # If we eventually move toward wrapping them in a ForwardRef in __class_getitem__ in the future,
607 # we may be able to remove this special case.
608 param_names = [param if isinstance(param, str) else _repr.display_as_type(param) for param in params]
609 params_component = ', '.join(param_names)
610 return f'{cls.__name__}[{params_component}]'
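# --- Editor's example (not part of pydantic/main.py) ------------------------
# A minimal sketch of the default parametrized class name and a custom naming
# scheme, assuming the public pydantic v2 API; `Response`/`Named` are
# hypothetical models.
from typing import Generic, TypeVar
from pydantic import BaseModel

T = TypeVar('T')

class Response(BaseModel, Generic[T]):
    data: T

assert Response[int].__name__ == 'Response[int]'   # default naming

class Named(BaseModel, Generic[T]):
    data: T

    @classmethod
    def model_parametrized_name(cls, params: tuple[type, ...]) -> str:
        return f'{cls.__name__}Of{params[0].__name__.capitalize()}'

assert Named[int].__name__ == 'NamedOfInt'
# -----------------------------------------------------------------------------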
612 def model_post_init(self, context: Any, /) -> None:
613 """Override this method to perform additional initialization after `__init__` and `model_construct`.
614 This is useful if you want to do some validation that requires the entire model to be initialized.
615 """
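# --- Editor's example (not part of pydantic/main.py) ------------------------
# A minimal sketch of `model_post_init()`, assuming the public pydantic v2 API;
# `Rect` is a hypothetical model. The hook runs after validation in `__init__`
# and, as noted in `model_construct()` above, after construction as well.
from typing import Any
from pydantic import BaseModel

class Rect(BaseModel):
    width: int
    height: int
    area: int = 0

    def model_post_init(self, context: Any, /) -> None:
        # all fields are initialized at this point
        self.area = self.width * self.height

assert Rect(width=2, height=3).area == 6
# -----------------------------------------------------------------------------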
617 @classmethod
618 def model_rebuild(
619 cls,
620 *,
621 force: bool = False,
622 raise_errors: bool = True,
623 _parent_namespace_depth: int = 2,
624 _types_namespace: MappingNamespace | None = None,
625 ) -> bool | None:
626 """Try to rebuild the pydantic-core schema for the model.
628 This may be necessary when one of the annotations is a ForwardRef which could not be resolved during
629 the initial attempt to build the schema, and automatic rebuilding fails.
631 Args:
632 force: Whether to force the rebuilding of the model schema, defaults to `False`.
633 raise_errors: Whether to raise errors, defaults to `True`.
634 _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
635 _types_namespace: The types namespace, defaults to `None`.
637 Returns:
638 Returns `None` if the schema is already "complete" and rebuilding was not required.
639 If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`.
640 """
641 already_complete = cls.__pydantic_complete__
642 if already_complete and not force:
643 return None
645 cls.__pydantic_complete__ = False
647 for attr in ('__pydantic_core_schema__', '__pydantic_validator__', '__pydantic_serializer__'):
648 if attr in cls.__dict__ and not isinstance(getattr(cls, attr), _mock_val_ser.MockValSer):
649 # Deleting the validator/serializer is necessary as otherwise they can get reused in
650 # pydantic-core. We do so only if they aren't mock instances, otherwise — as `model_rebuild()`
651 # isn't thread-safe — concurrent model instantiations can lead to the parent validator being used.
652 # Same applies for the core schema that can be reused in schema generation.
653 delattr(cls, attr)
655 if _types_namespace is not None:
656 rebuild_ns = _types_namespace
657 elif _parent_namespace_depth > 0:
658 rebuild_ns = _typing_extra.parent_frame_namespace(parent_depth=_parent_namespace_depth, force=True) or {}
659 else:
660 rebuild_ns = {}
662 parent_ns = _model_construction.unpack_lenient_weakvaluedict(cls.__pydantic_parent_namespace__) or {}
664 ns_resolver = _namespace_utils.NsResolver(
665 parent_namespace={**rebuild_ns, **parent_ns},
666 )
668 return _model_construction.complete_model_class(
669 cls,
670 _config.ConfigWrapper(cls.model_config, check=False),
671 ns_resolver,
672 raise_errors=raise_errors,
673 # If the model was already complete, we don't need to call the hook again.
674 call_on_complete_hook=not already_complete,
675 )
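# --- Editor's example (not part of pydantic/main.py) ------------------------
# A minimal sketch of `model_rebuild()` resolving a forward reference that was
# undefined when the class body was first built, assuming the public pydantic
# v2 API; `Foo`/`Bar` are hypothetical models.
from pydantic import BaseModel

class Foo(BaseModel):
    bar: 'Bar'        # `Bar` not defined yet: the schema build is deferred

class Bar(BaseModel):
    x: int

Foo.model_rebuild()   # succeeds now that `Bar` is resolvable
assert Foo(bar={'x': 1}).bar.x == 1
# -----------------------------------------------------------------------------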
677 @classmethod
678 def model_validate(
679 cls,
680 obj: Any,
681 *,
682 strict: bool | None = None,
683 extra: ExtraValues | None = None,
684 from_attributes: bool | None = None,
685 context: Any | None = None,
686 by_alias: bool | None = None,
687 by_name: bool | None = None,
688 ) -> Self:
689 """Validate a pydantic model instance.
691 Args:
692 obj: The object to validate.
693 strict: Whether to enforce types strictly.
694 extra: Whether to ignore, allow, or forbid extra data during model validation.
695 See the [`extra` configuration value][pydantic.ConfigDict.extra] for details.
696 from_attributes: Whether to extract data from object attributes.
697 context: Additional context to pass to the validator.
698 by_alias: Whether to use the field's alias when validating against the provided input data.
699 by_name: Whether to use the field's name when validating against the provided input data.
701 Raises:
702 ValidationError: If the object could not be validated.
704 Returns:
705 The validated model instance.
706 """
707 # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
708 __tracebackhide__ = True
710 if by_alias is False and by_name is not True:
711 raise PydanticUserError(
712 'At least one of `by_alias` or `by_name` must be set to True.',
713 code='validate-by-alias-and-name-false',
714 )
716 return cls.__pydantic_validator__.validate_python(
717 obj,
718 strict=strict,
719 extra=extra,
720 from_attributes=from_attributes,
721 context=context,
722 by_alias=by_alias,
723 by_name=by_name,
724 )
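# --- Editor's example (not part of pydantic/main.py) ------------------------
# A minimal sketch of `model_validate()`, including attribute-based extraction,
# assuming the public pydantic v2 API; `User`/`UserRecord` are hypothetical.
from pydantic import BaseModel

class User(BaseModel):
    id: int
    name: str

class UserRecord:          # e.g. an ORM row or any attribute-bearing object
    id = 7
    name = 'Ada'

assert User.model_validate({'id': '7', 'name': 'Ada'}).id == 7          # coercion
assert User.model_validate(UserRecord(), from_attributes=True).name == 'Ada'
# -----------------------------------------------------------------------------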
726 @classmethod
727 def model_validate_json(
728 cls,
729 json_data: str | bytes | bytearray,
730 *,
731 strict: bool | None = None,
732 extra: ExtraValues | None = None,
733 context: Any | None = None,
734 by_alias: bool | None = None,
735 by_name: bool | None = None,
736 ) -> Self:
737 """!!! abstract "Usage Documentation"
738 [JSON Parsing](../concepts/json.md#json-parsing)
740 Validate the given JSON data against the Pydantic model.
742 Args:
743 json_data: The JSON data to validate.
744 strict: Whether to enforce types strictly.
745 extra: Whether to ignore, allow, or forbid extra data during model validation.
746 See the [`extra` configuration value][pydantic.ConfigDict.extra] for details.
747 context: Extra variables to pass to the validator.
748 by_alias: Whether to use the field's alias when validating against the provided input data.
749 by_name: Whether to use the field's name when validating against the provided input data.
751 Returns:
752 The validated Pydantic model.
754 Raises:
755 ValidationError: If `json_data` is not a JSON string or the object could not be validated.
756 """
757 # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
758 __tracebackhide__ = True
760 if by_alias is False and by_name is not True:
761 raise PydanticUserError(
762 'At least one of `by_alias` or `by_name` must be set to True.',
763 code='validate-by-alias-and-name-false',
764 )
766 return cls.__pydantic_validator__.validate_json(
767 json_data, strict=strict, extra=extra, context=context, by_alias=by_alias, by_name=by_name
768 )
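# --- Editor's example (not part of pydantic/main.py) ------------------------
# A minimal sketch of `model_validate_json()`, which parses and validates in a
# single pass, assuming the public pydantic v2 API; `User` is hypothetical.
from pydantic import BaseModel, ValidationError

class User(BaseModel):
    id: int

assert User.model_validate_json('{"id": 3}').id == 3
try:
    User.model_validate_json('not json')
except ValidationError as exc:
    assert exc.errors()[0]['type'] == 'json_invalid'
# -----------------------------------------------------------------------------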
770 @classmethod
771 def model_validate_strings(
772 cls,
773 obj: Any,
774 *,
775 strict: bool | None = None,
776 extra: ExtraValues | None = None,
777 context: Any | None = None,
778 by_alias: bool | None = None,
779 by_name: bool | None = None,
780 ) -> Self:
781 """Validate the given object with string data against the Pydantic model.
783 Args:
784 obj: The object containing string data to validate.
785 strict: Whether to enforce types strictly.
786 extra: Whether to ignore, allow, or forbid extra data during model validation.
787 See the [`extra` configuration value][pydantic.ConfigDict.extra] for details.
788 context: Extra variables to pass to the validator.
789 by_alias: Whether to use the field's alias when validating against the provided input data.
790 by_name: Whether to use the field's name when validating against the provided input data.
792 Returns:
793 The validated Pydantic model.
794 """
795 # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
796 __tracebackhide__ = True
798 if by_alias is False and by_name is not True:
799 raise PydanticUserError(
800 'At least one of `by_alias` or `by_name` must be set to True.',
801 code='validate-by-alias-and-name-false',
802 )
804 return cls.__pydantic_validator__.validate_strings(
805 obj, strict=strict, extra=extra, context=context, by_alias=by_alias, by_name=by_name
806 )
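# --- Editor's example (not part of pydantic/main.py) ------------------------
# A minimal sketch of `model_validate_strings()`, which takes string-valued data
# (e.g. query parameters or environment variables) and coerces it like JSON-mode
# validation, assuming the public pydantic v2 API; `Job` is hypothetical.
from datetime import date
from pydantic import BaseModel

class Job(BaseModel):
    id: int
    run_on: date

job = Job.model_validate_strings({'id': '10', 'run_on': '2024-05-01'})
assert job.id == 10 and job.run_on == date(2024, 5, 1)
# -----------------------------------------------------------------------------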
808 @classmethod
809 def __get_pydantic_core_schema__(cls, source: type[BaseModel], handler: GetCoreSchemaHandler, /) -> CoreSchema:
810 # This warning is only emitted when calling `super().__get_pydantic_core_schema__` from a model subclass.
811 # In the generate schema logic, this method (`BaseModel.__get_pydantic_core_schema__`) is special cased to
812 # *not* be called if not overridden.
813 warnings.warn(
814 'The `__get_pydantic_core_schema__` method of the `BaseModel` class is deprecated. If you are calling '
815 '`super().__get_pydantic_core_schema__` when overriding the method on a Pydantic model, consider using '
816 '`handler(source)` instead. However, note that overriding this method on models can lead to unexpected '
817 'side effects.',
818 PydanticDeprecatedSince211,
819 stacklevel=2,
820 )
821 # Logic copied over from `GenerateSchema._model_schema`:
822 schema = cls.__dict__.get('__pydantic_core_schema__')
823 if schema is not None and not isinstance(schema, _mock_val_ser.MockCoreSchema):
824 return cls.__pydantic_core_schema__
826 return handler(source)
828 @classmethod
829 def __get_pydantic_json_schema__(
830 cls,
831 core_schema: CoreSchema,
832 handler: GetJsonSchemaHandler,
833 /,
834 ) -> JsonSchemaValue:
835 """Hook into generating the model's JSON schema.
837 Args:
838 core_schema: A `pydantic-core` CoreSchema.
839 You can ignore this argument and call the handler with a new CoreSchema,
840 wrap this CoreSchema (`{'type': 'nullable', 'schema': current_schema}`),
841 or just call the handler with the original schema.
842 handler: Call into Pydantic's internal JSON schema generation.
843 This will raise a `pydantic.errors.PydanticInvalidForJsonSchema` if JSON schema
844 generation fails.
845 Since this gets called by `BaseModel.model_json_schema` you can override the
846 `schema_generator` argument to that function to change JSON schema generation globally
847 for a type.
849 Returns:
850 A JSON schema, as a Python object.
851 """
852 return handler(core_schema)
854 @classmethod
855 def __pydantic_init_subclass__(cls, **kwargs: Any) -> None:
856 """This is intended to behave just like `__init_subclass__`, but is called by `ModelMetaclass`
857 only after basic class initialization is complete. In particular, attributes like `model_fields` will
858 be present when this is called, but forward annotations are not guaranteed to be resolved yet,
859 meaning that creating an instance of the class may fail.
861 This is necessary because `__init_subclass__` will always be called by `type.__new__`,
862 and it would require a prohibitively large refactor to the `ModelMetaclass` to ensure that
863 `type.__new__` was called in such a manner that the class would already be sufficiently initialized.
865 This will receive the same `kwargs` that would be passed to the standard `__init_subclass__`, namely,
866 any kwargs passed to the class definition that aren't used internally by Pydantic.
868 Args:
869 **kwargs: Any keyword arguments passed to the class definition that aren't used internally
870 by Pydantic.
872 Note:
873 You may want to override [`__pydantic_on_complete__()`][pydantic.main.BaseModel.__pydantic_on_complete__]
874 instead, which is called once the class and its fields are fully initialized and ready for validation.
875 """
877 @classmethod
878 def __pydantic_on_complete__(cls) -> None:
879 """This is called once the class and its fields are fully initialized and ready to be used.
881 This typically happens when the class is created (just before
882 [`__pydantic_init_subclass__()`][pydantic.main.BaseModel.__pydantic_init_subclass__] is called on the superclass),
883 except when forward annotations are used that could not immediately be resolved.
884 In that case, it will be called later, when the model is rebuilt automatically or explicitly using
885 [`model_rebuild()`][pydantic.main.BaseModel.model_rebuild].
886 """
888 def __class_getitem__(
889 cls, typevar_values: type[Any] | tuple[type[Any], ...]
890 ) -> type[BaseModel] | _forward_ref.PydanticRecursiveRef:
891 cached = _generics.get_cached_generic_type_early(cls, typevar_values)
892 if cached is not None:
893 return cached
895 if cls is BaseModel:
896 raise TypeError('Type parameters should be placed on typing.Generic, not BaseModel')
897 if not hasattr(cls, '__parameters__'):
898 raise TypeError(f'{cls} cannot be parametrized because it does not inherit from typing.Generic')
899 if not cls.__pydantic_generic_metadata__['parameters'] and Generic not in cls.__bases__:
900 raise TypeError(f'{cls} is not a generic class')
902 if not isinstance(typevar_values, tuple):
903 typevar_values = (typevar_values,)
905 # For a model `class Model[T, U, V = int](BaseModel): ...` parametrized with `(str, bool)`,
906 # this gives us `{T: str, U: bool, V: int}`:
907 typevars_map = _generics.map_generic_model_arguments(cls, typevar_values)
908 # We also update the provided args to use defaults values (`(str, bool)` becomes `(str, bool, int)`):
909 typevar_values = tuple(v for v in typevars_map.values())
911 if _utils.all_identical(typevars_map.keys(), typevars_map.values()) and typevars_map:
912 submodel = cls # if arguments are equal to parameters it's the same object
913 _generics.set_cached_generic_type(cls, typevar_values, submodel)
914 else:
915 parent_args = cls.__pydantic_generic_metadata__['args']
916 if not parent_args:
917 args = typevar_values
918 else:
919 args = tuple(_generics.replace_types(arg, typevars_map) for arg in parent_args)
921 origin = cls.__pydantic_generic_metadata__['origin'] or cls
922 model_name = origin.model_parametrized_name(args)
923 params = tuple(
924 dict.fromkeys(_generics.iter_contained_typevars(typevars_map.values()))
925 ) # use dict as ordered set
927 with _generics.generic_recursion_self_type(origin, args) as maybe_self_type:
928 cached = _generics.get_cached_generic_type_late(cls, typevar_values, origin, args)
929 if cached is not None:
930 return cached
932 if maybe_self_type is not None:
933 return maybe_self_type
935 # Attempt to rebuild the origin in case new types have been defined
936 try:
937 # depth 2 gets you above this __class_getitem__ call.
938 # Note that we explicitly provide the parent ns, otherwise
939 # `model_rebuild` will use the parent ns no matter if it is the ns of a module.
940 # We don't want this here, as this has unexpected effects when a model
941 # is being parametrized during a forward annotation evaluation.
942 parent_ns = _typing_extra.parent_frame_namespace(parent_depth=2) or {}
943 origin.model_rebuild(_types_namespace=parent_ns)
944 except PydanticUndefinedAnnotation:
945 # It's okay if it fails, it just means there are still undefined types
946 # that could be evaluated later.
947 pass
949 submodel = _generics.create_generic_submodel(model_name, origin, args, params)
951 _generics.set_cached_generic_type(cls, typevar_values, submodel, origin, args)
953 return submodel
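# --- Editor's example (not part of pydantic/main.py) ------------------------
# A minimal sketch of parametrizing a generic model via `__class_getitem__`,
# assuming the public pydantic v2 API; `Wrapper` is a hypothetical model. The
# parametrized class is cached, so repeated subscription returns the same type.
from typing import Generic, TypeVar
from pydantic import BaseModel

T = TypeVar('T')

class Wrapper(BaseModel, Generic[T]):
    value: T

IntWrapper = Wrapper[int]
assert IntWrapper is Wrapper[int]            # cached parametrization
assert IntWrapper(value='5').value == 5      # `T` bound to `int`, so coercion applies
# -----------------------------------------------------------------------------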
955 def __copy__(self) -> Self:
956 """Returns a shallow copy of the model."""
957 cls = type(self)
958 m = cls.__new__(cls)
959 _object_setattr(m, '__dict__', copy(self.__dict__))
960 _object_setattr(m, '__pydantic_extra__', copy(self.__pydantic_extra__))
961 _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__))
963 if not hasattr(self, '__pydantic_private__') or self.__pydantic_private__ is None:
964 _object_setattr(m, '__pydantic_private__', None)
965 else:
966 _object_setattr(
967 m,
968 '__pydantic_private__',
969 {k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined},
970 )
972 return m
974 def __deepcopy__(self, memo: dict[int, Any] | None = None) -> Self:
975 """Returns a deep copy of the model."""
976 cls = type(self)
977 m = cls.__new__(cls)
978 _object_setattr(m, '__dict__', deepcopy(self.__dict__, memo=memo))
979 _object_setattr(m, '__pydantic_extra__', deepcopy(self.__pydantic_extra__, memo=memo))
980 # This next line doesn't need a deepcopy because __pydantic_fields_set__ is a set[str],
981 # and attempting a deepcopy would be marginally slower.
982 _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__))
984 if not hasattr(self, '__pydantic_private__') or self.__pydantic_private__ is None:
985 _object_setattr(m, '__pydantic_private__', None)
986 else:
987 _object_setattr(
988 m,
989 '__pydantic_private__',
990 deepcopy({k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined}, memo=memo),
991 )
993 return m
995 if not TYPE_CHECKING:
996 # We put `__getattr__` in a non-TYPE_CHECKING block because otherwise, mypy allows arbitrary attribute access
997 # The same goes for __setattr__ and __delattr__, see: https://github.com/pydantic/pydantic/issues/8643
999 def __getattr__(self, item: str) -> Any:
1000 private_attributes = object.__getattribute__(self, '__private_attributes__')
1001 if item in private_attributes:
1002 attribute = private_attributes[item]
1003 if hasattr(attribute, '__get__'):
1004 return attribute.__get__(self, type(self)) # type: ignore
1006 try:
1007 # Note: self.__pydantic_private__ cannot be None if self.__private_attributes__ has items
1008 return self.__pydantic_private__[item] # type: ignore
1009 except KeyError as exc:
1010 raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') from exc
1011 else:
1012 # `__pydantic_extra__` can fail to be set if the model is not yet fully initialized.
1013 # See `BaseModel.__repr_args__` for more details
1014 try:
1015 pydantic_extra = object.__getattribute__(self, '__pydantic_extra__')
1016 except AttributeError:
1017 pydantic_extra = None
1019 if pydantic_extra and item in pydantic_extra:
1020 return pydantic_extra[item]
1021 else:
1022 if hasattr(self.__class__, item):
1023 return super().__getattribute__(item) # Raises AttributeError if appropriate
1024 else:
1025 # this is the current error
1026 raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}')
1028 def __setattr__(self, name: str, value: Any) -> None:
1029 if (setattr_handler := self.__pydantic_setattr_handlers__.get(name)) is not None:
1030 setattr_handler(self, name, value)
1031 # if None is returned from _setattr_handler, the attribute was set directly
1032 elif (setattr_handler := self._setattr_handler(name, value)) is not None:
1033 setattr_handler(self, name, value) # call here to not memo on possibly unknown fields
1034 self.__pydantic_setattr_handlers__[name] = setattr_handler # memoize the handler for faster access
1036 def _setattr_handler(self, name: str, value: Any) -> Callable[[BaseModel, str, Any], None] | None:
1037 """Get a handler for setting an attribute on the model instance.
1039 Returns:
1040 A handler for setting an attribute on the model instance. Used for memoization of the handler.
1041 Memoizing the handlers leads to a dramatic performance improvement in `__setattr__`
1042 Returns `None` when memoization is not safe, then the attribute is set directly.
1043 """
1044 cls = self.__class__
1045 if name in cls.__class_vars__:
1046 raise AttributeError(
1047 f'{name!r} is a ClassVar of `{cls.__name__}` and cannot be set on an instance. '
1048 f'If you want to set a value on the class, use `{cls.__name__}.{name} = value`.'
1049 )
1050 elif not _fields.is_valid_field_name(name):
1051 if (attribute := cls.__private_attributes__.get(name)) is not None:
1052 if hasattr(attribute, '__set__'):
1053 return lambda model, _name, val: attribute.__set__(model, val)
1054 else:
1055 return _SIMPLE_SETATTR_HANDLERS['private']
1056 else:
1057 _object_setattr(self, name, value)
1058 return None # Can not return memoized handler with possibly freeform attr names
1060 attr = getattr(cls, name, None)
1061 # NOTE: We currently special case properties and `cached_property`, but we might need
1062 # to generalize this to all data/non-data descriptors at some point. For non-data descriptors
1063 # (such as `cached_property`), it isn't obvious though. `cached_property` caches the value
1064 # to the instance's `__dict__`, but other non-data descriptors might do things differently.
1065 if isinstance(attr, cached_property):
1066 return _SIMPLE_SETATTR_HANDLERS['cached_property']
1068 _check_frozen(cls, name, value)
1070 # We allow properties to be set only on non frozen models for now (to match dataclasses).
1071 # This can be changed if it ever gets requested.
1072 if isinstance(attr, property):
1073 return lambda model, _name, val: attr.__set__(model, val)
1074 elif cls.model_config.get('validate_assignment'):
1075 return _SIMPLE_SETATTR_HANDLERS['validate_assignment']
1076 elif name not in cls.__pydantic_fields__:
1077 if cls.model_config.get('extra') != 'allow':
1078 # TODO - matching error
1079 raise ValueError(f'"{cls.__name__}" object has no field "{name}"')
1080 elif attr is None:
1081 # attribute does not exist, so put it in extra
1082 self.__pydantic_extra__[name] = value
1083 return None # Can not return memoized handler with possibly freeform attr names
1084 else:
1085 # attribute _does_ exist, and was not in extra, so update it
1086 return _SIMPLE_SETATTR_HANDLERS['extra_known']
1087 else:
1088 return _SIMPLE_SETATTR_HANDLERS['model_field']
1090 def __delattr__(self, item: str) -> Any:
1091 cls = self.__class__
1093 if item in self.__private_attributes__:
1094 attribute = self.__private_attributes__[item]
1095 if hasattr(attribute, '__delete__'):
1096 attribute.__delete__(self) # type: ignore
1097 return
1099 try:
1100 # Note: self.__pydantic_private__ cannot be None if self.__private_attributes__ has items
1101 del self.__pydantic_private__[item] # type: ignore
1102 return
1103 except KeyError as exc:
1104 raise AttributeError(f'{cls.__name__!r} object has no attribute {item!r}') from exc
1106 # Allow cached properties to be deleted (even if the class is frozen):
1107 attr = getattr(cls, item, None)
1108 if isinstance(attr, cached_property):
1109 return object.__delattr__(self, item)
1111 _check_frozen(cls, name=item, value=None)
1113 if item in self.__pydantic_fields__:
1114 object.__delattr__(self, item)
1115 elif self.__pydantic_extra__ is not None and item in self.__pydantic_extra__:
1116 del self.__pydantic_extra__[item]
1117 else:
1118 try:
1119 object.__delattr__(self, item)
1120 except AttributeError:
1121 raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}')
1123 # Because we make use of `@dataclass_transform()`, `__replace__` is already synthesized by
1124 # type checkers, so we define the implementation in this `if not TYPE_CHECKING:` block:
1125 def __replace__(self, **changes: Any) -> Self:
1126 return self.model_copy(update=changes)
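# --- Editor's example (not part of pydantic/main.py) ------------------------
# A minimal sketch of `__replace__`, which lets models work with
# `copy.replace()` on Python 3.13+, assuming the public pydantic v2 API;
# `Point` is a hypothetical model.
import sys
from pydantic import BaseModel

class Point(BaseModel):
    x: int
    y: int

p = Point(x=1, y=2)
assert p.model_copy(update={'y': 5}) == Point(x=1, y=5)
if sys.version_info >= (3, 13):
    from copy import replace
    assert replace(p, y=5) == Point(x=1, y=5)   # delegates to __replace__
# -----------------------------------------------------------------------------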
1128 def __getstate__(self) -> dict[Any, Any]:
1129 private = self.__pydantic_private__
1130 if private:
1131 private = {k: v for k, v in private.items() if v is not PydanticUndefined}
1132 return {
1133 '__dict__': self.__dict__,
1134 '__pydantic_extra__': self.__pydantic_extra__,
1135 '__pydantic_fields_set__': self.__pydantic_fields_set__,
1136 '__pydantic_private__': private,
1137 }
1139 def __setstate__(self, state: dict[Any, Any]) -> None:
1140 _object_setattr(self, '__pydantic_fields_set__', state.get('__pydantic_fields_set__', {}))
1141 _object_setattr(self, '__pydantic_extra__', state.get('__pydantic_extra__', {}))
1142 _object_setattr(self, '__pydantic_private__', state.get('__pydantic_private__', {}))
1143 _object_setattr(self, '__dict__', state.get('__dict__', {}))
1145 if not TYPE_CHECKING:
1147 def __eq__(self, other: Any) -> bool:
1148 if isinstance(other, BaseModel):
1149 # When comparing instances of generic types for equality, as long as all field values are equal,
1150 # only require their generic origin types to be equal, rather than exact type equality.
1151 # This prevents headaches like MyGeneric(x=1) != MyGeneric[Any](x=1).
1152 self_type = self.__pydantic_generic_metadata__['origin'] or self.__class__
1153 other_type = other.__pydantic_generic_metadata__['origin'] or other.__class__
1155 # Perform common checks first
1156 if not (
1157 self_type == other_type
1158 and getattr(self, '__pydantic_private__', None) == getattr(other, '__pydantic_private__', None)
1159 and self.__pydantic_extra__ == other.__pydantic_extra__
1160 ):
1161 return False
1163 # We only want to compare pydantic fields but ignoring fields is costly.
1164 # We'll perform a fast check first, and fallback only when needed
1165 # See GH-7444 and GH-7825 for rationale and a performance benchmark
1167 # First, do the fast (and sometimes faulty) __dict__ comparison
1168 if self.__dict__ == other.__dict__:
1169 # If the check above passes, then pydantic fields are equal, we can return early
1170 return True
1172 # We don't want to trigger unnecessary costly filtering of __dict__ on all unequal objects, so we return
1173 # early if there are no keys to ignore (we would just return False later on anyway)
1174 model_fields = type(self).__pydantic_fields__.keys()
1175 if self.__dict__.keys() <= model_fields and other.__dict__.keys() <= model_fields:
1176 return False
1178 # If we reach here, there are non-pydantic-fields keys, mapped to unequal values, that we need to ignore
1179 # Resort to costly filtering of the __dict__ objects
1180 # We use operator.itemgetter because it is much faster than dict comprehensions
1181 # NOTE: Contrary to standard python class and instances, when the Model class has a default value for an
1182 # attribute and the model instance doesn't have a corresponding attribute, accessing the missing attribute
1183 # raises an error in BaseModel.__getattr__ instead of returning the class attribute
1184 # So we can use operator.itemgetter() instead of operator.attrgetter()
1185 getter = operator.itemgetter(*model_fields) if model_fields else lambda _: _utils._SENTINEL
1186 try:
1187 return getter(self.__dict__) == getter(other.__dict__)
1188 except KeyError:
1189 # In rare cases (such as when using the deprecated BaseModel.copy() method),
1190 # the __dict__ may not contain all model fields, which is how we can get here.
1191 # getter(self.__dict__) is much faster than any 'safe' method that accounts
1192 # for missing keys, and wrapping it in a `try` doesn't slow things down much
1193 # in the common case.
1194 self_fields_proxy = _utils.SafeGetItemProxy(self.__dict__)
1195 other_fields_proxy = _utils.SafeGetItemProxy(other.__dict__)
1196 return getter(self_fields_proxy) == getter(other_fields_proxy)
1198 # other instance is not a BaseModel
1199 else:
1200 return NotImplemented # delegate to the other item in the comparison
1202 if TYPE_CHECKING:
1203 # We put `__init_subclass__` in a TYPE_CHECKING block because, even though we want the type-checking benefits
1204 # described in the signature of `__init_subclass__` below, we don't want to modify the default behavior of
1205 # subclass initialization.
1207 def __init_subclass__(cls, **kwargs: Unpack[ConfigDict]):
1208 """This signature is included purely to help type-checkers check arguments to class declaration, which
1209 provides a way to conveniently set model_config key/value pairs.
1211 ```python
1212 from pydantic import BaseModel
1214 class MyModel(BaseModel, extra='allow'): ...
1215 ```
1217 However, this may be deceiving, since the _actual_ calls to `__init_subclass__` will not receive any
1218 of the config arguments, and will only receive any keyword arguments passed during class initialization
1219 that are _not_ expected keys in ConfigDict. (This is due to the way `ModelMetaclass.__new__` works.)
1221 Args:
1222 **kwargs: Keyword arguments passed to the class definition, which set model_config
1224 Note:
1225 You may want to override `__pydantic_init_subclass__` instead, which behaves similarly but is called
1226 *after* the class is fully initialized.
1227 """
1229 def __iter__(self) -> TupleGenerator:
1230 """So `dict(model)` works."""
1231 yield from [(k, v) for (k, v) in self.__dict__.items() if not k.startswith('_')]
1232 extra = self.__pydantic_extra__
1233 if extra:
1234 yield from extra.items()
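# --- Editor's example (not part of pydantic/main.py) ------------------------
# A minimal sketch of `__iter__`, which is what makes `dict(model)` work,
# assuming the public pydantic v2 API; `Inner`/`Outer` are hypothetical. Unlike
# `model_dump()`, iteration yields the raw field values without recursing.
from pydantic import BaseModel

class Inner(BaseModel):
    x: int

class Outer(BaseModel):
    inner: Inner

o = Outer(inner=Inner(x=1))
assert dict(o) == {'inner': Inner(x=1)}         # value is still a model instance
assert o.model_dump() == {'inner': {'x': 1}}    # recursively converted to dicts
# -----------------------------------------------------------------------------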
1236 def __repr__(self) -> str:
1237 return f'{self.__repr_name__()}({self.__repr_str__(", ")})'
1239 def __repr_args__(self) -> _repr.ReprArgs:
1240 # Eagerly create the repr of computed fields, as this may trigger access of cached properties and as such
1241 # modify the instance's `__dict__`. If we don't do it now, it could happen when iterating over the `__dict__`
1242 # below if the instance happens to be referenced in a field, and would modify the `__dict__` size *during* iteration.
1243 computed_fields_repr_args = [
1244 (k, getattr(self, k)) for k, v in self.__pydantic_computed_fields__.items() if v.repr
1245 ]
1247 for k, v in self.__dict__.items():
1248 field = self.__pydantic_fields__.get(k)
1249 if field and field.repr:
1250 if v is not self:
1251 yield k, v
1252 else:
1253 yield k, self.__repr_recursion__(v)
1254 # `__pydantic_extra__` can fail to be set if the model is not yet fully initialized.
1255 # This can happen if a `ValidationError` is raised during initialization and the instance's
1256 # repr is generated as part of the exception handling. Therefore, we use `getattr` here
1257 # with a fallback, even though the type hints indicate the attribute will always be present.
1258 try:
1259 pydantic_extra = object.__getattribute__(self, '__pydantic_extra__')
1260 except AttributeError:
1261 pydantic_extra = None
1263 if pydantic_extra is not None:
1264 yield from ((k, v) for k, v in pydantic_extra.items())
1265 yield from computed_fields_repr_args
1267 # take logic from `_repr.Representation` without the side effects of inheritance, see #5740
1268 __repr_name__ = _repr.Representation.__repr_name__
1269 __repr_recursion__ = _repr.Representation.__repr_recursion__
1270 __repr_str__ = _repr.Representation.__repr_str__
1271 __pretty__ = _repr.Representation.__pretty__
1272 __rich_repr__ = _repr.Representation.__rich_repr__
1274 def __str__(self) -> str:
1275 return self.__repr_str__(' ')
1277 # ##### Deprecated methods from v1 #####
1278 @property
1279 @typing_extensions.deprecated(
1280 'The `__fields__` attribute is deprecated, use the `model_fields` class property instead.', category=None
1281 )
1282 def __fields__(self) -> dict[str, FieldInfo]:
1283 warnings.warn(
1284 'The `__fields__` attribute is deprecated, use the `model_fields` class property instead.',
1285 category=PydanticDeprecatedSince20,
1286 stacklevel=2,
1287 )
1288 return getattr(type(self), '__pydantic_fields__', {})
1290 @property
1291 @typing_extensions.deprecated(
1292 'The `__fields_set__` attribute is deprecated, use `model_fields_set` instead.',
1293 category=None,
1294 )
1295 def __fields_set__(self) -> set[str]:
1296 warnings.warn(
1297 'The `__fields_set__` attribute is deprecated, use `model_fields_set` instead.',
1298 category=PydanticDeprecatedSince20,
1299 stacklevel=2,
1300 )
1301 return self.__pydantic_fields_set__
1303 @typing_extensions.deprecated('The `dict` method is deprecated; use `model_dump` instead.', category=None)
1304 def dict( # noqa: D102
1305 self,
1306 *,
1307 include: IncEx | None = None,
1308 exclude: IncEx | None = None,
1309 by_alias: bool = False,
1310 exclude_unset: bool = False,
1311 exclude_defaults: bool = False,
1312 exclude_none: bool = False,
1313 ) -> Dict[str, Any]: # noqa: UP006
1314 warnings.warn(
1315 'The `dict` method is deprecated; use `model_dump` instead.',
1316 category=PydanticDeprecatedSince20,
1317 stacklevel=2,
1318 )
1319 return self.model_dump(
1320 include=include,
1321 exclude=exclude,
1322 by_alias=by_alias,
1323 exclude_unset=exclude_unset,
1324 exclude_defaults=exclude_defaults,
1325 exclude_none=exclude_none,
1326 )
1328 @typing_extensions.deprecated('The `json` method is deprecated; use `model_dump_json` instead.', category=None)
1329 def json( # noqa: D102
1330 self,
1331 *,
1332 include: IncEx | None = None,
1333 exclude: IncEx | None = None,
1334 by_alias: bool = False,
1335 exclude_unset: bool = False,
1336 exclude_defaults: bool = False,
1337 exclude_none: bool = False,
1338 encoder: Callable[[Any], Any] | None = PydanticUndefined, # type: ignore[assignment]
1339 models_as_dict: bool = PydanticUndefined, # type: ignore[assignment]
1340 **dumps_kwargs: Any,
1341 ) -> str:
1342 warnings.warn(
1343 'The `json` method is deprecated; use `model_dump_json` instead.',
1344 category=PydanticDeprecatedSince20,
1345 stacklevel=2,
1346 )
1347 if encoder is not PydanticUndefined:
1348 raise TypeError('The `encoder` argument is no longer supported; use field serializers instead.')
1349 if models_as_dict is not PydanticUndefined:
1350 raise TypeError('The `models_as_dict` argument is no longer supported; use a model serializer instead.')
1351 if dumps_kwargs:
1352 raise TypeError('`dumps_kwargs` keyword arguments are no longer supported.')
1353 return self.model_dump_json(
1354 include=include,
1355 exclude=exclude,
1356 by_alias=by_alias,
1357 exclude_unset=exclude_unset,
1358 exclude_defaults=exclude_defaults,
1359 exclude_none=exclude_none,
1360 )
1362 @classmethod
1363 @typing_extensions.deprecated('The `parse_obj` method is deprecated; use `model_validate` instead.', category=None)
1364 def parse_obj(cls, obj: Any) -> Self: # noqa: D102
1365 warnings.warn(
1366 'The `parse_obj` method is deprecated; use `model_validate` instead.',
1367 category=PydanticDeprecatedSince20,
1368 stacklevel=2,
1369 )
1370 return cls.model_validate(obj)
1372 @classmethod
1373 @typing_extensions.deprecated(
1374 'The `parse_raw` method is deprecated; if your data is JSON use `model_validate_json`, '
1375 'otherwise load the data then use `model_validate` instead.',
1376 category=None,
1377 )
1378 def parse_raw( # noqa: D102
1379 cls,
1380 b: str | bytes,
1381 *,
1382 content_type: str | None = None,
1383 encoding: str = 'utf8',
1384 proto: DeprecatedParseProtocol | None = None,
1385 allow_pickle: bool = False,
1386 ) -> Self: # pragma: no cover
1387 warnings.warn(
1388 'The `parse_raw` method is deprecated; if your data is JSON use `model_validate_json`, '
1389 'otherwise load the data then use `model_validate` instead.',
1390 category=PydanticDeprecatedSince20,
1391 stacklevel=2,
1392 )
1393 from .deprecated import parse
1395 try:
1396 obj = parse.load_str_bytes(
1397 b,
1398 proto=proto,
1399 content_type=content_type,
1400 encoding=encoding,
1401 allow_pickle=allow_pickle,
1402 )
1403 except (ValueError, TypeError) as exc:
1404 import json
1406 # try to match V1
1407 if isinstance(exc, UnicodeDecodeError):
1408 type_str = 'value_error.unicodedecode'
1409 elif isinstance(exc, json.JSONDecodeError):
1410 type_str = 'value_error.jsondecode'
1411 elif isinstance(exc, ValueError):
1412 type_str = 'value_error'
1413 else:
1414 type_str = 'type_error'
1416 # ctx is missing here, but since we've added `input` to the error, we're not pretending it's the same
1417 error: pydantic_core.InitErrorDetails = {
1418 # The type: ignore on the next line is to ignore the requirement of LiteralString
1419 'type': pydantic_core.PydanticCustomError(type_str, str(exc)), # type: ignore
1420 'loc': ('__root__',),
1421 'input': b,
1422 }
1423 raise pydantic_core.ValidationError.from_exception_data(cls.__name__, [error])
1424 return cls.model_validate(obj)
1426 @classmethod
1427 @typing_extensions.deprecated(
1428 'The `parse_file` method is deprecated; load the data from file, then if your data is JSON '
1429 'use `model_validate_json`, otherwise `model_validate` instead.',
1430 category=None,
1431 )
1432 def parse_file( # noqa: D102
1433 cls,
1434 path: str | Path,
1435 *,
1436 content_type: str | None = None,
1437 encoding: str = 'utf8',
1438 proto: DeprecatedParseProtocol | None = None,
1439 allow_pickle: bool = False,
1440 ) -> Self:
1441 warnings.warn(
1442 'The `parse_file` method is deprecated; load the data from file, then if your data is JSON '
1443 'use `model_validate_json`, otherwise `model_validate` instead.',
1444 category=PydanticDeprecatedSince20,
1445 stacklevel=2,
1446 )
1447 from .deprecated import parse
1449 obj = parse.load_file(
1450 path,
1451 proto=proto,
1452 content_type=content_type,
1453 encoding=encoding,
1454 allow_pickle=allow_pickle,
1455 )
1456 return cls.parse_obj(obj)
1458 @classmethod
1459 @typing_extensions.deprecated(
1460 'The `from_orm` method is deprecated; set '
1461 "`model_config['from_attributes']=True` and use `model_validate` instead.",
1462 category=None,
1463 )
1464 def from_orm(cls, obj: Any) -> Self: # noqa: D102
1465 warnings.warn(
1466 'The `from_orm` method is deprecated; set '
1467 "`model_config['from_attributes']=True` and use `model_validate` instead.",
1468 category=PydanticDeprecatedSince20,
1469 stacklevel=2,
1470 )
1471 if not cls.model_config.get('from_attributes', None):
1472 raise PydanticUserError(
1473 'You must set the config attribute `from_attributes=True` to use from_orm', code=None
1474 )
1475 return cls.model_validate(obj)
1477 @classmethod
1478 @typing_extensions.deprecated('The `construct` method is deprecated; use `model_construct` instead.', category=None)
1479 def construct(cls, _fields_set: set[str] | None = None, **values: Any) -> Self: # noqa: D102
1480 warnings.warn(
1481 'The `construct` method is deprecated; use `model_construct` instead.',
1482 category=PydanticDeprecatedSince20,
1483 stacklevel=2,
1484 )
1485 return cls.model_construct(_fields_set=_fields_set, **values)
1487 @typing_extensions.deprecated(
1488 'The `copy` method is deprecated; use `model_copy` instead. '
1489 'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.',
1490 category=None,
1491 )
1492 def copy(
1493 self,
1494 *,
1495 include: AbstractSetIntStr | MappingIntStrAny | None = None,
1496 exclude: AbstractSetIntStr | MappingIntStrAny | None = None,
1497 update: Dict[str, Any] | None = None, # noqa: UP006
1498 deep: bool = False,
1499 ) -> Self: # pragma: no cover
1500 """Returns a copy of the model.
1502 !!! warning "Deprecated"
1503 This method is now deprecated; use `model_copy` instead.
1505 If you need `include` or `exclude`, use:
1507 ```python {test="skip" lint="skip"}
1508 data = self.model_dump(include=include, exclude=exclude, round_trip=True)
1509 data = {**data, **(update or {})}
1510 copied = self.model_validate(data)
1511 ```
1513 Args:
1514 include: Optional set or mapping specifying which fields to include in the copied model.
1515 exclude: Optional set or mapping specifying which fields to exclude in the copied model.
1516 update: Optional dictionary of field-value pairs to override field values in the copied model.
1517 deep: If True, the values of fields that are Pydantic models will be deep-copied.
1519 Returns:
1520 A copy of the model with included, excluded and updated fields as specified.
1521 """
1522 warnings.warn(
1523 'The `copy` method is deprecated; use `model_copy` instead. '
1524 'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.',
1525 category=PydanticDeprecatedSince20,
1526 stacklevel=2,
1527 )
1528 from .deprecated import copy_internals
1530 values = dict(
1531 copy_internals._iter(
1532 self, to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False
1533 ),
1534 **(update or {}),
1535 )
1536 if self.__pydantic_private__ is None:
1537 private = None
1538 else:
1539 private = {k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined}
1541 if self.__pydantic_extra__ is None:
1542 extra: dict[str, Any] | None = None
1543 else:
1544 extra = self.__pydantic_extra__.copy()
1545 for k in list(self.__pydantic_extra__):
1546 if k not in values: # k was in the exclude
1547 extra.pop(k)
1548 for k in list(values):
1549 if k in self.__pydantic_extra__: # k must have come from extra
1550 extra[k] = values.pop(k)
1552 # the new `__pydantic_fields_set__` may include fields that were unset on the original model but are given a value via the `update` kwarg
1553 if update:
1554 fields_set = self.__pydantic_fields_set__ | update.keys()
1555 else:
1556 fields_set = set(self.__pydantic_fields_set__)
1558 # removing excluded fields from `__pydantic_fields_set__`
1559 if exclude:
1560 fields_set -= set(exclude)
1562 return copy_internals._copy_and_set_values(self, values, fields_set, extra, private, deep=deep)
1564 @classmethod
1565 @typing_extensions.deprecated('The `schema` method is deprecated; use `model_json_schema` instead.', category=None)
1566 def schema( # noqa: D102
1567 cls, by_alias: bool = True, ref_template: str = DEFAULT_REF_TEMPLATE
1568 ) -> Dict[str, Any]: # noqa: UP006
1569 warnings.warn(
1570 'The `schema` method is deprecated; use `model_json_schema` instead.',
1571 category=PydanticDeprecatedSince20,
1572 stacklevel=2,
1573 )
1574 return cls.model_json_schema(by_alias=by_alias, ref_template=ref_template)
1576 @classmethod
1577 @typing_extensions.deprecated(
1578 'The `schema_json` method is deprecated; use `model_json_schema` and json.dumps instead.',
1579 category=None,
1580 )
1581 def schema_json( # noqa: D102
1582 cls, *, by_alias: bool = True, ref_template: str = DEFAULT_REF_TEMPLATE, **dumps_kwargs: Any
1583 ) -> str: # pragma: no cover
1584 warnings.warn(
1585 'The `schema_json` method is deprecated; use `model_json_schema` and json.dumps instead.',
1586 category=PydanticDeprecatedSince20,
1587 stacklevel=2,
1588 )
1589 import json
1591 from .deprecated.json import pydantic_encoder
1593 return json.dumps(
1594 cls.model_json_schema(by_alias=by_alias, ref_template=ref_template),
1595 default=pydantic_encoder,
1596 **dumps_kwargs,
1597 )
1599 @classmethod
1600 @typing_extensions.deprecated('The `validate` method is deprecated; use `model_validate` instead.', category=None)
1601 def validate(cls, value: Any) -> Self: # noqa: D102
1602 warnings.warn(
1603 'The `validate` method is deprecated; use `model_validate` instead.',
1604 category=PydanticDeprecatedSince20,
1605 stacklevel=2,
1606 )
1607 return cls.model_validate(value)
1609 @classmethod
1610 @typing_extensions.deprecated(
1611 'The `update_forward_refs` method is deprecated; use `model_rebuild` instead.',
1612 category=None,
1613 )
1614 def update_forward_refs(cls, **localns: Any) -> None: # noqa: D102
1615 warnings.warn(
1616 'The `update_forward_refs` method is deprecated; use `model_rebuild` instead.',
1617 category=PydanticDeprecatedSince20,
1618 stacklevel=2,
1619 )
1620 if localns: # pragma: no cover
1621 raise TypeError('`localns` arguments are no longer accepted.')
1622 cls.model_rebuild(force=True)
1624 @typing_extensions.deprecated(
1625 'The private method `_iter` will be removed and should no longer be used.', category=None
1626 )
1627 def _iter(self, *args: Any, **kwargs: Any) -> Any:
1628 warnings.warn(
1629 'The private method `_iter` will be removed and should no longer be used.',
1630 category=PydanticDeprecatedSince20,
1631 stacklevel=2,
1632 )
1633 from .deprecated import copy_internals
1635 return copy_internals._iter(self, *args, **kwargs)
1637 @typing_extensions.deprecated(
1638 'The private method `_copy_and_set_values` will be removed and should no longer be used.',
1639 category=None,
1640 )
1641 def _copy_and_set_values(self, *args: Any, **kwargs: Any) -> Any:
1642 warnings.warn(
1643 'The private method `_copy_and_set_values` will be removed and should no longer be used.',
1644 category=PydanticDeprecatedSince20,
1645 stacklevel=2,
1646 )
1647 from .deprecated import copy_internals
1649 return copy_internals._copy_and_set_values(self, *args, **kwargs)
1651 @classmethod
1652 @typing_extensions.deprecated(
1653 'The private method `_get_value` will be removed and should no longer be used.',
1654 category=None,
1655 )
1656 def _get_value(cls, *args: Any, **kwargs: Any) -> Any:
1657 warnings.warn(
1658 'The private method `_get_value` will be removed and should no longer be used.',
1659 category=PydanticDeprecatedSince20,
1660 stacklevel=2,
1661 )
1662 from .deprecated import copy_internals
1664 return copy_internals._get_value(cls, *args, **kwargs)
1666 @typing_extensions.deprecated(
1667 'The private method `_calculate_keys` will be removed and should no longer be used.',
1668 category=None,
1669 )
1670 def _calculate_keys(self, *args: Any, **kwargs: Any) -> Any:
1671 warnings.warn(
1672 'The private method `_calculate_keys` will be removed and should no longer be used.',
1673 category=PydanticDeprecatedSince20,
1674 stacklevel=2,
1675 )
1676 from .deprecated import copy_internals
1678 return copy_internals._calculate_keys(self, *args, **kwargs)
1681ModelT = TypeVar('ModelT', bound=BaseModel)
1684@overload
1685def create_model(
1686 model_name: str,
1687 /,
1688 *,
1689 __config__: ConfigDict | None = None,
1690 __doc__: str | None = None,
1691 __base__: None = None,
1692 __module__: str = __name__,
1693 __validators__: dict[str, Callable[..., Any]] | None = None,
1694 __cls_kwargs__: dict[str, Any] | None = None,
1695 __qualname__: str | None = None,
1696 **field_definitions: Any | tuple[str, Any],
1697) -> type[BaseModel]: ...
1700@overload
1701def create_model(
1702 model_name: str,
1703 /,
1704 *,
1705 __config__: ConfigDict | None = None,
1706 __doc__: str | None = None,
1707 __base__: type[ModelT] | tuple[type[ModelT], ...],
1708 __module__: str = __name__,
1709 __validators__: dict[str, Callable[..., Any]] | None = None,
1710 __cls_kwargs__: dict[str, Any] | None = None,
1711 __qualname__: str | None = None,
1712 **field_definitions: Any | tuple[str, Any],
1713) -> type[ModelT]: ...
1716def create_model( # noqa: C901
1717 model_name: str,
1718 /,
1719 *,
1720 __config__: ConfigDict | None = None,
1721 __doc__: str | None = None,
1722 __base__: type[ModelT] | tuple[type[ModelT], ...] | None = None,
1723 __module__: str | None = None,
1724 __validators__: dict[str, Callable[..., Any]] | None = None,
1725 __cls_kwargs__: dict[str, Any] | None = None,
1726 __qualname__: str | None = None,
1727 # TODO PEP 747: replace `Any` by the TypeForm:
1728 **field_definitions: Any | tuple[str, Any],
1729) -> type[ModelT]:
1730 """!!! abstract "Usage Documentation"
1731 [Dynamic Model Creation](../concepts/models.md#dynamic-model-creation)
1733 Dynamically creates and returns a new Pydantic model; in other words, `create_model` dynamically creates a
1734 subclass of [`BaseModel`][pydantic.BaseModel].
1736 Args:
1737 model_name: The name of the newly created model.
1738 __config__: The configuration of the new model.
1739 __doc__: The docstring of the new model.
1740 __base__: The base class or classes for the new model.
1741 __module__: The name of the module that the model belongs to;
1742 if `None`, the value is taken from `sys._getframe(1)`.
1743 __validators__: A dictionary of methods that validate fields. The keys are the names of the validation methods to
1744 be added to the model, and the values are the validation methods themselves. You can read more about functional
1745 validators [here](https://docs.pydantic.dev/2.9/concepts/validators/#field-validators).
1746 __cls_kwargs__: A dictionary of keyword arguments for class creation, such as `metaclass`.
1747 __qualname__: The qualified name of the newly created model.
1748 **field_definitions: Field definitions of the new model. Either:
1750 - a single element, representing the type annotation of the field.
1751 - a two-tuple, the first element being the type and the second element the assigned value
1752 (either a default or the [`Field()`][pydantic.Field] function). See the example below.
1754 Returns:
1755 The new [model][pydantic.BaseModel].
1757 Raises:
1758 PydanticUserError: If `__base__` and `__config__` are both passed.
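Example (a minimal sketch; the model and field names are illustrative):

```python
from pydantic import Field, create_model

DynamicModel = create_model(
    'DynamicModel',
    name=(str, ...),                     # two-tuple: type and the required marker
    age=(int, Field(default=18, ge=0)),  # two-tuple: type and a `Field()` value
    nickname=str,                        # single element: the type annotation only
)

m = DynamicModel(name='Ada', nickname='ada')  # `age` falls back to its default of 18
```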
1759 """
1760 if __base__ is None:
1761 __base__ = (cast('type[ModelT]', BaseModel),)
1762 elif not isinstance(__base__, tuple):
1763 __base__ = (__base__,)
1765 __cls_kwargs__ = __cls_kwargs__ or {}
1767 fields: dict[str, Any] = {}
1768 annotations: dict[str, Any] = {}
1770 for f_name, f_def in field_definitions.items():
1771 if isinstance(f_def, tuple):
1772 if len(f_def) != 2:
1773 raise PydanticUserError(
1774 f'Field definition for {f_name!r} should be a single element representing the type, or a two-tuple, the first element '
1775 'being the type and the second element the assigned value (either a default or the `Field()` function).',
1776 code='create-model-field-definitions',
1777 )
1779 annotations[f_name] = f_def[0]
1780 fields[f_name] = f_def[1]
1781 else:
1782 annotations[f_name] = f_def
1784 if __module__ is None:
1785 f = sys._getframe(1)
1786 __module__ = f.f_globals['__name__']
1788 namespace: dict[str, Any] = {'__annotations__': annotations, '__module__': __module__}
1789 if __doc__:
1790 namespace['__doc__'] = __doc__
1791 if __qualname__ is not None:
1792 namespace['__qualname__'] = __qualname__
1793 if __validators__:
1794 namespace.update(__validators__)
1795 namespace.update(fields)
1796 if __config__:
1797 namespace['model_config'] = __config__
1798 resolved_bases = types.resolve_bases(__base__)
1799 meta, ns, kwds = types.prepare_class(model_name, resolved_bases, kwds=__cls_kwargs__)
1800 if resolved_bases is not __base__:
1801 ns['__orig_bases__'] = __base__
1802 namespace.update(ns)
1804 return meta(
1805 model_name,
1806 resolved_bases,
1807 namespace,
1808 __pydantic_reset_parent_namespace__=False,
1809 _create_model_module=__module__,
1810 **kwds,
1811 )
1814__getattr__ = getattr_migration(__name__)