Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/pydantic/main.py: 39%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

578 statements  

1"""Logic for creating models.""" 

2 

3# Because `dict` is in the local namespace of the `BaseModel` class, we use `Dict` for annotations. 

4# TODO v3 fallback to `dict` when the deprecated `dict` method gets removed. 

5# ruff: noqa: UP035 

6 

7from __future__ import annotations as _annotations 

8 

9import operator 

10import sys 

11import types 

12import typing 

13import warnings 

14from collections.abc import Generator, Mapping 

15from copy import copy, deepcopy 

16from functools import cached_property 

17from typing import ( 

18 TYPE_CHECKING, 

19 Any, 

20 Callable, 

21 ClassVar, 

22 Dict, 

23 Literal, 

24 TypeVar, 

25 Union, 

26 cast, 

27 overload, 

28) 

29 

30import pydantic_core 

31import typing_extensions 

32from pydantic_core import PydanticUndefined, ValidationError 

33from typing_extensions import Self, TypeAlias, Unpack 

34 

35from . import PydanticDeprecatedSince20, PydanticDeprecatedSince211 

36from ._internal import ( 

37 _config, 

38 _decorators, 

39 _fields, 

40 _forward_ref, 

41 _generics, 

42 _mock_val_ser, 

43 _model_construction, 

44 _namespace_utils, 

45 _repr, 

46 _typing_extra, 

47 _utils, 

48) 

49from ._migration import getattr_migration 

50from .aliases import AliasChoices, AliasPath 

51from .annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler 

52from .config import ConfigDict 

53from .errors import PydanticUndefinedAnnotation, PydanticUserError 

54from .json_schema import DEFAULT_REF_TEMPLATE, GenerateJsonSchema, JsonSchemaMode, JsonSchemaValue, model_json_schema 

55from .plugin._schema_validator import PluggableSchemaValidator 

56 

57if TYPE_CHECKING: 

58 from inspect import Signature 

59 from pathlib import Path 

60 

61 from pydantic_core import CoreSchema, SchemaSerializer, SchemaValidator 

62 

63 from ._internal._namespace_utils import MappingNamespace 

64 from ._internal._utils import AbstractSetIntStr, MappingIntStrAny 

65 from .deprecated.parse import Protocol as DeprecatedParseProtocol 

66 from .fields import ComputedFieldInfo, FieldInfo, ModelPrivateAttr 

67else: 

68 # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915 

69 # and https://youtrack.jetbrains.com/issue/PY-51428 

70 DeprecationWarning = PydanticDeprecatedSince20 

71 

__all__ = 'BaseModel', 'create_model'

# Keep these type aliases available at runtime:
TupleGenerator: TypeAlias = Generator[tuple[str, Any], None, None]
# NOTE: In reality, `bool` should be replaced by `Literal[True]` but mypy fails to correctly apply bidirectional
# type inference (e.g. when using `{'a': {'b': True}}`):
# NOTE: Keep this type alias in sync with the stub definition in `pydantic-core`:
IncEx: TypeAlias = Union[set[int], set[str], Mapping[int, Union['IncEx', bool]], Mapping[str, Union['IncEx', bool]]]

# Shortcut for bypassing a model's custom `__setattr__` (writes directly via `object.__setattr__`).
_object_setattr = _model_construction.object_setattr

82 

83 

def _check_frozen(model_cls: type[BaseModel], name: str, value: Any) -> None:
    """Raise a `ValidationError` if assigning `value` to `name` is forbidden by frozen rules.

    No-op when neither the model config nor the targeted field is frozen.
    """
    if model_cls.model_config.get('frozen'):
        error_kind = 'frozen_instance'
    else:
        field_info = model_cls.__pydantic_fields__.get(name)
        if not getattr(field_info, 'frozen', False):
            return
        error_kind = 'frozen_field'

    raise ValidationError.from_exception_data(
        model_cls.__name__, [{'type': error_kind, 'loc': (name,), 'input': value}]
    )

95 

96 

97def _model_field_setattr_handler(model: BaseModel, name: str, val: Any) -> None: 

98 model.__dict__[name] = val 

99 model.__pydantic_fields_set__.add(name) 

100 

101 

102def _private_setattr_handler(model: BaseModel, name: str, val: Any) -> None: 

103 if getattr(model, '__pydantic_private__', None) is None: 

104 # While the attribute should be present at this point, this may not be the case if 

105 # users do unusual stuff with `model_post_init()` (which is where the `__pydantic_private__` 

106 # is initialized, by wrapping the user-defined `model_post_init()`), e.g. if they mock 

107 # the `model_post_init()` call. Ideally we should find a better way to init private attrs. 

108 object.__setattr__(model, '__pydantic_private__', {}) 

109 model.__pydantic_private__[name] = val # pyright: ignore[reportOptionalSubscript] 

110 

111 

# Memoized `__setattr__` strategies, keyed by the handler name cached in
# `BaseModel.__pydantic_setattr_handlers__`. Each callable performs one kind of
# attribute assignment on a model instance.
_SIMPLE_SETATTR_HANDLERS: Mapping[str, Callable[[BaseModel, str, Any], None]] = {
    # Plain field write: store in `__dict__` and mark the field as set.
    'model_field': _model_field_setattr_handler,
    # Route the assignment through the model's validator.
    'validate_assignment': lambda model, name, val: model.__pydantic_validator__.validate_assignment(model, name, val),  # pyright: ignore[reportAssignmentType]
    # Private attribute write, stored in `__pydantic_private__`.
    'private': _private_setattr_handler,
    # Overwrite a cached-property value directly in `__dict__`.
    'cached_property': lambda model, name, val: model.__dict__.__setitem__(name, val),
    # Known extra attribute: bypass the custom `__setattr__` machinery entirely.
    'extra_known': lambda model, name, val: _object_setattr(model, name, val),
}

119 

120 

121class BaseModel(metaclass=_model_construction.ModelMetaclass): 

122 """!!! abstract "Usage Documentation" 

123 [Models](../concepts/models.md) 

124 

125 A base class for creating Pydantic models. 

126 

127 Attributes: 

128 __class_vars__: The names of the class variables defined on the model. 

129 __private_attributes__: Metadata about the private attributes of the model. 

130 __signature__: The synthesized `__init__` [`Signature`][inspect.Signature] of the model. 

131 

132 __pydantic_complete__: Whether model building is completed, or if there are still undefined fields. 

133 __pydantic_core_schema__: The core schema of the model. 

134 __pydantic_custom_init__: Whether the model has a custom `__init__` function. 

135 __pydantic_decorators__: Metadata containing the decorators defined on the model. 

136 This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1. 

137 __pydantic_generic_metadata__: Metadata for generic models; contains data used for a similar purpose to 

138 __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these. 

139 __pydantic_parent_namespace__: Parent namespace of the model, used for automatic rebuilding of models. 

140 __pydantic_post_init__: The name of the post-init method for the model, if defined. 

141 __pydantic_root_model__: Whether the model is a [`RootModel`][pydantic.root_model.RootModel]. 

142 __pydantic_serializer__: The `pydantic-core` `SchemaSerializer` used to dump instances of the model. 

143 __pydantic_validator__: The `pydantic-core` `SchemaValidator` used to validate instances of the model. 

144 

145 __pydantic_fields__: A dictionary of field names and their corresponding [`FieldInfo`][pydantic.fields.FieldInfo] objects. 

146 __pydantic_computed_fields__: A dictionary of computed field names and their corresponding [`ComputedFieldInfo`][pydantic.fields.ComputedFieldInfo] objects. 

147 

148 __pydantic_extra__: A dictionary containing extra values, if [`extra`][pydantic.config.ConfigDict.extra] 

149 is set to `'allow'`. 

150 __pydantic_fields_set__: The names of fields explicitly set during instantiation. 

151 __pydantic_private__: Values of private attributes set on the model instance. 

152 """ 

153 

154 # Note: Many of the below class vars are defined in the metaclass, but we define them here for type checking purposes. 

155 

156 model_config: ClassVar[ConfigDict] = ConfigDict() 

157 """ 

158 Configuration for the model, should be a dictionary conforming to [`ConfigDict`][pydantic.config.ConfigDict]. 

159 """ 

160 

161 __class_vars__: ClassVar[set[str]] 

162 """The names of the class variables defined on the model.""" 

163 

164 __private_attributes__: ClassVar[Dict[str, ModelPrivateAttr]] # noqa: UP006 

165 """Metadata about the private attributes of the model.""" 

166 

167 __signature__: ClassVar[Signature] 

168 """The synthesized `__init__` [`Signature`][inspect.Signature] of the model.""" 

169 

170 __pydantic_complete__: ClassVar[bool] = False 

171 """Whether model building is completed, or if there are still undefined fields.""" 

172 

173 __pydantic_core_schema__: ClassVar[CoreSchema] 

174 """The core schema of the model.""" 

175 

176 __pydantic_custom_init__: ClassVar[bool] 

177 """Whether the model has a custom `__init__` method.""" 

178 

179 # Must be set for `GenerateSchema.model_schema` to work for a plain `BaseModel` annotation. 

180 __pydantic_decorators__: ClassVar[_decorators.DecoratorInfos] = _decorators.DecoratorInfos() 

181 """Metadata containing the decorators defined on the model. 

182 This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1.""" 

183 

184 __pydantic_generic_metadata__: ClassVar[_generics.PydanticGenericMetadata] 

185 """Metadata for generic models; contains data used for a similar purpose to 

186 __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these.""" 

187 

188 __pydantic_parent_namespace__: ClassVar[Dict[str, Any] | None] = None # noqa: UP006 

189 """Parent namespace of the model, used for automatic rebuilding of models.""" 

190 

191 __pydantic_post_init__: ClassVar[None | Literal['model_post_init']] 

192 """The name of the post-init method for the model, if defined.""" 

193 

194 __pydantic_root_model__: ClassVar[bool] = False 

195 """Whether the model is a [`RootModel`][pydantic.root_model.RootModel].""" 

196 

197 __pydantic_serializer__: ClassVar[SchemaSerializer] 

198 """The `pydantic-core` `SchemaSerializer` used to dump instances of the model.""" 

199 

200 __pydantic_validator__: ClassVar[SchemaValidator | PluggableSchemaValidator] 

201 """The `pydantic-core` `SchemaValidator` used to validate instances of the model.""" 

202 

203 __pydantic_fields__: ClassVar[Dict[str, FieldInfo]] # noqa: UP006 

204 """A dictionary of field names and their corresponding [`FieldInfo`][pydantic.fields.FieldInfo] objects. 

205 This replaces `Model.__fields__` from Pydantic V1. 

206 """ 

207 

208 __pydantic_setattr_handlers__: ClassVar[Dict[str, Callable[[BaseModel, str, Any], None]]] # noqa: UP006 

209 """`__setattr__` handlers. Memoizing the handlers leads to a dramatic performance improvement in `__setattr__`""" 

210 

211 __pydantic_computed_fields__: ClassVar[Dict[str, ComputedFieldInfo]] # noqa: UP006 

212 """A dictionary of computed field names and their corresponding [`ComputedFieldInfo`][pydantic.fields.ComputedFieldInfo] objects.""" 

213 

214 __pydantic_extra__: dict[str, Any] | None = _model_construction.NoInitField(init=False) 

215 """A dictionary containing extra values, if [`extra`][pydantic.config.ConfigDict.extra] is set to `'allow'`.""" 

216 

217 __pydantic_fields_set__: set[str] = _model_construction.NoInitField(init=False) 

218 """The names of fields explicitly set during instantiation.""" 

219 

220 __pydantic_private__: dict[str, Any] | None = _model_construction.NoInitField(init=False) 

221 """Values of private attributes set on the model instance.""" 

222 

223 if not TYPE_CHECKING: 

224 # Prevent `BaseModel` from being instantiated directly 

225 # (defined in an `if not TYPE_CHECKING` block for clarity and to avoid type checking errors): 

226 __pydantic_core_schema__ = _mock_val_ser.MockCoreSchema( 

227 'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly', 

228 code='base-model-instantiated', 

229 ) 

230 __pydantic_validator__ = _mock_val_ser.MockValSer( 

231 'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly', 

232 val_or_ser='validator', 

233 code='base-model-instantiated', 

234 ) 

235 __pydantic_serializer__ = _mock_val_ser.MockValSer( 

236 'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly', 

237 val_or_ser='serializer', 

238 code='base-model-instantiated', 

239 ) 

240 

241 __slots__ = '__dict__', '__pydantic_fields_set__', '__pydantic_extra__', '__pydantic_private__' 

242 

def __init__(self, /, **data: Any) -> None:
    """Build a new model by parsing and validating the keyword arguments.

    Raises [`ValidationError`][pydantic_core.ValidationError] if the input data cannot be
    validated to form a valid model.

    `self` is explicitly positional-only so that models may define a field named `self`.
    """
    # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
    __tracebackhide__ = True
    result = self.__pydantic_validator__.validate_python(data, self_instance=self)
    if result is not self:
        warnings.warn(
            'A custom validator is returning a value other than `self`.\n'
            "Returning anything other than `self` from a top level model validator isn't supported when validating via `__init__`.\n"
            'See the `model_validator` docs (https://docs.pydantic.dev/latest/concepts/validators/#model-validators) for more details.',
            stacklevel=2,
        )

# Flag checked elsewhere to detect whether a subclass has overridden `__init__`.
__init__.__pydantic_base_init__ = True  # pyright: ignore[reportFunctionMemberAccess]

264 

@_utils.deprecated_instance_property
@classmethod
def model_fields(cls) -> dict[str, FieldInfo]:
    """Mapping from field name to its [`FieldInfo`][pydantic.fields.FieldInfo] instance.

    !!! warning
        Accessing this attribute from a model instance is deprecated, and will not work in Pydantic V3.
        Instead, you should access this attribute from the model class.
    """
    try:
        return cls.__pydantic_fields__
    except AttributeError:
        # Fields may not be populated yet (e.g. during class construction).
        return {}

275 

@_utils.deprecated_instance_property
@classmethod
def model_computed_fields(cls) -> dict[str, ComputedFieldInfo]:
    """Mapping from computed field name to its [`ComputedFieldInfo`][pydantic.fields.ComputedFieldInfo] instance.

    !!! warning
        Accessing this attribute from a model instance is deprecated, and will not work in Pydantic V3.
        Instead, you should access this attribute from the model class.
    """
    try:
        return cls.__pydantic_computed_fields__
    except AttributeError:
        # Computed fields may not be populated yet (e.g. during class construction).
        return {}

286 

@property
def model_extra(self) -> dict[str, Any] | None:
    """The extra field values captured during validation.

    Returns:
        A dictionary of extra fields, or `None` if `config.extra` is not set to `"allow"`.
    """
    extra_values = self.__pydantic_extra__
    return extra_values

295 

@property
def model_fields_set(self) -> set[str]:
    """The names of fields that were explicitly set on this model instance.

    Returns:
        A set of strings representing the fields that have been set,
        i.e. that were not filled from defaults.
    """
    explicitly_set = self.__pydantic_fields_set__
    return explicitly_set

305 

@classmethod
def model_construct(cls, _fields_set: set[str] | None = None, **values: Any) -> Self:  # noqa: C901
    """Creates a new instance of the `Model` class with validated data.

    Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data.
    Default values are respected, but no other validation is performed.

    !!! note
        `model_construct()` generally respects the `model_config.extra` setting on the provided model.
        That is, if `model_config.extra == 'allow'`, then all extra passed values are added to the model instance's `__dict__`
        and `__pydantic_extra__` fields. If `model_config.extra == 'ignore'` (the default), then all extra passed values are ignored.
        Because no validation is performed with a call to `model_construct()`, having `model_config.extra == 'forbid'` does not result in
        an error if extra values are passed, but they will be ignored.

    Args:
        _fields_set: A set of field names that were originally explicitly set during instantiation. If provided,
            this is directly used for the [`model_fields_set`][pydantic.BaseModel.model_fields_set] attribute.
            Otherwise, the field names from the `values` argument will be used.
        values: Trusted or pre-validated data dictionary.

    Returns:
        A new instance of the `Model` class with validated data.
    """
    m = cls.__new__(cls)
    fields_values: dict[str, Any] = {}
    fields_set = set()

    # Resolve each declared field from `values`: the field's alias is tried first, then its
    # validation alias(es), then the plain field name. Matched keys are popped from `values`
    # so whatever remains can be treated as extra data below.
    for name, field in cls.__pydantic_fields__.items():
        if field.alias is not None and field.alias in values:
            fields_values[name] = values.pop(field.alias)
            fields_set.add(name)

        if (name not in fields_set) and (field.validation_alias is not None):
            # `AliasChoices` contributes several candidate aliases; a plain alias contributes one.
            validation_aliases: list[str | AliasPath] = (
                field.validation_alias.choices
                if isinstance(field.validation_alias, AliasChoices)
                else [field.validation_alias]
            )

            for alias in validation_aliases:
                if isinstance(alias, str) and alias in values:
                    fields_values[name] = values.pop(alias)
                    fields_set.add(name)
                    break
                elif isinstance(alias, AliasPath):
                    # NOTE: `AliasPath` matches are looked up, not popped, from `values`.
                    value = alias.search_dict_for_path(values)
                    if value is not PydanticUndefined:
                        fields_values[name] = value
                        fields_set.add(name)
                        break

        if name not in fields_set:
            if name in values:
                fields_values[name] = values.pop(name)
                fields_set.add(name)
            elif not field.is_required():
                # Fall back to the field default; the default factory may consume
                # the already-resolved `fields_values`.
                fields_values[name] = field.get_default(call_default_factory=True, validated_data=fields_values)
    if _fields_set is None:
        _fields_set = fields_set

    # Keys still left in `values` become extras when extra='allow'; otherwise they are dropped.
    _extra: dict[str, Any] | None = values if cls.model_config.get('extra') == 'allow' else None
    _object_setattr(m, '__dict__', fields_values)
    _object_setattr(m, '__pydantic_fields_set__', _fields_set)
    if not cls.__pydantic_root_model__:
        _object_setattr(m, '__pydantic_extra__', _extra)

    if cls.__pydantic_post_init__:
        m.model_post_init(None)
        # update private attributes with values set
        if hasattr(m, '__pydantic_private__') and m.__pydantic_private__ is not None:
            for k, v in values.items():
                if k in m.__private_attributes__:
                    m.__pydantic_private__[k] = v

    elif not cls.__pydantic_root_model__:
        # Note: if there are any private attributes, cls.__pydantic_post_init__ would exist
        # Since it doesn't, that means that `__pydantic_private__` should be set to None
        _object_setattr(m, '__pydantic_private__', None)

    return m

386 

def model_copy(self, *, update: Mapping[str, Any] | None = None, deep: bool = False) -> Self:
    """!!! abstract "Usage Documentation"
        [`model_copy`](../concepts/serialization.md#model_copy)

    Return a copy of this model, optionally overriding some attribute values.

    !!! note
        The underlying instance's [`__dict__`][object.__dict__] attribute is copied. This
        might have unexpected side effects if you store anything in it, on top of the model
        fields (e.g. the value of [cached properties][functools.cached_property]).

    Args:
        update: Values to change/add in the new model. Note: the data is not validated
            before creating the new model. You should trust this data.
        deep: Set to `True` to make a deep copy of the model.

    Returns:
        New model instance.
    """
    new_model = self.__deepcopy__() if deep else self.__copy__()
    if not update:
        return new_model

    if self.model_config.get('extra') != 'allow':
        new_model.__dict__.update(update)
    else:
        # Known fields go into `__dict__`; everything else becomes an extra value.
        for key, value in update.items():
            if key in self.__pydantic_fields__:
                new_model.__dict__[key] = value
                continue
            if new_model.__pydantic_extra__ is None:
                new_model.__pydantic_extra__ = {}
            new_model.__pydantic_extra__[key] = value
    new_model.__pydantic_fields_set__.update(update.keys())
    return new_model

420 

def model_dump(
    self,
    *,
    mode: Literal['json', 'python'] | str = 'python',
    include: IncEx | None = None,
    exclude: IncEx | None = None,
    context: Any | None = None,
    by_alias: bool | None = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    round_trip: bool = False,
    warnings: bool | Literal['none', 'warn', 'error'] = True,
    fallback: Callable[[Any], Any] | None = None,
    serialize_as_any: bool = False,
) -> dict[str, Any]:
    """!!! abstract "Usage Documentation"
        [`model_dump`](../concepts/serialization.md#modelmodel_dump)

    Produce a dictionary representation of the model, optionally restricting the fields emitted.

    Args:
        mode: The mode in which `to_python` should run. In `'json'` mode the output contains
            only JSON-serializable types; in `'python'` mode it may contain arbitrary Python objects.
        include: A set of fields to include in the output.
        exclude: A set of fields to exclude from the output.
        context: Additional context to pass to the serializer.
        by_alias: Whether to use the field's alias in the dictionary key if defined.
        exclude_unset: Whether to exclude fields that have not been explicitly set.
        exclude_defaults: Whether to exclude fields that are set to their default value.
        exclude_none: Whether to exclude fields that have a value of `None`.
        round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
        warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
            "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError].
        fallback: A function to call when an unknown value is encountered. If not provided,
            a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised.
        serialize_as_any: Whether to serialize fields with duck-typing serialization behavior.

    Returns:
        A dictionary representation of the model.
    """
    # Collect the options once and forward them to the pydantic-core serializer.
    dump_options: dict[str, Any] = dict(
        mode=mode,
        by_alias=by_alias,
        include=include,
        exclude=exclude,
        context=context,
        exclude_unset=exclude_unset,
        exclude_defaults=exclude_defaults,
        exclude_none=exclude_none,
        round_trip=round_trip,
        warnings=warnings,
        fallback=fallback,
        serialize_as_any=serialize_as_any,
    )
    return self.__pydantic_serializer__.to_python(self, **dump_options)

478 

def model_dump_json(
    self,
    *,
    indent: int | None = None,
    include: IncEx | None = None,
    exclude: IncEx | None = None,
    context: Any | None = None,
    by_alias: bool | None = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    round_trip: bool = False,
    warnings: bool | Literal['none', 'warn', 'error'] = True,
    fallback: Callable[[Any], Any] | None = None,
    serialize_as_any: bool = False,
) -> str:
    """!!! abstract "Usage Documentation"
        [`model_dump_json`](../concepts/serialization.md#modelmodel_dump_json)

    Produce a JSON string representation of the model via Pydantic's `to_json` method.

    Args:
        indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
        include: Field(s) to include in the JSON output.
        exclude: Field(s) to exclude from the JSON output.
        context: Additional context to pass to the serializer.
        by_alias: Whether to serialize using field aliases.
        exclude_unset: Whether to exclude fields that have not been explicitly set.
        exclude_defaults: Whether to exclude fields that are set to their default value.
        exclude_none: Whether to exclude fields that have a value of `None`.
        round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
        warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors,
            "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError].
        fallback: A function to call when an unknown value is encountered. If not provided,
            a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError] error is raised.
        serialize_as_any: Whether to serialize fields with duck-typing serialization behavior.

    Returns:
        A JSON string representation of the model.
    """
    # Collect the options once, serialize to bytes, then decode to `str`.
    dump_options: dict[str, Any] = dict(
        indent=indent,
        include=include,
        exclude=exclude,
        context=context,
        by_alias=by_alias,
        exclude_unset=exclude_unset,
        exclude_defaults=exclude_defaults,
        exclude_none=exclude_none,
        round_trip=round_trip,
        warnings=warnings,
        fallback=fallback,
        serialize_as_any=serialize_as_any,
    )
    return self.__pydantic_serializer__.to_json(self, **dump_options).decode()

534 

@classmethod
def model_json_schema(
    cls,
    by_alias: bool = True,
    ref_template: str = DEFAULT_REF_TEMPLATE,
    schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
    mode: JsonSchemaMode = 'validation',
) -> dict[str, Any]:
    """Generate the JSON schema for this model class.

    Args:
        by_alias: Whether to use attribute aliases or not.
        ref_template: The reference template.
        schema_generator: To override the logic used to generate the JSON schema, as a subclass of
            `GenerateJsonSchema` with your desired modifications
        mode: The mode in which to generate the schema.

    Returns:
        The JSON schema for the given model class.
    """
    # Delegates to the module-level `model_json_schema` helper.
    return model_json_schema(
        cls,
        by_alias=by_alias,
        ref_template=ref_template,
        schema_generator=schema_generator,
        mode=mode,
    )

558 

@classmethod
def model_parametrized_name(cls, params: tuple[type[Any], ...]) -> str:
    """Compute the class name for parametrizations of generic classes.

    This method can be overridden to achieve a custom naming scheme for generic BaseModels.

    Args:
        params: Tuple of types of the class. Given a generic class
            `Model` with 2 type variables and a concrete model `Model[str, int]`,
            the value `(str, int)` would be passed to `params`.

    Returns:
        String representing the new class where `params` are passed to `cls` as type variables.

    Raises:
        TypeError: Raised when trying to generate concrete names for non-generic models.
    """
    if not issubclass(cls, typing.Generic):
        raise TypeError('Concrete names should only be generated for generic models.')

    # Strings represent forward references and are rendered verbatim; other params are
    # rendered via `_repr.display_as_type`. If forward refs are ever wrapped in a
    # `ForwardRef` in `__class_getitem__`, this special case may go away.
    rendered = ', '.join(p if isinstance(p, str) else _repr.display_as_type(p) for p in params)
    return f'{cls.__name__}[{rendered}]'

585 

def model_post_init(self, context: Any, /) -> None:
    """Hook invoked after `__init__` and `model_construct` complete.

    Override this to perform additional initialization that requires the entire
    model to be initialized. The base implementation does nothing.
    """

591 

@classmethod
def model_rebuild(
    cls,
    *,
    force: bool = False,
    raise_errors: bool = True,
    _parent_namespace_depth: int = 2,
    _types_namespace: MappingNamespace | None = None,
) -> bool | None:
    """Try to rebuild the pydantic-core schema for the model.

    This may be necessary when one of the annotations is a ForwardRef which could not be resolved during
    the initial attempt to build the schema, and automatic rebuilding fails.

    Args:
        force: Whether to force the rebuilding of the model schema, defaults to `False`.
        raise_errors: Whether to raise errors, defaults to `True`.
        _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
        _types_namespace: The types namespace, defaults to `None`.

    Returns:
        Returns `None` if the schema is already "complete" and rebuilding was not required.
        If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`.
    """
    if not force and cls.__pydantic_complete__:
        # Schema is already built; nothing to do unless the caller forces a rebuild.
        return None

    for attr in ('__pydantic_core_schema__', '__pydantic_validator__', '__pydantic_serializer__'):
        if attr in cls.__dict__ and not isinstance(getattr(cls, attr), _mock_val_ser.MockValSer):
            # Deleting the validator/serializer is necessary as otherwise they can get reused in
            # pydantic-core. We do so only if they aren't mock instances, otherwise — as `model_rebuild()`
            # isn't thread-safe — concurrent model instantiations can lead to the parent validator being used.
            # Same applies for the core schema that can be reused in schema generation.
            delattr(cls, attr)

    cls.__pydantic_complete__ = False

    # Build the namespace used to resolve forward references: an explicit `_types_namespace`
    # wins; otherwise the caller's frame namespace is captured at `_parent_namespace_depth`.
    if _types_namespace is not None:
        rebuild_ns = _types_namespace
    elif _parent_namespace_depth > 0:
        rebuild_ns = _typing_extra.parent_frame_namespace(parent_depth=_parent_namespace_depth, force=True) or {}
    else:
        rebuild_ns = {}

    # The namespace stored at class-creation time; entries from `rebuild_ns` take
    # lower precedence than these (parent namespace entries overwrite on merge).
    parent_ns = _model_construction.unpack_lenient_weakvaluedict(cls.__pydantic_parent_namespace__) or {}

    ns_resolver = _namespace_utils.NsResolver(
        parent_namespace={**rebuild_ns, **parent_ns},
    )

    if not cls.__pydantic_fields_complete__:
        # Field annotations failed to resolve earlier; retry them before schema generation.
        typevars_map = _generics.get_model_typevars_map(cls)
        try:
            cls.__pydantic_fields__ = _fields.rebuild_model_fields(
                cls,
                ns_resolver=ns_resolver,
                typevars_map=typevars_map,
            )
        except NameError as e:
            exc = PydanticUndefinedAnnotation.from_name_error(e)
            _mock_val_ser.set_model_mocks(cls, f'`{exc.name}`')
            if raise_errors:
                raise exc from e

        if not raise_errors and not cls.__pydantic_fields_complete__:
            # No need to continue with schema gen, it is guaranteed to fail
            return False

        assert cls.__pydantic_fields_complete__

    return _model_construction.complete_model_class(
        cls,
        _config.ConfigWrapper(cls.model_config, check=False),
        raise_errors=raise_errors,
        ns_resolver=ns_resolver,
    )

668 

@classmethod
def model_validate(
    cls,
    obj: Any,
    *,
    strict: bool | None = None,
    from_attributes: bool | None = None,
    context: Any | None = None,
    by_alias: bool | None = None,
    by_name: bool | None = None,
) -> Self:
    """Validate the given object as an instance of this model.

    Args:
        obj: The object to validate.
        strict: Whether to enforce types strictly.
        from_attributes: Whether to extract data from object attributes.
        context: Additional context to pass to the validator.
        by_alias: Whether to use the field's alias when validating against the provided input data.
        by_name: Whether to use the field's name when validating against the provided input data.

    Raises:
        ValidationError: If the object could not be validated.

    Returns:
        The validated model instance.
    """
    # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
    __tracebackhide__ = True

    # Disabling aliases while not enabling names would make every field unmatchable.
    if by_name is not True and by_alias is False:
        raise PydanticUserError(
            'At least one of `by_alias` or `by_name` must be set to True.',
            code='validate-by-alias-and-name-false',
        )

    return cls.__pydantic_validator__.validate_python(
        obj,
        strict=strict,
        from_attributes=from_attributes,
        context=context,
        by_alias=by_alias,
        by_name=by_name,
    )

708 

709 @classmethod 

710 def model_validate_json( 

711 cls, 

712 json_data: str | bytes | bytearray, 

713 *, 

714 strict: bool | None = None, 

715 context: Any | None = None, 

716 by_alias: bool | None = None, 

717 by_name: bool | None = None, 

718 ) -> Self: 

719 """!!! abstract "Usage Documentation" 

720 [JSON Parsing](../concepts/json.md#json-parsing) 

721 

722 Validate the given JSON data against the Pydantic model. 

723 

724 Args: 

725 json_data: The JSON data to validate. 

726 strict: Whether to enforce types strictly. 

727 context: Extra variables to pass to the validator. 

728 by_alias: Whether to use the field's alias when validating against the provided input data. 

729 by_name: Whether to use the field's name when validating against the provided input data. 

730 

731 Returns: 

732 The validated Pydantic model. 

733 

734 Raises: 

735 ValidationError: If `json_data` is not a JSON string or the object could not be validated. 

736 """ 

737 # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks 

738 __tracebackhide__ = True 

739 

740 if by_alias is False and by_name is not True: 

741 raise PydanticUserError( 

742 'At least one of `by_alias` or `by_name` must be set to True.', 

743 code='validate-by-alias-and-name-false', 

744 ) 

745 

746 return cls.__pydantic_validator__.validate_json( 

747 json_data, strict=strict, context=context, by_alias=by_alias, by_name=by_name 

748 ) 

749 

750 @classmethod 

751 def model_validate_strings( 

752 cls, 

753 obj: Any, 

754 *, 

755 strict: bool | None = None, 

756 context: Any | None = None, 

757 by_alias: bool | None = None, 

758 by_name: bool | None = None, 

759 ) -> Self: 

760 """Validate the given object with string data against the Pydantic model. 

761 

762 Args: 

763 obj: The object containing string data to validate. 

764 strict: Whether to enforce types strictly. 

765 context: Extra variables to pass to the validator. 

766 by_alias: Whether to use the field's alias when validating against the provided input data. 

767 by_name: Whether to use the field's name when validating against the provided input data. 

768 

769 Returns: 

770 The validated Pydantic model. 

771 """ 

772 # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks 

773 __tracebackhide__ = True 

774 

775 if by_alias is False and by_name is not True: 

776 raise PydanticUserError( 

777 'At least one of `by_alias` or `by_name` must be set to True.', 

778 code='validate-by-alias-and-name-false', 

779 ) 

780 

781 return cls.__pydantic_validator__.validate_strings( 

782 obj, strict=strict, context=context, by_alias=by_alias, by_name=by_name 

783 ) 

784 

785 @classmethod 

786 def __get_pydantic_core_schema__(cls, source: type[BaseModel], handler: GetCoreSchemaHandler, /) -> CoreSchema: 

787 # This warning is only emitted when calling `super().__get_pydantic_core_schema__` from a model subclass. 

788 # In the generate schema logic, this method (`BaseModel.__get_pydantic_core_schema__`) is special cased to 

789 # *not* be called if not overridden. 

790 warnings.warn( 

791 'The `__get_pydantic_core_schema__` method of the `BaseModel` class is deprecated. If you are calling ' 

792 '`super().__get_pydantic_core_schema__` when overriding the method on a Pydantic model, consider using ' 

793 '`handler(source)` instead. However, note that overriding this method on models can lead to unexpected ' 

794 'side effects.', 

795 PydanticDeprecatedSince211, 

796 stacklevel=2, 

797 ) 

798 # Logic copied over from `GenerateSchema._model_schema`: 

799 schema = cls.__dict__.get('__pydantic_core_schema__') 

800 if schema is not None and not isinstance(schema, _mock_val_ser.MockCoreSchema): 

801 return cls.__pydantic_core_schema__ 

802 

803 return handler(source) 

804 

805 @classmethod 

806 def __get_pydantic_json_schema__( 

807 cls, 

808 core_schema: CoreSchema, 

809 handler: GetJsonSchemaHandler, 

810 /, 

811 ) -> JsonSchemaValue: 

812 """Hook into generating the model's JSON schema. 

813 

814 Args: 

815 core_schema: A `pydantic-core` CoreSchema. 

816 You can ignore this argument and call the handler with a new CoreSchema, 

817 wrap this CoreSchema (`{'type': 'nullable', 'schema': current_schema}`), 

818 or just call the handler with the original schema. 

819 handler: Call into Pydantic's internal JSON schema generation. 

820 This will raise a `pydantic.errors.PydanticInvalidForJsonSchema` if JSON schema 

821 generation fails. 

822 Since this gets called by `BaseModel.model_json_schema` you can override the 

823 `schema_generator` argument to that function to change JSON schema generation globally 

824 for a type. 

825 

826 Returns: 

827 A JSON schema, as a Python object. 

828 """ 

829 return handler(core_schema) 

830 

831 @classmethod 

832 def __pydantic_init_subclass__(cls, **kwargs: Any) -> None: 

833 """This is intended to behave just like `__init_subclass__`, but is called by `ModelMetaclass` 

834 only after the class is actually fully initialized. In particular, attributes like `model_fields` will 

835 be present when this is called. 

836 

837 This is necessary because `__init_subclass__` will always be called by `type.__new__`, 

838 and it would require a prohibitively large refactor to the `ModelMetaclass` to ensure that 

839 `type.__new__` was called in such a manner that the class would already be sufficiently initialized. 

840 

841 This will receive the same `kwargs` that would be passed to the standard `__init_subclass__`, namely, 

842 any kwargs passed to the class definition that aren't used internally by pydantic. 

843 

844 Args: 

845 **kwargs: Any keyword arguments passed to the class definition that aren't used internally 

846 by pydantic. 

847 """ 

848 pass 

849 

    def __class_getitem__(
        cls, typevar_values: type[Any] | tuple[type[Any], ...]
    ) -> type[BaseModel] | _forward_ref.PydanticRecursiveRef:
        """Parametrize a generic model, e.g. `Model[int]`, returning a (cached) concrete submodel."""
        # Fast path: a previously-built parametrization may already be cached.
        cached = _generics.get_cached_generic_type_early(cls, typevar_values)
        if cached is not None:
            return cached

        # Guard against invalid parametrization targets before doing any work.
        if cls is BaseModel:
            raise TypeError('Type parameters should be placed on typing.Generic, not BaseModel')
        if not hasattr(cls, '__parameters__'):
            raise TypeError(f'{cls} cannot be parametrized because it does not inherit from typing.Generic')
        if not cls.__pydantic_generic_metadata__['parameters'] and typing.Generic not in cls.__bases__:
            raise TypeError(f'{cls} is not a generic class')

        # Normalize `Model[int]` (single argument) to the tuple form used below.
        if not isinstance(typevar_values, tuple):
            typevar_values = (typevar_values,)

        # For a model `class Model[T, U, V = int](BaseModel): ...` parametrized with `(str, bool)`,
        # this gives us `{T: str, U: bool, V: int}`:
        typevars_map = _generics.map_generic_model_arguments(cls, typevar_values)
        # We also update the provided args to use defaults values (`(str, bool)` becomes `(str, bool, int)`):
        typevar_values = tuple(v for v in typevars_map.values())

        if _utils.all_identical(typevars_map.keys(), typevars_map.values()) and typevars_map:
            submodel = cls  # if arguments are equal to parameters it's the same object
            _generics.set_cached_generic_type(cls, typevar_values, submodel)
        else:
            # Substitute the new type arguments into any args inherited from a generic parent.
            parent_args = cls.__pydantic_generic_metadata__['args']
            if not parent_args:
                args = typevar_values
            else:
                args = tuple(_generics.replace_types(arg, typevars_map) for arg in parent_args)

            origin = cls.__pydantic_generic_metadata__['origin'] or cls
            model_name = origin.model_parametrized_name(args)
            # Remaining (unresolved) type variables become the new model's parameters.
            params = tuple(
                {param: None for param in _generics.iter_contained_typevars(typevars_map.values())}
            )  # use dict as ordered set

            # Guard against infinite recursion for self-referencing generic models.
            with _generics.generic_recursion_self_type(origin, args) as maybe_self_type:
                # Re-check the cache now that args are fully resolved (late cache key).
                cached = _generics.get_cached_generic_type_late(cls, typevar_values, origin, args)
                if cached is not None:
                    return cached

                if maybe_self_type is not None:
                    return maybe_self_type

                # Attempt to rebuild the origin in case new types have been defined
                try:
                    # depth 2 gets you above this __class_getitem__ call.
                    # Note that we explicitly provide the parent ns, otherwise
                    # `model_rebuild` will use the parent ns no matter if it is the ns of a module.
                    # We don't want this here, as this has unexpected effects when a model
                    # is being parametrized during a forward annotation evaluation.
                    parent_ns = _typing_extra.parent_frame_namespace(parent_depth=2) or {}
                    origin.model_rebuild(_types_namespace=parent_ns)
                except PydanticUndefinedAnnotation:
                    # It's okay if it fails, it just means there are still undefined types
                    # that could be evaluated later.
                    pass

                submodel = _generics.create_generic_submodel(model_name, origin, args, params)

                _generics.set_cached_generic_type(cls, typevar_values, submodel, origin, args)

        return submodel

916 

917 def __copy__(self) -> Self: 

918 """Returns a shallow copy of the model.""" 

919 cls = type(self) 

920 m = cls.__new__(cls) 

921 _object_setattr(m, '__dict__', copy(self.__dict__)) 

922 _object_setattr(m, '__pydantic_extra__', copy(self.__pydantic_extra__)) 

923 _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__)) 

924 

925 if not hasattr(self, '__pydantic_private__') or self.__pydantic_private__ is None: 

926 _object_setattr(m, '__pydantic_private__', None) 

927 else: 

928 _object_setattr( 

929 m, 

930 '__pydantic_private__', 

931 {k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined}, 

932 ) 

933 

934 return m 

935 

936 def __deepcopy__(self, memo: dict[int, Any] | None = None) -> Self: 

937 """Returns a deep copy of the model.""" 

938 cls = type(self) 

939 m = cls.__new__(cls) 

940 _object_setattr(m, '__dict__', deepcopy(self.__dict__, memo=memo)) 

941 _object_setattr(m, '__pydantic_extra__', deepcopy(self.__pydantic_extra__, memo=memo)) 

942 # This next line doesn't need a deepcopy because __pydantic_fields_set__ is a set[str], 

943 # and attempting a deepcopy would be marginally slower. 

944 _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__)) 

945 

946 if not hasattr(self, '__pydantic_private__') or self.__pydantic_private__ is None: 

947 _object_setattr(m, '__pydantic_private__', None) 

948 else: 

949 _object_setattr( 

950 m, 

951 '__pydantic_private__', 

952 deepcopy({k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined}, memo=memo), 

953 ) 

954 

955 return m 

956 

    if not TYPE_CHECKING:
        # We put `__getattr__` in a non-TYPE_CHECKING block because otherwise, mypy allows arbitrary attribute access
        # The same goes for __setattr__ and __delattr__, see: https://github.com/pydantic/pydantic/issues/8643

        def __getattr__(self, item: str) -> Any:
            """Fallback lookup for private attributes and `extra='allow'` values not found on the instance."""
            private_attributes = object.__getattribute__(self, '__private_attributes__')
            if item in private_attributes:
                attribute = private_attributes[item]
                # Descriptor-style private attributes take precedence over stored values.
                if hasattr(attribute, '__get__'):
                    return attribute.__get__(self, type(self))  # type: ignore

                try:
                    # Note: self.__pydantic_private__ cannot be None if self.__private_attributes__ has items
                    return self.__pydantic_private__[item]  # type: ignore
                except KeyError as exc:
                    raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') from exc
            else:
                # `__pydantic_extra__` can fail to be set if the model is not yet fully initialized.
                # See `BaseModel.__repr_args__` for more details
                try:
                    pydantic_extra = object.__getattribute__(self, '__pydantic_extra__')
                except AttributeError:
                    pydantic_extra = None

                if pydantic_extra:
                    try:
                        return pydantic_extra[item]
                    except KeyError as exc:
                        raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') from exc
                else:
                    if hasattr(self.__class__, item):
                        return super().__getattribute__(item)  # Raises AttributeError if appropriate
                    else:
                        # this is the current error
                        raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}')

        def __setattr__(self, name: str, value: Any) -> None:
            """Set an attribute, dispatching through a per-name memoized handler for speed."""
            if (setattr_handler := self.__pydantic_setattr_handlers__.get(name)) is not None:
                setattr_handler(self, name, value)
            # if None is returned from _setattr_handler, the attribute was set directly
            elif (setattr_handler := self._setattr_handler(name, value)) is not None:
                setattr_handler(self, name, value)  # call here to not memo on possibly unknown fields
                self.__pydantic_setattr_handlers__[name] = setattr_handler  # memoize the handler for faster access

        def _setattr_handler(self, name: str, value: Any) -> Callable[[BaseModel, str, Any], None] | None:
            """Get a handler for setting an attribute on the model instance.

            Returns:
                A handler for setting an attribute on the model instance. Used for memoization of the handler.
                Memoizing the handlers leads to a dramatic performance improvement in `__setattr__`
                Returns `None` when memoization is not safe, then the attribute is set directly.
            """
            cls = self.__class__
            if name in cls.__class_vars__:
                raise AttributeError(
                    f'{name!r} is a ClassVar of `{cls.__name__}` and cannot be set on an instance. '
                    f'If you want to set a value on the class, use `{cls.__name__}.{name} = value`.'
                )
            elif not _fields.is_valid_field_name(name):
                # Underscore-prefixed names: either a declared private attribute or a free-form attribute.
                if (attribute := cls.__private_attributes__.get(name)) is not None:
                    if hasattr(attribute, '__set__'):
                        return lambda model, _name, val: attribute.__set__(model, val)
                    else:
                        return _SIMPLE_SETATTR_HANDLERS['private']
                else:
                    _object_setattr(self, name, value)
                    return None  # Can not return memoized handler with possibly freeform attr names

            attr = getattr(cls, name, None)
            # NOTE: We currently special case properties and `cached_property`, but we might need
            # to generalize this to all data/non-data descriptors at some point. For non-data descriptors
            # (such as `cached_property`), it isn't obvious though. `cached_property` caches the value
            # to the instance's `__dict__`, but other non-data descriptors might do things differently.
            if isinstance(attr, cached_property):
                return _SIMPLE_SETATTR_HANDLERS['cached_property']

            _check_frozen(cls, name, value)

            # We allow properties to be set only on non frozen models for now (to match dataclasses).
            # This can be changed if it ever gets requested.
            if isinstance(attr, property):
                return lambda model, _name, val: attr.__set__(model, val)
            elif cls.model_config.get('validate_assignment'):
                return _SIMPLE_SETATTR_HANDLERS['validate_assignment']
            elif name not in cls.__pydantic_fields__:
                if cls.model_config.get('extra') != 'allow':
                    # TODO - matching error
                    raise ValueError(f'"{cls.__name__}" object has no field "{name}"')
                elif attr is None:
                    # attribute does not exist, so put it in extra
                    self.__pydantic_extra__[name] = value
                    return None  # Can not return memoized handler with possibly freeform attr names
                else:
                    # attribute _does_ exist, and was not in extra, so update it
                    return _SIMPLE_SETATTR_HANDLERS['extra_known']
            else:
                return _SIMPLE_SETATTR_HANDLERS['model_field']

        def __delattr__(self, item: str) -> Any:
            """Delete an attribute, honoring private attributes, frozen config, and extra fields."""
            cls = self.__class__

            if item in self.__private_attributes__:
                attribute = self.__private_attributes__[item]
                # Descriptor-style private attributes handle their own deletion.
                if hasattr(attribute, '__delete__'):
                    attribute.__delete__(self)  # type: ignore
                    return

                try:
                    # Note: self.__pydantic_private__ cannot be None if self.__private_attributes__ has items
                    del self.__pydantic_private__[item]  # type: ignore
                    return
                except KeyError as exc:
                    raise AttributeError(f'{cls.__name__!r} object has no attribute {item!r}') from exc

            # Allow cached properties to be deleted (even if the class is frozen):
            attr = getattr(cls, item, None)
            if isinstance(attr, cached_property):
                return object.__delattr__(self, item)

            _check_frozen(cls, name=item, value=None)

            if item in self.__pydantic_fields__:
                object.__delattr__(self, item)
            elif self.__pydantic_extra__ is not None and item in self.__pydantic_extra__:
                del self.__pydantic_extra__[item]
            else:
                try:
                    object.__delattr__(self, item)
                except AttributeError:
                    raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}')

        # Because we make use of `@dataclass_transform()`, `__replace__` is already synthesized by
        # type checkers, so we define the implementation in this `if not TYPE_CHECKING:` block:
        def __replace__(self, **changes: Any) -> Self:
            """Support `copy.replace` (PEP 695-era protocol) via `model_copy`."""
            return self.model_copy(update=changes)

1092 

1093 def __getstate__(self) -> dict[Any, Any]: 

1094 private = self.__pydantic_private__ 

1095 if private: 

1096 private = {k: v for k, v in private.items() if v is not PydanticUndefined} 

1097 return { 

1098 '__dict__': self.__dict__, 

1099 '__pydantic_extra__': self.__pydantic_extra__, 

1100 '__pydantic_fields_set__': self.__pydantic_fields_set__, 

1101 '__pydantic_private__': private, 

1102 } 

1103 

1104 def __setstate__(self, state: dict[Any, Any]) -> None: 

1105 _object_setattr(self, '__pydantic_fields_set__', state.get('__pydantic_fields_set__', {})) 

1106 _object_setattr(self, '__pydantic_extra__', state.get('__pydantic_extra__', {})) 

1107 _object_setattr(self, '__pydantic_private__', state.get('__pydantic_private__', {})) 

1108 _object_setattr(self, '__dict__', state.get('__dict__', {})) 

1109 

    if not TYPE_CHECKING:

        def __eq__(self, other: Any) -> bool:
            """Compare two models by type, private state, extra fields, and field values."""
            if isinstance(other, BaseModel):
                # When comparing instances of generic types for equality, as long as all field values are equal,
                # only require their generic origin types to be equal, rather than exact type equality.
                # This prevents headaches like MyGeneric(x=1) != MyGeneric[Any](x=1).
                self_type = self.__pydantic_generic_metadata__['origin'] or self.__class__
                other_type = other.__pydantic_generic_metadata__['origin'] or other.__class__

                # Perform common checks first
                if not (
                    self_type == other_type
                    and getattr(self, '__pydantic_private__', None) == getattr(other, '__pydantic_private__', None)
                    and self.__pydantic_extra__ == other.__pydantic_extra__
                ):
                    return False

                # We only want to compare pydantic fields but ignoring fields is costly.
                # We'll perform a fast check first, and fallback only when needed
                # See GH-7444 and GH-7825 for rationale and a performance benchmark

                # First, do the fast (and sometimes faulty) __dict__ comparison
                if self.__dict__ == other.__dict__:
                    # If the check above passes, then pydantic fields are equal, we can return early
                    return True

                # We don't want to trigger unnecessary costly filtering of __dict__ on all unequal objects, so we return
                # early if there are no keys to ignore (we would just return False later on anyway)
                model_fields = type(self).__pydantic_fields__.keys()
                if self.__dict__.keys() <= model_fields and other.__dict__.keys() <= model_fields:
                    return False

                # If we reach here, there are non-pydantic-fields keys, mapped to unequal values, that we need to ignore
                # Resort to costly filtering of the __dict__ objects
                # We use operator.itemgetter because it is much faster than dict comprehensions
                # NOTE: Contrary to standard python class and instances, when the Model class has a default value for an
                # attribute and the model instance doesn't have a corresponding attribute, accessing the missing attribute
                # raises an error in BaseModel.__getattr__ instead of returning the class attribute
                # So we can use operator.itemgetter() instead of operator.attrgetter()
                getter = operator.itemgetter(*model_fields) if model_fields else lambda _: _utils._SENTINEL
                try:
                    return getter(self.__dict__) == getter(other.__dict__)
                except KeyError:
                    # In rare cases (such as when using the deprecated BaseModel.copy() method),
                    # the __dict__ may not contain all model fields, which is how we can get here.
                    # getter(self.__dict__) is much faster than any 'safe' method that accounts
                    # for missing keys, and wrapping it in a `try` doesn't slow things down much
                    # in the common case.
                    self_fields_proxy = _utils.SafeGetItemProxy(self.__dict__)
                    other_fields_proxy = _utils.SafeGetItemProxy(other.__dict__)
                    return getter(self_fields_proxy) == getter(other_fields_proxy)

            # other instance is not a BaseModel
            else:
                return NotImplemented  # delegate to the other item in the comparison

1166 

    if TYPE_CHECKING:
        # We put `__init_subclass__` in a TYPE_CHECKING block because, even though we want the type-checking benefits
        # described in the signature of `__init_subclass__` below, we don't want to modify the default behavior of
        # subclass initialization.

        def __init_subclass__(cls, **kwargs: Unpack[ConfigDict]):
            """This signature is included purely to help type-checkers check arguments to class declaration, which
            provides a way to conveniently set model_config key/value pairs.

            ```python
            from pydantic import BaseModel

            class MyModel(BaseModel, extra='allow'): ...
            ```

            However, this may be deceiving, since the _actual_ calls to `__init_subclass__` will not receive any
            of the config arguments, and will only receive any keyword arguments passed during class initialization
            that are _not_ expected keys in ConfigDict. (This is due to the way `ModelMetaclass.__new__` works.)

            Args:
                **kwargs: Keyword arguments passed to the class definition, which set model_config

            Note:
                You may want to override `__pydantic_init_subclass__` instead, which behaves similarly but is called
                *after* the class is fully initialized.
            """

1194 def __iter__(self) -> TupleGenerator: 

1195 """So `dict(model)` works.""" 

1196 yield from [(k, v) for (k, v) in self.__dict__.items() if not k.startswith('_')] 

1197 extra = self.__pydantic_extra__ 

1198 if extra: 

1199 yield from extra.items() 

1200 

1201 def __repr__(self) -> str: 

1202 return f'{self.__repr_name__()}({self.__repr_str__(", ")})' 

1203 

    def __repr_args__(self) -> _repr.ReprArgs:
        """Yield `(name, value)` pairs used to build the model's repr: fields, extras, then computed fields."""
        # Eagerly create the repr of computed fields, as this may trigger access of cached properties and as such
        # modify the instance's `__dict__`. If we don't do it now, it could happen when iterating over the `__dict__`
        # below if the instance happens to be referenced in a field, and would modify the `__dict__` size *during* iteration.
        computed_fields_repr_args = [
            (k, getattr(self, k)) for k, v in self.__pydantic_computed_fields__.items() if v.repr
        ]

        for k, v in self.__dict__.items():
            field = self.__pydantic_fields__.get(k)
            if field and field.repr:
                if v is not self:
                    yield k, v
                else:
                    # Self-referencing value: emit a recursion placeholder instead of recursing forever.
                    yield k, self.__repr_recursion__(v)
        # `__pydantic_extra__` can fail to be set if the model is not yet fully initialized.
        # This can happen if a `ValidationError` is raised during initialization and the instance's
        # repr is generated as part of the exception handling. Therefore, we use `getattr` here
        # with a fallback, even though the type hints indicate the attribute will always be present.
        try:
            pydantic_extra = object.__getattribute__(self, '__pydantic_extra__')
        except AttributeError:
            pydantic_extra = None

        if pydantic_extra is not None:
            yield from ((k, v) for k, v in pydantic_extra.items())
        yield from computed_fields_repr_args

1231 

    # take logic from `_repr.Representation` without the side effects of inheritance, see #5740
    __repr_name__ = _repr.Representation.__repr_name__
    __repr_recursion__ = _repr.Representation.__repr_recursion__
    __repr_str__ = _repr.Representation.__repr_str__
    __pretty__ = _repr.Representation.__pretty__
    __rich_repr__ = _repr.Representation.__rich_repr__

    def __str__(self) -> str:
        # Same repr args as `__repr__`, but space-separated and without the class name.
        return self.__repr_str__(' ')

1241 

1242 # ##### Deprecated methods from v1 ##### 

1243 @property 

1244 @typing_extensions.deprecated( 

1245 'The `__fields__` attribute is deprecated, use `model_fields` instead.', category=None 

1246 ) 

1247 def __fields__(self) -> dict[str, FieldInfo]: 

1248 warnings.warn( 

1249 'The `__fields__` attribute is deprecated, use `model_fields` instead.', 

1250 category=PydanticDeprecatedSince20, 

1251 stacklevel=2, 

1252 ) 

1253 return getattr(type(self), '__pydantic_fields__', {}) 

1254 

1255 @property 

1256 @typing_extensions.deprecated( 

1257 'The `__fields_set__` attribute is deprecated, use `model_fields_set` instead.', 

1258 category=None, 

1259 ) 

1260 def __fields_set__(self) -> set[str]: 

1261 warnings.warn( 

1262 'The `__fields_set__` attribute is deprecated, use `model_fields_set` instead.', 

1263 category=PydanticDeprecatedSince20, 

1264 stacklevel=2, 

1265 ) 

1266 return self.__pydantic_fields_set__ 

1267 

1268 @typing_extensions.deprecated('The `dict` method is deprecated; use `model_dump` instead.', category=None) 

1269 def dict( # noqa: D102 

1270 self, 

1271 *, 

1272 include: IncEx | None = None, 

1273 exclude: IncEx | None = None, 

1274 by_alias: bool = False, 

1275 exclude_unset: bool = False, 

1276 exclude_defaults: bool = False, 

1277 exclude_none: bool = False, 

1278 ) -> Dict[str, Any]: # noqa UP006 

1279 warnings.warn( 

1280 'The `dict` method is deprecated; use `model_dump` instead.', 

1281 category=PydanticDeprecatedSince20, 

1282 stacklevel=2, 

1283 ) 

1284 return self.model_dump( 

1285 include=include, 

1286 exclude=exclude, 

1287 by_alias=by_alias, 

1288 exclude_unset=exclude_unset, 

1289 exclude_defaults=exclude_defaults, 

1290 exclude_none=exclude_none, 

1291 ) 

1292 

1293 @typing_extensions.deprecated('The `json` method is deprecated; use `model_dump_json` instead.', category=None) 

1294 def json( # noqa: D102 

1295 self, 

1296 *, 

1297 include: IncEx | None = None, 

1298 exclude: IncEx | None = None, 

1299 by_alias: bool = False, 

1300 exclude_unset: bool = False, 

1301 exclude_defaults: bool = False, 

1302 exclude_none: bool = False, 

1303 encoder: Callable[[Any], Any] | None = PydanticUndefined, # type: ignore[assignment] 

1304 models_as_dict: bool = PydanticUndefined, # type: ignore[assignment] 

1305 **dumps_kwargs: Any, 

1306 ) -> str: 

1307 warnings.warn( 

1308 'The `json` method is deprecated; use `model_dump_json` instead.', 

1309 category=PydanticDeprecatedSince20, 

1310 stacklevel=2, 

1311 ) 

1312 if encoder is not PydanticUndefined: 

1313 raise TypeError('The `encoder` argument is no longer supported; use field serializers instead.') 

1314 if models_as_dict is not PydanticUndefined: 

1315 raise TypeError('The `models_as_dict` argument is no longer supported; use a model serializer instead.') 

1316 if dumps_kwargs: 

1317 raise TypeError('`dumps_kwargs` keyword arguments are no longer supported.') 

1318 return self.model_dump_json( 

1319 include=include, 

1320 exclude=exclude, 

1321 by_alias=by_alias, 

1322 exclude_unset=exclude_unset, 

1323 exclude_defaults=exclude_defaults, 

1324 exclude_none=exclude_none, 

1325 ) 

1326 

1327 @classmethod 

1328 @typing_extensions.deprecated('The `parse_obj` method is deprecated; use `model_validate` instead.', category=None) 

1329 def parse_obj(cls, obj: Any) -> Self: # noqa: D102 

1330 warnings.warn( 

1331 'The `parse_obj` method is deprecated; use `model_validate` instead.', 

1332 category=PydanticDeprecatedSince20, 

1333 stacklevel=2, 

1334 ) 

1335 return cls.model_validate(obj) 

1336 

    @classmethod
    @typing_extensions.deprecated(
        'The `parse_raw` method is deprecated; if your data is JSON use `model_validate_json`, '
        'otherwise load the data then use `model_validate` instead.',
        category=None,
    )
    def parse_raw(  # noqa: D102
        cls,
        b: str | bytes,
        *,
        content_type: str | None = None,
        encoding: str = 'utf8',
        proto: DeprecatedParseProtocol | None = None,
        allow_pickle: bool = False,
    ) -> Self:  # pragma: no cover
        """Deprecated V1 loader: decode `b` per `content_type`/`proto`, then validate via `model_validate`."""
        warnings.warn(
            'The `parse_raw` method is deprecated; if your data is JSON use `model_validate_json`, '
            'otherwise load the data then use `model_validate` instead.',
            category=PydanticDeprecatedSince20,
            stacklevel=2,
        )
        from .deprecated import parse

        try:
            obj = parse.load_str_bytes(
                b,
                proto=proto,
                content_type=content_type,
                encoding=encoding,
                allow_pickle=allow_pickle,
            )
        except (ValueError, TypeError) as exc:
            import json

            # try to match V1
            # Map the decode failure onto a V1-style error type string, most specific first.
            if isinstance(exc, UnicodeDecodeError):
                type_str = 'value_error.unicodedecode'
            elif isinstance(exc, json.JSONDecodeError):
                type_str = 'value_error.jsondecode'
            elif isinstance(exc, ValueError):
                type_str = 'value_error'
            else:
                type_str = 'type_error'

            # ctx is missing here, but since we've added `input` to the error, we're not pretending it's the same
            error: pydantic_core.InitErrorDetails = {
                # The type: ignore on the next line is to ignore the requirement of LiteralString
                'type': pydantic_core.PydanticCustomError(type_str, str(exc)),  # type: ignore
                'loc': ('__root__',),
                'input': b,
            }
            raise pydantic_core.ValidationError.from_exception_data(cls.__name__, [error])
        return cls.model_validate(obj)

1390 

1391 @classmethod 

1392 @typing_extensions.deprecated( 

1393 'The `parse_file` method is deprecated; load the data from file, then if your data is JSON ' 

1394 'use `model_validate_json`, otherwise `model_validate` instead.', 

1395 category=None, 

1396 ) 

1397 def parse_file( # noqa: D102 

1398 cls, 

1399 path: str | Path, 

1400 *, 

1401 content_type: str | None = None, 

1402 encoding: str = 'utf8', 

1403 proto: DeprecatedParseProtocol | None = None, 

1404 allow_pickle: bool = False, 

1405 ) -> Self: 

1406 warnings.warn( 

1407 'The `parse_file` method is deprecated; load the data from file, then if your data is JSON ' 

1408 'use `model_validate_json`, otherwise `model_validate` instead.', 

1409 category=PydanticDeprecatedSince20, 

1410 stacklevel=2, 

1411 ) 

1412 from .deprecated import parse 

1413 

1414 obj = parse.load_file( 

1415 path, 

1416 proto=proto, 

1417 content_type=content_type, 

1418 encoding=encoding, 

1419 allow_pickle=allow_pickle, 

1420 ) 

1421 return cls.parse_obj(obj) 

1422 

1423 @classmethod 

1424 @typing_extensions.deprecated( 

1425 'The `from_orm` method is deprecated; set ' 

1426 "`model_config['from_attributes']=True` and use `model_validate` instead.", 

1427 category=None, 

1428 ) 

1429 def from_orm(cls, obj: Any) -> Self: # noqa: D102 

1430 warnings.warn( 

1431 'The `from_orm` method is deprecated; set ' 

1432 "`model_config['from_attributes']=True` and use `model_validate` instead.", 

1433 category=PydanticDeprecatedSince20, 

1434 stacklevel=2, 

1435 ) 

1436 if not cls.model_config.get('from_attributes', None): 

1437 raise PydanticUserError( 

1438 'You must set the config attribute `from_attributes=True` to use from_orm', code=None 

1439 ) 

1440 return cls.model_validate(obj) 

1441 

1442 @classmethod 

1443 @typing_extensions.deprecated('The `construct` method is deprecated; use `model_construct` instead.', category=None) 

1444 def construct(cls, _fields_set: set[str] | None = None, **values: Any) -> Self: # noqa: D102 

1445 warnings.warn( 

1446 'The `construct` method is deprecated; use `model_construct` instead.', 

1447 category=PydanticDeprecatedSince20, 

1448 stacklevel=2, 

1449 ) 

1450 return cls.model_construct(_fields_set=_fields_set, **values) 

1451 

    @typing_extensions.deprecated(
        'The `copy` method is deprecated; use `model_copy` instead. '
        'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.',
        category=None,
    )
    def copy(
        self,
        *,
        include: AbstractSetIntStr | MappingIntStrAny | None = None,
        exclude: AbstractSetIntStr | MappingIntStrAny | None = None,
        update: Dict[str, Any] | None = None,  # noqa UP006
        deep: bool = False,
    ) -> Self:  # pragma: no cover
        """Returns a copy of the model.

        !!! warning "Deprecated"
            This method is now deprecated; use `model_copy` instead.

        If you need `include` or `exclude`, use:

        ```python {test="skip" lint="skip"}
        data = self.model_dump(include=include, exclude=exclude, round_trip=True)
        data = {**data, **(update or {})}
        copied = self.model_validate(data)
        ```

        Args:
            include: Optional set or mapping specifying which fields to include in the copied model.
            exclude: Optional set or mapping specifying which fields to exclude in the copied model.
            update: Optional dictionary of field-value pairs to override field values in the copied model.
            deep: If True, the values of fields that are Pydantic models will be deep-copied.

        Returns:
            A copy of the model with included, excluded and updated fields as specified.
        """
        warnings.warn(
            'The `copy` method is deprecated; use `model_copy` instead. '
            'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.',
            category=PydanticDeprecatedSince20,
            stacklevel=2,
        )
        from .deprecated import copy_internals

        # Collect the field values that survive `include`/`exclude`, then layer the
        # `update` overrides on top of them.
        values = dict(
            copy_internals._iter(
                self, to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False
            ),
            **(update or {}),
        )
        # Private attributes: keep only entries that were actually set
        # (drop those still holding the `PydanticUndefined` sentinel).
        if self.__pydantic_private__ is None:
            private = None
        else:
            private = {k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined}

        # Extra fields need the same include/exclude/update treatment, but they live
        # in `__pydantic_extra__` rather than in regular model fields, so they are
        # moved out of `values` back into `extra` here.
        if self.__pydantic_extra__ is None:
            extra: dict[str, Any] | None = None
        else:
            extra = self.__pydantic_extra__.copy()
            for k in list(self.__pydantic_extra__):
                if k not in values:  # k was in the exclude
                    extra.pop(k)
            for k in list(values):
                if k in self.__pydantic_extra__:  # k must have come from extra
                    extra[k] = values.pop(k)

        # new `__pydantic_fields_set__` can have unset optional fields with a set value in `update` kwarg
        if update:
            fields_set = self.__pydantic_fields_set__ | update.keys()
        else:
            fields_set = set(self.__pydantic_fields_set__)

        # removing excluded fields from `__pydantic_fields_set__`
        if exclude:
            fields_set -= set(exclude)

        return copy_internals._copy_and_set_values(self, values, fields_set, extra, private, deep=deep)

1528 

1529 @classmethod 

1530 @typing_extensions.deprecated('The `schema` method is deprecated; use `model_json_schema` instead.', category=None) 

1531 def schema( # noqa: D102 

1532 cls, by_alias: bool = True, ref_template: str = DEFAULT_REF_TEMPLATE 

1533 ) -> Dict[str, Any]: # noqa UP006 

1534 warnings.warn( 

1535 'The `schema` method is deprecated; use `model_json_schema` instead.', 

1536 category=PydanticDeprecatedSince20, 

1537 stacklevel=2, 

1538 ) 

1539 return cls.model_json_schema(by_alias=by_alias, ref_template=ref_template) 

1540 

1541 @classmethod 

1542 @typing_extensions.deprecated( 

1543 'The `schema_json` method is deprecated; use `model_json_schema` and json.dumps instead.', 

1544 category=None, 

1545 ) 

1546 def schema_json( # noqa: D102 

1547 cls, *, by_alias: bool = True, ref_template: str = DEFAULT_REF_TEMPLATE, **dumps_kwargs: Any 

1548 ) -> str: # pragma: no cover 

1549 warnings.warn( 

1550 'The `schema_json` method is deprecated; use `model_json_schema` and json.dumps instead.', 

1551 category=PydanticDeprecatedSince20, 

1552 stacklevel=2, 

1553 ) 

1554 import json 

1555 

1556 from .deprecated.json import pydantic_encoder 

1557 

1558 return json.dumps( 

1559 cls.model_json_schema(by_alias=by_alias, ref_template=ref_template), 

1560 default=pydantic_encoder, 

1561 **dumps_kwargs, 

1562 ) 

1563 

1564 @classmethod 

1565 @typing_extensions.deprecated('The `validate` method is deprecated; use `model_validate` instead.', category=None) 

1566 def validate(cls, value: Any) -> Self: # noqa: D102 

1567 warnings.warn( 

1568 'The `validate` method is deprecated; use `model_validate` instead.', 

1569 category=PydanticDeprecatedSince20, 

1570 stacklevel=2, 

1571 ) 

1572 return cls.model_validate(value) 

1573 

1574 @classmethod 

1575 @typing_extensions.deprecated( 

1576 'The `update_forward_refs` method is deprecated; use `model_rebuild` instead.', 

1577 category=None, 

1578 ) 

1579 def update_forward_refs(cls, **localns: Any) -> None: # noqa: D102 

1580 warnings.warn( 

1581 'The `update_forward_refs` method is deprecated; use `model_rebuild` instead.', 

1582 category=PydanticDeprecatedSince20, 

1583 stacklevel=2, 

1584 ) 

1585 if localns: # pragma: no cover 

1586 raise TypeError('`localns` arguments are not longer accepted.') 

1587 cls.model_rebuild(force=True) 

1588 

1589 @typing_extensions.deprecated( 

1590 'The private method `_iter` will be removed and should no longer be used.', category=None 

1591 ) 

1592 def _iter(self, *args: Any, **kwargs: Any) -> Any: 

1593 warnings.warn( 

1594 'The private method `_iter` will be removed and should no longer be used.', 

1595 category=PydanticDeprecatedSince20, 

1596 stacklevel=2, 

1597 ) 

1598 from .deprecated import copy_internals 

1599 

1600 return copy_internals._iter(self, *args, **kwargs) 

1601 

1602 @typing_extensions.deprecated( 

1603 'The private method `_copy_and_set_values` will be removed and should no longer be used.', 

1604 category=None, 

1605 ) 

1606 def _copy_and_set_values(self, *args: Any, **kwargs: Any) -> Any: 

1607 warnings.warn( 

1608 'The private method `_copy_and_set_values` will be removed and should no longer be used.', 

1609 category=PydanticDeprecatedSince20, 

1610 stacklevel=2, 

1611 ) 

1612 from .deprecated import copy_internals 

1613 

1614 return copy_internals._copy_and_set_values(self, *args, **kwargs) 

1615 

1616 @classmethod 

1617 @typing_extensions.deprecated( 

1618 'The private method `_get_value` will be removed and should no longer be used.', 

1619 category=None, 

1620 ) 

1621 def _get_value(cls, *args: Any, **kwargs: Any) -> Any: 

1622 warnings.warn( 

1623 'The private method `_get_value` will be removed and should no longer be used.', 

1624 category=PydanticDeprecatedSince20, 

1625 stacklevel=2, 

1626 ) 

1627 from .deprecated import copy_internals 

1628 

1629 return copy_internals._get_value(cls, *args, **kwargs) 

1630 

1631 @typing_extensions.deprecated( 

1632 'The private method `_calculate_keys` will be removed and should no longer be used.', 

1633 category=None, 

1634 ) 

1635 def _calculate_keys(self, *args: Any, **kwargs: Any) -> Any: 

1636 warnings.warn( 

1637 'The private method `_calculate_keys` will be removed and should no longer be used.', 

1638 category=PydanticDeprecatedSince20, 

1639 stacklevel=2, 

1640 ) 

1641 from .deprecated import copy_internals 

1642 

1643 return copy_internals._calculate_keys(self, *args, **kwargs) 

1644 

1645 

# Type variable for the `create_model` overloads below: the concrete `BaseModel`
# subclass being created (bound so only model types are accepted as `__base__`).
ModelT = TypeVar('ModelT', bound=BaseModel)

1647 

1648 

# Overload: no explicit base class — the result is typed as a plain `BaseModel` subclass.
@overload
def create_model(
    model_name: str,
    /,
    *,
    __config__: ConfigDict | None = None,
    __doc__: str | None = None,
    __base__: None = None,
    __module__: str = __name__,
    __validators__: dict[str, Callable[..., Any]] | None = None,
    __cls_kwargs__: dict[str, Any] | None = None,
    **field_definitions: Any | tuple[str, Any],
) -> type[BaseModel]: ...

1662 

1663 

# Overload: explicit base class(es) given — the result is typed as a subclass of that base.
@overload
def create_model(
    model_name: str,
    /,
    *,
    __config__: ConfigDict | None = None,
    __doc__: str | None = None,
    __base__: type[ModelT] | tuple[type[ModelT], ...],
    __module__: str = __name__,
    __validators__: dict[str, Callable[..., Any]] | None = None,
    __cls_kwargs__: dict[str, Any] | None = None,
    **field_definitions: Any | tuple[str, Any],
) -> type[ModelT]: ...

1677 

1678 

def create_model(  # noqa: C901
    model_name: str,
    /,
    *,
    __config__: ConfigDict | None = None,
    __doc__: str | None = None,
    __base__: type[ModelT] | tuple[type[ModelT], ...] | None = None,
    __module__: str | None = None,
    __validators__: dict[str, Callable[..., Any]] | None = None,
    __cls_kwargs__: dict[str, Any] | None = None,
    # TODO PEP 747: replace `Any` by the TypeForm:
    **field_definitions: Any | tuple[str, Any],
) -> type[ModelT]:
    """!!! abstract "Usage Documentation"
        [Dynamic Model Creation](../concepts/models.md#dynamic-model-creation)

    Dynamically creates and returns a new Pydantic model, in other words, `create_model` dynamically creates a
    subclass of [`BaseModel`][pydantic.BaseModel].

    Args:
        model_name: The name of the newly created model.
        __config__: The configuration of the new model.
        __doc__: The docstring of the new model.
        __base__: The base class or classes for the new model.
        __module__: The name of the module that the model belongs to;
            if `None`, the value is taken from `sys._getframe(1)`
        __validators__: A dictionary of methods that validate fields. The keys are the names of the validation methods to
            be added to the model, and the values are the validation methods themselves. You can read more about functional
            validators [here](https://docs.pydantic.dev/2.9/concepts/validators/#field-validators).
        __cls_kwargs__: A dictionary of keyword arguments for class creation, such as `metaclass`.
        **field_definitions: Field definitions of the new model. Either:

            - a single element, representing the type annotation of the field.
            - a two-tuple, the first element being the type and the second element the assigned value
            (either a default or the [`Field()`][pydantic.Field] function).

    Returns:
        The new [model][pydantic.BaseModel].

    Raises:
        PydanticUserError: If `__base__` and `__config__` are both passed.
    """
    # Normalize `__base__` to a tuple of bases; default to `BaseModel` itself.
    if __base__ is None:
        __base__ = (cast('type[ModelT]', BaseModel),)
    elif not isinstance(__base__, tuple):
        __base__ = (__base__,)

    __cls_kwargs__ = __cls_kwargs__ or {}

    # Split each field definition into its annotation and (optional) assigned value.
    fields: dict[str, Any] = {}
    annotations: dict[str, Any] = {}

    for f_name, f_def in field_definitions.items():
        if isinstance(f_def, tuple):
            if len(f_def) != 2:
                raise PydanticUserError(
                    # Fixed grammar: the message previously read 'should a single element'.
                    f'Field definition for {f_name!r} should be a single element representing the type or a two-tuple, the first element '
                    'being the type and the second element the assigned value (either a default or the `Field()` function).',
                    code='create-model-field-definitions',
                )

            annotations[f_name] = f_def[0]
            fields[f_name] = f_def[1]
        else:
            annotations[f_name] = f_def

    # Default the module name to the caller's module so the model "belongs" there
    # (important for pickling and forward-reference resolution).
    if __module__ is None:
        f = sys._getframe(1)
        __module__ = f.f_globals['__name__']

    namespace: dict[str, Any] = {'__annotations__': annotations, '__module__': __module__}
    if __doc__:
        namespace.update({'__doc__': __doc__})
    if __validators__:
        namespace.update(__validators__)
    namespace.update(fields)
    if __config__:
        namespace['model_config'] = __config__
    # Build the class through the standard machinery so custom metaclasses and
    # `__mro_entries__` (e.g. generic aliases in `__base__`) are honored.
    resolved_bases = types.resolve_bases(__base__)
    meta, ns, kwds = types.prepare_class(model_name, resolved_bases, kwds=__cls_kwargs__)
    if resolved_bases is not __base__:
        ns['__orig_bases__'] = __base__
    namespace.update(ns)

    return meta(
        model_name,
        resolved_bases,
        namespace,
        __pydantic_reset_parent_namespace__=False,
        _create_model_module=__module__,
        **kwds,
    )

1771 

1772 

# Module-level `__getattr__` (PEP 562): routes attribute access for removed or
# relocated V1 names through the migration shim built by `getattr_migration`.
__getattr__ = getattr_migration(__name__)