Coverage for /pythoncovmergedfiles/medio/medio/src/pydantic/pydantic/main.py: 35%

449 statements  

« prev     ^ index     » next       coverage.py v7.2.3, created at 2023-04-27 07:38 +0000

1""" 

2Logic for creating models, could perhaps be renamed to `models.py`. 

3""" 

4from __future__ import annotations as _annotations 

5 

6import typing 

7import warnings 

8from abc import ABCMeta 

9from copy import copy, deepcopy 

10from inspect import getdoc 

11from pathlib import Path 

12from types import prepare_class, resolve_bases 

13from typing import Any, Callable, Generic, Mapping, Tuple, cast 

14 

15import pydantic_core 

16import typing_extensions 

17 

18from ._internal import ( 

19 _config, 

20 _decorators, 

21 _forward_ref, 

22 _generics, 

23 _model_construction, 

24 _repr, 

25 _typing_extra, 

26 _utils, 

27) 

28from ._internal._fields import Undefined 

29from ._migration import getattr_migration 

30from .config import ConfigDict 

31from .deprecated import copy_internals as _deprecated_copy_internals 

32from .deprecated import parse as _deprecated_parse 

33from .errors import PydanticUndefinedAnnotation, PydanticUserError 

34from .fields import ComputedFieldInfo, Field, FieldInfo, ModelPrivateAttr 

35from .json_schema import ( 

36 DEFAULT_REF_TEMPLATE, 

37 GenerateJsonSchema, 

38 GetJsonSchemaHandler, 

39 JsonSchemaValue, 

40 model_json_schema, 

41) 

42 

43if typing.TYPE_CHECKING: 

44 from inspect import Signature 

45 

46 from pydantic_core import CoreSchema, SchemaSerializer, SchemaValidator 

47 

48 from ._internal._utils import AbstractSetIntStr, MappingIntStrAny 

49 

50 AnyClassMethod = classmethod[Any, Any, Any] 

51 TupleGenerator = typing.Generator[Tuple[str, Any], None, None] 

52 Model = typing.TypeVar('Model', bound='BaseModel') 

53 # should be `set[int] | set[str] | dict[int, IncEx] | dict[str, IncEx] | None`, but mypy can't cope 

54 IncEx: typing_extensions.TypeAlias = 'set[int] | set[str] | dict[int, Any] | dict[str, Any] | None' 

55 

# Public API of this module.
__all__ = 'BaseModel', 'create_model'

# Raw `object.__setattr__`, used to bypass the overridden `BaseModel.__setattr__`
# (needed for slot-backed private attributes and pickle/copy support).
_object_setattr = _model_construction.object_setattr
# Note `ModelMetaclass` refers to `BaseModel`, but is also used to *create* `BaseModel`, so we need to add this extra
# (somewhat hacky) boolean to keep track of whether we've created the `BaseModel` class yet, and therefore whether it's
# safe to refer to it. If it *hasn't* been created, we assume that the `__new__` call we're in the middle of is for
# the `BaseModel` class, since that's defined immediately after the metaclass.
_base_class_defined = False

64 

65 

66class _ModelNamespaceDict(dict): # type: ignore[type-arg] 

67 """ 

68 Intercept attributes being set on model classes and warn about overriding of decorators (`@field_validator`, etc.) 

69 """ 

70 

71 def __setitem__(self, k: str, v: object) -> None: 

72 existing: Any = self.get(k, None) 

73 if existing and v is not existing and isinstance(existing, _decorators.PydanticDescriptorProxy): 

74 warnings.warn(f'`{k}` overrides an existing Pydantic `{existing.decorator_info.decorator_repr}` decorator') 

75 

76 return super().__setitem__(k, v) 

77 

78 

@typing_extensions.dataclass_transform(kw_only_default=True, field_specifiers=(Field,))
class ModelMetaclass(ABCMeta):
    def __new__(
        mcs,
        cls_name: str,
        bases: tuple[type[Any], ...],
        namespace: dict[str, Any],
        __pydantic_generic_metadata__: _generics.PydanticGenericMetadata | None = None,
        __pydantic_reset_parent_namespace__: bool = True,
        **kwargs: Any,
    ) -> type:
        # `_base_class_defined` is False only while `BaseModel` itself is being created
        # (see the module-level comment next to its definition).
        if _base_class_defined:
            # Fields, class vars and private attributes inherited from all base classes.
            base_field_names, class_vars, base_private_attributes = _collect_bases_data(bases)

            config_wrapper = _config.ConfigWrapper.for_model(bases, namespace, kwargs)
            namespace['model_config'] = config_wrapper.config_dict
            private_attributes = _model_construction.inspect_namespace(
                namespace, config_wrapper.ignored_types, class_vars, base_field_names
            )
            if private_attributes:
                # Private attributes are stored in slots, not in the model `__dict__`.
                slots: set[str] = set(namespace.get('__slots__', ()))
                namespace['__slots__'] = slots | private_attributes.keys()

                if 'model_post_init' in namespace:
                    # if there are private_attributes and a model_post_init function, we handle both
                    original_model_post_init = namespace['model_post_init']

                    def wrapped_model_post_init(self: BaseModel, __context: Any) -> None:
                        """
                        We need to both initialize private attributes and call the user-defined model_post_init method
                        """
                        _model_construction.init_private_attributes(self, __context)
                        original_model_post_init(self, __context)

                    namespace['model_post_init'] = wrapped_model_post_init
                else:
                    # No user hook: private attributes still need initializing after validation.
                    namespace['model_post_init'] = _model_construction.init_private_attributes

            namespace['__class_vars__'] = class_vars
            namespace['__private_attributes__'] = {**base_private_attributes, **private_attributes}

            if '__hash__' not in namespace and config_wrapper.frozen:
                # Frozen models are hashable by default; hash on class + field values.

                def hash_func(self: Any) -> int:
                    return hash(self.__class__) + hash(tuple(self.__dict__.values()))

                namespace['__hash__'] = hash_func

            cls: type[BaseModel] = super().__new__(mcs, cls_name, bases, namespace, **kwargs)  # type: ignore

            cls.__pydantic_decorators__ = _decorators.DecoratorInfos.build(cls)

            # Use the getattr below to grab the __parameters__ from the `typing.Generic` parent class
            if __pydantic_generic_metadata__:
                cls.__pydantic_generic_metadata__ = __pydantic_generic_metadata__
            else:
                parameters = getattr(cls, '__parameters__', ())
                parent_parameters = getattr(cls, '__pydantic_generic_metadata__', {}).get('parameters', ())
                if parameters and parent_parameters and not all(x in parameters for x in parent_parameters):
                    combined_parameters = parent_parameters + tuple(x for x in parameters if x not in parent_parameters)
                    parameters_str = ', '.join([str(x) for x in combined_parameters])
                    error_message = (
                        f'All parameters must be present on typing.Generic;'
                        f' you should inherit from typing.Generic[{parameters_str}]'
                    )
                    if Generic not in bases:  # pragma: no cover
                        # This branch will only be hit if I have misunderstood how `__parameters__` works.
                        # If that is the case, and a user hits this, I could imagine it being very helpful
                        # to have this extra detail in the reported traceback.
                        error_message += f' (bases={bases})'
                    raise TypeError(error_message)

                cls.__pydantic_generic_metadata__ = {
                    'origin': None,
                    'args': (),
                    'parameters': parameters,
                }

            cls.__pydantic_model_complete__ = False  # Ensure this specific class gets completed

            # preserve `__set_name__` protocol defined in https://peps.python.org/pep-0487
            # for attributes not in `new_namespace` (e.g. private attributes)
            for name, obj in private_attributes.items():
                set_name = getattr(obj, '__set_name__', None)
                if callable(set_name):
                    set_name(cls, name)

            if __pydantic_reset_parent_namespace__:
                # Capture the caller's frame locals so forward references can be resolved later.
                cls.__pydantic_parent_namespace__ = _typing_extra.parent_frame_namespace()
            parent_namespace = getattr(cls, '__pydantic_parent_namespace__', None)

            types_namespace = _typing_extra.get_cls_types_namespace(cls, parent_namespace)
            _model_construction.set_model_fields(cls, bases, types_namespace)
            # Build the core schema / validator / serializer; `raise_errors=False` defers
            # unresolved forward references until first use or `model_rebuild`.
            _model_construction.complete_model_class(
                cls,
                cls_name,
                config_wrapper,
                raise_errors=False,
                types_namespace=types_namespace,
            )
            # using super(cls, cls) on the next line ensures we only call the parent class's __pydantic_init_subclass__
            # I believe the `type: ignore` is only necessary because mypy doesn't realize that this code branch is
            # only hit for _proper_ subclasses of BaseModel
            super(cls, cls).__pydantic_init_subclass__(**kwargs)  # type: ignore[misc]
            return cls
        else:
            # this is the BaseModel class itself being created, no logic required
            return super().__new__(mcs, cls_name, bases, namespace, **kwargs)

    @classmethod
    def __prepare__(cls, *args: Any, **kwargs: Any) -> Mapping[str, object]:
        # Use a namespace dict that warns on decorator overrides (see `_ModelNamespaceDict`).
        return _ModelNamespaceDict()

    def __instancecheck__(self, instance: Any) -> bool:
        """
        Avoid calling ABC _abc_subclasscheck unless we're pretty sure.

        See #3829 and python/cpython#92810
        """
        return hasattr(instance, '__pydantic_validator__') and super().__instancecheck__(instance)

199 

200 

class BaseModel(_repr.Representation, metaclass=ModelMetaclass):
    if typing.TYPE_CHECKING:
        # populated by the metaclass, defined here to help IDEs only
        __pydantic_validator__: typing.ClassVar[SchemaValidator]
        __pydantic_core_schema__: typing.ClassVar[CoreSchema]
        __pydantic_serializer__: typing.ClassVar[SchemaSerializer]
        __pydantic_decorators__: typing.ClassVar[_decorators.DecoratorInfos]
        """metadata for `@validator`, `@root_validator` and `@serializer` decorators"""
        model_fields: typing.ClassVar[dict[str, FieldInfo]] = {}
        __signature__: typing.ClassVar[Signature]
        __private_attributes__: typing.ClassVar[dict[str, ModelPrivateAttr]]
        __class_vars__: typing.ClassVar[set[str]]
        # Instance attribute: names of fields explicitly provided at validation time.
        __pydantic_fields_set__: set[str] = set()
        __pydantic_generic_metadata__: typing.ClassVar[_generics.PydanticGenericMetadata]
        __pydantic_parent_namespace__: typing.ClassVar[dict[str, Any] | None]
    else:
        # `model_fields` and `__pydantic_decorators__` must be set for
        # pydantic._internal._generate_schema.GenerateSchema.model_schema to work for a plain BaseModel annotation
        model_fields = {}
        __pydantic_decorators__ = _decorators.DecoratorInfos()
        # `MockValidator` raises a helpful error if someone tries to use BaseModel directly.
        __pydantic_validator__ = _model_construction.MockValidator(
            'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly',
            code='base-model-instantiated',
        )

    model_config = ConfigDict()
    # `__dict__` must be listed explicitly because private attributes are slot-backed (see metaclass).
    __slots__ = '__dict__', '__pydantic_fields_set__'
    __doc__ = ''  # Null out the Representation docstring
    __pydantic_model_complete__ = False

230 

    def __init__(__pydantic_self__, **data: Any) -> None:  # type: ignore
        """
        Create a new model by parsing and validating input data from keyword arguments.

        Raises ValidationError if the input data cannot be parsed to form a valid model.

        Uses something other than `self` for the first arg to allow "self" as a field name.
        """
        # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
        __tracebackhide__ = True
        # Validation populates `__dict__` and `__pydantic_fields_set__` on this instance in place.
        __pydantic_self__.__pydantic_validator__.validate_python(data, self_instance=__pydantic_self__)

242 

243 @classmethod 

244 def __get_pydantic_core_schema__( 

245 cls, __source: type[BaseModel], __handler: Callable[[Any], CoreSchema] 

246 ) -> CoreSchema: 

247 """Hook into generating the model's CoreSchema. 

248 

249 Args: 

250 __source (type[BaseModel]): The class we are generating a schema for. 

251 This will generally be the same as the `cls` argument if this is a classmethod. 

252 __handler (GetJsonSchemaHandler): Call into Pydantic's internal JSON schema generation. 

253 A callable that calls into Pydantic's internal CoreSchema generation logic. 

254 

255 Returns: 

256 CoreSchema: A `pydantic-core` `CoreSchema`. 

257 """ 

258 # Only use the cached value from this _exact_ class; we don't want one from a parent class 

259 # This is why we check `cls.__dict__` and don't use `cls.__pydantic_core_schema__` or similar. 

260 if '__pydantic_core_schema__' in cls.__dict__: 

261 # Due to the way generic classes are built, it's possible that an invalid schema may be temporarily 

262 # set on generic classes. I think we could resolve this to ensure that we get proper schema caching 

263 # for generics, but for simplicity for now, we just always rebuild if the class has a generic origin. 

264 if not cls.__pydantic_generic_metadata__['origin']: 

265 return cls.__pydantic_core_schema__ 

266 

267 return __handler(__source) 

268 

    @classmethod
    def __get_pydantic_json_schema__(
        cls,
        __core_schema: CoreSchema,
        __handler: GetJsonSchemaHandler,
    ) -> JsonSchemaValue:
        """Hook into generating the model's JSON schema.

        Args:
            __core_schema (CoreSchema): A `pydantic-core` CoreSchema.
                You can ignore this argument and call the handler with a new CoreSchema,
                wrap this CoreSchema (`{'type': 'nullable', 'schema': current_schema}`),
                or just call the handler with the original schema.
            __handler (GetJsonSchemaHandler): Call into Pydantic's internal JSON schema generation.
                This will raise a `pydantic.errors.PydanticInvalidForJsonSchema` if JSON schema
                generation fails.
                Since this gets called by `BaseModel.model_json_schema` you can override the
                `schema_generator` argument to that function to change JSON schema generation globally
                for a type.

        Returns:
            JsonSchemaValue: A JSON schema, as a Python object.
        """
        # Default implementation: delegate to the internal generator with the schema unchanged.
        return __handler(__core_schema)

293 

    @classmethod
    def __pydantic_init_subclass__(cls, **kwargs: Any) -> None:
        """
        This is intended to behave just like `__init_subclass__`, but is called by ModelMetaclass
        only after the class is actually fully initialized. In particular, attributes like `model_fields` will
        be present when this is called.

        This is necessary because `__init_subclass__` will always be called by `type.__new__`,
        and it would require a prohibitively large refactor to the `ModelMetaclass` to ensure that
        `type.__new__` was called in such a manner that the class would already be sufficiently initialized.

        This will receive the same `kwargs` that would be passed to the standard `__init_subclass__`, namely,
        any kwargs passed to the class definition that aren't used internally by pydantic.
        """
        # Intentionally a no-op; subclasses override this hook as needed.
        pass

309 

    @classmethod
    def model_validate(
        cls: type[Model], obj: Any, *, strict: bool | None = None, context: dict[str, Any] | None = None
    ) -> Model:
        """
        Validate `obj` against this model and return a new validated instance.

        `strict` and `context` are passed straight through to the underlying
        `SchemaValidator.validate_python` call; `strict=None` defers to the model's config.
        """
        # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
        __tracebackhide__ = True
        return cls.__pydantic_validator__.validate_python(obj, strict=strict, context=context)

317 

    @property
    def model_fields_set(self) -> set[str]:
        """
        The set of fields that have been set on this model instance, i.e. that were not filled from defaults.
        """
        # Maintained by the validator during `__init__`/`model_construct` and by `__setattr__`.
        return self.__pydantic_fields_set__

324 

325 @property 

326 def model_computed_fields(self) -> dict[str, ComputedFieldInfo]: 

327 """ 

328 The computed fields of this model instance. 

329 """ 

330 return {k: v.info for k, v in self.__pydantic_decorators__.computed_fields.items()} 

331 

    @classmethod
    def model_validate_json(
        cls: type[Model],
        json_data: str | bytes | bytearray,
        *,
        strict: bool | None = None,
        context: dict[str, Any] | None = None,
    ) -> Model:
        """
        Parse `json_data` as JSON and validate the result against this model.

        `strict` and `context` are forwarded to `SchemaValidator.validate_json`.
        """
        # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks
        __tracebackhide__ = True
        return cls.__pydantic_validator__.validate_json(json_data, strict=strict, context=context)

343 

    def model_post_init(self, __context: Any) -> None:
        """
        If you override `model_post_init`, it will be called at the end of `__init__` and `model_construct`
        """
        # No-op by default; `model_construct` skips calling this unless a subclass overrides it.
        pass

349 

    def __setattr__(self, name: str, value: Any) -> None:
        # Order matters here: ClassVar rejection, then private-attribute fast path, then
        # frozen check, and only then field/property handling.
        if name in self.__class_vars__:
            raise AttributeError(
                f'"{name}" is a ClassVar of `{self.__class__.__name__}` and cannot be set on an instance. '
                f'If you want to set a value on the class, use `{self.__class__.__name__}.{name} = value`.'
            )
        elif name.startswith('_'):
            # Private/underscore attributes bypass validation and the frozen check entirely.
            _object_setattr(self, name, value)
            return
        elif self.model_config.get('frozen', None):
            raise TypeError(f'"{self.__class__.__name__}" is frozen and does not support item assignment')

        attr = getattr(self.__class__, name, None)
        if isinstance(attr, property):
            # Delegate to the property's setter rather than writing into `__dict__`.
            attr.__set__(self, value)
        elif self.model_config.get('validate_assignment', None):
            # The validator updates `__dict__`/`__pydantic_fields_set__` itself.
            self.__pydantic_validator__.validate_assignment(self, name, value)
        elif self.model_config.get('extra') != 'allow' and name not in self.model_fields:
            # TODO - matching error
            raise ValueError(f'"{self.__class__.__name__}" object has no field "{name}"')
        else:
            self.__dict__[name] = value
            self.__pydantic_fields_set__.add(name)

373 

374 def __getstate__(self) -> dict[Any, Any]: 

375 private_attrs = ((k, getattr(self, k, Undefined)) for k in self.__private_attributes__) 

376 return { 

377 '__dict__': self.__dict__, 

378 '__pydantic_fields_set__': self.__pydantic_fields_set__, 

379 '__private_attribute_values__': {k: v for k, v in private_attrs if v is not Undefined}, 

380 } 

381 

    def __setstate__(self, state: dict[Any, Any]) -> None:
        # Restore via `object.__setattr__` to bypass the overridden `__setattr__`
        # (which would otherwise validate or reject these writes).
        _object_setattr(self, '__dict__', state['__dict__'])
        _object_setattr(self, '__pydantic_fields_set__', state['__pydantic_fields_set__'])
        # `.get` keeps compatibility with states pickled without private attribute values.
        for name, value in state.get('__private_attribute_values__', {}).items():
            _object_setattr(self, name, value)

387 

388 def model_dump( 

389 self, 

390 *, 

391 mode: typing_extensions.Literal['json', 'python'] | str = 'python', 

392 include: IncEx = None, 

393 exclude: IncEx = None, 

394 by_alias: bool = False, 

395 exclude_unset: bool = False, 

396 exclude_defaults: bool = False, 

397 exclude_none: bool = False, 

398 round_trip: bool = False, 

399 warnings: bool = True, 

400 ) -> dict[str, Any]: 

401 """ 

402 Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. 

403 """ 

404 return self.__pydantic_serializer__.to_python( 

405 self, 

406 mode=mode, 

407 by_alias=by_alias, 

408 include=include, 

409 exclude=exclude, 

410 exclude_unset=exclude_unset, 

411 exclude_defaults=exclude_defaults, 

412 exclude_none=exclude_none, 

413 round_trip=round_trip, 

414 warnings=warnings, 

415 ) 

416 

417 def model_dump_json( 

418 self, 

419 *, 

420 indent: int | None = None, 

421 include: IncEx = None, 

422 exclude: IncEx = None, 

423 by_alias: bool = False, 

424 exclude_unset: bool = False, 

425 exclude_defaults: bool = False, 

426 exclude_none: bool = False, 

427 round_trip: bool = False, 

428 warnings: bool = True, 

429 ) -> str: 

430 """ 

431 Generate a JSON representation of the model, `include` and `exclude` arguments as per `dict()`. 

432 """ 

433 return self.__pydantic_serializer__.to_json( 

434 self, 

435 indent=indent, 

436 include=include, 

437 exclude=exclude, 

438 by_alias=by_alias, 

439 exclude_unset=exclude_unset, 

440 exclude_defaults=exclude_defaults, 

441 exclude_none=exclude_none, 

442 round_trip=round_trip, 

443 warnings=warnings, 

444 ).decode() 

445 

446 @classmethod 

447 def model_construct(cls: type[Model], _fields_set: set[str] | None = None, **values: Any) -> Model: 

448 """ 

449 Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. 

450 Default values are respected, but no other validation is performed. 

451 Behaves as if `Config.extra = 'allow'` was set since it adds all passed values 

452 """ 

453 m = cls.__new__(cls) 

454 fields_values: dict[str, Any] = {} 

455 for name, field in cls.model_fields.items(): 

456 if field.alias and field.alias in values: 

457 fields_values[name] = values[field.alias] 

458 elif name in values: 

459 fields_values[name] = values[name] 

460 elif not field.is_required(): 

461 fields_values[name] = field.get_default(call_default_factory=True) 

462 fields_values.update(values) 

463 _object_setattr(m, '__dict__', fields_values) 

464 if _fields_set is None: 

465 _fields_set = set(values.keys()) 

466 _object_setattr(m, '__pydantic_fields_set__', _fields_set) 

467 if type(m).model_post_init is not BaseModel.model_post_init: 

468 m.model_post_init(None) 

469 return m 

470 

    @classmethod
    def model_json_schema(
        cls,
        by_alias: bool = True,
        ref_template: str = DEFAULT_REF_TEMPLATE,
        schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
    ) -> dict[str, Any]:
        """
        Generate the JSON schema for this model.

        To override the logic used to generate the JSON schema, you can create a subclass of GenerateJsonSchema
        with your desired modifications, then override this method on a custom base class and set the default
        value of `schema_generator` to be your subclass.
        """
        # Thin wrapper around the module-level `model_json_schema` helper.
        return model_json_schema(cls, by_alias=by_alias, ref_template=ref_template, schema_generator=schema_generator)

484 

485 @classmethod 

486 def model_modify_json_schema(cls, json_schema: JsonSchemaValue) -> JsonSchemaValue: 

487 """ 

488 Overriding this method provides a simple way to modify the JSON schema generated for the model. 

489 

490 This is a convenience method primarily intended to control how the "generic" properties of the JSON schema 

491 are populated. See https://json-schema.org/understanding-json-schema/reference/generic.html for more details. 

492 

493 If you want to make more sweeping changes to how the JSON schema is generated, you will probably want to create 

494 a subclass of `GenerateJsonSchema` and pass it as `schema_generator` in `BaseModel.model_json_schema`. 

495 """ 

496 metadata = {'title': cls.model_config.get('title', None) or cls.__name__, 'description': getdoc(cls) or None} 

497 metadata = {k: v for k, v in metadata.items() if v is not None} 

498 return {**metadata, **json_schema} 

499 

    @classmethod
    def model_rebuild(
        cls,
        *,
        force: bool = False,
        raise_errors: bool = True,
        _parent_namespace_depth: int = 2,
        _types_namespace: dict[str, Any] | None = None,
    ) -> bool | None:
        """
        Try to (Re)construct the model schema.

        Returns `None` if the schema is already complete and `force` is not set; otherwise
        returns the result of `complete_model_class` (which reflects whether rebuilding
        succeeded, per `raise_errors`).
        """
        if not force and cls.__pydantic_model_complete__:
            return None
        else:
            if _types_namespace is not None:
                # An explicit namespace takes precedence; copy so the caller's dict isn't mutated.
                types_namespace: dict[str, Any] | None = _types_namespace.copy()
            else:
                if _parent_namespace_depth > 0:
                    # Merge the caller's frame locals into the stored parent namespace so
                    # types defined at the rebuild call site can resolve forward references.
                    frame_parent_ns = _typing_extra.parent_frame_namespace(parent_depth=_parent_namespace_depth) or {}
                    cls_parent_ns = cls.__pydantic_parent_namespace__ or {}
                    cls.__pydantic_parent_namespace__ = {**cls_parent_ns, **frame_parent_ns}

                types_namespace = cls.__pydantic_parent_namespace__

            types_namespace = _typing_extra.get_cls_types_namespace(cls, types_namespace)
            return _model_construction.complete_model_class(
                cls,
                cls.__name__,
                _config.ConfigWrapper(cls.model_config, check=False),
                raise_errors=raise_errors,
                types_namespace=types_namespace,
            )

533 

534 def __iter__(self) -> TupleGenerator: 

535 """ 

536 so `dict(model)` works 

537 """ 

538 yield from self.__dict__.items() 

539 

540 def __eq__(self, other: Any) -> bool: 

541 if not isinstance(other, BaseModel): 

542 return False 

543 

544 # When comparing instances of generic types for equality, as long as all field values are equal, 

545 # only require their generic origin types to be equal, rather than exact type equality. 

546 # This prevents headaches like MyGeneric(x=1) != MyGeneric[Any](x=1). 

547 self_type = self.__pydantic_generic_metadata__['origin'] or self.__class__ 

548 other_type = other.__pydantic_generic_metadata__['origin'] or other.__class__ 

549 

550 if self_type != other_type: 

551 return False 

552 

553 if self.__dict__ != other.__dict__: 

554 return False 

555 

556 # If the types and field values match, check for equality of private attributes 

557 for k in self.__private_attributes__: 

558 if getattr(self, k, Undefined) != getattr(other, k, Undefined): 

559 return False 

560 

561 return True 

562 

563 def model_copy(self: Model, *, update: dict[str, Any] | None = None, deep: bool = False) -> Model: 

564 """ 

565 Returns a copy of the model. 

566 

567 :param update: values to change/add in the new model. Note: the data is not validated before creating 

568 the new model: you should trust this data 

569 :param deep: set to `True` to make a deep copy of the model 

570 :return: new model instance 

571 """ 

572 copied = self.__deepcopy__() if deep else self.__copy__() 

573 if update: 

574 copied.__dict__.update(update) 

575 copied.__pydantic_fields_set__.update(update.keys()) 

576 return copied 

577 

578 def __copy__(self: Model) -> Model: 

579 """ 

580 Returns a shallow copy of the model 

581 """ 

582 cls = type(self) 

583 m = cls.__new__(cls) 

584 _object_setattr(m, '__dict__', copy(self.__dict__)) 

585 _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__)) 

586 for name in self.__private_attributes__: 

587 value = getattr(self, name, Undefined) 

588 if value is not Undefined: 

589 _object_setattr(m, name, value) 

590 return m 

591 

592 def __deepcopy__(self: Model, memo: dict[int, Any] | None = None) -> Model: 

593 """ 

594 Returns a deep copy of the model 

595 """ 

596 cls = type(self) 

597 m = cls.__new__(cls) 

598 _object_setattr(m, '__dict__', deepcopy(self.__dict__, memo=memo)) 

599 # This next line doesn't need a deepcopy because __pydantic_fields_set__ is a set[str], 

600 # and attempting a deepcopy would be marginally slower. 

601 _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__)) 

602 for name in self.__private_attributes__: 

603 value = getattr(self, name, Undefined) 

604 if value is not Undefined: 

605 _object_setattr(m, name, deepcopy(value, memo=memo)) 

606 return m 

607 

608 def __repr_args__(self) -> _repr.ReprArgs: 

609 yield from [ 

610 (k, v) 

611 for k, v in self.__dict__.items() 

612 if not k.startswith('_') and (k not in self.model_fields or self.model_fields[k].repr) 

613 ] 

614 yield from [(k, getattr(self, k)) for k, v in self.model_computed_fields.items() if v.repr] 

615 

    def __class_getitem__(
        cls, typevar_values: type[Any] | tuple[type[Any], ...]
    ) -> type[BaseModel] | _forward_ref.PydanticForwardRef | _forward_ref.PydanticRecursiveRef:
        # Fast path: a previously-built parametrization may already be cached.
        cached = _generics.get_cached_generic_type_early(cls, typevar_values)
        if cached is not None:
            return cached

        if cls is BaseModel:
            raise TypeError('Type parameters should be placed on typing.Generic, not BaseModel')
        if not hasattr(cls, '__parameters__'):
            raise TypeError(f'{cls} cannot be parametrized because it does not inherit from typing.Generic')
        if not cls.__pydantic_generic_metadata__['parameters'] and Generic not in cls.__bases__:
            raise TypeError(f'{cls} is not a generic class')

        if not isinstance(typevar_values, tuple):
            typevar_values = (typevar_values,)
        _generics.check_parameters_count(cls, typevar_values)

        # Build map from generic typevars to passed params
        typevars_map: dict[_typing_extra.TypeVarType, type[Any]] = dict(
            zip(cls.__pydantic_generic_metadata__['parameters'], typevar_values)
        )

        if _utils.all_identical(typevars_map.keys(), typevars_map.values()) and typevars_map:
            submodel = cls  # if arguments are equal to parameters it's the same object
            _generics.set_cached_generic_type(cls, typevar_values, submodel)
        else:
            parent_args = cls.__pydantic_generic_metadata__['args']
            if not parent_args:
                args = typevar_values
            else:
                # Substitute the new parameters into the partially-applied parent args.
                args = tuple(_generics.replace_types(arg, typevars_map) for arg in parent_args)

            origin = cls.__pydantic_generic_metadata__['origin'] or cls
            model_name = origin.model_parametrized_name(args)
            params = tuple(
                {param: None for param in _generics.iter_contained_typevars(typevars_map.values())}
            )  # use dict as ordered set

            # Guard against infinite recursion for self-referencing generics; the context
            # manager yields a placeholder self-type when a cycle is detected.
            with _generics.generic_recursion_self_type(origin, args) as maybe_self_type:
                if maybe_self_type is not None:
                    return maybe_self_type

                # Re-check the cache now that origin/args are normalized.
                cached = _generics.get_cached_generic_type_late(cls, typevar_values, origin, args)
                if cached is not None:
                    return cached

                # Attempt to rebuild the origin in case new types have been defined
                try:
                    # depth 3 gets you above this __class_getitem__ call
                    origin.model_rebuild(_parent_namespace_depth=3)
                except PydanticUndefinedAnnotation:
                    # It's okay if it fails, it just means there are still undefined types
                    # that could be evaluated later.
                    # TODO: Presumably we should error if validation is attempted here?
                    pass

                submodel = _generics.create_generic_submodel(model_name, origin, args, params)

                # Update cache
                _generics.set_cached_generic_type(cls, typevar_values, submodel, origin, args)

        return submodel

679 

680 @classmethod 

681 def model_parametrized_name(cls, params: tuple[type[Any], ...]) -> str: 

682 """ 

683 Compute class name for parametrizations of generic classes. 

684 

685 :param params: Tuple of types of the class . Given a generic class 

686 `Model` with 2 type variables and a concrete model `Model[str, int]`, 

687 the value `(str, int)` would be passed to `params`. 

688 :return: String representing the new class where `params` are 

689 passed to `cls` as type variables. 

690 

691 This method can be overridden to achieve a custom naming scheme for generic BaseModels. 

692 """ 

693 if not issubclass(cls, Generic): # type: ignore[arg-type] 

694 raise TypeError('Concrete names should only be generated for generic models.') 

695 

696 # Any strings received should represent forward references, so we handle them specially below. 

697 # If we eventually move toward wrapping them in a ForwardRef in __class_getitem__ in the future, 

698 # we may be able to remove this special case. 

699 param_names = [param if isinstance(param, str) else _repr.display_as_type(param) for param in params] 

700 params_component = ', '.join(param_names) 

701 return f'{cls.__name__}[{params_component}]' 

702 

703 # ##### Deprecated methods from v1 ##### 

    @property
    def __fields_set__(self) -> set[str]:
        # Deprecated v1 alias for `model_fields_set`.
        warnings.warn(
            'The `__fields_set__` attribute is deprecated, use `model_fields_set` instead.', DeprecationWarning
        )
        return self.__pydantic_fields_set__

710 

711 def dict( 

712 self, 

713 *, 

714 include: IncEx = None, 

715 exclude: IncEx = None, 

716 by_alias: bool = False, 

717 exclude_unset: bool = False, 

718 exclude_defaults: bool = False, 

719 exclude_none: bool = False, 

720 ) -> typing.Dict[str, Any]: # noqa UP006 

721 warnings.warn('The `dict` method is deprecated; use `model_dump` instead.', DeprecationWarning) 

722 return self.model_dump( 

723 include=include, 

724 exclude=exclude, 

725 by_alias=by_alias, 

726 exclude_unset=exclude_unset, 

727 exclude_defaults=exclude_defaults, 

728 exclude_none=exclude_none, 

729 ) 

730 

    def json(
        self,
        *,
        include: IncEx = None,
        exclude: IncEx = None,
        by_alias: bool = False,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        # TODO: What do we do about the following arguments?
        # Do they need to go on model_config now, and get used by the serializer?
        # `Undefined` is used as a sentinel default so passing these at all can be detected and rejected.
        encoder: typing.Callable[[Any], Any] | None = Undefined,  # type: ignore[assignment]
        models_as_dict: bool = Undefined,  # type: ignore[assignment]
        **dumps_kwargs: Any,
    ) -> str:
        # Deprecated v1 alias for `model_dump_json`; v1-only arguments are rejected outright.
        warnings.warn('The `json` method is deprecated; use `model_dump_json` instead.', DeprecationWarning)
        if encoder is not Undefined:
            raise TypeError('The `encoder` argument is no longer supported; use field serializers instead.')
        if models_as_dict is not Undefined:
            raise TypeError('The `models_as_dict` argument is no longer supported; use a model serializer instead.')
        if dumps_kwargs:
            raise TypeError('`dumps_kwargs` keyword arguments are no longer supported.')
        return self.model_dump_json(
            include=include,
            exclude=exclude,
            by_alias=by_alias,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
        )

761 

762 @classmethod 

763 def parse_obj(cls: type[Model], obj: Any) -> Model: 

764 warnings.warn('The `parse_obj` method is deprecated; use `model_validate` instead.', DeprecationWarning) 

765 return cls.model_validate(obj) 

766 

767 @classmethod 

768 def parse_raw( 

769 cls: type[Model], 

770 b: str | bytes, 

771 *, 

772 content_type: str | None = None, 

773 encoding: str = 'utf8', 

774 proto: _deprecated_parse.Protocol | None = None, 

775 allow_pickle: bool = False, 

776 ) -> Model: 

777 warnings.warn( 

778 'The `parse_raw` method is deprecated; if your data is JSON use `model_json_validate`, ' 

779 'otherwise load the data then use `model_validate` instead.', 

780 DeprecationWarning, 

781 ) 

782 try: 

783 obj = _deprecated_parse.load_str_bytes( 

784 b, 

785 proto=proto, 

786 content_type=content_type, 

787 encoding=encoding, 

788 allow_pickle=allow_pickle, 

789 ) 

790 except (ValueError, TypeError) as exc: 

791 import json 

792 

793 # try to match V1 

794 if isinstance(exc, UnicodeDecodeError): 

795 type_str = 'value_error.unicodedecode' 

796 elif isinstance(exc, json.JSONDecodeError): 

797 type_str = 'value_error.jsondecode' 

798 elif isinstance(exc, ValueError): 

799 type_str = 'value_error' 

800 else: 

801 type_str = 'type_error' 

802 

803 # ctx is missing here, but since we've added `input` to the error, we're not pretending it's the same 

804 error: pydantic_core.InitErrorDetails = { 

805 'type': pydantic_core.PydanticCustomError(type_str, str(exc)), 

806 'loc': ('__root__',), 

807 'input': b, 

808 } 

809 raise pydantic_core.ValidationError(cls.__name__, [error]) 

810 return cls.model_validate(obj) 

811 

812 @classmethod 

813 def parse_file( 

814 cls: type[Model], 

815 path: str | Path, 

816 *, 

817 content_type: str | None = None, 

818 encoding: str = 'utf8', 

819 proto: _deprecated_parse.Protocol | None = None, 

820 allow_pickle: bool = False, 

821 ) -> Model: 

822 warnings.warn( 

823 'The `parse_file` method is deprecated; load the data from file, then if your data is JSON ' 

824 'use `model_json_validate` otherwise `model_validate` instead.', 

825 DeprecationWarning, 

826 ) 

827 obj = _deprecated_parse.load_file( 

828 path, 

829 proto=proto, 

830 content_type=content_type, 

831 encoding=encoding, 

832 allow_pickle=allow_pickle, 

833 ) 

834 return cls.parse_obj(obj) 

835 

836 @classmethod 

837 def from_orm(cls: type[Model], obj: Any) -> Model: 

838 warnings.warn( 

839 'The `from_orm` method is deprecated; set `model_config["from_attributes"]=True` ' 

840 'and use `model_validate` instead.', 

841 DeprecationWarning, 

842 ) 

843 if not cls.model_config.get('from_attributes', None): 

844 raise PydanticUserError( 

845 'You must set the config attribute `from_attributes=True` to use from_orm', code=None 

846 ) 

847 return cls.model_validate(obj) 

848 

849 @classmethod 

850 def construct(cls: type[Model], _fields_set: set[str] | None = None, **values: Any) -> Model: 

851 warnings.warn('The `construct` method is deprecated; use `model_construct` instead.', DeprecationWarning) 

852 return cls.model_construct(_fields_set=_fields_set, **values) 

853 

854 def copy( 

855 self: Model, 

856 *, 

857 include: AbstractSetIntStr | MappingIntStrAny | None = None, 

858 exclude: AbstractSetIntStr | MappingIntStrAny | None = None, 

859 update: typing.Dict[str, Any] | None = None, # noqa UP006 

860 deep: bool = False, 

861 ) -> Model: 

862 """ 

863 This method is now deprecated; use `model_copy` instead. If you need include / exclude, use: 

864 

865 data = self.model_dump(include=include, exclude=exclude, round_trip=True) 

866 data = {**data, **(update or {})} 

867 copied = self.model_validate(data) 

868 """ 

869 warnings.warn( 

870 'The `copy` method is deprecated; use `model_copy` instead. ' 

871 'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.', 

872 DeprecationWarning, 

873 ) 

874 

875 values = dict( 

876 _deprecated_copy_internals._iter( # type: ignore 

877 self, to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False 

878 ), 

879 **(update or {}), 

880 ) 

881 

882 # new `__pydantic_fields_set__` can have unset optional fields with a set value in `update` kwarg 

883 if update: 

884 fields_set = self.__pydantic_fields_set__ | update.keys() 

885 else: 

886 fields_set = set(self.__pydantic_fields_set__) 

887 

888 # removing excluded fields from `__pydantic_fields_set__` 

889 if exclude: 

890 fields_set -= set(exclude) 

891 

892 return _deprecated_copy_internals._copy_and_set_values(self, values, fields_set, deep=deep) # type: ignore 

893 

894 @classmethod 

895 def schema( 

896 cls, by_alias: bool = True, ref_template: str = DEFAULT_REF_TEMPLATE 

897 ) -> typing.Dict[str, Any]: # noqa UP006 

898 warnings.warn('The `schema` method is deprecated; use `model_json_schema` instead.', DeprecationWarning) 

899 return cls.model_json_schema(by_alias=by_alias, ref_template=ref_template) 

900 

901 @classmethod 

902 def schema_json( 

903 cls, *, by_alias: bool = True, ref_template: str = DEFAULT_REF_TEMPLATE, **dumps_kwargs: Any 

904 ) -> str: 

905 import json 

906 

907 warnings.warn( 

908 'The `schema_json` method is deprecated; use `model_json_schema` and json.dumps instead.', 

909 DeprecationWarning, 

910 ) 

911 from .deprecated.json import pydantic_encoder 

912 

913 return json.dumps( 

914 cls.model_json_schema(by_alias=by_alias, ref_template=ref_template), 

915 default=pydantic_encoder, 

916 **dumps_kwargs, 

917 ) 

918 

919 @classmethod 

920 def validate(cls: type[Model], value: Any) -> Model: 

921 warnings.warn('The `validate` method is deprecated; use `model_validate` instead.', DeprecationWarning) 

922 return cls.model_validate(value) 

923 

924 @classmethod 

925 def update_forward_refs(cls, **localns: Any) -> None: 

926 warnings.warn( 

927 'The `update_forward_refs` method is deprecated; use `model_rebuild` instead.', DeprecationWarning 

928 ) 

929 if localns: 

930 raise TypeError('`localns` arguments are not longer accepted.') 

931 cls.model_rebuild(force=True) 

932 

933 def _iter(self, *args: Any, **kwargs: Any) -> Any: 

934 warnings.warn('The private method `_iter` will be removed and should no longer be used.', DeprecationWarning) 

935 return _deprecated_copy_internals._iter(self, *args, **kwargs) # type: ignore 

936 

937 def _copy_and_set_values(self, *args: Any, **kwargs: Any) -> Any: 

938 warnings.warn( 

939 'The private method `_copy_and_set_values` will be removed and should no longer be used.', 

940 DeprecationWarning, 

941 ) 

942 return _deprecated_copy_internals._copy_and_set_values(self, *args, **kwargs) # type: ignore 

943 

944 @classmethod 

945 def _get_value(cls, *args: Any, **kwargs: Any) -> Any: 

946 warnings.warn( 

947 'The private method `_get_value` will be removed and should no longer be used.', DeprecationWarning 

948 ) 

949 return _deprecated_copy_internals._get_value(cls, *args, **kwargs) # type: ignore 

950 

951 def _calculate_keys(self, *args: Any, **kwargs: Any) -> Any: 

952 warnings.warn( 

953 'The private method `_calculate_keys` will be removed and should no longer be used.', DeprecationWarning 

954 ) 

955 return _deprecated_copy_internals._calculate_keys(self, *args, **kwargs) # type: ignore 

956 

957 

# Read by `_collect_bases_data` below to gate `issubclass(base, BaseModel)` checks;
# presumably initialized to False earlier in this module while `BaseModel` itself is
# being constructed, and flipped to True here once the class exists — TODO confirm.
_base_class_defined = True

959 

960 

# Overload: no `__base__` supplied, so the created class is a direct `BaseModel` subclass.
@typing.overload
def create_model(
    __model_name: str,
    *,
    __config__: ConfigDict | None = None,
    __base__: None = None,
    __module__: str = __name__,
    __validators__: dict[str, AnyClassMethod] | None = None,
    __cls_kwargs__: dict[str, Any] | None = None,
    **field_definitions: Any,
) -> type[BaseModel]:
    ...

973 

974 

# Overload: explicit `__base__` supplied, so the created class is typed as a subclass
# of the given base(s).
@typing.overload
def create_model(
    __model_name: str,
    *,
    __config__: ConfigDict | None = None,
    __base__: type[Model] | tuple[type[Model], ...],
    __module__: str = __name__,
    __validators__: dict[str, AnyClassMethod] | None = None,
    __cls_kwargs__: dict[str, Any] | None = None,
    **field_definitions: Any,
) -> type[Model]:
    ...

987 

988 

def create_model(
    __model_name: str,
    *,
    __config__: ConfigDict | None = None,
    __base__: type[Model] | tuple[type[Model], ...] | None = None,
    __module__: str = __name__,
    __validators__: dict[str, AnyClassMethod] | None = None,
    __cls_kwargs__: dict[str, Any] | None = None,
    __slots__: tuple[str, ...] | None = None,
    **field_definitions: Any,
) -> type[Model]:
    """
    Dynamically create a model.
    :param __model_name: name of the created model
    :param __config__: config dict/class to use for the new model
    :param __base__: base class for the new model to inherit from
    :param __module__: module of the created model
    :param __validators__: a dict of method names and @validator class methods
    :param __cls_kwargs__: a dict for class creation
    :param __slots__: Deprecated, `__slots__` should not be passed to `create_model`
    :param field_definitions: fields of the model (or extra fields if a base is supplied)
        in the format `<name>=(<type>, <default value>)` or `<name>=<default value>, e.g.
        `foobar=(str, ...)` or `foobar=123`, or, for complex use-cases, in the format
        `<name>=<Field>` or `<name>=(<type>, <FieldInfo>)`, e.g.
        `foo=Field(datetime, default_factory=datetime.utcnow, alias='bar')` or
        `foo=(str, FieldInfo(title='Foo'))`
    """
    if __slots__ is not None:
        # __slots__ will be ignored from here on
        warnings.warn('__slots__ should not be passed to create_model', RuntimeWarning)

    # `__config__` and `__base__` are mutually exclusive: config must be set on the base.
    if __base__ is not None:
        if __config__ is not None:
            raise PydanticUserError(
                'to avoid confusion `__config__` and `__base__` cannot be used together',
                code='create-model-config-base',
            )
        if not isinstance(__base__, tuple):
            __base__ = (__base__,)
    else:
        # no base supplied: default to BaseModel itself
        __base__ = (typing.cast(typing.Type['Model'], BaseModel),)

    __cls_kwargs__ = __cls_kwargs__ or {}

    # Split the definitions into plain default values (`fields`) and, where a
    # `(<type>, <default>)` tuple was given, type annotations (`annotations`).
    fields = {}
    annotations = {}

    for f_name, f_def in field_definitions.items():
        if f_name.startswith('_'):
            # NOTE(review): despite the warning text saying "ignoring", the definition is
            # still processed below — presumably the model metaclass treats underscore
            # names as private attributes rather than fields; confirm before changing.
            warnings.warn(f'fields may not start with an underscore, ignoring "{f_name}"', RuntimeWarning)
        if isinstance(f_def, tuple):
            f_def = cast('tuple[str, Any]', f_def)
            try:
                f_annotation, f_value = f_def
            except ValueError as e:
                # tuple of the wrong arity
                raise PydanticUserError(
                    'Field definitions should either be a `(<type>, <default>)`.',
                    code='create-model-field-definitions',
                ) from e
        else:
            # bare value: no annotation, the value is the default
            f_annotation, f_value = None, f_def

        if f_annotation:
            annotations[f_name] = f_annotation
        fields[f_name] = f_value

    # Build the class namespace exactly as a `class` statement body would:
    # annotations, then validators, then field defaults, then config.
    namespace: dict[str, Any] = {'__annotations__': annotations, '__module__': __module__}
    if __validators__:
        namespace.update(__validators__)
    namespace.update(fields)
    if __config__:
        namespace['model_config'] = _config.ConfigWrapper(__config__).config_dict
    # Emulate the class-creation machinery (PEP 3115): resolve __mro_entries__,
    # pick the metaclass, and honour any keyword arguments for it.
    resolved_bases = resolve_bases(__base__)
    meta, ns, kwds = prepare_class(__model_name, resolved_bases, kwds=__cls_kwargs__)
    if resolved_bases is not __base__:
        ns['__orig_bases__'] = __base__
    namespace.update(ns)
    return meta(__model_name, resolved_bases, namespace, __pydantic_reset_parent_namespace__=False, **kwds)

1067 

1068 

def _collect_bases_data(bases: tuple[type[Any], ...]) -> tuple[set[str], set[str], dict[str, ModelPrivateAttr]]:
    """Aggregate the field names, class vars and private attributes declared across `bases`.

    Only proper `BaseModel` subclasses contribute (and only once `BaseModel`
    itself has been defined); `BaseModel` is skipped.
    """
    field_names: set[str] = set()
    class_vars: set[str] = set()
    private_attributes: dict[str, ModelPrivateAttr] = {}
    for base in bases:
        is_model_subclass = _base_class_defined and issubclass(base, BaseModel) and base != BaseModel
        if not is_model_subclass:
            continue
        # model_fields might not be defined yet in the case of generics, so we use getattr here:
        field_names.update(getattr(base, 'model_fields', {}).keys())
        class_vars.update(base.__class_vars__)
        private_attributes.update(base.__private_attributes__)
    return field_names, class_vars, private_attributes

1080 

1081 

# Module-level `__getattr__` (PEP 562): routes attribute lookups on this module
# through the V1→V2 migration shim so that removed/renamed names produce helpful
# errors or redirects instead of a bare AttributeError.
__getattr__ = getattr_migration(__name__)