1"""Provide an enhanced dataclass that performs validation."""
2
3from __future__ import annotations as _annotations
4
5import dataclasses
6import functools
7import sys
8import types
9from typing import TYPE_CHECKING, Any, Callable, Generic, Literal, NoReturn, TypeVar, overload
10from warnings import warn
11
12from typing_extensions import TypeGuard, dataclass_transform
13
14from ._internal import _config, _decorators, _namespace_utils, _typing_extra
15from ._internal import _dataclasses as _pydantic_dataclasses
16from ._migration import getattr_migration
17from .config import ConfigDict
18from .errors import PydanticUserError
19from .fields import Field, FieldInfo, PrivateAttr
20
21if TYPE_CHECKING:
22 from ._internal._dataclasses import PydanticDataclass
23 from ._internal._namespace_utils import MappingNamespace
24
25__all__ = 'dataclass', 'rebuild_dataclass'
26
27_T = TypeVar('_T')
28
if sys.version_info >= (3, 10):
    # Python 3.10 added the `kw_only` and `slots` arguments to the stdlib
    # `dataclasses.dataclass` decorator, so the overloads expose them on these versions.

    # Overload for parenthesized usage: `@dataclass(...)` applied with keyword arguments.
    @dataclass_transform(field_specifiers=(dataclasses.field, Field, PrivateAttr))
    @overload
    def dataclass(
        *,
        init: Literal[False] = False,
        repr: bool = True,
        eq: bool = True,
        order: bool = False,
        unsafe_hash: bool = False,
        # `None` (not `False`) is the sentinel meaning "fall back to the `frozen` setting
        # from `config`", matching the other overloads and the implementation below.
        frozen: bool | None = None,
        config: ConfigDict | type[object] | None = None,
        validate_on_init: bool | None = None,
        kw_only: bool = ...,
        slots: bool = ...,
    ) -> Callable[[type[_T]], type[PydanticDataclass]]: # type: ignore
        ...

    # Overload for bare usage: `@dataclass` applied directly to the class.
    @dataclass_transform(field_specifiers=(dataclasses.field, Field, PrivateAttr))
    @overload
    def dataclass(
        _cls: type[_T], # type: ignore
        *,
        init: Literal[False] = False,
        repr: bool = True,
        eq: bool = True,
        order: bool = False,
        unsafe_hash: bool = False,
        frozen: bool | None = None,
        config: ConfigDict | type[object] | None = None,
        validate_on_init: bool | None = None,
        kw_only: bool = ...,
        slots: bool = ...,
    ) -> type[PydanticDataclass]: ...

else:
    # Before Python 3.10, the stdlib `@dataclass` has no `kw_only`/`slots` arguments,
    # so they are omitted from the overloads on these versions.

    @dataclass_transform(field_specifiers=(dataclasses.field, Field, PrivateAttr))
    @overload
    def dataclass(
        *,
        init: Literal[False] = False,
        repr: bool = True,
        eq: bool = True,
        order: bool = False,
        unsafe_hash: bool = False,
        frozen: bool | None = None,
        config: ConfigDict | type[object] | None = None,
        validate_on_init: bool | None = None,
    ) -> Callable[[type[_T]], type[PydanticDataclass]]: # type: ignore
        ...

    @dataclass_transform(field_specifiers=(dataclasses.field, Field, PrivateAttr))
    @overload
    def dataclass(
        _cls: type[_T], # type: ignore
        *,
        init: Literal[False] = False,
        repr: bool = True,
        eq: bool = True,
        order: bool = False,
        unsafe_hash: bool = False,
        frozen: bool | None = None,
        config: ConfigDict | type[object] | None = None,
        validate_on_init: bool | None = None,
    ) -> type[PydanticDataclass]: ...
96
97
@dataclass_transform(field_specifiers=(dataclasses.field, Field, PrivateAttr))
def dataclass(
    _cls: type[_T] | None = None,
    *,
    init: Literal[False] = False,
    repr: bool = True,
    eq: bool = True,
    order: bool = False,
    unsafe_hash: bool = False,
    frozen: bool | None = None,
    config: ConfigDict | type[object] | None = None,
    validate_on_init: bool | None = None,
    kw_only: bool = False,
    slots: bool = False,
) -> Callable[[type[_T]], type[PydanticDataclass]] | type[PydanticDataclass]:
    """!!! abstract "Usage Documentation"
        [`dataclasses`](../concepts/dataclasses.md)

    A decorator used to create a Pydantic-enhanced dataclass, similar to the standard Python `dataclass`,
    but with added validation.

    This function should be used similarly to `dataclasses.dataclass`.

    Args:
        _cls: The target `dataclass`.
        init: Included for signature compatibility with `dataclasses.dataclass`, and is passed through to
            `dataclasses.dataclass` when appropriate. If specified, must be set to `False`, as pydantic inserts its
            own `__init__` function.
        repr: A boolean indicating whether to include the field in the `__repr__` output.
        eq: Determines if a `__eq__` method should be generated for the class.
        order: Determines if comparison magic methods should be generated, such as `__lt__`, but not `__eq__`.
        unsafe_hash: Determines if a `__hash__` method should be included in the class, as in `dataclasses.dataclass`.
        frozen: Determines if the generated class should be a 'frozen' `dataclass`, which does not allow its
            attributes to be modified after it has been initialized. If not set, the value from the provided `config` argument will be used (and will default to `False` otherwise).
        config: The Pydantic config to use for the `dataclass`.
        validate_on_init: A deprecated parameter included for backwards compatibility; in V2, all Pydantic dataclasses
            are validated on init.
        kw_only: Determines if `__init__` method parameters must be specified by keyword only. Defaults to `False`.
        slots: Determines if the generated class should be a 'slots' `dataclass`, which does not allow the addition of
            new attributes after instantiation.

    Returns:
        A decorator that accepts a class as its argument and returns a Pydantic `dataclass`.

    Raises:
        AssertionError: Raised if `init` is not `False` or `validate_on_init` is `False`.
    """
    # Pydantic always generates its own `__init__` and always validates on init, so
    # conflicting arguments are treated as programming errors rather than handled gracefully.
    assert init is False, 'pydantic.dataclasses.dataclass only supports init=False'
    assert validate_on_init is not False, 'validate_on_init=False is no longer supported'

    # `kw_only` and `slots` are only accepted by the stdlib `@dataclass` on Python 3.10+,
    # so forward them only on those versions.
    if sys.version_info >= (3, 10):
        kwargs = {'kw_only': kw_only, 'slots': slots}
    else:
        kwargs = {}

    def create_dataclass(cls: type[Any]) -> type[PydanticDataclass]:
        """Create a Pydantic dataclass from a regular dataclass.

        Args:
            cls: The class to create the Pydantic dataclass from.

        Returns:
            A Pydantic dataclass.
        """
        # Local import — presumably to avoid an import cycle with the internals; confirm.
        from ._internal._utils import is_model_class

        if is_model_class(cls):
            raise PydanticUserError(
                f'Cannot create a Pydantic dataclass from {cls.__name__} as it is already a Pydantic model',
                code='dataclass-on-model',
            )

        # `cls` may be rebound below (stdlib-dataclass subclassing, `@dataclass` application);
        # keep a reference to the class as passed in so its metadata can be copied later.
        original_cls = cls

        # we warn on conflicting config specifications, but only if the class doesn't have a dataclass base
        # because a dataclass base might provide a __pydantic_config__ attribute that we don't want to warn about
        has_dataclass_base = any(dataclasses.is_dataclass(base) for base in cls.__bases__)
        if not has_dataclass_base and config is not None and hasattr(cls, '__pydantic_config__'):
            warn(
                f'`config` is set via both the `dataclass` decorator and `__pydantic_config__` for dataclass {cls.__name__}. '
                f'The `config` specification from `dataclass` decorator will take priority.',
                category=UserWarning,
                stacklevel=2,
            )

        # if config is not explicitly provided, try to read it from the type
        config_dict = config if config is not None else getattr(cls, '__pydantic_config__', None)
        config_wrapper = _config.ConfigWrapper(config_dict)
        decorators = _decorators.DecoratorInfos.build(cls)
        decorators.update_from_config(config_wrapper)

        # Keep track of the original __doc__ so that we can restore it after applying the dataclasses decorator
        # Otherwise, classes with no __doc__ will have their signature added into the JSON schema description,
        # since dataclasses.dataclass will set this as the __doc__
        original_doc = cls.__doc__

        if _pydantic_dataclasses.is_stdlib_dataclass(cls):
            # Vanilla dataclasses include a default docstring (representing the class signature),
            # which we don't want to preserve.
            original_doc = None

            # We don't want to add validation to the existing std lib dataclass, so we will subclass it
            # If the class is generic, we need to make sure the subclass also inherits from Generic
            # with all the same parameters.
            bases = (cls,)
            if issubclass(cls, Generic):
                generic_base = Generic[cls.__parameters__]  # type: ignore
                bases = bases + (generic_base,)
            cls = types.new_class(cls.__name__, bases)

        # Respect frozen setting from dataclass constructor and fallback to config setting if not provided
        if frozen is not None:
            frozen_ = frozen
            if config_wrapper.frozen:
                # It's not recommended to define both, as the setting from the dataclass decorator will take priority.
                warn(
                    f'`frozen` is set via both the `dataclass` decorator and `config` for dataclass {cls.__name__!r}.'
                    'This is not recommended. The `frozen` specification on `dataclass` will take priority.',
                    category=UserWarning,
                    stacklevel=2,
                )
        else:
            frozen_ = config_wrapper.frozen or False

        # Make Pydantic's `Field()` function compatible with stdlib dataclasses. As we'll decorate
        # `cls` with the stdlib `@dataclass` decorator first, there are two attributes, `kw_only` and
        # `repr` that need to be understood *during* the stdlib creation. We do so in two steps:

        # 1. On the decorated class, wrap `Field()` assignment with `dataclass.field()`, with the
        # two attributes set (done in `as_dataclass_field()`)
        cls_anns = _typing_extra.safe_get_annotations(cls)
        for field_name in cls_anns:
            # We should look for assignments in `__dict__` instead, but for now we follow
            # the same behavior as stdlib dataclasses (see https://github.com/python/cpython/issues/88609)
            field_value = getattr(cls, field_name, None)
            if isinstance(field_value, FieldInfo):
                setattr(cls, field_name, _pydantic_dataclasses.as_dataclass_field(field_value))

        # 2. For bases of `cls` that are stdlib dataclasses, we temporarily patch their fields
        # (see the docstring of the context manager):
        with _pydantic_dataclasses.patch_base_fields(cls):
            cls = dataclasses.dataclass( # pyright: ignore[reportCallIssue]
                cls,
                # the value of init here doesn't affect anything except that it makes it easier to generate a signature
                init=True,
                repr=repr,
                eq=eq,
                order=order,
                unsafe_hash=unsafe_hash,
                frozen=frozen_,
                **kwargs,
            )

        if config_wrapper.validate_assignment:
            # Wrap `__setattr__` so that attribute assignment runs validation; the original
            # (possibly frozen) `__setattr__` is kept so it can still be deferred to below.
            original_setattr = cls.__setattr__

            @functools.wraps(cls.__setattr__)
            def validated_setattr(instance: PydanticDataclass, name: str, value: Any, /) -> None:
                if frozen_:
                    # Defer to the stdlib frozen `__setattr__` — presumably so the usual
                    # frozen-instance error is raised; confirm against stdlib behavior.
                    return original_setattr(instance, name, value) # pyright: ignore[reportCallIssue]
                inst_cls = type(instance)
                attr = getattr(inst_cls, name, None)

                if isinstance(attr, property):
                    # Properties define their own setter logic; don't run model validation.
                    attr.__set__(instance, value)
                elif isinstance(attr, functools.cached_property):
                    # Assigning to a cached property just replaces the cached value in `__dict__`.
                    instance.__dict__.__setitem__(name, value)
                else:
                    inst_cls.__pydantic_validator__.validate_assignment(instance, name, value)

            # Bind the wrapper to the class so it acts as the `__setattr__` method.
            cls.__setattr__ = validated_setattr.__get__(None, cls)  # type: ignore

        if slots and not hasattr(cls, '__setstate__'):
            # If slots is set, `pickle` (relied on by `copy.copy()`) will use
            # `__setattr__()` to reconstruct the dataclass. However, the custom
            # `__setattr__()` set above relies on `validate_assignment()`, which
            # in turn expects all the field values to be already present on the
            # instance, resulting in attribute errors.
            # As such, we make use of `object.__setattr__()` instead.
            # Note that we do so only if `__setstate__()` isn't already set (this is the
            # case if on top of `slots`, `frozen` is used).

            # Taken from `dataclasses._dataclass_get/setstate()`:
            def _dataclass_getstate(self: Any) -> list[Any]:
                return [getattr(self, f.name) for f in dataclasses.fields(self)]

            def _dataclass_setstate(self: Any, state: list[Any]) -> None:
                for field, value in zip(dataclasses.fields(self), state):
                    object.__setattr__(self, field.name, value)

            cls.__getstate__ = _dataclass_getstate  # pyright: ignore[reportAttributeAccessIssue]
            cls.__setstate__ = _dataclass_setstate  # pyright: ignore[reportAttributeAccessIssue]

        # This is an undocumented attribute to distinguish stdlib/Pydantic dataclasses.
        # It should be set as early as possible:
        cls.__is_pydantic_dataclass__ = True
        cls.__pydantic_decorators__ = decorators  # type: ignore
        cls.__doc__ = original_doc
        # Can be non-existent for dynamically created classes:
        firstlineno = getattr(original_cls, '__firstlineno__', None)
        cls.__module__ = original_cls.__module__
        if sys.version_info >= (3, 13) and firstlineno is not None:
            # As per https://docs.python.org/3/reference/datamodel.html#type.__firstlineno__:
            # Setting the `__module__` attribute removes the `__firstlineno__` item from the type's dictionary.
            original_cls.__firstlineno__ = firstlineno
            cls.__firstlineno__ = firstlineno
        cls.__qualname__ = original_cls.__qualname__
        cls.__pydantic_fields_complete__ = classmethod(_pydantic_fields_complete)
        cls.__pydantic_complete__ = False  # `complete_dataclass` will set it to `True` if successful.
        # TODO `parent_namespace` is currently None, but we could do the same thing as Pydantic models:
        # fetch the parent ns using `parent_frame_namespace` (if the dataclass was defined in a function),
        # and possibly cache it (see the `__pydantic_parent_namespace__` logic for models).
        _pydantic_dataclasses.complete_dataclass(cls, config_wrapper, raise_errors=False)
        return cls

    # Bare usage (`@dataclass`) passes the class in `_cls`; parenthesized usage
    # (`@dataclass(...)`) returns the wrapper to be applied to the class afterwards.
    return create_dataclass if _cls is None else create_dataclass(_cls)
314
315
def _pydantic_fields_complete(cls: type[PydanticDataclass]) -> bool:
    """Return whether the fields were successfully collected (i.e. type hints were successfully resolved).

    This is a private property, not meant to be used outside Pydantic.
    """
    return all(field_info._complete for field_info in cls.__pydantic_fields__.values())
322
323
# Module-level `__getattr__` (PEP 562): delegates lookups of attributes that no longer
# exist on this module to pydantic's migration helper — presumably to surface
# V1→V2 relocation/removal guidance; confirm against `_migration.getattr_migration`.
__getattr__ = getattr_migration(__name__)
325
if sys.version_info < (3, 11):
    # Monkeypatch `dataclasses.InitVar` so that `typing` doesn't error if it occurs as a
    # type when evaluating type hints. Starting with 3.11, `typing.get_type_hints` no
    # longer raises when the retrieved hints are not callable, so the patch is not needed.

    def _call_initvar(*args: Any, **kwargs: Any) -> NoReturn:
        """Stand-in `__call__` for `InitVar` that always raises.

        Its only job is to give `InitVar` a `__call__` attribute so that
        `typing._type_check` accepts hints evaluating to `InitVar[<parameter>]`;
        actually invoking it fails with an error as close as possible to the one
        you would get without this monkeypatch.
        """
        raise TypeError("'InitVar' object is not callable")

    dataclasses.InitVar.__call__ = _call_initvar
338
339
def rebuild_dataclass(
    cls: type[PydanticDataclass],
    *,
    force: bool = False,
    raise_errors: bool = True,
    _parent_namespace_depth: int = 2,
    _types_namespace: MappingNamespace | None = None,
) -> bool | None:
    """Try to rebuild the pydantic-core schema for the dataclass.

    Useful when one of the annotations is a `ForwardRef` that could not be resolved during
    the initial attempt to build the schema, and automatic rebuilding also failed.

    This is the dataclass counterpart of `BaseModel.model_rebuild`.

    Args:
        cls: The class to rebuild the pydantic-core schema for.
        force: Whether to force the rebuilding of the schema, defaults to `False`.
        raise_errors: Whether to raise errors, defaults to `True`.
        _parent_namespace_depth: The depth level of the parent namespace, defaults to 2.
        _types_namespace: The types namespace, defaults to `None`.

    Returns:
        `None` when the schema was already "complete" and no rebuild was required;
        otherwise `True` if the rebuild succeeded, `False` if it did not.
    """
    if cls.__pydantic_complete__ and not force:
        return None

    # Purge any cached core artifacts: a stale validator/serializer would be reused by
    # pydantic-core, and a stale core schema would be reused during schema generation.
    for cached_attr in ('__pydantic_core_schema__', '__pydantic_validator__', '__pydantic_serializer__'):
        if cached_attr in cls.__dict__:
            delattr(cls, cached_attr)

    cls.__pydantic_complete__ = False

    # Pick the namespace used to resolve forward references: an explicit one wins,
    # otherwise walk up the caller's stack (the depth is relative to this frame, so
    # `parent_frame_namespace` must be called directly here).
    if _types_namespace is not None:
        rebuild_ns = _types_namespace
    elif _parent_namespace_depth > 0:
        rebuild_ns = _typing_extra.parent_frame_namespace(parent_depth=_parent_namespace_depth, force=True) or {}
    else:
        rebuild_ns = {}

    ns_resolver = _namespace_utils.NsResolver(parent_namespace=rebuild_ns)

    # A config copy with `'defer_build': True` could be passed instead of the explicit
    # `_force_build` flag, but since the config may come from either the decorator
    # argument or the `__pydantic_config__` attribute, `complete_dataclass` would then
    # overwrite `__pydantic_config__` with that copy.
    return _pydantic_dataclasses.complete_dataclass(
        cls,
        _config.ConfigWrapper(cls.__pydantic_config__, check=False),
        raise_errors=raise_errors,
        ns_resolver=ns_resolver,
        _force_build=True,
    )
399
400
def is_pydantic_dataclass(class_: type[Any], /) -> TypeGuard[type[PydanticDataclass]]:
    """Whether a class is a pydantic dataclass.

    Args:
        class_: The class.

    Returns:
        `True` if the class is a pydantic dataclass, `False` otherwise.
    """
    try:
        # The marker must live in the class's own `__dict__` — it is not inherited.
        has_marker = '__is_pydantic_dataclass__' in class_.__dict__
    except AttributeError:
        # `class_` has no `__dict__` at all (e.g. it isn't actually a class object).
        return False
    return has_marker and dataclasses.is_dataclass(class_)