Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/pydantic/_internal/_fields.py: 54%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

295 statements  

1"""Private logic related to fields (the `Field()` function and `FieldInfo` class), and arguments to `Annotated`.""" 

2 

3from __future__ import annotations as _annotations 

4 

5import dataclasses 

6import warnings 

7from collections.abc import Mapping 

8from functools import cache 

9from inspect import Parameter, ismethoddescriptor 

10from re import Pattern 

11from typing import TYPE_CHECKING, Any, Callable, TypeVar, cast 

12 

13from pydantic_core import PydanticUndefined 

14from typing_extensions import TypeIs 

15from typing_inspection.introspection import AnnotationSource 

16 

17from pydantic import PydanticDeprecatedSince211 

18from pydantic.errors import PydanticUserError 

19 

20from ..aliases import AliasGenerator 

21from . import _generics, _typing_extra 

22from ._config import ConfigWrapper 

23from ._docs_extraction import extract_docstrings_from_cls 

24from ._import_utils import import_cached_base_model, import_cached_field_info 

25from ._internal_dataclass import slots_true 

26from ._namespace_utils import NsResolver 

27from ._repr import Representation 

28from ._utils import can_be_positional, get_first_not_none 

29 

30if TYPE_CHECKING: 

31 from annotated_types import BaseMetadata 

32 

33 from ..fields import FieldInfo 

34 from ..main import BaseModel 

35 from ._dataclasses import PydanticDataclass, StandardDataclass 

36 from ._decorators import DecoratorInfos 

37 

38 

class PydanticMetadata(Representation):
    """Base class for annotation markers like `Strict`.

    Instances are attached as `Annotated` metadata; this base class only
    contributes `Representation`'s repr behavior.
    """

    # No instance state of its own — subclasses define their own slots/attributes.
    __slots__ = ()

43 

44 

@dataclasses.dataclass(**slots_true)  # TODO: make kw_only when we drop support for 3.9.
class PydanticExtraInfo:
    """Information collected from the `__pydantic_extra__` annotation of a model."""

    # TODO: make use of PEP 747:
    # The annotation set for `__pydantic_extra__` (may still be unevaluated).
    annotation: Any
    # Whether `annotation` was successfully evaluated. If `False`, it is
    # re-evaluated later (see `rebuild_model_fields()`).
    complete: bool

50 

51 

def pydantic_general_metadata(**metadata: Any) -> BaseMetadata:
    """Build a `_PydanticGeneralMetadata` instance carrying the given metadata.

    Args:
        **metadata: The metadata entries to attach, as keyword arguments.

    Returns:
        A `_PydanticGeneralMetadata` instance whose attributes are the metadata.
    """
    metadata_cls = _general_metadata_cls()
    return metadata_cls(metadata)  # type: ignore

62 

63 

@cache
def _general_metadata_cls() -> type[BaseMetadata]:
    """Do it this way to avoid importing `annotated_types` at import time."""
    # `@cache` guarantees a single class object is created and reused across calls.
    from annotated_types import BaseMetadata

    class _PydanticGeneralMetadata(PydanticMetadata, BaseMetadata):
        """Pydantic general metadata like `max_digits`."""

        def __init__(self, metadata: Any):
            # Replace the instance `__dict__` wholesale so that each metadata
            # key becomes an attribute of the instance.
            self.__dict__ = metadata

    return _PydanticGeneralMetadata  # type: ignore

76 

77 

def _check_protected_namespaces(
    protected_namespaces: tuple[str | Pattern[str], ...],
    ann_name: str,
    bases: tuple[type[Any], ...],
    cls_name: str,
) -> None:
    """Raise or warn when a field name collides with a configured protected namespace.

    Args:
        protected_namespaces: Namespace specs — plain string prefixes or compiled patterns.
        ann_name: The field (annotation) name being checked.
        bases: The direct bases of the class being built.
        cls_name: The name of the class being built (used in the warning message).

    Raises:
        ValueError: If the field name collides with an actual member of a base class
            (unless that member is itself a Pydantic field of a base model).
    """
    BaseModel = import_cached_base_model()

    for protected_namespace in protected_namespaces:
        # A namespace entry is either a compiled pattern (checked with `match`)
        # or a plain string prefix (checked with `startswith`):
        ns_violation = False
        if isinstance(protected_namespace, Pattern):
            ns_violation = protected_namespace.match(ann_name) is not None
        elif isinstance(protected_namespace, str):
            ns_violation = ann_name.startswith(protected_namespace)

        if ns_violation:
            for b in bases:
                if hasattr(b, ann_name):
                    # Shadowing a real member of a base class is an error, unless that
                    # member is itself a Pydantic field declared on a base model:
                    if not (issubclass(b, BaseModel) and ann_name in getattr(b, '__pydantic_fields__', {})):
                        raise ValueError(
                            f'Field {ann_name!r} conflicts with member {getattr(b, ann_name)}'
                            f' of protected namespace {protected_namespace!r}.'
                        )
            else:
                # NOTE: `for`/`else` with no `break` — this runs whenever the loop above
                # completes without raising. Collect the namespaces that would *not*
                # conflict with this field name, to suggest in the warning:
                valid_namespaces: list[str] = []
                for pn in protected_namespaces:
                    if isinstance(pn, Pattern):
                        if not pn.match(ann_name):
                            valid_namespaces.append(f're.compile({pn.pattern!r})')
                    else:
                        if not ann_name.startswith(pn):
                            valid_namespaces.append(f"'{pn}'")

                # Render as a tuple literal, keeping the trailing comma for 1-tuples:
                valid_namespaces_str = f'({", ".join(valid_namespaces)}{",)" if len(valid_namespaces) == 1 else ")"}'

                warnings.warn(
                    f'Field {ann_name!r} in {cls_name!r} conflicts with protected namespace {protected_namespace!r}.\n\n'
                    f"You may be able to solve this by setting the 'protected_namespaces' configuration to {valid_namespaces_str}.",
                    UserWarning,
                    stacklevel=5,
                )

119 

120 

def _update_fields_from_docstrings(cls: type[Any], fields: dict[str, FieldInfo], use_inspect: bool = False) -> None:
    """Fill in missing field descriptions from attribute docstrings found on `cls`.

    Only fields whose `description` is still `None` are updated, so explicitly
    set descriptions always win over docstring-derived ones.
    """
    docstrings = extract_docstrings_from_cls(cls, use_inspect=use_inspect)
    for name, info in fields.items():
        if info.description is not None:
            continue
        if name in docstrings:
            info.description = docstrings[name]

126 

127 

128def _apply_field_title_generator_to_field_info( 

129 title_generator: Callable[[str, FieldInfo], str], 

130 field_name: str, 

131 field_info: FieldInfo, 

132): 

133 if field_info.title is None: 

134 title = title_generator(field_name, field_info) 

135 if not isinstance(title, str): 

136 raise TypeError(f'field_title_generator {title_generator} must return str, not {title.__class__}') 

137 

138 field_info.title = title 

139 

140 

def _apply_alias_generator_to_field_info(
    alias_generator: Callable[[str], str] | AliasGenerator, field_name: str, field_info: FieldInfo
):
    """Apply an alias generator to aliases on a `FieldInfo` instance if appropriate.

    Args:
        alias_generator: A callable that takes a string and returns a string, or an `AliasGenerator` instance.
        field_name: The name of the field from which to generate the alias.
        field_info: The `FieldInfo` instance to which the alias generator is (maybe) applied.
    """
    # The generator applies when:
    # 1. No alias is specified, or
    # 2. An alias is specified but its priority is <= 1.
    should_apply = (
        field_info.alias_priority is None
        or field_info.alias_priority <= 1
        or field_info.alias is None
        or field_info.validation_alias is None
        or field_info.serialization_alias is None
    )
    if not should_apply:
        return

    gen_alias = gen_validation = gen_serialization = None

    if isinstance(alias_generator, AliasGenerator):
        gen_alias, gen_validation, gen_serialization = alias_generator.generate_aliases(field_name)
    elif callable(alias_generator):
        gen_alias = alias_generator(field_name)
        if not isinstance(gen_alias, str):
            raise TypeError(f'alias_generator {alias_generator} must return str, not {gen_alias.__class__}')

    # Unset/low priorities are normalized to 1, which supports the case where a
    # child class' alias_generator generates an alias for a parent-class field:
    if field_info.alias_priority is None or field_info.alias_priority <= 1:
        field_info.alias_priority = 1

    if field_info.alias_priority == 1:
        # Priority 1: the generated aliases overwrite the existing ones.
        field_info.serialization_alias = get_first_not_none(gen_serialization, gen_alias)
        field_info.validation_alias = get_first_not_none(gen_validation, gen_alias)
        field_info.alias = gen_alias

    # Higher priority: only fill in aliases that are still missing.
    if field_info.alias is None:
        field_info.alias = gen_alias
    if field_info.serialization_alias is None:
        field_info.serialization_alias = get_first_not_none(gen_serialization, gen_alias)
    if field_info.validation_alias is None:
        field_info.validation_alias = get_first_not_none(gen_validation, gen_alias)

189 

190 

def update_field_from_config(config_wrapper: ConfigWrapper, field_name: str, field_info: FieldInfo) -> None:
    """Update the `FieldInfo` instance from the configuration set on the model it belongs to.

    Applies the field title generator and the alias generator from the configuration,
    when they are set.

    Args:
        config_wrapper: The configuration from the model.
        field_name: The field name the `FieldInfo` instance is attached to.
        field_info: The `FieldInfo` instance to update.
    """
    # A title generator set directly on the field wins over the model-level one:
    title_gen = field_info.field_title_generator or config_wrapper.field_title_generator
    if title_gen is not None:
        _apply_field_title_generator_to_field_info(title_gen, field_name, field_info)

    alias_gen = config_wrapper.alias_generator
    if alias_gen is not None:
        _apply_alias_generator_to_field_info(alias_gen, field_name, field_info)

206 

207 

# Names of deprecated `BaseModel` *instance* methods (e.g. `.dict()`). Used in
# `collect_model_fields()` to detect when such a method object, picked up via
# `getattr`, would otherwise be mistaken for a field default.
_deprecated_method_names = {'dict', 'json', 'copy', '_iter', '_copy_and_set_values', '_calculate_keys'}

# Names of deprecated `BaseModel` *class* methods (e.g. `.schema()`); compared via
# their underlying `__func__` in `collect_model_fields()` for the same purpose.
_deprecated_classmethod_names = {
    'parse_obj',
    'parse_raw',
    'parse_file',
    'from_orm',
    'construct',
    'schema',
    'schema_json',
    'validate',
    'update_forward_refs',
    '_get_value',
}

222 

223 

def collect_model_fields(  # noqa: C901
    cls: type[BaseModel],
    config_wrapper: ConfigWrapper,
    ns_resolver: NsResolver,
    *,
    typevars_map: Mapping[TypeVar, Any] | None = None,
) -> tuple[dict[str, FieldInfo], PydanticExtraInfo | None, set[str]]:
    """Collect the fields and class variables names of a nascent Pydantic model.

    The fields collection process is *lenient*, meaning it won't error if string annotations
    fail to evaluate. If this happens, the original annotation (and assigned value, if any)
    is stored on the created `FieldInfo` instance.

    The `rebuild_model_fields()` should be called at a later point (e.g. when rebuilding the model),
    and will make use of these stored attributes.

    Args:
        cls: BaseModel or dataclass.
        config_wrapper: The config wrapper instance.
        ns_resolver: Namespace resolver to use when getting model annotations.
        typevars_map: A dictionary mapping type variables to their concrete types.

    Returns:
        A three-tuple containing the model fields, the `PydanticExtraInfo` instance if the `__pydantic_extra__` annotation is set,
        and class variables names.

    Raises:
        NameError:
            - If there is a conflict between a field name and protected namespaces.
            - If there is a field other than `root` in `RootModel`.
            - If a field shadows an attribute in the parent model.
    """
    FieldInfo_ = import_cached_field_info()
    BaseModel_ = import_cached_base_model()

    # Merge the fields of all direct bases, later (leftmost) bases winning:
    bases = cls.__bases__
    parent_fields_lookup: dict[str, FieldInfo] = {}
    for base in reversed(bases):
        if model_fields := getattr(base, '__pydantic_fields__', None):
            parent_fields_lookup.update(model_fields)

    type_hints = _typing_extra.get_model_type_hints(cls, ns_resolver=ns_resolver)

    # `cls_annotations` is only used to determine if an annotation comes from a parent class
    cls_annotations = _typing_extra.safe_get_annotations(cls)

    fields: dict[str, FieldInfo] = {}

    class_vars: set[str] = set()
    for ann_name, (ann_type, evaluated) in type_hints.items():
        if ann_name == 'model_config':
            # We never want to treat `model_config` as a field
            # Note: we may need to change this logic if/when we introduce a `BareModel` class with no
            # protected namespaces (where `model_config` might be allowed as a field name)
            continue

        _check_protected_namespaces(
            protected_namespaces=config_wrapper.protected_namespaces,
            ann_name=ann_name,
            bases=bases,
            cls_name=cls.__name__,
        )

        if _typing_extra.is_classvar_annotation(ann_type):
            # `ClassVar` annotations are tracked but never become fields:
            class_vars.add(ann_name)
            continue

        assigned_value = getattr(cls, ann_name, PydanticUndefined)
        if assigned_value is not PydanticUndefined and (
            # One of the deprecated instance methods was used as a field name (e.g. `dict()`):
            any(getattr(BaseModel_, depr_name, None) is assigned_value for depr_name in _deprecated_method_names)
            # One of the deprecated class methods was used as a field name (e.g. `schema()`):
            or (
                hasattr(assigned_value, '__func__')
                and any(
                    getattr(getattr(BaseModel_, depr_name, None), '__func__', None) is assigned_value.__func__  # pyright: ignore[reportAttributeAccessIssue]
                    for depr_name in _deprecated_classmethod_names
                )
            )
        ):
            # Then `assigned_value` would be the method, even though no default was specified:
            assigned_value = PydanticUndefined

        if not is_valid_field_name(ann_name):
            # Underscore-prefixed names are never fields (private attributes are handled elsewhere):
            continue
        if cls.__pydantic_root_model__ and ann_name != 'root':
            raise NameError(
                f"Unexpected field with name {ann_name!r}; only 'root' is allowed as a field of a `RootModel`"
            )

        for base in bases:
            if hasattr(base, ann_name):
                if ann_name not in cls_annotations:
                    # Don't warn when a field exists in a parent class but has not been defined in the current class
                    continue

                # when building a generic model with `MyModel[int]`, the generic_origin check makes sure we don't get
                # "... shadows an attribute" warnings
                generic_origin = getattr(cls, '__pydantic_generic_metadata__', {}).get('origin')
                if base is generic_origin:
                    # Don't warn when "shadowing" of attributes in parametrized generics
                    continue

                dataclass_fields = {
                    field.name for field in (dataclasses.fields(base) if dataclasses.is_dataclass(base) else ())
                }
                if ann_name in dataclass_fields:
                    # Don't warn when inheriting stdlib dataclasses whose fields are "shadowed" by defaults being set
                    # on the class instance.
                    continue

                warnings.warn(
                    f'Field name "{ann_name}" in "{cls.__qualname__}" shadows an attribute in parent '
                    f'"{base.__qualname__}"',
                    UserWarning,
                    stacklevel=4,
                )

        if assigned_value is PydanticUndefined:  # no assignment, just a plain annotation
            if ann_name in cls_annotations or ann_name not in parent_fields_lookup:
                # field is either:
                # - present in the current model's annotations (and *not* from parent classes)
                # - not found on any base classes; this seems to be caused by fields not getting
                #   generated due to models not being fully defined while initializing recursive models.
                #   Nothing stops us from just creating a `FieldInfo` for this type hint, so we do this.
                field_info = FieldInfo_.from_annotation(ann_type, _source=AnnotationSource.CLASS)
                # Store the original annotation that should be used to rebuild
                # the field info later:
                field_info._original_annotation = ann_type
                if not evaluated:
                    field_info._complete = False
            else:
                # The field was present on one of the (possibly multiple) base classes, we make a copy directly from it.
                parent_field_info = parent_fields_lookup[ann_name]._copy()

                # The only case where substituting the type variables is relevant (i.e. when `typevars_map` is not empty)
                # is when a generic class is parameterized (e.g. `MyGenericModel[int, str]`), which creates a new class object
                # (unlike the stdlib generics that create a generic alias). In this case, we are guaranteed to only have to copy
                # from the origin/parent model (e.g. `MyGenericModel`).
                if typevars_map:
                    field_info = _recreate_field_info(
                        parent_field_info, ns_resolver=ns_resolver, typevars_map=typevars_map, lenient=True
                    )
                else:
                    field_info = parent_field_info

        else:  # An assigned value is present (either the default value, or a `Field()` function)
            if isinstance(assigned_value, FieldInfo_) and ismethoddescriptor(assigned_value.default):
                # `assigned_value` was fetched using `getattr`, which triggers a call to `__get__`
                # for descriptors, so we do the same if the `= field(default=...)` form is used.
                # Note that we only do this for method descriptors for now, we might want to
                # extend this to any descriptor in the future (by simply checking for
                # `hasattr(assigned_value.default, '__get__')`).
                default = assigned_value.default.__get__(None, cls)
                assigned_value.default = default
                assigned_value._attributes_set['default'] = default

            field_info = FieldInfo_.from_annotated_attribute(ann_type, assigned_value, _source=AnnotationSource.CLASS)

            # Store the original annotation and assignment value that could be used to rebuild the field info later.
            field_info._original_assignment = assigned_value
            field_info._original_annotation = ann_type
            if not evaluated:
                field_info._complete = False
            elif 'final' in field_info._qualifiers and not field_info.is_required():
                warnings.warn(
                    f'Annotation {ann_name!r} is marked as final and has a default value. Pydantic treats {ann_name!r} as a '
                    'class variable, but it will be considered as a normal field in V3 to be aligned with dataclasses. If you '
                    f'still want {ann_name!r} to be considered as a class variable, annotate it as: `ClassVar[<type>] = <default>.`',
                    category=PydanticDeprecatedSince211,
                    # Incorrect when `create_model` is used, but the chance that final with a default is used is low in that case:
                    stacklevel=4,
                )
                class_vars.add(ann_name)
                continue

            # attributes which are fields are removed from the class namespace:
            # 1. To match the behaviour of annotation-only fields
            # 2. To avoid false positives in the NameError check above
            try:
                delattr(cls, ann_name)
            except AttributeError:
                pass  # indicates the attribute was on a parent class

        # Use cls.__dict__['__pydantic_decorators__'] instead of cls.__pydantic_decorators__
        # to make sure the decorators have already been built for this exact class
        decorators: DecoratorInfos = cls.__dict__['__pydantic_decorators__']
        if ann_name in decorators.computed_fields:
            raise TypeError(
                f'Field {ann_name!r} of class {cls.__name__!r} overrides symbol of same name in a parent class. '
                'This override with a computed_field is incompatible.'
            )
        fields[ann_name] = field_info

        if field_info._complete:
            # If not complete, this will be called in `rebuild_model_fields()`:
            update_field_from_config(config_wrapper, ann_name, field_info)

    if config_wrapper.use_attribute_docstrings:
        _update_fields_from_docstrings(cls, fields)

    # Record the (possibly unevaluated) `__pydantic_extra__` annotation, if present:
    pydantic_extra_info: PydanticExtraInfo | None = None
    if '__pydantic_extra__' in type_hints:
        ann, complete = type_hints['__pydantic_extra__']
        pydantic_extra_info = PydanticExtraInfo(
            annotation=ann,
            complete=complete,
        )

    return fields, pydantic_extra_info, class_vars

434 

435 

def rebuild_model_fields(
    cls: type[BaseModel],
    *,
    config_wrapper: ConfigWrapper,
    ns_resolver: NsResolver,
    typevars_map: Mapping[TypeVar, Any],
) -> tuple[dict[str, FieldInfo], PydanticExtraInfo | None]:
    """Rebuild the (already present) model fields by trying to reevaluate annotations.

    This function should be called whenever a model with incomplete fields is encountered.

    Args:
        cls: The model class whose fields should be rebuilt.
        config_wrapper: The config wrapper instance of the model.
        ns_resolver: Namespace resolver used to evaluate the stored annotations.
        typevars_map: A dictionary mapping type variables to their concrete types.

    Returns:
        A two-tuple, the first element being the rebuilt fields, the second element being
        the rebuild `PydanticExtraInfo` instance, if available.

    Raises:
        NameError: If one of the annotations failed to evaluate.

    Note:
        This function *doesn't* mutate the model fields in place, as it can be called during
        schema generation, where you don't want to mutate other model's fields.
    """
    rebuilt_fields: dict[str, FieldInfo] = {}
    with ns_resolver.push(cls):
        for f_name, field_info in cls.__pydantic_fields__.items():
            if field_info._complete:
                # Already evaluated during collection — reuse as-is.
                rebuilt_fields[f_name] = field_info
            else:
                # `lenient=False`: evaluation failures must raise here (see docstring).
                new_field = _recreate_field_info(
                    field_info, ns_resolver=ns_resolver, typevars_map=typevars_map, lenient=False
                )
                update_field_from_config(config_wrapper, f_name, new_field)
                rebuilt_fields[f_name] = new_field

        # Re-evaluate the `__pydantic_extra__` annotation as well, if it was incomplete:
        if cls.__pydantic_extra_info__ is not None and not cls.__pydantic_extra_info__.complete:
            rebuilt_extra_info = PydanticExtraInfo(
                annotation=_typing_extra.eval_type(
                    cls.__pydantic_extra_info__.annotation, *ns_resolver.types_namespace
                ),
                complete=True,
            )
        else:
            rebuilt_extra_info = cls.__pydantic_extra_info__

    return rebuilt_fields, rebuilt_extra_info

481 

482 

def _recreate_field_info(
    field_info: FieldInfo,
    ns_resolver: NsResolver,
    typevars_map: Mapping[TypeVar, Any],
    *,
    lenient: bool,
) -> FieldInfo:
    """Recreate a `FieldInfo` from its stored original annotation and assignment.

    Args:
        field_info: The field info carrying `_original_annotation` / `_original_assignment`.
        ns_resolver: Namespace resolver used to evaluate the stored annotation.
        typevars_map: A dictionary mapping type variables to their concrete types.
        lenient: If `True`, evaluation failures are tolerated and the result is marked
            incomplete; if `False`, evaluation failures propagate.

    Returns:
        A new `FieldInfo` instance (the input is not mutated).
    """
    FieldInfo_ = import_cached_field_info()

    # Preserve a description possibly set from attribute docstrings (see below):
    existing_desc = field_info.description
    if lenient:
        # Substitute type variables first, then *try* to evaluate — failures
        # leave the field marked as incomplete instead of raising:
        ann = _generics.replace_types(field_info._original_annotation, typevars_map)
        ann, evaluated = _typing_extra.try_eval_type(
            ann,
            *ns_resolver.types_namespace,
        )
    else:
        # Not the best pattern, maybe we could ship our own `eval_type()`,
        # that would replace the type variables on the fly during evaluation.
        ann = _typing_extra.eval_type(
            field_info._original_annotation,
            *ns_resolver.types_namespace,
        )
        ann = _generics.replace_types(ann, typevars_map)
        # Evaluate again, as the typevar substitution may have introduced new names:
        ann = _typing_extra.eval_type(
            ann,
            *ns_resolver.types_namespace,
        )
        evaluated = True

    if (assign := field_info._original_assignment) is PydanticUndefined:
        new_field = FieldInfo_.from_annotation(ann, _source=AnnotationSource.CLASS)
    else:
        new_field = FieldInfo_.from_annotated_attribute(ann, assign, _source=AnnotationSource.CLASS)
    # Keep the originals around in case another rebuild is needed later:
    new_field._original_assignment = assign
    new_field._original_annotation = ann
    # The description might come from the docstring if `use_attribute_docstrings` was `True`:
    new_field.description = new_field.description if new_field.description is not None else existing_desc
    if not evaluated:
        new_field._complete = False

    return new_field

525 

526 

def collect_dataclass_fields(
    cls: type[StandardDataclass],
    *,
    config_wrapper: ConfigWrapper,
    ns_resolver: NsResolver | None = None,
    typevars_map: dict[Any, Any] | None = None,
) -> dict[str, FieldInfo]:
    """Collect the fields of a dataclass.

    Args:
        cls: dataclass.
        config_wrapper: The config wrapper instance.
        ns_resolver: Namespace resolver to use when getting dataclass annotations.
            Defaults to an empty instance.
        typevars_map: A dictionary mapping type variables to their concrete types.

    Returns:
        The dataclass fields.

    Raises:
        PydanticUserError: If a field sets both `init=False` and `init_var=True`.
    """
    FieldInfo_ = import_cached_field_info()

    fields: dict[str, FieldInfo] = {}
    ns_resolver = ns_resolver or NsResolver()
    dataclass_fields = cls.__dataclass_fields__

    # The logic here is similar to `_typing_extra.get_cls_type_hints`,
    # although we do it manually as stdlib dataclasses already have annotations
    # collected in each class:
    for base in reversed(cls.__mro__):
        if not dataclasses.is_dataclass(base):
            continue

        with ns_resolver.push(base):
            for ann_name, dataclass_field in dataclass_fields.items():
                base_anns = _typing_extra.safe_get_annotations(base)

                if ann_name not in base_anns:
                    # `__dataclass_fields__` contains every field, even the ones from base classes.
                    # Only collect the ones defined on `base`.
                    continue

                globalns, localns = ns_resolver.types_namespace
                ann_type, evaluated = _typing_extra.try_eval_type(dataclass_field.type, globalns, localns)

                if _typing_extra.is_classvar_annotation(ann_type):
                    # `ClassVar`s never become fields:
                    continue

                if (
                    not dataclass_field.init
                    and dataclass_field.default is dataclasses.MISSING
                    and dataclass_field.default_factory is dataclasses.MISSING
                ):
                    # TODO: We should probably do something with this so that validate_assignment behaves properly
                    #   Issue: https://github.com/pydantic/pydantic/issues/5470
                    continue

                if isinstance(dataclass_field.default, FieldInfo_):
                    if dataclass_field.default.init_var:
                        if dataclass_field.default.init is False:
                            raise PydanticUserError(
                                f'Dataclass field {ann_name} has init=False and init_var=True, but these are mutually exclusive.',
                                code='clashing-init-and-init-var',
                            )

                        # TODO: same note as above re validate_assignment
                        continue
                    field_info = FieldInfo_.from_annotated_attribute(
                        ann_type, dataclass_field.default, _source=AnnotationSource.DATACLASS
                    )
                    field_info._original_assignment = dataclass_field.default
                else:
                    # The `dataclasses.Field` itself carries the default/default_factory:
                    field_info = FieldInfo_.from_annotated_attribute(
                        ann_type, dataclass_field, _source=AnnotationSource.DATACLASS
                    )
                    field_info._original_assignment = dataclass_field

                if not evaluated:
                    # Store what is needed to rebuild the field later:
                    field_info._complete = False
                    field_info._original_annotation = ann_type

                fields[ann_name] = field_info
                update_field_from_config(config_wrapper, ann_name, field_info)

                if field_info.default is not PydanticUndefined and isinstance(
                    getattr(cls, ann_name, field_info), FieldInfo_
                ):
                    # We need this to fix the default when the "default" from __dataclass_fields__ is a pydantic.FieldInfo
                    setattr(cls, ann_name, field_info.default)

    if typevars_map:
        for field in fields.values():
            # We don't pass any ns, as `field.annotation`
            # was already evaluated. TODO: is this method relevant?
            # Can't we just use `_generics.replace_types`?
            field.apply_typevars_map(typevars_map)

    if config_wrapper.use_attribute_docstrings:
        _update_fields_from_docstrings(
            cls,
            fields,
            # We can't rely on the (more reliable) frame inspection method
            # for stdlib dataclasses:
            use_inspect=not hasattr(cls, '__is_pydantic_dataclass__'),
        )

    return fields

633 

634 

def rebuild_dataclass_fields(
    cls: type[PydanticDataclass],
    *,
    config_wrapper: ConfigWrapper,
    ns_resolver: NsResolver,
    typevars_map: Mapping[TypeVar, Any],
) -> dict[str, FieldInfo]:
    """Rebuild the (already present) dataclass fields by trying to reevaluate annotations.

    This function should be called whenever a dataclass with incomplete fields is encountered.

    Args:
        cls: The Pydantic dataclass whose fields should be rebuilt.
        config_wrapper: The config wrapper instance of the dataclass.
        ns_resolver: Namespace resolver used to evaluate the stored annotations.
        typevars_map: A dictionary mapping type variables to their concrete types.

    Raises:
        NameError: If one of the annotations failed to evaluate.

    Note:
        This function *doesn't* mutate the dataclass fields in place, as it can be called during
        schema generation, where you don't want to mutate other dataclass's fields.
    """
    FieldInfo_ = import_cached_field_info()

    rebuilt_fields: dict[str, FieldInfo] = {}
    with ns_resolver.push(cls):
        for f_name, field_info in cls.__pydantic_fields__.items():
            if field_info._complete:
                # Already evaluated during collection — reuse as-is.
                rebuilt_fields[f_name] = field_info
            else:
                # Preserve a description possibly set from attribute docstrings:
                existing_desc = field_info.description
                ann = _typing_extra.eval_type(
                    field_info._original_annotation,
                    *ns_resolver.types_namespace,
                )
                ann = _generics.replace_types(ann, typevars_map)
                new_field = FieldInfo_.from_annotated_attribute(
                    ann,
                    field_info._original_assignment,
                    _source=AnnotationSource.DATACLASS,
                )

                # The description might come from the docstring if `use_attribute_docstrings` was `True`:
                new_field.description = new_field.description if new_field.description is not None else existing_desc
                update_field_from_config(config_wrapper, f_name, new_field)
                rebuilt_fields[f_name] = new_field

    return rebuilt_fields

679 

680 

def is_valid_field_name(name: str) -> bool:
    """Whether `name` is usable as a field name: it must not start with an underscore."""
    return name[:1] != '_'

683 

684 

def is_valid_privateattr_name(name: str) -> bool:
    """Whether `name` is usable as a private attribute name: exactly one leading underscore (no dunder prefix)."""
    return name[:1] == '_' and name[:2] != '__'

687 

688 

def takes_validated_data_argument(
    default_factory: Callable[[], Any] | Callable[[dict[str, Any]], Any],
) -> TypeIs[Callable[[dict[str, Any]], Any]]:
    """Whether the provided default factory callable has a validated data parameter."""
    try:
        signature = _typing_extra.signature_no_eval(default_factory)
    except (ValueError, TypeError):
        # `inspect.signature` might not be able to infer a signature, e.g. with C objects.
        # In this case, we assume no data argument is present:
        return False

    params = list(signature.parameters.values())
    if len(params) != 1:
        return False

    # A single positional-capable parameter with no default counts as the
    # validated-data argument:
    only_param = params[0]
    return can_be_positional(only_param) and only_param.default is Parameter.empty

703 

704 

def resolve_default_value(
    default: Any,
    default_factory: Callable[[], Any] | Callable[[dict[str, Any]], Any] | None,
    *,
    validated_data: dict[str, Any] | None = None,
    call_default_factory: bool = False,
) -> Any:
    """Resolve the default value using either a static default or a default_factory.

    Returns `PydanticUndefined` when a factory is set but `call_default_factory` is false.

    Raises:
        ValueError: If the factory expects validated data but `validated_data` was not provided.
    """
    from ._utils import smart_deepcopy

    if default_factory is None:
        # Static default: (smart-)deep-copied so callers don't share mutable state.
        return smart_deepcopy(default)

    if not call_default_factory:
        return PydanticUndefined

    if not takes_validated_data_argument(default_factory=default_factory):
        no_arg_factory = cast('Callable[[], Any]', default_factory)
        return no_arg_factory()

    data_factory = cast('Callable[[dict[str, Any]], Any]', default_factory)
    if validated_data is None:
        raise ValueError(
            "The default factory requires the 'validated_data' argument, which was not provided when calling 'get_default()'."
        )
    return data_factory(validated_data)