Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/pydantic/_internal/_fields.py: 61%


223 statements  

1"""Private logic related to fields (the `Field()` function and `FieldInfo` class), and arguments to `Annotated`.""" 

2 

3from __future__ import annotations as _annotations 

4 

5import dataclasses 

6import warnings 

7from collections.abc import Mapping 

8from copy import copy 

9from functools import cache 

10from inspect import Parameter, ismethoddescriptor, signature 

11from re import Pattern 

12from typing import TYPE_CHECKING, Any, Callable, TypeVar 

13 

14from pydantic_core import PydanticUndefined 

15from typing_extensions import TypeIs, get_origin 

16from typing_inspection import typing_objects 

17from typing_inspection.introspection import AnnotationSource 

18 

19from pydantic import PydanticDeprecatedSince211 

20from pydantic.errors import PydanticUserError 

21 

22from . import _generics, _typing_extra 

23from ._config import ConfigWrapper 

24from ._docs_extraction import extract_docstrings_from_cls 

25from ._import_utils import import_cached_base_model, import_cached_field_info 

26from ._namespace_utils import NsResolver 

27from ._repr import Representation 

28from ._utils import can_be_positional 

29 

30if TYPE_CHECKING: 

31 from annotated_types import BaseMetadata 

32 

33 from ..fields import FieldInfo 

34 from ..main import BaseModel 

35 from ._dataclasses import PydanticDataclass, StandardDataclass 

36 from ._decorators import DecoratorInfos 

37 

38 

39class PydanticMetadata(Representation): 

40 """Base class for annotation markers like `Strict`.""" 

41 

42 __slots__ = () 

43 

44 

45def pydantic_general_metadata(**metadata: Any) -> BaseMetadata: 

46 """Create a new `_PydanticGeneralMetadata` class with the given metadata. 

47 

48 Args: 

49 **metadata: The metadata to add. 

50 

51 Returns: 

52 The new `_PydanticGeneralMetadata` class. 

53 """ 

54 return _general_metadata_cls()(metadata) # type: ignore 

55 

56 

57@cache 

58def _general_metadata_cls() -> type[BaseMetadata]: 

59 """Do it this way to avoid importing `annotated_types` at import time.""" 

60 from annotated_types import BaseMetadata 

61 

62 class _PydanticGeneralMetadata(PydanticMetadata, BaseMetadata): 

63 """Pydantic general metadata like `max_digits`.""" 

64 

65 def __init__(self, metadata: Any): 

66 self.__dict__ = metadata 

67 

68 return _PydanticGeneralMetadata # type: ignore 

69 

70 
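The two helpers above defer the `annotated_types` import until first use: `@cache` builds the `_PydanticGeneralMetadata` class exactly once, and the keyword arguments become the instance's `__dict__`. A minimal sketch of that behaviour, shown only for illustration since this is a private helper:

# Illustrative sketch, not part of this module:
from pydantic._internal._fields import pydantic_general_metadata

meta = pydantic_general_metadata(max_digits=3, decimal_places=1)
print(meta.max_digits, meta.decimal_places)  # 3 1 -- attributes come straight from the metadata dict
# The class object is created only once, thanks to @cache:
assert type(meta) is type(pydantic_general_metadata(max_digits=5))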

def _update_fields_from_docstrings(cls: type[Any], fields: dict[str, FieldInfo], use_inspect: bool = False) -> None:
    fields_docs = extract_docstrings_from_cls(cls, use_inspect=use_inspect)
    for ann_name, field_info in fields.items():
        if field_info.description is None and ann_name in fields_docs:
            field_info.description = fields_docs[ann_name]
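This helper backs the `use_attribute_docstrings` config option: attribute docstrings become field descriptions, but only when no description was set explicitly. A small sketch of the public behaviour:

# Illustrative sketch, not part of this module:
from pydantic import BaseModel, ConfigDict

class Person(BaseModel):
    model_config = ConfigDict(use_attribute_docstrings=True)

    name: str
    """The person's full name."""

assert Person.model_fields['name'].description == "The person's full name."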

def collect_model_fields(  # noqa: C901
    cls: type[BaseModel],
    config_wrapper: ConfigWrapper,
    ns_resolver: NsResolver | None,
    *,
    typevars_map: Mapping[TypeVar, Any] | None = None,
) -> tuple[dict[str, FieldInfo], set[str]]:
    """Collect the fields and class variable names of a nascent Pydantic model.

    The fields collection process is *lenient*, meaning it won't error if string annotations
    fail to evaluate. If this happens, the original annotation (and assigned value, if any)
    is stored on the created `FieldInfo` instance.

    The `rebuild_model_fields()` function should be called at a later point (e.g. when rebuilding the model),
    and will make use of these stored attributes.

    Args:
        cls: BaseModel or dataclass.
        config_wrapper: The config wrapper instance.
        ns_resolver: Namespace resolver to use when getting model annotations.
        typevars_map: A dictionary mapping type variables to their concrete types.

    Returns:
        A two-tuple containing model fields and class variable names.

    Raises:
        NameError:
            - If there is a conflict between a field name and protected namespaces.
            - If there is a field other than `root` in `RootModel`.
            - If a field shadows an attribute in the parent model.
    """

    BaseModel = import_cached_base_model()
    FieldInfo_ = import_cached_field_info()

    bases = cls.__bases__
    parent_fields_lookup: dict[str, FieldInfo] = {}
    for base in reversed(bases):
        if model_fields := getattr(base, '__pydantic_fields__', None):
            parent_fields_lookup.update(model_fields)

    type_hints = _typing_extra.get_model_type_hints(cls, ns_resolver=ns_resolver)

    # https://docs.python.org/3/howto/annotations.html#accessing-the-annotations-dict-of-an-object-in-python-3-9-and-older
    # annotations is only used for finding fields in parent classes
    annotations = cls.__dict__.get('__annotations__', {})
    fields: dict[str, FieldInfo] = {}

    class_vars: set[str] = set()
    for ann_name, (ann_type, evaluated) in type_hints.items():
        if ann_name == 'model_config':
            # We never want to treat `model_config` as a field
            # Note: we may need to change this logic if/when we introduce a `BareModel` class with no
            # protected namespaces (where `model_config` might be allowed as a field name)
            continue

        for protected_namespace in config_wrapper.protected_namespaces:
            ns_violation: bool = False
            if isinstance(protected_namespace, Pattern):
                ns_violation = protected_namespace.match(ann_name) is not None
            elif isinstance(protected_namespace, str):
                ns_violation = ann_name.startswith(protected_namespace)

            if ns_violation:
                for b in bases:
                    if hasattr(b, ann_name):
                        if not (issubclass(b, BaseModel) and ann_name in getattr(b, '__pydantic_fields__', {})):
                            raise NameError(
                                f'Field "{ann_name}" conflicts with member {getattr(b, ann_name)}'
                                f' of protected namespace "{protected_namespace}".'
                            )
                else:
                    valid_namespaces = ()
                    for pn in config_wrapper.protected_namespaces:
                        if isinstance(pn, Pattern):
                            if not pn.match(ann_name):
                                valid_namespaces += (f're.compile({pn.pattern})',)
                        else:
                            if not ann_name.startswith(pn):
                                valid_namespaces += (pn,)

                    warnings.warn(
                        f'Field "{ann_name}" in {cls.__name__} has conflict with protected namespace "{protected_namespace}".'
                        '\n\nYou may be able to resolve this warning by setting'
                        f" `model_config['protected_namespaces'] = {valid_namespaces}`.",
                        UserWarning,
                    )

        if _typing_extra.is_classvar_annotation(ann_type):
            class_vars.add(ann_name)
            continue

        assigned_value = getattr(cls, ann_name, PydanticUndefined)

        if not is_valid_field_name(ann_name):
            continue
        if cls.__pydantic_root_model__ and ann_name != 'root':
            raise NameError(
                f"Unexpected field with name {ann_name!r}; only 'root' is allowed as a field of a `RootModel`"
            )

        # when building a generic model with `MyModel[int]`, the generic_origin check makes sure we don't get
        # "... shadows an attribute" warnings
        generic_origin = getattr(cls, '__pydantic_generic_metadata__', {}).get('origin')
        for base in bases:
            dataclass_fields = {
                field.name for field in (dataclasses.fields(base) if dataclasses.is_dataclass(base) else ())
            }
            if hasattr(base, ann_name):
                if base is generic_origin:
                    # Don't warn about "shadowing" of attributes in parametrized generics
                    continue

                if ann_name in dataclass_fields:
                    # Don't warn when inheriting stdlib dataclasses whose fields are "shadowed" by defaults being set
                    # on the class instance.
                    continue

                if ann_name not in annotations:
                    # Don't warn when a field exists in a parent class but has not been defined in the current class
                    continue

                warnings.warn(
                    f'Field name "{ann_name}" in "{cls.__qualname__}" shadows an attribute in parent '
                    f'"{base.__qualname__}"',
                    UserWarning,
                )

        if assigned_value is PydanticUndefined:  # no assignment, just a plain annotation
            if ann_name in annotations or ann_name not in parent_fields_lookup:
                # field is either:
                # - present in the current model's annotations (and *not* from parent classes)
                # - not found on any base classes; this seems to be caused by fields not getting
                #   generated due to models not being fully defined while initializing recursive models.
                # Nothing stops us from just creating a `FieldInfo` for this type hint, so we do this.
                field_info = FieldInfo_.from_annotation(ann_type, _source=AnnotationSource.CLASS)
                if not evaluated:
                    field_info._complete = False
                    # Store the original annotation that should be used to rebuild
                    # the field info later:
                    field_info._original_annotation = ann_type
            else:
                # The field was present on one of the (possibly multiple) base classes;
                # copy the field to make sure typevar substitutions don't cause issues with the base classes
                field_info = copy(parent_fields_lookup[ann_name])

        else:  # An assigned value is present (either the default value, or a `Field()` function)
            _warn_on_nested_alias_in_annotation(ann_type, ann_name)
            if isinstance(assigned_value, FieldInfo_) and ismethoddescriptor(assigned_value.default):
                # `assigned_value` was fetched using `getattr`, which triggers a call to `__get__`
                # for descriptors, so we do the same if the `= field(default=...)` form is used.
                # Note that we only do this for method descriptors for now, we might want to
                # extend this to any descriptor in the future (by simply checking for
                # `hasattr(assigned_value.default, '__get__')`).
                assigned_value.default = assigned_value.default.__get__(None, cls)

            # The `from_annotated_attribute()` call below mutates the assigned `Field()`, so make a copy:
            original_assignment = (
                assigned_value._copy() if not evaluated and isinstance(assigned_value, FieldInfo_) else assigned_value
            )

            field_info = FieldInfo_.from_annotated_attribute(ann_type, assigned_value, _source=AnnotationSource.CLASS)
            # Store the original annotation and assignment value that should be used to rebuild the field info later.
            # Note that the assignment is always stored as the annotation might contain a type var that is later
            # parameterized with an unknown forward reference (and we'll need it to rebuild the field info):
            field_info._original_assignment = original_assignment
            if not evaluated:
                field_info._complete = False
                field_info._original_annotation = ann_type
            elif 'final' in field_info._qualifiers and not field_info.is_required():
                warnings.warn(
                    f'Annotation {ann_name!r} is marked as final and has a default value. Pydantic treats {ann_name!r} as a '
                    'class variable, but it will be considered as a normal field in V3 to be aligned with dataclasses. If you '
                    f'still want {ann_name!r} to be considered as a class variable, annotate it as: `ClassVar[<type>] = <default>.`',
                    category=PydanticDeprecatedSince211,
                    # Incorrect when `create_model` is used, but the chance that final with a default is used is low in that case:
                    stacklevel=4,
                )
                class_vars.add(ann_name)
                continue

            # attributes which are fields are removed from the class namespace:
            # 1. To match the behaviour of annotation-only fields
            # 2. To avoid false positives in the NameError check above
            try:
                delattr(cls, ann_name)
            except AttributeError:
                pass  # indicates the attribute was on a parent class

            # Use cls.__dict__['__pydantic_decorators__'] instead of cls.__pydantic_decorators__
            # to make sure the decorators have already been built for this exact class
            decorators: DecoratorInfos = cls.__dict__['__pydantic_decorators__']
            if ann_name in decorators.computed_fields:
                raise TypeError(
                    f'Field {ann_name!r} of class {cls.__name__!r} overrides symbol of same name in a parent class. '
                    'This override with a computed_field is incompatible.'
                )
        fields[ann_name] = field_info

    if typevars_map:
        for field in fields.values():
            if field._complete:
                field.apply_typevars_map(typevars_map)

    if config_wrapper.use_attribute_docstrings:
        _update_fields_from_docstrings(cls, fields)
    return fields, class_vars
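The lenient behaviour described in the docstring is what allows recursive or forward-referencing models to be declared before every name exists: incomplete fields keep their original annotation and are only completed once the missing names can be resolved. A sketch of the public side of this, where `NodeRef` is a hypothetical name defined only after the class body:

# Illustrative sketch, not part of this module:
from pydantic import BaseModel, PydanticUserError

class Node(BaseModel):
    value: int
    next: 'NodeRef | None' = None  # 'NodeRef' does not exist yet; collection stays lenient

try:
    Node(value=1)
except PydanticUserError as exc:
    print(exc.code)  # class-not-fully-defined

NodeRef = Node  # hypothetical alias that resolves the forward reference
Node.model_rebuild()  # re-evaluates the stored annotation
print(Node(value=1))  # value=1 next=None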

def _warn_on_nested_alias_in_annotation(ann_type: type[Any], ann_name: str) -> None:
    FieldInfo = import_cached_field_info()

    args = getattr(ann_type, '__args__', None)
    if args:
        for anno_arg in args:
            if typing_objects.is_annotated(get_origin(anno_arg)):
                for anno_type_arg in _typing_extra.get_args(anno_arg):
                    if isinstance(anno_type_arg, FieldInfo) and anno_type_arg.alias is not None:
                        warnings.warn(
                            f'`alias` specification on field "{ann_name}" must be set on outermost annotation to take effect.',
                            UserWarning,
                        )
                        return
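The warning above targets an easy mistake: putting `Field(alias=...)` on a nested `Annotated` argument, where it has no effect. A sketch of what triggers it (an assigned default is needed, since the check only runs when a value is assigned to the field):

# Illustrative sketch, not part of this module:
from typing import Annotated
from pydantic import BaseModel, Field

class Order(BaseModel):
    # Warns: the alias is buried inside the list's item annotation and is ignored.
    items: list[Annotated[int, Field(alias='item')]] = []

    # Works: the alias is set on the outermost annotation of the field itself.
    codes: Annotated[list[int], Field(alias='code_list')] = []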

def rebuild_model_fields(
    cls: type[BaseModel],
    *,
    ns_resolver: NsResolver,
    typevars_map: Mapping[TypeVar, Any],
) -> dict[str, FieldInfo]:
    """Rebuild the (already present) model fields by trying to reevaluate annotations.

    This function should be called whenever a model with incomplete fields is encountered.

    Raises:
        NameError: If one of the annotations failed to evaluate.

    Note:
        This function *doesn't* mutate the model fields in place, as it can be called during
        schema generation, where you don't want to mutate other models' fields.
    """

    FieldInfo_ = import_cached_field_info()

    rebuilt_fields: dict[str, FieldInfo] = {}
    with ns_resolver.push(cls):
        for f_name, field_info in cls.__pydantic_fields__.items():
            if field_info._complete:
                rebuilt_fields[f_name] = field_info
            else:
                existing_desc = field_info.description
                ann = _typing_extra.eval_type(
                    field_info._original_annotation,
                    *ns_resolver.types_namespace,
                )
                ann = _generics.replace_types(ann, typevars_map)

                if (assign := field_info._original_assignment) is PydanticUndefined:
                    new_field = FieldInfo_.from_annotation(ann, _source=AnnotationSource.CLASS)
                else:
                    new_field = FieldInfo_.from_annotated_attribute(ann, assign, _source=AnnotationSource.CLASS)
                # The description might come from the docstring if `use_attribute_docstrings` was `True`:
                new_field.description = new_field.description if new_field.description is not None else existing_desc
                rebuilt_fields[f_name] = new_field

    return rebuilt_fields


def collect_dataclass_fields(
    cls: type[StandardDataclass],
    *,
    ns_resolver: NsResolver | None = None,
    typevars_map: dict[Any, Any] | None = None,
    config_wrapper: ConfigWrapper | None = None,
) -> dict[str, FieldInfo]:
    """Collect the fields of a dataclass.

    Args:
        cls: dataclass.
        ns_resolver: Namespace resolver to use when getting dataclass annotations.
            Defaults to an empty instance.
        typevars_map: A dictionary mapping type variables to their concrete types.
        config_wrapper: The config wrapper instance.

    Returns:
        The dataclass fields.
    """

    FieldInfo_ = import_cached_field_info()

    fields: dict[str, FieldInfo] = {}
    ns_resolver = ns_resolver or NsResolver()
    dataclass_fields = cls.__dataclass_fields__

    # The logic here is similar to `_typing_extra.get_cls_type_hints`,
    # although we do it manually as stdlib dataclasses already have annotations
    # collected in each class:
    for base in reversed(cls.__mro__):
        if not dataclasses.is_dataclass(base):
            continue

        with ns_resolver.push(base):
            for ann_name, dataclass_field in dataclass_fields.items():
                if ann_name not in base.__dict__.get('__annotations__', {}):
                    # `__dataclass_fields__` contains every field, even the ones from base classes.
                    # Only collect the ones defined on `base`.
                    continue

                globalns, localns = ns_resolver.types_namespace
                ann_type, evaluated = _typing_extra.try_eval_type(dataclass_field.type, globalns, localns)

                if _typing_extra.is_classvar_annotation(ann_type):
                    continue

                if (
                    not dataclass_field.init
                    and dataclass_field.default is dataclasses.MISSING
                    and dataclass_field.default_factory is dataclasses.MISSING
                ):
                    # TODO: We should probably do something with this so that validate_assignment behaves properly
                    #   Issue: https://github.com/pydantic/pydantic/issues/5470
                    continue

                if isinstance(dataclass_field.default, FieldInfo_):
                    if dataclass_field.default.init_var:
                        if dataclass_field.default.init is False:
                            raise PydanticUserError(
                                f'Dataclass field {ann_name} has init=False and init_var=True, but these are mutually exclusive.',
                                code='clashing-init-and-init-var',
                            )

                        # TODO: same note as above re validate_assignment
                        continue
                    field_info = FieldInfo_.from_annotated_attribute(
                        ann_type, dataclass_field.default, _source=AnnotationSource.DATACLASS
                    )
                    field_info._original_assignment = dataclass_field.default
                else:
                    field_info = FieldInfo_.from_annotated_attribute(
                        ann_type, dataclass_field, _source=AnnotationSource.DATACLASS
                    )
                    field_info._original_assignment = dataclass_field

                if not evaluated:
                    field_info._complete = False
                    field_info._original_annotation = ann_type

                fields[ann_name] = field_info

                if field_info.default is not PydanticUndefined and isinstance(
                    getattr(cls, ann_name, field_info), FieldInfo_
                ):
                    # We need this to fix the default when the "default" from __dataclass_fields__ is a pydantic.FieldInfo
                    setattr(cls, ann_name, field_info.default)

    if typevars_map:
        for field in fields.values():
            # We don't pass any ns, as `field.annotation`
            # was already evaluated. TODO: is this method relevant?
            # Can't we just use `_generics.replace_types`?
            field.apply_typevars_map(typevars_map)

    if config_wrapper is not None and config_wrapper.use_attribute_docstrings:
        _update_fields_from_docstrings(
            cls,
            fields,
            # We can't rely on the (more reliable) frame inspection method
            # for stdlib dataclasses:
            use_inspect=not hasattr(cls, '__is_pydantic_dataclass__'),
        )

    return fields

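For a quick sense of what this collection produces, here is a hedged sketch using a Pydantic dataclass: a `Field()` used as a default carries its metadata into the collected `FieldInfo` (stored on the internal `__pydantic_fields__` mapping these functions populate), while the class remains a regular stdlib dataclass.

# Illustrative sketch, not part of this module:
import dataclasses
import pydantic

@pydantic.dataclasses.dataclass
class Point:
    x: int
    y: int = pydantic.Field(default=0, description='vertical offset')

print([f.name for f in dataclasses.fields(Point)])  # ['x', 'y'] -- still a stdlib dataclass
print(Point.__pydantic_fields__['y'].description)   # 'vertical offset' (collected FieldInfo)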

def rebuild_dataclass_fields(
    cls: type[PydanticDataclass],
    *,
    config_wrapper: ConfigWrapper,
    ns_resolver: NsResolver,
    typevars_map: Mapping[TypeVar, Any],
) -> dict[str, FieldInfo]:
    """Rebuild the (already present) dataclass fields by trying to reevaluate annotations.

    This function should be called whenever a dataclass with incomplete fields is encountered.

    Raises:
        NameError: If one of the annotations failed to evaluate.

    Note:
        This function *doesn't* mutate the dataclass fields in place, as it can be called during
        schema generation, where you don't want to mutate other dataclasses' fields.
    """

    FieldInfo_ = import_cached_field_info()

    rebuilt_fields: dict[str, FieldInfo] = {}
    with ns_resolver.push(cls):
        for f_name, field_info in cls.__pydantic_fields__.items():
            if field_info._complete:
                rebuilt_fields[f_name] = field_info
            else:
                existing_desc = field_info.description
                ann = _typing_extra.eval_type(
                    field_info._original_annotation,
                    *ns_resolver.types_namespace,
                )
                ann = _generics.replace_types(ann, typevars_map)
                new_field = FieldInfo_.from_annotated_attribute(
                    ann,
                    field_info._original_assignment,
                    _source=AnnotationSource.DATACLASS,
                )

                # The description might come from the docstring if `use_attribute_docstrings` was `True`:
                new_field.description = new_field.description if new_field.description is not None else existing_desc
                rebuilt_fields[f_name] = new_field

    return rebuilt_fields


def is_valid_field_name(name: str) -> bool:
    return not name.startswith('_')


def is_valid_privateattr_name(name: str) -> bool:
    return name.startswith('_') and not name.startswith('__')
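These two predicates encode Pydantic's naming convention: names without a leading underscore are fields, names with a single leading underscore are private attributes, and dunder names are neither. A small sketch of the resulting public behaviour:

# Illustrative sketch, not part of this module:
from pydantic import BaseModel

class Config(BaseModel):
    url: str          # no leading underscore -> regular field
    _token: str = ''  # single leading underscore -> private attribute, not validated

print(list(Config.model_fields))            # ['url']
print(Config(url='https://x.test')._token)  # ''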

def takes_validated_data_argument(
    default_factory: Callable[[], Any] | Callable[[dict[str, Any]], Any],
) -> TypeIs[Callable[[dict[str, Any]], Any]]:
    """Whether the provided default factory callable has a validated data parameter."""
    try:
        sig = signature(default_factory)
    except (ValueError, TypeError):
        # `inspect.signature` might not be able to infer a signature, e.g. with C objects.
        # In this case, we assume no data argument is present:
        return False

    parameters = list(sig.parameters.values())

    return len(parameters) == 1 and can_be_positional(parameters[0]) and parameters[0].default is Parameter.empty
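This check distinguishes the two supported `default_factory` shapes: a zero-argument callable, or a callable with a single required positional parameter that receives the already-validated data. A short sketch of the user-facing behaviour:

# Illustrative sketch, not part of this module:
from pydantic import BaseModel, Field

class User(BaseModel):
    name: str
    # Zero-argument factory: called with no arguments.
    tags: list[str] = Field(default_factory=list)
    # One-argument factory: called with the validated data collected so far.
    greeting: str = Field(default_factory=lambda data: f"Hello, {data['name']}!")

print(User(name='Ada').greeting)  # Hello, Ada!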