1"""Private logic related to fields (the `Field()` function and `FieldInfo` class), and arguments to `Annotated`."""
2
3from __future__ import annotations as _annotations
4
5import dataclasses
6import warnings
7from collections.abc import Mapping
8from functools import cache
9from inspect import Parameter, ismethoddescriptor, signature
10from re import Pattern
11from typing import TYPE_CHECKING, Any, Callable, TypeVar
12
13from pydantic_core import PydanticUndefined
14from typing_extensions import TypeIs
15from typing_inspection.introspection import AnnotationSource
16
17from pydantic import PydanticDeprecatedSince211
18from pydantic.errors import PydanticUserError
19
20from ..aliases import AliasGenerator
21from . import _generics, _typing_extra
22from ._config import ConfigWrapper
23from ._docs_extraction import extract_docstrings_from_cls
24from ._import_utils import import_cached_base_model, import_cached_field_info
25from ._namespace_utils import NsResolver
26from ._repr import Representation
27from ._utils import can_be_positional, get_first_not_none
28
29if TYPE_CHECKING:
30 from annotated_types import BaseMetadata
31
32 from ..fields import FieldInfo
33 from ..main import BaseModel
34 from ._dataclasses import PydanticDataclass, StandardDataclass
35 from ._decorators import DecoratorInfos
36
37
38class PydanticMetadata(Representation):
39 """Base class for annotation markers like `Strict`."""
40
41 __slots__ = ()
42
43
44def pydantic_general_metadata(**metadata: Any) -> BaseMetadata:
45 """Create a new `_PydanticGeneralMetadata` class with the given metadata.
46
47 Args:
48 **metadata: The metadata to add.
49
50 Returns:
51 The new `_PydanticGeneralMetadata` class.
52 """
53 return _general_metadata_cls()(metadata) # type: ignore
54
55
56@cache
57def _general_metadata_cls() -> type[BaseMetadata]:
58 """Do it this way to avoid importing `annotated_types` at import time."""
59 from annotated_types import BaseMetadata
60
61 class _PydanticGeneralMetadata(PydanticMetadata, BaseMetadata):
62 """Pydantic general metadata like `max_digits`."""
63
64 def __init__(self, metadata: Any):
65 self.__dict__ = metadata
66
67 return _PydanticGeneralMetadata # type: ignore
68
69
70def _check_protected_namespaces(
71 protected_namespaces: tuple[str | Pattern[str], ...],
72 ann_name: str,
73 bases: tuple[type[Any], ...],
74 cls_name: str,
75) -> None:
76 BaseModel = import_cached_base_model()
77
78 for protected_namespace in protected_namespaces:
79 ns_violation = False
80 if isinstance(protected_namespace, Pattern):
81 ns_violation = protected_namespace.match(ann_name) is not None
82 elif isinstance(protected_namespace, str):
83 ns_violation = ann_name.startswith(protected_namespace)
84
85 if ns_violation:
86 for b in bases:
87 if hasattr(b, ann_name):
88 if not (issubclass(b, BaseModel) and ann_name in getattr(b, '__pydantic_fields__', {})):
89 raise ValueError(
90 f'Field {ann_name!r} conflicts with member {getattr(b, ann_name)}'
91 f' of protected namespace {protected_namespace!r}.'
92 )
93 else:
94 valid_namespaces: list[str] = []
95 for pn in protected_namespaces:
96 if isinstance(pn, Pattern):
97 if not pn.match(ann_name):
98 valid_namespaces.append(f're.compile({pn.pattern!r})')
99 else:
100 if not ann_name.startswith(pn):
101 valid_namespaces.append(f"'{pn}'")
102
103 valid_namespaces_str = f'({", ".join(valid_namespaces)}{",)" if len(valid_namespaces) == 1 else ")"}'
104
105 warnings.warn(
106 f'Field {ann_name!r} in {cls_name!r} conflicts with protected namespace {protected_namespace!r}.\n\n'
107 f"You may be able to solve this by setting the 'protected_namespaces' configuration to {valid_namespaces_str}.",
108 UserWarning,
109 stacklevel=5,
110 )
111
112
113def _update_fields_from_docstrings(cls: type[Any], fields: dict[str, FieldInfo], use_inspect: bool = False) -> None:
114 fields_docs = extract_docstrings_from_cls(cls, use_inspect=use_inspect)
115 for ann_name, field_info in fields.items():
116 if field_info.description is None and ann_name in fields_docs:
117 field_info.description = fields_docs[ann_name]
118
119
120def _apply_field_title_generator_to_field_info(
121 title_generator: Callable[[str, FieldInfo], str],
122 field_name: str,
123 field_info: FieldInfo,
124):
125 if field_info.title is None:
126 title = title_generator(field_name, field_info)
127 if not isinstance(title, str):
128 raise TypeError(f'field_title_generator {title_generator} must return str, not {title.__class__}')
129
130 field_info.title = title
131
132
133def _apply_alias_generator_to_field_info(
134 alias_generator: Callable[[str], str] | AliasGenerator, field_name: str, field_info: FieldInfo
135):
136 """Apply an alias generator to aliases on a `FieldInfo` instance if appropriate.
137
138 Args:
139 alias_generator: A callable that takes a string and returns a string, or an `AliasGenerator` instance.
140 field_name: The name of the field from which to generate the alias.
141 field_info: The `FieldInfo` instance to which the alias generator is (maybe) applied.
142 """
143 # Apply an alias_generator if
144 # 1. An alias is not specified
145 # 2. An alias is specified, but the priority is <= 1
146 if (
147 field_info.alias_priority is None
148 or field_info.alias_priority <= 1
149 or field_info.alias is None
150 or field_info.validation_alias is None
151 or field_info.serialization_alias is None
152 ):
153 alias, validation_alias, serialization_alias = None, None, None
154
155 if isinstance(alias_generator, AliasGenerator):
156 alias, validation_alias, serialization_alias = alias_generator.generate_aliases(field_name)
157 elif callable(alias_generator):
158 alias = alias_generator(field_name)
159 if not isinstance(alias, str):
160 raise TypeError(f'alias_generator {alias_generator} must return str, not {alias.__class__}')
161
162 # if priority is not set, we set to 1
163 # which supports the case where the alias_generator from a child class is used
164 # to generate an alias for a field in a parent class
165 if field_info.alias_priority is None or field_info.alias_priority <= 1:
166 field_info.alias_priority = 1
167
168 # if the priority is 1, then we set the aliases to the generated alias
169 if field_info.alias_priority == 1:
170 field_info.serialization_alias = get_first_not_none(serialization_alias, alias)
171 field_info.validation_alias = get_first_not_none(validation_alias, alias)
172 field_info.alias = alias
173
174 # if any of the aliases are not set, then we set them to the corresponding generated alias
175 if field_info.alias is None:
176 field_info.alias = alias
177 if field_info.serialization_alias is None:
178 field_info.serialization_alias = get_first_not_none(serialization_alias, alias)
179 if field_info.validation_alias is None:
180 field_info.validation_alias = get_first_not_none(validation_alias, alias)
181
182
183def update_field_from_config(config_wrapper: ConfigWrapper, field_name: str, field_info: FieldInfo) -> None:
184 """Update the `FieldInfo` instance from the configuration set on the model it belongs to.
185
186 This will apply the title and alias generators from the configuration.
187
188 Args:
189 config_wrapper: The configuration from the model.
190 field_name: The field name the `FieldInfo` instance is attached to.
191 field_info: The `FieldInfo` instance to update.
192 """
193 field_title_generator = field_info.field_title_generator or config_wrapper.field_title_generator
194 if field_title_generator is not None:
195 _apply_field_title_generator_to_field_info(field_title_generator, field_name, field_info)
196 if config_wrapper.alias_generator is not None:
197 _apply_alias_generator_to_field_info(config_wrapper.alias_generator, field_name, field_info)
198
199
200_deprecated_method_names = {'dict', 'json', 'copy', '_iter', '_copy_and_set_values', '_calculate_keys'}
201
202_deprecated_classmethod_names = {
203 'parse_obj',
204 'parse_raw',
205 'parse_file',
206 'from_orm',
207 'construct',
208 'schema',
209 'schema_json',
210 'validate',
211 'update_forward_refs',
212 '_get_value',
213}
214
215
216def collect_model_fields( # noqa: C901
217 cls: type[BaseModel],
218 config_wrapper: ConfigWrapper,
219 ns_resolver: NsResolver | None,
220 *,
221 typevars_map: Mapping[TypeVar, Any] | None = None,
222) -> tuple[dict[str, FieldInfo], set[str]]:
223 """Collect the fields and class variables names of a nascent Pydantic model.
224
225 The fields collection process is *lenient*, meaning it won't error if string annotations
226 fail to evaluate. If this happens, the original annotation (and assigned value, if any)
227 is stored on the created `FieldInfo` instance.
228
229 The `rebuild_model_fields()` should be called at a later point (e.g. when rebuilding the model),
230 and will make use of these stored attributes.
231
232 Args:
233 cls: BaseModel or dataclass.
234 config_wrapper: The config wrapper instance.
235 ns_resolver: Namespace resolver to use when getting model annotations.
236 typevars_map: A dictionary mapping type variables to their concrete types.
237
238 Returns:
239 A two-tuple containing model fields and class variables names.
240
241 Raises:
242 NameError:
243 - If there is a conflict between a field name and protected namespaces.
244 - If there is a field other than `root` in `RootModel`.
245 - If a field shadows an attribute in the parent model.
246 """
247 FieldInfo_ = import_cached_field_info()
248 BaseModel_ = import_cached_base_model()
249
250 bases = cls.__bases__
251 parent_fields_lookup: dict[str, FieldInfo] = {}
252 for base in reversed(bases):
253 if model_fields := getattr(base, '__pydantic_fields__', None):
254 parent_fields_lookup.update(model_fields)
255
256 type_hints = _typing_extra.get_model_type_hints(cls, ns_resolver=ns_resolver)
257
258 # https://docs.python.org/3/howto/annotations.html#accessing-the-annotations-dict-of-an-object-in-python-3-9-and-older
259 # annotations is only used for finding fields in parent classes
260 annotations = _typing_extra.safe_get_annotations(cls)
261
262 fields: dict[str, FieldInfo] = {}
263
264 class_vars: set[str] = set()
265 for ann_name, (ann_type, evaluated) in type_hints.items():
266 if ann_name == 'model_config':
267 # We never want to treat `model_config` as a field
268 # Note: we may need to change this logic if/when we introduce a `BareModel` class with no
269 # protected namespaces (where `model_config` might be allowed as a field name)
270 continue
271
272 _check_protected_namespaces(
273 protected_namespaces=config_wrapper.protected_namespaces,
274 ann_name=ann_name,
275 bases=bases,
276 cls_name=cls.__name__,
277 )
278
279 if _typing_extra.is_classvar_annotation(ann_type):
280 class_vars.add(ann_name)
281 continue
282
283 assigned_value = getattr(cls, ann_name, PydanticUndefined)
284 if assigned_value is not PydanticUndefined and (
285 # One of the deprecated instance methods was used as a field name (e.g. `dict()`):
286 any(getattr(BaseModel_, depr_name, None) is assigned_value for depr_name in _deprecated_method_names)
287 # One of the deprecated class methods was used as a field name (e.g. `schema()`):
288 or (
289 hasattr(assigned_value, '__func__')
290 and any(
291 getattr(getattr(BaseModel_, depr_name, None), '__func__', None) is assigned_value.__func__ # pyright: ignore[reportAttributeAccessIssue]
292 for depr_name in _deprecated_classmethod_names
293 )
294 )
295 ):
296 # Then `assigned_value` would be the method, even though no default was specified:
297 assigned_value = PydanticUndefined
298
299 if not is_valid_field_name(ann_name):
300 continue
301 if cls.__pydantic_root_model__ and ann_name != 'root':
302 raise NameError(
303 f"Unexpected field with name {ann_name!r}; only 'root' is allowed as a field of a `RootModel`"
304 )
305
306 # when building a generic model with `MyModel[int]`, the generic_origin check makes sure we don't get
307 # "... shadows an attribute" warnings
308 generic_origin = getattr(cls, '__pydantic_generic_metadata__', {}).get('origin')
309 for base in bases:
310 dataclass_fields = {
311 field.name for field in (dataclasses.fields(base) if dataclasses.is_dataclass(base) else ())
312 }
313 if hasattr(base, ann_name):
314 if base is generic_origin:
315 # Don't warn when "shadowing" of attributes in parametrized generics
316 continue
317
318 if ann_name in dataclass_fields:
319 # Don't warn when inheriting stdlib dataclasses whose fields are "shadowed" by defaults being set
320 # on the class instance.
321 continue
322
323 if ann_name not in annotations:
324 # Don't warn when a field exists in a parent class but has not been defined in the current class
325 continue
326
327 warnings.warn(
328 f'Field name "{ann_name}" in "{cls.__qualname__}" shadows an attribute in parent '
329 f'"{base.__qualname__}"',
330 UserWarning,
331 stacklevel=4,
332 )
333
334 if assigned_value is PydanticUndefined: # no assignment, just a plain annotation
335 if ann_name in annotations or ann_name not in parent_fields_lookup:
336 # field is either:
337 # - present in the current model's annotations (and *not* from parent classes)
338 # - not found on any base classes; this seems to be caused by fields bot getting
339 # generated due to models not being fully defined while initializing recursive models.
340 # Nothing stops us from just creating a `FieldInfo` for this type hint, so we do this.
341 field_info = FieldInfo_.from_annotation(ann_type, _source=AnnotationSource.CLASS)
342 if not evaluated:
343 field_info._complete = False
344 # Store the original annotation that should be used to rebuild
345 # the field info later:
346 field_info._original_annotation = ann_type
347 else:
348 # The field was present on one of the (possibly multiple) base classes
349 # copy the field to make sure typevar substitutions don't cause issues with the base classes
350 field_info = parent_fields_lookup[ann_name]._copy()
351
352 else: # An assigned value is present (either the default value, or a `Field()` function)
353 if isinstance(assigned_value, FieldInfo_) and ismethoddescriptor(assigned_value.default):
354 # `assigned_value` was fetched using `getattr`, which triggers a call to `__get__`
355 # for descriptors, so we do the same if the `= field(default=...)` form is used.
356 # Note that we only do this for method descriptors for now, we might want to
357 # extend this to any descriptor in the future (by simply checking for
358 # `hasattr(assigned_value.default, '__get__')`).
359 default = assigned_value.default.__get__(None, cls)
360 assigned_value.default = default
361 assigned_value._attributes_set['default'] = default
362
363 field_info = FieldInfo_.from_annotated_attribute(ann_type, assigned_value, _source=AnnotationSource.CLASS)
364 # Store the original annotation and assignment value that should be used to rebuild the field info later.
365 # Note that the assignment is always stored as the annotation might contain a type var that is later
366 # parameterized with an unknown forward reference (and we'll need it to rebuild the field info):
367 field_info._original_assignment = assigned_value
368 if not evaluated:
369 field_info._complete = False
370 field_info._original_annotation = ann_type
371 elif 'final' in field_info._qualifiers and not field_info.is_required():
372 warnings.warn(
373 f'Annotation {ann_name!r} is marked as final and has a default value. Pydantic treats {ann_name!r} as a '
374 'class variable, but it will be considered as a normal field in V3 to be aligned with dataclasses. If you '
375 f'still want {ann_name!r} to be considered as a class variable, annotate it as: `ClassVar[<type>] = <default>.`',
376 category=PydanticDeprecatedSince211,
377 # Incorrect when `create_model` is used, but the chance that final with a default is used is low in that case:
378 stacklevel=4,
379 )
380 class_vars.add(ann_name)
381 continue
382
383 # attributes which are fields are removed from the class namespace:
384 # 1. To match the behaviour of annotation-only fields
385 # 2. To avoid false positives in the NameError check above
386 try:
387 delattr(cls, ann_name)
388 except AttributeError:
389 pass # indicates the attribute was on a parent class
390
391 # Use cls.__dict__['__pydantic_decorators__'] instead of cls.__pydantic_decorators__
392 # to make sure the decorators have already been built for this exact class
393 decorators: DecoratorInfos = cls.__dict__['__pydantic_decorators__']
394 if ann_name in decorators.computed_fields:
395 raise TypeError(
396 f'Field {ann_name!r} of class {cls.__name__!r} overrides symbol of same name in a parent class. '
397 'This override with a computed_field is incompatible.'
398 )
399 fields[ann_name] = field_info
400
401 if field_info._complete:
402 # If not complete, this will be called in `rebuild_model_fields()`:
403 update_field_from_config(config_wrapper, ann_name, field_info)
404
405 if typevars_map:
406 for field in fields.values():
407 if field._complete:
408 field.apply_typevars_map(typevars_map)
409
410 if config_wrapper.use_attribute_docstrings:
411 _update_fields_from_docstrings(cls, fields)
412 return fields, class_vars
413
414
415def rebuild_model_fields(
416 cls: type[BaseModel],
417 *,
418 config_wrapper: ConfigWrapper,
419 ns_resolver: NsResolver,
420 typevars_map: Mapping[TypeVar, Any],
421) -> dict[str, FieldInfo]:
422 """Rebuild the (already present) model fields by trying to reevaluate annotations.
423
424 This function should be called whenever a model with incomplete fields is encountered.
425
426 Raises:
427 NameError: If one of the annotations failed to evaluate.
428
429 Note:
430 This function *doesn't* mutate the model fields in place, as it can be called during
431 schema generation, where you don't want to mutate other model's fields.
432 """
433 FieldInfo_ = import_cached_field_info()
434
435 rebuilt_fields: dict[str, FieldInfo] = {}
436 with ns_resolver.push(cls):
437 for f_name, field_info in cls.__pydantic_fields__.items():
438 if field_info._complete:
439 rebuilt_fields[f_name] = field_info
440 else:
441 existing_desc = field_info.description
442 ann = _typing_extra.eval_type(
443 field_info._original_annotation,
444 *ns_resolver.types_namespace,
445 )
446 ann = _generics.replace_types(ann, typevars_map)
447
448 if (assign := field_info._original_assignment) is PydanticUndefined:
449 new_field = FieldInfo_.from_annotation(ann, _source=AnnotationSource.CLASS)
450 else:
451 new_field = FieldInfo_.from_annotated_attribute(ann, assign, _source=AnnotationSource.CLASS)
452 # The description might come from the docstring if `use_attribute_docstrings` was `True`:
453 new_field.description = new_field.description if new_field.description is not None else existing_desc
454 update_field_from_config(config_wrapper, f_name, new_field)
455 rebuilt_fields[f_name] = new_field
456
457 return rebuilt_fields
458
459
460def collect_dataclass_fields(
461 cls: type[StandardDataclass],
462 *,
463 config_wrapper: ConfigWrapper,
464 ns_resolver: NsResolver | None = None,
465 typevars_map: dict[Any, Any] | None = None,
466) -> dict[str, FieldInfo]:
467 """Collect the fields of a dataclass.
468
469 Args:
470 cls: dataclass.
471 config_wrapper: The config wrapper instance.
472 ns_resolver: Namespace resolver to use when getting dataclass annotations.
473 Defaults to an empty instance.
474 typevars_map: A dictionary mapping type variables to their concrete types.
475
476 Returns:
477 The dataclass fields.
478 """
479 FieldInfo_ = import_cached_field_info()
480
481 fields: dict[str, FieldInfo] = {}
482 ns_resolver = ns_resolver or NsResolver()
483 dataclass_fields = cls.__dataclass_fields__
484
485 # The logic here is similar to `_typing_extra.get_cls_type_hints`,
486 # although we do it manually as stdlib dataclasses already have annotations
487 # collected in each class:
488 for base in reversed(cls.__mro__):
489 if not dataclasses.is_dataclass(base):
490 continue
491
492 with ns_resolver.push(base):
493 for ann_name, dataclass_field in dataclass_fields.items():
494 base_anns = _typing_extra.safe_get_annotations(base)
495
496 if ann_name not in base_anns:
497 # `__dataclass_fields__`contains every field, even the ones from base classes.
498 # Only collect the ones defined on `base`.
499 continue
500
501 globalns, localns = ns_resolver.types_namespace
502 ann_type, evaluated = _typing_extra.try_eval_type(dataclass_field.type, globalns, localns)
503
504 if _typing_extra.is_classvar_annotation(ann_type):
505 continue
506
507 if (
508 not dataclass_field.init
509 and dataclass_field.default is dataclasses.MISSING
510 and dataclass_field.default_factory is dataclasses.MISSING
511 ):
512 # TODO: We should probably do something with this so that validate_assignment behaves properly
513 # Issue: https://github.com/pydantic/pydantic/issues/5470
514 continue
515
516 if isinstance(dataclass_field.default, FieldInfo_):
517 if dataclass_field.default.init_var:
518 if dataclass_field.default.init is False:
519 raise PydanticUserError(
520 f'Dataclass field {ann_name} has init=False and init_var=True, but these are mutually exclusive.',
521 code='clashing-init-and-init-var',
522 )
523
524 # TODO: same note as above re validate_assignment
525 continue
526 field_info = FieldInfo_.from_annotated_attribute(
527 ann_type, dataclass_field.default, _source=AnnotationSource.DATACLASS
528 )
529 field_info._original_assignment = dataclass_field.default
530 else:
531 field_info = FieldInfo_.from_annotated_attribute(
532 ann_type, dataclass_field, _source=AnnotationSource.DATACLASS
533 )
534 field_info._original_assignment = dataclass_field
535
536 if not evaluated:
537 field_info._complete = False
538 field_info._original_annotation = ann_type
539
540 fields[ann_name] = field_info
541 update_field_from_config(config_wrapper, ann_name, field_info)
542
543 if field_info.default is not PydanticUndefined and isinstance(
544 getattr(cls, ann_name, field_info), FieldInfo_
545 ):
546 # We need this to fix the default when the "default" from __dataclass_fields__ is a pydantic.FieldInfo
547 setattr(cls, ann_name, field_info.default)
548
549 if typevars_map:
550 for field in fields.values():
551 # We don't pass any ns, as `field.annotation`
552 # was already evaluated. TODO: is this method relevant?
553 # Can't we juste use `_generics.replace_types`?
554 field.apply_typevars_map(typevars_map)
555
556 if config_wrapper.use_attribute_docstrings:
557 _update_fields_from_docstrings(
558 cls,
559 fields,
560 # We can't rely on the (more reliable) frame inspection method
561 # for stdlib dataclasses:
562 use_inspect=not hasattr(cls, '__is_pydantic_dataclass__'),
563 )
564
565 return fields
566
567
568def rebuild_dataclass_fields(
569 cls: type[PydanticDataclass],
570 *,
571 config_wrapper: ConfigWrapper,
572 ns_resolver: NsResolver,
573 typevars_map: Mapping[TypeVar, Any],
574) -> dict[str, FieldInfo]:
575 """Rebuild the (already present) dataclass fields by trying to reevaluate annotations.
576
577 This function should be called whenever a dataclass with incomplete fields is encountered.
578
579 Raises:
580 NameError: If one of the annotations failed to evaluate.
581
582 Note:
583 This function *doesn't* mutate the dataclass fields in place, as it can be called during
584 schema generation, where you don't want to mutate other dataclass's fields.
585 """
586 FieldInfo_ = import_cached_field_info()
587
588 rebuilt_fields: dict[str, FieldInfo] = {}
589 with ns_resolver.push(cls):
590 for f_name, field_info in cls.__pydantic_fields__.items():
591 if field_info._complete:
592 rebuilt_fields[f_name] = field_info
593 else:
594 existing_desc = field_info.description
595 ann = _typing_extra.eval_type(
596 field_info._original_annotation,
597 *ns_resolver.types_namespace,
598 )
599 ann = _generics.replace_types(ann, typevars_map)
600 new_field = FieldInfo_.from_annotated_attribute(
601 ann,
602 field_info._original_assignment,
603 _source=AnnotationSource.DATACLASS,
604 )
605
606 # The description might come from the docstring if `use_attribute_docstrings` was `True`:
607 new_field.description = new_field.description if new_field.description is not None else existing_desc
608 update_field_from_config(config_wrapper, f_name, new_field)
609 rebuilt_fields[f_name] = new_field
610
611 return rebuilt_fields
612
613
614def is_valid_field_name(name: str) -> bool:
615 return not name.startswith('_')
616
617
618def is_valid_privateattr_name(name: str) -> bool:
619 return name.startswith('_') and not name.startswith('__')
620
621
622def takes_validated_data_argument(
623 default_factory: Callable[[], Any] | Callable[[dict[str, Any]], Any],
624) -> TypeIs[Callable[[dict[str, Any]], Any]]:
625 """Whether the provided default factory callable has a validated data parameter."""
626 try:
627 sig = signature(default_factory)
628 except (ValueError, TypeError):
629 # `inspect.signature` might not be able to infer a signature, e.g. with C objects.
630 # In this case, we assume no data argument is present:
631 return False
632
633 parameters = list(sig.parameters.values())
634
635 return len(parameters) == 1 and can_be_positional(parameters[0]) and parameters[0].default is Parameter.empty