"""Private logic related to fields (the `Field()` function and `FieldInfo` class), and arguments to `Annotated`."""

from __future__ import annotations as _annotations

import dataclasses
import warnings
from collections.abc import Mapping
from copy import copy
from functools import cache
from inspect import Parameter, ismethoddescriptor, signature
from re import Pattern
from typing import TYPE_CHECKING, Any, Callable, TypeVar

from pydantic_core import PydanticUndefined
from typing_extensions import TypeIs
from typing_inspection.introspection import AnnotationSource

from pydantic import PydanticDeprecatedSince211
from pydantic.errors import PydanticUserError

from ..aliases import AliasGenerator
from . import _generics, _typing_extra
from ._config import ConfigWrapper
from ._docs_extraction import extract_docstrings_from_cls
from ._import_utils import import_cached_base_model, import_cached_field_info
from ._namespace_utils import NsResolver
from ._repr import Representation
from ._utils import can_be_positional, get_first_not_none

if TYPE_CHECKING:
    from annotated_types import BaseMetadata

    from ..fields import FieldInfo
    from ..main import BaseModel
    from ._dataclasses import PydanticDataclass, StandardDataclass
    from ._decorators import DecoratorInfos


class PydanticMetadata(Representation):
    """Base class for annotation markers like `Strict`."""

    __slots__ = ()


def pydantic_general_metadata(**metadata: Any) -> BaseMetadata:
    """Create a new `_PydanticGeneralMetadata` instance with the given metadata.

    Args:
        **metadata: The metadata to add.

    Returns:
        The new `_PydanticGeneralMetadata` instance.
    """
    return _general_metadata_cls()(metadata)  # type: ignore


@cache
def _general_metadata_cls() -> type[BaseMetadata]:
    """Do it this way to avoid importing `annotated_types` at import time."""
    from annotated_types import BaseMetadata

    class _PydanticGeneralMetadata(PydanticMetadata, BaseMetadata):
        """Pydantic general metadata like `max_digits`."""

        def __init__(self, metadata: Any):
            self.__dict__ = metadata

    return _PydanticGeneralMetadata  # type: ignore
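
# Illustrative sketch (not part of the module): constraints with no `annotated_types` equivalent
# (e.g. `max_digits`) are bundled into a single instance via
# `pydantic_general_metadata(max_digits=4, decimal_places=2)`, whose `__dict__` is then
# `{'max_digits': 4, 'decimal_places': 2}` and which travels as `Annotated` metadata.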


def _check_protected_namespaces(
    protected_namespaces: tuple[str | Pattern[str], ...],
    ann_name: str,
    bases: tuple[type[Any], ...],
    cls_name: str,
) -> None:
    BaseModel = import_cached_base_model()

    for protected_namespace in protected_namespaces:
        ns_violation = False
        if isinstance(protected_namespace, Pattern):
            ns_violation = protected_namespace.match(ann_name) is not None
        elif isinstance(protected_namespace, str):
            ns_violation = ann_name.startswith(protected_namespace)

        if ns_violation:
            for b in bases:
                if hasattr(b, ann_name):
                    if not (issubclass(b, BaseModel) and ann_name in getattr(b, '__pydantic_fields__', {})):
                        raise ValueError(
                            f'Field {ann_name!r} conflicts with member {getattr(b, ann_name)}'
                            f' of protected namespace {protected_namespace!r}.'
                        )
            else:
                valid_namespaces: list[str] = []
                for pn in protected_namespaces:
                    if isinstance(pn, Pattern):
                        if not pn.match(ann_name):
                            valid_namespaces.append(f're.compile({pn.pattern!r})')
                    else:
                        if not ann_name.startswith(pn):
                            valid_namespaces.append(f"'{pn}'")

                valid_namespaces_str = f'({", ".join(valid_namespaces)}{",)" if len(valid_namespaces) == 1 else ")"}'

                warnings.warn(
                    f'Field {ann_name!r} in {cls_name!r} conflicts with protected namespace {protected_namespace!r}.\n\n'
                    f"You may be able to solve this by setting the 'protected_namespaces' configuration to {valid_namespaces_str}.",
                    UserWarning,
                    stacklevel=5,
                )

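
# Illustrative sketch (not part of the module), assuming `protected_namespaces=('model_',)` in the
# model config: a field named `model_foo` that doesn't clash with any base-class member only
# triggers the `UserWarning` above, while a field named `model_dump` (which collides with the
# `BaseModel.model_dump` method inherited from its base) raises the `ValueError`.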

def _update_fields_from_docstrings(cls: type[Any], fields: dict[str, FieldInfo], use_inspect: bool = False) -> None:
    fields_docs = extract_docstrings_from_cls(cls, use_inspect=use_inspect)
    for ann_name, field_info in fields.items():
        if field_info.description is None and ann_name in fields_docs:
            field_info.description = fields_docs[ann_name]


def _apply_field_title_generator_to_field_info(
    title_generator: Callable[[str, FieldInfo], str],
    field_name: str,
    field_info: FieldInfo,
):
    if field_info.title is None:
        title = title_generator(field_name, field_info)
        if not isinstance(title, str):
            raise TypeError(f'field_title_generator {title_generator} must return str, not {title.__class__}')

        field_info.title = title


def _apply_alias_generator_to_field_info(
    alias_generator: Callable[[str], str] | AliasGenerator, field_name: str, field_info: FieldInfo
):
    """Apply an alias generator to aliases on a `FieldInfo` instance if appropriate.

    Args:
        alias_generator: A callable that takes a string and returns a string, or an `AliasGenerator` instance.
        field_name: The name of the field from which to generate the alias.
        field_info: The `FieldInfo` instance to which the alias generator is (maybe) applied.
    """
    # Apply an alias_generator if
    # 1. An alias is not specified
    # 2. An alias is specified, but the priority is <= 1
    if (
        field_info.alias_priority is None
        or field_info.alias_priority <= 1
        or field_info.alias is None
        or field_info.validation_alias is None
        or field_info.serialization_alias is None
    ):
        alias, validation_alias, serialization_alias = None, None, None

        if isinstance(alias_generator, AliasGenerator):
            alias, validation_alias, serialization_alias = alias_generator.generate_aliases(field_name)
        elif callable(alias_generator):
            alias = alias_generator(field_name)
            if not isinstance(alias, str):
                raise TypeError(f'alias_generator {alias_generator} must return str, not {alias.__class__}')

        # if priority is not set, we set to 1
        # which supports the case where the alias_generator from a child class is used
        # to generate an alias for a field in a parent class
        if field_info.alias_priority is None or field_info.alias_priority <= 1:
            field_info.alias_priority = 1

        # if the priority is 1, then we set the aliases to the generated alias
        if field_info.alias_priority == 1:
            field_info.serialization_alias = get_first_not_none(serialization_alias, alias)
            field_info.validation_alias = get_first_not_none(validation_alias, alias)
            field_info.alias = alias

        # if any of the aliases are not set, then we set them to the corresponding generated alias
        if field_info.alias is None:
            field_info.alias = alias
        if field_info.serialization_alias is None:
            field_info.serialization_alias = get_first_not_none(serialization_alias, alias)
        if field_info.validation_alias is None:
            field_info.validation_alias = get_first_not_none(validation_alias, alias)

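
# Illustrative sketch (not part of the module): with a callable generator such as `str.upper`, a
# plain field `foo: int` ends up with `alias == 'FOO'` and `alias_priority == 1`, whereas
# `foo: int = Field(alias='bar')` keeps `alias == 'bar'` (explicitly set aliases carry
# `alias_priority=2`); the generated name then only fills in the still-unset validation and
# serialization aliases.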

def update_field_from_config(config_wrapper: ConfigWrapper, field_name: str, field_info: FieldInfo) -> None:
    """Update the `FieldInfo` instance from the configuration set on the model it belongs to.

    This will apply the title and alias generators from the configuration.

    Args:
        config_wrapper: The configuration from the model.
        field_name: The field name the `FieldInfo` instance is attached to.
        field_info: The `FieldInfo` instance to update.
    """
    field_title_generator = field_info.field_title_generator or config_wrapper.field_title_generator
    if field_title_generator is not None:
        _apply_field_title_generator_to_field_info(field_title_generator, field_name, field_info)
    if config_wrapper.alias_generator is not None:
        _apply_alias_generator_to_field_info(config_wrapper.alias_generator, field_name, field_info)

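
# Illustrative sketch (not part of the module), assuming a model config of
# `ConfigDict(alias_generator=to_camel)` with `to_camel` from `pydantic.alias_generators`: for a
# field named `created_at` with no explicit alias, this sets `alias`, `validation_alias` and
# `serialization_alias` to 'createdAt' on its `FieldInfo`.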

_deprecated_method_names = {'dict', 'json', 'copy', '_iter', '_copy_and_set_values', '_calculate_keys'}

_deprecated_classmethod_names = {
    'parse_obj',
    'parse_raw',
    'parse_file',
    'from_orm',
    'construct',
    'schema',
    'schema_json',
    'validate',
    'update_forward_refs',
    '_get_value',
}


def collect_model_fields(  # noqa: C901
    cls: type[BaseModel],
    config_wrapper: ConfigWrapper,
    ns_resolver: NsResolver | None,
    *,
    typevars_map: Mapping[TypeVar, Any] | None = None,
) -> tuple[dict[str, FieldInfo], set[str]]:
    """Collect the fields and class variable names of a nascent Pydantic model.

    The fields collection process is *lenient*, meaning it won't error if string annotations
    fail to evaluate. If this happens, the original annotation (and assigned value, if any)
    is stored on the created `FieldInfo` instance.

    The `rebuild_model_fields()` function should be called at a later point (e.g. when rebuilding the model),
    and will make use of these stored attributes.

    Args:
        cls: BaseModel or dataclass.
        config_wrapper: The config wrapper instance.
        ns_resolver: Namespace resolver to use when getting model annotations.
        typevars_map: A dictionary mapping type variables to their concrete types.

    Returns:
        A two-tuple containing model fields and class variable names.

    Raises:
        NameError:
            - If there is a conflict between a field name and protected namespaces.
            - If there is a field other than `root` in `RootModel`.
            - If a field shadows an attribute in the parent model.
    """
    FieldInfo_ = import_cached_field_info()
    BaseModel_ = import_cached_base_model()

    bases = cls.__bases__
    parent_fields_lookup: dict[str, FieldInfo] = {}
    for base in reversed(bases):
        if model_fields := getattr(base, '__pydantic_fields__', None):
            parent_fields_lookup.update(model_fields)

    type_hints = _typing_extra.get_model_type_hints(cls, ns_resolver=ns_resolver)

    # https://docs.python.org/3/howto/annotations.html#accessing-the-annotations-dict-of-an-object-in-python-3-9-and-older
    # annotations is only used for finding fields in parent classes
    annotations = _typing_extra.safe_get_annotations(cls)

    fields: dict[str, FieldInfo] = {}

    class_vars: set[str] = set()
    for ann_name, (ann_type, evaluated) in type_hints.items():
        if ann_name == 'model_config':
            # We never want to treat `model_config` as a field
            # Note: we may need to change this logic if/when we introduce a `BareModel` class with no
            # protected namespaces (where `model_config` might be allowed as a field name)
            continue

        _check_protected_namespaces(
            protected_namespaces=config_wrapper.protected_namespaces,
            ann_name=ann_name,
            bases=bases,
            cls_name=cls.__name__,
        )

        if _typing_extra.is_classvar_annotation(ann_type):
            class_vars.add(ann_name)
            continue

        assigned_value = getattr(cls, ann_name, PydanticUndefined)
        if assigned_value is not PydanticUndefined and (
            # One of the deprecated instance methods was used as a field name (e.g. `dict()`):
            any(getattr(BaseModel_, depr_name, None) is assigned_value for depr_name in _deprecated_method_names)
            # One of the deprecated class methods was used as a field name (e.g. `schema()`):
            or (
                hasattr(assigned_value, '__func__')
                and any(
                    getattr(getattr(BaseModel_, depr_name, None), '__func__', None) is assigned_value.__func__  # pyright: ignore[reportAttributeAccessIssue]
                    for depr_name in _deprecated_classmethod_names
                )
            )
        ):
            # Then `assigned_value` would be the method, even though no default was specified:
            assigned_value = PydanticUndefined

        if not is_valid_field_name(ann_name):
            continue
        if cls.__pydantic_root_model__ and ann_name != 'root':
            raise NameError(
                f"Unexpected field with name {ann_name!r}; only 'root' is allowed as a field of a `RootModel`"
            )

        # when building a generic model with `MyModel[int]`, the generic_origin check makes sure we don't get
        # "... shadows an attribute" warnings
        generic_origin = getattr(cls, '__pydantic_generic_metadata__', {}).get('origin')
        for base in bases:
            dataclass_fields = {
                field.name for field in (dataclasses.fields(base) if dataclasses.is_dataclass(base) else ())
            }
            if hasattr(base, ann_name):
                if base is generic_origin:
                    # Don't warn about attributes being "shadowed" in parametrized generics
                    continue

                if ann_name in dataclass_fields:
                    # Don't warn when inheriting stdlib dataclasses whose fields are "shadowed" by defaults being set
                    # on the class instance.
                    continue

                if ann_name not in annotations:
                    # Don't warn when a field exists in a parent class but has not been defined in the current class
                    continue

                warnings.warn(
                    f'Field name "{ann_name}" in "{cls.__qualname__}" shadows an attribute in parent '
                    f'"{base.__qualname__}"',
                    UserWarning,
                    stacklevel=4,
                )

        if assigned_value is PydanticUndefined:  # no assignment, just a plain annotation
            if ann_name in annotations or ann_name not in parent_fields_lookup:
                # field is either:
                # - present in the current model's annotations (and *not* from parent classes)
                # - not found on any base classes; this seems to be caused by fields not getting
                #   generated due to models not being fully defined while initializing recursive models.
                #   Nothing stops us from just creating a `FieldInfo` for this type hint, so we do this.
                field_info = FieldInfo_.from_annotation(ann_type, _source=AnnotationSource.CLASS)
                if not evaluated:
                    field_info._complete = False
                    # Store the original annotation that should be used to rebuild
                    # the field info later:
                    field_info._original_annotation = ann_type
            else:
                # The field was present on one of the (possibly multiple) base classes
                # copy the field to make sure typevar substitutions don't cause issues with the base classes
                field_info = copy(parent_fields_lookup[ann_name])

        else:  # An assigned value is present (either the default value, or a `Field()` function)
            if isinstance(assigned_value, FieldInfo_) and ismethoddescriptor(assigned_value.default):
                # `assigned_value` was fetched using `getattr`, which triggers a call to `__get__`
                # for descriptors, so we do the same if the `= field(default=...)` form is used.
                # Note that we only do this for method descriptors for now, we might want to
                # extend this to any descriptor in the future (by simply checking for
                # `hasattr(assigned_value.default, '__get__')`).
                default = assigned_value.default.__get__(None, cls)
                assigned_value.default = default
                assigned_value._attributes_set['default'] = default

            field_info = FieldInfo_.from_annotated_attribute(ann_type, assigned_value, _source=AnnotationSource.CLASS)
            # Store the original annotation and assignment value that should be used to rebuild the field info later.
            # Note that the assignment is always stored as the annotation might contain a type var that is later
            # parameterized with an unknown forward reference (and we'll need it to rebuild the field info):
            field_info._original_assignment = assigned_value
            if not evaluated:
                field_info._complete = False
                field_info._original_annotation = ann_type
            elif 'final' in field_info._qualifiers and not field_info.is_required():
                warnings.warn(
                    f'Annotation {ann_name!r} is marked as final and has a default value. Pydantic treats {ann_name!r} as a '
                    'class variable, but it will be considered as a normal field in V3 to be aligned with dataclasses. If you '
                    f'still want {ann_name!r} to be considered as a class variable, annotate it as: `ClassVar[<type>] = <default>.`',
                    category=PydanticDeprecatedSince211,
                    # Incorrect when `create_model` is used, but the chance that final with a default is used is low in that case:
                    stacklevel=4,
                )
                class_vars.add(ann_name)
                continue

        # attributes which are fields are removed from the class namespace:
        # 1. To match the behaviour of annotation-only fields
        # 2. To avoid false positives in the NameError check above
        try:
            delattr(cls, ann_name)
        except AttributeError:
            pass  # indicates the attribute was on a parent class

        # Use cls.__dict__['__pydantic_decorators__'] instead of cls.__pydantic_decorators__
        # to make sure the decorators have already been built for this exact class
        decorators: DecoratorInfos = cls.__dict__['__pydantic_decorators__']
        if ann_name in decorators.computed_fields:
            raise TypeError(
                f'Field {ann_name!r} of class {cls.__name__!r} overrides symbol of same name in a parent class. '
                'This override with a computed_field is incompatible.'
            )
        fields[ann_name] = field_info

        if field_info._complete:
            # If not complete, this will be called in `rebuild_model_fields()`:
            update_field_from_config(config_wrapper, ann_name, field_info)

    if typevars_map:
        for field in fields.values():
            if field._complete:
                field.apply_typevars_map(typevars_map)

    if config_wrapper.use_attribute_docstrings:
        _update_fields_from_docstrings(cls, fields)
    return fields, class_vars

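
# Illustrative sketch (not part of the module): if `collect_model_fields()` encounters an
# annotation that doesn't evaluate yet (e.g. `x: 'Bar'` with `Bar` defined later), the field is
# kept with `_complete = False` and its original annotation stored; once `Bar` exists, calling
# `Foo.model_rebuild()` reaches `rebuild_model_fields()` below to produce the final fields.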


def rebuild_model_fields(
    cls: type[BaseModel],
    *,
    config_wrapper: ConfigWrapper,
    ns_resolver: NsResolver,
    typevars_map: Mapping[TypeVar, Any],
) -> dict[str, FieldInfo]:
    """Rebuild the (already present) model fields by trying to reevaluate annotations.

    This function should be called whenever a model with incomplete fields is encountered.

    Raises:
        NameError: If one of the annotations failed to evaluate.

    Note:
        This function *doesn't* mutate the model fields in place, as it can be called during
        schema generation, where you don't want to mutate other models' fields.
    """
    FieldInfo_ = import_cached_field_info()

    rebuilt_fields: dict[str, FieldInfo] = {}
    with ns_resolver.push(cls):
        for f_name, field_info in cls.__pydantic_fields__.items():
            if field_info._complete:
                rebuilt_fields[f_name] = field_info
            else:
                existing_desc = field_info.description
                ann = _typing_extra.eval_type(
                    field_info._original_annotation,
                    *ns_resolver.types_namespace,
                )
                ann = _generics.replace_types(ann, typevars_map)

                if (assign := field_info._original_assignment) is PydanticUndefined:
                    new_field = FieldInfo_.from_annotation(ann, _source=AnnotationSource.CLASS)
                else:
                    new_field = FieldInfo_.from_annotated_attribute(ann, assign, _source=AnnotationSource.CLASS)
                # The description might come from the docstring if `use_attribute_docstrings` was `True`:
                new_field.description = new_field.description if new_field.description is not None else existing_desc
                update_field_from_config(config_wrapper, f_name, new_field)
                rebuilt_fields[f_name] = new_field

    return rebuilt_fields



def collect_dataclass_fields(
    cls: type[StandardDataclass],
    *,
    config_wrapper: ConfigWrapper,
    ns_resolver: NsResolver | None = None,
    typevars_map: dict[Any, Any] | None = None,
) -> dict[str, FieldInfo]:
    """Collect the fields of a dataclass.

    Args:
        cls: dataclass.
        config_wrapper: The config wrapper instance.
        ns_resolver: Namespace resolver to use when getting dataclass annotations.
            Defaults to an empty instance.
        typevars_map: A dictionary mapping type variables to their concrete types.

    Returns:
        The dataclass fields.
    """
    FieldInfo_ = import_cached_field_info()

    fields: dict[str, FieldInfo] = {}
    ns_resolver = ns_resolver or NsResolver()
    dataclass_fields = cls.__dataclass_fields__

    # The logic here is similar to `_typing_extra.get_cls_type_hints`,
    # although we do it manually as stdlib dataclasses already have annotations
    # collected in each class:
    for base in reversed(cls.__mro__):
        if not dataclasses.is_dataclass(base):
            continue

        with ns_resolver.push(base):
            for ann_name, dataclass_field in dataclass_fields.items():
                base_anns = _typing_extra.safe_get_annotations(base)

                if ann_name not in base_anns:
                    # `__dataclass_fields__` contains every field, even the ones from base classes.
                    # Only collect the ones defined on `base`.
                    continue

                globalns, localns = ns_resolver.types_namespace
                ann_type, evaluated = _typing_extra.try_eval_type(dataclass_field.type, globalns, localns)

                if _typing_extra.is_classvar_annotation(ann_type):
                    continue

                if (
                    not dataclass_field.init
                    and dataclass_field.default is dataclasses.MISSING
                    and dataclass_field.default_factory is dataclasses.MISSING
                ):
                    # TODO: We should probably do something with this so that validate_assignment behaves properly
                    # Issue: https://github.com/pydantic/pydantic/issues/5470
                    continue

                if isinstance(dataclass_field.default, FieldInfo_):
                    if dataclass_field.default.init_var:
                        if dataclass_field.default.init is False:
                            raise PydanticUserError(
                                f'Dataclass field {ann_name} has init=False and init_var=True, but these are mutually exclusive.',
                                code='clashing-init-and-init-var',
                            )

                        # TODO: same note as above re validate_assignment
                        continue
                    field_info = FieldInfo_.from_annotated_attribute(
                        ann_type, dataclass_field.default, _source=AnnotationSource.DATACLASS
                    )
                    field_info._original_assignment = dataclass_field.default
                else:
                    field_info = FieldInfo_.from_annotated_attribute(
                        ann_type, dataclass_field, _source=AnnotationSource.DATACLASS
                    )
                    field_info._original_assignment = dataclass_field

                if not evaluated:
                    field_info._complete = False
                    field_info._original_annotation = ann_type

                fields[ann_name] = field_info
                update_field_from_config(config_wrapper, ann_name, field_info)

                if field_info.default is not PydanticUndefined and isinstance(
                    getattr(cls, ann_name, field_info), FieldInfo_
                ):
                    # We need this to fix the default when the "default" from __dataclass_fields__ is a pydantic.FieldInfo
                    setattr(cls, ann_name, field_info.default)

    if typevars_map:
        for field in fields.values():
            # We don't pass any ns, as `field.annotation`
            # was already evaluated. TODO: is this method relevant?
            # Can't we just use `_generics.replace_types`?
            field.apply_typevars_map(typevars_map)

    if config_wrapper.use_attribute_docstrings:
        _update_fields_from_docstrings(
            cls,
            fields,
            # We can't rely on the (more reliable) frame inspection method
            # for stdlib dataclasses:
            use_inspect=not hasattr(cls, '__is_pydantic_dataclass__'),
        )

    return fields
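
# Illustrative sketch (not part of the module): for a hypothetical dataclass declaring
# `price: float = Field(default=1.0, gt=0)`, the `Field(...)` object shows up above as
# `dataclass_field.default`, so the `FieldInfo` is built from it and the class attribute is then
# reset to the plain default (`1.0`) by the `setattr` call.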


def rebuild_dataclass_fields(
    cls: type[PydanticDataclass],
    *,
    config_wrapper: ConfigWrapper,
    ns_resolver: NsResolver,
    typevars_map: Mapping[TypeVar, Any],
) -> dict[str, FieldInfo]:
    """Rebuild the (already present) dataclass fields by trying to reevaluate annotations.

    This function should be called whenever a dataclass with incomplete fields is encountered.

    Raises:
        NameError: If one of the annotations failed to evaluate.

    Note:
        This function *doesn't* mutate the dataclass fields in place, as it can be called during
        schema generation, where you don't want to mutate other dataclasses' fields.
    """
    FieldInfo_ = import_cached_field_info()

    rebuilt_fields: dict[str, FieldInfo] = {}
    with ns_resolver.push(cls):
        for f_name, field_info in cls.__pydantic_fields__.items():
            if field_info._complete:
                rebuilt_fields[f_name] = field_info
            else:
                existing_desc = field_info.description
                ann = _typing_extra.eval_type(
                    field_info._original_annotation,
                    *ns_resolver.types_namespace,
                )
                ann = _generics.replace_types(ann, typevars_map)
                new_field = FieldInfo_.from_annotated_attribute(
                    ann,
                    field_info._original_assignment,
                    _source=AnnotationSource.DATACLASS,
                )

                # The description might come from the docstring if `use_attribute_docstrings` was `True`:
                new_field.description = new_field.description if new_field.description is not None else existing_desc
                update_field_from_config(config_wrapper, f_name, new_field)
                rebuilt_fields[f_name] = new_field

    return rebuilt_fields


def is_valid_field_name(name: str) -> bool:
    return not name.startswith('_')


def is_valid_privateattr_name(name: str) -> bool:
    return name.startswith('_') and not name.startswith('__')


def takes_validated_data_argument(
    default_factory: Callable[[], Any] | Callable[[dict[str, Any]], Any],
) -> TypeIs[Callable[[dict[str, Any]], Any]]:
    """Whether the provided default factory callable has a validated data parameter."""
    try:
        sig = signature(default_factory)
    except (ValueError, TypeError):
        # `inspect.signature` might not be able to infer a signature, e.g. with C objects.
        # In this case, we assume no data argument is present:
        return False

    parameters = list(sig.parameters.values())

    return len(parameters) == 1 and can_be_positional(parameters[0]) and parameters[0].default is Parameter.empty
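
# Illustrative sketch (not part of the module): `takes_validated_data_argument` is what lets a
# `default_factory` be either a zero-argument callable or one receiving the already-validated
# data. Two hypothetical factories:
#
#     def make_id() -> int:  # no parameters -> False
#         return 0
#
#     def derive_slug(data: dict[str, Any]) -> str:  # single positional parameter -> True
#         return data['title'].lower().replace(' ', '-')
#
#     takes_validated_data_argument(make_id)      # False
#     takes_validated_data_argument(derive_slug)  # True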