1import copy
2import inspect
3import warnings
4from collections import defaultdict
5from functools import partialmethod
6from itertools import chain
7
8from asgiref.sync import sync_to_async
9
10import django
11from django.apps import apps
12from django.conf import settings
13from django.core import checks
14from django.core.exceptions import (
15 NON_FIELD_ERRORS,
16 FieldDoesNotExist,
17 FieldError,
18 MultipleObjectsReturned,
19 ObjectDoesNotExist,
20 ValidationError,
21)
22from django.db import (
23 DJANGO_VERSION_PICKLE_KEY,
24 DatabaseError,
25 connection,
26 connections,
27 router,
28 transaction,
29)
30from django.db.models import NOT_PROVIDED, ExpressionWrapper, IntegerField, Max, Value
31from django.db.models.constants import LOOKUP_SEP
32from django.db.models.deletion import CASCADE, Collector
33from django.db.models.expressions import DatabaseDefault
34from django.db.models.fields.composite import CompositePrimaryKey
35from django.db.models.fields.related import (
36 ForeignObjectRel,
37 OneToOneField,
38 lazy_related_operation,
39 resolve_relation,
40)
41from django.db.models.functions import Coalesce
42from django.db.models.manager import Manager
43from django.db.models.options import Options
44from django.db.models.query import F, Q
45from django.db.models.signals import (
46 class_prepared,
47 post_init,
48 post_save,
49 pre_init,
50 pre_save,
51)
52from django.db.models.utils import AltersData, make_model_tuple
53from django.utils.encoding import force_str
54from django.utils.hashable import make_hashable
55from django.utils.text import capfirst, get_text_list
56from django.utils.translation import gettext_lazy as _
57
58
class Deferred:
    """Sentinel type marking a model field whose value was not loaded."""

    def __repr__(self):
        return "<Deferred field>"

    # str() and repr() render identically for this sentinel.
    __str__ = __repr__
65
66
67DEFERRED = Deferred()
68
69
70def subclass_exception(name, bases, module, attached_to):
71 """
72 Create exception subclass. Used by ModelBase below.
73
74 The exception is created in a way that allows it to be pickled, assuming
75 that the returned exception class will be added as an attribute to the
76 'attached_to' class.
77 """
78 return type(
79 name,
80 bases,
81 {
82 "__module__": module,
83 "__qualname__": "%s.%s" % (attached_to.__qualname__, name),
84 },
85 )
86
87
88def _has_contribute_to_class(value):
89 # Only call contribute_to_class() if it's bound.
90 return not inspect.isclass(value) and hasattr(value, "contribute_to_class")
91
92
class ModelBase(type):
    """
    Metaclass for all models.

    Builds the ``_meta`` Options object, wires up inheritance (abstract,
    proxy, and multi-table), attaches per-model exception classes, and
    registers the finished model with the app registry.
    """

    def __new__(cls, name, bases, attrs, **kwargs):
        super_new = super().__new__

        # Also ensure initialization is only performed for subclasses of Model
        # (excluding Model class itself).
        parents = [b for b in bases if isinstance(b, ModelBase)]
        if not parents:
            return super_new(cls, name, bases, attrs)

        # Create the class.
        module = attrs.pop("__module__")
        new_attrs = {"__module__": module}
        classcell = attrs.pop("__classcell__", None)
        if classcell is not None:
            new_attrs["__classcell__"] = classcell
        attr_meta = attrs.pop("Meta", None)
        # Pass all attrs without a (Django-specific) contribute_to_class()
        # method to type.__new__() so that they're properly initialized
        # (i.e. __set_name__()).
        contributable_attrs = {}
        for obj_name, obj in attrs.items():
            if _has_contribute_to_class(obj):
                contributable_attrs[obj_name] = obj
            else:
                new_attrs[obj_name] = obj
        new_class = super_new(cls, name, bases, new_attrs, **kwargs)

        # A Meta declared on this class (attr_meta) takes precedence over a
        # Meta inherited from a parent class.
        abstract = getattr(attr_meta, "abstract", False)
        meta = attr_meta or getattr(new_class, "Meta", None)
        base_meta = getattr(new_class, "_meta", None)

        app_label = None

        # Look for an application configuration to attach the model to.
        app_config = apps.get_containing_app_config(module)

        if getattr(meta, "app_label", None) is None:
            if app_config is None:
                if not abstract:
                    raise RuntimeError(
                        "Model class %s.%s doesn't declare an explicit "
                        "app_label and isn't in an application in "
                        "INSTALLED_APPS." % (module, name)
                    )

            else:
                app_label = app_config.label

        new_class.add_to_class("_meta", Options(meta, app_label))
        if not abstract:
            # Each concrete model gets its own picklable DoesNotExist /
            # MultipleObjectsReturned, derived from those of concrete parents.
            new_class.add_to_class(
                "DoesNotExist",
                subclass_exception(
                    "DoesNotExist",
                    tuple(
                        x.DoesNotExist
                        for x in parents
                        if hasattr(x, "_meta") and not x._meta.abstract
                    )
                    or (ObjectDoesNotExist,),
                    module,
                    attached_to=new_class,
                ),
            )
            new_class.add_to_class(
                "MultipleObjectsReturned",
                subclass_exception(
                    "MultipleObjectsReturned",
                    tuple(
                        x.MultipleObjectsReturned
                        for x in parents
                        if hasattr(x, "_meta") and not x._meta.abstract
                    )
                    or (MultipleObjectsReturned,),
                    module,
                    attached_to=new_class,
                ),
            )
            if base_meta and not base_meta.abstract:
                # Non-abstract child classes inherit some attributes from their
                # non-abstract parent (unless an ABC comes before it in the
                # method resolution order).
                if not hasattr(meta, "ordering"):
                    new_class._meta.ordering = base_meta.ordering
                if not hasattr(meta, "get_latest_by"):
                    new_class._meta.get_latest_by = base_meta.get_latest_by

        is_proxy = new_class._meta.proxy

        # If the model is a proxy, ensure that the base class
        # hasn't been swapped out.
        if is_proxy and base_meta and base_meta.swapped:
            raise TypeError(
                "%s cannot proxy the swapped model '%s'." % (name, base_meta.swapped)
            )

        # Add remaining attributes (those with a contribute_to_class() method)
        # to the class.
        for obj_name, obj in contributable_attrs.items():
            new_class.add_to_class(obj_name, obj)

        # All the fields of any type declared on this model
        new_fields = chain(
            new_class._meta.local_fields,
            new_class._meta.local_many_to_many,
            new_class._meta.private_fields,
        )
        field_names = {f.name for f in new_fields}

        # Basic setup for proxy models.
        if is_proxy:
            base = None
            for parent in [kls for kls in parents if hasattr(kls, "_meta")]:
                if parent._meta.abstract:
                    if parent._meta.fields:
                        raise TypeError(
                            "Abstract base class containing model fields not "
                            "permitted for proxy model '%s'." % name
                        )
                    else:
                        continue
                if base is None:
                    base = parent
                elif parent._meta.concrete_model is not base._meta.concrete_model:
                    raise TypeError(
                        "Proxy model '%s' has more than one non-abstract model base "
                        "class." % name
                    )
            if base is None:
                raise TypeError(
                    "Proxy model '%s' has no non-abstract model base class." % name
                )
            new_class._meta.setup_proxy(base)
            new_class._meta.concrete_model = base._meta.concrete_model
        else:
            new_class._meta.concrete_model = new_class

        # Collect the parent links for multi-table inheritance.
        parent_links = {}
        for base in reversed([new_class] + parents):
            # Conceptually equivalent to `if base is Model`.
            if not hasattr(base, "_meta"):
                continue
            # Skip concrete parent classes.
            if base != new_class and not base._meta.abstract:
                continue
            # Locate OneToOneField instances.
            for field in base._meta.local_fields:
                if isinstance(field, OneToOneField) and field.remote_field.parent_link:
                    related = resolve_relation(new_class, field.remote_field.model)
                    parent_links[make_model_tuple(related)] = field

        # Track fields inherited from base models.
        inherited_attributes = set()
        # Do the appropriate setup for any model parents.
        for base in new_class.mro():
            if base not in parents or not hasattr(base, "_meta"):
                # Things without _meta aren't functional models, so they're
                # uninteresting parents.
                inherited_attributes.update(base.__dict__)
                continue

            parent_fields = base._meta.local_fields + base._meta.local_many_to_many
            if not base._meta.abstract:
                # Check for clashes between locally declared fields and those
                # on the base classes.
                for field in parent_fields:
                    if field.name in field_names:
                        raise FieldError(
                            "Local field %r in class %r clashes with field of "
                            "the same name from base class %r."
                            % (
                                field.name,
                                name,
                                base.__name__,
                            )
                        )
                    else:
                        inherited_attributes.add(field.name)

                # Concrete classes...
                base = base._meta.concrete_model
                base_key = make_model_tuple(base)
                if base_key in parent_links:
                    # An explicit parent_link=True OneToOneField was declared.
                    field = parent_links[base_key]
                elif not is_proxy:
                    # Auto-create the implicit "<parent>_ptr" link field.
                    attr_name = "%s_ptr" % base._meta.model_name
                    field = OneToOneField(
                        base,
                        on_delete=CASCADE,
                        name=attr_name,
                        auto_created=True,
                        parent_link=True,
                    )

                    if attr_name in field_names:
                        raise FieldError(
                            "Auto-generated field '%s' in class %r for "
                            "parent_link to base class %r clashes with "
                            "declared field of the same name."
                            % (
                                attr_name,
                                name,
                                base.__name__,
                            )
                        )

                    # Only add the ptr field if it's not already present;
                    # e.g. migrations will already have it specified
                    if not hasattr(new_class, attr_name):
                        new_class.add_to_class(attr_name, field)
                else:
                    field = None
                new_class._meta.parents[base] = field
            else:
                base_parents = base._meta.parents.copy()

                # Add fields from abstract base class if it wasn't overridden.
                for field in parent_fields:
                    if (
                        field.name not in field_names
                        and field.name not in new_class.__dict__
                        and field.name not in inherited_attributes
                    ):
                        new_field = copy.deepcopy(field)
                        new_class.add_to_class(field.name, new_field)
                        # Replace parent links defined on this base by the new
                        # field. It will be appropriately resolved if required.
                        if field.one_to_one:
                            for parent, parent_link in base_parents.items():
                                if field == parent_link:
                                    base_parents[parent] = new_field

                # Pass any non-abstract parent classes onto child.
                new_class._meta.parents.update(base_parents)

            # Inherit private fields (like GenericForeignKey) from the parent
            # class
            for field in base._meta.private_fields:
                if field.name in field_names:
                    if not base._meta.abstract:
                        raise FieldError(
                            "Local field %r in class %r clashes with field of "
                            "the same name from base class %r."
                            % (
                                field.name,
                                name,
                                base.__name__,
                            )
                        )
                else:
                    field = copy.deepcopy(field)
                    if not base._meta.abstract:
                        field.mti_inherited = True
                    new_class.add_to_class(field.name, field)

        # Copy indexes so that index names are unique when models extend an
        # abstract model.
        new_class._meta.indexes = [
            copy.deepcopy(idx) for idx in new_class._meta.indexes
        ]

        if abstract:
            # Abstract base models can't be instantiated and don't appear in
            # the list of models for an app. We do the final setup for them a
            # little differently from normal models.
            attr_meta.abstract = False
            new_class.Meta = attr_meta
            return new_class

        new_class._prepare()
        new_class._meta.apps.register_model(new_class._meta.app_label, new_class)
        return new_class

    def add_to_class(cls, name, value):
        """
        Attach value to the model class under name, going through the
        value's contribute_to_class() hook when it defines one.
        """
        if _has_contribute_to_class(value):
            value.contribute_to_class(cls, name)
        else:
            setattr(cls, name, value)

    def _prepare(cls):
        """Create some methods once self._meta has been populated."""
        opts = cls._meta
        opts._prepare(cls)

        if opts.order_with_respect_to:
            cls.get_next_in_order = partialmethod(
                cls._get_next_or_previous_in_order, is_next=True
            )
            cls.get_previous_in_order = partialmethod(
                cls._get_next_or_previous_in_order, is_next=False
            )

            # Defer creating accessors on the foreign class until it has been
            # created and registered. If remote_field is None, we're ordering
            # with respect to a GenericForeignKey and don't know what the
            # foreign class is - we'll add those accessors later in
            # contribute_to_class().
            if opts.order_with_respect_to.remote_field:
                wrt = opts.order_with_respect_to
                remote = wrt.remote_field.model
                lazy_related_operation(make_foreign_order_accessors, cls, remote)

        # Give the class a docstring -- its definition.
        if cls.__doc__ is None:
            cls.__doc__ = "%s(%s)" % (
                cls.__name__,
                ", ".join(f.name for f in opts.fields),
            )

        get_absolute_url_override = settings.ABSOLUTE_URL_OVERRIDES.get(
            opts.label_lower
        )
        if get_absolute_url_override:
            setattr(cls, "get_absolute_url", get_absolute_url_override)

        if not opts.managers:
            # A field named "objects" would shadow the default manager.
            if any(f.name == "objects" for f in opts.fields):
                raise ValueError(
                    "Model %s must specify a custom Manager, because it has a "
                    "field named 'objects'." % cls.__name__
                )
            manager = Manager()
            manager.auto_created = True
            cls.add_to_class("objects", manager)

        # Set the name of _meta.indexes. This can't be done in
        # Options.contribute_to_class() because fields haven't been added to
        # the model at that point.
        for index in cls._meta.indexes:
            if not index.name:
                index.set_name_with_model(cls)

        class_prepared.send(sender=cls)

    @property
    def _base_manager(cls):
        """The manager used internally for plain, unfiltered queries."""
        return cls._meta.base_manager

    @property
    def _default_manager(cls):
        """The manager used by default for this model's queries."""
        return cls._meta.default_manager
438
439
class ModelStateFieldsCacheDescriptor:
    """Lazily create and memoize a per-instance ``fields_cache`` dict."""

    def __get__(self, instance, cls=None):
        # Class-level access returns the descriptor itself.
        if instance is None:
            return self
        # First instance access: create the dict and store it on the instance
        # so subsequent lookups bypass this (non-data) descriptor.
        cache = {}
        instance.fields_cache = cache
        return cache
446
447
class ModelState:
    """Store model instance state."""

    # Database alias the instance was loaded from / saved to; None while the
    # instance has not touched any database.
    db = None
    # If true, uniqueness validation checks will consider this a new, unsaved
    # object. Necessary for correct validation of new instances of objects with
    # explicit (non-auto) PKs. This impacts validation only; it has no effect
    # on the actual save.
    adding = True
    # Per-instance cache of related objects, created lazily on first access.
    fields_cache = ModelStateFieldsCacheDescriptor()
458
459
460class Model(AltersData, metaclass=ModelBase):
    def __init__(self, *args, **kwargs):
        """
        Initialize the instance from positional field values and/or keyword
        arguments, fill remaining fields from their defaults, and send the
        pre_init/post_init signals. Raises TypeError on unexpected kwargs or
        on abstract models, IndexError on too many positional args.
        """
        # Alias some things as locals to avoid repeat global lookups
        cls = self.__class__
        opts = self._meta
        _setattr = setattr
        _DEFERRED = DEFERRED
        if opts.abstract:
            raise TypeError("Abstract models cannot be instantiated.")

        pre_init.send(sender=cls, args=args, kwargs=kwargs)

        # Set up the storage for instance state
        self._state = ModelState()

        # There is a rather weird disparity here; if kwargs, it's set, then args
        # overrides it. It should be one or the other; don't duplicate the work
        # The reason for the kwargs check is that standard iterator passes in by
        # args, and instantiation for iteration is 33% faster.
        if len(args) > len(opts.concrete_fields):
            # Daft, but matches old exception sans the err msg.
            raise IndexError("Number of args exceeds number of fields")

        if not kwargs:
            fields_iter = iter(opts.concrete_fields)
            # The ordering of the zip calls matter - zip throws StopIteration
            # when an iter throws it. So if the first iter throws it, the second
            # is *not* consumed. We rely on this, so don't change the order
            # without changing the logic.
            for val, field in zip(args, fields_iter):
                if val is _DEFERRED:
                    continue
                _setattr(self, field.attname, val)
        else:
            # Slower, kwargs-ready version.
            fields_iter = iter(opts.fields)
            for val, field in zip(args, fields_iter):
                if val is _DEFERRED:
                    continue
                _setattr(self, field.attname, val)
                # Reject a field passed both positionally and by keyword.
                if kwargs.pop(field.name, NOT_PROVIDED) is not NOT_PROVIDED:
                    raise TypeError(
                        f"{cls.__qualname__}() got both positional and "
                        f"keyword arguments for field '{field.name}'."
                    )

        # Now we're left with the unprocessed fields that *must* come from
        # keywords, or default.

        for field in fields_iter:
            is_related_object = False
            # Virtual field
            if field.column is None or field.generated:
                continue
            if kwargs:
                if isinstance(field.remote_field, ForeignObjectRel):
                    try:
                        # Assume object instance was passed in.
                        rel_obj = kwargs.pop(field.name)
                        is_related_object = True
                    except KeyError:
                        try:
                            # Object instance wasn't passed in -- must be an ID.
                            val = kwargs.pop(field.attname)
                        except KeyError:
                            val = field.get_default()
                else:
                    try:
                        val = kwargs.pop(field.attname)
                    except KeyError:
                        # This is done with an exception rather than the
                        # default argument on pop because we don't want
                        # get_default() to be evaluated, and then not used.
                        # Refs #12057.
                        val = field.get_default()
            else:
                val = field.get_default()

            if is_related_object:
                # If we are passed a related instance, set it using the
                # field.name instead of field.attname (e.g. "user" instead of
                # "user_id") so that the object gets properly cached (and type
                # checked) by the RelatedObjectDescriptor.
                if rel_obj is not _DEFERRED:
                    _setattr(self, field.name, rel_obj)
            else:
                if val is not _DEFERRED:
                    _setattr(self, field.attname, val)

        if kwargs:
            property_names = opts._property_names
            unexpected = ()
            for prop, value in kwargs.items():
                # Any remaining kwargs must correspond to properties or virtual
                # fields.
                if prop in property_names:
                    if value is not _DEFERRED:
                        _setattr(self, prop, value)
                else:
                    try:
                        opts.get_field(prop)
                    except FieldDoesNotExist:
                        unexpected += (prop,)
                    else:
                        if value is not _DEFERRED:
                            _setattr(self, prop, value)
            if unexpected:
                unexpected_names = ", ".join(repr(n) for n in unexpected)
                raise TypeError(
                    f"{cls.__name__}() got unexpected keyword arguments: "
                    f"{unexpected_names}"
                )
        super().__init__()
        post_init.send(sender=cls, instance=self)
574
575 @classmethod
576 def from_db(cls, db, field_names, values):
577 if len(values) != len(cls._meta.concrete_fields):
578 values_iter = iter(values)
579 values = [
580 next(values_iter) if f.attname in field_names else DEFERRED
581 for f in cls._meta.concrete_fields
582 ]
583 new = cls(*values)
584 new._state.adding = False
585 new._state.db = db
586 return new
587
588 def __repr__(self):
589 return "<%s: %s>" % (self.__class__.__name__, self)
590
591 def __str__(self):
592 return "%s object (%s)" % (self.__class__.__name__, self.pk)
593
594 def __eq__(self, other):
595 if not isinstance(other, Model):
596 return NotImplemented
597 if self._meta.concrete_model != other._meta.concrete_model:
598 return False
599 my_pk = self.pk
600 if my_pk is None:
601 return self is other
602 return my_pk == other.pk
603
604 def __hash__(self):
605 if not self._is_pk_set():
606 raise TypeError("Model instances without primary key value are unhashable")
607 return hash(self.pk)
608
609 def __reduce__(self):
610 data = self.__getstate__()
611 data[DJANGO_VERSION_PICKLE_KEY] = django.__version__
612 class_id = self._meta.app_label, self._meta.object_name
613 return model_unpickle, (class_id,), data
614
615 def __getstate__(self):
616 """Hook to allow choosing the attributes to pickle."""
617 state = self.__dict__.copy()
618 state["_state"] = copy.copy(state["_state"])
619 state["_state"].fields_cache = state["_state"].fields_cache.copy()
620 # memoryview cannot be pickled, so cast it to bytes and store
621 # separately.
622 _memoryview_attrs = []
623 for attr, value in state.items():
624 if isinstance(value, memoryview):
625 _memoryview_attrs.append((attr, bytes(value)))
626 if _memoryview_attrs:
627 state["_memoryview_attrs"] = _memoryview_attrs
628 for attr, value in _memoryview_attrs:
629 state.pop(attr)
630 return state
631
632 def __setstate__(self, state):
633 pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY)
634 if pickled_version:
635 if pickled_version != django.__version__:
636 warnings.warn(
637 "Pickled model instance's Django version %s does not "
638 "match the current version %s."
639 % (pickled_version, django.__version__),
640 RuntimeWarning,
641 stacklevel=2,
642 )
643 else:
644 warnings.warn(
645 "Pickled model instance's Django version is not specified.",
646 RuntimeWarning,
647 stacklevel=2,
648 )
649 if "_memoryview_attrs" in state:
650 for attr, value in state.pop("_memoryview_attrs"):
651 state[attr] = memoryview(value)
652 self.__dict__.update(state)
653
654 def _get_pk_val(self, meta=None):
655 meta = meta or self._meta
656 return getattr(self, meta.pk.attname)
657
658 def _set_pk_val(self, value):
659 for parent_link in self._meta.parents.values():
660 if parent_link and parent_link != self._meta.pk:
661 setattr(self, parent_link.target_field.attname, value)
662 return setattr(self, self._meta.pk.attname, value)
663
664 pk = property(_get_pk_val, _set_pk_val)
665
666 def _is_pk_set(self, meta=None):
667 pk_val = self._get_pk_val(meta)
668 return not (
669 pk_val is None
670 or (isinstance(pk_val, tuple) and any(f is None for f in pk_val))
671 )
672
673 def get_deferred_fields(self):
674 """
675 Return a set containing names of deferred fields for this instance.
676 """
677 return {
678 f.attname
679 for f in self._meta.concrete_fields
680 if f.attname not in self.__dict__
681 }
682
    def refresh_from_db(self, using=None, fields=None, from_queryset=None):
        """
        Reload field values from the database.

        By default, the reloading happens from the database this instance was
        loaded from, or by the read router if this instance wasn't loaded from
        any database. The using parameter will override the default.

        Fields can be used to specify which fields to reload. The fields
        should be an iterable of field attnames. If fields is None, then
        all non-deferred fields are reloaded.

        If from_queryset is given, the instance is re-fetched through that
        queryset instead of the default base manager.

        When accessing deferred fields of an instance, the deferred loading
        of the field will call this method.
        """
        if fields is None:
            self._prefetched_objects_cache = {}
        else:
            # Drop stale prefetch caches for the requested fields; those
            # fields need no further DB reload afterwards.
            prefetched_objects_cache = getattr(self, "_prefetched_objects_cache", ())
            fields = set(fields)
            for field in fields.copy():
                if field in prefetched_objects_cache:
                    del prefetched_objects_cache[field]
                    fields.remove(field)
            if not fields:
                return
            if any(LOOKUP_SEP in f for f in fields):
                raise ValueError(
                    'Found "%s" in fields argument. Relations and transforms '
                    "are not allowed in fields." % LOOKUP_SEP
                )

        if from_queryset is None:
            hints = {"instance": self}
            from_queryset = self.__class__._base_manager.db_manager(using, hints=hints)
        elif using is not None:
            from_queryset = from_queryset.using(using)

        db_instance_qs = from_queryset.filter(pk=self.pk)

        # Use provided fields, if not set then reload all non-deferred fields.
        deferred_fields = self.get_deferred_fields()
        if fields is not None:
            db_instance_qs = db_instance_qs.only(*fields)
        elif deferred_fields:
            db_instance_qs = db_instance_qs.only(
                *{
                    f.attname
                    for f in self._meta.concrete_fields
                    if f.attname not in deferred_fields
                }
            )

        db_instance = db_instance_qs.get()
        non_loaded_fields = db_instance.get_deferred_fields()
        for field in self._meta.concrete_fields:
            if field.attname in non_loaded_fields:
                # This field wasn't refreshed - skip ahead.
                continue
            setattr(self, field.attname, getattr(db_instance, field.attname))
            # Clear or copy cached foreign keys.
            if field.is_relation:
                if field.is_cached(db_instance):
                    field.set_cached_value(self, field.get_cached_value(db_instance))
                elif field.is_cached(self):
                    field.delete_cached_value(self)

        # Clear cached relations.
        for rel in self._meta.related_objects:
            if (fields is None or rel.name in fields) and rel.is_cached(self):
                rel.delete_cached_value(self)

        # Clear cached private relations.
        for field in self._meta.private_fields:
            if (
                (fields is None or field.name in fields)
                and field.is_relation
                and field.is_cached(self)
            ):
                field.delete_cached_value(self)

        # Remember which database the fresh values came from.
        self._state.db = db_instance._state.db
765
766 async def arefresh_from_db(self, using=None, fields=None, from_queryset=None):
767 return await sync_to_async(self.refresh_from_db)(
768 using=using, fields=fields, from_queryset=from_queryset
769 )
770
771 def serializable_value(self, field_name):
772 """
773 Return the value of the field name for this instance. If the field is
774 a foreign key, return the id value instead of the object. If there's
775 no Field object with this name on the model, return the model
776 attribute's value.
777
778 Used to serialize a field's value (in the serializer, or form output,
779 for example). Normally, you would just access the attribute directly
780 and not use this method.
781 """
782 try:
783 field = self._meta.get_field(field_name)
784 except FieldDoesNotExist:
785 return getattr(self, field_name)
786 return getattr(self, field.attname)
787
    def save(
        self,
        *,
        force_insert=False,
        force_update=False,
        using=None,
        update_fields=None,
    ):
        """
        Save the current instance. Override this in a subclass if you want to
        control the saving process.

        The 'force_insert' and 'force_update' parameters can be used to insist
        that the "save" must be an SQL insert or update (or equivalent for
        non-SQL backends), respectively. Normally, they should not be set.

        'using' selects the target database; 'update_fields' restricts the
        UPDATE to the named concrete, non-primary-key fields.
        """

        self._prepare_related_fields_for_save(operation_name="save")

        using = using or router.db_for_write(self.__class__, instance=self)
        if force_insert and (force_update or update_fields):
            raise ValueError("Cannot force both insert and updating in model saving.")

        # Concrete fields that were never loaded (deferred) and aren't
        # generated by the database.
        deferred_non_generated_fields = {
            f.attname
            for f in self._meta.concrete_fields
            if f.attname not in self.__dict__ and f.generated is False
        }
        if update_fields is not None:
            # If update_fields is empty, skip the save. We do also check for
            # no-op saves later on for inheritance cases. This bailout is
            # still needed for skipping signal sending.
            if not update_fields:
                return

            update_fields = frozenset(update_fields)
            field_names = self._meta._non_pk_concrete_field_names
            not_updatable_fields = update_fields.difference(field_names)

            if not_updatable_fields:
                raise ValueError(
                    "The following fields do not exist in this model, are m2m "
                    "fields, primary keys, or are non-concrete fields: %s"
                    % ", ".join(not_updatable_fields)
                )

        # If saving to the same database, and this model is deferred, then
        # automatically do an "update_fields" save on the loaded fields.
        elif (
            not force_insert
            and deferred_non_generated_fields
            and using == self._state.db
        ):
            field_names = set()
            pk_fields = self._meta.pk_fields
            for field in self._meta.concrete_fields:
                # Exclude pk fields and m2m-like fields (those with "through").
                if field not in pk_fields and not hasattr(field, "through"):
                    field_names.add(field.attname)
            loaded_fields = field_names.difference(deferred_non_generated_fields)
            if loaded_fields:
                update_fields = frozenset(loaded_fields)

        self.save_base(
            using=using,
            force_insert=force_insert,
            force_update=force_update,
            update_fields=update_fields,
        )

    save.alters_data = True
858
859 async def asave(
860 self,
861 *,
862 force_insert=False,
863 force_update=False,
864 using=None,
865 update_fields=None,
866 ):
867 return await sync_to_async(self.save)(
868 force_insert=force_insert,
869 force_update=force_update,
870 using=using,
871 update_fields=update_fields,
872 )
873
874 asave.alters_data = True
875
876 @classmethod
877 def _validate_force_insert(cls, force_insert):
878 if force_insert is False:
879 return ()
880 if force_insert is True:
881 return (cls,)
882 if not isinstance(force_insert, tuple):
883 raise TypeError("force_insert must be a bool or tuple.")
884 for member in force_insert:
885 if not isinstance(member, ModelBase):
886 raise TypeError(
887 f"Invalid force_insert member. {member!r} must be a model subclass."
888 )
889 if not issubclass(cls, member):
890 raise TypeError(
891 f"Invalid force_insert member. {member.__qualname__} must be a "
892 f"base of {cls.__qualname__}."
893 )
894 return force_insert
895
    def save_base(
        self,
        raw=False,
        force_insert=False,
        force_update=False,
        using=None,
        update_fields=None,
    ):
        """
        Handle the parts of saving which should be done only once per save,
        yet need to be done in raw saves, too. This includes some sanity
        checks and signal sending.

        The 'raw' argument is telling save_base not to save any parent
        models and not to do any changes to the values before save. This
        is used by fixture loading.
        """
        using = using or router.db_for_write(self.__class__, instance=self)
        assert not (force_insert and (force_update or update_fields))
        assert update_fields is None or update_fields
        cls = origin = self.__class__
        # Skip proxies, but keep the origin as the proxy model.
        if cls._meta.proxy:
            cls = cls._meta.concrete_model
        meta = cls._meta
        # Auto-created (e.g. intermediary m2m) models don't send signals.
        if not meta.auto_created:
            pre_save.send(
                sender=origin,
                instance=self,
                raw=raw,
                using=using,
                update_fields=update_fields,
            )
        # A transaction isn't needed if one query is issued.
        if meta.parents:
            context_manager = transaction.atomic(using=using, savepoint=False)
        else:
            context_manager = transaction.mark_for_rollback_on_error(using=using)
        with context_manager:
            parent_inserted = False
            if not raw:
                # Validate force insert only when parents are inserted.
                force_insert = self._validate_force_insert(force_insert)
                parent_inserted = self._save_parents(
                    cls, using, update_fields, force_insert
                )
            updated = self._save_table(
                raw,
                cls,
                force_insert or parent_inserted,
                force_update,
                using,
                update_fields,
            )
        # Store the database on which the object was saved
        self._state.db = using
        # Once saved, this is no longer a to-be-added instance.
        self._state.adding = False

        # Signal that the save is complete
        if not meta.auto_created:
            post_save.send(
                sender=origin,
                instance=self,
                created=(not updated),
                update_fields=update_fields,
                raw=raw,
                using=using,
            )

    save_base.alters_data = True
967
    def _save_parents(
        self, cls, using, update_fields, force_insert, updated_parents=None
    ):
        """
        Save all the parents of cls using values from self.

        updated_parents memoizes, per parent model, whether its row was
        UPDATEd (shared across the recursion to avoid saving a parent twice
        in diamond-shaped inheritance). Return True if any parent row was
        inserted rather than updated.
        """
        meta = cls._meta
        inserted = False
        if updated_parents is None:
            updated_parents = {}
        for parent, field in meta.parents.items():
            # Make sure the link fields are synced between parent and self.
            if (
                field
                and getattr(self, parent._meta.pk.attname) is None
                and getattr(self, field.attname) is not None
            ):
                setattr(self, parent._meta.pk.attname, getattr(self, field.attname))
            if (parent_updated := updated_parents.get(parent)) is None:
                # This parent hasn't been saved yet in this pass; recurse up
                # the inheritance chain before saving its own table.
                parent_inserted = self._save_parents(
                    cls=parent,
                    using=using,
                    update_fields=update_fields,
                    force_insert=force_insert,
                    updated_parents=updated_parents,
                )
                updated = self._save_table(
                    cls=parent,
                    using=using,
                    update_fields=update_fields,
                    force_insert=parent_inserted or issubclass(parent, force_insert),
                )
                if not updated:
                    inserted = True
                updated_parents[parent] = updated
            elif not parent_updated:
                # Memoized result says this parent was inserted earlier.
                inserted = True
            # Set the parent's PK value to self.
            if field:
                setattr(self, field.attname, self._get_pk_val(parent._meta))
                # Since we didn't have an instance of the parent handy set
                # attname directly, bypassing the descriptor. Invalidate
                # the related object cache, in case it's been accidentally
                # populated. A fresh instance will be re-built from the
                # database if necessary.
                if field.is_cached(self):
                    field.delete_cached_value(self)
        return inserted
1014
    def _save_table(
        self,
        raw=False,
        cls=None,
        force_insert=False,
        force_update=False,
        using=None,
        update_fields=None,
    ):
        """
        Do the heavy-lifting involved in saving. Update or insert the data
        for a single table.

        Return True when an UPDATE matched an existing row; False when an
        INSERT was performed instead.
        """
        meta = cls._meta
        pk_fields = meta.pk_fields
        # Candidate columns for an UPDATE: local concrete fields that are
        # neither part of the primary key nor database-generated.
        non_pks_non_generated = [
            f
            for f in meta.local_concrete_fields
            if f not in pk_fields and not f.generated
        ]

        if update_fields:
            # Restrict the UPDATE to the explicitly requested fields only.
            non_pks_non_generated = [
                f
                for f in non_pks_non_generated
                if f.name in update_fields or f.attname in update_fields
            ]

        # Give the pk a chance to self-populate (e.g. a field default) before
        # deciding between UPDATE and INSERT.
        if not self._is_pk_set(meta):
            pk_val = meta.pk.get_pk_value_on_save(self)
            setattr(self, meta.pk.attname, pk_val)
        pk_set = self._is_pk_set(meta)
        if not pk_set and (force_update or update_fields):
            raise ValueError("Cannot force an update in save() with no primary key.")
        updated = False
        # Skip an UPDATE when adding an instance and primary key has a default.
        if (
            not raw
            and not force_insert
            and not force_update
            and self._state.adding
            and all(f.has_default() or f.has_db_default() for f in meta.pk_fields)
        ):
            force_insert = True
        # If possible, try an UPDATE. If that doesn't update anything, do an INSERT.
        if pk_set and not force_insert:
            base_qs = cls._base_manager.using(using)
            # Build (field, model, value) triples; raw saves bypass
            # pre_save() and take the attribute value as-is.
            values = [
                (
                    f,
                    None,
                    (getattr(self, f.attname) if raw else f.pre_save(self, False)),
                )
                for f in non_pks_non_generated
            ]
            forced_update = update_fields or force_update
            pk_val = self._get_pk_val(meta)
            updated = self._do_update(
                base_qs, using, pk_val, values, update_fields, forced_update
            )
            if force_update and not updated:
                raise DatabaseError("Forced update did not affect any rows.")
            if update_fields and not updated:
                raise DatabaseError("Save with update_fields did not affect any rows.")
        if not updated:
            if meta.order_with_respect_to:
                # If this is a model with an order_with_respect_to
                # autopopulate the _order field
                field = meta.order_with_respect_to
                filter_args = field.get_filter_kwargs_for_object(self)
                self._order = (
                    cls._base_manager.using(using)
                    .filter(**filter_args)
                    .aggregate(
                        _order__max=Coalesce(
                            ExpressionWrapper(
                                Max("_order") + Value(1), output_field=IntegerField()
                            ),
                            Value(0),
                        ),
                    )["_order__max"]
                )
            # INSERT all non-generated columns; leave out an unset auto pk so
            # the database can assign it.
            fields = [
                f
                for f in meta.local_concrete_fields
                if not f.generated and (pk_set or f is not meta.auto_field)
            ]
            returning_fields = meta.db_returning_fields
            results = self._do_insert(
                cls._base_manager, using, fields, returning_fields, raw
            )
            if results:
                # Copy database-computed values (e.g. the auto pk) back onto
                # this instance.
                for value, field in zip(results[0], returning_fields):
                    setattr(self, field.attname, value)
        return updated
1110
    def _do_update(self, base_qs, using, pk_val, values, update_fields, forced_update):
        """
        Try to update the model. Return True if the model was updated (if an
        update query was done and a matching row was found in the DB).

        ``values`` is a list of (field, model, value) triples to write;
        ``forced_update`` is truthy when force_update or update_fields was
        passed to save(), which disables the select_on_save pre-check.
        """
        filtered = base_qs.filter(pk=pk_val)
        if not values:
            # We can end up here when saving a model in inheritance chain where
            # update_fields doesn't target any field in current model. In that
            # case we just say the update succeeded. Another case ending up here
            # is a model with just PK - in that case check that the PK still
            # exists.
            return update_fields is not None or filtered.exists()
        if self._meta.select_on_save and not forced_update:
            return (
                filtered.exists()
                and
                # It may happen that the object is deleted from the DB right after
                # this check, causing the subsequent UPDATE to return zero matching
                # rows. The same result can occur in some rare cases when the
                # database returns zero despite the UPDATE being executed
                # successfully (a row is matched and updated). In order to
                # distinguish these two cases, the object's existence in the
                # database is again checked for if the UPDATE query returns 0.
                (filtered._update(values) > 0 or filtered.exists())
            )
        return filtered._update(values) > 0
1138
1139 def _do_insert(self, manager, using, fields, returning_fields, raw):
1140 """
1141 Do an INSERT. If returning_fields is defined then this method should
1142 return the newly created data for the model.
1143 """
1144 return manager._insert(
1145 [self],
1146 fields=fields,
1147 returning_fields=returning_fields,
1148 using=using,
1149 raw=raw,
1150 )
1151
    def _prepare_related_fields_for_save(self, operation_name, fields=None):
        """
        Guard against saving while a cached related object is itself unsaved.

        Raise ValueError (naming ``operation_name``) when an instance without
        a primary key is assigned to a ForeignKey, GenericForeignKey or
        OneToOneField on this model; otherwise sync the underlying attname
        with a related object that was saved after being assigned.
        ``fields`` optionally restricts which fields are inspected.
        """
        # Ensure that a model instance without a PK hasn't been assigned to
        # a ForeignKey, GenericForeignKey or OneToOneField on this model. If
        # the field is nullable, allowing the save would result in silent data
        # loss.
        for field in self._meta.concrete_fields:
            if fields and field not in fields:
                continue
            # If the related field isn't cached, then an instance hasn't been
            # assigned and there's no need to worry about this check.
            if field.is_relation and field.is_cached(self):
                obj = getattr(self, field.name, None)
                if not obj:
                    continue
                # A pk may have been assigned manually to a model instance not
                # saved to the database (or auto-generated in a case like
                # UUIDField), but we allow the save to proceed and rely on the
                # database to raise an IntegrityError if applicable. If
                # constraints aren't supported by the database, there's the
                # unavoidable risk of data corruption.
                if not obj._is_pk_set():
                    # Remove the object from a related instance cache.
                    if not field.remote_field.multiple:
                        field.remote_field.delete_cached_value(obj)
                    raise ValueError(
                        "%s() prohibited to prevent data loss due to unsaved "
                        "related object '%s'." % (operation_name, field.name)
                    )
                elif getattr(self, field.attname) in field.empty_values:
                    # Set related object if it has been saved after an
                    # assignment.
                    setattr(self, field.name, obj)
                # If the relationship's pk/to_field was changed, clear the
                # cached relationship.
                if getattr(obj, field.target_field.attname) != getattr(
                    self, field.attname
                ):
                    field.delete_cached_value(self)
        # GenericForeignKeys are private.
        for field in self._meta.private_fields:
            if fields and field not in fields:
                continue
            if (
                field.is_relation
                and field.is_cached(self)
                and hasattr(field, "fk_field")
            ):
                obj = field.get_cached_value(self, default=None)
                if obj and not obj._is_pk_set():
                    raise ValueError(
                        f"{operation_name}() prohibited to prevent data loss due to "
                        f"unsaved related object '{field.name}'."
                    )
1205
1206 def delete(self, using=None, keep_parents=False):
1207 if not self._is_pk_set():
1208 raise ValueError(
1209 "%s object can't be deleted because its %s attribute is set "
1210 "to None." % (self._meta.object_name, self._meta.pk.attname)
1211 )
1212 using = using or router.db_for_write(self.__class__, instance=self)
1213 collector = Collector(using=using, origin=self)
1214 collector.collect([self], keep_parents=keep_parents)
1215 return collector.delete()
1216
1217 delete.alters_data = True
1218
1219 async def adelete(self, using=None, keep_parents=False):
1220 return await sync_to_async(self.delete)(
1221 using=using,
1222 keep_parents=keep_parents,
1223 )
1224
1225 adelete.alters_data = True
1226
1227 def _get_FIELD_display(self, field):
1228 value = getattr(self, field.attname)
1229 choices_dict = dict(make_hashable(field.flatchoices))
1230 # force_str() to coerce lazy strings.
1231 return force_str(
1232 choices_dict.get(make_hashable(value), value), strings_only=True
1233 )
1234
    def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs):
        """
        Return the neighboring instance when ordered by ``field``.

        ``is_next`` selects the direction; ties on the field value are broken
        by pk so the ordering is total. Extra ``kwargs`` are applied as an
        additional filter. Raise DoesNotExist when there is no neighbor and
        ValueError on unsaved instances.
        """
        if not self._is_pk_set():
            raise ValueError("get_next/get_previous cannot be used on unsaved objects.")
        op = "gt" if is_next else "lt"
        order = "" if is_next else "-"
        param = getattr(self, field.attname)
        # Rows with an equal field value but a greater/lesser pk...
        q = Q.create([(field.name, param), (f"pk__{op}", self.pk)], connector=Q.AND)
        # ...or rows strictly beyond this field value.
        q = Q.create([q, (f"{field.name}__{op}", param)], connector=Q.OR)
        qs = (
            self.__class__._default_manager.using(self._state.db)
            .filter(**kwargs)
            .filter(q)
            .order_by("%s%s" % (order, field.name), "%spk" % order)
        )
        try:
            return qs[0]
        except IndexError:
            raise self.DoesNotExist(
                "%s matching query does not exist." % self.__class__._meta.object_name
            )
1255
    def _get_next_or_previous_in_order(self, is_next):
        """
        Return the next/previous object in an order_with_respect_to sequence
        by comparing on the ``_order`` column. The result is cached on the
        instance per direction.
        """
        cachename = "__%s_order_cache" % is_next
        if not hasattr(self, cachename):
            op = "gt" if is_next else "lt"
            order = "_order" if is_next else "-_order"
            order_field = self._meta.order_with_respect_to
            filter_args = order_field.get_filter_kwargs_for_object(self)
            # Compare against this row's own _order via a values() subquery
            # keyed on the pk.
            obj = (
                self.__class__._default_manager.filter(**filter_args)
                .filter(
                    **{
                        "_order__%s"
                        % op: self.__class__._default_manager.values("_order").filter(
                            **{self._meta.pk.name: self.pk}
                        )
                    }
                )
                .order_by(order)[:1]
                .get()
            )
            setattr(self, cachename, obj)
        return getattr(self, cachename)
1278
    def _get_field_expression_map(self, meta, exclude=None):
        """
        Map field names (plus "pk") to expressions representing this
        instance's current values.

        Plain values are wrapped in Value(); generated fields are rebuilt by
        substituting those values into their defining expressions. Fields in
        ``exclude`` are omitted, as are generated fields referencing an
        excluded field.
        """
        if exclude is None:
            exclude = set()
        meta = meta or self._meta
        field_map = {}
        generated_fields = []
        for field in meta.local_concrete_fields:
            if field.name in exclude:
                continue
            if field.generated:
                # Defer generated fields; skip those depending on an excluded
                # field.
                if any(
                    ref[0] in exclude
                    for ref in self._get_expr_references(field.expression)
                ):
                    continue
                generated_fields.append(field)
                continue
            value = getattr(self, field.attname)
            if not value or not hasattr(value, "resolve_expression"):
                value = Value(value, field)
            field_map[field.name] = value
        if "pk" not in exclude:
            field_map["pk"] = Value(self.pk, meta.pk)
        if generated_fields:
            # Resolve generated expressions against the concrete values
            # gathered above instead of database columns.
            replacements = {F(name): value for name, value in field_map.items()}
            for generated_field in generated_fields:
                field_map[generated_field.name] = ExpressionWrapper(
                    generated_field.expression.replace_expressions(replacements),
                    generated_field.output_field,
                )

        return field_map
1311
1312 def prepare_database_save(self, field):
1313 if not self._is_pk_set():
1314 raise ValueError(
1315 "Unsaved model instance %r cannot be used in an ORM query." % self
1316 )
1317 return getattr(self, field.remote_field.get_related_field().attname)
1318
1319 def clean(self):
1320 """
1321 Hook for doing any extra model-wide validation after clean() has been
1322 called on every field by self.clean_fields. Any ValidationError raised
1323 by this method will not be associated with a particular field; it will
1324 have a special-case association with the field defined by NON_FIELD_ERRORS.
1325 """
1326 pass
1327
1328 def validate_unique(self, exclude=None):
1329 """
1330 Check unique constraints on the model and raise ValidationError if any
1331 failed.
1332 """
1333 unique_checks, date_checks = self._get_unique_checks(exclude=exclude)
1334
1335 errors = self._perform_unique_checks(unique_checks)
1336 date_errors = self._perform_date_checks(date_checks)
1337
1338 for k, v in date_errors.items():
1339 errors.setdefault(k, []).extend(v)
1340
1341 if errors:
1342 raise ValidationError(errors)
1343
    def _get_unique_checks(self, exclude=None, include_meta_constraints=False):
        """
        Return a list of checks to perform. Since validate_unique() could be
        called from a ModelForm, some fields may have been excluded; we can't
        perform a unique check on a model that is missing fields involved
        in that check. Fields that did not validate should also be excluded,
        but they need to be passed in via the exclude argument.

        Return a (unique_checks, date_checks) pair:
        - unique_checks: (model_class, field_name_tuple) entries covering
          unique=True fields, unique_together, composite primary keys and,
          when include_meta_constraints is set, total_unique_constraints.
        - date_checks: (model_class, lookup_type, field_name, date_field)
          entries for unique_for_date/year/month.
        """
        if exclude is None:
            exclude = set()
        unique_checks = []

        unique_togethers = [(self.__class__, self._meta.unique_together)]
        constraints = []
        if include_meta_constraints:
            constraints = [(self.__class__, self._meta.total_unique_constraints)]
        # Parent models contribute their own unique_together and constraints.
        for parent_class in self._meta.all_parents:
            if parent_class._meta.unique_together:
                unique_togethers.append(
                    (parent_class, parent_class._meta.unique_together)
                )
            if include_meta_constraints and parent_class._meta.total_unique_constraints:
                constraints.append(
                    (parent_class, parent_class._meta.total_unique_constraints)
                )

        for model_class, unique_together in unique_togethers:
            for check in unique_together:
                if not any(name in exclude for name in check):
                    # Add the check if the field isn't excluded.
                    unique_checks.append((model_class, tuple(check)))

        if include_meta_constraints:
            for model_class, model_constraints in constraints:
                for constraint in model_constraints:
                    if not any(name in exclude for name in constraint.fields):
                        unique_checks.append((model_class, constraint.fields))

        # These are checks for the unique_for_<date/year/month>.
        date_checks = []

        # Gather a list of checks for fields declared as unique and add them to
        # the list of checks.

        fields_with_class = [(self.__class__, self._meta.local_fields)]
        for parent_class in self._meta.all_parents:
            fields_with_class.append((parent_class, parent_class._meta.local_fields))

        for model_class, fields in fields_with_class:
            for f in fields:
                name = f.name
                if name in exclude:
                    continue
                if isinstance(f, CompositePrimaryKey):
                    # A composite pk is checked as a unit; skip it entirely
                    # when any member field is excluded.
                    names = tuple(field.name for field in f.fields)
                    if exclude.isdisjoint(names):
                        unique_checks.append((model_class, names))
                    continue
                if f.unique:
                    unique_checks.append((model_class, (name,)))
                if f.unique_for_date and f.unique_for_date not in exclude:
                    date_checks.append((model_class, "date", name, f.unique_for_date))
                if f.unique_for_year and f.unique_for_year not in exclude:
                    date_checks.append((model_class, "year", name, f.unique_for_year))
                if f.unique_for_month and f.unique_for_month not in exclude:
                    date_checks.append((model_class, "month", name, f.unique_for_month))
        return unique_checks, date_checks
1411
    def _perform_unique_checks(self, unique_checks):
        """
        Run the given (model_class, field_names) uniqueness checks against
        the database. Return a dict mapping the field name (or
        NON_FIELD_ERRORS for multi-field checks) to a list of
        ValidationErrors.
        """
        errors = {}

        for model_class, unique_check in unique_checks:
            # Try to look up an existing object with the same values as this
            # object's values for all the unique field.

            lookup_kwargs = {}
            for field_name in unique_check:
                f = self._meta.get_field(field_name)
                lookup_value = getattr(self, f.attname)
                # TODO: Handle multiple backends with different feature flags.
                if lookup_value is None or (
                    lookup_value == ""
                    and connection.features.interprets_empty_strings_as_nulls
                ):
                    # no value, skip the lookup
                    continue
                if f in model_class._meta.pk_fields and not self._state.adding:
                    # no need to check for unique primary key when editing
                    continue
                lookup_kwargs[str(field_name)] = lookup_value

            # some fields were skipped, no reason to do the check
            if len(unique_check) != len(lookup_kwargs):
                continue

            qs = model_class._default_manager.filter(**lookup_kwargs)

            # Exclude the current object from the query if we are editing an
            # instance (as opposed to creating a new one)
            # Note that we need to use the pk as defined by model_class, not
            # self.pk. These can be different fields because model inheritance
            # allows single model to have effectively multiple primary keys.
            # Refs #17615.
            model_class_pk = self._get_pk_val(model_class._meta)
            if not self._state.adding and self._is_pk_set(model_class._meta):
                qs = qs.exclude(pk=model_class_pk)
            if qs.exists():
                if len(unique_check) == 1:
                    key = unique_check[0]
                else:
                    key = NON_FIELD_ERRORS
                errors.setdefault(key, []).append(
                    self.unique_error_message(model_class, unique_check)
                )

        return errors
1460
1461 def _perform_date_checks(self, date_checks):
1462 errors = {}
1463 for model_class, lookup_type, field, unique_for in date_checks:
1464 lookup_kwargs = {}
1465 # there's a ticket to add a date lookup, we can remove this special
1466 # case if that makes it's way in
1467 date = getattr(self, unique_for)
1468 if date is None:
1469 continue
1470 if lookup_type == "date":
1471 lookup_kwargs["%s__day" % unique_for] = date.day
1472 lookup_kwargs["%s__month" % unique_for] = date.month
1473 lookup_kwargs["%s__year" % unique_for] = date.year
1474 else:
1475 lookup_kwargs["%s__%s" % (unique_for, lookup_type)] = getattr(
1476 date, lookup_type
1477 )
1478 lookup_kwargs[field] = getattr(self, field)
1479
1480 qs = model_class._default_manager.filter(**lookup_kwargs)
1481 # Exclude the current object from the query if we are editing an
1482 # instance (as opposed to creating a new one)
1483 if not self._state.adding and self._is_pk_set():
1484 qs = qs.exclude(pk=self.pk)
1485
1486 if qs.exists():
1487 errors.setdefault(field, []).append(
1488 self.date_error_message(lookup_type, field, unique_for)
1489 )
1490 return errors
1491
1492 def date_error_message(self, lookup_type, field_name, unique_for):
1493 opts = self._meta
1494 field = opts.get_field(field_name)
1495 return ValidationError(
1496 message=field.error_messages["unique_for_date"],
1497 code="unique_for_date",
1498 params={
1499 "model": self,
1500 "model_name": capfirst(opts.verbose_name),
1501 "lookup_type": lookup_type,
1502 "field": field_name,
1503 "field_label": capfirst(field.verbose_name),
1504 "date_field": unique_for,
1505 "date_field_label": capfirst(opts.get_field(unique_for).verbose_name),
1506 },
1507 )
1508
1509 def unique_error_message(self, model_class, unique_check):
1510 opts = model_class._meta
1511
1512 params = {
1513 "model": self,
1514 "model_class": model_class,
1515 "model_name": capfirst(opts.verbose_name),
1516 "unique_check": unique_check,
1517 }
1518
1519 # A unique field
1520 if len(unique_check) == 1:
1521 field = opts.get_field(unique_check[0])
1522 params["field_label"] = capfirst(field.verbose_name)
1523 return ValidationError(
1524 message=field.error_messages["unique"],
1525 code="unique",
1526 params=params,
1527 )
1528
1529 # unique_together
1530 else:
1531 field_labels = [
1532 capfirst(opts.get_field(f).verbose_name) for f in unique_check
1533 ]
1534 params["field_labels"] = get_text_list(field_labels, _("and"))
1535 return ValidationError(
1536 message=_("%(model_name)s with this %(field_labels)s already exists."),
1537 code="unique_together",
1538 params=params,
1539 )
1540
1541 def get_constraints(self):
1542 constraints = [(self.__class__, self._meta.constraints)]
1543 for parent_class in self._meta.all_parents:
1544 if parent_class._meta.constraints:
1545 constraints.append((parent_class, parent_class._meta.constraints))
1546 return constraints
1547
1548 def validate_constraints(self, exclude=None):
1549 constraints = self.get_constraints()
1550 using = router.db_for_write(self.__class__, instance=self)
1551
1552 errors = {}
1553 for model_class, model_constraints in constraints:
1554 for constraint in model_constraints:
1555 try:
1556 constraint.validate(model_class, self, exclude=exclude, using=using)
1557 except ValidationError as e:
1558 if (
1559 getattr(e, "code", None) == "unique"
1560 and len(constraint.fields) == 1
1561 ):
1562 errors.setdefault(constraint.fields[0], []).append(e)
1563 else:
1564 errors = e.update_error_dict(errors)
1565 if errors:
1566 raise ValidationError(errors)
1567
    def full_clean(self, exclude=None, validate_unique=True, validate_constraints=True):
        """
        Call clean_fields(), clean(), validate_unique(), and
        validate_constraints() on the model. Raise a ValidationError for any
        errors that occur.

        ``exclude`` is a collection of field names to skip. Fields that fail
        an earlier stage are added to it so the database-backed checks don't
        re-report them.
        """
        errors = {}
        if exclude is None:
            exclude = set()
        else:
            # Copy so the caller's collection isn't mutated below.
            exclude = set(exclude)

        try:
            self.clean_fields(exclude=exclude)
        except ValidationError as e:
            errors = e.update_error_dict(errors)

        # Form.clean() is run even if other validation fails, so do the
        # same with Model.clean() for consistency.
        try:
            self.clean()
        except ValidationError as e:
            errors = e.update_error_dict(errors)

        # Run unique checks, but only for fields that passed validation.
        if validate_unique:
            for name in errors:
                if name != NON_FIELD_ERRORS and name not in exclude:
                    exclude.add(name)
            try:
                self.validate_unique(exclude=exclude)
            except ValidationError as e:
                errors = e.update_error_dict(errors)

        # Run constraints checks, but only for fields that passed validation.
        if validate_constraints:
            for name in errors:
                if name != NON_FIELD_ERRORS and name not in exclude:
                    exclude.add(name)
            try:
                self.validate_constraints(exclude=exclude)
            except ValidationError as e:
                errors = e.update_error_dict(errors)

        if errors:
            raise ValidationError(errors)
1614
1615 def clean_fields(self, exclude=None):
1616 """
1617 Clean all fields and raise a ValidationError containing a dict
1618 of all validation errors if any occur.
1619 """
1620 if exclude is None:
1621 exclude = set()
1622
1623 errors = {}
1624 for f in self._meta.fields:
1625 if f.name in exclude or f.generated:
1626 continue
1627 # Skip validation for empty fields with blank=True. The developer
1628 # is responsible for making sure they have a valid value.
1629 raw_value = getattr(self, f.attname)
1630 if f.blank and raw_value in f.empty_values:
1631 continue
1632 # Skip validation for empty fields when db_default is used.
1633 if isinstance(raw_value, DatabaseDefault):
1634 continue
1635 try:
1636 setattr(self, f.attname, f.clean(raw_value, self))
1637 except ValidationError as e:
1638 errors[f.name] = e.error_list
1639
1640 if errors:
1641 raise ValidationError(errors)
1642
    @classmethod
    def check(cls, **kwargs):
        """
        Run the model's system checks and return the list of messages.

        Swappable-, proxy- and manager-related checks always run; the rest
        only run when the model isn't swapped out.
        """
        errors = [
            *cls._check_swappable(),
            *cls._check_model(),
            *cls._check_managers(**kwargs),
        ]
        if not cls._meta.swapped:
            databases = kwargs.get("databases") or []
            errors += [
                *cls._check_fields(**kwargs),
                *cls._check_m2m_through_same_relationship(),
                *cls._check_long_column_names(databases),
            ]
            clash_errors = (
                *cls._check_id_field(),
                *cls._check_field_name_clashes(),
                *cls._check_model_name_db_lookup_clashes(),
                *cls._check_property_name_related_field_accessor_clashes(),
                *cls._check_single_primary_key(),
            )
            errors.extend(clash_errors)
            # If there are field name clashes, hide consequent column name
            # clashes.
            if not clash_errors:
                errors.extend(cls._check_column_name_clashes())
            errors += [
                *cls._check_unique_together(),
                *cls._check_indexes(databases),
                *cls._check_ordering(),
                *cls._check_constraints(databases),
                *cls._check_default_pk(),
                *cls._check_db_table_comment(databases),
                *cls._check_composite_pk(),
            ]

        return errors
1680
1681 @classmethod
1682 def _check_default_pk(cls):
1683 if (
1684 not cls._meta.abstract
1685 and cls._meta.pk.auto_created
1686 and
1687 # Inherited PKs are checked in parents models.
1688 not (
1689 isinstance(cls._meta.pk, OneToOneField)
1690 and cls._meta.pk.remote_field.parent_link
1691 )
1692 and not settings.is_overridden("DEFAULT_AUTO_FIELD")
1693 and cls._meta.app_config
1694 and not cls._meta.app_config._is_default_auto_field_overridden
1695 ):
1696 return [
1697 checks.Warning(
1698 f"Auto-created primary key used when not defining a "
1699 f"primary key type, by default "
1700 f"'{settings.DEFAULT_AUTO_FIELD}'.",
1701 hint=(
1702 f"Configure the DEFAULT_AUTO_FIELD setting or the "
1703 f"{cls._meta.app_config.__class__.__qualname__}."
1704 f"default_auto_field attribute to point to a subclass "
1705 f"of AutoField, e.g. 'django.db.models.BigAutoField'."
1706 ),
1707 obj=cls,
1708 id="models.W042",
1709 ),
1710 ]
1711 return []
1712
1713 @classmethod
1714 def _check_composite_pk(cls):
1715 errors = []
1716 meta = cls._meta
1717 pk = meta.pk
1718
1719 if not isinstance(pk, CompositePrimaryKey):
1720 return errors
1721
1722 seen_columns = defaultdict(list)
1723
1724 for field_name in pk.field_names:
1725 hint = None
1726
1727 try:
1728 field = meta.get_field(field_name)
1729 except FieldDoesNotExist:
1730 field = None
1731
1732 if not field:
1733 hint = f"{field_name!r} is not a valid field."
1734 elif not field.column:
1735 hint = f"{field_name!r} field has no column."
1736 elif field.null:
1737 hint = f"{field_name!r} field may not set 'null=True'."
1738 elif field.generated:
1739 hint = f"{field_name!r} field is a generated field."
1740 elif field not in meta.local_fields:
1741 hint = f"{field_name!r} field is not a local field."
1742 else:
1743 seen_columns[field.column].append(field_name)
1744
1745 if hint:
1746 errors.append(
1747 checks.Error(
1748 f"{field_name!r} cannot be included in the composite primary "
1749 "key.",
1750 hint=hint,
1751 obj=cls,
1752 id="models.E042",
1753 )
1754 )
1755
1756 for column, field_names in seen_columns.items():
1757 if len(field_names) > 1:
1758 field_name, *rest = field_names
1759 duplicates = ", ".join(repr(field) for field in rest)
1760 errors.append(
1761 checks.Error(
1762 f"{duplicates} cannot be included in the composite primary "
1763 "key.",
1764 hint=f"{duplicates} and {field_name!r} are the same fields.",
1765 obj=cls,
1766 id="models.E042",
1767 )
1768 )
1769
1770 return errors
1771
1772 @classmethod
1773 def _check_db_table_comment(cls, databases):
1774 if not cls._meta.db_table_comment:
1775 return []
1776 errors = []
1777 for db in databases:
1778 if not router.allow_migrate_model(db, cls):
1779 continue
1780 connection = connections[db]
1781 if not (
1782 connection.features.supports_comments
1783 or "supports_comments" in cls._meta.required_db_features
1784 ):
1785 errors.append(
1786 checks.Warning(
1787 f"{connection.display_name} does not support comments on "
1788 f"tables (db_table_comment).",
1789 obj=cls,
1790 id="models.W046",
1791 )
1792 )
1793 return errors
1794
1795 @classmethod
1796 def _check_swappable(cls):
1797 """Check if the swapped model exists."""
1798 errors = []
1799 if cls._meta.swapped:
1800 try:
1801 apps.get_model(cls._meta.swapped)
1802 except ValueError:
1803 errors.append(
1804 checks.Error(
1805 "'%s' is not of the form 'app_label.app_name'."
1806 % cls._meta.swappable,
1807 id="models.E001",
1808 )
1809 )
1810 except LookupError:
1811 app_label, model_name = cls._meta.swapped.split(".")
1812 errors.append(
1813 checks.Error(
1814 "'%s' references '%s.%s', which has not been "
1815 "installed, or is abstract."
1816 % (cls._meta.swappable, app_label, model_name),
1817 id="models.E002",
1818 )
1819 )
1820 return errors
1821
1822 @classmethod
1823 def _check_model(cls):
1824 errors = []
1825 if cls._meta.proxy:
1826 if cls._meta.local_fields or cls._meta.local_many_to_many:
1827 errors.append(
1828 checks.Error(
1829 "Proxy model '%s' contains model fields." % cls.__name__,
1830 id="models.E017",
1831 )
1832 )
1833 return errors
1834
1835 @classmethod
1836 def _check_managers(cls, **kwargs):
1837 """Perform all manager checks."""
1838 errors = []
1839 for manager in cls._meta.managers:
1840 errors.extend(manager.check(**kwargs))
1841 return errors
1842
1843 @classmethod
1844 def _check_fields(cls, **kwargs):
1845 """Perform all field checks."""
1846 errors = []
1847 for field in cls._meta.local_fields:
1848 errors.extend(field.check(**kwargs))
1849 for field in cls._meta.local_many_to_many:
1850 errors.extend(field.check(from_model=cls, **kwargs))
1851 return errors
1852
1853 @classmethod
1854 def _check_m2m_through_same_relationship(cls):
1855 """Check if no relationship model is used by more than one m2m field."""
1856
1857 errors = []
1858 seen_intermediary_signatures = []
1859
1860 fields = cls._meta.local_many_to_many
1861
1862 # Skip when the target model wasn't found.
1863 fields = (f for f in fields if isinstance(f.remote_field.model, ModelBase))
1864
1865 # Skip when the relationship model wasn't found.
1866 fields = (f for f in fields if isinstance(f.remote_field.through, ModelBase))
1867
1868 for f in fields:
1869 signature = (
1870 f.remote_field.model,
1871 cls,
1872 f.remote_field.through,
1873 f.remote_field.through_fields,
1874 )
1875 if signature in seen_intermediary_signatures:
1876 errors.append(
1877 checks.Error(
1878 "The model has two identical many-to-many relations "
1879 "through the intermediate model '%s'."
1880 % f.remote_field.through._meta.label,
1881 obj=cls,
1882 id="models.E003",
1883 )
1884 )
1885 else:
1886 seen_intermediary_signatures.append(signature)
1887 return errors
1888
1889 @classmethod
1890 def _check_id_field(cls):
1891 """Check if `id` field is a primary key."""
1892 fields = [
1893 f for f in cls._meta.local_fields if f.name == "id" and f != cls._meta.pk
1894 ]
1895 # fields is empty or consists of the invalid "id" field
1896 if fields and not fields[0].primary_key and cls._meta.pk.name == "id":
1897 return [
1898 checks.Error(
1899 "'id' can only be used as a field name if the field also "
1900 "sets 'primary_key=True'.",
1901 obj=cls,
1902 id="models.E004",
1903 )
1904 ]
1905 else:
1906 return []
1907
    @classmethod
    def _check_field_name_clashes(cls):
        """
        Forbid field shadowing in multi-table inheritance.

        Emit models.E005 for clashes between parents and models.E006 for
        clashes between a parent (or a parent link) and this model's own
        fields.
        """
        errors = []
        used_fields = {}  # name or attname -> field

        # Check that multi-inheritance doesn't cause field name shadowing.
        for parent in cls._meta.all_parents:
            for f in parent._meta.local_fields:
                clash = used_fields.get(f.name) or used_fields.get(f.attname) or None
                if clash:
                    errors.append(
                        checks.Error(
                            "The field '%s' from parent model "
                            "'%s' clashes with the field '%s' "
                            "from parent model '%s'."
                            % (clash.name, clash.model._meta, f.name, f.model._meta),
                            obj=cls,
                            id="models.E005",
                        )
                    )
                used_fields[f.name] = f
                used_fields[f.attname] = f

        # Check that fields defined in the model don't clash with fields from
        # parents, including auto-generated fields like multi-table inheritance
        # child accessors.
        for parent in cls._meta.all_parents:
            for f in parent._meta.get_fields():
                if f not in used_fields:
                    used_fields[f.name] = f

        # Check that parent links in diamond-shaped MTI models don't clash.
        for parent_link in cls._meta.parents.values():
            if not parent_link:
                continue
            clash = used_fields.get(parent_link.name) or None
            if clash:
                errors.append(
                    checks.Error(
                        f"The field '{parent_link.name}' clashes with the field "
                        f"'{clash.name}' from model '{clash.model._meta}'.",
                        obj=cls,
                        id="models.E006",
                    )
                )

        for f in cls._meta.local_fields:
            clash = used_fields.get(f.name) or used_fields.get(f.attname) or None
            # Note that we may detect clash between user-defined non-unique
            # field "id" and automatically added unique field "id", both
            # defined at the same model. This special case is considered in
            # _check_id_field and here we ignore it.
            id_conflict = (
                f.name == "id" and clash and clash.name == "id" and clash.model == cls
            )
            if clash and not id_conflict:
                errors.append(
                    checks.Error(
                        "The field '%s' clashes with the field '%s' "
                        "from model '%s'." % (f.name, clash.name, clash.model._meta),
                        obj=f,
                        id="models.E006",
                    )
                )
            used_fields[f.name] = f
            used_fields[f.attname] = f

        return errors
1977
1978 @classmethod
1979 def _check_column_name_clashes(cls):
1980 # Store a list of column names which have already been used by other fields.
1981 used_column_names = []
1982 errors = []
1983
1984 for f in cls._meta.local_fields:
1985 column_name = f.column
1986
1987 # Ensure the column name is not already in use.
1988 if column_name and column_name in used_column_names:
1989 errors.append(
1990 checks.Error(
1991 "Field '%s' has column name '%s' that is used by "
1992 "another field." % (f.name, column_name),
1993 hint="Specify a 'db_column' for the field.",
1994 obj=cls,
1995 id="models.E007",
1996 )
1997 )
1998 else:
1999 used_column_names.append(column_name)
2000
2001 return errors
2002
2003 @classmethod
2004 def _check_model_name_db_lookup_clashes(cls):
2005 errors = []
2006 model_name = cls.__name__
2007 if model_name.startswith("_") or model_name.endswith("_"):
2008 errors.append(
2009 checks.Error(
2010 "The model name '%s' cannot start or end with an underscore "
2011 "as it collides with the query lookup syntax." % model_name,
2012 obj=cls,
2013 id="models.E023",
2014 )
2015 )
2016 elif LOOKUP_SEP in model_name:
2017 errors.append(
2018 checks.Error(
2019 "The model name '%s' cannot contain double underscores as "
2020 "it collides with the query lookup syntax." % model_name,
2021 obj=cls,
2022 id="models.E024",
2023 )
2024 )
2025 return errors
2026
2027 @classmethod
2028 def _check_property_name_related_field_accessor_clashes(cls):
2029 errors = []
2030 property_names = cls._meta._property_names
2031 related_field_accessors = (
2032 f.attname
2033 for f in cls._meta._get_fields(reverse=False)
2034 if f.is_relation and f.related_model is not None
2035 )
2036 for accessor in related_field_accessors:
2037 if accessor in property_names:
2038 errors.append(
2039 checks.Error(
2040 "The property '%s' clashes with a related field "
2041 "accessor." % accessor,
2042 obj=cls,
2043 id="models.E025",
2044 )
2045 )
2046 return errors
2047
2048 @classmethod
2049 def _check_single_primary_key(cls):
2050 errors = []
2051 if sum(1 for f in cls._meta.local_fields if f.primary_key) > 1:
2052 errors.append(
2053 checks.Error(
2054 "The model cannot have more than one field with "
2055 "'primary_key=True'.",
2056 obj=cls,
2057 id="models.E026",
2058 )
2059 )
2060 return errors
2061
2062 @classmethod
2063 def _check_unique_together(cls):
2064 """Check the value of "unique_together" option."""
2065 if not isinstance(cls._meta.unique_together, (tuple, list)):
2066 return [
2067 checks.Error(
2068 "'unique_together' must be a list or tuple.",
2069 obj=cls,
2070 id="models.E010",
2071 )
2072 ]
2073
2074 elif any(
2075 not isinstance(fields, (tuple, list))
2076 for fields in cls._meta.unique_together
2077 ):
2078 return [
2079 checks.Error(
2080 "All 'unique_together' elements must be lists or tuples.",
2081 obj=cls,
2082 id="models.E011",
2083 )
2084 ]
2085
2086 else:
2087 errors = []
2088 for fields in cls._meta.unique_together:
2089 errors.extend(cls._check_local_fields(fields, "unique_together"))
2090 return errors
2091
2092 @classmethod
2093 def _check_indexes(cls, databases):
2094 """Check fields, names, and conditions of indexes."""
2095 errors = []
2096 references = set()
2097 for index in cls._meta.indexes:
2098 # Index name can't start with an underscore or a number, restricted
2099 # for cross-database compatibility with Oracle.
2100 if index.name[0] == "_" or index.name[0].isdigit():
2101 errors.append(
2102 checks.Error(
2103 "The index name '%s' cannot start with an underscore "
2104 "or a number." % index.name,
2105 obj=cls,
2106 id="models.E033",
2107 ),
2108 )
2109 if len(index.name) > index.max_name_length:
2110 errors.append(
2111 checks.Error(
2112 "The index name '%s' cannot be longer than %d "
2113 "characters." % (index.name, index.max_name_length),
2114 obj=cls,
2115 id="models.E034",
2116 ),
2117 )
2118 if index.contains_expressions:
2119 for expression in index.expressions:
2120 references.update(
2121 ref[0] for ref in cls._get_expr_references(expression)
2122 )
2123 for db in databases:
2124 if not router.allow_migrate_model(db, cls):
2125 continue
2126 connection = connections[db]
2127 if not (
2128 connection.features.supports_partial_indexes
2129 or "supports_partial_indexes" in cls._meta.required_db_features
2130 ) and any(index.condition is not None for index in cls._meta.indexes):
2131 errors.append(
2132 checks.Warning(
2133 "%s does not support indexes with conditions."
2134 % connection.display_name,
2135 hint=(
2136 "Conditions will be ignored. Silence this warning "
2137 "if you don't care about it."
2138 ),
2139 obj=cls,
2140 id="models.W037",
2141 )
2142 )
2143 if not (
2144 connection.features.supports_covering_indexes
2145 or "supports_covering_indexes" in cls._meta.required_db_features
2146 ) and any(index.include for index in cls._meta.indexes):
2147 errors.append(
2148 checks.Warning(
2149 "%s does not support indexes with non-key columns."
2150 % connection.display_name,
2151 hint=(
2152 "Non-key columns will be ignored. Silence this "
2153 "warning if you don't care about it."
2154 ),
2155 obj=cls,
2156 id="models.W040",
2157 )
2158 )
2159 if not (
2160 connection.features.supports_expression_indexes
2161 or "supports_expression_indexes" in cls._meta.required_db_features
2162 ) and any(index.contains_expressions for index in cls._meta.indexes):
2163 errors.append(
2164 checks.Warning(
2165 "%s does not support indexes on expressions."
2166 % connection.display_name,
2167 hint=(
2168 "An index won't be created. Silence this warning "
2169 "if you don't care about it."
2170 ),
2171 obj=cls,
2172 id="models.W043",
2173 )
2174 )
2175 fields = [
2176 field for index in cls._meta.indexes for field, _ in index.fields_orders
2177 ]
2178 fields += [include for index in cls._meta.indexes for include in index.include]
2179 fields += references
2180 errors.extend(cls._check_local_fields(fields, "indexes"))
2181 return errors
2182
2183 @classmethod
2184 def _check_local_fields(cls, fields, option):
2185 from django.db import models
2186
2187 # In order to avoid hitting the relation tree prematurely, we use our
2188 # own fields_map instead of using get_field()
2189 forward_fields_map = {}
2190 for field in cls._meta._get_fields(reverse=False):
2191 forward_fields_map[field.name] = field
2192 if hasattr(field, "attname"):
2193 forward_fields_map[field.attname] = field
2194
2195 errors = []
2196 for field_name in fields:
2197 try:
2198 field = forward_fields_map[field_name]
2199 except KeyError:
2200 errors.append(
2201 checks.Error(
2202 "'%s' refers to the nonexistent field '%s'."
2203 % (
2204 option,
2205 field_name,
2206 ),
2207 obj=cls,
2208 id="models.E012",
2209 )
2210 )
2211 else:
2212 if isinstance(field.remote_field, models.ManyToManyRel):
2213 errors.append(
2214 checks.Error(
2215 "'%s' refers to a ManyToManyField '%s', but "
2216 "ManyToManyFields are not permitted in '%s'."
2217 % (
2218 option,
2219 field_name,
2220 option,
2221 ),
2222 obj=cls,
2223 id="models.E013",
2224 )
2225 )
2226 elif isinstance(field, models.CompositePrimaryKey):
2227 errors.append(
2228 checks.Error(
2229 f"{option!r} refers to a CompositePrimaryKey "
2230 f"{field_name!r}, but CompositePrimaryKeys are not "
2231 f"permitted in {option!r}.",
2232 obj=cls,
2233 id="models.E048",
2234 )
2235 )
2236 elif field not in cls._meta.local_fields:
2237 errors.append(
2238 checks.Error(
2239 "'%s' refers to field '%s' which is not local to model "
2240 "'%s'." % (option, field_name, cls._meta.object_name),
2241 hint="This issue may be caused by multi-table inheritance.",
2242 obj=cls,
2243 id="models.E016",
2244 )
2245 )
2246 return errors
2247
    @classmethod
    def _check_ordering(cls):
        """
        Check "ordering" option -- is it a list of strings and do all fields
        exist?

        Emits models.E021 for an ordering/order_with_respect_to clash,
        models.E014 for a non-sequence value, and models.E015 for names that
        resolve to nothing (plain fields, related lookups, or transforms).
        """
        if cls._meta._ordering_clash:
            return [
                checks.Error(
                    "'ordering' and 'order_with_respect_to' cannot be used together.",
                    obj=cls,
                    id="models.E021",
                ),
            ]

        # order_with_respect_to handles ordering itself; nothing to validate
        # when it is set or when no ordering is declared.
        if cls._meta.order_with_respect_to or not cls._meta.ordering:
            return []

        if not isinstance(cls._meta.ordering, (list, tuple)):
            return [
                checks.Error(
                    "'ordering' must be a tuple or list (even if you want to order by "
                    "only one field).",
                    obj=cls,
                    id="models.E014",
                )
            ]

        errors = []
        fields = cls._meta.ordering

        # Skip expressions and '?' fields.
        fields = (f for f in fields if isinstance(f, str) and f != "?")

        # Convert "-field" to "field".
        fields = (f.removeprefix("-") for f in fields)

        # Separate related fields and non-related fields.
        _fields = []
        related_fields = []
        for f in fields:
            if LOOKUP_SEP in f:
                related_fields.append(f)
            else:
                _fields.append(f)
        fields = _fields

        # Check related fields: walk each lookup path part by part, following
        # relations across models until the path ends or fails to resolve.
        for field in related_fields:
            _cls = cls
            fld = None
            for part in field.split(LOOKUP_SEP):
                try:
                    # pk is an alias that won't be found by opts.get_field.
                    if part == "pk":
                        fld = _cls._meta.pk
                    else:
                        fld = _cls._meta.get_field(part)
                    if fld.is_relation:
                        _cls = fld.path_infos[-1].to_opts.model
                    else:
                        # Non-relational field: any further part must be a
                        # transform/lookup, checked in the except clause below
                        # (AttributeError on _cls._meta).
                        _cls = None
                except (FieldDoesNotExist, AttributeError):
                    # The part isn't a field; it's still valid if the previous
                    # field accepts it as a transform or lookup.
                    if fld is None or (
                        fld.get_transform(part) is None and fld.get_lookup(part) is None
                    ):
                        errors.append(
                            checks.Error(
                                "'ordering' refers to the nonexistent field, "
                                "related field, or lookup '%s'." % field,
                                obj=cls,
                                id="models.E015",
                            )
                        )

        # Skip ordering on pk. This is always a valid order_by field
        # but is an alias and therefore won't be found by opts.get_field.
        fields = {f for f in fields if f != "pk"}

        # Check for invalid or nonexistent fields in ordering.
        invalid_fields = []

        # Any field name that is not present in field_names does not exist.
        # Also, ordering by m2m fields is not allowed.
        opts = cls._meta
        valid_fields = set(
            chain.from_iterable(
                (
                    (f.name, f.attname)
                    if not (f.auto_created and not f.concrete)
                    else (f.field.related_query_name(),)
                )
                for f in chain(opts.fields, opts.related_objects)
            )
        )

        invalid_fields.extend(fields - valid_fields)

        for invalid_field in invalid_fields:
            errors.append(
                checks.Error(
                    "'ordering' refers to the nonexistent field, related "
                    "field, or lookup '%s'." % invalid_field,
                    obj=cls,
                    id="models.E015",
                )
            )
        return errors
2356
2357 @classmethod
2358 def _check_long_column_names(cls, databases):
2359 """
2360 Check that any auto-generated column names are shorter than the limits
2361 for each database in which the model will be created.
2362 """
2363 if not databases:
2364 return []
2365 errors = []
2366 allowed_len = None
2367 db_alias = None
2368
2369 # Find the minimum max allowed length among all specified db_aliases.
2370 for db in databases:
2371 # skip databases where the model won't be created
2372 if not router.allow_migrate_model(db, cls):
2373 continue
2374 connection = connections[db]
2375 max_name_length = connection.ops.max_name_length()
2376 if max_name_length is None or connection.features.truncates_names:
2377 continue
2378 else:
2379 if allowed_len is None:
2380 allowed_len = max_name_length
2381 db_alias = db
2382 elif max_name_length < allowed_len:
2383 allowed_len = max_name_length
2384 db_alias = db
2385
2386 if allowed_len is None:
2387 return errors
2388
2389 for f in cls._meta.local_fields:
2390 # Check if auto-generated name for the field is too long
2391 # for the database.
2392 if (
2393 f.db_column is None
2394 and (column_name := f.column) is not None
2395 and len(column_name) > allowed_len
2396 ):
2397 errors.append(
2398 checks.Error(
2399 'Autogenerated column name too long for field "%s". '
2400 'Maximum length is "%s" for database "%s".'
2401 % (column_name, allowed_len, db_alias),
2402 hint="Set the column name manually using 'db_column'.",
2403 obj=cls,
2404 id="models.E018",
2405 )
2406 )
2407
2408 for f in cls._meta.local_many_to_many:
2409 # Skip nonexistent models.
2410 if isinstance(f.remote_field.through, str):
2411 continue
2412
2413 # Check if auto-generated name for the M2M field is too long
2414 # for the database.
2415 for m2m in f.remote_field.through._meta.local_fields:
2416 if (
2417 m2m.db_column is None
2418 and (rel_name := m2m.column) is not None
2419 and len(rel_name) > allowed_len
2420 ):
2421 errors.append(
2422 checks.Error(
2423 "Autogenerated column name too long for M2M field "
2424 '"%s". Maximum length is "%s" for database "%s".'
2425 % (rel_name, allowed_len, db_alias),
2426 hint=(
2427 "Use 'through' to create a separate model for "
2428 "M2M and then set column_name using 'db_column'."
2429 ),
2430 obj=cls,
2431 id="models.E019",
2432 )
2433 )
2434
2435 return errors
2436
2437 @classmethod
2438 def _get_expr_references(cls, expr):
2439 if isinstance(expr, Q):
2440 for child in expr.children:
2441 if isinstance(child, tuple):
2442 lookup, value = child
2443 yield tuple(lookup.split(LOOKUP_SEP))
2444 yield from cls._get_expr_references(value)
2445 else:
2446 yield from cls._get_expr_references(child)
2447 elif isinstance(expr, F):
2448 yield tuple(expr.name.split(LOOKUP_SEP))
2449 elif hasattr(expr, "get_source_expressions"):
2450 for src_expr in expr.get_source_expressions():
2451 yield from cls._get_expr_references(src_expr)
2452
2453 @classmethod
2454 def _check_constraints(cls, databases):
2455 errors = []
2456 for db in databases:
2457 if not router.allow_migrate_model(db, cls):
2458 continue
2459 connection = connections[db]
2460 for constraint in cls._meta.constraints:
2461 errors.extend(constraint._check(cls, connection))
2462 return errors
2463
2464
2465############################################
2466# HELPER FUNCTIONS (CURRIED MODEL METHODS) #
2467############################################
2468
2469# ORDERING METHODS #########################
2470
2471
def method_set_order(self, ordered_obj, id_list, using=None):
    """Persist id_list as the _order sequence for this object's related rows."""
    order_field = ordered_obj._meta.order_with_respect_to
    filter_args = order_field.get_forward_related_filter(self)
    # Each pk's position in id_list becomes its new _order value.
    updates = [
        ordered_obj(pk=pk, _order=position) for position, pk in enumerate(id_list)
    ]
    queryset = ordered_obj.objects.db_manager(using).filter(**filter_args)
    queryset.bulk_update(updates, ["_order"])
2479
2480
def method_get_order(self, ordered_obj):
    """Return the primary keys of this object's related rows in stored order."""
    order_field = ordered_obj._meta.order_with_respect_to
    filter_args = order_field.get_forward_related_filter(self)
    pk_name = ordered_obj._meta.pk.name
    queryset = ordered_obj.objects.filter(**filter_args)
    return queryset.values_list(pk_name, flat=True)
2486
2487
def make_foreign_order_accessors(model, related_model):
    """Attach get_/set_<model>_order helpers to related_model."""
    model_name = model.__name__.lower()
    # Bind each ordering helper to the ordered model via partialmethod so
    # the accessor receives it automatically when called on related_model.
    accessors = (
        ("get_%s_order", method_get_order),
        ("set_%s_order", method_set_order),
    )
    for name_template, func in accessors:
        setattr(
            related_model,
            name_template % model_name,
            partialmethod(func, model),
        )
2499
2500
2501########
2502# MISC #
2503########
2504
2505
def model_unpickle(model_id):
    """Used to unpickle Model subclasses with deferred fields."""
    if not isinstance(model_id, tuple):
        # Backwards compat - the model was cached directly in earlier versions.
        model = model_id
    else:
        model = apps.get_model(*model_id)
    # Bypass __init__; pickle restores the instance state afterwards.
    return model.__new__(model)


model_unpickle.__safe_for_unpickle__ = True