# orm/strategies.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors


"""sqlalchemy.orm.interfaces.LoaderStrategy
implementations, and related MapperOptions."""

from __future__ import annotations

import collections
import itertools
from typing import Any
from typing import Dict
from typing import Optional
from typing import Tuple
from typing import TYPE_CHECKING
from typing import Union

from . import attributes
from . import exc as orm_exc
from . import interfaces
from . import loading
from . import path_registry
from . import properties
from . import query
from . import relationships
from . import unitofwork
from . import util as orm_util
from .base import _DEFER_FOR_STATE
from .base import _RAISE_FOR_STATE
from .base import _SET_DEFERRED_EXPIRED
from .base import ATTR_WAS_SET
from .base import LoaderCallableStatus
from .base import PASSIVE_OFF
from .base import PassiveFlag
from .context import _column_descriptions
from .context import _ORMCompileState
from .context import _ORMSelectCompileState
from .context import QueryContext
from .interfaces import LoaderStrategy
from .interfaces import StrategizedProperty
from .session import _state_session
from .state import InstanceState
from .strategy_options import Load
from .util import _none_only_set
from .util import AliasedClass
from .. import event
from .. import exc as sa_exc
from .. import inspect
from .. import log
from .. import sql
from .. import util
from ..sql import util as sql_util
from ..sql import visitors
from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
from ..sql.selectable import Select
from ..util.typing import Literal

if TYPE_CHECKING:
    from .mapper import Mapper
    from .relationships import RelationshipProperty
    from ..sql.elements import ColumnElement


def _register_attribute(
    prop,
    mapper,
    useobject,
    compare_function=None,
    typecallable=None,
    callable_=None,
    proxy_property=None,
    active_history=False,
    impl_class=None,
    default_scalar_value=None,
    **kw,
):
83 listen_hooks = []
84
85 uselist = useobject and prop.uselist
86
87 if useobject and prop.single_parent:
88 listen_hooks.append(_single_parent_validator)
89
90 if prop.key in prop.parent.validators:
91 fn, opts = prop.parent.validators[prop.key]
92 listen_hooks.append(
93 lambda desc, prop: orm_util._validator_events(
94 desc, prop.key, fn, **opts
95 )
96 )
97
98 if useobject:
99 listen_hooks.append(unitofwork._track_cascade_events)
100
    # need to assemble backref listeners
    # after the single parent validator and the mapper-level validators
103 if useobject:
104 backref = prop.back_populates
105 if backref and prop._effective_sync_backref:
106 listen_hooks.append(
107 lambda desc, prop: attributes._backref_listeners(
108 desc, backref, uselist
109 )
110 )
111
    # a single MapperProperty is shared down a class inheritance
    # hierarchy, so we set up attribute instrumentation and backref event
    # for each mapper down the hierarchy.

    # typically, "mapper" is the same as prop.parent, due to the way
    # the configure_mappers() process runs, however this is not strongly
    # enforced, and in the case of a second configure_mappers() run the
    # mapper here might not be prop.parent; also, a subclass mapper may
    # be called here before a superclass mapper.  That is, we can't depend
    # on mappers not already being set up, so we have to check each one.
122
123 for m in mapper.self_and_descendants:
124 if prop is m._props.get(
125 prop.key
126 ) and not m.class_manager._attr_has_impl(prop.key):
127 desc = attributes._register_attribute_impl(
128 m.class_,
129 prop.key,
130 parent_token=prop,
131 uselist=uselist,
132 compare_function=compare_function,
133 useobject=useobject,
134 trackparent=useobject
135 and (
136 prop.single_parent
137 or prop.direction is interfaces.ONETOMANY
138 ),
139 typecallable=typecallable,
140 callable_=callable_,
141 active_history=active_history,
142 default_scalar_value=default_scalar_value,
143 impl_class=impl_class,
144 send_modified_events=not useobject or not prop.viewonly,
145 doc=prop.doc,
146 **kw,
147 )
148
149 for hook in listen_hooks:
150 hook(desc, prop)
151
152
153@properties.ColumnProperty.strategy_for(instrument=False, deferred=False)
154class _UninstrumentedColumnLoader(LoaderStrategy):
    """Represent a non-instrumented MapperProperty.

    The polymorphic_on argument of mapper() often results in this
    strategy, if the argument is against the with_polymorphic selectable.

    """
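    # e.g. (illustrative sketch only): with legacy concrete-inheritance
    # mappings, polymorphic_on is often a column of the
    # polymorphic_union() selectable:
    #
    #   pjoin = polymorphic_union({...}, "type", "pjoin")
    #   mapper(Employee, employees_table, with_polymorphic=("*", pjoin),
    #          polymorphic_on=pjoin.c.type)
    #
    # the "type" column then has no instrumented attribute of its own.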
161
162 __slots__ = ("columns",)
163
164 def __init__(self, parent, strategy_key):
165 super().__init__(parent, strategy_key)
166 self.columns = self.parent_property.columns
167
168 def setup_query(
169 self,
170 compile_state,
171 query_entity,
172 path,
173 loadopt,
174 adapter,
175 column_collection=None,
176 **kwargs,
177 ):
178 for c in self.columns:
179 if adapter:
180 c = adapter.columns[c]
181 compile_state._append_dedupe_col_collection(c, column_collection)
182
183 def create_row_processor(
184 self,
185 context,
186 query_entity,
187 path,
188 loadopt,
189 mapper,
190 result,
191 adapter,
192 populators,
193 ):
194 pass
195
196
197@log.class_logger
198@properties.ColumnProperty.strategy_for(instrument=True, deferred=False)
199class _ColumnLoader(LoaderStrategy):
200 """Provide loading behavior for a :class:`.ColumnProperty`."""
201
202 __slots__ = "columns", "is_composite"
203
204 def __init__(self, parent, strategy_key):
205 super().__init__(parent, strategy_key)
206 self.columns = self.parent_property.columns
207 self.is_composite = hasattr(self.parent_property, "composite_class")
208
209 def setup_query(
210 self,
211 compile_state,
212 query_entity,
213 path,
214 loadopt,
215 adapter,
216 column_collection,
217 memoized_populators,
218 check_for_adapt=False,
219 **kwargs,
220 ):
221 for c in self.columns:
222 if adapter:
223 if check_for_adapt:
224 c = adapter.adapt_check_present(c)
225 if c is None:
226 return
227 else:
228 c = adapter.columns[c]
229
230 compile_state._append_dedupe_col_collection(c, column_collection)
231
232 fetch = self.columns[0]
233 if adapter:
234 fetch = adapter.columns[fetch]
235 if fetch is None:
236 # None happens here only for dml bulk_persistence cases
237 # when context.DMLReturningColFilter is used
238 return
239
240 memoized_populators[self.parent_property] = fetch
241
242 def init_class_attribute(self, mapper):
243 self.is_class_level = True
244 coltype = self.columns[0].type
        # TODO: check all columns?  check for foreign key as well?
246 active_history = (
247 self.parent_property.active_history
248 or self.columns[0].primary_key
249 or (
250 mapper.version_id_col is not None
251 and mapper._columntoproperty.get(mapper.version_id_col, None)
252 is self.parent_property
253 )
254 )
255
256 _register_attribute(
257 self.parent_property,
258 mapper,
259 useobject=False,
260 compare_function=coltype.compare_values,
261 active_history=active_history,
262 default_scalar_value=self.parent_property._default_scalar_value,
263 )
264
265 def create_row_processor(
266 self,
267 context,
268 query_entity,
269 path,
270 loadopt,
271 mapper,
272 result,
273 adapter,
274 populators,
275 ):
276 # look through list of columns represented here
277 # to see which, if any, is present in the row.
278
279 for col in self.columns:
280 if adapter:
281 col = adapter.columns[col]
282 getter = result._getter(col, False)
283 if getter:
284 populators["quick"].append((self.key, getter))
285 break
286 else:
287 populators["expire"].append((self.key, True))
288
289
290@log.class_logger
291@properties.ColumnProperty.strategy_for(query_expression=True)
292class _ExpressionColumnLoader(_ColumnLoader):
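    # handles columns mapped with query_expression(); the expression is
    # normally supplied per-query via the with_expression() loader option,
    # e.g. (illustrative sketch, names are hypothetical):
    #
    #   class A(Base):
    #       ...
    #       expr = query_expression()
    #
    #   session.scalars(
    #       select(A).options(with_expression(A.expr, A.x + A.y))
    #   )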
293 def __init__(self, parent, strategy_key):
294 super().__init__(parent, strategy_key)
295
296 # compare to the "default" expression that is mapped in
297 # the column. If it's sql.null, we don't need to render
298 # unless an expr is passed in the options.
299 null = sql.null().label(None)
300 self._have_default_expression = any(
301 not c.compare(null) for c in self.parent_property.columns
302 )
303
304 def setup_query(
305 self,
306 compile_state,
307 query_entity,
308 path,
309 loadopt,
310 adapter,
311 column_collection,
312 memoized_populators,
313 **kwargs,
314 ):
315 columns = None
316 if loadopt and loadopt._extra_criteria:
317 columns = loadopt._extra_criteria
318
319 elif self._have_default_expression:
320 columns = self.parent_property.columns
321
322 if columns is None:
323 return
324
325 for c in columns:
326 if adapter:
327 c = adapter.columns[c]
328 compile_state._append_dedupe_col_collection(c, column_collection)
329
330 fetch = columns[0]
331 if adapter:
332 fetch = adapter.columns[fetch]
333 if fetch is None:
334 # None is not expected to be the result of any
335 # adapter implementation here, however there may be theoretical
336 # usages of returning() with context.DMLReturningColFilter
337 return
338
339 memoized_populators[self.parent_property] = fetch
340
341 def create_row_processor(
342 self,
343 context,
344 query_entity,
345 path,
346 loadopt,
347 mapper,
348 result,
349 adapter,
350 populators,
351 ):
352 # look through list of columns represented here
353 # to see which, if any, is present in the row.
354 if loadopt and loadopt._extra_criteria:
355 columns = loadopt._extra_criteria
356
357 for col in columns:
358 if adapter:
359 col = adapter.columns[col]
360 getter = result._getter(col, False)
361 if getter:
362 populators["quick"].append((self.key, getter))
363 break
364 else:
365 populators["expire"].append((self.key, True))
366
367 def init_class_attribute(self, mapper):
368 self.is_class_level = True
369
370 _register_attribute(
371 self.parent_property,
372 mapper,
373 useobject=False,
374 compare_function=self.columns[0].type.compare_values,
375 accepts_scalar_loader=False,
376 default_scalar_value=self.parent_property._default_scalar_value,
377 )
378
379
380@log.class_logger
381@properties.ColumnProperty.strategy_for(deferred=True, instrument=True)
382@properties.ColumnProperty.strategy_for(
383 deferred=True, instrument=True, raiseload=True
384)
385@properties.ColumnProperty.strategy_for(do_nothing=True)
386class _DeferredColumnLoader(LoaderStrategy):
387 """Provide loading behavior for a deferred :class:`.ColumnProperty`."""
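    # e.g. (illustrative sketch, names are hypothetical): a column mapped
    # with deferred() uses this strategy; it is loaded on first access or
    # when undeferred in a query:
    #
    #   class Book(Base):
    #       ...
    #       summary = deferred(Column(Text))
    #
    #   session.scalars(select(Book).options(undefer(Book.summary)))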
388
389 __slots__ = "columns", "group", "raiseload"
390
391 def __init__(self, parent, strategy_key):
392 super().__init__(parent, strategy_key)
393 if hasattr(self.parent_property, "composite_class"):
394 raise NotImplementedError(
395 "Deferred loading for composite types not implemented yet"
396 )
397 self.raiseload = self.strategy_opts.get("raiseload", False)
398 self.columns = self.parent_property.columns
399 self.group = self.parent_property.group
400
401 def create_row_processor(
402 self,
403 context,
404 query_entity,
405 path,
406 loadopt,
407 mapper,
408 result,
409 adapter,
410 populators,
411 ):
412 # for a DeferredColumnLoader, this method is only used during a
413 # "row processor only" query; see test_deferred.py ->
414 # tests with "rowproc_only" in their name. As of the 1.0 series,
415 # loading._instance_processor doesn't use a "row processing" function
416 # to populate columns, instead it uses data in the "populators"
417 # dictionary. Normally, the DeferredColumnLoader.setup_query()
418 # sets up that data in the "memoized_populators" dictionary
419 # and "create_row_processor()" here is never invoked.
420
421 if (
422 context.refresh_state
423 and context.query._compile_options._only_load_props
424 and self.key in context.query._compile_options._only_load_props
425 ):
426 self.parent_property._get_strategy(
427 (("deferred", False), ("instrument", True))
428 ).create_row_processor(
429 context,
430 query_entity,
431 path,
432 loadopt,
433 mapper,
434 result,
435 adapter,
436 populators,
437 )
438
439 elif not self.is_class_level:
440 if self.raiseload:
441 set_deferred_for_local_state = (
442 self.parent_property._raise_column_loader
443 )
444 else:
445 set_deferred_for_local_state = (
446 self.parent_property._deferred_column_loader
447 )
448 populators["new"].append((self.key, set_deferred_for_local_state))
449 else:
450 populators["expire"].append((self.key, False))
451
452 def init_class_attribute(self, mapper):
453 self.is_class_level = True
454
455 _register_attribute(
456 self.parent_property,
457 mapper,
458 useobject=False,
459 compare_function=self.columns[0].type.compare_values,
460 callable_=self._load_for_state,
461 load_on_unexpire=False,
462 default_scalar_value=self.parent_property._default_scalar_value,
463 )
464
465 def setup_query(
466 self,
467 compile_state,
468 query_entity,
469 path,
470 loadopt,
471 adapter,
472 column_collection,
473 memoized_populators,
474 only_load_props=None,
475 **kw,
476 ):
477 if (
478 (
479 compile_state.compile_options._render_for_subquery
480 and self.parent_property._renders_in_subqueries
481 )
482 or (
483 loadopt
484 and set(self.columns).intersection(
485 self.parent._should_undefer_in_wildcard
486 )
487 )
488 or (
489 loadopt
490 and self.group
491 and loadopt.local_opts.get(
492 "undefer_group_%s" % self.group, False
493 )
494 )
495 or (only_load_props and self.key in only_load_props)
496 ):
497 self.parent_property._get_strategy(
498 (("deferred", False), ("instrument", True))
499 ).setup_query(
500 compile_state,
501 query_entity,
502 path,
503 loadopt,
504 adapter,
505 column_collection,
506 memoized_populators,
507 **kw,
508 )
509 elif self.is_class_level:
510 memoized_populators[self.parent_property] = _SET_DEFERRED_EXPIRED
511 elif not self.raiseload:
512 memoized_populators[self.parent_property] = _DEFER_FOR_STATE
513 else:
514 memoized_populators[self.parent_property] = _RAISE_FOR_STATE
515
516 def _load_for_state(self, state, passive):
517 if not state.key:
518 return LoaderCallableStatus.ATTR_EMPTY
519
520 if not passive & PassiveFlag.SQL_OK:
521 return LoaderCallableStatus.PASSIVE_NO_RESULT
522
523 localparent = state.manager.mapper
524
525 if self.group:
526 toload = [
527 p.key
528 for p in localparent.iterate_properties
529 if isinstance(p, StrategizedProperty)
530 and isinstance(p.strategy, _DeferredColumnLoader)
531 and p.group == self.group
532 ]
533 else:
534 toload = [self.key]
535
536 # narrow the keys down to just those which have no history
537 group = [k for k in toload if k in state.unmodified]
538
539 session = _state_session(state)
540 if session is None:
541 raise orm_exc.DetachedInstanceError(
542 "Parent instance %s is not bound to a Session; "
543 "deferred load operation of attribute '%s' cannot proceed"
544 % (orm_util.state_str(state), self.key)
545 )
546
547 if self.raiseload:
548 self._invoke_raise_load(state, passive, "raise")
549
550 loading._load_scalar_attributes(
551 state.mapper, state, set(group), PASSIVE_OFF
552 )
553
554 return LoaderCallableStatus.ATTR_WAS_SET
555
556 def _invoke_raise_load(self, state, passive, lazy):
557 raise sa_exc.InvalidRequestError(
558 "'%s' is not available due to raiseload=True" % (self,)
559 )
560
561
562class _LoadDeferredColumns:
    """serializable loader object used by _DeferredColumnLoader"""
564
565 def __init__(self, key: str, raiseload: bool = False):
566 self.key = key
567 self.raiseload = raiseload
568
569 def __call__(self, state, passive=attributes.PASSIVE_OFF):
570 key = self.key
571
572 localparent = state.manager.mapper
573 prop = localparent._props[key]
574 if self.raiseload:
575 strategy_key = (
576 ("deferred", True),
577 ("instrument", True),
578 ("raiseload", True),
579 )
580 else:
581 strategy_key = (("deferred", True), ("instrument", True))
582 strategy = prop._get_strategy(strategy_key)
583 return strategy._load_for_state(state, passive)
584
585
586class _AbstractRelationshipLoader(LoaderStrategy):
    """LoaderStrategies which deal with related objects."""
588
589 __slots__ = "mapper", "target", "uselist", "entity"
590
591 def __init__(self, parent, strategy_key):
592 super().__init__(parent, strategy_key)
593 self.mapper = self.parent_property.mapper
594 self.entity = self.parent_property.entity
595 self.target = self.parent_property.target
596 self.uselist = self.parent_property.uselist
597
598 def _immediateload_create_row_processor(
599 self,
600 context,
601 query_entity,
602 path,
603 loadopt,
604 mapper,
605 result,
606 adapter,
607 populators,
608 ):
609 return self.parent_property._get_strategy(
610 (("lazy", "immediate"),)
611 ).create_row_processor(
612 context,
613 query_entity,
614 path,
615 loadopt,
616 mapper,
617 result,
618 adapter,
619 populators,
620 )
621
622
623@log.class_logger
624@relationships.RelationshipProperty.strategy_for(do_nothing=True)
625class _DoNothingLoader(LoaderStrategy):
626 """Relationship loader that makes no change to the object's state.
627
628 Compared to NoLoader, this loader does not initialize the
629 collection/attribute to empty/none; the usual default LazyLoader will
630 take effect.
631
632 """
633
634
635@log.class_logger
636@relationships.RelationshipProperty.strategy_for(lazy="noload")
637@relationships.RelationshipProperty.strategy_for(lazy=None)
638class _NoLoader(_AbstractRelationshipLoader):
639 """Provide loading behavior for a :class:`.Relationship`
640 with "lazy=None".
641
642 """
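    # e.g. (illustrative): relationship(Child, lazy="noload") selects this
    # strategy; the attribute is populated with None / an empty collection
    # rather than being loaded (deprecated; see the deprecation note on
    # init_class_attribute below).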
643
644 __slots__ = ()
645
646 @util.deprecated(
647 "2.1",
648 "The ``noload`` loader strategy is deprecated and will be removed "
649 "in a future release. This option "
650 "produces incorrect results by returning ``None`` for related "
651 "items.",
652 )
653 def init_class_attribute(self, mapper):
654 self.is_class_level = True
655
656 _register_attribute(
657 self.parent_property,
658 mapper,
659 useobject=True,
660 typecallable=self.parent_property.collection_class,
661 )
662
663 def create_row_processor(
664 self,
665 context,
666 query_entity,
667 path,
668 loadopt,
669 mapper,
670 result,
671 adapter,
672 populators,
673 ):
674 def invoke_no_load(state, dict_, row):
675 if self.uselist:
676 attributes.init_state_collection(state, dict_, self.key)
677 else:
678 dict_[self.key] = None
679
680 populators["new"].append((self.key, invoke_no_load))
681
682
683@log.class_logger
684@relationships.RelationshipProperty.strategy_for(lazy=True)
685@relationships.RelationshipProperty.strategy_for(lazy="select")
686@relationships.RelationshipProperty.strategy_for(lazy="raise")
687@relationships.RelationshipProperty.strategy_for(lazy="raise_on_sql")
688@relationships.RelationshipProperty.strategy_for(lazy="baked_select")
689class _LazyLoader(
690 _AbstractRelationshipLoader, util.MemoizedSlots, log.Identified
691):
    """Provide loading behavior for a :class:`.Relationship`
    with "lazy=True", that is, loads when first accessed.

    """
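    # e.g. (illustrative): this is the default relationship() strategy;
    # accessing ``parent.children`` on a persistent instance emits a
    # SELECT at that moment.  lazy="raise" / "raise_on_sql" use the same
    # strategy but raise instead of (or before) emitting SQL.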
696
697 __slots__ = (
698 "_lazywhere",
699 "_rev_lazywhere",
700 "_lazyload_reverse_option",
701 "_order_by",
702 "use_get",
703 "is_aliased_class",
704 "_bind_to_col",
705 "_equated_columns",
706 "_rev_bind_to_col",
707 "_rev_equated_columns",
708 "_simple_lazy_clause",
709 "_raise_always",
710 "_raise_on_sql",
711 )
712
713 _lazywhere: ColumnElement[bool]
714 _bind_to_col: Dict[str, ColumnElement[Any]]
715 _rev_lazywhere: ColumnElement[bool]
716 _rev_bind_to_col: Dict[str, ColumnElement[Any]]
717
718 parent_property: RelationshipProperty[Any]
719
720 def __init__(
721 self, parent: RelationshipProperty[Any], strategy_key: Tuple[Any, ...]
722 ):
723 super().__init__(parent, strategy_key)
724 self._raise_always = self.strategy_opts["lazy"] == "raise"
725 self._raise_on_sql = self.strategy_opts["lazy"] == "raise_on_sql"
726
727 self.is_aliased_class = inspect(self.entity).is_aliased_class
728
729 join_condition = self.parent_property._join_condition
730 (
731 self._lazywhere,
732 self._bind_to_col,
733 self._equated_columns,
734 ) = join_condition.create_lazy_clause()
735
736 (
737 self._rev_lazywhere,
738 self._rev_bind_to_col,
739 self._rev_equated_columns,
740 ) = join_condition.create_lazy_clause(reverse_direction=True)
741
742 if self.parent_property.order_by:
743 self._order_by = [
744 sql_util._deep_annotate(elem, {"_orm_adapt": True})
745 for elem in util.to_list(self.parent_property.order_by)
746 ]
747 else:
748 self._order_by = None
749
750 self.logger.info("%s lazy loading clause %s", self, self._lazywhere)
751
        # determine if our "lazywhere" clause is the same as the mapper's
        # get() clause.  then we can just use mapper.get()
        #
        # TODO: the "not self.uselist" can be taken out entirely; a m2o
        # load that populates for a list (very unusual, but is possible with
        # the API) can still be set to "None" and the attribute system will
        # populate as an empty list.
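        # e.g. (illustrative mapping, not from this module): for a plain
        # many-to-one such as
        #
        #   class Child(Base):
        #       parent_id = mapped_column(ForeignKey("parent.id"))
        #       parent = relationship("Parent")
        #
        # the lazy clause compares the related primary key to a bound
        # parameter, so use_get is True and the load can be satisfied by
        # Session.get() / the identity map.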
759 self.use_get = (
760 not self.is_aliased_class
761 and not self.uselist
762 and self.entity._get_clause[0].compare(
763 self._lazywhere,
764 use_proxies=True,
765 compare_keys=False,
766 equivalents=self.mapper._equivalent_columns,
767 )
768 )
769
770 if self.use_get:
771 for col in list(self._equated_columns):
772 if col in self.mapper._equivalent_columns:
773 for c in self.mapper._equivalent_columns[col]:
774 self._equated_columns[c] = self._equated_columns[col]
775
776 self.logger.info(
777 "%s will use Session.get() to optimize instance loads", self
778 )
779
780 def init_class_attribute(self, mapper):
781 self.is_class_level = True
782
783 _legacy_inactive_history_style = (
784 self.parent_property._legacy_inactive_history_style
785 )
786
787 if self.parent_property.active_history:
788 active_history = True
789 _deferred_history = False
790
791 elif (
792 self.parent_property.direction is not interfaces.MANYTOONE
793 or not self.use_get
794 ):
795 if _legacy_inactive_history_style:
796 active_history = True
797 _deferred_history = False
798 else:
799 active_history = False
800 _deferred_history = True
801 else:
802 active_history = _deferred_history = False
803
804 _register_attribute(
805 self.parent_property,
806 mapper,
807 useobject=True,
808 callable_=self._load_for_state,
809 typecallable=self.parent_property.collection_class,
810 active_history=active_history,
811 _deferred_history=_deferred_history,
812 )
813
814 def _memoized_attr__simple_lazy_clause(self):
815 lazywhere = sql_util._deep_annotate(
816 self._lazywhere, {"_orm_adapt": True}
817 )
818
819 criterion, bind_to_col = (lazywhere, self._bind_to_col)
820
821 params = []
822
823 def visit_bindparam(bindparam):
824 bindparam.unique = False
825
826 visitors.traverse(criterion, {}, {"bindparam": visit_bindparam})
827
828 def visit_bindparam(bindparam):
829 if bindparam._identifying_key in bind_to_col:
830 params.append(
831 (
832 bindparam.key,
833 bind_to_col[bindparam._identifying_key],
834 None,
835 )
836 )
837 elif bindparam.callable is None:
838 params.append((bindparam.key, None, bindparam.value))
839
840 criterion = visitors.cloned_traverse(
841 criterion, {}, {"bindparam": visit_bindparam}
842 )
843
844 return criterion, params
845
846 def _generate_lazy_clause(self, state, passive):
847 criterion, param_keys = self._simple_lazy_clause
848
849 if state is None:
850 return sql_util.adapt_criterion_to_null(
851 criterion, [key for key, ident, value in param_keys]
852 )
853
854 mapper = self.parent_property.parent
855
856 o = state.obj() # strong ref
857 dict_ = attributes.instance_dict(o)
858
859 if passive & PassiveFlag.INIT_OK:
860 passive ^= PassiveFlag.INIT_OK
861
862 params = {}
863 for key, ident, value in param_keys:
864 if ident is not None:
865 if passive and passive & PassiveFlag.LOAD_AGAINST_COMMITTED:
866 value = mapper._get_committed_state_attr_by_column(
867 state, dict_, ident, passive
868 )
869 else:
870 value = mapper._get_state_attr_by_column(
871 state, dict_, ident, passive
872 )
873
874 params[key] = value
875
876 return criterion, params
877
878 def _invoke_raise_load(self, state, passive, lazy):
879 raise sa_exc.InvalidRequestError(
880 "'%s' is not available due to lazy='%s'" % (self, lazy)
881 )
882
883 def _load_for_state(
884 self,
885 state,
886 passive,
887 loadopt=None,
888 extra_criteria=(),
889 extra_options=(),
890 alternate_effective_path=None,
891 execution_options=util.EMPTY_DICT,
892 ):
893 if not state.key and (
894 (
895 not self.parent_property.load_on_pending
896 and not state._load_pending
897 )
898 or not state.session_id
899 ):
900 return LoaderCallableStatus.ATTR_EMPTY
901
902 pending = not state.key
903 primary_key_identity = None
904
905 use_get = self.use_get and (not loadopt or not loadopt._extra_criteria)
906
907 if (not passive & PassiveFlag.SQL_OK and not use_get) or (
908 not passive & attributes.NON_PERSISTENT_OK and pending
909 ):
910 return LoaderCallableStatus.PASSIVE_NO_RESULT
911
912 if (
913 # we were given lazy="raise"
914 self._raise_always
915 # the no_raise history-related flag was not passed
916 and not passive & PassiveFlag.NO_RAISE
917 and (
918 # if we are use_get and related_object_ok is disabled,
919 # which means we are at most looking in the identity map
920 # for history purposes or otherwise returning
921 # PASSIVE_NO_RESULT, don't raise. This is also a
922 # history-related flag
923 not use_get
924 or passive & PassiveFlag.RELATED_OBJECT_OK
925 )
926 ):
927 self._invoke_raise_load(state, passive, "raise")
928
929 session = _state_session(state)
930 if not session:
931 if passive & PassiveFlag.NO_RAISE:
932 return LoaderCallableStatus.PASSIVE_NO_RESULT
933
934 raise orm_exc.DetachedInstanceError(
935 "Parent instance %s is not bound to a Session; "
936 "lazy load operation of attribute '%s' cannot proceed"
937 % (orm_util.state_str(state), self.key)
938 )
939
940 # if we have a simple primary key load, check the
941 # identity map without generating a Query at all
942 if use_get:
943 primary_key_identity = self._get_ident_for_use_get(
944 session, state, passive
945 )
946 if LoaderCallableStatus.PASSIVE_NO_RESULT in primary_key_identity:
947 return LoaderCallableStatus.PASSIVE_NO_RESULT
948 elif LoaderCallableStatus.NEVER_SET in primary_key_identity:
949 return LoaderCallableStatus.NEVER_SET
950
951 # test for None alone in primary_key_identity based on
952 # allow_partial_pks preference. PASSIVE_NO_RESULT and NEVER_SET
953 # have already been tested above
954 if not self.mapper.allow_partial_pks:
955 if _none_only_set.intersection(primary_key_identity):
956 return None
957 else:
958 if _none_only_set.issuperset(primary_key_identity):
959 return None
960
961 if (
962 self.key in state.dict
963 and not passive & PassiveFlag.DEFERRED_HISTORY_LOAD
964 ):
965 return LoaderCallableStatus.ATTR_WAS_SET
966
967 # look for this identity in the identity map. Delegate to the
968 # Query class in use, as it may have special rules for how it
969 # does this, including how it decides what the correct
970 # identity_token would be for this identity.
971
972 instance = session._identity_lookup(
973 self.entity,
974 primary_key_identity,
975 passive=passive,
976 lazy_loaded_from=state,
977 )
978
979 if instance is not None:
980 if instance is LoaderCallableStatus.PASSIVE_CLASS_MISMATCH:
981 return None
982 else:
983 return instance
984 elif (
985 not passive & PassiveFlag.SQL_OK
986 or not passive & PassiveFlag.RELATED_OBJECT_OK
987 ):
988 return LoaderCallableStatus.PASSIVE_NO_RESULT
989
990 return self._emit_lazyload(
991 session,
992 state,
993 primary_key_identity,
994 passive,
995 loadopt,
996 extra_criteria,
997 extra_options,
998 alternate_effective_path,
999 execution_options,
1000 )
1001
1002 def _get_ident_for_use_get(self, session, state, passive):
1003 instance_mapper = state.manager.mapper
1004
1005 if passive & PassiveFlag.LOAD_AGAINST_COMMITTED:
1006 get_attr = instance_mapper._get_committed_state_attr_by_column
1007 else:
1008 get_attr = instance_mapper._get_state_attr_by_column
1009
1010 dict_ = state.dict
1011
1012 return [
1013 get_attr(state, dict_, self._equated_columns[pk], passive=passive)
1014 for pk in self.mapper.primary_key
1015 ]
1016
1017 @util.preload_module("sqlalchemy.orm.strategy_options")
1018 def _emit_lazyload(
1019 self,
1020 session,
1021 state,
1022 primary_key_identity,
1023 passive,
1024 loadopt,
1025 extra_criteria,
1026 extra_options,
1027 alternate_effective_path,
1028 execution_options,
1029 ):
1030 strategy_options = util.preloaded.orm_strategy_options
1031
1032 clauseelement = self.entity.__clause_element__()
1033 stmt = Select._create_raw_select(
1034 _raw_columns=[clauseelement],
1035 _propagate_attrs=clauseelement._propagate_attrs,
1036 _label_style=LABEL_STYLE_TABLENAME_PLUS_COL,
1037 _compile_options=_ORMCompileState.default_compile_options,
1038 )
1039 load_options = QueryContext.default_load_options
1040
1041 load_options += {
1042 "_invoke_all_eagers": False,
1043 "_lazy_loaded_from": state,
1044 }
1045
1046 if self.parent_property.secondary is not None:
1047 stmt = stmt.select_from(
1048 self.mapper, self.parent_property.secondary
1049 )
1050
1051 pending = not state.key
1052
1053 # don't autoflush on pending
1054 if pending or passive & attributes.NO_AUTOFLUSH:
1055 stmt._execution_options = util.immutabledict({"autoflush": False})
1056
1057 use_get = self.use_get
1058
1059 if state.load_options or (loadopt and loadopt._extra_criteria):
1060 if alternate_effective_path is None:
1061 effective_path = state.load_path[self.parent_property]
1062 else:
1063 effective_path = alternate_effective_path[self.parent_property]
1064
1065 opts = state.load_options
1066
1067 if loadopt and loadopt._extra_criteria:
1068 use_get = False
1069 opts += (
1070 orm_util.LoaderCriteriaOption(self.entity, extra_criteria),
1071 )
1072
1073 stmt._with_options = opts
1074 elif alternate_effective_path is None:
1075 # this path is used if there are not already any options
1076 # in the query, but an event may want to add them
1077 effective_path = state.mapper._path_registry[self.parent_property]
1078 else:
1079 # added by immediateloader
1080 effective_path = alternate_effective_path[self.parent_property]
1081
1082 if extra_options:
1083 stmt._with_options += extra_options
1084
1085 stmt._compile_options += {"_current_path": effective_path}
1086
1087 if use_get:
1088 if self._raise_on_sql and not passive & PassiveFlag.NO_RAISE:
1089 self._invoke_raise_load(state, passive, "raise_on_sql")
1090
1091 return loading._load_on_pk_identity(
1092 session,
1093 stmt,
1094 primary_key_identity,
1095 load_options=load_options,
1096 execution_options=execution_options,
1097 )
1098
1099 if self._order_by:
1100 stmt._order_by_clauses = self._order_by
1101
1102 def _lazyload_reverse(compile_context):
1103 for rev in self.parent_property._reverse_property:
1104 # reverse props that are MANYTOONE are loading *this*
1105 # object from get(), so don't need to eager out to those.
1106 if (
1107 rev.direction is interfaces.MANYTOONE
1108 and rev._use_get
1109 and not isinstance(rev.strategy, _LazyLoader)
1110 ):
1111 strategy_options.Load._construct_for_existing_path(
1112 compile_context.compile_options._current_path[
1113 rev.parent
1114 ]
1115 ).lazyload(rev).process_compile_state(compile_context)
1116
1117 stmt = stmt._add_compile_state_func(
1118 _lazyload_reverse, self.parent_property
1119 )
1120
1121 lazy_clause, params = self._generate_lazy_clause(state, passive)
1122
1123 if execution_options:
1124 execution_options = util.EMPTY_DICT.merge_with(
1125 execution_options,
1126 {
1127 "_sa_orm_load_options": load_options,
1128 },
1129 )
1130 else:
1131 execution_options = {
1132 "_sa_orm_load_options": load_options,
1133 }
1134
1135 if (
1136 self.key in state.dict
1137 and not passive & PassiveFlag.DEFERRED_HISTORY_LOAD
1138 ):
1139 return LoaderCallableStatus.ATTR_WAS_SET
1140
1141 if pending:
1142 if util.has_intersection(orm_util._none_set, params.values()):
1143 return None
1144
1145 elif util.has_intersection(orm_util._never_set, params.values()):
1146 return None
1147
1148 if self._raise_on_sql and not passive & PassiveFlag.NO_RAISE:
1149 self._invoke_raise_load(state, passive, "raise_on_sql")
1150
1151 stmt._where_criteria = (lazy_clause,)
1152
1153 result = session.execute(
1154 stmt, params, execution_options=execution_options
1155 )
1156
1157 result = result.unique().scalars().all()
1158
1159 if self.uselist:
1160 return result
1161 else:
1162 l = len(result)
1163 if l:
1164 if l > 1:
1165 util.warn(
1166 "Multiple rows returned with "
1167 "uselist=False for lazily-loaded attribute '%s' "
1168 % self.parent_property
1169 )
1170
1171 return result[0]
1172 else:
1173 return None
1174
1175 def create_row_processor(
1176 self,
1177 context,
1178 query_entity,
1179 path,
1180 loadopt,
1181 mapper,
1182 result,
1183 adapter,
1184 populators,
1185 ):
1186 key = self.key
1187
1188 if (
1189 context.load_options._is_user_refresh
1190 and context.query._compile_options._only_load_props
1191 and self.key in context.query._compile_options._only_load_props
1192 ):
1193 return self._immediateload_create_row_processor(
1194 context,
1195 query_entity,
1196 path,
1197 loadopt,
1198 mapper,
1199 result,
1200 adapter,
1201 populators,
1202 )
1203
1204 if not self.is_class_level or (loadopt and loadopt._extra_criteria):
1205 # we are not the primary manager for this attribute
1206 # on this class - set up a
1207 # per-instance lazyloader, which will override the
1208 # class-level behavior.
1209 # this currently only happens when using a
1210 # "lazyload" option on a "no load"
1211 # attribute - "eager" attributes always have a
1212 # class-level lazyloader installed.
1213 set_lazy_callable = (
1214 InstanceState._instance_level_callable_processor
1215 )(
1216 mapper.class_manager,
1217 _LoadLazyAttribute(
1218 key,
1219 self,
1220 loadopt,
1221 (
1222 loadopt._generate_extra_criteria(context)
1223 if loadopt._extra_criteria
1224 else None
1225 ),
1226 ),
1227 key,
1228 )
1229
1230 populators["new"].append((self.key, set_lazy_callable))
1231 elif context.populate_existing or mapper.always_refresh:
1232
1233 def reset_for_lazy_callable(state, dict_, row):
1234 # we are the primary manager for this attribute on
1235 # this class - reset its
1236 # per-instance attribute state, so that the class-level
1237 # lazy loader is
1238 # executed when next referenced on this instance.
1239 # this is needed in
1240 # populate_existing() types of scenarios to reset
1241 # any existing state.
1242 state._reset(dict_, key)
1243
1244 populators["new"].append((self.key, reset_for_lazy_callable))
1245
1246
1247class _LoadLazyAttribute:
    """semi-serializable loader object used by _LazyLoader

    Historically, this object would be carried along with instances that
    needed to run lazyloaders, so it had to be serializable to support
    cached instances.

    This is no longer a general requirement, and the case where this object
    is used is exactly the case where we can't really serialize easily,
    which is when extra criteria in the loader option is present.

    We can't reliably serialize that as it refers to mapped entities and
    AliasedClass objects that are local to the current process, which would
    need to be matched up on deserialization, e.g. via the
    sqlalchemy.ext.serializer approach.

    """
1264
1265 def __init__(self, key, initiating_strategy, loadopt, extra_criteria):
1266 self.key = key
1267 self.strategy_key = initiating_strategy.strategy_key
1268 self.loadopt = loadopt
1269 self.extra_criteria = extra_criteria
1270
1271 def __getstate__(self):
1272 if self.extra_criteria is not None:
1273 util.warn(
1274 "Can't reliably serialize a lazyload() option that "
1275 "contains additional criteria; please use eager loading "
1276 "for this case"
1277 )
1278 return {
1279 "key": self.key,
1280 "strategy_key": self.strategy_key,
1281 "loadopt": self.loadopt,
1282 "extra_criteria": (),
1283 }
1284
1285 def __call__(self, state, passive=attributes.PASSIVE_OFF):
1286 key = self.key
1287 instance_mapper = state.manager.mapper
1288 prop = instance_mapper._props[key]
1289 strategy = prop._strategies[self.strategy_key]
1290
1291 return strategy._load_for_state(
1292 state,
1293 passive,
1294 loadopt=self.loadopt,
1295 extra_criteria=self.extra_criteria,
1296 )
1297
1298
1299class _PostLoader(_AbstractRelationshipLoader):
1300 """A relationship loader that emits a second SELECT statement."""
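    # subclasses such as _ImmediateLoader and _SubqueryLoader emit that
    # second statement after the parent rows have been processed.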
1301
1302 __slots__ = ()
1303
1304 def _setup_for_recursion(self, context, path, loadopt, join_depth=None):
1305 effective_path = (
1306 context.compile_state.current_path or orm_util.PathRegistry.root
1307 ) + path
1308
1309 top_level_context = context._get_top_level_context()
1310 execution_options = util.immutabledict(
1311 {"sa_top_level_orm_context": top_level_context}
1312 )
1313
1314 if loadopt:
1315 recursion_depth = loadopt.local_opts.get("recursion_depth", None)
1316 unlimited_recursion = recursion_depth == -1
1317 else:
1318 recursion_depth = None
1319 unlimited_recursion = False
1320
1321 if recursion_depth is not None:
1322 if not self.parent_property._is_self_referential:
1323 raise sa_exc.InvalidRequestError(
1324 f"recursion_depth option on relationship "
1325 f"{self.parent_property} not valid for "
1326 "non-self-referential relationship"
1327 )
1328 recursion_depth = context.execution_options.get(
1329 f"_recursion_depth_{id(self)}", recursion_depth
1330 )
1331
1332 if not unlimited_recursion and recursion_depth < 0:
1333 return (
1334 effective_path,
1335 False,
1336 execution_options,
1337 recursion_depth,
1338 )
1339
1340 if not unlimited_recursion:
1341 execution_options = execution_options.union(
1342 {
1343 f"_recursion_depth_{id(self)}": recursion_depth - 1,
1344 }
1345 )
1346
1347 if loading._PostLoad.path_exists(
1348 context, effective_path, self.parent_property
1349 ):
1350 return effective_path, False, execution_options, recursion_depth
1351
1352 path_w_prop = path[self.parent_property]
1353 effective_path_w_prop = effective_path[self.parent_property]
1354
1355 if not path_w_prop.contains(context.attributes, "loader"):
1356 if join_depth:
1357 if effective_path_w_prop.length / 2 > join_depth:
1358 return (
1359 effective_path,
1360 False,
1361 execution_options,
1362 recursion_depth,
1363 )
1364 elif effective_path_w_prop.contains_mapper(self.mapper):
1365 return (
1366 effective_path,
1367 False,
1368 execution_options,
1369 recursion_depth,
1370 )
1371
1372 return effective_path, True, execution_options, recursion_depth
1373
1374
1375@relationships.RelationshipProperty.strategy_for(lazy="immediate")
1376class _ImmediateLoader(_PostLoader):
1377 __slots__ = ("join_depth",)
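    # e.g. (illustrative):
    #
    #   select(Parent).options(immediateload(Parent.children))
    #
    # loads the related attribute for each parent object with its own
    # SELECT (or identity-map lookup) after the parent rows are loaded.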
1378
1379 def __init__(self, parent, strategy_key):
1380 super().__init__(parent, strategy_key)
1381 self.join_depth = self.parent_property.join_depth
1382
1383 def init_class_attribute(self, mapper):
1384 self.parent_property._get_strategy(
1385 (("lazy", "select"),)
1386 ).init_class_attribute(mapper)
1387
1388 def create_row_processor(
1389 self,
1390 context,
1391 query_entity,
1392 path,
1393 loadopt,
1394 mapper,
1395 result,
1396 adapter,
1397 populators,
1398 ):
1399 if not context.compile_state.compile_options._enable_eagerloads:
1400 return
1401
1402 (
1403 effective_path,
1404 run_loader,
1405 execution_options,
1406 recursion_depth,
1407 ) = self._setup_for_recursion(context, path, loadopt, self.join_depth)
1408
1409 if not run_loader:
            # this will not emit SQL and will only emit for a many-to-one
            # "use get" load.  the "_RELATED" part means it may return an
            # instance even if it's expired, since this is a
            # mutually-recursive load operation.
1414 flags = attributes.PASSIVE_NO_FETCH_RELATED | PassiveFlag.NO_RAISE
1415 else:
1416 flags = attributes.PASSIVE_OFF | PassiveFlag.NO_RAISE
1417
1418 loading._PostLoad.callable_for_path(
1419 context,
1420 effective_path,
1421 self.parent,
1422 self.parent_property,
1423 self._load_for_path,
1424 loadopt,
1425 flags,
1426 recursion_depth,
1427 execution_options,
1428 )
1429
1430 def _load_for_path(
1431 self,
1432 context,
1433 path,
1434 states,
1435 load_only,
1436 loadopt,
1437 flags,
1438 recursion_depth,
1439 execution_options,
1440 ):
1441 if recursion_depth:
1442 new_opt = Load(loadopt.path.entity)
1443 new_opt.context = (
1444 loadopt,
1445 loadopt._recurse(),
1446 )
1447 alternate_effective_path = path._truncate_recursive()
1448 extra_options = (new_opt,)
1449 else:
1450 alternate_effective_path = path
1451 extra_options = ()
1452
1453 key = self.key
1454 lazyloader = self.parent_property._get_strategy((("lazy", "select"),))
1455 for state, overwrite in states:
1456 dict_ = state.dict
1457
1458 if overwrite or key not in dict_:
1459 value = lazyloader._load_for_state(
1460 state,
1461 flags,
1462 extra_options=extra_options,
1463 alternate_effective_path=alternate_effective_path,
1464 execution_options=execution_options,
1465 )
1466 if value not in (
1467 ATTR_WAS_SET,
1468 LoaderCallableStatus.PASSIVE_NO_RESULT,
1469 ):
1470 state.get_impl(key).set_committed_value(
1471 state, dict_, value
1472 )
1473
1474
1475@log.class_logger
1476@relationships.RelationshipProperty.strategy_for(lazy="subquery")
1477class _SubqueryLoader(_PostLoader):
1478 __slots__ = ("join_depth",)
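    # e.g. (illustrative):
    #
    #   select(Parent).options(subqueryload(Parent.children))
    #
    # re-states the original query as a subquery and joins it to the
    # related table, loading all collections in one additional SELECT.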
1479
1480 def __init__(self, parent, strategy_key):
1481 super().__init__(parent, strategy_key)
1482 self.join_depth = self.parent_property.join_depth
1483
1484 def init_class_attribute(self, mapper):
1485 self.parent_property._get_strategy(
1486 (("lazy", "select"),)
1487 ).init_class_attribute(mapper)
1488
1489 def _get_leftmost(
1490 self,
1491 orig_query_entity_index,
1492 subq_path,
1493 current_compile_state,
1494 is_root,
1495 ):
1496 given_subq_path = subq_path
1497 subq_path = subq_path.path
1498 subq_mapper = orm_util._class_to_mapper(subq_path[0])
1499
1500 # determine attributes of the leftmost mapper
1501 if (
1502 self.parent.isa(subq_mapper)
1503 and self.parent_property is subq_path[1]
1504 ):
1505 leftmost_mapper, leftmost_prop = self.parent, self.parent_property
1506 else:
1507 leftmost_mapper, leftmost_prop = subq_mapper, subq_path[1]
1508
1509 if is_root:
            # the subq_path is also coming from cached state, so when we
            # start building up this path, it has to also be converted to
            # be in terms of the current state.  this is for the specific
            # case where the entity is an AliasedClass against a subquery
            # that's not otherwise going to adapt
1515 new_subq_path = current_compile_state._entities[
1516 orig_query_entity_index
1517 ].entity_zero._path_registry[leftmost_prop]
1518 additional = len(subq_path) - len(new_subq_path)
1519 if additional:
1520 new_subq_path += path_registry.PathRegistry.coerce(
1521 subq_path[-additional:]
1522 )
1523 else:
1524 new_subq_path = given_subq_path
1525
1526 leftmost_cols = leftmost_prop.local_columns
1527
1528 leftmost_attr = [
1529 getattr(
1530 new_subq_path.path[0].entity,
1531 leftmost_mapper._columntoproperty[c].key,
1532 )
1533 for c in leftmost_cols
1534 ]
1535
1536 return leftmost_mapper, leftmost_attr, leftmost_prop, new_subq_path
1537
1538 def _generate_from_original_query(
1539 self,
1540 orig_compile_state,
1541 orig_query,
1542 leftmost_mapper,
1543 leftmost_attr,
1544 leftmost_relationship,
1545 orig_entity,
1546 ):
1547 # reformat the original query
1548 # to look only for significant columns
1549 q = orig_query._clone().correlate(None)
1550
1551 # LEGACY: make a Query back from the select() !!
1552 # This suits at least two legacy cases:
1553 # 1. applications which expect before_compile() to be called
1554 # below when we run .subquery() on this query (Keystone)
1555 # 2. applications which are doing subqueryload with complex
1556 # from_self() queries, as query.subquery() / .statement
1557 # has to do the full compile context for multiply-nested
1558 # from_self() (Neutron) - see test_subqload_from_self
1559 # for demo.
1560 q2 = query.Query.__new__(query.Query)
1561 q2.__dict__.update(q.__dict__)
1562 q = q2
1563
1564 # set the query's "FROM" list explicitly to what the
1565 # FROM list would be in any case, as we will be limiting
1566 # the columns in the SELECT list which may no longer include
1567 # all entities mentioned in things like WHERE, JOIN, etc.
1568 if not q._from_obj:
1569 q._enable_assertions = False
1570 q.select_from.non_generative(
1571 q,
1572 *{
1573 ent["entity"]
1574 for ent in _column_descriptions(
1575 orig_query, compile_state=orig_compile_state
1576 )
1577 if ent["entity"] is not None
1578 },
1579 )
1580
1581 # select from the identity columns of the outer (specifically, these
1582 # are the 'local_cols' of the property). This will remove other
1583 # columns from the query that might suggest the right entity which is
1584 # why we do set select_from above. The attributes we have are
1585 # coerced and adapted using the original query's adapter, which is
1586 # needed only for the case of adapting a subclass column to
1587 # that of a polymorphic selectable, e.g. we have
1588 # Engineer.primary_language and the entity is Person. All other
1589 # adaptations, e.g. from_self, select_entity_from(), will occur
1590 # within the new query when it compiles, as the compile_state we are
1591 # using here is only a partial one. If the subqueryload is from a
1592 # with_polymorphic() or other aliased() object, left_attr will already
1593 # be the correct attributes so no adaptation is needed.
1594 target_cols = orig_compile_state._adapt_col_list(
1595 [
1596 sql.coercions.expect(sql.roles.ColumnsClauseRole, o)
1597 for o in leftmost_attr
1598 ],
1599 orig_compile_state._get_current_adapter(),
1600 )
1601 q._raw_columns = target_cols
1602
1603 distinct_target_key = leftmost_relationship.distinct_target_key
1604
1605 if distinct_target_key is True:
1606 q._distinct = True
1607 elif distinct_target_key is None:
1608 # if target_cols refer to a non-primary key or only
1609 # part of a composite primary key, set the q as distinct
1610 for t in {c.table for c in target_cols}:
1611 if not set(target_cols).issuperset(t.primary_key):
1612 q._distinct = True
1613 break
1614
1615 # don't need ORDER BY if no limit/offset
1616 if not q._has_row_limiting_clause:
1617 q._order_by_clauses = ()
1618
1619 if q._distinct is True and q._order_by_clauses:
1620 # the logic to automatically add the order by columns to the query
1621 # when distinct is True is deprecated in the query
1622 to_add = sql_util.expand_column_list_from_order_by(
1623 target_cols, q._order_by_clauses
1624 )
1625 if to_add:
1626 q._set_entities(target_cols + to_add)
1627
1628 # the original query now becomes a subquery
1629 # which we'll join onto.
1630 # LEGACY: as "q" is a Query, the before_compile() event is invoked
1631 # here.
1632 embed_q = q.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL).subquery()
1633 left_alias = orm_util.AliasedClass(
1634 leftmost_mapper, embed_q, use_mapper_path=True
1635 )
1636 return left_alias
1637
1638 def _prep_for_joins(self, left_alias, subq_path):
1639 # figure out what's being joined. a.k.a. the fun part
1640 to_join = []
1641 pairs = list(subq_path.pairs())
1642
1643 for i, (mapper, prop) in enumerate(pairs):
1644 if i > 0:
1645 # look at the previous mapper in the chain -
1646 # if it is as or more specific than this prop's
1647 # mapper, use that instead.
1648 # note we have an assumption here that
1649 # the non-first element is always going to be a mapper,
1650 # not an AliasedClass
1651
1652 prev_mapper = pairs[i - 1][1].mapper
1653 to_append = prev_mapper if prev_mapper.isa(mapper) else mapper
1654 else:
1655 to_append = mapper
1656
1657 to_join.append((to_append, prop.key))
1658
1659 # determine the immediate parent class we are joining from,
1660 # which needs to be aliased.
1661
1662 if len(to_join) < 2:
1663 # in the case of a one level eager load, this is the
1664 # leftmost "left_alias".
1665 parent_alias = left_alias
1666 else:
1667 info = inspect(to_join[-1][0])
1668 if info.is_aliased_class:
1669 parent_alias = info.entity
1670 else:
1671 # alias a plain mapper as we may be
1672 # joining multiple times
1673 parent_alias = orm_util.AliasedClass(
1674 info.entity, use_mapper_path=True
1675 )
1676
1677 local_cols = self.parent_property.local_columns
1678
1679 local_attr = [
1680 getattr(parent_alias, self.parent._columntoproperty[c].key)
1681 for c in local_cols
1682 ]
1683 return to_join, local_attr, parent_alias
1684
1685 def _apply_joins(
1686 self, q, to_join, left_alias, parent_alias, effective_entity
1687 ):
1688 ltj = len(to_join)
1689 if ltj == 1:
1690 to_join = [
1691 getattr(left_alias, to_join[0][1]).of_type(effective_entity)
1692 ]
1693 elif ltj == 2:
1694 to_join = [
1695 getattr(left_alias, to_join[0][1]).of_type(parent_alias),
1696 getattr(parent_alias, to_join[-1][1]).of_type(
1697 effective_entity
1698 ),
1699 ]
1700 elif ltj > 2:
1701 middle = [
1702 (
1703 (
1704 orm_util.AliasedClass(item[0])
1705 if not inspect(item[0]).is_aliased_class
1706 else item[0].entity
1707 ),
1708 item[1],
1709 )
1710 for item in to_join[1:-1]
1711 ]
1712 inner = []
1713
1714 while middle:
1715 item = middle.pop(0)
1716 attr = getattr(item[0], item[1])
1717 if middle:
1718 attr = attr.of_type(middle[0][0])
1719 else:
1720 attr = attr.of_type(parent_alias)
1721
1722 inner.append(attr)
1723
1724 to_join = (
1725 [getattr(left_alias, to_join[0][1]).of_type(inner[0].parent)]
1726 + inner
1727 + [
1728 getattr(parent_alias, to_join[-1][1]).of_type(
1729 effective_entity
1730 )
1731 ]
1732 )
1733
1734 for attr in to_join:
1735 q = q.join(attr)
1736
1737 return q
1738
1739 def _setup_options(
1740 self,
1741 context,
1742 q,
1743 subq_path,
1744 rewritten_path,
1745 orig_query,
1746 effective_entity,
1747 loadopt,
1748 ):
1749 # note that because the subqueryload object
1750 # does not re-use the cached query, instead always making
1751 # use of the current invoked query, while we have two queries
1752 # here (orig and context.query), they are both non-cached
1753 # queries and we can transfer the options as is without
1754 # adjusting for new criteria. Some work on #6881 / #6889
1755 # brought this into question.
1756 new_options = orig_query._with_options
1757
1758 if loadopt and loadopt._extra_criteria:
1759 new_options += (
1760 orm_util.LoaderCriteriaOption(
1761 self.entity,
1762 loadopt._generate_extra_criteria(context),
1763 ),
1764 )
1765
1766 # propagate loader options etc. to the new query.
1767 # these will fire relative to subq_path.
1768 q = q._with_current_path(rewritten_path)
1769 q = q.options(*new_options)
1770
1771 return q
1772
1773 def _setup_outermost_orderby(self, q):
1774 if self.parent_property.order_by:
1775
1776 def _setup_outermost_orderby(compile_context):
1777 compile_context.eager_order_by += tuple(
1778 util.to_list(self.parent_property.order_by)
1779 )
1780
1781 q = q._add_compile_state_func(
1782 _setup_outermost_orderby, self.parent_property
1783 )
1784
1785 return q
1786
1787 class _SubqCollections:
1788 """Given a :class:`_query.Query` used to emit the "subquery load",
1789 provide a load interface that executes the query at the
1790 first moment a value is needed.
1791
1792 """
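        # the query is executed lazily: get() or loader() invokes _load()
        # the first time a value is needed; rows are then grouped by the
        # parent key columns so collections can be looked up per parent row.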
1793
1794 __slots__ = (
1795 "session",
1796 "execution_options",
1797 "load_options",
1798 "params",
1799 "subq",
1800 "_data",
1801 )
1802
1803 def __init__(self, context, subq):
            # avoid creating a reference cycle: store pieces of the context
            # rather than the context itself, even though keeping the whole
            # context would be preferable
1806 self.session = context.session
1807 self.execution_options = context.execution_options
1808 self.load_options = context.load_options
1809 self.params = context.params or {}
1810 self.subq = subq
1811 self._data = None
1812
1813 def get(self, key, default):
1814 if self._data is None:
1815 self._load()
1816 return self._data.get(key, default)
1817
1818 def _load(self):
1819 self._data = collections.defaultdict(list)
1820
1821 q = self.subq
1822 assert q.session is None
1823
1824 q = q.with_session(self.session)
1825
1826 if self.load_options._populate_existing:
1827 q = q.populate_existing()
1828 # to work with baked query, the parameters may have been
1829 # updated since this query was created, so take these into account
1830
1831 rows = list(q.params(self.params))
1832 for k, v in itertools.groupby(rows, lambda x: x[1:]):
1833 self._data[k].extend(vv[0] for vv in v)
1834
1835 def loader(self, state, dict_, row):
1836 if self._data is None:
1837 self._load()
1838
1839 def _setup_query_from_rowproc(
1840 self,
1841 context,
1842 query_entity,
1843 path,
1844 entity,
1845 loadopt,
1846 adapter,
1847 ):
1848 compile_state = context.compile_state
1849 if (
1850 not compile_state.compile_options._enable_eagerloads
1851 or compile_state.compile_options._for_refresh_state
1852 ):
1853 return
1854
1855 orig_query_entity_index = compile_state._entities.index(query_entity)
1856 context.loaders_require_buffering = True
1857
1858 path = path[self.parent_property]
1859
1860 # build up a path indicating the path from the leftmost
1861 # entity to the thing we're subquery loading.
1862 with_poly_entity = path.get(
1863 compile_state.attributes, "path_with_polymorphic", None
1864 )
1865 if with_poly_entity is not None:
1866 effective_entity = with_poly_entity
1867 else:
1868 effective_entity = self.entity
1869
1870 subq_path, rewritten_path = context.query._execution_options.get(
1871 ("subquery_paths", None),
1872 (orm_util.PathRegistry.root, orm_util.PathRegistry.root),
1873 )
1874 is_root = subq_path is orm_util.PathRegistry.root
1875 subq_path = subq_path + path
1876 rewritten_path = rewritten_path + path
1877
        # use the current query being invoked, not the compile state
        # one.  this is so that we get the current parameters.  however,
        # it means we can't use the existing compile state, and have to
        # make a new one.  other approaches include possibly using the
        # compiled query but swapping the params; that seems only
        # marginally less time spent but more complicated
1884 orig_query = context.query._execution_options.get(
1885 ("orig_query", _SubqueryLoader), context.query
1886 )
1887
1888 # make a new compile_state for the query that's probably cached, but
1889 # we're sort of undoing a bit of that caching :(
1890 compile_state_cls = _ORMCompileState._get_plugin_class_for_plugin(
1891 orig_query, "orm"
1892 )
1893
1894 if orig_query._is_lambda_element:
1895 if context.load_options._lazy_loaded_from is None:
1896 util.warn(
1897 'subqueryloader for "%s" must invoke lambda callable '
1898 "at %r in "
1899 "order to produce a new query, decreasing the efficiency "
1900 "of caching for this statement. Consider using "
1901 "selectinload() for more effective full-lambda caching"
1902 % (self, orig_query)
1903 )
1904 orig_query = orig_query._resolved
1905
1906 # this is the more "quick" version, however it's not clear how
1907 # much of this we need. in particular I can't get a test to
1908 # fail if the "set_base_alias" is missing and not sure why that is.
1909 orig_compile_state = compile_state_cls._create_entities_collection(
1910 orig_query, legacy=False
1911 )
1912
1913 (
1914 leftmost_mapper,
1915 leftmost_attr,
1916 leftmost_relationship,
1917 rewritten_path,
1918 ) = self._get_leftmost(
1919 orig_query_entity_index,
1920 rewritten_path,
1921 orig_compile_state,
1922 is_root,
1923 )
1924
1925 # generate a new Query from the original, then
1926 # produce a subquery from it.
1927 left_alias = self._generate_from_original_query(
1928 orig_compile_state,
1929 orig_query,
1930 leftmost_mapper,
1931 leftmost_attr,
1932 leftmost_relationship,
1933 entity,
1934 )
1935
1936 # generate another Query that will join the
1937 # left alias to the target relationships.
1938 # basically doing a longhand
1939 # "from_self()". (from_self() itself not quite industrial
1940 # strength enough for all contingencies...but very close)
1941
1942 q = query.Query(effective_entity)
1943
1944 q._execution_options = context.query._execution_options.merge_with(
1945 context.execution_options,
1946 {
1947 ("orig_query", _SubqueryLoader): orig_query,
1948 ("subquery_paths", None): (subq_path, rewritten_path),
1949 },
1950 )
1951
1952 q = q._set_enable_single_crit(False)
1953 to_join, local_attr, parent_alias = self._prep_for_joins(
1954 left_alias, subq_path
1955 )
1956
1957 q = q.add_columns(*local_attr)
1958 q = self._apply_joins(
1959 q, to_join, left_alias, parent_alias, effective_entity
1960 )
1961
1962 q = self._setup_options(
1963 context,
1964 q,
1965 subq_path,
1966 rewritten_path,
1967 orig_query,
1968 effective_entity,
1969 loadopt,
1970 )
1971 q = self._setup_outermost_orderby(q)
1972
1973 return q
1974
1975 def create_row_processor(
1976 self,
1977 context,
1978 query_entity,
1979 path,
1980 loadopt,
1981 mapper,
1982 result,
1983 adapter,
1984 populators,
1985 ):
1986 if (
1987 loadopt
1988 and context.compile_state.statement is not None
1989 and context.compile_state.statement.is_dml
1990 ):
1991 util.warn_deprecated(
1992 "The subqueryload loader option is not compatible with DML "
1993 "statements such as INSERT, UPDATE. Only SELECT may be used."
1994 "This warning will become an exception in a future release.",
1995 "2.0",
1996 )
1997
1998 if context.refresh_state:
1999 return self._immediateload_create_row_processor(
2000 context,
2001 query_entity,
2002 path,
2003 loadopt,
2004 mapper,
2005 result,
2006 adapter,
2007 populators,
2008 )
2009
2010 _, run_loader, _, _ = self._setup_for_recursion(
2011 context, path, loadopt, self.join_depth
2012 )
2013 if not run_loader:
2014 return
2015
2016 if not isinstance(context.compile_state, _ORMSelectCompileState):
2017 # issue 7505 - subqueryload() in 1.3 and previous would silently
2018 # degrade for from_statement() without warning. this behavior
2019 # is restored here
2020 return
2021
2022 if not self.parent.class_manager[self.key].impl.supports_population:
2023 raise sa_exc.InvalidRequestError(
2024 "'%s' does not support object "
2025 "population - eager loading cannot be applied." % self
2026 )
2027
        # a little dance here, as the "path" only semi-tracks the exact
        # series of things we are loading; in particular it does not tell
        # us about with_polymorphic() and similar constructs when it's at
        # the root.  the initial MapperEntity is more accurate for this case.
2032 if len(path) == 1:
2033 if not orm_util._entity_isa(query_entity.entity_zero, self.parent):
2034 return
2035 elif not orm_util._entity_isa(path[-1], self.parent):
2036 return
2037
2038 subq = self._setup_query_from_rowproc(
2039 context,
2040 query_entity,
2041 path,
2042 path[-1],
2043 loadopt,
2044 adapter,
2045 )
2046
2047 if subq is None:
2048 return
2049
2050 assert subq.session is None
2051
2052 path = path[self.parent_property]
2053
2054 local_cols = self.parent_property.local_columns
2055
2056 # cache the loaded collections in the context
2057 # so that inheriting mappers don't re-load when they
2058 # call upon create_row_processor again
2059 collections = path.get(context.attributes, "collections")
2060 if collections is None:
2061 collections = self._SubqCollections(context, subq)
2062 path.set(context.attributes, "collections", collections)
2063
2064 if adapter:
2065 local_cols = [adapter.columns[c] for c in local_cols]
2066
2067 if self.uselist:
2068 self._create_collection_loader(
2069 context, result, collections, local_cols, populators
2070 )
2071 else:
2072 self._create_scalar_loader(
2073 context, result, collections, local_cols, populators
2074 )
2075
2076 def _create_collection_loader(
2077 self, context, result, collections, local_cols, populators
2078 ):
2079 tuple_getter = result._tuple_getter(local_cols)
2080
2081 def load_collection_from_subq(state, dict_, row):
2082 collection = collections.get(tuple_getter(row), ())
2083 state.get_impl(self.key).set_committed_value(
2084 state, dict_, collection
2085 )
2086
2087 def load_collection_from_subq_existing_row(state, dict_, row):
2088 if self.key not in dict_:
2089 load_collection_from_subq(state, dict_, row)
2090
2091 populators["new"].append((self.key, load_collection_from_subq))
2092 populators["existing"].append(
2093 (self.key, load_collection_from_subq_existing_row)
2094 )
2095
2096 if context.invoke_all_eagers:
2097 populators["eager"].append((self.key, collections.loader))
2098
2099 def _create_scalar_loader(
2100 self, context, result, collections, local_cols, populators
2101 ):
2102 tuple_getter = result._tuple_getter(local_cols)
2103
2104 def load_scalar_from_subq(state, dict_, row):
2105 collection = collections.get(tuple_getter(row), (None,))
2106 if len(collection) > 1:
2107 util.warn(
2108 "Multiple rows returned with "
2109 "uselist=False for eagerly-loaded attribute '%s' " % self
2110 )
2111
2112 scalar = collection[0]
2113 state.get_impl(self.key).set_committed_value(state, dict_, scalar)
2114
2115 def load_scalar_from_subq_existing_row(state, dict_, row):
2116 if self.key not in dict_:
2117 load_scalar_from_subq(state, dict_, row)
2118
2119 populators["new"].append((self.key, load_scalar_from_subq))
2120 populators["existing"].append(
2121 (self.key, load_scalar_from_subq_existing_row)
2122 )
2123 if context.invoke_all_eagers:
2124 populators["eager"].append((self.key, collections.loader))
2125
2126
2127@log.class_logger
2128@relationships.RelationshipProperty.strategy_for(lazy="joined")
2129@relationships.RelationshipProperty.strategy_for(lazy=False)
2130class _JoinedLoader(_AbstractRelationshipLoader):
2131 """Provide loading behavior for a :class:`.Relationship`
2132 using joined eager loading.
2133
2134 """
2135
2136 __slots__ = "join_depth"
2137
2138 def __init__(self, parent, strategy_key):
2139 super().__init__(parent, strategy_key)
2140 self.join_depth = self.parent_property.join_depth
2141
2142 def init_class_attribute(self, mapper):
2143 self.parent_property._get_strategy(
2144 (("lazy", "select"),)
2145 ).init_class_attribute(mapper)
2146
2147 def setup_query(
2148 self,
2149 compile_state,
2150 query_entity,
2151 path,
2152 loadopt,
2153 adapter,
2154 column_collection=None,
2155 parentmapper=None,
2156 chained_from_outerjoin=False,
2157 **kwargs,
2158 ):
2159 """Add a left outer join to the statement that's being constructed."""
2160
2161 if not compile_state.compile_options._enable_eagerloads:
2162 return
2163 elif (
2164 loadopt
2165 and compile_state.statement is not None
2166 and compile_state.statement.is_dml
2167 ):
2168 util.warn_deprecated(
2169 "The joinedload loader option is not compatible with DML "
2170 "statements such as INSERT, UPDATE. Only SELECT may be used."
2171 "This warning will become an exception in a future release.",
2172 "2.0",
2173 )
2174 elif self.uselist:
2175 compile_state.multi_row_eager_loaders = True
2176
2177 path = path[self.parent_property]
2178
2179 user_defined_adapter = (
2180 self._init_user_defined_eager_proc(
2181 loadopt, compile_state, compile_state.attributes
2182 )
2183 if loadopt
2184 else False
2185 )
2186
2187 if user_defined_adapter is not False:
            # set up an adapter but don't create any JOIN; assume it's
            # already in the query
2190 (
2191 clauses,
2192 adapter,
2193 add_to_collection,
2194 ) = self._setup_query_on_user_defined_adapter(
2195 compile_state,
2196 query_entity,
2197 path,
2198 adapter,
2199 user_defined_adapter,
2200 )
2201
2202 # don't do "wrap" for multi-row, we want to wrap
2203 # limited/distinct SELECT,
2204 # because we want to put the JOIN on the outside.
2205
2206 else:
2207 # if not via query option, check for
2208 # a cycle
2209 if not path.contains(compile_state.attributes, "loader"):
2210 if self.join_depth:
2211 if path.length / 2 > self.join_depth:
2212 return
2213 elif path.contains_mapper(self.mapper):
2214 return
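
            # an illustrative note on the depth check above: for a
            # hypothetical self-referential relationship such as
            #
            #     children = relationship("Node", lazy="joined", join_depth=2)
            #
            # the path alternates mapper / relationship entries, so
            # path.length / 2 counts how many times this relationship has
            # been chained; once that exceeds join_depth, no further JOIN
            # is added.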
2215
2216 # add the JOIN and create an adapter
2217 (
2218 clauses,
2219 adapter,
2220 add_to_collection,
2221 chained_from_outerjoin,
2222 ) = self._generate_row_adapter(
2223 compile_state,
2224 query_entity,
2225 path,
2226 loadopt,
2227 adapter,
2228 column_collection,
2229 parentmapper,
2230 chained_from_outerjoin,
2231 )
2232
2233 # for multi-row, we want to wrap limited/distinct SELECT,
2234 # because we want to put the JOIN on the outside.
2235 compile_state.eager_adding_joins = True
2236
2237 with_poly_entity = path.get(
2238 compile_state.attributes, "path_with_polymorphic", None
2239 )
2240 if with_poly_entity is not None:
2241 with_polymorphic = inspect(
2242 with_poly_entity
2243 ).with_polymorphic_mappers
2244 else:
2245 with_polymorphic = None
2246
2247 path = path[self.entity]
2248
2249 loading._setup_entity_query(
2250 compile_state,
2251 self.mapper,
2252 query_entity,
2253 path,
2254 clauses,
2255 add_to_collection,
2256 with_polymorphic=with_polymorphic,
2257 parentmapper=self.mapper,
2258 chained_from_outerjoin=chained_from_outerjoin,
2259 )
2260
2261 has_nones = util.NONE_SET.intersection(compile_state.secondary_columns)
2262
2263 if has_nones:
2264 if with_poly_entity is not None:
2265 raise sa_exc.InvalidRequestError(
2266 "Detected unaliased columns when generating joined "
2267 "load. Make sure to use aliased=True or flat=True "
2268 "when using joined loading with with_polymorphic()."
2269 )
2270 else:
2271 compile_state.secondary_columns = [
2272 c for c in compile_state.secondary_columns if c is not None
2273 ]
2274
2275 def _init_user_defined_eager_proc(
2276 self, loadopt, compile_state, target_attributes
2277 ):
2278 # check if the opt applies at all
2279 if "eager_from_alias" not in loadopt.local_opts:
2280 # nope
2281 return False
2282
2283 path = loadopt.path.parent
2284
2285 # the option applies. check if the "user_defined_eager_row_processor"
2286 # has been built up.
2287 adapter = path.get(
2288 compile_state.attributes, "user_defined_eager_row_processor", False
2289 )
2290 if adapter is not False:
2291 # just return it
2292 return adapter
2293
2294 # otherwise figure it out.
2295 alias = loadopt.local_opts["eager_from_alias"]
2296 root_mapper, prop = path[-2:]
2297
2298 if alias is not None:
2299 if isinstance(alias, str):
2300 alias = prop.target.alias(alias)
2301 adapter = orm_util.ORMAdapter(
2302 orm_util._TraceAdaptRole.JOINEDLOAD_USER_DEFINED_ALIAS,
2303 prop.mapper,
2304 selectable=alias,
2305 equivalents=prop.mapper._equivalent_columns,
2306 limit_on_entity=False,
2307 )
2308 else:
2309 if path.contains(
2310 compile_state.attributes, "path_with_polymorphic"
2311 ):
2312 with_poly_entity = path.get(
2313 compile_state.attributes, "path_with_polymorphic"
2314 )
2315 adapter = orm_util.ORMAdapter(
2316 orm_util._TraceAdaptRole.JOINEDLOAD_PATH_WITH_POLYMORPHIC,
2317 with_poly_entity,
2318 equivalents=prop.mapper._equivalent_columns,
2319 )
2320 else:
2321 adapter = compile_state._polymorphic_adapters.get(
2322 prop.mapper, None
2323 )
2324 path.set(
2325 target_attributes,
2326 "user_defined_eager_row_processor",
2327 adapter,
2328 )
2329
2330 return adapter
2331
2332 def _setup_query_on_user_defined_adapter(
2333 self, context, entity, path, adapter, user_defined_adapter
2334 ):
2335 # apply some more wrapping to the "user defined adapter"
2336 # if we are setting up the query for SQL render.
2337 adapter = entity._get_entity_clauses(context)
2338
2339 if adapter and user_defined_adapter:
2340 user_defined_adapter = user_defined_adapter.wrap(adapter)
2341 path.set(
2342 context.attributes,
2343 "user_defined_eager_row_processor",
2344 user_defined_adapter,
2345 )
2346 elif adapter:
2347 user_defined_adapter = adapter
2348 path.set(
2349 context.attributes,
2350 "user_defined_eager_row_processor",
2351 user_defined_adapter,
2352 )
2353
2354 add_to_collection = context.primary_columns
2355 return user_defined_adapter, adapter, add_to_collection
2356
2357 def _generate_row_adapter(
2358 self,
2359 compile_state,
2360 entity,
2361 path,
2362 loadopt,
2363 adapter,
2364 column_collection,
2365 parentmapper,
2366 chained_from_outerjoin,
2367 ):
2368 with_poly_entity = path.get(
2369 compile_state.attributes, "path_with_polymorphic", None
2370 )
2371 if with_poly_entity:
2372 to_adapt = with_poly_entity
2373 else:
2374 insp = inspect(self.entity)
2375 if insp.is_aliased_class:
2376 alt_selectable = insp.selectable
2377 else:
2378 alt_selectable = None
2379
2380 to_adapt = orm_util.AliasedClass(
2381 self.mapper,
2382 alias=(
2383 alt_selectable._anonymous_fromclause(flat=True)
2384 if alt_selectable is not None
2385 else None
2386 ),
2387 flat=True,
2388 use_mapper_path=True,
2389 )
2390
2391 to_adapt_insp = inspect(to_adapt)
2392
2393 clauses = to_adapt_insp._memo(
2394 ("joinedloader_ormadapter", self),
2395 orm_util.ORMAdapter,
2396 orm_util._TraceAdaptRole.JOINEDLOAD_MEMOIZED_ADAPTER,
2397 to_adapt_insp,
2398 equivalents=self.mapper._equivalent_columns,
2399 adapt_required=True,
2400 allow_label_resolve=False,
2401 anonymize_labels=True,
2402 )
2403
2404 assert clauses.is_aliased_class
2405
2406 innerjoin = (
2407 loadopt.local_opts.get("innerjoin", self.parent_property.innerjoin)
2408 if loadopt is not None
2409 else self.parent_property.innerjoin
2410 )
2411
2412 if not innerjoin:
2413 # if this is an outer join, all non-nested eager joins from
2414 # this path must also be outer joins
2415 chained_from_outerjoin = True
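
        # for reference, the innerjoin setting consulted above comes either
        # from relationship(..., innerjoin=...) or from the loader option,
        # e.g. (hypothetical User / Address mapping):
        #
        #     stmt = select(User).options(
        #         joinedload(User.addresses, innerjoin=True)
        #     )
        #
        # when it's left at the default of False, the eager JOIN renders as
        # a LEFT OUTER JOIN and, per the flag just set, eager joins chained
        # from this path stay outer joins as well.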
2416
2417 compile_state.create_eager_joins.append(
2418 (
2419 self._create_eager_join,
2420 entity,
2421 path,
2422 adapter,
2423 parentmapper,
2424 clauses,
2425 innerjoin,
2426 chained_from_outerjoin,
2427 loadopt._extra_criteria if loadopt else (),
2428 )
2429 )
2430
2431 add_to_collection = compile_state.secondary_columns
2432 path.set(compile_state.attributes, "eager_row_processor", clauses)
2433
2434 return clauses, adapter, add_to_collection, chained_from_outerjoin
2435
2436 def _create_eager_join(
2437 self,
2438 compile_state,
2439 query_entity,
2440 path,
2441 adapter,
2442 parentmapper,
2443 clauses,
2444 innerjoin,
2445 chained_from_outerjoin,
2446 extra_criteria,
2447 ):
2448 if parentmapper is None:
2449 localparent = query_entity.mapper
2450 else:
2451 localparent = parentmapper
2452
2453 # whether or not the Query will wrap the selectable in a subquery,
2454 # and then attach eager load joins to that (i.e., in the case of
2455 # LIMIT/OFFSET etc.)
2456 should_nest_selectable = (
2457 compile_state.multi_row_eager_loaders
2458 and compile_state._should_nest_selectable
2459 )
2460
2461 query_entity_key = None
2462
2463 if (
2464 query_entity not in compile_state.eager_joins
2465 and not should_nest_selectable
2466 and compile_state.from_clauses
2467 ):
2468 indexes = sql_util.find_left_clause_that_matches_given(
2469 compile_state.from_clauses, query_entity.selectable
2470 )
2471
2472 if len(indexes) > 1:
2473 # for the eager load case, I can't reproduce this right
2474 # now. For query.join() I can.
2475 raise sa_exc.InvalidRequestError(
2476 "Can't identify which query entity in which to joined "
2477 "eager load from. Please use an exact match when "
2478 "specifying the join path."
2479 )
2480
2481 if indexes:
2482 clause = compile_state.from_clauses[indexes[0]]
2483 # join to an existing FROM clause on the query.
2484 # key it to its list index in the eager_joins dict.
2485 # Query._compile_context will adapt as needed and
2486 # append to the FROM clause of the select().
2487 query_entity_key, default_towrap = indexes[0], clause
2488
2489 if query_entity_key is None:
2490 query_entity_key, default_towrap = (
2491 query_entity,
2492 query_entity.selectable,
2493 )
2494
2495 towrap = compile_state.eager_joins.setdefault(
2496 query_entity_key, default_towrap
2497 )
2498
2499 if adapter:
2500 if getattr(adapter, "is_aliased_class", False):
2501 # joining from an adapted entity. The adapted entity
2502 # might be a "with_polymorphic", so resolve that to our
2503 # specific mapper's entity before looking for our attribute
2504 # name on it.
2505 efm = adapter.aliased_insp._entity_for_mapper(
2506 localparent
2507 if localparent.isa(self.parent)
2508 else self.parent
2509 )
2510
2511 # look for our attribute on the adapted entity, else fall back
2512 # to our straight property
2513 onclause = getattr(efm.entity, self.key, self.parent_property)
2514 else:
2515 onclause = getattr(
2516 orm_util.AliasedClass(
2517 self.parent, adapter.selectable, use_mapper_path=True
2518 ),
2519 self.key,
2520 self.parent_property,
2521 )
2522
2523 else:
2524 onclause = self.parent_property
2525
2526 assert clauses.is_aliased_class
2527
2528 attach_on_outside = (
2529 not chained_from_outerjoin
2530 or not innerjoin
2531 or innerjoin == "unnested"
2532 or query_entity.entity_zero.represents_outer_join
2533 )
2534
2535 extra_join_criteria = extra_criteria
2536 additional_entity_criteria = compile_state.global_attributes.get(
2537 ("additional_entity_criteria", self.mapper), ()
2538 )
2539 if additional_entity_criteria:
2540 extra_join_criteria += tuple(
2541 ae._resolve_where_criteria(self.mapper)
2542 for ae in additional_entity_criteria
2543 if ae.propagate_to_loaders
2544 )
2545
2546 if attach_on_outside:
2547 # this is the "classic" eager join case.
2548 eagerjoin = orm_util._ORMJoin(
2549 towrap,
2550 clauses.aliased_insp,
2551 onclause,
2552 isouter=not innerjoin
2553 or query_entity.entity_zero.represents_outer_join
2554 or (chained_from_outerjoin and isinstance(towrap, sql.Join)),
2555 _left_memo=self.parent,
2556 _right_memo=path[self.mapper],
2557 _extra_criteria=extra_join_criteria,
2558 )
2559 else:
            # all other cases use the innerjoin == 'nested' approach
2561 eagerjoin = self._splice_nested_inner_join(
2562 path, path[-2], towrap, clauses, onclause, extra_join_criteria
2563 )
2564
2565 compile_state.eager_joins[query_entity_key] = eagerjoin
2566
2567 # send a hint to the Query as to where it may "splice" this join
2568 eagerjoin.stop_on = query_entity.selectable
2569
2570 if not parentmapper:
            # for the parent clause that is the non-eager end of the join,
            # ensure all the parent cols in the primaryjoin are actually in
            # the columns clause (i.e. are not deferred), so that aliasing
            # applied by the Query propagates those columns outward.  This
            # has the effect of "undefering" those columns.
2578 for col in sql_util._find_columns(
2579 self.parent_property.primaryjoin
2580 ):
2581 if localparent.persist_selectable.c.contains_column(col):
2582 if adapter:
2583 col = adapter.columns[col]
2584 compile_state._append_dedupe_col_collection(
2585 col, compile_state.primary_columns
2586 )
2587
2588 if self.parent_property.order_by:
2589 compile_state.eager_order_by += tuple(
2590 (eagerjoin._target_adapter.copy_and_process)(
2591 util.to_list(self.parent_property.order_by)
2592 )
2593 )
2594
2595 def _splice_nested_inner_join(
2596 self,
2597 path,
2598 entity_we_want_to_splice_onto,
2599 join_obj,
2600 clauses,
2601 onclause,
2602 extra_criteria,
2603 entity_inside_join_structure: Union[
2604 Mapper, None, Literal[False]
2605 ] = False,
2606 detected_existing_path: Optional[path_registry.PathRegistry] = None,
2607 ):
2608 # recursive fn to splice a nested join into an existing one.
2609 # entity_inside_join_structure=False means this is the outermost call,
2610 # and it should return a value. entity_inside_join_structure=<mapper>
2611 # indicates we've descended into a join and are looking at a FROM
2612 # clause representing this mapper; if this is not
2613 # entity_we_want_to_splice_onto then return None to end the recursive
2614 # branch
2615
2616 assert entity_we_want_to_splice_onto is path[-2]
2617
2618 if entity_inside_join_structure is False:
2619 assert isinstance(join_obj, orm_util._ORMJoin)
2620
2621 if isinstance(join_obj, sql.selectable.FromGrouping):
2622 # FromGrouping - continue descending into the structure
2623 return self._splice_nested_inner_join(
2624 path,
2625 entity_we_want_to_splice_onto,
2626 join_obj.element,
2627 clauses,
2628 onclause,
2629 extra_criteria,
2630 entity_inside_join_structure,
2631 )
2632 elif isinstance(join_obj, orm_util._ORMJoin):
2633 # _ORMJoin - continue descending into the structure
2634
2635 join_right_path = join_obj._right_memo
2636
2637 # see if right side of join is viable
2638 target_join = self._splice_nested_inner_join(
2639 path,
2640 entity_we_want_to_splice_onto,
2641 join_obj.right,
2642 clauses,
2643 onclause,
2644 extra_criteria,
2645 entity_inside_join_structure=(
2646 join_right_path[-1].mapper
2647 if join_right_path is not None
2648 else None
2649 ),
2650 )
2651
2652 if target_join is not None:
2653 # for a right splice, attempt to flatten out
2654 # a JOIN b JOIN c JOIN .. to avoid needless
2655 # parenthesis nesting
2656 if not join_obj.isouter and not target_join.isouter:
2657 eagerjoin = join_obj._splice_into_center(target_join)
2658 else:
2659 eagerjoin = orm_util._ORMJoin(
2660 join_obj.left,
2661 target_join,
2662 join_obj.onclause,
2663 isouter=join_obj.isouter,
2664 _left_memo=join_obj._left_memo,
2665 )
2666
2667 eagerjoin._target_adapter = target_join._target_adapter
2668 return eagerjoin
2669
2670 else:
2671 # see if left side of join is viable
2672 target_join = self._splice_nested_inner_join(
2673 path,
2674 entity_we_want_to_splice_onto,
2675 join_obj.left,
2676 clauses,
2677 onclause,
2678 extra_criteria,
2679 entity_inside_join_structure=join_obj._left_memo,
2680 detected_existing_path=join_right_path,
2681 )
2682
2683 if target_join is not None:
2684 eagerjoin = orm_util._ORMJoin(
2685 target_join,
2686 join_obj.right,
2687 join_obj.onclause,
2688 isouter=join_obj.isouter,
2689 _right_memo=join_obj._right_memo,
2690 )
2691 eagerjoin._target_adapter = target_join._target_adapter
2692 return eagerjoin
2693
        # neither side viable, return None, or fail if this was the
        # topmost call
2696 if entity_inside_join_structure is False:
2697 assert (
2698 False
2699 ), "assertion failed attempting to produce joined eager loads"
2700 return None
2701
2702 # reached an endpoint (e.g. a table that's mapped, or an alias of that
2703 # table). determine if we can use this endpoint to splice onto
2704
2705 # is this the entity we want to splice onto in the first place?
2706 if not entity_we_want_to_splice_onto.isa(entity_inside_join_structure):
2707 return None
2708
        # path check.  if we know the path by which this join endpoint got
        # here, look at the path we are trying to satisfy and see if we're
        # in the wrong place.  This is specifically for when our entity may
2712 # appear more than once in the path, issue #11449
2713 # updated in issue #11965.
2714 if detected_existing_path and len(detected_existing_path) > 2:
2715 # this assertion is currently based on how this call is made,
2716 # where given a join_obj, the call will have these parameters as
2717 # entity_inside_join_structure=join_obj._left_memo
2718 # and entity_inside_join_structure=join_obj._right_memo.mapper
2719 assert detected_existing_path[-3] is entity_inside_join_structure
2720
2721 # from that, see if the path we are targeting matches the
2722 # "existing" path of this join all the way up to the midpoint
2723 # of this join object (e.g. the relationship).
2724 # if not, then this is not our target
2725 #
2726 # a test condition where this test is false looks like:
2727 #
2728 # desired splice: Node->kind->Kind
2729 # path of desired splice: NodeGroup->nodes->Node->kind
2730 # path we've located: NodeGroup->nodes->Node->common_node->Node
2731 #
2732 # above, because we want to splice kind->Kind onto
2733 # NodeGroup->nodes->Node, this is not our path because it actually
2734 # goes more steps than we want into self-referential
2735 # ->common_node->Node
2736 #
2737 # a test condition where this test is true looks like:
2738 #
2739 # desired splice: B->c2s->C2
2740 # path of desired splice: A->bs->B->c2s
2741 # path we've located: A->bs->B->c1s->C1
2742 #
2743 # above, we want to splice c2s->C2 onto B, and the located path
2744 # shows that the join ends with B->c1s->C1. so we will
2745 # add another join onto that, which would create a "branch" that
2746 # we might represent in a pseudopath as:
2747 #
2748 # B->c1s->C1
2749 # ->c2s->C2
2750 #
2751 # i.e. A JOIN B ON <bs> JOIN C1 ON <c1s>
2752 # JOIN C2 ON <c2s>
2753 #
2754
2755 if detected_existing_path[0:-2] != path.path[0:-1]:
2756 return None
2757
2758 return orm_util._ORMJoin(
2759 join_obj,
2760 clauses.aliased_insp,
2761 onclause,
2762 isouter=False,
2763 _left_memo=entity_inside_join_structure,
2764 _right_memo=path[path[-1].mapper],
2765 _extra_criteria=extra_criteria,
2766 )
2767
2768 def _create_eager_adapter(self, context, result, adapter, path, loadopt):
2769 compile_state = context.compile_state
2770
2771 user_defined_adapter = (
2772 self._init_user_defined_eager_proc(
2773 loadopt, compile_state, context.attributes
2774 )
2775 if loadopt
2776 else False
2777 )
2778
2779 if user_defined_adapter is not False:
2780 decorator = user_defined_adapter
2781 # user defined eagerloads are part of the "primary"
2782 # portion of the load.
2783 # the adapters applied to the Query should be honored.
2784 if compile_state.compound_eager_adapter and decorator:
2785 decorator = decorator.wrap(
2786 compile_state.compound_eager_adapter
2787 )
2788 elif compile_state.compound_eager_adapter:
2789 decorator = compile_state.compound_eager_adapter
2790 else:
2791 decorator = path.get(
2792 compile_state.attributes, "eager_row_processor"
2793 )
2794 if decorator is None:
2795 return False
2796
2797 if self.mapper._result_has_identity_key(result, decorator):
2798 return decorator
2799 else:
            # no identity key - don't return a row
            # processor; this will cause a degrade to lazy loading
2802 return False
2803
2804 def create_row_processor(
2805 self,
2806 context,
2807 query_entity,
2808 path,
2809 loadopt,
2810 mapper,
2811 result,
2812 adapter,
2813 populators,
2814 ):
2815
2816 if not context.compile_state.compile_options._enable_eagerloads:
2817 return
2818
2819 if not self.parent.class_manager[self.key].impl.supports_population:
2820 raise sa_exc.InvalidRequestError(
2821 "'%s' does not support object "
2822 "population - eager loading cannot be applied." % self
2823 )
2824
2825 if self.uselist:
2826 context.loaders_require_uniquing = True
2827
2828 our_path = path[self.parent_property]
2829
2830 eager_adapter = self._create_eager_adapter(
2831 context, result, adapter, our_path, loadopt
2832 )
2833
2834 if eager_adapter is not False:
2835 key = self.key
2836
2837 _instance = loading._instance_processor(
2838 query_entity,
2839 self.mapper,
2840 context,
2841 result,
2842 our_path[self.entity],
2843 eager_adapter,
2844 )
2845
2846 if not self.uselist:
2847 self._create_scalar_loader(context, key, _instance, populators)
2848 else:
2849 self._create_collection_loader(
2850 context, key, _instance, populators
2851 )
2852 else:
2853 self.parent_property._get_strategy(
2854 (("lazy", "select"),)
2855 ).create_row_processor(
2856 context,
2857 query_entity,
2858 path,
2859 loadopt,
2860 mapper,
2861 result,
2862 adapter,
2863 populators,
2864 )
2865
2866 def _create_collection_loader(self, context, key, _instance, populators):
2867 def load_collection_from_joined_new_row(state, dict_, row):
2868 # note this must unconditionally clear out any existing collection.
2869 # an existing collection would be present only in the case of
2870 # populate_existing().
2871 collection = attributes.init_state_collection(state, dict_, key)
2872 result_list = util.UniqueAppender(
2873 collection, "append_without_event"
2874 )
2875 context.attributes[(state, key)] = result_list
2876 inst = _instance(row)
2877 if inst is not None:
2878 result_list.append(inst)
2879
2880 def load_collection_from_joined_existing_row(state, dict_, row):
2881 if (state, key) in context.attributes:
2882 result_list = context.attributes[(state, key)]
2883 else:
2884 # appender_key can be absent from context.attributes
2885 # with isnew=False when self-referential eager loading
2886 # is used; the same instance may be present in two
2887 # distinct sets of result columns
2888 collection = attributes.init_state_collection(
2889 state, dict_, key
2890 )
2891 result_list = util.UniqueAppender(
2892 collection, "append_without_event"
2893 )
2894 context.attributes[(state, key)] = result_list
2895 inst = _instance(row)
2896 if inst is not None:
2897 result_list.append(inst)
2898
2899 def load_collection_from_joined_exec(state, dict_, row):
2900 _instance(row)
2901
2902 populators["new"].append(
2903 (self.key, load_collection_from_joined_new_row)
2904 )
2905 populators["existing"].append(
2906 (self.key, load_collection_from_joined_existing_row)
2907 )
2908 if context.invoke_all_eagers:
2909 populators["eager"].append(
2910 (self.key, load_collection_from_joined_exec)
2911 )
2912
2913 def _create_scalar_loader(self, context, key, _instance, populators):
2914 def load_scalar_from_joined_new_row(state, dict_, row):
2915 # set a scalar object instance directly on the parent
2916 # object, bypassing InstrumentedAttribute event handlers.
2917 dict_[key] = _instance(row)
2918
2919 def load_scalar_from_joined_existing_row(state, dict_, row):
2920 # call _instance on the row, even though the object has
2921 # been created, so that we further descend into properties
2922 existing = _instance(row)
2923
2924 # conflicting value already loaded, this shouldn't happen
2925 if key in dict_:
2926 if existing is not dict_[key]:
2927 util.warn(
2928 "Multiple rows returned with "
2929 "uselist=False for eagerly-loaded attribute '%s' "
2930 % self
2931 )
2932 else:
                # this case is when one row has multiple loads of the
                # same entity (e.g. via aliasing), and one has an attribute
                # that the other doesn't.
2936 dict_[key] = existing
2937
2938 def load_scalar_from_joined_exec(state, dict_, row):
2939 _instance(row)
2940
2941 populators["new"].append((self.key, load_scalar_from_joined_new_row))
2942 populators["existing"].append(
2943 (self.key, load_scalar_from_joined_existing_row)
2944 )
2945 if context.invoke_all_eagers:
2946 populators["eager"].append(
2947 (self.key, load_scalar_from_joined_exec)
2948 )
2949
2950
2951@log.class_logger
2952@relationships.RelationshipProperty.strategy_for(lazy="selectin")
2953class _SelectInLoader(_PostLoader, util.MemoizedSlots):
2954 __slots__ = (
2955 "join_depth",
2956 "omit_join",
2957 "_parent_alias",
2958 "_query_info",
2959 "_fallback_query_info",
2960 )
2961
2962 query_info = collections.namedtuple(
2963 "queryinfo",
2964 [
2965 "load_only_child",
2966 "load_with_join",
2967 "in_expr",
2968 "pk_cols",
2969 "zero_idx",
2970 "child_lookup_cols",
2971 ],
2972 )
2973
2974 _chunksize = 500
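
    # note on _chunksize: the list of parent primary keys bound to the
    # "primary_keys" parameter is broken into chunks of at most 500 values,
    # so loading e.g. 1200 parent rows emits three SELECT statements of the
    # rough form (hypothetical "addresses" table)
    #
    #     SELECT ... FROM addresses WHERE addresses.user_id IN (...)
    #
    # each with up to 500 bound values.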
2975
2976 def __init__(self, parent, strategy_key):
2977 super().__init__(parent, strategy_key)
2978 self.join_depth = self.parent_property.join_depth
2979 is_m2o = self.parent_property.direction is interfaces.MANYTOONE
2980
2981 if self.parent_property.omit_join is not None:
2982 self.omit_join = self.parent_property.omit_join
2983 else:
2984 lazyloader = self.parent_property._get_strategy(
2985 (("lazy", "select"),)
2986 )
2987 if is_m2o:
2988 self.omit_join = lazyloader.use_get
2989 else:
2990 self.omit_join = self.parent._get_clause[0].compare(
2991 lazyloader._rev_lazywhere,
2992 use_proxies=True,
2993 compare_keys=False,
2994 equivalents=self.parent._equivalent_columns,
2995 )
2996
2997 if self.omit_join:
2998 if is_m2o:
2999 self._query_info = self._init_for_omit_join_m2o()
3000 self._fallback_query_info = self._init_for_join()
3001 else:
3002 self._query_info = self._init_for_omit_join()
3003 else:
3004 self._query_info = self._init_for_join()
3005
3006 def _init_for_omit_join(self):
3007 pk_to_fk = dict(
3008 self.parent_property._join_condition.local_remote_pairs
3009 )
3010 pk_to_fk.update(
3011 (equiv, pk_to_fk[k])
3012 for k in list(pk_to_fk)
3013 for equiv in self.parent._equivalent_columns.get(k, ())
3014 )
3015
3016 pk_cols = fk_cols = [
3017 pk_to_fk[col] for col in self.parent.primary_key if col in pk_to_fk
3018 ]
3019 if len(fk_cols) > 1:
3020 in_expr = sql.tuple_(*fk_cols)
3021 zero_idx = False
3022 else:
3023 in_expr = fk_cols[0]
3024 zero_idx = True
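
        # an illustrative sketch of the two shapes built above (column names
        # are hypothetical).  a single-column key renders as
        #
        #     addresses.user_id IN (:pk_1, :pk_2, ...)
        #
        # while a composite key uses sql.tuple_() and renders roughly as
        #
        #     (addresses.user_id, addresses.user_ord)
        #         IN ((:pk_1a, :pk_1b), (:pk_2a, :pk_2b), ...)
        #
        # zero_idx records whether each incoming key tuple needs to be
        # unwrapped to its single element before being bound.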
3025
3026 return self.query_info(False, False, in_expr, pk_cols, zero_idx, None)
3027
3028 def _init_for_omit_join_m2o(self):
3029 pk_cols = self.mapper.primary_key
3030 if len(pk_cols) > 1:
3031 in_expr = sql.tuple_(*pk_cols)
3032 zero_idx = False
3033 else:
3034 in_expr = pk_cols[0]
3035 zero_idx = True
3036
3037 lazyloader = self.parent_property._get_strategy((("lazy", "select"),))
3038 lookup_cols = [lazyloader._equated_columns[pk] for pk in pk_cols]
3039
3040 return self.query_info(
3041 True, False, in_expr, pk_cols, zero_idx, lookup_cols
3042 )
3043
3044 def _init_for_join(self):
3045 self._parent_alias = AliasedClass(self.parent.class_)
3046 pa_insp = inspect(self._parent_alias)
3047 pk_cols = [
3048 pa_insp._adapt_element(col) for col in self.parent.primary_key
3049 ]
3050 if len(pk_cols) > 1:
3051 in_expr = sql.tuple_(*pk_cols)
3052 zero_idx = False
3053 else:
3054 in_expr = pk_cols[0]
3055 zero_idx = True
3056 return self.query_info(False, True, in_expr, pk_cols, zero_idx, None)
3057
3058 def init_class_attribute(self, mapper):
3059 self.parent_property._get_strategy(
3060 (("lazy", "select"),)
3061 ).init_class_attribute(mapper)
3062
3063 def create_row_processor(
3064 self,
3065 context,
3066 query_entity,
3067 path,
3068 loadopt,
3069 mapper,
3070 result,
3071 adapter,
3072 populators,
3073 ):
3074 if context.refresh_state:
3075 return self._immediateload_create_row_processor(
3076 context,
3077 query_entity,
3078 path,
3079 loadopt,
3080 mapper,
3081 result,
3082 adapter,
3083 populators,
3084 )
3085
3086 (
3087 effective_path,
3088 run_loader,
3089 execution_options,
3090 recursion_depth,
3091 ) = self._setup_for_recursion(
3092 context, path, loadopt, join_depth=self.join_depth
3093 )
3094
3095 if not run_loader:
3096 return
3097
3098 if not context.compile_state.compile_options._enable_eagerloads:
3099 return
3100
3101 if not self.parent.class_manager[self.key].impl.supports_population:
3102 raise sa_exc.InvalidRequestError(
3103 "'%s' does not support object "
3104 "population - eager loading cannot be applied." % self
3105 )
3106
        # a little dance here, as the "path" only semi-tracks the exact
        # series of things we are loading; in particular it does not tell
        # us about with_polymorphic() and similar constructs when it's at
        # the root.  the initial MapperEntity is more accurate for this case.
3111 if len(path) == 1:
3112 if not orm_util._entity_isa(query_entity.entity_zero, self.parent):
3113 return
3114 elif not orm_util._entity_isa(path[-1], self.parent):
3115 return
3116
3117 selectin_path = effective_path
3118
3119 path_w_prop = path[self.parent_property]
3120
3121 # build up a path indicating the path from the leftmost
3122 # entity to the thing we're subquery loading.
3123 with_poly_entity = path_w_prop.get(
3124 context.attributes, "path_with_polymorphic", None
3125 )
3126 if with_poly_entity is not None:
3127 effective_entity = inspect(with_poly_entity)
3128 else:
3129 effective_entity = self.entity
3130
3131 loading._PostLoad.callable_for_path(
3132 context,
3133 selectin_path,
3134 self.parent,
3135 self.parent_property,
3136 self._load_for_path,
3137 effective_entity,
3138 loadopt,
3139 recursion_depth,
3140 execution_options,
3141 )
3142
3143 def _load_for_path(
3144 self,
3145 context,
3146 path,
3147 states,
3148 load_only,
3149 effective_entity,
3150 loadopt,
3151 recursion_depth,
3152 execution_options,
3153 ):
3154 if load_only and self.key not in load_only:
3155 return
3156
3157 query_info = self._query_info
3158
3159 if query_info.load_only_child:
3160 our_states = collections.defaultdict(list)
3161 none_states = []
3162
3163 mapper = self.parent
3164
3165 for state, overwrite in states:
3166 state_dict = state.dict
3167 related_ident = tuple(
3168 mapper._get_state_attr_by_column(
3169 state,
3170 state_dict,
3171 lk,
3172 passive=attributes.PASSIVE_NO_FETCH,
3173 )
3174 for lk in query_info.child_lookup_cols
3175 )
3176 # if the loaded parent objects do not have the foreign key
3177 # to the related item loaded, then degrade into the joined
3178 # version of selectinload
3179 if LoaderCallableStatus.PASSIVE_NO_RESULT in related_ident:
3180 query_info = self._fallback_query_info
3181 break
3182
3183 # organize states into lists keyed to particular foreign
3184 # key values.
3185 if None not in related_ident:
3186 our_states[related_ident].append(
3187 (state, state_dict, overwrite)
3188 )
3189 else:
                    # For FK values that contain None, add the states to a
                    # separate collection; these are populated with None /
                    # an empty collection afterwards
3192 none_states.append((state, state_dict, overwrite))
3193
3194 # note the above conditional may have changed query_info
3195 if not query_info.load_only_child:
3196 our_states = [
3197 (state.key[1], state, state.dict, overwrite)
3198 for state, overwrite in states
3199 ]
3200
3201 pk_cols = query_info.pk_cols
3202 in_expr = query_info.in_expr
3203
3204 if not query_info.load_with_join:
3205 # in "omit join" mode, the primary key column and the
3206 # "in" expression are in terms of the related entity. So
3207 # if the related entity is polymorphic or otherwise aliased,
3208 # we need to adapt our "pk_cols" and "in_expr" to that
3209 # entity. in non-"omit join" mode, these are against the
3210 # parent entity and do not need adaption.
3211 if effective_entity.is_aliased_class:
3212 pk_cols = [
3213 effective_entity._adapt_element(col) for col in pk_cols
3214 ]
3215 in_expr = effective_entity._adapt_element(in_expr)
3216
3217 bundle_ent = orm_util.Bundle("pk", *pk_cols)
3218 bundle_sql = bundle_ent.__clause_element__()
3219
3220 entity_sql = effective_entity.__clause_element__()
3221 q = Select._create_raw_select(
3222 _raw_columns=[bundle_sql, entity_sql],
3223 _label_style=LABEL_STYLE_TABLENAME_PLUS_COL,
3224 _compile_options=_ORMCompileState.default_compile_options,
3225 _propagate_attrs={
3226 "compile_state_plugin": "orm",
3227 "plugin_subject": effective_entity,
3228 },
3229 )
3230
3231 if not query_info.load_with_join:
            # the Bundle we have in the "omit_join" case is against raw,
            # non-annotated columns, so to ensure the Query knows its primary
            # entity, we add it explicitly.  If we made the Bundle against
            # annotated columns, we hit a performance issue in this specific
            # case, which is detailed in issue #4347.
3237 q = q.select_from(effective_entity)
3238 else:
3239 # in the non-omit_join case, the Bundle is against the annotated/
3240 # mapped column of the parent entity, but the #4347 issue does not
3241 # occur in this case.
3242 q = q.select_from(self._parent_alias).join(
3243 getattr(self._parent_alias, self.parent_property.key).of_type(
3244 effective_entity
3245 )
3246 )
3247
3248 q = q.filter(in_expr.in_(sql.bindparam("primary_keys")))
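
        # taken together, the statement built above looks roughly like this
        # in the join form (sketch only; "users" / "addresses" are
        # hypothetical tables, and the omit_join form drops the JOIN and
        # selects the foreign key columns from the related table directly):
        #
        #     SELECT users_1.id, addresses.*
        #     FROM users AS users_1
        #     JOIN addresses ON users_1.id = addresses.user_id
        #     WHERE users_1.id IN (:primary_keys)
        #
        # with "primary_keys" expanded to one chunk of key values per
        # execution.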
3249
3250 # a test which exercises what these comments talk about is
3251 # test_selectin_relations.py -> test_twolevel_selectin_w_polymorphic
3252 #
3253 # effective_entity above is given to us in terms of the cached
3254 # statement, namely this one:
3255 orig_query = context.compile_state.select_statement
3256
3257 # the actual statement that was requested is this one:
3258 # context_query = context.user_passed_query
3259 #
3260 # that's not the cached one, however. So while it is of the identical
3261 # structure, if it has entities like AliasedInsp, which we get from
3262 # aliased() or with_polymorphic(), the AliasedInsp will likely be a
3263 # different object identity each time, and will not match up
3264 # hashing-wise to the corresponding AliasedInsp that's in the
3265 # cached query, meaning it won't match on paths and loader lookups
3266 # and loaders like this one will be skipped if it is used in options.
3267 #
3268 # as it turns out, standard loader options like selectinload(),
3269 # lazyload() that have a path need
3270 # to come from the cached query so that the AliasedInsp etc. objects
3271 # that are in the query line up with the object that's in the path
        # of the strategy object.  however, for other options like
        # with_loader_criteria(), which doesn't have a path (it has a fixed
        # entity) and needs access to the latest closure state in order to
        # be correct, we need to use the uncached one.
3276 #
3277 # as of #8399 we let the loader option itself figure out what it
3278 # wants to do given cached and uncached version of itself.
3279
3280 effective_path = path[self.parent_property]
3281
3282 if orig_query is context.user_passed_query:
3283 new_options = orig_query._with_options
3284 else:
3285 cached_options = orig_query._with_options
3286 uncached_options = context.user_passed_query._with_options
3287
3288 # propagate compile state options from the original query,
3289 # updating their "extra_criteria" as necessary.
3290 # note this will create a different cache key than
3291 # "orig" options if extra_criteria is present, because the copy
3292 # of extra_criteria will have different boundparam than that of
3293 # the QueryableAttribute in the path
3294 new_options = [
3295 orig_opt._adapt_cached_option_to_uncached_option(
3296 context, uncached_opt
3297 )
3298 for orig_opt, uncached_opt in zip(
3299 cached_options, uncached_options
3300 )
3301 ]
3302
3303 if loadopt and loadopt._extra_criteria:
3304 new_options += (
3305 orm_util.LoaderCriteriaOption(
3306 effective_entity,
3307 loadopt._generate_extra_criteria(context),
3308 ),
3309 )
3310
3311 if recursion_depth is not None:
3312 effective_path = effective_path._truncate_recursive()
3313
3314 q = q.options(*new_options)
3315
3316 q = q._update_compile_options({"_current_path": effective_path})
3317 if context.populate_existing:
3318 q = q.execution_options(populate_existing=True)
3319
3320 if self.parent_property.order_by:
3321 if not query_info.load_with_join:
3322 eager_order_by = self.parent_property.order_by
3323 if effective_entity.is_aliased_class:
3324 eager_order_by = [
3325 effective_entity._adapt_element(elem)
3326 for elem in eager_order_by
3327 ]
3328 q = q.order_by(*eager_order_by)
3329 else:
3330
3331 def _setup_outermost_orderby(compile_context):
3332 compile_context.eager_order_by += tuple(
3333 util.to_list(self.parent_property.order_by)
3334 )
3335
3336 q = q._add_compile_state_func(
3337 _setup_outermost_orderby, self.parent_property
3338 )
3339
3340 if query_info.load_only_child:
3341 self._load_via_child(
3342 our_states,
3343 none_states,
3344 query_info,
3345 q,
3346 context,
3347 execution_options,
3348 )
3349 else:
3350 self._load_via_parent(
3351 our_states, query_info, q, context, execution_options
3352 )
3353
3354 def _load_via_child(
3355 self,
3356 our_states,
3357 none_states,
3358 query_info,
3359 q,
3360 context,
3361 execution_options,
3362 ):
3363 uselist = self.uselist
3364
3365 # this sort is really for the benefit of the unit tests
3366 our_keys = sorted(our_states)
3367 while our_keys:
3368 chunk = our_keys[0 : self._chunksize]
3369 our_keys = our_keys[self._chunksize :]
3370 data = {
3371 k: v
3372 for k, v in context.session.execute(
3373 q,
3374 params={
3375 "primary_keys": [
3376 key[0] if query_info.zero_idx else key
3377 for key in chunk
3378 ]
3379 },
3380 execution_options=execution_options,
3381 ).unique()
3382 }
3383
3384 for key in chunk:
3385 # for a real foreign key and no concurrent changes to the
3386 # DB while running this method, "key" is always present in
3387 # data. However, for primaryjoins without real foreign keys
3388 # a non-None primaryjoin condition may still refer to no
3389 # related object.
3390 related_obj = data.get(key, None)
3391 for state, dict_, overwrite in our_states[key]:
3392 if not overwrite and self.key in dict_:
3393 continue
3394
3395 state.get_impl(self.key).set_committed_value(
3396 state,
3397 dict_,
3398 related_obj if not uselist else [related_obj],
3399 )
        # populate the None states with an empty value / collection
3401 for state, dict_, overwrite in none_states:
3402 if not overwrite and self.key in dict_:
3403 continue
3404
3405 # note it's OK if this is a uselist=True attribute, the empty
3406 # collection will be populated
3407 state.get_impl(self.key).set_committed_value(state, dict_, None)
3408
3409 def _load_via_parent(
3410 self, our_states, query_info, q, context, execution_options
3411 ):
3412 uselist = self.uselist
3413 _empty_result = () if uselist else None
3414
3415 while our_states:
3416 chunk = our_states[0 : self._chunksize]
3417 our_states = our_states[self._chunksize :]
3418
3419 primary_keys = [
3420 key[0] if query_info.zero_idx else key
3421 for key, state, state_dict, overwrite in chunk
3422 ]
3423
3424 data = collections.defaultdict(list)
3425 for k, v in itertools.groupby(
3426 context.session.execute(
3427 q,
3428 params={"primary_keys": primary_keys},
3429 execution_options=execution_options,
3430 ).unique(),
3431 lambda x: x[0],
3432 ):
3433 data[k].extend(vv[1] for vv in v)
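
        # to illustrate the grouping above: thanks to the Bundle, each result
        # row is a (parent key, related object) pair, so rows such as
        #
        #     (1, a1), (1, a2), (2, a3)        # hypothetical values
        #
        # collapse into {1: [a1, a2], 2: [a3]}.  itertools.groupby() only
        # merges adjacent rows, so the defaultdict(list) / extend() pattern
        # is used to accumulate non-adjacent rows for the same key as well.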
3434
3435 for key, state, state_dict, overwrite in chunk:
3436 if not overwrite and self.key in state_dict:
3437 continue
3438
3439 collection = data.get(key, _empty_result)
3440
3441 if not uselist and collection:
3442 if len(collection) > 1:
3443 util.warn(
3444 "Multiple rows returned with "
3445 "uselist=False for eagerly-loaded "
3446 "attribute '%s' " % self
3447 )
3448 state.get_impl(self.key).set_committed_value(
3449 state, state_dict, collection[0]
3450 )
3451 else:
3452 # note that empty tuple set on uselist=False sets the
3453 # value to None
3454 state.get_impl(self.key).set_committed_value(
3455 state, state_dict, collection
3456 )
3457
3458
3459def _single_parent_validator(desc, prop):
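    # a minimal illustration of what this validator guards against, assuming
    # a hypothetical Parent / Child mapping where the relationship is
    # configured with single_parent=True (e.g. along with
    # cascade="all, delete-orphan"):
    #
    #     c = Child()
    #     p1.child = c
    #     p2.child = c   # raises InvalidRequestError (error code bbf1)
    #
    # i.e. the child may be associated with only one parent at a time.
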
3460 def _do_check(state, value, oldvalue, initiator):
3461 if value is not None and initiator.key == prop.key:
3462 hasparent = initiator.hasparent(attributes.instance_state(value))
3463 if hasparent and oldvalue is not value:
3464 raise sa_exc.InvalidRequestError(
3465 "Instance %s is already associated with an instance "
3466 "of %s via its %s attribute, and is only allowed a "
3467 "single parent."
3468 % (orm_util.instance_str(value), state.class_, prop),
3469 code="bbf1",
3470 )
3471 return value
3472
3473 def append(state, value, initiator):
3474 return _do_check(state, value, None, initiator)
3475
3476 def set_(state, value, oldvalue, initiator):
3477 return _do_check(state, value, oldvalue, initiator)
3478
3479 event.listen(
3480 desc, "append", append, raw=True, retval=True, active_history=True
3481 )
3482 event.listen(desc, "set", set_, raw=True, retval=True, active_history=True)