1# orm/strategies.py
2# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
3# <see AUTHORS file>
4#
5# This module is part of SQLAlchemy and is released under
6# the MIT License: https://www.opensource.org/licenses/mit-license.php
7# mypy: ignore-errors
8
9
10"""sqlalchemy.orm.interfaces.LoaderStrategy
11implementations, and related MapperOptions."""
12
13from __future__ import annotations
14
15import collections
16import itertools
17from typing import Any
18from typing import Dict
19from typing import Literal
20from typing import Optional
21from typing import Tuple
22from typing import TYPE_CHECKING
23from typing import Union
24
25from . import attributes
26from . import exc as orm_exc
27from . import interfaces
28from . import loading
29from . import path_registry
30from . import properties
31from . import query
32from . import relationships
33from . import unitofwork
34from . import util as orm_util
35from .base import _DEFER_FOR_STATE
36from .base import _RAISE_FOR_STATE
37from .base import _SET_DEFERRED_EXPIRED
38from .base import ATTR_WAS_SET
39from .base import LoaderCallableStatus
40from .base import PASSIVE_OFF
41from .base import PassiveFlag
42from .context import _column_descriptions
43from .context import _ORMCompileState
44from .context import _ORMSelectCompileState
45from .context import QueryContext
46from .interfaces import LoaderStrategy
47from .interfaces import StrategizedProperty
48from .session import _state_session
49from .state import InstanceState
50from .strategy_options import Load
51from .util import _none_only_set
52from .util import AliasedClass
53from .. import event
54from .. import exc as sa_exc
55from .. import inspect
56from .. import log
57from .. import sql
58from .. import util
59from ..sql import util as sql_util
60from ..sql import visitors
61from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
62from ..sql.selectable import Select
63
64if TYPE_CHECKING:
65 from .mapper import Mapper
66 from .relationships import RelationshipProperty
67 from ..sql.elements import ColumnElement
68
69
70def _register_attribute(
71 prop,
72 mapper,
73 useobject,
74 compare_function=None,
75 typecallable=None,
76 callable_=None,
77 proxy_property=None,
78 active_history=False,
79 impl_class=None,
80 default_scalar_value=None,
81 **kw,
82):
83 listen_hooks = []
84
85 uselist = useobject and prop.uselist
86
87 if useobject and prop.single_parent:
88 listen_hooks.append(_single_parent_validator)
89
90 if prop.key in prop.parent.validators:
91 fn, opts = prop.parent.validators[prop.key]
92 listen_hooks.append(
93 lambda desc, prop: orm_util._validator_events(
94 desc, prop.key, fn, **opts
95 )
96 )
97
98 if useobject:
99 listen_hooks.append(unitofwork._track_cascade_events)
100
101 # need to assemble backref listeners
102 # after the singleparentvalidator, mapper validator
103 if useobject:
104 backref = prop.back_populates
105 if backref and prop._effective_sync_backref:
106 listen_hooks.append(
107 lambda desc, prop: attributes._backref_listeners(
108 desc, backref, uselist
109 )
110 )
111
112 # a single MapperProperty is shared down a class inheritance
113 # hierarchy, so we set up attribute instrumentation and backref event
114 # for each mapper down the hierarchy.
115
116 # typically, "mapper" is the same as prop.parent, due to the way
117 # the configure_mappers() process runs, however this is not strongly
118 # enforced, and in the case of a second configure_mappers() run the
119 # mapper here might not be prop.parent; also, a subclass mapper may
120 # be called here before a superclass mapper. That is, can't depend
121 # on mappers not already being set up so we have to check each one.
122
123 for m in mapper.self_and_descendants:
124 if prop is m._props.get(
125 prop.key
126 ) and not m.class_manager._attr_has_impl(prop.key):
127 desc = attributes._register_attribute_impl(
128 m.class_,
129 prop.key,
130 parent_token=prop,
131 uselist=uselist,
132 compare_function=compare_function,
133 useobject=useobject,
134 trackparent=useobject
135 and (
136 prop.single_parent
137 or prop.direction is interfaces.ONETOMANY
138 ),
139 typecallable=typecallable,
140 callable_=callable_,
141 active_history=active_history,
142 default_scalar_value=default_scalar_value,
143 impl_class=impl_class,
144 send_modified_events=not useobject or not prop.viewonly,
145 doc=prop.doc,
146 **kw,
147 )
148
149 for hook in listen_hooks:
150 hook(desc, prop)
151
152
153@properties.ColumnProperty.strategy_for(instrument=False, deferred=False)
154class _UninstrumentedColumnLoader(LoaderStrategy):
155 """Represent a non-instrumented MapperProperty.
156
157 The polymorphic_on argument of mapper() often results in this,
158 if the argument is against the with_polymorphic selectable.
159
160 """
161
162 __slots__ = ("columns",)
163
164 def __init__(self, parent, strategy_key):
165 super().__init__(parent, strategy_key)
166 self.columns = self.parent_property.columns
167
168 def setup_query(
169 self,
170 compile_state,
171 query_entity,
172 path,
173 loadopt,
174 adapter,
175 column_collection=None,
176 **kwargs,
177 ):
178 for c in self.columns:
179 if adapter:
180 c = adapter.columns[c]
181 compile_state._append_dedupe_col_collection(c, column_collection)
182
183 def create_row_processor(
184 self,
185 context,
186 query_entity,
187 path,
188 loadopt,
189 mapper,
190 result,
191 adapter,
192 populators,
193 ):
194 pass
195
196
197@log.class_logger
198@properties.ColumnProperty.strategy_for(instrument=True, deferred=False)
199class _ColumnLoader(LoaderStrategy):
200 """Provide loading behavior for a :class:`.ColumnProperty`."""
201
202 __slots__ = "columns", "is_composite"
203
204 def __init__(self, parent, strategy_key):
205 super().__init__(parent, strategy_key)
206 self.columns = self.parent_property.columns
207 self.is_composite = hasattr(self.parent_property, "composite_class")
208
209 def setup_query(
210 self,
211 compile_state,
212 query_entity,
213 path,
214 loadopt,
215 adapter,
216 column_collection,
217 memoized_populators,
218 check_for_adapt=False,
219 **kwargs,
220 ):
221 for c in self.columns:
222 if adapter:
223 if check_for_adapt:
224 c = adapter.adapt_check_present(c)
225 if c is None:
226 return
227 else:
228 c = adapter.columns[c]
229
230 compile_state._append_dedupe_col_collection(c, column_collection)
231
232 fetch = self.columns[0]
233 if adapter:
234 fetch = adapter.columns[fetch]
235 if fetch is None:
236 # None happens here only for dml bulk_persistence cases
237 # when context.DMLReturningColFilter is used
238 return
239
240 memoized_populators[self.parent_property] = fetch
241
242 def init_class_attribute(self, mapper):
243 self.is_class_level = True
244 coltype = self.columns[0].type
245 # TODO: check all columns ? check for foreign key as well?
246 active_history = (
247 self.parent_property.active_history
248 or self.columns[0].primary_key
249 or (
250 mapper.version_id_col is not None
251 and mapper._columntoproperty.get(mapper.version_id_col, None)
252 is self.parent_property
253 )
254 )
255
256 _register_attribute(
257 self.parent_property,
258 mapper,
259 useobject=False,
260 compare_function=coltype.compare_values,
261 active_history=active_history,
262 default_scalar_value=self.parent_property._default_scalar_value,
263 )
264
265 def create_row_processor(
266 self,
267 context,
268 query_entity,
269 path,
270 loadopt,
271 mapper,
272 result,
273 adapter,
274 populators,
275 ):
276 # look through list of columns represented here
277 # to see which, if any, is present in the row.
278
279 for col in self.columns:
280 if adapter:
281 col = adapter.columns[col]
282 getter = result._getter(col, False)
283 if getter:
284 populators["quick"].append((self.key, getter))
285 break
286 else:
287 populators["expire"].append((self.key, True))
288
289
290@log.class_logger
291@properties.ColumnProperty.strategy_for(query_expression=True)
292class _ExpressionColumnLoader(_ColumnLoader):
293 def __init__(self, parent, strategy_key):
294 super().__init__(parent, strategy_key)
295
296 # compare to the "default" expression that is mapped in
297 # the column. If it's sql.null, we don't need to render
298 # unless an expr is passed in the options.
299 null = sql.null().label(None)
300 self._have_default_expression = any(
301 not c.compare(null) for c in self.parent_property.columns
302 )
303
304 def setup_query(
305 self,
306 compile_state,
307 query_entity,
308 path,
309 loadopt,
310 adapter,
311 column_collection,
312 memoized_populators,
313 **kwargs,
314 ):
315 columns = None
316 if loadopt and loadopt._extra_criteria:
317 columns = loadopt._extra_criteria
318
319 elif self._have_default_expression:
320 columns = self.parent_property.columns
321
322 if columns is None:
323 return
324
325 for c in columns:
326 if adapter:
327 c = adapter.columns[c]
328 compile_state._append_dedupe_col_collection(c, column_collection)
329
330 fetch = columns[0]
331 if adapter:
332 fetch = adapter.columns[fetch]
333 if fetch is None:
334 # None is not expected to be the result of any
335 # adapter implementation here, however there may be theoretical
336 # usages of returning() with context.DMLReturningColFilter
337 return
338
339 memoized_populators[self.parent_property] = fetch
340
341 # if the column being loaded is the polymorphic discriminator,
342 # and we have a with_expression() providing the actual column,
343 # update the query_entity to use the actual column instead of
344 # the default expression
345 if (
346 query_entity._polymorphic_discriminator is self.columns[0]
347 and loadopt
348 and loadopt._extra_criteria
349 ):
350 query_entity._polymorphic_discriminator = columns[0]
351
352 def create_row_processor(
353 self,
354 context,
355 query_entity,
356 path,
357 loadopt,
358 mapper,
359 result,
360 adapter,
361 populators,
362 ):
363 # look through list of columns represented here
364 # to see which, if any, is present in the row.
365 if loadopt and loadopt._extra_criteria:
366 columns = loadopt._extra_criteria
367
368 for col in columns:
369 if adapter:
370 col = adapter.columns[col]
371 getter = result._getter(col, False)
372 if getter:
373 populators["quick"].append((self.key, getter))
374 break
375 else:
376 populators["expire"].append((self.key, True))
377
378 def init_class_attribute(self, mapper):
379 self.is_class_level = True
380
381 _register_attribute(
382 self.parent_property,
383 mapper,
384 useobject=False,
385 compare_function=self.columns[0].type.compare_values,
386 accepts_scalar_loader=False,
387 default_scalar_value=self.parent_property._default_scalar_value,
388 )
389
390
391@log.class_logger
392@properties.ColumnProperty.strategy_for(deferred=True, instrument=True)
393@properties.ColumnProperty.strategy_for(
394 deferred=True, instrument=True, raiseload=True
395)
396@properties.ColumnProperty.strategy_for(do_nothing=True)
397class _DeferredColumnLoader(LoaderStrategy):
398 """Provide loading behavior for a deferred :class:`.ColumnProperty`."""
399
400 __slots__ = "columns", "group", "raiseload"
401
402 def __init__(self, parent, strategy_key):
403 super().__init__(parent, strategy_key)
404 if hasattr(self.parent_property, "composite_class"):
405 raise NotImplementedError(
406 "Deferred loading for composite types not implemented yet"
407 )
408 self.raiseload = self.strategy_opts.get("raiseload", False)
409 self.columns = self.parent_property.columns
410 self.group = self.parent_property.group
411
412 def create_row_processor(
413 self,
414 context,
415 query_entity,
416 path,
417 loadopt,
418 mapper,
419 result,
420 adapter,
421 populators,
422 ):
423 # for a DeferredColumnLoader, this method is only used during a
424 # "row processor only" query; see test_deferred.py ->
425 # tests with "rowproc_only" in their name. As of the 1.0 series,
426 # loading._instance_processor doesn't use a "row processing" function
427 # to populate columns, instead it uses data in the "populators"
428 # dictionary. Normally, the DeferredColumnLoader.setup_query()
429 # sets up that data in the "memoized_populators" dictionary
430 # and "create_row_processor()" here is never invoked.
431
432 if (
433 context.refresh_state
434 and context.query._compile_options._only_load_props
435 and self.key in context.query._compile_options._only_load_props
436 ):
437 self.parent_property._get_strategy(
438 (("deferred", False), ("instrument", True))
439 ).create_row_processor(
440 context,
441 query_entity,
442 path,
443 loadopt,
444 mapper,
445 result,
446 adapter,
447 populators,
448 )
449
450 elif not self.is_class_level:
451 if self.raiseload:
452 set_deferred_for_local_state = (
453 self.parent_property._raise_column_loader
454 )
455 else:
456 set_deferred_for_local_state = (
457 self.parent_property._deferred_column_loader
458 )
459 populators["new"].append((self.key, set_deferred_for_local_state))
460 else:
461 populators["expire"].append((self.key, False))
462
463 def init_class_attribute(self, mapper):
464 self.is_class_level = True
465
466 _register_attribute(
467 self.parent_property,
468 mapper,
469 useobject=False,
470 compare_function=self.columns[0].type.compare_values,
471 callable_=self._load_for_state,
472 load_on_unexpire=False,
473 default_scalar_value=self.parent_property._default_scalar_value,
474 )
475
476 def setup_query(
477 self,
478 compile_state,
479 query_entity,
480 path,
481 loadopt,
482 adapter,
483 column_collection,
484 memoized_populators,
485 only_load_props=None,
486 **kw,
487 ):
488 if (
489 (
490 compile_state.compile_options._render_for_subquery
491 and self.parent_property._renders_in_subqueries
492 )
493 or (
494 loadopt
495 and set(self.columns).intersection(
496 self.parent._should_undefer_in_wildcard
497 )
498 )
499 or (
500 loadopt
501 and self.group
502 and loadopt.local_opts.get(
503 "undefer_group_%s" % self.group, False
504 )
505 )
506 or (only_load_props and self.key in only_load_props)
507 ):
508 self.parent_property._get_strategy(
509 (("deferred", False), ("instrument", True))
510 ).setup_query(
511 compile_state,
512 query_entity,
513 path,
514 loadopt,
515 adapter,
516 column_collection,
517 memoized_populators,
518 **kw,
519 )
520 elif self.is_class_level:
521 memoized_populators[self.parent_property] = _SET_DEFERRED_EXPIRED
522 elif not self.raiseload:
523 memoized_populators[self.parent_property] = _DEFER_FOR_STATE
524 else:
525 memoized_populators[self.parent_property] = _RAISE_FOR_STATE
526
527 def _load_for_state(self, state, passive):
528 if not state.key:
529 return LoaderCallableStatus.ATTR_EMPTY
530
531 if not passive & PassiveFlag.SQL_OK:
532 return LoaderCallableStatus.PASSIVE_NO_RESULT
533
534 localparent = state.manager.mapper
535
536 if self.group:
537 toload = [
538 p.key
539 for p in localparent.iterate_properties
540 if isinstance(p, StrategizedProperty)
541 and isinstance(p.strategy, _DeferredColumnLoader)
542 and p.group == self.group
543 ]
544 else:
545 toload = [self.key]
546
547 # narrow the keys down to just those which have no history
548 group = [k for k in toload if k in state.unmodified]
549
550 session = _state_session(state)
551 if session is None:
552 raise orm_exc.DetachedInstanceError(
553 "Parent instance %s is not bound to a Session; "
554 "deferred load operation of attribute '%s' cannot proceed"
555 % (orm_util.state_str(state), self.key)
556 )
557
558 if self.raiseload:
559 self._invoke_raise_load(state, passive, "raise")
560
561 loading._load_scalar_attributes(
562 state.mapper, state, set(group), PASSIVE_OFF
563 )
564
565 return LoaderCallableStatus.ATTR_WAS_SET
566
567 def _invoke_raise_load(self, state, passive, lazy):
568 raise sa_exc.InvalidRequestError(
569 "'%s' is not available due to raiseload=True" % (self,)
570 )
571
572
573class _LoadDeferredColumns:
574 """serializable loader object used by DeferredColumnLoader"""
575
576 def __init__(self, key: str, raiseload: bool = False):
577 self.key = key
578 self.raiseload = raiseload
579
580 def __call__(self, state, passive=attributes.PASSIVE_OFF):
581 key = self.key
582
583 localparent = state.manager.mapper
584 prop = localparent._props[key]
585 if self.raiseload:
586 strategy_key = (
587 ("deferred", True),
588 ("instrument", True),
589 ("raiseload", True),
590 )
591 else:
592 strategy_key = (("deferred", True), ("instrument", True))
593 strategy = prop._get_strategy(strategy_key)
594 return strategy._load_for_state(state, passive)
595
596
597class _AbstractRelationshipLoader(LoaderStrategy):
598 """LoaderStratgies which deal with related objects."""
599
600 __slots__ = "mapper", "target", "uselist", "entity"
601
602 def __init__(self, parent, strategy_key):
603 super().__init__(parent, strategy_key)
604 self.mapper = self.parent_property.mapper
605 self.entity = self.parent_property.entity
606 self.target = self.parent_property.target
607 self.uselist = self.parent_property.uselist
608
609 def _immediateload_create_row_processor(
610 self,
611 context,
612 query_entity,
613 path,
614 loadopt,
615 mapper,
616 result,
617 adapter,
618 populators,
619 ):
620 return self.parent_property._get_strategy(
621 (("lazy", "immediate"),)
622 ).create_row_processor(
623 context,
624 query_entity,
625 path,
626 loadopt,
627 mapper,
628 result,
629 adapter,
630 populators,
631 )
632
633
634@log.class_logger
635@relationships.RelationshipProperty.strategy_for(do_nothing=True)
636class _DoNothingLoader(LoaderStrategy):
637 """Relationship loader that makes no change to the object's state.
638
639 Compared to NoLoader, this loader does not initialize the
640 collection/attribute to empty/none; the usual default LazyLoader will
641 take effect.
642
643 """
644
645
646@log.class_logger
647@relationships.RelationshipProperty.strategy_for(lazy="noload")
648@relationships.RelationshipProperty.strategy_for(lazy=None)
649class _NoLoader(_AbstractRelationshipLoader):
650 """Provide loading behavior for a :class:`.Relationship`
651 with "lazy=None".
652
653 """
654
655 __slots__ = ()
656
657 @util.deprecated(
658 "2.1",
659 "The ``noload`` loader strategy is deprecated and will be removed "
660 "in a future release. This option "
661 "produces incorrect results by returning ``None`` for related "
662 "items.",
663 )
664 def init_class_attribute(self, mapper):
665 self.is_class_level = True
666
667 _register_attribute(
668 self.parent_property,
669 mapper,
670 useobject=True,
671 typecallable=self.parent_property.collection_class,
672 )
673
674 def create_row_processor(
675 self,
676 context,
677 query_entity,
678 path,
679 loadopt,
680 mapper,
681 result,
682 adapter,
683 populators,
684 ):
685 def invoke_no_load(state, dict_, row):
686 if self.uselist:
687 attributes.init_state_collection(state, dict_, self.key)
688 else:
689 dict_[self.key] = None
690
691 populators["new"].append((self.key, invoke_no_load))
692
693
694@log.class_logger
695@relationships.RelationshipProperty.strategy_for(lazy=True)
696@relationships.RelationshipProperty.strategy_for(lazy="select")
697@relationships.RelationshipProperty.strategy_for(lazy="raise")
698@relationships.RelationshipProperty.strategy_for(lazy="raise_on_sql")
699@relationships.RelationshipProperty.strategy_for(lazy="baked_select")
700class _LazyLoader(
701 _AbstractRelationshipLoader, util.MemoizedSlots, log.Identified
702):
703 """Provide loading behavior for a :class:`.Relationship`
704 with "lazy=True", that is loads when first accessed.
705
706 """
707
708 __slots__ = (
709 "_lazywhere",
710 "_rev_lazywhere",
711 "_lazyload_reverse_option",
712 "_order_by",
713 "use_get",
714 "is_aliased_class",
715 "_bind_to_col",
716 "_equated_columns",
717 "_rev_bind_to_col",
718 "_rev_equated_columns",
719 "_simple_lazy_clause",
720 "_raise_always",
721 "_raise_on_sql",
722 )
723
724 _lazywhere: ColumnElement[bool]
725 _bind_to_col: Dict[str, ColumnElement[Any]]
726 _rev_lazywhere: ColumnElement[bool]
727 _rev_bind_to_col: Dict[str, ColumnElement[Any]]
728
729 parent_property: RelationshipProperty[Any]
730
731 def __init__(
732 self, parent: RelationshipProperty[Any], strategy_key: Tuple[Any, ...]
733 ):
734 super().__init__(parent, strategy_key)
735 self._raise_always = self.strategy_opts["lazy"] == "raise"
736 self._raise_on_sql = self.strategy_opts["lazy"] == "raise_on_sql"
737
738 self.is_aliased_class = inspect(self.entity).is_aliased_class
739
740 join_condition = self.parent_property._join_condition
741 (
742 self._lazywhere,
743 self._bind_to_col,
744 self._equated_columns,
745 ) = join_condition.create_lazy_clause()
746
747 (
748 self._rev_lazywhere,
749 self._rev_bind_to_col,
750 self._rev_equated_columns,
751 ) = join_condition.create_lazy_clause(reverse_direction=True)
752
753 if self.parent_property.order_by:
754 self._order_by = util.to_list(self.parent_property.order_by)
755 else:
756 self._order_by = None
757
758 self.logger.info("%s lazy loading clause %s", self, self._lazywhere)
759
760 # determine if our "lazywhere" clause is the same as the mapper's
761 # get() clause. then we can just use mapper.get()
762 #
763 # TODO: the "not self.uselist" can be taken out entirely; a m2o
764 # load that populates for a list (very unusual, but is possible with
765 # the API) can still set for "None" and the attribute system will
766 # populate as an empty list.
767 self.use_get = (
768 not self.is_aliased_class
769 and not self.uselist
770 and self.entity._get_clause[0].compare(
771 self._lazywhere,
772 use_proxies=True,
773 compare_keys=False,
774 equivalents=self.mapper._equivalent_columns,
775 )
776 )
777
778 if self.use_get:
779 for col in list(self._equated_columns):
780 if col in self.mapper._equivalent_columns:
781 for c in self.mapper._equivalent_columns[col]:
782 self._equated_columns[c] = self._equated_columns[col]
783
784 self.logger.info(
785 "%s will use Session.get() to optimize instance loads", self
786 )
787
788 def init_class_attribute(self, mapper):
789 self.is_class_level = True
790
791 _legacy_inactive_history_style = (
792 self.parent_property._legacy_inactive_history_style
793 )
794
795 if self.parent_property.active_history:
796 active_history = True
797 _deferred_history = False
798
799 elif (
800 self.parent_property.direction is not interfaces.MANYTOONE
801 or not self.use_get
802 ):
803 if _legacy_inactive_history_style:
804 active_history = True
805 _deferred_history = False
806 else:
807 active_history = False
808 _deferred_history = True
809 else:
810 active_history = _deferred_history = False
811
812 _register_attribute(
813 self.parent_property,
814 mapper,
815 useobject=True,
816 callable_=self._load_for_state,
817 typecallable=self.parent_property.collection_class,
818 active_history=active_history,
819 _deferred_history=_deferred_history,
820 )
821
822 def _memoized_attr__simple_lazy_clause(self):
823 lazywhere = self._lazywhere
824
825 criterion, bind_to_col = (lazywhere, self._bind_to_col)
826
827 params = []
828
829 def visit_bindparam(bindparam):
830 bindparam.unique = False
831
832 visitors.traverse(criterion, {}, {"bindparam": visit_bindparam})
833
834 def visit_bindparam(bindparam):
835 if bindparam._identifying_key in bind_to_col:
836 params.append(
837 (
838 bindparam.key,
839 bind_to_col[bindparam._identifying_key],
840 None,
841 )
842 )
843 elif bindparam.callable is None:
844 params.append((bindparam.key, None, bindparam.value))
845
846 criterion = visitors.cloned_traverse(
847 criterion, {}, {"bindparam": visit_bindparam}
848 )
849
850 return criterion, params
851
852 def _generate_lazy_clause(self, state, passive):
853 criterion, param_keys = self._simple_lazy_clause
854
855 if state is None:
856 return sql_util.adapt_criterion_to_null(
857 criterion, [key for key, ident, value in param_keys]
858 )
859
860 mapper = self.parent_property.parent
861
862 o = state.obj() # strong ref
863 dict_ = attributes.instance_dict(o)
864
865 if passive & PassiveFlag.INIT_OK:
866 passive ^= PassiveFlag.INIT_OK
867
868 params = {}
869 for key, ident, value in param_keys:
870 if ident is not None:
871 if passive and passive & PassiveFlag.LOAD_AGAINST_COMMITTED:
872 value = mapper._get_committed_state_attr_by_column(
873 state, dict_, ident, passive
874 )
875 else:
876 value = mapper._get_state_attr_by_column(
877 state, dict_, ident, passive
878 )
879
880 params[key] = value
881
882 return criterion, params
883
884 def _invoke_raise_load(self, state, passive, lazy):
885 raise sa_exc.InvalidRequestError(
886 "'%s' is not available due to lazy='%s'" % (self, lazy)
887 )
888
889 def _load_for_state(
890 self,
891 state,
892 passive,
893 loadopt=None,
894 extra_criteria=(),
895 extra_options=(),
896 alternate_effective_path=None,
897 execution_options=util.EMPTY_DICT,
898 ):
899 if not state.key and (
900 (
901 not self.parent_property.load_on_pending
902 and not state._load_pending
903 )
904 or not state.session_id
905 ):
906 return LoaderCallableStatus.ATTR_EMPTY
907
908 pending = not state.key
909 primary_key_identity = None
910
911 use_get = self.use_get and (not loadopt or not loadopt._extra_criteria)
912
913 if (not passive & PassiveFlag.SQL_OK and not use_get) or (
914 not passive & attributes.NON_PERSISTENT_OK and pending
915 ):
916 return LoaderCallableStatus.PASSIVE_NO_RESULT
917
918 if (
919 # we were given lazy="raise"
920 self._raise_always
921 # the no_raise history-related flag was not passed
922 and not passive & PassiveFlag.NO_RAISE
923 and (
924 # if we are use_get and related_object_ok is disabled,
925 # which means we are at most looking in the identity map
926 # for history purposes or otherwise returning
927 # PASSIVE_NO_RESULT, don't raise. This is also a
928 # history-related flag
929 not use_get
930 or passive & PassiveFlag.RELATED_OBJECT_OK
931 )
932 ):
933 self._invoke_raise_load(state, passive, "raise")
934
935 session = _state_session(state)
936 if not session:
937 if passive & PassiveFlag.NO_RAISE:
938 return LoaderCallableStatus.PASSIVE_NO_RESULT
939
940 raise orm_exc.DetachedInstanceError(
941 "Parent instance %s is not bound to a Session; "
942 "lazy load operation of attribute '%s' cannot proceed"
943 % (orm_util.state_str(state), self.key)
944 )
945
946 # if we have a simple primary key load, check the
947 # identity map without generating a Query at all
948 if use_get:
949 primary_key_identity = self._get_ident_for_use_get(
950 session, state, passive
951 )
952 if LoaderCallableStatus.PASSIVE_NO_RESULT in primary_key_identity:
953 return LoaderCallableStatus.PASSIVE_NO_RESULT
954 elif LoaderCallableStatus.NEVER_SET in primary_key_identity:
955 return LoaderCallableStatus.NEVER_SET
956
957 # test for None alone in primary_key_identity based on
958 # allow_partial_pks preference. PASSIVE_NO_RESULT and NEVER_SET
959 # have already been tested above
960 if not self.mapper.allow_partial_pks:
961 if _none_only_set.intersection(primary_key_identity):
962 return None
963 else:
964 if _none_only_set.issuperset(primary_key_identity):
965 return None
966
967 if (
968 self.key in state.dict
969 and not passive & PassiveFlag.DEFERRED_HISTORY_LOAD
970 ):
971 return LoaderCallableStatus.ATTR_WAS_SET
972
973 # look for this identity in the identity map. Delegate to the
974 # Query class in use, as it may have special rules for how it
975 # does this, including how it decides what the correct
976 # identity_token would be for this identity.
977
978 instance = session._identity_lookup(
979 self.entity,
980 primary_key_identity,
981 passive=passive,
982 lazy_loaded_from=state,
983 )
984
985 if instance is not None:
986 if instance is LoaderCallableStatus.PASSIVE_CLASS_MISMATCH:
987 return None
988 else:
989 return instance
990 elif (
991 not passive & PassiveFlag.SQL_OK
992 or not passive & PassiveFlag.RELATED_OBJECT_OK
993 ):
994 return LoaderCallableStatus.PASSIVE_NO_RESULT
995
996 return self._emit_lazyload(
997 session,
998 state,
999 primary_key_identity,
1000 passive,
1001 loadopt,
1002 extra_criteria,
1003 extra_options,
1004 alternate_effective_path,
1005 execution_options,
1006 )
1007
1008 def _get_ident_for_use_get(self, session, state, passive):
1009 instance_mapper = state.manager.mapper
1010
1011 if passive & PassiveFlag.LOAD_AGAINST_COMMITTED:
1012 get_attr = instance_mapper._get_committed_state_attr_by_column
1013 else:
1014 get_attr = instance_mapper._get_state_attr_by_column
1015
1016 dict_ = state.dict
1017
1018 return [
1019 get_attr(state, dict_, self._equated_columns[pk], passive=passive)
1020 for pk in self.mapper.primary_key
1021 ]
1022
1023 @util.preload_module("sqlalchemy.orm.strategy_options")
1024 def _emit_lazyload(
1025 self,
1026 session,
1027 state,
1028 primary_key_identity,
1029 passive,
1030 loadopt,
1031 extra_criteria,
1032 extra_options,
1033 alternate_effective_path,
1034 execution_options,
1035 ):
1036 strategy_options = util.preloaded.orm_strategy_options
1037
1038 clauseelement = self.entity.__clause_element__()
1039 stmt = Select._create_raw_select(
1040 _raw_columns=[clauseelement],
1041 _propagate_attrs=clauseelement._propagate_attrs,
1042 _compile_options=_ORMCompileState.default_compile_options,
1043 )
1044 load_options = QueryContext.default_load_options
1045
1046 load_options += {
1047 "_invoke_all_eagers": False,
1048 "_lazy_loaded_from": state,
1049 }
1050
1051 if self.parent_property.secondary is not None:
1052 stmt = stmt.select_from(
1053 self.mapper, self.parent_property.secondary
1054 )
1055
1056 pending = not state.key
1057
1058 # don't autoflush on pending
1059 if pending or passive & attributes.NO_AUTOFLUSH:
1060 stmt._execution_options = util.immutabledict({"autoflush": False})
1061
1062 use_get = self.use_get
1063
1064 if state.load_options or (loadopt and loadopt._extra_criteria):
1065 if alternate_effective_path is None:
1066 effective_path = state.load_path[self.parent_property]
1067 else:
1068 effective_path = alternate_effective_path[self.parent_property]
1069
1070 opts = state.load_options
1071
1072 if loadopt and loadopt._extra_criteria:
1073 use_get = False
1074 opts += (
1075 orm_util.LoaderCriteriaOption(self.entity, extra_criteria),
1076 )
1077
1078 stmt._with_options = opts
1079 elif alternate_effective_path is None:
1080 # this path is used if there are not already any options
1081 # in the query, but an event may want to add them
1082 effective_path = state.mapper._path_registry[self.parent_property]
1083 else:
1084 # added by immediateloader
1085 effective_path = alternate_effective_path[self.parent_property]
1086
1087 if extra_options:
1088 stmt._with_options += extra_options
1089
1090 stmt._compile_options += {"_current_path": effective_path}
1091
1092 if use_get:
1093 if self._raise_on_sql and not passive & PassiveFlag.NO_RAISE:
1094 self._invoke_raise_load(state, passive, "raise_on_sql")
1095
1096 return loading._load_on_pk_identity(
1097 session,
1098 stmt,
1099 primary_key_identity,
1100 load_options=load_options,
1101 execution_options=execution_options,
1102 )
1103
1104 if self._order_by:
1105 stmt._order_by_clauses = self._order_by
1106
1107 def _lazyload_reverse(compile_context):
1108 for rev in self.parent_property._reverse_property:
1109 # reverse props that are MANYTOONE are loading *this*
1110 # object from get(), so don't need to eager out to those.
1111 if (
1112 rev.direction is interfaces.MANYTOONE
1113 and rev._use_get
1114 and not isinstance(rev.strategy, _LazyLoader)
1115 ):
1116 strategy_options.Load._construct_for_existing_path(
1117 compile_context.compile_options._current_path[
1118 rev.parent
1119 ]
1120 ).lazyload(rev).process_compile_state(compile_context)
1121
1122 stmt = stmt._add_compile_state_func(
1123 _lazyload_reverse, self.parent_property
1124 )
1125
1126 lazy_clause, params = self._generate_lazy_clause(state, passive)
1127
1128 if execution_options:
1129 execution_options = util.EMPTY_DICT.merge_with(
1130 execution_options, {"_sa_orm_load_options": load_options}
1131 )
1132 else:
1133 execution_options = {
1134 "_sa_orm_load_options": load_options,
1135 }
1136
1137 if (
1138 self.key in state.dict
1139 and not passive & PassiveFlag.DEFERRED_HISTORY_LOAD
1140 ):
1141 return LoaderCallableStatus.ATTR_WAS_SET
1142
1143 if pending:
1144 if util.has_intersection(orm_util._none_set, params.values()):
1145 return None
1146
1147 elif util.has_intersection(orm_util._never_set, params.values()):
1148 return None
1149
1150 if self._raise_on_sql and not passive & PassiveFlag.NO_RAISE:
1151 self._invoke_raise_load(state, passive, "raise_on_sql")
1152
1153 stmt._where_criteria = (lazy_clause,)
1154
1155 result = session.execute(
1156 stmt, params, execution_options=execution_options
1157 )
1158
1159 result = result.unique().scalars().all()
1160
1161 if self.uselist:
1162 return result
1163 else:
1164 l = len(result)
1165 if l:
1166 if l > 1:
1167 util.warn(
1168 "Multiple rows returned with "
1169 "uselist=False for lazily-loaded attribute '%s' "
1170 % self.parent_property
1171 )
1172
1173 return result[0]
1174 else:
1175 return None
1176
1177 def create_row_processor(
1178 self,
1179 context,
1180 query_entity,
1181 path,
1182 loadopt,
1183 mapper,
1184 result,
1185 adapter,
1186 populators,
1187 ):
1188 key = self.key
1189
1190 if (
1191 context.load_options._is_user_refresh
1192 and context.query._compile_options._only_load_props
1193 and self.key in context.query._compile_options._only_load_props
1194 ):
1195 return self._immediateload_create_row_processor(
1196 context,
1197 query_entity,
1198 path,
1199 loadopt,
1200 mapper,
1201 result,
1202 adapter,
1203 populators,
1204 )
1205
1206 if not self.is_class_level or (loadopt and loadopt._extra_criteria):
1207 # we are not the primary manager for this attribute
1208 # on this class - set up a
1209 # per-instance lazyloader, which will override the
1210 # class-level behavior.
1211 # this currently only happens when using a
1212 # "lazyload" option on a "no load"
1213 # attribute - "eager" attributes always have a
1214 # class-level lazyloader installed.
1215 set_lazy_callable = (
1216 InstanceState._instance_level_callable_processor
1217 )(
1218 mapper.class_manager,
1219 _LoadLazyAttribute(
1220 key,
1221 self,
1222 loadopt,
1223 (
1224 loadopt._generate_extra_criteria(context)
1225 if loadopt._extra_criteria
1226 else None
1227 ),
1228 ),
1229 key,
1230 )
1231
1232 populators["new"].append((self.key, set_lazy_callable))
1233 elif context.populate_existing or mapper.always_refresh:
1234
1235 def reset_for_lazy_callable(state, dict_, row):
1236 # we are the primary manager for this attribute on
1237 # this class - reset its
1238 # per-instance attribute state, so that the class-level
1239 # lazy loader is
1240 # executed when next referenced on this instance.
1241 # this is needed in
1242 # populate_existing() types of scenarios to reset
1243 # any existing state.
1244 state._reset(dict_, key)
1245
1246 populators["new"].append((self.key, reset_for_lazy_callable))
1247
1248
1249class _LoadLazyAttribute:
1250 """semi-serializable loader object used by LazyLoader
1251
1252 Historically, this object would be carried along with instances that
1253 needed to run lazyloaders, so it had to be serializable to support
1254 cached instances.
1255
1256 this is no longer a general requirement, and the case where this object
1257 is used is exactly the case where we can't really serialize easily,
1258 which is when extra criteria in the loader option is present.
1259
1260 We can't reliably serialize that as it refers to mapped entities and
1261 AliasedClass objects that are local to the current process, which would
1262 need to be matched up on deserialize e.g. the sqlalchemy.ext.serializer
1263 approach.
1264
1265 """
1266
1267 def __init__(self, key, initiating_strategy, loadopt, extra_criteria):
1268 self.key = key
1269 self.strategy_key = initiating_strategy.strategy_key
1270 self.loadopt = loadopt
1271 self.extra_criteria = extra_criteria
1272
1273 def __getstate__(self):
1274 if self.extra_criteria is not None:
1275 util.warn(
1276 "Can't reliably serialize a lazyload() option that "
1277 "contains additional criteria; please use eager loading "
1278 "for this case"
1279 )
1280 return {
1281 "key": self.key,
1282 "strategy_key": self.strategy_key,
1283 "loadopt": self.loadopt,
1284 "extra_criteria": (),
1285 }
1286
1287 def __call__(self, state, passive=attributes.PASSIVE_OFF):
1288 key = self.key
1289 instance_mapper = state.manager.mapper
1290 prop = instance_mapper._props[key]
1291 strategy = prop._strategies[self.strategy_key]
1292
1293 return strategy._load_for_state(
1294 state,
1295 passive,
1296 loadopt=self.loadopt,
1297 extra_criteria=self.extra_criteria,
1298 )
1299
1300
1301class _PostLoader(_AbstractRelationshipLoader):
1302 """A relationship loader that emits a second SELECT statement."""
1303
1304 __slots__ = ()
1305
1306 def _setup_for_recursion(self, context, path, loadopt, join_depth=None):
1307 effective_path = (
1308 context.compile_state.current_path or orm_util.PathRegistry.root
1309 ) + path
1310
1311 top_level_context = context._get_top_level_context()
1312 execution_options = util.immutabledict(
1313 {"sa_top_level_orm_context": top_level_context}
1314 )
1315
1316 if loadopt:
1317 recursion_depth = loadopt.local_opts.get("recursion_depth", None)
1318 unlimited_recursion = recursion_depth == -1
1319 else:
1320 recursion_depth = None
1321 unlimited_recursion = False
1322
1323 if recursion_depth is not None:
1324 if not self.parent_property._is_self_referential:
1325 raise sa_exc.InvalidRequestError(
1326 f"recursion_depth option on relationship "
1327 f"{self.parent_property} not valid for "
1328 "non-self-referential relationship"
1329 )
1330 recursion_depth = context.execution_options.get(
1331 f"_recursion_depth_{id(self)}", recursion_depth
1332 )
1333
1334 if not unlimited_recursion and recursion_depth < 0:
1335 return (
1336 effective_path,
1337 False,
1338 execution_options,
1339 recursion_depth,
1340 )
1341
1342 if not unlimited_recursion:
1343 execution_options = execution_options.union(
1344 {
1345 f"_recursion_depth_{id(self)}": recursion_depth - 1,
1346 }
1347 )
1348
1349 if loading._PostLoad.path_exists(
1350 context, effective_path, self.parent_property
1351 ):
1352 return effective_path, False, execution_options, recursion_depth
1353
1354 path_w_prop = path[self.parent_property]
1355 effective_path_w_prop = effective_path[self.parent_property]
1356
1357 if not path_w_prop.contains(context.attributes, "loader"):
1358 if join_depth:
1359 if effective_path_w_prop.length / 2 > join_depth:
1360 return (
1361 effective_path,
1362 False,
1363 execution_options,
1364 recursion_depth,
1365 )
1366 elif effective_path_w_prop.contains_mapper(self.mapper):
1367 return (
1368 effective_path,
1369 False,
1370 execution_options,
1371 recursion_depth,
1372 )
1373
1374 return effective_path, True, execution_options, recursion_depth
1375
1376
1377@relationships.RelationshipProperty.strategy_for(lazy="immediate")
1378class _ImmediateLoader(_PostLoader):
1379 __slots__ = ("join_depth",)
1380
1381 def __init__(self, parent, strategy_key):
1382 super().__init__(parent, strategy_key)
1383 self.join_depth = self.parent_property.join_depth
1384
1385 def init_class_attribute(self, mapper):
1386 self.parent_property._get_strategy(
1387 (("lazy", "select"),)
1388 ).init_class_attribute(mapper)
1389
1390 def create_row_processor(
1391 self,
1392 context,
1393 query_entity,
1394 path,
1395 loadopt,
1396 mapper,
1397 result,
1398 adapter,
1399 populators,
1400 ):
1401 if not context.compile_state.compile_options._enable_eagerloads:
1402 return
1403
1404 (
1405 effective_path,
1406 run_loader,
1407 execution_options,
1408 recursion_depth,
1409 ) = self._setup_for_recursion(context, path, loadopt, self.join_depth)
1410
1411 if not run_loader:
1412 # this will not emit SQL and will only emit for a many-to-one
1413 # "use get" load. the "_RELATED" part means it may return
1414 # instance even if its expired, since this is a mutually-recursive
1415 # load operation.
1416 flags = attributes.PASSIVE_NO_FETCH_RELATED | PassiveFlag.NO_RAISE
1417 else:
1418 flags = attributes.PASSIVE_OFF | PassiveFlag.NO_RAISE
1419
1420 loading._PostLoad.callable_for_path(
1421 context,
1422 effective_path,
1423 self.parent,
1424 self.parent_property,
1425 self._load_for_path,
1426 loadopt,
1427 flags,
1428 recursion_depth,
1429 execution_options,
1430 )
1431
1432 def _load_for_path(
1433 self,
1434 context,
1435 path,
1436 states,
1437 load_only,
1438 loadopt,
1439 flags,
1440 recursion_depth,
1441 execution_options,
1442 ):
1443 if recursion_depth:
1444 new_opt = Load(loadopt.path.entity)
1445 new_opt.context = (
1446 loadopt,
1447 loadopt._recurse(),
1448 )
1449 alternate_effective_path = path._truncate_recursive()
1450 extra_options = (new_opt,)
1451 else:
1452 alternate_effective_path = path
1453 extra_options = ()
1454
1455 key = self.key
1456 lazyloader = self.parent_property._get_strategy((("lazy", "select"),))
1457 for state, overwrite in states:
1458 dict_ = state.dict
1459
1460 if overwrite or key not in dict_:
1461 value = lazyloader._load_for_state(
1462 state,
1463 flags,
1464 extra_options=extra_options,
1465 alternate_effective_path=alternate_effective_path,
1466 execution_options=execution_options,
1467 )
1468 if value not in (
1469 ATTR_WAS_SET,
1470 LoaderCallableStatus.PASSIVE_NO_RESULT,
1471 ):
1472 state.get_impl(key).set_committed_value(
1473 state, dict_, value
1474 )
1475
1476
1477@log.class_logger
1478@relationships.RelationshipProperty.strategy_for(lazy="subquery")
1479class _SubqueryLoader(_PostLoader):
1480 __slots__ = ("join_depth",)
1481
1482 def __init__(self, parent, strategy_key):
1483 super().__init__(parent, strategy_key)
1484 self.join_depth = self.parent_property.join_depth
1485
1486 def init_class_attribute(self, mapper):
1487 self.parent_property._get_strategy(
1488 (("lazy", "select"),)
1489 ).init_class_attribute(mapper)
1490
1491 def _get_leftmost(
1492 self,
1493 orig_query_entity_index,
1494 subq_path,
1495 current_compile_state,
1496 is_root,
1497 ):
1498 given_subq_path = subq_path
1499 subq_path = subq_path.path
1500 subq_mapper = orm_util._class_to_mapper(subq_path[0])
1501
1502 # determine attributes of the leftmost mapper
1503 if (
1504 self.parent.isa(subq_mapper)
1505 and self.parent_property is subq_path[1]
1506 ):
1507 leftmost_mapper, leftmost_prop = self.parent, self.parent_property
1508 else:
1509 leftmost_mapper, leftmost_prop = subq_mapper, subq_path[1]
1510
1511 if is_root:
1512 # the subq_path is also coming from cached state, so when we start
1513 # building up this path, it has to also be converted to be in terms
1514 # of the current state. this is for the specific case of the entity
1515 # is an AliasedClass against a subquery that's not otherwise going
1516 # to adapt
1517 new_subq_path = current_compile_state._entities[
1518 orig_query_entity_index
1519 ].entity_zero._path_registry[leftmost_prop]
1520 additional = len(subq_path) - len(new_subq_path)
1521 if additional:
1522 new_subq_path += path_registry.PathRegistry.coerce(
1523 subq_path[-additional:]
1524 )
1525 else:
1526 new_subq_path = given_subq_path
1527
1528 leftmost_cols = leftmost_prop.local_columns
1529
1530 leftmost_attr = [
1531 getattr(
1532 new_subq_path.path[0].entity,
1533 leftmost_mapper._columntoproperty[c].key,
1534 )
1535 for c in leftmost_cols
1536 ]
1537
1538 return leftmost_mapper, leftmost_attr, leftmost_prop, new_subq_path
1539
1540 def _generate_from_original_query(
1541 self,
1542 orig_compile_state,
1543 orig_query,
1544 leftmost_mapper,
1545 leftmost_attr,
1546 leftmost_relationship,
1547 orig_entity,
1548 ):
1549 # reformat the original query
1550 # to look only for significant columns
1551 q = orig_query._clone().correlate(None)
1552
1553 # LEGACY: make a Query back from the select() !!
1554 # This suits at least two legacy cases:
1555 # 1. applications which expect before_compile() to be called
1556 # below when we run .subquery() on this query (Keystone)
1557 # 2. applications which are doing subqueryload with complex
1558 # from_self() queries, as query.subquery() / .statement
1559 # has to do the full compile context for multiply-nested
1560 # from_self() (Neutron) - see test_subqload_from_self
1561 # for demo.
1562 q2 = query.Query.__new__(query.Query)
1563 q2.__dict__.update(q.__dict__)
1564 q = q2
1565
1566 # set the query's "FROM" list explicitly to what the
1567 # FROM list would be in any case, as we will be limiting
1568 # the columns in the SELECT list which may no longer include
1569 # all entities mentioned in things like WHERE, JOIN, etc.
1570 if not q._from_obj:
1571 q._enable_assertions = False
1572 q.select_from.non_generative(
1573 q,
1574 *{
1575 ent["entity"]
1576 for ent in _column_descriptions(
1577 orig_query, compile_state=orig_compile_state
1578 )
1579 if ent["entity"] is not None
1580 },
1581 )
1582
1583 # select from the identity columns of the outer (specifically, these
1584 # are the 'local_cols' of the property). This will remove other
1585 # columns from the query that might suggest the right entity which is
1586 # why we do set select_from above. The attributes we have are
1587 # coerced and adapted using the original query's adapter, which is
1588 # needed only for the case of adapting a subclass column to
1589 # that of a polymorphic selectable, e.g. we have
1590 # Engineer.primary_language and the entity is Person. All other
1591 # adaptations, e.g. from_self, select_entity_from(), will occur
1592 # within the new query when it compiles, as the compile_state we are
1593 # using here is only a partial one. If the subqueryload is from a
1594 # with_polymorphic() or other aliased() object, left_attr will already
1595 # be the correct attributes so no adaptation is needed.
1596 target_cols = orig_compile_state._adapt_col_list(
1597 [
1598 sql.coercions.expect(sql.roles.ColumnsClauseRole, o)
1599 for o in leftmost_attr
1600 ],
1601 orig_compile_state._get_current_adapter(),
1602 )
1603 q._raw_columns = target_cols
1604
1605 distinct_target_key = leftmost_relationship.distinct_target_key
1606
1607 if distinct_target_key is True:
1608 q._distinct = True
1609 elif distinct_target_key is None:
1610 # if target_cols refer to a non-primary key or only
1611 # part of a composite primary key, set the q as distinct
1612 for t in {c.table for c in target_cols}:
1613 if not set(target_cols).issuperset(t.primary_key):
1614 q._distinct = True
1615 break
1616
1617 # don't need ORDER BY if no limit/offset
1618 if not q._has_row_limiting_clause:
1619 q._order_by_clauses = ()
1620
1621 if q._distinct is True and q._order_by_clauses:
1622 # the logic to automatically add the order by columns to the query
1623 # when distinct is True is deprecated in the query
1624 to_add = sql_util.expand_column_list_from_order_by(
1625 target_cols, q._order_by_clauses
1626 )
1627 if to_add:
1628 q._set_entities(target_cols + to_add)
1629
1630 # the original query now becomes a subquery
1631 # which we'll join onto.
1632 # LEGACY: as "q" is a Query, the before_compile() event is invoked
1633 # here.
1634 embed_q = q.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL).subquery()
1635 left_alias = orm_util.AliasedClass(
1636 leftmost_mapper, embed_q, use_mapper_path=True
1637 )
1638 return left_alias
1639
1640 def _prep_for_joins(self, left_alias, subq_path):
1641 # figure out what's being joined. a.k.a. the fun part
1642 to_join = []
1643 pairs = list(subq_path.pairs())
1644
1645 for i, (mapper, prop) in enumerate(pairs):
1646 if i > 0:
1647 # look at the previous mapper in the chain -
1648 # if it is as or more specific than this prop's
1649 # mapper, use that instead.
1650 # note we have an assumption here that
1651 # the non-first element is always going to be a mapper,
1652 # not an AliasedClass
1653
1654 prev_mapper = pairs[i - 1][1].mapper
1655 to_append = prev_mapper if prev_mapper.isa(mapper) else mapper
1656 else:
1657 to_append = mapper
1658
1659 to_join.append((to_append, prop.key))
1660
1661 # determine the immediate parent class we are joining from,
1662 # which needs to be aliased.
1663
1664 if len(to_join) < 2:
1665 # in the case of a one level eager load, this is the
1666 # leftmost "left_alias".
1667 parent_alias = left_alias
1668 else:
1669 info = inspect(to_join[-1][0])
1670 if info.is_aliased_class:
1671 parent_alias = info.entity
1672 else:
1673 # alias a plain mapper as we may be
1674 # joining multiple times
1675 parent_alias = orm_util.AliasedClass(
1676 info.entity, use_mapper_path=True
1677 )
1678
1679 local_cols = self.parent_property.local_columns
1680
1681 local_attr = [
1682 getattr(parent_alias, self.parent._columntoproperty[c].key)
1683 for c in local_cols
1684 ]
1685 return to_join, local_attr, parent_alias
1686
1687 def _apply_joins(
1688 self, q, to_join, left_alias, parent_alias, effective_entity
1689 ):
1690 ltj = len(to_join)
1691 if ltj == 1:
1692 to_join = [
1693 getattr(left_alias, to_join[0][1]).of_type(effective_entity)
1694 ]
1695 elif ltj == 2:
1696 to_join = [
1697 getattr(left_alias, to_join[0][1]).of_type(parent_alias),
1698 getattr(parent_alias, to_join[-1][1]).of_type(
1699 effective_entity
1700 ),
1701 ]
1702 elif ltj > 2:
1703 middle = [
1704 (
1705 (
1706 orm_util.AliasedClass(item[0])
1707 if not inspect(item[0]).is_aliased_class
1708 else item[0].entity
1709 ),
1710 item[1],
1711 )
1712 for item in to_join[1:-1]
1713 ]
1714 inner = []
1715
1716 while middle:
1717 item = middle.pop(0)
1718 attr = getattr(item[0], item[1])
1719 if middle:
1720 attr = attr.of_type(middle[0][0])
1721 else:
1722 attr = attr.of_type(parent_alias)
1723
1724 inner.append(attr)
1725
1726 to_join = (
1727 [getattr(left_alias, to_join[0][1]).of_type(inner[0].parent)]
1728 + inner
1729 + [
1730 getattr(parent_alias, to_join[-1][1]).of_type(
1731 effective_entity
1732 )
1733 ]
1734 )
1735
1736 for attr in to_join:
1737 q = q.join(attr)
1738
1739 return q
1740
1741 def _setup_options(
1742 self,
1743 context,
1744 q,
1745 subq_path,
1746 rewritten_path,
1747 orig_query,
1748 effective_entity,
1749 loadopt,
1750 ):
1751 # note that because the subqueryload object
1752 # does not reuse the cached query, instead always making
1753 # use of the current invoked query, while we have two queries
1754 # here (orig and context.query), they are both non-cached
1755 # queries and we can transfer the options as is without
1756 # adjusting for new criteria. Some work on #6881 / #6889
1757 # brought this into question.
1758 new_options = orig_query._with_options
1759
1760 if loadopt and loadopt._extra_criteria:
1761 new_options += (
1762 orm_util.LoaderCriteriaOption(
1763 self.entity,
1764 loadopt._generate_extra_criteria(context),
1765 ),
1766 )
1767
1768 # propagate loader options etc. to the new query.
1769 # these will fire relative to subq_path.
1770 q = q._with_current_path(rewritten_path)
1771 q = q.options(*new_options)
1772
1773 return q
1774
1775 def _setup_outermost_orderby(self, q):
1776 if self.parent_property.order_by:
1777
1778 def _setup_outermost_orderby(compile_context):
1779 compile_context.eager_order_by += tuple(
1780 util.to_list(self.parent_property.order_by)
1781 )
1782
1783 q = q._add_compile_state_func(
1784 _setup_outermost_orderby, self.parent_property
1785 )
1786
1787 return q
1788
1789 class _SubqCollections:
1790 """Given a :class:`_query.Query` used to emit the "subquery load",
1791 provide a load interface that executes the query at the
1792 first moment a value is needed.
1793
1794 """
1795
1796 __slots__ = (
1797 "session",
1798 "execution_options",
1799 "load_options",
1800 "params",
1801 "subq",
1802 "_data",
1803 )
1804
1805 def __init__(self, context, subq):
1806 # avoid creating a cycle by storing context
1807 # even though that's preferable
1808 self.session = context.session
1809 self.execution_options = context.execution_options
1810 self.load_options = context.load_options
1811 self.params = context.params or {}
1812 self.subq = subq
1813 self._data = None
1814
1815 def get(self, key, default):
1816 if self._data is None:
1817 self._load()
1818 return self._data.get(key, default)
1819
1820 def _load(self):
1821 self._data = collections.defaultdict(list)
1822
1823 q = self.subq
1824 assert q.session is None
1825
1826 q = q.with_session(self.session)
1827
1828 if self.load_options._populate_existing:
1829 q = q.populate_existing()
1830 # to work with baked query, the parameters may have been
1831 # updated since this query was created, so take these into account
1832
1833 rows = list(q.params(self.params))
1834 for k, v in itertools.groupby(rows, lambda x: x[1:]):
1835 self._data[k].extend(vv[0] for vv in v)
1836
1837 def loader(self, state, dict_, row):
1838 if self._data is None:
1839 self._load()
1840
1841 def _setup_query_from_rowproc(
1842 self,
1843 context,
1844 query_entity,
1845 path,
1846 entity,
1847 loadopt,
1848 adapter,
1849 ):
1850 compile_state = context.compile_state
1851 if (
1852 not compile_state.compile_options._enable_eagerloads
1853 or compile_state.compile_options._for_refresh_state
1854 ):
1855 return
1856
1857 orig_query_entity_index = compile_state._entities.index(query_entity)
1858 context.loaders_require_buffering = True
1859
1860 path = path[self.parent_property]
1861
1862 # build up a path indicating the path from the leftmost
1863 # entity to the thing we're subquery loading.
1864 with_poly_entity = path.get(
1865 compile_state.attributes, "path_with_polymorphic", None
1866 )
1867 if with_poly_entity is not None:
1868 effective_entity = with_poly_entity
1869 else:
1870 effective_entity = self.entity
1871
1872 subq_path, rewritten_path = context.query._execution_options.get(
1873 ("subquery_paths", None),
1874 (orm_util.PathRegistry.root, orm_util.PathRegistry.root),
1875 )
1876 is_root = subq_path is orm_util.PathRegistry.root
1877 subq_path = subq_path + path
1878 rewritten_path = rewritten_path + path
1879
1880 # use the current query being invoked, not the compile state
1881 # one. this is so that we get the current parameters. however,
1882 # it means we can't use the existing compile state, we have to make
1883 # a new one. other approaches include possibly using the
1884 # compiled query but swapping the params, seems only marginally
1885 # less time spent but more complicated
1886 orig_query = context.query._execution_options.get(
1887 ("orig_query", _SubqueryLoader), context.query
1888 )
1889
1890 # make a new compile_state for the query that's probably cached, but
1891 # we're sort of undoing a bit of that caching :(
1892 compile_state_cls = _ORMCompileState._get_plugin_class_for_plugin(
1893 orig_query, "orm"
1894 )
1895
1896 if orig_query._is_lambda_element:
1897 if context.load_options._lazy_loaded_from is None:
1898 util.warn(
1899 'subqueryloader for "%s" must invoke lambda callable '
1900 "at %r in "
1901 "order to produce a new query, decreasing the efficiency "
1902 "of caching for this statement. Consider using "
1903 "selectinload() for more effective full-lambda caching"
1904 % (self, orig_query)
1905 )
1906 orig_query = orig_query._resolved
1907
1908 # this is the more "quick" version, however it's not clear how
1909 # much of this we need. in particular I can't get a test to
1910 # fail if the "set_base_alias" is missing and not sure why that is.
1911 orig_compile_state = compile_state_cls._create_entities_collection(
1912 orig_query, legacy=False
1913 )
1914
1915 (
1916 leftmost_mapper,
1917 leftmost_attr,
1918 leftmost_relationship,
1919 rewritten_path,
1920 ) = self._get_leftmost(
1921 orig_query_entity_index,
1922 rewritten_path,
1923 orig_compile_state,
1924 is_root,
1925 )
1926
1927 # generate a new Query from the original, then
1928 # produce a subquery from it.
1929 left_alias = self._generate_from_original_query(
1930 orig_compile_state,
1931 orig_query,
1932 leftmost_mapper,
1933 leftmost_attr,
1934 leftmost_relationship,
1935 entity,
1936 )
1937
1938 # generate another Query that will join the
1939 # left alias to the target relationships.
1940 # basically doing a longhand
1941 # "from_self()". (from_self() itself not quite industrial
1942 # strength enough for all contingencies...but very close)
1943
1944 q = query.Query(effective_entity)
1945
1946 q._execution_options = context.query._execution_options.merge_with(
1947 context.execution_options,
1948 {
1949 ("orig_query", _SubqueryLoader): orig_query,
1950 ("subquery_paths", None): (subq_path, rewritten_path),
1951 },
1952 )
1953
1954 q = q._set_enable_single_crit(False)
1955 to_join, local_attr, parent_alias = self._prep_for_joins(
1956 left_alias, subq_path
1957 )
1958
1959 q = q.add_columns(*local_attr)
1960 q = self._apply_joins(
1961 q, to_join, left_alias, parent_alias, effective_entity
1962 )
1963
1964 q = self._setup_options(
1965 context,
1966 q,
1967 subq_path,
1968 rewritten_path,
1969 orig_query,
1970 effective_entity,
1971 loadopt,
1972 )
1973 q = self._setup_outermost_orderby(q)
1974
1975 return q
1976
1977 def create_row_processor(
1978 self,
1979 context,
1980 query_entity,
1981 path,
1982 loadopt,
1983 mapper,
1984 result,
1985 adapter,
1986 populators,
1987 ):
1988 if (
1989 loadopt
1990 and context.compile_state.statement is not None
1991 and context.compile_state.statement.is_dml
1992 ):
1993 util.warn_deprecated(
1994 "The subqueryload loader option is not compatible with DML "
1995 "statements such as INSERT, UPDATE. Only SELECT may be used."
1996 "This warning will become an exception in a future release.",
1997 "2.0",
1998 )
1999
2000 if context.refresh_state:
2001 return self._immediateload_create_row_processor(
2002 context,
2003 query_entity,
2004 path,
2005 loadopt,
2006 mapper,
2007 result,
2008 adapter,
2009 populators,
2010 )
2011
2012 _, run_loader, _, _ = self._setup_for_recursion(
2013 context, path, loadopt, self.join_depth
2014 )
2015 if not run_loader:
2016 return
2017
2018 if not isinstance(context.compile_state, _ORMSelectCompileState):
2019 # issue 7505 - subqueryload() in 1.3 and previous would silently
2020 # degrade for from_statement() without warning. this behavior
2021 # is restored here
2022 return
2023
2024 if not self.parent.class_manager[self.key].impl.supports_population:
2025 raise sa_exc.InvalidRequestError(
2026 "'%s' does not support object "
2027 "population - eager loading cannot be applied." % self
2028 )
2029
2030 # a little dance here as the "path" is still something that only
2031 # semi-tracks the exact series of things we are loading, still not
2032 # telling us about with_polymorphic() and stuff like that when it's at
2033 # the root.. the initial MapperEntity is more accurate for this case.
2034 if len(path) == 1:
2035 if not orm_util._entity_isa(query_entity.entity_zero, self.parent):
2036 return
2037 elif not orm_util._entity_isa(path[-1], self.parent):
2038 return
2039
2040 subq = self._setup_query_from_rowproc(
2041 context,
2042 query_entity,
2043 path,
2044 path[-1],
2045 loadopt,
2046 adapter,
2047 )
2048
2049 if subq is None:
2050 return
2051
2052 assert subq.session is None
2053
2054 path = path[self.parent_property]
2055
2056 local_cols = self.parent_property.local_columns
2057
2058 # cache the loaded collections in the context
2059 # so that inheriting mappers don't re-load when they
2060 # call upon create_row_processor again
2061 collections = path.get(context.attributes, "collections")
2062 if collections is None:
2063 collections = self._SubqCollections(context, subq)
2064 path.set(context.attributes, "collections", collections)
2065
2066 if adapter:
2067 local_cols = [adapter.columns[c] for c in local_cols]
2068
2069 if self.uselist:
2070 self._create_collection_loader(
2071 context, result, collections, local_cols, populators
2072 )
2073 else:
2074 self._create_scalar_loader(
2075 context, result, collections, local_cols, populators
2076 )
2077
2078 def _create_collection_loader(
2079 self, context, result, collections, local_cols, populators
2080 ):
2081 tuple_getter = result._tuple_getter(local_cols)
2082
2083 def load_collection_from_subq(state, dict_, row):
2084 collection = collections.get(tuple_getter(row), ())
2085 state.get_impl(self.key).set_committed_value(
2086 state, dict_, collection
2087 )
2088
2089 def load_collection_from_subq_existing_row(state, dict_, row):
2090 if self.key not in dict_:
2091 load_collection_from_subq(state, dict_, row)
2092
2093 populators["new"].append((self.key, load_collection_from_subq))
2094 populators["existing"].append(
2095 (self.key, load_collection_from_subq_existing_row)
2096 )
2097
2098 if context.invoke_all_eagers:
2099 populators["eager"].append((self.key, collections.loader))
2100
2101 def _create_scalar_loader(
2102 self, context, result, collections, local_cols, populators
2103 ):
2104 tuple_getter = result._tuple_getter(local_cols)
2105
2106 def load_scalar_from_subq(state, dict_, row):
2107 collection = collections.get(tuple_getter(row), (None,))
2108 if len(collection) > 1:
2109 util.warn(
2110 "Multiple rows returned with "
2111 "uselist=False for eagerly-loaded attribute '%s' " % self
2112 )
2113
2114 scalar = collection[0]
2115 state.get_impl(self.key).set_committed_value(state, dict_, scalar)
2116
2117 def load_scalar_from_subq_existing_row(state, dict_, row):
2118 if self.key not in dict_:
2119 load_scalar_from_subq(state, dict_, row)
2120
2121 populators["new"].append((self.key, load_scalar_from_subq))
2122 populators["existing"].append(
2123 (self.key, load_scalar_from_subq_existing_row)
2124 )
2125 if context.invoke_all_eagers:
2126 populators["eager"].append((self.key, collections.loader))
2127
2128
2129@log.class_logger
2130@relationships.RelationshipProperty.strategy_for(lazy="joined")
2131@relationships.RelationshipProperty.strategy_for(lazy=False)
2132class _JoinedLoader(_AbstractRelationshipLoader):
2133 """Provide loading behavior for a :class:`.Relationship`
2134 using joined eager loading.
2135
2136 """
2137
2138 __slots__ = "join_depth"
2139
2140 def __init__(self, parent, strategy_key):
2141 super().__init__(parent, strategy_key)
2142 self.join_depth = self.parent_property.join_depth
2143
2144 def init_class_attribute(self, mapper):
2145 self.parent_property._get_strategy(
2146 (("lazy", "select"),)
2147 ).init_class_attribute(mapper)
2148
2149 def setup_query(
2150 self,
2151 compile_state,
2152 query_entity,
2153 path,
2154 loadopt,
2155 adapter,
2156 column_collection=None,
2157 parentmapper=None,
2158 chained_from_outerjoin=False,
2159 **kwargs,
2160 ):
2161 """Add a left outer join to the statement that's being constructed."""
2162
2163 if not compile_state.compile_options._enable_eagerloads:
2164 return
2165 elif (
2166 loadopt
2167 and compile_state.statement is not None
2168 and compile_state.statement.is_dml
2169 ):
2170 util.warn_deprecated(
2171 "The joinedload loader option is not compatible with DML "
2172 "statements such as INSERT, UPDATE. Only SELECT may be used."
2173 "This warning will become an exception in a future release.",
2174 "2.0",
2175 )
2176 elif self.uselist:
2177 compile_state.multi_row_eager_loaders = True
2178
2179 path = path[self.parent_property]
2180
2181 user_defined_adapter = (
2182 self._init_user_defined_eager_proc(
2183 loadopt, compile_state, compile_state.attributes
2184 )
2185 if loadopt
2186 else False
2187 )
2188
2189 if user_defined_adapter is not False:
2190 # setup an adapter but dont create any JOIN, assume it's already
2191 # in the query
2192 (
2193 clauses,
2194 adapter,
2195 add_to_collection,
2196 ) = self._setup_query_on_user_defined_adapter(
2197 compile_state,
2198 query_entity,
2199 path,
2200 adapter,
2201 user_defined_adapter,
2202 )
2203
2204 # don't do "wrap" for multi-row, we want to wrap
2205 # limited/distinct SELECT,
2206 # because we want to put the JOIN on the outside.
2207
2208 else:
2209 # if not via query option, check for
2210 # a cycle
2211 if not path.contains(compile_state.attributes, "loader"):
2212 if self.join_depth:
2213 if path.length / 2 > self.join_depth:
2214 return
2215 elif path.contains_mapper(self.mapper):
2216 return
2217
2218 # add the JOIN and create an adapter
2219 (
2220 clauses,
2221 adapter,
2222 add_to_collection,
2223 chained_from_outerjoin,
2224 ) = self._generate_row_adapter(
2225 compile_state,
2226 query_entity,
2227 path,
2228 loadopt,
2229 adapter,
2230 column_collection,
2231 parentmapper,
2232 chained_from_outerjoin,
2233 )
2234
2235 # for multi-row, we want to wrap limited/distinct SELECT,
2236 # because we want to put the JOIN on the outside.
2237 compile_state.eager_adding_joins = True
2238
2239 with_poly_entity = path.get(
2240 compile_state.attributes, "path_with_polymorphic", None
2241 )
2242 if with_poly_entity is not None:
2243 with_polymorphic = inspect(
2244 with_poly_entity
2245 ).with_polymorphic_mappers
2246 else:
2247 with_polymorphic = None
2248
2249 path = path[self.entity]
2250
2251 loading._setup_entity_query(
2252 compile_state,
2253 self.mapper,
2254 query_entity,
2255 path,
2256 clauses,
2257 add_to_collection,
2258 with_polymorphic=with_polymorphic,
2259 parentmapper=self.mapper,
2260 chained_from_outerjoin=chained_from_outerjoin,
2261 )
2262
2263 has_nones = util.NONE_SET.intersection(compile_state.secondary_columns)
2264
2265 if has_nones:
2266 if with_poly_entity is not None:
2267 raise sa_exc.InvalidRequestError(
2268 "Detected unaliased columns when generating joined "
2269 "load. Make sure to use aliased=True or flat=True "
2270 "when using joined loading with with_polymorphic()."
2271 )
2272 else:
2273 compile_state.secondary_columns = [
2274 c for c in compile_state.secondary_columns if c is not None
2275 ]
2276
2277 def _init_user_defined_eager_proc(
2278 self, loadopt, compile_state, target_attributes
2279 ):
2280 # check if the opt applies at all
2281 if "eager_from_alias" not in loadopt.local_opts:
2282 # nope
2283 return False
2284
2285 path = loadopt.path.parent
2286
2287 # the option applies. check if the "user_defined_eager_row_processor"
2288 # has been built up.
2289 adapter = path.get(
2290 compile_state.attributes, "user_defined_eager_row_processor", False
2291 )
2292 if adapter is not False:
2293 # just return it
2294 return adapter
2295
2296 # otherwise figure it out.
2297 alias = loadopt.local_opts["eager_from_alias"]
2298 root_mapper, prop = path[-2:]
2299
2300 if alias is not None:
2301 if isinstance(alias, str):
2302 alias = prop.target.alias(alias)
2303 adapter = orm_util.ORMAdapter(
2304 orm_util._TraceAdaptRole.JOINEDLOAD_USER_DEFINED_ALIAS,
2305 prop.mapper,
2306 selectable=alias,
2307 equivalents=prop.mapper._equivalent_columns,
2308 limit_on_entity=False,
2309 )
2310 else:
2311 if path.contains(
2312 compile_state.attributes, "path_with_polymorphic"
2313 ):
2314 with_poly_entity = path.get(
2315 compile_state.attributes, "path_with_polymorphic"
2316 )
2317 adapter = orm_util.ORMAdapter(
2318 orm_util._TraceAdaptRole.JOINEDLOAD_PATH_WITH_POLYMORPHIC,
2319 with_poly_entity,
2320 equivalents=prop.mapper._equivalent_columns,
2321 )
2322 else:
2323 adapter = compile_state._polymorphic_adapters.get(
2324 prop.mapper, None
2325 )
2326 path.set(
2327 target_attributes,
2328 "user_defined_eager_row_processor",
2329 adapter,
2330 )
2331
2332 return adapter
2333
2334 def _setup_query_on_user_defined_adapter(
2335 self, context, entity, path, adapter, user_defined_adapter
2336 ):
2337 # apply some more wrapping to the "user defined adapter"
2338 # if we are setting up the query for SQL render.
2339 adapter = entity._get_entity_clauses(context)
2340
2341 if adapter and user_defined_adapter:
2342 user_defined_adapter = user_defined_adapter.wrap(adapter)
2343 path.set(
2344 context.attributes,
2345 "user_defined_eager_row_processor",
2346 user_defined_adapter,
2347 )
2348 elif adapter:
2349 user_defined_adapter = adapter
2350 path.set(
2351 context.attributes,
2352 "user_defined_eager_row_processor",
2353 user_defined_adapter,
2354 )
2355
2356 add_to_collection = context.primary_columns
2357 return user_defined_adapter, adapter, add_to_collection
2358
2359 def _generate_row_adapter(
2360 self,
2361 compile_state,
2362 entity,
2363 path,
2364 loadopt,
2365 adapter,
2366 column_collection,
2367 parentmapper,
2368 chained_from_outerjoin,
2369 ):
2370 with_poly_entity = path.get(
2371 compile_state.attributes, "path_with_polymorphic", None
2372 )
2373 if with_poly_entity:
2374 to_adapt = with_poly_entity
2375 else:
2376 insp = inspect(self.entity)
2377 if insp.is_aliased_class:
2378 alt_selectable = insp.selectable
2379 else:
2380 alt_selectable = None
2381
2382 to_adapt = orm_util.AliasedClass(
2383 self.mapper,
2384 alias=(
2385 alt_selectable._anonymous_fromclause(flat=True)
2386 if alt_selectable is not None
2387 else None
2388 ),
2389 flat=True,
2390 use_mapper_path=True,
2391 )
2392
2393 to_adapt_insp = inspect(to_adapt)
2394
2395 clauses = to_adapt_insp._memo(
2396 ("joinedloader_ormadapter", self),
2397 orm_util.ORMAdapter,
2398 orm_util._TraceAdaptRole.JOINEDLOAD_MEMOIZED_ADAPTER,
2399 to_adapt_insp,
2400 equivalents=self.mapper._equivalent_columns,
2401 adapt_required=True,
2402 allow_label_resolve=False,
2403 anonymize_labels=True,
2404 )
2405
2406 assert clauses.is_aliased_class
2407
2408 innerjoin = (
2409 loadopt.local_opts.get("innerjoin", self.parent_property.innerjoin)
2410 if loadopt is not None
2411 else self.parent_property.innerjoin
2412 )
2413
2414 if not innerjoin:
2415 # if this is an outer join, all non-nested eager joins from
2416 # this path must also be outer joins
2417 chained_from_outerjoin = True
2418
2419 compile_state.create_eager_joins.append(
2420 (
2421 self._create_eager_join,
2422 entity,
2423 path,
2424 adapter,
2425 parentmapper,
2426 clauses,
2427 innerjoin,
2428 chained_from_outerjoin,
2429 loadopt._extra_criteria if loadopt else (),
2430 )
2431 )
2432
2433 add_to_collection = compile_state.secondary_columns
2434 path.set(compile_state.attributes, "eager_row_processor", clauses)
2435
2436 return clauses, adapter, add_to_collection, chained_from_outerjoin
2437
2438 def _create_eager_join(
2439 self,
2440 compile_state,
2441 query_entity,
2442 path,
2443 adapter,
2444 parentmapper,
2445 clauses,
2446 innerjoin,
2447 chained_from_outerjoin,
2448 extra_criteria,
2449 ):
2450 if parentmapper is None:
2451 localparent = query_entity.mapper
2452 else:
2453 localparent = parentmapper
2454
2455 # whether or not the Query will wrap the selectable in a subquery,
2456 # and then attach eager load joins to that (i.e., in the case of
2457 # LIMIT/OFFSET etc.)
2458 should_nest_selectable = compile_state._should_nest_selectable
2459
2460 query_entity_key = None
2461
2462 if (
2463 query_entity not in compile_state.eager_joins
2464 and not should_nest_selectable
2465 and compile_state.from_clauses
2466 ):
2467 indexes = sql_util.find_left_clause_that_matches_given(
2468 compile_state.from_clauses, query_entity.selectable
2469 )
2470
2471 if len(indexes) > 1:
2472 # for the eager load case, I can't reproduce this right
2473 # now. For query.join() I can.
2474 raise sa_exc.InvalidRequestError(
2475 "Can't identify which query entity in which to joined "
2476 "eager load from. Please use an exact match when "
2477 "specifying the join path."
2478 )
2479
2480 if indexes:
2481 clause = compile_state.from_clauses[indexes[0]]
2482 # join to an existing FROM clause on the query.
2483 # key it to its list index in the eager_joins dict.
2484 # Query._compile_context will adapt as needed and
2485 # append to the FROM clause of the select().
2486 query_entity_key, default_towrap = indexes[0], clause
2487
2488 if query_entity_key is None:
2489 query_entity_key, default_towrap = (
2490 query_entity,
2491 query_entity.selectable,
2492 )
2493
2494 towrap = compile_state.eager_joins.setdefault(
2495 query_entity_key, default_towrap
2496 )
2497
2498 if adapter:
2499 if getattr(adapter, "is_aliased_class", False):
2500 # joining from an adapted entity. The adapted entity
2501 # might be a "with_polymorphic", so resolve that to our
2502 # specific mapper's entity before looking for our attribute
2503 # name on it.
2504 efm = adapter.aliased_insp._entity_for_mapper(
2505 localparent
2506 if localparent.isa(self.parent)
2507 else self.parent
2508 )
2509
2510 # look for our attribute on the adapted entity, else fall back
2511 # to our straight property
2512 onclause = getattr(efm.entity, self.key, self.parent_property)
2513 else:
2514 onclause = getattr(
2515 orm_util.AliasedClass(
2516 self.parent, adapter.selectable, use_mapper_path=True
2517 ),
2518 self.key,
2519 self.parent_property,
2520 )
2521
2522 else:
2523 onclause = self.parent_property
2524
2525 assert clauses.is_aliased_class
2526
2527 attach_on_outside = (
2528 not chained_from_outerjoin
2529 or not innerjoin
2530 or innerjoin == "unnested"
2531 or query_entity.entity_zero.represents_outer_join
2532 )
2533
2534 extra_join_criteria = extra_criteria
2535 additional_entity_criteria = compile_state.global_attributes.get(
2536 ("additional_entity_criteria", self.mapper), ()
2537 )
2538 if additional_entity_criteria:
2539 extra_join_criteria += tuple(
2540 ae._resolve_where_criteria(self.mapper)
2541 for ae in additional_entity_criteria
2542 if ae.propagate_to_loaders
2543 )
2544
2545 if attach_on_outside:
2546 # this is the "classic" eager join case.
2547 eagerjoin = orm_util._ORMJoin(
2548 towrap,
2549 clauses.aliased_insp,
2550 onclause,
2551 isouter=not innerjoin
2552 or query_entity.entity_zero.represents_outer_join
2553 or (chained_from_outerjoin and isinstance(towrap, sql.Join)),
2554 _left_memo=self.parent,
2555 _right_memo=path[self.mapper],
2556 _extra_criteria=extra_join_criteria,
2557 )
2558 else:
2559 # all other cases are innerjoin=='nested' approach
2560 eagerjoin = self._splice_nested_inner_join(
2561 path, path[-2], towrap, clauses, onclause, extra_join_criteria
2562 )
2563
2564 compile_state.eager_joins[query_entity_key] = eagerjoin
2565
2566 # send a hint to the Query as to where it may "splice" this join
2567 eagerjoin.stop_on = query_entity.selectable
2568
2569 if not parentmapper:
2570 # for parentclause that is the non-eager end of the join,
2571 # ensure all the parent cols in the primaryjoin are actually
2572 # in the
2573 # columns clause (i.e. are not deferred), so that aliasing applied
2574 # by the Query propagates those columns outward.
2575 # This has the effect
2576 # of "undefering" those columns.
2577 for col in sql_util._find_columns(
2578 self.parent_property.primaryjoin
2579 ):
2580 if localparent.persist_selectable.c.contains_column(col):
2581 if adapter:
2582 col = adapter.columns[col]
2583 compile_state._append_dedupe_col_collection(
2584 col, compile_state.primary_columns
2585 )
2586
2587 if self.parent_property.order_by:
2588 compile_state.eager_order_by += tuple(
2589 (eagerjoin._target_adapter.copy_and_process)(
2590 util.to_list(self.parent_property.order_by)
2591 )
2592 )
2593
2594 def _splice_nested_inner_join(
2595 self,
2596 path,
2597 entity_we_want_to_splice_onto,
2598 join_obj,
2599 clauses,
2600 onclause,
2601 extra_criteria,
2602 entity_inside_join_structure: Union[
2603 Mapper, None, Literal[False]
2604 ] = False,
2605 detected_existing_path: Optional[path_registry.PathRegistry] = None,
2606 ):
2607 # recursive fn to splice a nested join into an existing one.
2608 # entity_inside_join_structure=False means this is the outermost call,
2609 # and it should return a value. entity_inside_join_structure=<mapper>
2610 # indicates we've descended into a join and are looking at a FROM
2611 # clause representing this mapper; if this is not
2612 # entity_we_want_to_splice_onto then return None to end the recursive
2613 # branch
2614
2615 assert entity_we_want_to_splice_onto is path[-2]
2616
2617 if entity_inside_join_structure is False:
2618 assert isinstance(join_obj, orm_util._ORMJoin)
2619
2620 if isinstance(join_obj, sql.selectable.FromGrouping):
2621 # FromGrouping - continue descending into the structure
2622 return self._splice_nested_inner_join(
2623 path,
2624 entity_we_want_to_splice_onto,
2625 join_obj.element,
2626 clauses,
2627 onclause,
2628 extra_criteria,
2629 entity_inside_join_structure,
2630 )
2631 elif isinstance(join_obj, orm_util._ORMJoin):
2632 # _ORMJoin - continue descending into the structure
2633
2634 join_right_path = join_obj._right_memo
2635
2636 # see if right side of join is viable
2637 target_join = self._splice_nested_inner_join(
2638 path,
2639 entity_we_want_to_splice_onto,
2640 join_obj.right,
2641 clauses,
2642 onclause,
2643 extra_criteria,
2644 entity_inside_join_structure=(
2645 join_right_path[-1].mapper
2646 if join_right_path is not None
2647 else None
2648 ),
2649 )
2650
2651 if target_join is not None:
2652 # for a right splice, attempt to flatten out
2653 # a JOIN b JOIN c JOIN .. to avoid needless
2654 # parenthesis nesting
2655 if not join_obj.isouter and not target_join.isouter:
2656 eagerjoin = join_obj._splice_into_center(target_join)
2657 else:
2658 eagerjoin = orm_util._ORMJoin(
2659 join_obj.left,
2660 target_join,
2661 join_obj.onclause,
2662 isouter=join_obj.isouter,
2663 _left_memo=join_obj._left_memo,
2664 )
2665
2666 eagerjoin._target_adapter = target_join._target_adapter
2667 return eagerjoin
2668
2669 else:
2670 # see if left side of join is viable
2671 target_join = self._splice_nested_inner_join(
2672 path,
2673 entity_we_want_to_splice_onto,
2674 join_obj.left,
2675 clauses,
2676 onclause,
2677 extra_criteria,
2678 entity_inside_join_structure=join_obj._left_memo,
2679 detected_existing_path=join_right_path,
2680 )
2681
2682 if target_join is not None:
2683 eagerjoin = orm_util._ORMJoin(
2684 target_join,
2685 join_obj.right,
2686 join_obj.onclause,
2687 isouter=join_obj.isouter,
2688 _right_memo=join_obj._right_memo,
2689 )
2690 eagerjoin._target_adapter = target_join._target_adapter
2691 return eagerjoin
2692
2693 # neither side viable, return None, or fail if this was the top
2694 # most call
2695 if entity_inside_join_structure is False:
2696 assert (
2697 False
2698 ), "assertion failed attempting to produce joined eager loads"
2699 return None
2700
2701 # reached an endpoint (e.g. a table that's mapped, or an alias of that
2702 # table). determine if we can use this endpoint to splice onto
2703
2704 # is this the entity we want to splice onto in the first place?
2705 if not entity_we_want_to_splice_onto.isa(entity_inside_join_structure):
2706 return None
2707
2708 # path check. if we know the path how this join endpoint got here,
2709 # lets look at our path we are satisfying and see if we're in the
2710 # wrong place. This is specifically for when our entity may
2711 # appear more than once in the path, issue #11449
2712 # updated in issue #11965.
2713 if detected_existing_path and len(detected_existing_path) > 2:
2714 # this assertion is currently based on how this call is made,
2715 # where given a join_obj, the call will have these parameters as
2716 # entity_inside_join_structure=join_obj._left_memo
2717 # and entity_inside_join_structure=join_obj._right_memo.mapper
2718 assert detected_existing_path[-3] is entity_inside_join_structure
2719
2720 # from that, see if the path we are targeting matches the
2721 # "existing" path of this join all the way up to the midpoint
2722 # of this join object (e.g. the relationship).
2723 # if not, then this is not our target
2724 #
2725 # a test condition where this test is false looks like:
2726 #
2727 # desired splice: Node->kind->Kind
2728 # path of desired splice: NodeGroup->nodes->Node->kind
2729 # path we've located: NodeGroup->nodes->Node->common_node->Node
2730 #
2731 # above, because we want to splice kind->Kind onto
2732 # NodeGroup->nodes->Node, this is not our path because it actually
2733 # goes more steps than we want into self-referential
2734 # ->common_node->Node
2735 #
2736 # a test condition where this test is true looks like:
2737 #
2738 # desired splice: B->c2s->C2
2739 # path of desired splice: A->bs->B->c2s
2740 # path we've located: A->bs->B->c1s->C1
2741 #
2742 # above, we want to splice c2s->C2 onto B, and the located path
2743 # shows that the join ends with B->c1s->C1. so we will
2744 # add another join onto that, which would create a "branch" that
2745 # we might represent in a pseudopath as:
2746 #
2747 # B->c1s->C1
2748 # ->c2s->C2
2749 #
2750 # i.e. A JOIN B ON <bs> JOIN C1 ON <c1s>
2751 # JOIN C2 ON <c2s>
2752 #
2753
2754 if detected_existing_path[0:-2] != path.path[0:-1]:
2755 return None
2756
2757 return orm_util._ORMJoin(
2758 join_obj,
2759 clauses.aliased_insp,
2760 onclause,
2761 isouter=False,
2762 _left_memo=entity_inside_join_structure,
2763 _right_memo=path[path[-1].mapper],
2764 _extra_criteria=extra_criteria,
2765 )
2766
2767 def _create_eager_adapter(self, context, result, adapter, path, loadopt):
2768 compile_state = context.compile_state
2769
2770 user_defined_adapter = (
2771 self._init_user_defined_eager_proc(
2772 loadopt, compile_state, context.attributes
2773 )
2774 if loadopt
2775 else False
2776 )
2777
2778 if user_defined_adapter is not False:
2779 decorator = user_defined_adapter
2780 # user defined eagerloads are part of the "primary"
2781 # portion of the load.
2782 # the adapters applied to the Query should be honored.
2783 if compile_state.compound_eager_adapter and decorator:
2784 decorator = decorator.wrap(
2785 compile_state.compound_eager_adapter
2786 )
2787 elif compile_state.compound_eager_adapter:
2788 decorator = compile_state.compound_eager_adapter
2789 else:
2790 decorator = path.get(
2791 compile_state.attributes, "eager_row_processor"
2792 )
2793 if decorator is None:
2794 return False
2795
2796 if self.mapper._result_has_identity_key(result, decorator):
2797 return decorator
2798 else:
2799 # no identity key - don't return a row
2800 # processor, will cause a degrade to lazy
2801 return False
2802
2803 def create_row_processor(
2804 self,
2805 context,
2806 query_entity,
2807 path,
2808 loadopt,
2809 mapper,
2810 result,
2811 adapter,
2812 populators,
2813 ):
2814
2815 if not context.compile_state.compile_options._enable_eagerloads:
2816 return
2817
2818 if not self.parent.class_manager[self.key].impl.supports_population:
2819 raise sa_exc.InvalidRequestError(
2820 "'%s' does not support object "
2821 "population - eager loading cannot be applied." % self
2822 )
2823
2824 if self.uselist:
2825 context.loaders_require_uniquing = True
2826
2827 our_path = path[self.parent_property]
2828
2829 eager_adapter = self._create_eager_adapter(
2830 context, result, adapter, our_path, loadopt
2831 )
2832
2833 if eager_adapter is not False:
2834 key = self.key
2835
2836 _instance = loading._instance_processor(
2837 query_entity,
2838 self.mapper,
2839 context,
2840 result,
2841 our_path[self.entity],
2842 eager_adapter,
2843 )
2844
2845 if not self.uselist:
2846 self._create_scalar_loader(context, key, _instance, populators)
2847 else:
2848 self._create_collection_loader(
2849 context, key, _instance, populators
2850 )
2851 else:
2852 self.parent_property._get_strategy(
2853 (("lazy", "select"),)
2854 ).create_row_processor(
2855 context,
2856 query_entity,
2857 path,
2858 loadopt,
2859 mapper,
2860 result,
2861 adapter,
2862 populators,
2863 )
2864
2865 def _create_collection_loader(self, context, key, _instance, populators):
2866 def load_collection_from_joined_new_row(state, dict_, row):
2867 # note this must unconditionally clear out any existing collection.
2868 # an existing collection would be present only in the case of
2869 # populate_existing().
2870 collection = attributes.init_state_collection(state, dict_, key)
2871 result_list = util.UniqueAppender(
2872 collection, "append_without_event"
2873 )
2874 context.attributes[(state, key)] = result_list
2875 inst = _instance(row)
2876 if inst is not None:
2877 result_list.append(inst)
2878
2879 def load_collection_from_joined_existing_row(state, dict_, row):
2880 if (state, key) in context.attributes:
2881 result_list = context.attributes[(state, key)]
2882 else:
2883 # appender_key can be absent from context.attributes
2884 # with isnew=False when self-referential eager loading
2885 # is used; the same instance may be present in two
2886 # distinct sets of result columns
2887 collection = attributes.init_state_collection(
2888 state, dict_, key
2889 )
2890 result_list = util.UniqueAppender(
2891 collection, "append_without_event"
2892 )
2893 context.attributes[(state, key)] = result_list
2894 inst = _instance(row)
2895 if inst is not None:
2896 result_list.append(inst)
2897
2898 def load_collection_from_joined_exec(state, dict_, row):
2899 _instance(row)
2900
2901 populators["new"].append(
2902 (self.key, load_collection_from_joined_new_row)
2903 )
2904 populators["existing"].append(
2905 (self.key, load_collection_from_joined_existing_row)
2906 )
2907 if context.invoke_all_eagers:
2908 populators["eager"].append(
2909 (self.key, load_collection_from_joined_exec)
2910 )
2911
2912 def _create_scalar_loader(self, context, key, _instance, populators):
2913 def load_scalar_from_joined_new_row(state, dict_, row):
2914 # set a scalar object instance directly on the parent
2915 # object, bypassing InstrumentedAttribute event handlers.
2916 dict_[key] = _instance(row)
2917
2918 def load_scalar_from_joined_existing_row(state, dict_, row):
2919 # call _instance on the row, even though the object has
2920 # been created, so that we further descend into properties
2921 existing = _instance(row)
2922
2923 # conflicting value already loaded, this shouldn't happen
2924 if key in dict_:
2925 if existing is not dict_[key]:
2926 util.warn(
2927 "Multiple rows returned with "
2928 "uselist=False for eagerly-loaded attribute '%s' "
2929 % self
2930 )
2931 else:
2932 # this case is when one row has multiple loads of the
2933 # same entity (e.g. via aliasing), one has an attribute
2934 # that the other doesn't.
2935 dict_[key] = existing
2936
2937 def load_scalar_from_joined_exec(state, dict_, row):
2938 _instance(row)
2939
2940 populators["new"].append((self.key, load_scalar_from_joined_new_row))
2941 populators["existing"].append(
2942 (self.key, load_scalar_from_joined_existing_row)
2943 )
2944 if context.invoke_all_eagers:
2945 populators["eager"].append(
2946 (self.key, load_scalar_from_joined_exec)
2947 )
2948
2949
2950@log.class_logger
2951@relationships.RelationshipProperty.strategy_for(lazy="selectin")
2952class _SelectInLoader(_PostLoader, util.MemoizedSlots):
2953 __slots__ = (
2954 "join_depth",
2955 "omit_join",
2956 "_parent_alias",
2957 "_query_info",
2958 "_fallback_query_info",
2959 )
2960
2961 query_info = collections.namedtuple(
2962 "queryinfo",
2963 [
2964 "load_only_child",
2965 "load_with_join",
2966 "in_expr",
2967 "pk_cols",
2968 "zero_idx",
2969 "child_lookup_cols",
2970 ],
2971 )
2972
2973 _chunksize = 500
2974
2975 def __init__(self, parent, strategy_key):
2976 super().__init__(parent, strategy_key)
2977 self.join_depth = self.parent_property.join_depth
2978 is_m2o = self.parent_property.direction is interfaces.MANYTOONE
2979
2980 if self.parent_property.omit_join is not None:
2981 self.omit_join = self.parent_property.omit_join
2982 else:
2983 lazyloader = self.parent_property._get_strategy(
2984 (("lazy", "select"),)
2985 )
2986 if is_m2o:
2987 self.omit_join = lazyloader.use_get
2988 else:
2989 self.omit_join = self.parent._get_clause[0].compare(
2990 lazyloader._rev_lazywhere,
2991 use_proxies=True,
2992 compare_keys=False,
2993 equivalents=self.parent._equivalent_columns,
2994 )
2995
2996 if self.omit_join:
2997 if is_m2o:
2998 self._query_info = self._init_for_omit_join_m2o()
2999 self._fallback_query_info = self._init_for_join()
3000 else:
3001 self._query_info = self._init_for_omit_join()
3002 else:
3003 self._query_info = self._init_for_join()
3004
3005 def _init_for_omit_join(self):
3006 pk_to_fk = dict(
3007 self.parent_property._join_condition.local_remote_pairs
3008 )
3009 pk_to_fk.update(
3010 (equiv, pk_to_fk[k])
3011 for k in list(pk_to_fk)
3012 for equiv in self.parent._equivalent_columns.get(k, ())
3013 )
3014
3015 pk_cols = fk_cols = [
3016 pk_to_fk[col] for col in self.parent.primary_key if col in pk_to_fk
3017 ]
3018 if len(fk_cols) > 1:
3019 in_expr = sql.tuple_(*fk_cols)
3020 zero_idx = False
3021 else:
3022 in_expr = fk_cols[0]
3023 zero_idx = True
3024
3025 return self.query_info(False, False, in_expr, pk_cols, zero_idx, None)
3026
3027 def _init_for_omit_join_m2o(self):
3028 pk_cols = self.mapper.primary_key
3029 if len(pk_cols) > 1:
3030 in_expr = sql.tuple_(*pk_cols)
3031 zero_idx = False
3032 else:
3033 in_expr = pk_cols[0]
3034 zero_idx = True
3035
3036 lazyloader = self.parent_property._get_strategy((("lazy", "select"),))
3037 lookup_cols = [lazyloader._equated_columns[pk] for pk in pk_cols]
3038
3039 return self.query_info(
3040 True, False, in_expr, pk_cols, zero_idx, lookup_cols
3041 )
3042
3043 def _init_for_join(self):
3044 self._parent_alias = AliasedClass(self.parent.class_)
3045 pa_insp = inspect(self._parent_alias)
3046 pk_cols = [
3047 pa_insp._adapt_element(col) for col in self.parent.primary_key
3048 ]
3049 if len(pk_cols) > 1:
3050 in_expr = sql.tuple_(*pk_cols)
3051 zero_idx = False
3052 else:
3053 in_expr = pk_cols[0]
3054 zero_idx = True
3055 return self.query_info(False, True, in_expr, pk_cols, zero_idx, None)
3056
3057 def init_class_attribute(self, mapper):
3058 self.parent_property._get_strategy(
3059 (("lazy", "select"),)
3060 ).init_class_attribute(mapper)
3061
3062 def create_row_processor(
3063 self,
3064 context,
3065 query_entity,
3066 path,
3067 loadopt,
3068 mapper,
3069 result,
3070 adapter,
3071 populators,
3072 ):
3073 if context.refresh_state:
3074 return self._immediateload_create_row_processor(
3075 context,
3076 query_entity,
3077 path,
3078 loadopt,
3079 mapper,
3080 result,
3081 adapter,
3082 populators,
3083 )
3084
3085 (
3086 effective_path,
3087 run_loader,
3088 execution_options,
3089 recursion_depth,
3090 ) = self._setup_for_recursion(
3091 context, path, loadopt, join_depth=self.join_depth
3092 )
3093
3094 if not run_loader:
3095 return
3096
3097 if not context.compile_state.compile_options._enable_eagerloads:
3098 return
3099
3100 if not self.parent.class_manager[self.key].impl.supports_population:
3101 raise sa_exc.InvalidRequestError(
3102 "'%s' does not support object "
3103 "population - eager loading cannot be applied." % self
3104 )
3105
3106 # a little dance here as the "path" is still something that only
3107 # semi-tracks the exact series of things we are loading, still not
3108 # telling us about with_polymorphic() and stuff like that when it's at
3109 # the root.. the initial MapperEntity is more accurate for this case.
3110 if len(path) == 1:
3111 if not orm_util._entity_isa(query_entity.entity_zero, self.parent):
3112 return
3113 elif not orm_util._entity_isa(path[-1], self.parent):
3114 return
3115
3116 selectin_path = effective_path
3117
3118 path_w_prop = path[self.parent_property]
3119
3120 # build up a path indicating the path from the leftmost
3121 # entity to the thing we're subquery loading.
3122 with_poly_entity = path_w_prop.get(
3123 context.attributes, "path_with_polymorphic", None
3124 )
3125 if with_poly_entity is not None:
3126 effective_entity = inspect(with_poly_entity)
3127 else:
3128 effective_entity = self.entity
3129
3130 loading._PostLoad.callable_for_path(
3131 context,
3132 selectin_path,
3133 self.parent,
3134 self.parent_property,
3135 self._load_for_path,
3136 effective_entity,
3137 loadopt,
3138 recursion_depth,
3139 execution_options,
3140 )
3141
3142 def _load_for_path(
3143 self,
3144 context,
3145 path,
3146 states,
3147 load_only,
3148 effective_entity,
3149 loadopt,
3150 recursion_depth,
3151 execution_options,
3152 ):
3153 if load_only and self.key not in load_only:
3154 return
3155
3156 query_info = self._query_info
3157
3158 if query_info.load_only_child:
3159 our_states = collections.defaultdict(list)
3160 none_states = []
3161
3162 mapper = self.parent
3163
3164 for state, overwrite in states:
3165 state_dict = state.dict
3166 related_ident = tuple(
3167 mapper._get_state_attr_by_column(
3168 state,
3169 state_dict,
3170 lk,
3171 passive=attributes.PASSIVE_NO_FETCH,
3172 )
3173 for lk in query_info.child_lookup_cols
3174 )
3175 # if the loaded parent objects do not have the foreign key
3176 # to the related item loaded, then degrade into the joined
3177 # version of selectinload
3178 if LoaderCallableStatus.PASSIVE_NO_RESULT in related_ident:
3179 query_info = self._fallback_query_info
3180 break
3181
3182 # organize states into lists keyed to particular foreign
3183 # key values.
3184 if None not in related_ident:
3185 our_states[related_ident].append(
3186 (state, state_dict, overwrite)
3187 )
3188 else:
3189 # For FK values that have None, add them to a
3190 # separate collection that will be populated separately
3191 none_states.append((state, state_dict, overwrite))
3192
3193 # note the above conditional may have changed query_info
3194 if not query_info.load_only_child:
3195 our_states = [
3196 (state.key[1], state, state.dict, overwrite)
3197 for state, overwrite in states
3198 ]
3199
3200 pk_cols = query_info.pk_cols
3201 in_expr = query_info.in_expr
3202
3203 if not query_info.load_with_join:
3204 # in "omit join" mode, the primary key column and the
3205 # "in" expression are in terms of the related entity. So
3206 # if the related entity is polymorphic or otherwise aliased,
3207 # we need to adapt our "pk_cols" and "in_expr" to that
3208 # entity. in non-"omit join" mode, these are against the
3209 # parent entity and do not need adaption.
3210 if effective_entity.is_aliased_class:
3211 pk_cols = [
3212 effective_entity._adapt_element(col) for col in pk_cols
3213 ]
3214 in_expr = effective_entity._adapt_element(in_expr)
3215
3216 bundle_ent = orm_util.Bundle("pk", *pk_cols)
3217 bundle_sql = bundle_ent.__clause_element__()
3218
3219 entity_sql = effective_entity.__clause_element__()
3220 q = Select._create_raw_select(
3221 _raw_columns=[bundle_sql, entity_sql],
3222 _compile_options=_ORMCompileState.default_compile_options,
3223 _propagate_attrs={
3224 "compile_state_plugin": "orm",
3225 "plugin_subject": effective_entity,
3226 },
3227 )
3228
3229 if not query_info.load_with_join:
3230 # the Bundle we have in the "omit_join" case is against raw, non
3231 # annotated columns, so to ensure the Query knows its primary
3232 # entity, we add it explicitly. If we made the Bundle against
3233 # annotated columns, we hit a performance issue in this specific
3234 # case, which is detailed in issue #4347.
3235 q = q.select_from(effective_entity)
3236 else:
3237 # in the non-omit_join case, the Bundle is against the annotated/
3238 # mapped column of the parent entity, but the #4347 issue does not
3239 # occur in this case.
3240 q = q.select_from(self._parent_alias).join(
3241 getattr(self._parent_alias, self.parent_property.key).of_type(
3242 effective_entity
3243 )
3244 )
3245
3246 q = q.filter(in_expr.in_(sql.bindparam("primary_keys")))
3247
3248 # a test which exercises what these comments talk about is
3249 # test_selectin_relations.py -> test_twolevel_selectin_w_polymorphic
3250 #
3251 # effective_entity above is given to us in terms of the cached
3252 # statement, namely this one:
3253 orig_query = context.compile_state.select_statement
3254
3255 # the actual statement that was requested is this one:
3256 # context_query = context.user_passed_query
3257 #
3258 # that's not the cached one, however. So while it is of the identical
3259 # structure, if it has entities like AliasedInsp, which we get from
3260 # aliased() or with_polymorphic(), the AliasedInsp will likely be a
3261 # different object identity each time, and will not match up
3262 # hashing-wise to the corresponding AliasedInsp that's in the
3263 # cached query, meaning it won't match on paths and loader lookups
3264 # and loaders like this one will be skipped if it is used in options.
3265 #
3266 # as it turns out, standard loader options like selectinload(),
3267 # lazyload() that have a path need
3268 # to come from the cached query so that the AliasedInsp etc. objects
3269 # that are in the query line up with the object that's in the path
3270 # of the strategy object. however other options like
3271 # with_loader_criteria() that doesn't have a path (has a fixed entity)
3272 # and needs to have access to the latest closure state in order to
3273 # be correct, we need to use the uncached one.
3274 #
3275 # as of #8399 we let the loader option itself figure out what it
3276 # wants to do given cached and uncached version of itself.
3277
3278 effective_path = path[self.parent_property]
3279
3280 if orig_query is context.user_passed_query:
3281 new_options = orig_query._with_options
3282 else:
3283 cached_options = orig_query._with_options
3284 uncached_options = context.user_passed_query._with_options
3285
3286 # propagate compile state options from the original query,
3287 # updating their "extra_criteria" as necessary.
3288 # note this will create a different cache key than
3289 # "orig" options if extra_criteria is present, because the copy
3290 # of extra_criteria will have different boundparam than that of
3291 # the QueryableAttribute in the path
3292 new_options = [
3293 orig_opt._adapt_cached_option_to_uncached_option(
3294 context, uncached_opt
3295 )
3296 for orig_opt, uncached_opt in zip(
3297 cached_options, uncached_options
3298 )
3299 ]
3300
3301 if loadopt and loadopt._extra_criteria:
3302 new_options += (
3303 orm_util.LoaderCriteriaOption(
3304 effective_entity,
3305 loadopt._generate_extra_criteria(context),
3306 ),
3307 )
3308
3309 if recursion_depth is not None:
3310 effective_path = effective_path._truncate_recursive()
3311
3312 q = q.options(*new_options)
3313
3314 q = q._update_compile_options({"_current_path": effective_path})
3315 if context.populate_existing:
3316 q = q.execution_options(populate_existing=True)
3317
3318 if self.parent_property.order_by:
3319 if not query_info.load_with_join:
3320 eager_order_by = self.parent_property.order_by
3321 if effective_entity.is_aliased_class:
3322 eager_order_by = [
3323 effective_entity._adapt_element(elem)
3324 for elem in eager_order_by
3325 ]
3326 q = q.order_by(*eager_order_by)
3327 else:
3328
3329 def _setup_outermost_orderby(compile_context):
3330 compile_context.eager_order_by += tuple(
3331 util.to_list(self.parent_property.order_by)
3332 )
3333
3334 q = q._add_compile_state_func(
3335 _setup_outermost_orderby, self.parent_property
3336 )
3337
3338 if query_info.load_only_child:
3339 self._load_via_child(
3340 our_states,
3341 none_states,
3342 query_info,
3343 q,
3344 context,
3345 execution_options,
3346 )
3347 else:
3348 self._load_via_parent(
3349 our_states, query_info, q, context, execution_options
3350 )
3351
3352 def _load_via_child(
3353 self,
3354 our_states,
3355 none_states,
3356 query_info,
3357 q,
3358 context,
3359 execution_options,
3360 ):
3361 uselist = self.uselist
3362
3363 # this sort is really for the benefit of the unit tests
3364 our_keys = sorted(our_states)
3365 while our_keys:
3366 chunk = our_keys[0 : self._chunksize]
3367 our_keys = our_keys[self._chunksize :]
3368 data = {
3369 k: v
3370 for k, v in context.session.execute(
3371 q,
3372 params={
3373 "primary_keys": [
3374 key[0] if query_info.zero_idx else key
3375 for key in chunk
3376 ]
3377 },
3378 execution_options=execution_options,
3379 ).unique()
3380 }
3381
3382 for key in chunk:
3383 # for a real foreign key and no concurrent changes to the
3384 # DB while running this method, "key" is always present in
3385 # data. However, for primaryjoins without real foreign keys
3386 # a non-None primaryjoin condition may still refer to no
3387 # related object.
3388 related_obj = data.get(key, None)
3389 for state, dict_, overwrite in our_states[key]:
3390 if not overwrite and self.key in dict_:
3391 continue
3392
3393 state.get_impl(self.key).set_committed_value(
3394 state,
3395 dict_,
3396 related_obj if not uselist else [related_obj],
3397 )
3398 # populate none states with empty value / collection
3399 for state, dict_, overwrite in none_states:
3400 if not overwrite and self.key in dict_:
3401 continue
3402
3403 # note it's OK if this is a uselist=True attribute, the empty
3404 # collection will be populated
3405 state.get_impl(self.key).set_committed_value(state, dict_, None)
3406
3407 def _load_via_parent(
3408 self, our_states, query_info, q, context, execution_options
3409 ):
3410 uselist = self.uselist
3411 _empty_result = () if uselist else None
3412
3413 while our_states:
3414 chunk = our_states[0 : self._chunksize]
3415 our_states = our_states[self._chunksize :]
3416
3417 primary_keys = [
3418 key[0] if query_info.zero_idx else key
3419 for key, state, state_dict, overwrite in chunk
3420 ]
3421
3422 data = collections.defaultdict(list)
3423 for k, v in itertools.groupby(
3424 context.session.execute(
3425 q,
3426 params={"primary_keys": primary_keys},
3427 execution_options=execution_options,
3428 ).unique(),
3429 lambda x: x[0],
3430 ):
3431 data[k].extend(vv[1] for vv in v)
3432
3433 for key, state, state_dict, overwrite in chunk:
3434 if not overwrite and self.key in state_dict:
3435 continue
3436
3437 collection = data.get(key, _empty_result)
3438
3439 if not uselist and collection:
3440 if len(collection) > 1:
3441 util.warn(
3442 "Multiple rows returned with "
3443 "uselist=False for eagerly-loaded "
3444 "attribute '%s' " % self
3445 )
3446 state.get_impl(self.key).set_committed_value(
3447 state, state_dict, collection[0]
3448 )
3449 else:
3450 # note that empty tuple set on uselist=False sets the
3451 # value to None
3452 state.get_impl(self.key).set_committed_value(
3453 state, state_dict, collection
3454 )
3455
3456
3457def _single_parent_validator(desc, prop):
3458 def _do_check(state, value, oldvalue, initiator):
3459 if value is not None and initiator.key == prop.key:
3460 hasparent = initiator.hasparent(attributes.instance_state(value))
3461 if hasparent and oldvalue is not value:
3462 raise sa_exc.InvalidRequestError(
3463 "Instance %s is already associated with an instance "
3464 "of %s via its %s attribute, and is only allowed a "
3465 "single parent."
3466 % (orm_util.instance_str(value), state.class_, prop),
3467 code="bbf1",
3468 )
3469 return value
3470
3471 def append(state, value, initiator):
3472 return _do_check(state, value, None, initiator)
3473
3474 def set_(state, value, oldvalue, initiator):
3475 return _do_check(state, value, oldvalue, initiator)
3476
3477 event.listen(
3478 desc, "append", append, raw=True, retval=True, active_history=True
3479 )
3480 event.listen(desc, "set", set_, raw=True, retval=True, active_history=True)