1# orm/strategies.py
2# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
3# <see AUTHORS file>
4#
5# This module is part of SQLAlchemy and is released under
6# the MIT License: https://www.opensource.org/licenses/mit-license.php
7# mypy: ignore-errors
8
9
10"""sqlalchemy.orm.interfaces.LoaderStrategy
11 implementations, and related MapperOptions."""
12
13from __future__ import annotations
14
15import collections
16import itertools
17from typing import Any
18from typing import Dict
19from typing import Optional
20from typing import Tuple
21from typing import TYPE_CHECKING
22from typing import Union
23
24from . import attributes
25from . import exc as orm_exc
26from . import interfaces
27from . import loading
28from . import path_registry
29from . import properties
30from . import query
31from . import relationships
32from . import unitofwork
33from . import util as orm_util
34from .base import _DEFER_FOR_STATE
35from .base import _RAISE_FOR_STATE
36from .base import _SET_DEFERRED_EXPIRED
37from .base import ATTR_WAS_SET
38from .base import LoaderCallableStatus
39from .base import PASSIVE_OFF
40from .base import PassiveFlag
41from .context import _column_descriptions
42from .context import ORMCompileState
43from .context import ORMSelectCompileState
44from .context import QueryContext
45from .interfaces import LoaderStrategy
46from .interfaces import StrategizedProperty
47from .session import _state_session
48from .state import InstanceState
49from .strategy_options import Load
50from .util import _none_set
51from .util import AliasedClass
52from .. import event
53from .. import exc as sa_exc
54from .. import inspect
55from .. import log
56from .. import sql
57from .. import util
58from ..sql import util as sql_util
59from ..sql import visitors
60from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
61from ..sql.selectable import Select
62from ..util.typing import Literal
63
64if TYPE_CHECKING:
65 from .mapper import Mapper
66 from .relationships import RelationshipProperty
67 from ..sql.elements import ColumnElement
68
69
70def _register_attribute(
71 prop,
72 mapper,
73 useobject,
74 compare_function=None,
75 typecallable=None,
76 callable_=None,
77 proxy_property=None,
78 active_history=False,
79 impl_class=None,
80 **kw,
81):
82 listen_hooks = []
83
84 uselist = useobject and prop.uselist
85
86 if useobject and prop.single_parent:
87 listen_hooks.append(single_parent_validator)
88
89 if prop.key in prop.parent.validators:
90 fn, opts = prop.parent.validators[prop.key]
91 listen_hooks.append(
92 lambda desc, prop: orm_util._validator_events(
93 desc, prop.key, fn, **opts
94 )
95 )
96
97 if useobject:
98 listen_hooks.append(unitofwork.track_cascade_events)
99
    # need to assemble backref listeners
    # after the single_parent_validator and mapper-level validators
102 if useobject:
103 backref = prop.back_populates
104 if backref and prop._effective_sync_backref:
105 listen_hooks.append(
106 lambda desc, prop: attributes.backref_listeners(
107 desc, backref, uselist
108 )
109 )
110
111 # a single MapperProperty is shared down a class inheritance
112 # hierarchy, so we set up attribute instrumentation and backref event
113 # for each mapper down the hierarchy.
114
115 # typically, "mapper" is the same as prop.parent, due to the way
116 # the configure_mappers() process runs, however this is not strongly
117 # enforced, and in the case of a second configure_mappers() run the
118 # mapper here might not be prop.parent; also, a subclass mapper may
    # be called here before a superclass mapper.  That is, we can't rely
    # on mappers being set up in any particular order, so we check each one.
121
122 for m in mapper.self_and_descendants:
123 if prop is m._props.get(
124 prop.key
125 ) and not m.class_manager._attr_has_impl(prop.key):
126 desc = attributes.register_attribute_impl(
127 m.class_,
128 prop.key,
129 parent_token=prop,
130 uselist=uselist,
131 compare_function=compare_function,
132 useobject=useobject,
133 trackparent=useobject
134 and (
135 prop.single_parent
136 or prop.direction is interfaces.ONETOMANY
137 ),
138 typecallable=typecallable,
139 callable_=callable_,
140 active_history=active_history,
141 impl_class=impl_class,
142 send_modified_events=not useobject or not prop.viewonly,
143 doc=prop.doc,
144 **kw,
145 )
146
147 for hook in listen_hooks:
148 hook(desc, prop)
149
150
151@properties.ColumnProperty.strategy_for(instrument=False, deferred=False)
152class UninstrumentedColumnLoader(LoaderStrategy):
153 """Represent a non-instrumented MapperProperty.
154
    The polymorphic_on argument of mapper() often results in this,
    when that argument is expressed against the with_polymorphic selectable.
157
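    For illustration only (``Employee``, ``employee_table`` and ``pjoin``
    are hypothetical; ``pjoin`` stands in for a polymorphic union
    selectable, and ``reg`` for a :class:`.registry`), a mapping along
    these lines would typically select this strategy for the
    discriminator column::

        reg.map_imperatively(
            Employee,
            employee_table,
            with_polymorphic=("*", pjoin),
            polymorphic_on=pjoin.c.type,
        )
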
158 """
159
160 __slots__ = ("columns",)
161
162 def __init__(self, parent, strategy_key):
163 super().__init__(parent, strategy_key)
164 self.columns = self.parent_property.columns
165
166 def setup_query(
167 self,
168 compile_state,
169 query_entity,
170 path,
171 loadopt,
172 adapter,
173 column_collection=None,
174 **kwargs,
175 ):
176 for c in self.columns:
177 if adapter:
178 c = adapter.columns[c]
179 compile_state._append_dedupe_col_collection(c, column_collection)
180
181 def create_row_processor(
182 self,
183 context,
184 query_entity,
185 path,
186 loadopt,
187 mapper,
188 result,
189 adapter,
190 populators,
191 ):
192 pass
193
194
195@log.class_logger
196@properties.ColumnProperty.strategy_for(instrument=True, deferred=False)
197class ColumnLoader(LoaderStrategy):
198 """Provide loading behavior for a :class:`.ColumnProperty`."""
199
200 __slots__ = "columns", "is_composite"
201
202 def __init__(self, parent, strategy_key):
203 super().__init__(parent, strategy_key)
204 self.columns = self.parent_property.columns
205 self.is_composite = hasattr(self.parent_property, "composite_class")
206
207 def setup_query(
208 self,
209 compile_state,
210 query_entity,
211 path,
212 loadopt,
213 adapter,
214 column_collection,
215 memoized_populators,
216 check_for_adapt=False,
217 **kwargs,
218 ):
219 for c in self.columns:
220 if adapter:
221 if check_for_adapt:
222 c = adapter.adapt_check_present(c)
223 if c is None:
224 return
225 else:
226 c = adapter.columns[c]
227
228 compile_state._append_dedupe_col_collection(c, column_collection)
229
230 fetch = self.columns[0]
231 if adapter:
232 fetch = adapter.columns[fetch]
233 if fetch is None:
234 # None happens here only for dml bulk_persistence cases
235 # when context.DMLReturningColFilter is used
236 return
237
238 memoized_populators[self.parent_property] = fetch
239
240 def init_class_attribute(self, mapper):
241 self.is_class_level = True
242 coltype = self.columns[0].type
243 # TODO: check all columns ? check for foreign key as well?
244 active_history = (
245 self.parent_property.active_history
246 or self.columns[0].primary_key
247 or (
248 mapper.version_id_col is not None
249 and mapper._columntoproperty.get(mapper.version_id_col, None)
250 is self.parent_property
251 )
252 )
253
254 _register_attribute(
255 self.parent_property,
256 mapper,
257 useobject=False,
258 compare_function=coltype.compare_values,
259 active_history=active_history,
260 )
261
262 def create_row_processor(
263 self,
264 context,
265 query_entity,
266 path,
267 loadopt,
268 mapper,
269 result,
270 adapter,
271 populators,
272 ):
273 # look through list of columns represented here
274 # to see which, if any, is present in the row.
275
276 for col in self.columns:
277 if adapter:
278 col = adapter.columns[col]
279 getter = result._getter(col, False)
280 if getter:
281 populators["quick"].append((self.key, getter))
282 break
283 else:
284 populators["expire"].append((self.key, True))
285
286
287@log.class_logger
288@properties.ColumnProperty.strategy_for(query_expression=True)
289class ExpressionColumnLoader(ColumnLoader):
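    # This strategy backs attributes mapped with query_expression(); the
    # actual expression is supplied per-query.  Illustrative sketch only
    # ("A" is a hypothetical mapped class declaring
    # "expr = query_expression()"):
    #
    #   stmt = select(A).options(with_expression(A.expr, A.x + A.y))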
290 def __init__(self, parent, strategy_key):
291 super().__init__(parent, strategy_key)
292
        # compare to the "default" expression that is mapped in
        # the column.  If it's sql.null(), we don't need to render the
        # column unless an expr is passed in the options.
296 null = sql.null().label(None)
297 self._have_default_expression = any(
298 not c.compare(null) for c in self.parent_property.columns
299 )
300
301 def setup_query(
302 self,
303 compile_state,
304 query_entity,
305 path,
306 loadopt,
307 adapter,
308 column_collection,
309 memoized_populators,
310 **kwargs,
311 ):
312 columns = None
313 if loadopt and loadopt._extra_criteria:
314 columns = loadopt._extra_criteria
315
316 elif self._have_default_expression:
317 columns = self.parent_property.columns
318
319 if columns is None:
320 return
321
322 for c in columns:
323 if adapter:
324 c = adapter.columns[c]
325 compile_state._append_dedupe_col_collection(c, column_collection)
326
327 fetch = columns[0]
328 if adapter:
329 fetch = adapter.columns[fetch]
330 if fetch is None:
            # None is not expected to be the result of any adapter
            # implementation here; however, there may be theoretical
            # usages of returning() with context.DMLReturningColFilter
334 return
335
336 memoized_populators[self.parent_property] = fetch
337
338 def create_row_processor(
339 self,
340 context,
341 query_entity,
342 path,
343 loadopt,
344 mapper,
345 result,
346 adapter,
347 populators,
348 ):
349 # look through list of columns represented here
350 # to see which, if any, is present in the row.
351 if loadopt and loadopt._extra_criteria:
352 columns = loadopt._extra_criteria
353
354 for col in columns:
355 if adapter:
356 col = adapter.columns[col]
357 getter = result._getter(col, False)
358 if getter:
359 populators["quick"].append((self.key, getter))
360 break
361 else:
362 populators["expire"].append((self.key, True))
363
364 def init_class_attribute(self, mapper):
365 self.is_class_level = True
366
367 _register_attribute(
368 self.parent_property,
369 mapper,
370 useobject=False,
371 compare_function=self.columns[0].type.compare_values,
372 accepts_scalar_loader=False,
373 )
374
375
376@log.class_logger
377@properties.ColumnProperty.strategy_for(deferred=True, instrument=True)
378@properties.ColumnProperty.strategy_for(
379 deferred=True, instrument=True, raiseload=True
380)
381@properties.ColumnProperty.strategy_for(do_nothing=True)
382class DeferredColumnLoader(LoaderStrategy):
383 """Provide loading behavior for a deferred :class:`.ColumnProperty`."""
384
385 __slots__ = "columns", "group", "raiseload"
386
387 def __init__(self, parent, strategy_key):
388 super().__init__(parent, strategy_key)
389 if hasattr(self.parent_property, "composite_class"):
390 raise NotImplementedError(
391 "Deferred loading for composite types not implemented yet"
392 )
393 self.raiseload = self.strategy_opts.get("raiseload", False)
394 self.columns = self.parent_property.columns
395 self.group = self.parent_property.group
396
397 def create_row_processor(
398 self,
399 context,
400 query_entity,
401 path,
402 loadopt,
403 mapper,
404 result,
405 adapter,
406 populators,
407 ):
408 # for a DeferredColumnLoader, this method is only used during a
409 # "row processor only" query; see test_deferred.py ->
410 # tests with "rowproc_only" in their name. As of the 1.0 series,
411 # loading._instance_processor doesn't use a "row processing" function
        # to populate columns; instead it uses data in the "populators"
413 # dictionary. Normally, the DeferredColumnLoader.setup_query()
414 # sets up that data in the "memoized_populators" dictionary
415 # and "create_row_processor()" here is never invoked.
416
417 if (
418 context.refresh_state
419 and context.query._compile_options._only_load_props
420 and self.key in context.query._compile_options._only_load_props
421 ):
422 self.parent_property._get_strategy(
423 (("deferred", False), ("instrument", True))
424 ).create_row_processor(
425 context,
426 query_entity,
427 path,
428 loadopt,
429 mapper,
430 result,
431 adapter,
432 populators,
433 )
434
435 elif not self.is_class_level:
436 if self.raiseload:
437 set_deferred_for_local_state = (
438 self.parent_property._raise_column_loader
439 )
440 else:
441 set_deferred_for_local_state = (
442 self.parent_property._deferred_column_loader
443 )
444 populators["new"].append((self.key, set_deferred_for_local_state))
445 else:
446 populators["expire"].append((self.key, False))
447
448 def init_class_attribute(self, mapper):
449 self.is_class_level = True
450
451 _register_attribute(
452 self.parent_property,
453 mapper,
454 useobject=False,
455 compare_function=self.columns[0].type.compare_values,
456 callable_=self._load_for_state,
457 load_on_unexpire=False,
458 )
459
460 def setup_query(
461 self,
462 compile_state,
463 query_entity,
464 path,
465 loadopt,
466 adapter,
467 column_collection,
468 memoized_populators,
469 only_load_props=None,
470 **kw,
471 ):
472 if (
473 (
474 compile_state.compile_options._render_for_subquery
475 and self.parent_property._renders_in_subqueries
476 )
477 or (
478 loadopt
479 and set(self.columns).intersection(
480 self.parent._should_undefer_in_wildcard
481 )
482 )
483 or (
484 loadopt
485 and self.group
486 and loadopt.local_opts.get(
487 "undefer_group_%s" % self.group, False
488 )
489 )
490 or (only_load_props and self.key in only_load_props)
491 ):
492 self.parent_property._get_strategy(
493 (("deferred", False), ("instrument", True))
494 ).setup_query(
495 compile_state,
496 query_entity,
497 path,
498 loadopt,
499 adapter,
500 column_collection,
501 memoized_populators,
502 **kw,
503 )
504 elif self.is_class_level:
505 memoized_populators[self.parent_property] = _SET_DEFERRED_EXPIRED
506 elif not self.raiseload:
507 memoized_populators[self.parent_property] = _DEFER_FOR_STATE
508 else:
509 memoized_populators[self.parent_property] = _RAISE_FOR_STATE
510
511 def _load_for_state(self, state, passive):
512 if not state.key:
513 return LoaderCallableStatus.ATTR_EMPTY
514
515 if not passive & PassiveFlag.SQL_OK:
516 return LoaderCallableStatus.PASSIVE_NO_RESULT
517
518 localparent = state.manager.mapper
519
520 if self.group:
521 toload = [
522 p.key
523 for p in localparent.iterate_properties
524 if isinstance(p, StrategizedProperty)
525 and isinstance(p.strategy, DeferredColumnLoader)
526 and p.group == self.group
527 ]
528 else:
529 toload = [self.key]
530
531 # narrow the keys down to just those which have no history
532 group = [k for k in toload if k in state.unmodified]
533
534 session = _state_session(state)
535 if session is None:
536 raise orm_exc.DetachedInstanceError(
537 "Parent instance %s is not bound to a Session; "
538 "deferred load operation of attribute '%s' cannot proceed"
539 % (orm_util.state_str(state), self.key)
540 )
541
542 if self.raiseload:
543 self._invoke_raise_load(state, passive, "raise")
544
545 loading.load_scalar_attributes(
546 state.mapper, state, set(group), PASSIVE_OFF
547 )
548
549 return LoaderCallableStatus.ATTR_WAS_SET
550
551 def _invoke_raise_load(self, state, passive, lazy):
552 raise sa_exc.InvalidRequestError(
553 "'%s' is not available due to raiseload=True" % (self,)
554 )
555
556
557class LoadDeferredColumns:
558 """serializable loader object used by DeferredColumnLoader"""
559
560 def __init__(self, key: str, raiseload: bool = False):
561 self.key = key
562 self.raiseload = raiseload
563
564 def __call__(self, state, passive=attributes.PASSIVE_OFF):
565 key = self.key
566
567 localparent = state.manager.mapper
568 prop = localparent._props[key]
569 if self.raiseload:
570 strategy_key = (
571 ("deferred", True),
572 ("instrument", True),
573 ("raiseload", True),
574 )
575 else:
576 strategy_key = (("deferred", True), ("instrument", True))
577 strategy = prop._get_strategy(strategy_key)
578 return strategy._load_for_state(state, passive)
579
580
581class AbstractRelationshipLoader(LoaderStrategy):
    """LoaderStrategies which deal with related objects."""
583
584 __slots__ = "mapper", "target", "uselist", "entity"
585
586 def __init__(self, parent, strategy_key):
587 super().__init__(parent, strategy_key)
588 self.mapper = self.parent_property.mapper
589 self.entity = self.parent_property.entity
590 self.target = self.parent_property.target
591 self.uselist = self.parent_property.uselist
592
593 def _immediateload_create_row_processor(
594 self,
595 context,
596 query_entity,
597 path,
598 loadopt,
599 mapper,
600 result,
601 adapter,
602 populators,
603 ):
604 return self.parent_property._get_strategy(
605 (("lazy", "immediate"),)
606 ).create_row_processor(
607 context,
608 query_entity,
609 path,
610 loadopt,
611 mapper,
612 result,
613 adapter,
614 populators,
615 )
616
617
618@log.class_logger
619@relationships.RelationshipProperty.strategy_for(do_nothing=True)
620class DoNothingLoader(LoaderStrategy):
621 """Relationship loader that makes no change to the object's state.
622
623 Compared to NoLoader, this loader does not initialize the
624 collection/attribute to empty/none; the usual default LazyLoader will
625 take effect.
626
627 """
628
629
630@log.class_logger
631@relationships.RelationshipProperty.strategy_for(lazy="noload")
632@relationships.RelationshipProperty.strategy_for(lazy=None)
633class NoLoader(AbstractRelationshipLoader):
634 """Provide loading behavior for a :class:`.Relationship`
635 with "lazy=None".
636
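    For illustration only (``Parent`` and ``Child`` are hypothetical
    mapped classes), this strategy is selected by a mapping such as::

        class Parent(Base):
            # ...
            children = relationship("Child", lazy="noload")
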
637 """
638
639 __slots__ = ()
640
641 def init_class_attribute(self, mapper):
642 self.is_class_level = True
643
644 _register_attribute(
645 self.parent_property,
646 mapper,
647 useobject=True,
648 typecallable=self.parent_property.collection_class,
649 )
650
651 def create_row_processor(
652 self,
653 context,
654 query_entity,
655 path,
656 loadopt,
657 mapper,
658 result,
659 adapter,
660 populators,
661 ):
662 def invoke_no_load(state, dict_, row):
663 if self.uselist:
664 attributes.init_state_collection(state, dict_, self.key)
665 else:
666 dict_[self.key] = None
667
668 populators["new"].append((self.key, invoke_no_load))
669
670
671@log.class_logger
672@relationships.RelationshipProperty.strategy_for(lazy=True)
673@relationships.RelationshipProperty.strategy_for(lazy="select")
674@relationships.RelationshipProperty.strategy_for(lazy="raise")
675@relationships.RelationshipProperty.strategy_for(lazy="raise_on_sql")
676@relationships.RelationshipProperty.strategy_for(lazy="baked_select")
677class LazyLoader(
678 AbstractRelationshipLoader, util.MemoizedSlots, log.Identified
679):
680 """Provide loading behavior for a :class:`.Relationship`
    with "lazy=True", that is, one which loads when first accessed.
682
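    For illustration only (``Parent`` / ``Child`` are hypothetical mapped
    classes), this is the default loader for a relationship::

        class Parent(Base):
            # ...
            children = relationship("Child", lazy="select")  # the default

        some_parent.children  # emits a SELECT upon first access

    The ``lazy="raise"`` and ``lazy="raise_on_sql"`` variants use this same
    strategy but raise an error rather than proceeding with the load
    (unconditionally, or only when SQL would be emitted, respectively).
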
683 """
684
685 __slots__ = (
686 "_lazywhere",
687 "_rev_lazywhere",
688 "_lazyload_reverse_option",
689 "_order_by",
690 "use_get",
691 "is_aliased_class",
692 "_bind_to_col",
693 "_equated_columns",
694 "_rev_bind_to_col",
695 "_rev_equated_columns",
696 "_simple_lazy_clause",
697 "_raise_always",
698 "_raise_on_sql",
699 )
700
701 _lazywhere: ColumnElement[bool]
702 _bind_to_col: Dict[str, ColumnElement[Any]]
703 _rev_lazywhere: ColumnElement[bool]
704 _rev_bind_to_col: Dict[str, ColumnElement[Any]]
705
706 parent_property: RelationshipProperty[Any]
707
708 def __init__(
709 self, parent: RelationshipProperty[Any], strategy_key: Tuple[Any, ...]
710 ):
711 super().__init__(parent, strategy_key)
712 self._raise_always = self.strategy_opts["lazy"] == "raise"
713 self._raise_on_sql = self.strategy_opts["lazy"] == "raise_on_sql"
714
715 self.is_aliased_class = inspect(self.entity).is_aliased_class
716
717 join_condition = self.parent_property._join_condition
718 (
719 self._lazywhere,
720 self._bind_to_col,
721 self._equated_columns,
722 ) = join_condition.create_lazy_clause()
723
724 (
725 self._rev_lazywhere,
726 self._rev_bind_to_col,
727 self._rev_equated_columns,
728 ) = join_condition.create_lazy_clause(reverse_direction=True)
729
730 if self.parent_property.order_by:
731 self._order_by = [
732 sql_util._deep_annotate(elem, {"_orm_adapt": True})
733 for elem in util.to_list(self.parent_property.order_by)
734 ]
735 else:
736 self._order_by = None
737
738 self.logger.info("%s lazy loading clause %s", self, self._lazywhere)
739
740 # determine if our "lazywhere" clause is the same as the mapper's
741 # get() clause. then we can just use mapper.get()
742 #
        # TODO: the "not self.uselist" can be taken out entirely; a m2o
        # load that populates for a list (very unusual, but possible with
        # the API) can still set the value to "None", and the attribute
        # system will populate it as an empty list.
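        # illustrative example (hypothetical mapping): for a plain
        # many-to-one such as Child.parent, the lazy clause
        # "parent.id = :param" matches the mapper's get() clause, so the
        # load can be satisfied from the identity map / Session.get()
        # rather than by constructing a full SELECT.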
747 self.use_get = (
748 not self.is_aliased_class
749 and not self.uselist
750 and self.entity._get_clause[0].compare(
751 self._lazywhere,
752 use_proxies=True,
753 compare_keys=False,
754 equivalents=self.mapper._equivalent_columns,
755 )
756 )
757
758 if self.use_get:
759 for col in list(self._equated_columns):
760 if col in self.mapper._equivalent_columns:
761 for c in self.mapper._equivalent_columns[col]:
762 self._equated_columns[c] = self._equated_columns[col]
763
764 self.logger.info(
765 "%s will use Session.get() to optimize instance loads", self
766 )
767
768 def init_class_attribute(self, mapper):
769 self.is_class_level = True
770
771 _legacy_inactive_history_style = (
772 self.parent_property._legacy_inactive_history_style
773 )
774
775 if self.parent_property.active_history:
776 active_history = True
777 _deferred_history = False
778
779 elif (
780 self.parent_property.direction is not interfaces.MANYTOONE
781 or not self.use_get
782 ):
783 if _legacy_inactive_history_style:
784 active_history = True
785 _deferred_history = False
786 else:
787 active_history = False
788 _deferred_history = True
789 else:
790 active_history = _deferred_history = False
791
792 _register_attribute(
793 self.parent_property,
794 mapper,
795 useobject=True,
796 callable_=self._load_for_state,
797 typecallable=self.parent_property.collection_class,
798 active_history=active_history,
799 _deferred_history=_deferred_history,
800 )
801
802 def _memoized_attr__simple_lazy_clause(self):
803 lazywhere = sql_util._deep_annotate(
804 self._lazywhere, {"_orm_adapt": True}
805 )
806
807 criterion, bind_to_col = (lazywhere, self._bind_to_col)
808
809 params = []
810
811 def visit_bindparam(bindparam):
812 bindparam.unique = False
813
814 visitors.traverse(criterion, {}, {"bindparam": visit_bindparam})
815
816 def visit_bindparam(bindparam):
817 if bindparam._identifying_key in bind_to_col:
818 params.append(
819 (
820 bindparam.key,
821 bind_to_col[bindparam._identifying_key],
822 None,
823 )
824 )
825 elif bindparam.callable is None:
826 params.append((bindparam.key, None, bindparam.value))
827
828 criterion = visitors.cloned_traverse(
829 criterion, {}, {"bindparam": visit_bindparam}
830 )
831
832 return criterion, params
833
834 def _generate_lazy_clause(self, state, passive):
835 criterion, param_keys = self._simple_lazy_clause
836
837 if state is None:
838 return sql_util.adapt_criterion_to_null(
839 criterion, [key for key, ident, value in param_keys]
840 )
841
842 mapper = self.parent_property.parent
843
844 o = state.obj() # strong ref
845 dict_ = attributes.instance_dict(o)
846
847 if passive & PassiveFlag.INIT_OK:
848 passive ^= PassiveFlag.INIT_OK
849
850 params = {}
851 for key, ident, value in param_keys:
852 if ident is not None:
853 if passive and passive & PassiveFlag.LOAD_AGAINST_COMMITTED:
854 value = mapper._get_committed_state_attr_by_column(
855 state, dict_, ident, passive
856 )
857 else:
858 value = mapper._get_state_attr_by_column(
859 state, dict_, ident, passive
860 )
861
862 params[key] = value
863
864 return criterion, params
865
866 def _invoke_raise_load(self, state, passive, lazy):
867 raise sa_exc.InvalidRequestError(
868 "'%s' is not available due to lazy='%s'" % (self, lazy)
869 )
870
871 def _load_for_state(
872 self,
873 state,
874 passive,
875 loadopt=None,
876 extra_criteria=(),
877 extra_options=(),
878 alternate_effective_path=None,
879 execution_options=util.EMPTY_DICT,
880 ):
881 if not state.key and (
882 (
883 not self.parent_property.load_on_pending
884 and not state._load_pending
885 )
886 or not state.session_id
887 ):
888 return LoaderCallableStatus.ATTR_EMPTY
889
890 pending = not state.key
891 primary_key_identity = None
892
893 use_get = self.use_get and (not loadopt or not loadopt._extra_criteria)
894
895 if (not passive & PassiveFlag.SQL_OK and not use_get) or (
896 not passive & attributes.NON_PERSISTENT_OK and pending
897 ):
898 return LoaderCallableStatus.PASSIVE_NO_RESULT
899
900 if (
901 # we were given lazy="raise"
902 self._raise_always
903 # the no_raise history-related flag was not passed
904 and not passive & PassiveFlag.NO_RAISE
905 and (
906 # if we are use_get and related_object_ok is disabled,
907 # which means we are at most looking in the identity map
908 # for history purposes or otherwise returning
909 # PASSIVE_NO_RESULT, don't raise. This is also a
910 # history-related flag
911 not use_get
912 or passive & PassiveFlag.RELATED_OBJECT_OK
913 )
914 ):
915 self._invoke_raise_load(state, passive, "raise")
916
917 session = _state_session(state)
918 if not session:
919 if passive & PassiveFlag.NO_RAISE:
920 return LoaderCallableStatus.PASSIVE_NO_RESULT
921
922 raise orm_exc.DetachedInstanceError(
923 "Parent instance %s is not bound to a Session; "
924 "lazy load operation of attribute '%s' cannot proceed"
925 % (orm_util.state_str(state), self.key)
926 )
927
928 # if we have a simple primary key load, check the
929 # identity map without generating a Query at all
930 if use_get:
931 primary_key_identity = self._get_ident_for_use_get(
932 session, state, passive
933 )
934 if LoaderCallableStatus.PASSIVE_NO_RESULT in primary_key_identity:
935 return LoaderCallableStatus.PASSIVE_NO_RESULT
936 elif LoaderCallableStatus.NEVER_SET in primary_key_identity:
937 return LoaderCallableStatus.NEVER_SET
938
939 if _none_set.issuperset(primary_key_identity):
940 return None
941
942 if (
943 self.key in state.dict
944 and not passive & PassiveFlag.DEFERRED_HISTORY_LOAD
945 ):
946 return LoaderCallableStatus.ATTR_WAS_SET
947
948 # look for this identity in the identity map. Delegate to the
949 # Query class in use, as it may have special rules for how it
950 # does this, including how it decides what the correct
951 # identity_token would be for this identity.
952
953 instance = session._identity_lookup(
954 self.entity,
955 primary_key_identity,
956 passive=passive,
957 lazy_loaded_from=state,
958 )
959
960 if instance is not None:
961 if instance is LoaderCallableStatus.PASSIVE_CLASS_MISMATCH:
962 return None
963 else:
964 return instance
965 elif (
966 not passive & PassiveFlag.SQL_OK
967 or not passive & PassiveFlag.RELATED_OBJECT_OK
968 ):
969 return LoaderCallableStatus.PASSIVE_NO_RESULT
970
971 return self._emit_lazyload(
972 session,
973 state,
974 primary_key_identity,
975 passive,
976 loadopt,
977 extra_criteria,
978 extra_options,
979 alternate_effective_path,
980 execution_options,
981 )
982
983 def _get_ident_for_use_get(self, session, state, passive):
984 instance_mapper = state.manager.mapper
985
986 if passive & PassiveFlag.LOAD_AGAINST_COMMITTED:
987 get_attr = instance_mapper._get_committed_state_attr_by_column
988 else:
989 get_attr = instance_mapper._get_state_attr_by_column
990
991 dict_ = state.dict
992
993 return [
994 get_attr(state, dict_, self._equated_columns[pk], passive=passive)
995 for pk in self.mapper.primary_key
996 ]
997
998 @util.preload_module("sqlalchemy.orm.strategy_options")
999 def _emit_lazyload(
1000 self,
1001 session,
1002 state,
1003 primary_key_identity,
1004 passive,
1005 loadopt,
1006 extra_criteria,
1007 extra_options,
1008 alternate_effective_path,
1009 execution_options,
1010 ):
1011 strategy_options = util.preloaded.orm_strategy_options
1012
1013 clauseelement = self.entity.__clause_element__()
1014 stmt = Select._create_raw_select(
1015 _raw_columns=[clauseelement],
1016 _propagate_attrs=clauseelement._propagate_attrs,
1017 _label_style=LABEL_STYLE_TABLENAME_PLUS_COL,
1018 _compile_options=ORMCompileState.default_compile_options,
1019 )
1020 load_options = QueryContext.default_load_options
1021
1022 load_options += {
1023 "_invoke_all_eagers": False,
1024 "_lazy_loaded_from": state,
1025 }
1026
1027 if self.parent_property.secondary is not None:
1028 stmt = stmt.select_from(
1029 self.mapper, self.parent_property.secondary
1030 )
1031
1032 pending = not state.key
1033
1034 # don't autoflush on pending
1035 if pending or passive & attributes.NO_AUTOFLUSH:
1036 stmt._execution_options = util.immutabledict({"autoflush": False})
1037
1038 use_get = self.use_get
1039
1040 if state.load_options or (loadopt and loadopt._extra_criteria):
1041 if alternate_effective_path is None:
1042 effective_path = state.load_path[self.parent_property]
1043 else:
1044 effective_path = alternate_effective_path[self.parent_property]
1045
1046 opts = state.load_options
1047
1048 if loadopt and loadopt._extra_criteria:
1049 use_get = False
1050 opts += (
1051 orm_util.LoaderCriteriaOption(self.entity, extra_criteria),
1052 )
1053
1054 stmt._with_options = opts
1055 elif alternate_effective_path is None:
1056 # this path is used if there are not already any options
1057 # in the query, but an event may want to add them
1058 effective_path = state.mapper._path_registry[self.parent_property]
1059 else:
1060 # added by immediateloader
1061 effective_path = alternate_effective_path[self.parent_property]
1062
1063 if extra_options:
1064 stmt._with_options += extra_options
1065
1066 stmt._compile_options += {"_current_path": effective_path}
1067
1068 if use_get:
1069 if self._raise_on_sql and not passive & PassiveFlag.NO_RAISE:
1070 self._invoke_raise_load(state, passive, "raise_on_sql")
1071
1072 return loading.load_on_pk_identity(
1073 session,
1074 stmt,
1075 primary_key_identity,
1076 load_options=load_options,
1077 execution_options=execution_options,
1078 )
1079
1080 if self._order_by:
1081 stmt._order_by_clauses = self._order_by
1082
1083 def _lazyload_reverse(compile_context):
1084 for rev in self.parent_property._reverse_property:
1085 # reverse props that are MANYTOONE are loading *this*
1086 # object from get(), so don't need to eager out to those.
1087 if (
1088 rev.direction is interfaces.MANYTOONE
1089 and rev._use_get
1090 and not isinstance(rev.strategy, LazyLoader)
1091 ):
1092 strategy_options.Load._construct_for_existing_path(
1093 compile_context.compile_options._current_path[
1094 rev.parent
1095 ]
1096 ).lazyload(rev).process_compile_state(compile_context)
1097
1098 stmt._with_context_options += (
1099 (_lazyload_reverse, self.parent_property),
1100 )
1101
1102 lazy_clause, params = self._generate_lazy_clause(state, passive)
1103
1104 if execution_options:
1105 execution_options = util.EMPTY_DICT.merge_with(
1106 execution_options,
1107 {
1108 "_sa_orm_load_options": load_options,
1109 },
1110 )
1111 else:
1112 execution_options = {
1113 "_sa_orm_load_options": load_options,
1114 }
1115
1116 if (
1117 self.key in state.dict
1118 and not passive & PassiveFlag.DEFERRED_HISTORY_LOAD
1119 ):
1120 return LoaderCallableStatus.ATTR_WAS_SET
1121
1122 if pending:
1123 if util.has_intersection(orm_util._none_set, params.values()):
1124 return None
1125
1126 elif util.has_intersection(orm_util._never_set, params.values()):
1127 return None
1128
1129 if self._raise_on_sql and not passive & PassiveFlag.NO_RAISE:
1130 self._invoke_raise_load(state, passive, "raise_on_sql")
1131
1132 stmt._where_criteria = (lazy_clause,)
1133
1134 result = session.execute(
1135 stmt, params, execution_options=execution_options
1136 )
1137
1138 result = result.unique().scalars().all()
1139
1140 if self.uselist:
1141 return result
1142 else:
1143 l = len(result)
1144 if l:
1145 if l > 1:
1146 util.warn(
1147 "Multiple rows returned with "
1148 "uselist=False for lazily-loaded attribute '%s' "
1149 % self.parent_property
1150 )
1151
1152 return result[0]
1153 else:
1154 return None
1155
1156 def create_row_processor(
1157 self,
1158 context,
1159 query_entity,
1160 path,
1161 loadopt,
1162 mapper,
1163 result,
1164 adapter,
1165 populators,
1166 ):
1167 key = self.key
1168
1169 if (
1170 context.load_options._is_user_refresh
1171 and context.query._compile_options._only_load_props
1172 and self.key in context.query._compile_options._only_load_props
1173 ):
1174 return self._immediateload_create_row_processor(
1175 context,
1176 query_entity,
1177 path,
1178 loadopt,
1179 mapper,
1180 result,
1181 adapter,
1182 populators,
1183 )
1184
1185 if not self.is_class_level or (loadopt and loadopt._extra_criteria):
1186 # we are not the primary manager for this attribute
1187 # on this class - set up a
1188 # per-instance lazyloader, which will override the
1189 # class-level behavior.
1190 # this currently only happens when using a
1191 # "lazyload" option on a "no load"
1192 # attribute - "eager" attributes always have a
1193 # class-level lazyloader installed.
1194 set_lazy_callable = (
1195 InstanceState._instance_level_callable_processor
1196 )(
1197 mapper.class_manager,
1198 LoadLazyAttribute(
1199 key,
1200 self,
1201 loadopt,
1202 (
1203 loadopt._generate_extra_criteria(context)
1204 if loadopt._extra_criteria
1205 else None
1206 ),
1207 ),
1208 key,
1209 )
1210
1211 populators["new"].append((self.key, set_lazy_callable))
1212 elif context.populate_existing or mapper.always_refresh:
1213
1214 def reset_for_lazy_callable(state, dict_, row):
1215 # we are the primary manager for this attribute on
1216 # this class - reset its
1217 # per-instance attribute state, so that the class-level
1218 # lazy loader is
1219 # executed when next referenced on this instance.
1220 # this is needed in
1221 # populate_existing() types of scenarios to reset
1222 # any existing state.
1223 state._reset(dict_, key)
1224
1225 populators["new"].append((self.key, reset_for_lazy_callable))
1226
1227
1228class LoadLazyAttribute:
1229 """semi-serializable loader object used by LazyLoader
1230
1231 Historically, this object would be carried along with instances that
1232 needed to run lazyloaders, so it had to be serializable to support
1233 cached instances.
1234
    This is no longer a general requirement; the case where this object
    is used is exactly the case that we can't easily serialize, which is
    when extra criteria are present in the loader option.

    We can't reliably serialize that, as it refers to mapped entities and
    AliasedClass objects that are local to the current process and would
    need to be matched up on deserialization, e.g. via the
    sqlalchemy.ext.serializer approach.
1243
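    The "extra criteria" case arises from loader options along the lines
    of the following (illustrative only; ``A`` / ``B`` are hypothetical
    mapped classes)::

        select(A).options(lazyload(A.bs.and_(B.x > 5)))
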
1244 """
1245
1246 def __init__(self, key, initiating_strategy, loadopt, extra_criteria):
1247 self.key = key
1248 self.strategy_key = initiating_strategy.strategy_key
1249 self.loadopt = loadopt
1250 self.extra_criteria = extra_criteria
1251
1252 def __getstate__(self):
1253 if self.extra_criteria is not None:
1254 util.warn(
1255 "Can't reliably serialize a lazyload() option that "
1256 "contains additional criteria; please use eager loading "
1257 "for this case"
1258 )
1259 return {
1260 "key": self.key,
1261 "strategy_key": self.strategy_key,
1262 "loadopt": self.loadopt,
1263 "extra_criteria": (),
1264 }
1265
1266 def __call__(self, state, passive=attributes.PASSIVE_OFF):
1267 key = self.key
1268 instance_mapper = state.manager.mapper
1269 prop = instance_mapper._props[key]
1270 strategy = prop._strategies[self.strategy_key]
1271
1272 return strategy._load_for_state(
1273 state,
1274 passive,
1275 loadopt=self.loadopt,
1276 extra_criteria=self.extra_criteria,
1277 )
1278
1279
1280class PostLoader(AbstractRelationshipLoader):
1281 """A relationship loader that emits a second SELECT statement."""
1282
1283 __slots__ = ()
1284
1285 def _setup_for_recursion(self, context, path, loadopt, join_depth=None):
1286 effective_path = (
1287 context.compile_state.current_path or orm_util.PathRegistry.root
1288 ) + path
1289
1290 top_level_context = context._get_top_level_context()
1291 execution_options = util.immutabledict(
1292 {"sa_top_level_orm_context": top_level_context}
1293 )
1294
1295 if loadopt:
1296 recursion_depth = loadopt.local_opts.get("recursion_depth", None)
1297 unlimited_recursion = recursion_depth == -1
1298 else:
1299 recursion_depth = None
1300 unlimited_recursion = False
1301
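        # recursion_depth originates from loader options along the lines
        # of selectinload(Node.children, recursion_depth=2) (illustrative;
        # "Node" stands for a hypothetical self-referential mapped class);
        # a value of -1 means unlimited recursion.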
1302 if recursion_depth is not None:
1303 if not self.parent_property._is_self_referential:
1304 raise sa_exc.InvalidRequestError(
1305 f"recursion_depth option on relationship "
1306 f"{self.parent_property} not valid for "
1307 "non-self-referential relationship"
1308 )
1309 recursion_depth = context.execution_options.get(
1310 f"_recursion_depth_{id(self)}", recursion_depth
1311 )
1312
1313 if not unlimited_recursion and recursion_depth < 0:
1314 return (
1315 effective_path,
1316 False,
1317 execution_options,
1318 recursion_depth,
1319 )
1320
1321 if not unlimited_recursion:
1322 execution_options = execution_options.union(
1323 {
1324 f"_recursion_depth_{id(self)}": recursion_depth - 1,
1325 }
1326 )
1327
1328 if loading.PostLoad.path_exists(
1329 context, effective_path, self.parent_property
1330 ):
1331 return effective_path, False, execution_options, recursion_depth
1332
1333 path_w_prop = path[self.parent_property]
1334 effective_path_w_prop = effective_path[self.parent_property]
1335
1336 if not path_w_prop.contains(context.attributes, "loader"):
1337 if join_depth:
1338 if effective_path_w_prop.length / 2 > join_depth:
1339 return (
1340 effective_path,
1341 False,
1342 execution_options,
1343 recursion_depth,
1344 )
1345 elif effective_path_w_prop.contains_mapper(self.mapper):
1346 return (
1347 effective_path,
1348 False,
1349 execution_options,
1350 recursion_depth,
1351 )
1352
1353 return effective_path, True, execution_options, recursion_depth
1354
1355
1356@relationships.RelationshipProperty.strategy_for(lazy="immediate")
1357class ImmediateLoader(PostLoader):
1358 __slots__ = ("join_depth",)
1359
1360 def __init__(self, parent, strategy_key):
1361 super().__init__(parent, strategy_key)
1362 self.join_depth = self.parent_property.join_depth
1363
1364 def init_class_attribute(self, mapper):
1365 self.parent_property._get_strategy(
1366 (("lazy", "select"),)
1367 ).init_class_attribute(mapper)
1368
1369 def create_row_processor(
1370 self,
1371 context,
1372 query_entity,
1373 path,
1374 loadopt,
1375 mapper,
1376 result,
1377 adapter,
1378 populators,
1379 ):
1380 if not context.compile_state.compile_options._enable_eagerloads:
1381 return
1382
1383 (
1384 effective_path,
1385 run_loader,
1386 execution_options,
1387 recursion_depth,
1388 ) = self._setup_for_recursion(context, path, loadopt, self.join_depth)
1389
1390 if not run_loader:
            # this will not emit SQL and will only emit for a many-to-one
            # "use get" load.  the "_RELATED" part means it may return the
            # instance even if it's expired, since this is a
            # mutually-recursive load operation.
1395 flags = attributes.PASSIVE_NO_FETCH_RELATED | PassiveFlag.NO_RAISE
1396 else:
1397 flags = attributes.PASSIVE_OFF | PassiveFlag.NO_RAISE
1398
1399 loading.PostLoad.callable_for_path(
1400 context,
1401 effective_path,
1402 self.parent,
1403 self.parent_property,
1404 self._load_for_path,
1405 loadopt,
1406 flags,
1407 recursion_depth,
1408 execution_options,
1409 )
1410
1411 def _load_for_path(
1412 self,
1413 context,
1414 path,
1415 states,
1416 load_only,
1417 loadopt,
1418 flags,
1419 recursion_depth,
1420 execution_options,
1421 ):
1422 if recursion_depth:
1423 new_opt = Load(loadopt.path.entity)
1424 new_opt.context = (
1425 loadopt,
1426 loadopt._recurse(),
1427 )
1428 alternate_effective_path = path._truncate_recursive()
1429 extra_options = (new_opt,)
1430 else:
1431 new_opt = None
1432 alternate_effective_path = path
1433 extra_options = ()
1434
1435 key = self.key
1436 lazyloader = self.parent_property._get_strategy((("lazy", "select"),))
1437 for state, overwrite in states:
1438 dict_ = state.dict
1439
1440 if overwrite or key not in dict_:
1441 value = lazyloader._load_for_state(
1442 state,
1443 flags,
1444 extra_options=extra_options,
1445 alternate_effective_path=alternate_effective_path,
1446 execution_options=execution_options,
1447 )
1448 if value not in (
1449 ATTR_WAS_SET,
1450 LoaderCallableStatus.PASSIVE_NO_RESULT,
1451 ):
1452 state.get_impl(key).set_committed_value(
1453 state, dict_, value
1454 )
1455
1456
1457@log.class_logger
1458@relationships.RelationshipProperty.strategy_for(lazy="subquery")
1459class SubqueryLoader(PostLoader):
1460 __slots__ = ("join_depth",)
1461
1462 def __init__(self, parent, strategy_key):
1463 super().__init__(parent, strategy_key)
1464 self.join_depth = self.parent_property.join_depth
1465
1466 def init_class_attribute(self, mapper):
1467 self.parent_property._get_strategy(
1468 (("lazy", "select"),)
1469 ).init_class_attribute(mapper)
1470
1471 def _get_leftmost(
1472 self,
1473 orig_query_entity_index,
1474 subq_path,
1475 current_compile_state,
1476 is_root,
1477 ):
1478 given_subq_path = subq_path
1479 subq_path = subq_path.path
1480 subq_mapper = orm_util._class_to_mapper(subq_path[0])
1481
1482 # determine attributes of the leftmost mapper
1483 if (
1484 self.parent.isa(subq_mapper)
1485 and self.parent_property is subq_path[1]
1486 ):
1487 leftmost_mapper, leftmost_prop = self.parent, self.parent_property
1488 else:
1489 leftmost_mapper, leftmost_prop = subq_mapper, subq_path[1]
1490
1491 if is_root:
1492 # the subq_path is also coming from cached state, so when we start
1493 # building up this path, it has to also be converted to be in terms
1494 # of the current state. this is for the specific case of the entity
1495 # is an AliasedClass against a subquery that's not otherwise going
1496 # to adapt
1497 new_subq_path = current_compile_state._entities[
1498 orig_query_entity_index
1499 ].entity_zero._path_registry[leftmost_prop]
1500 additional = len(subq_path) - len(new_subq_path)
1501 if additional:
1502 new_subq_path += path_registry.PathRegistry.coerce(
1503 subq_path[-additional:]
1504 )
1505 else:
1506 new_subq_path = given_subq_path
1507
1508 leftmost_cols = leftmost_prop.local_columns
1509
1510 leftmost_attr = [
1511 getattr(
1512 new_subq_path.path[0].entity,
1513 leftmost_mapper._columntoproperty[c].key,
1514 )
1515 for c in leftmost_cols
1516 ]
1517
1518 return leftmost_mapper, leftmost_attr, leftmost_prop, new_subq_path
1519
1520 def _generate_from_original_query(
1521 self,
1522 orig_compile_state,
1523 orig_query,
1524 leftmost_mapper,
1525 leftmost_attr,
1526 leftmost_relationship,
1527 orig_entity,
1528 ):
1529 # reformat the original query
1530 # to look only for significant columns
1531 q = orig_query._clone().correlate(None)
1532
1533 # LEGACY: make a Query back from the select() !!
1534 # This suits at least two legacy cases:
1535 # 1. applications which expect before_compile() to be called
1536 # below when we run .subquery() on this query (Keystone)
1537 # 2. applications which are doing subqueryload with complex
1538 # from_self() queries, as query.subquery() / .statement
1539 # has to do the full compile context for multiply-nested
1540 # from_self() (Neutron) - see test_subqload_from_self
1541 # for demo.
1542 q2 = query.Query.__new__(query.Query)
1543 q2.__dict__.update(q.__dict__)
1544 q = q2
1545
1546 # set the query's "FROM" list explicitly to what the
1547 # FROM list would be in any case, as we will be limiting
1548 # the columns in the SELECT list which may no longer include
1549 # all entities mentioned in things like WHERE, JOIN, etc.
1550 if not q._from_obj:
1551 q._enable_assertions = False
1552 q.select_from.non_generative(
1553 q,
1554 *{
1555 ent["entity"]
1556 for ent in _column_descriptions(
1557 orig_query, compile_state=orig_compile_state
1558 )
1559 if ent["entity"] is not None
1560 },
1561 )
1562
1563 # select from the identity columns of the outer (specifically, these
1564 # are the 'local_cols' of the property). This will remove other
1565 # columns from the query that might suggest the right entity which is
1566 # why we do set select_from above. The attributes we have are
1567 # coerced and adapted using the original query's adapter, which is
1568 # needed only for the case of adapting a subclass column to
1569 # that of a polymorphic selectable, e.g. we have
1570 # Engineer.primary_language and the entity is Person. All other
1571 # adaptations, e.g. from_self, select_entity_from(), will occur
1572 # within the new query when it compiles, as the compile_state we are
1573 # using here is only a partial one. If the subqueryload is from a
1574 # with_polymorphic() or other aliased() object, left_attr will already
1575 # be the correct attributes so no adaptation is needed.
1576 target_cols = orig_compile_state._adapt_col_list(
1577 [
1578 sql.coercions.expect(sql.roles.ColumnsClauseRole, o)
1579 for o in leftmost_attr
1580 ],
1581 orig_compile_state._get_current_adapter(),
1582 )
1583 q._raw_columns = target_cols
1584
1585 distinct_target_key = leftmost_relationship.distinct_target_key
1586
1587 if distinct_target_key is True:
1588 q._distinct = True
1589 elif distinct_target_key is None:
1590 # if target_cols refer to a non-primary key or only
1591 # part of a composite primary key, set the q as distinct
1592 for t in {c.table for c in target_cols}:
1593 if not set(target_cols).issuperset(t.primary_key):
1594 q._distinct = True
1595 break
1596
1597 # don't need ORDER BY if no limit/offset
1598 if not q._has_row_limiting_clause:
1599 q._order_by_clauses = ()
1600
1601 if q._distinct is True and q._order_by_clauses:
1602 # the logic to automatically add the order by columns to the query
1603 # when distinct is True is deprecated in the query
1604 to_add = sql_util.expand_column_list_from_order_by(
1605 target_cols, q._order_by_clauses
1606 )
1607 if to_add:
1608 q._set_entities(target_cols + to_add)
1609
1610 # the original query now becomes a subquery
1611 # which we'll join onto.
1612 # LEGACY: as "q" is a Query, the before_compile() event is invoked
1613 # here.
1614 embed_q = q.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL).subquery()
1615 left_alias = orm_util.AliasedClass(
1616 leftmost_mapper, embed_q, use_mapper_path=True
1617 )
1618 return left_alias
1619
1620 def _prep_for_joins(self, left_alias, subq_path):
1621 # figure out what's being joined. a.k.a. the fun part
1622 to_join = []
1623 pairs = list(subq_path.pairs())
1624
1625 for i, (mapper, prop) in enumerate(pairs):
1626 if i > 0:
1627 # look at the previous mapper in the chain -
1628 # if it is as or more specific than this prop's
1629 # mapper, use that instead.
1630 # note we have an assumption here that
1631 # the non-first element is always going to be a mapper,
1632 # not an AliasedClass
1633
1634 prev_mapper = pairs[i - 1][1].mapper
1635 to_append = prev_mapper if prev_mapper.isa(mapper) else mapper
1636 else:
1637 to_append = mapper
1638
1639 to_join.append((to_append, prop.key))
1640
1641 # determine the immediate parent class we are joining from,
1642 # which needs to be aliased.
1643
1644 if len(to_join) < 2:
1645 # in the case of a one level eager load, this is the
1646 # leftmost "left_alias".
1647 parent_alias = left_alias
1648 else:
1649 info = inspect(to_join[-1][0])
1650 if info.is_aliased_class:
1651 parent_alias = info.entity
1652 else:
1653 # alias a plain mapper as we may be
1654 # joining multiple times
1655 parent_alias = orm_util.AliasedClass(
1656 info.entity, use_mapper_path=True
1657 )
1658
1659 local_cols = self.parent_property.local_columns
1660
1661 local_attr = [
1662 getattr(parent_alias, self.parent._columntoproperty[c].key)
1663 for c in local_cols
1664 ]
1665 return to_join, local_attr, parent_alias
1666
1667 def _apply_joins(
1668 self, q, to_join, left_alias, parent_alias, effective_entity
1669 ):
1670 ltj = len(to_join)
1671 if ltj == 1:
1672 to_join = [
1673 getattr(left_alias, to_join[0][1]).of_type(effective_entity)
1674 ]
1675 elif ltj == 2:
1676 to_join = [
1677 getattr(left_alias, to_join[0][1]).of_type(parent_alias),
1678 getattr(parent_alias, to_join[-1][1]).of_type(
1679 effective_entity
1680 ),
1681 ]
1682 elif ltj > 2:
1683 middle = [
1684 (
1685 (
1686 orm_util.AliasedClass(item[0])
1687 if not inspect(item[0]).is_aliased_class
1688 else item[0].entity
1689 ),
1690 item[1],
1691 )
1692 for item in to_join[1:-1]
1693 ]
1694 inner = []
1695
1696 while middle:
1697 item = middle.pop(0)
1698 attr = getattr(item[0], item[1])
1699 if middle:
1700 attr = attr.of_type(middle[0][0])
1701 else:
1702 attr = attr.of_type(parent_alias)
1703
1704 inner.append(attr)
1705
1706 to_join = (
1707 [getattr(left_alias, to_join[0][1]).of_type(inner[0].parent)]
1708 + inner
1709 + [
1710 getattr(parent_alias, to_join[-1][1]).of_type(
1711 effective_entity
1712 )
1713 ]
1714 )
1715
1716 for attr in to_join:
1717 q = q.join(attr)
1718
1719 return q
1720
1721 def _setup_options(
1722 self,
1723 context,
1724 q,
1725 subq_path,
1726 rewritten_path,
1727 orig_query,
1728 effective_entity,
1729 loadopt,
1730 ):
        # note that because the subqueryload object does not re-use the
        # cached query, and instead always makes use of the currently
        # invoked query, the two queries we have here (orig and
        # context.query) are both non-cached queries, so we can transfer
        # the options as-is without adjusting for new criteria.  Some work
        # on #6881 / #6889 brought this into question.
1738 new_options = orig_query._with_options
1739
1740 if loadopt and loadopt._extra_criteria:
1741 new_options += (
1742 orm_util.LoaderCriteriaOption(
1743 self.entity,
1744 loadopt._generate_extra_criteria(context),
1745 ),
1746 )
1747
1748 # propagate loader options etc. to the new query.
1749 # these will fire relative to subq_path.
1750 q = q._with_current_path(rewritten_path)
1751 q = q.options(*new_options)
1752
1753 return q
1754
1755 def _setup_outermost_orderby(self, q):
1756 if self.parent_property.order_by:
1757
1758 def _setup_outermost_orderby(compile_context):
1759 compile_context.eager_order_by += tuple(
1760 util.to_list(self.parent_property.order_by)
1761 )
1762
1763 q = q._add_context_option(
1764 _setup_outermost_orderby, self.parent_property
1765 )
1766
1767 return q
1768
1769 class _SubqCollections:
1770 """Given a :class:`_query.Query` used to emit the "subquery load",
1771 provide a load interface that executes the query at the
1772 first moment a value is needed.
1773
1774 """
1775
1776 __slots__ = (
1777 "session",
1778 "execution_options",
1779 "load_options",
1780 "params",
1781 "subq",
1782 "_data",
1783 )
1784
1785 def __init__(self, context, subq):
            # avoid creating a reference cycle by not storing the context
            # itself, even though keeping it would be preferable
1788 self.session = context.session
1789 self.execution_options = context.execution_options
1790 self.load_options = context.load_options
1791 self.params = context.params or {}
1792 self.subq = subq
1793 self._data = None
1794
1795 def get(self, key, default):
1796 if self._data is None:
1797 self._load()
1798 return self._data.get(key, default)
1799
1800 def _load(self):
1801 self._data = collections.defaultdict(list)
1802
1803 q = self.subq
1804 assert q.session is None
1805
1806 q = q.with_session(self.session)
1807
1808 if self.load_options._populate_existing:
1809 q = q.populate_existing()
1810 # to work with baked query, the parameters may have been
1811 # updated since this query was created, so take these into account
1812
1813 rows = list(q.params(self.params))
1814 for k, v in itertools.groupby(rows, lambda x: x[1:]):
1815 self._data[k].extend(vv[0] for vv in v)
1816
1817 def loader(self, state, dict_, row):
1818 if self._data is None:
1819 self._load()
1820
1821 def _setup_query_from_rowproc(
1822 self,
1823 context,
1824 query_entity,
1825 path,
1826 entity,
1827 loadopt,
1828 adapter,
1829 ):
1830 compile_state = context.compile_state
1831 if (
1832 not compile_state.compile_options._enable_eagerloads
1833 or compile_state.compile_options._for_refresh_state
1834 ):
1835 return
1836
1837 orig_query_entity_index = compile_state._entities.index(query_entity)
1838 context.loaders_require_buffering = True
1839
1840 path = path[self.parent_property]
1841
1842 # build up a path indicating the path from the leftmost
1843 # entity to the thing we're subquery loading.
1844 with_poly_entity = path.get(
1845 compile_state.attributes, "path_with_polymorphic", None
1846 )
1847 if with_poly_entity is not None:
1848 effective_entity = with_poly_entity
1849 else:
1850 effective_entity = self.entity
1851
1852 subq_path, rewritten_path = context.query._execution_options.get(
1853 ("subquery_paths", None),
1854 (orm_util.PathRegistry.root, orm_util.PathRegistry.root),
1855 )
1856 is_root = subq_path is orm_util.PathRegistry.root
1857 subq_path = subq_path + path
1858 rewritten_path = rewritten_path + path
1859
        # use the current query being invoked, not the compile state
        # one.  this is so that we get the current parameters.  however,
        # it means we can't use the existing compile state; we have to
        # make a new one.  other approaches include possibly using the
        # compiled query but swapping the params, which seems only
        # marginally less time spent but more complicated
1866 orig_query = context.query._execution_options.get(
1867 ("orig_query", SubqueryLoader), context.query
1868 )
1869
1870 # make a new compile_state for the query that's probably cached, but
1871 # we're sort of undoing a bit of that caching :(
1872 compile_state_cls = ORMCompileState._get_plugin_class_for_plugin(
1873 orig_query, "orm"
1874 )
1875
1876 if orig_query._is_lambda_element:
1877 if context.load_options._lazy_loaded_from is None:
1878 util.warn(
1879 'subqueryloader for "%s" must invoke lambda callable '
1880 "at %r in "
1881 "order to produce a new query, decreasing the efficiency "
1882 "of caching for this statement. Consider using "
1883 "selectinload() for more effective full-lambda caching"
1884 % (self, orig_query)
1885 )
1886 orig_query = orig_query._resolved
1887
        # this is the more "quick" version; however, it's not clear how
        # much of this we need.  in particular I can't get a test to fail
        # if the "set_base_alias" is missing, and it's not clear why that is.
1891 orig_compile_state = compile_state_cls._create_entities_collection(
1892 orig_query, legacy=False
1893 )
1894
1895 (
1896 leftmost_mapper,
1897 leftmost_attr,
1898 leftmost_relationship,
1899 rewritten_path,
1900 ) = self._get_leftmost(
1901 orig_query_entity_index,
1902 rewritten_path,
1903 orig_compile_state,
1904 is_root,
1905 )
1906
1907 # generate a new Query from the original, then
1908 # produce a subquery from it.
1909 left_alias = self._generate_from_original_query(
1910 orig_compile_state,
1911 orig_query,
1912 leftmost_mapper,
1913 leftmost_attr,
1914 leftmost_relationship,
1915 entity,
1916 )
1917
1918 # generate another Query that will join the
1919 # left alias to the target relationships.
1920 # basically doing a longhand
1921 # "from_self()". (from_self() itself not quite industrial
1922 # strength enough for all contingencies...but very close)
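        #
        # the emitted statement is roughly of this shape (illustrative
        # only, for a hypothetical Parent.children relationship):
        #
        #   SELECT child.*, anon.parent_id
        #   FROM (SELECT parent.id AS parent_id FROM parent WHERE ...) AS anon
        #   JOIN child ON child.parent_id = anon.parent_id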
1923
1924 q = query.Query(effective_entity)
1925
1926 q._execution_options = context.query._execution_options.merge_with(
1927 context.execution_options,
1928 {
1929 ("orig_query", SubqueryLoader): orig_query,
1930 ("subquery_paths", None): (subq_path, rewritten_path),
1931 },
1932 )
1933
1934 q = q._set_enable_single_crit(False)
1935 to_join, local_attr, parent_alias = self._prep_for_joins(
1936 left_alias, subq_path
1937 )
1938
1939 q = q.add_columns(*local_attr)
1940 q = self._apply_joins(
1941 q, to_join, left_alias, parent_alias, effective_entity
1942 )
1943
1944 q = self._setup_options(
1945 context,
1946 q,
1947 subq_path,
1948 rewritten_path,
1949 orig_query,
1950 effective_entity,
1951 loadopt,
1952 )
1953 q = self._setup_outermost_orderby(q)
1954
1955 return q
1956
1957 def create_row_processor(
1958 self,
1959 context,
1960 query_entity,
1961 path,
1962 loadopt,
1963 mapper,
1964 result,
1965 adapter,
1966 populators,
1967 ):
1968 if context.refresh_state:
1969 return self._immediateload_create_row_processor(
1970 context,
1971 query_entity,
1972 path,
1973 loadopt,
1974 mapper,
1975 result,
1976 adapter,
1977 populators,
1978 )
1979
1980 _, run_loader, _, _ = self._setup_for_recursion(
1981 context, path, loadopt, self.join_depth
1982 )
1983 if not run_loader:
1984 return
1985
1986 if not isinstance(context.compile_state, ORMSelectCompileState):
1987 # issue 7505 - subqueryload() in 1.3 and previous would silently
1988 # degrade for from_statement() without warning. this behavior
1989 # is restored here
1990 return
1991
1992 if not self.parent.class_manager[self.key].impl.supports_population:
1993 raise sa_exc.InvalidRequestError(
1994 "'%s' does not support object "
1995 "population - eager loading cannot be applied." % self
1996 )
1997
        # a little dance here, as the "path" is still something that only
        # semi-tracks the exact series of things we are loading, and does
        # not tell us about with_polymorphic() and similar constructs when
        # it's at the root.  the initial MapperEntity is more accurate for
        # this case.
2002 if len(path) == 1:
2003 if not orm_util._entity_isa(query_entity.entity_zero, self.parent):
2004 return
2005 elif not orm_util._entity_isa(path[-1], self.parent):
2006 return
2007
2008 subq = self._setup_query_from_rowproc(
2009 context,
2010 query_entity,
2011 path,
2012 path[-1],
2013 loadopt,
2014 adapter,
2015 )
2016
2017 if subq is None:
2018 return
2019
2020 assert subq.session is None
2021
2022 path = path[self.parent_property]
2023
2024 local_cols = self.parent_property.local_columns
2025
2026 # cache the loaded collections in the context
2027 # so that inheriting mappers don't re-load when they
2028 # call upon create_row_processor again
2029 collections = path.get(context.attributes, "collections")
2030 if collections is None:
2031 collections = self._SubqCollections(context, subq)
2032 path.set(context.attributes, "collections", collections)
2033
2034 if adapter:
2035 local_cols = [adapter.columns[c] for c in local_cols]
2036
2037 if self.uselist:
2038 self._create_collection_loader(
2039 context, result, collections, local_cols, populators
2040 )
2041 else:
2042 self._create_scalar_loader(
2043 context, result, collections, local_cols, populators
2044 )
2045
2046 def _create_collection_loader(
2047 self, context, result, collections, local_cols, populators
2048 ):
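        # tuple_getter extracts the parent's "local" column values from
        # each incoming row; that tuple keys into the pre-loaded
        # _SubqCollections mapping to find the child collection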
2049 tuple_getter = result._tuple_getter(local_cols)
2050
2051 def load_collection_from_subq(state, dict_, row):
2052 collection = collections.get(tuple_getter(row), ())
2053 state.get_impl(self.key).set_committed_value(
2054 state, dict_, collection
2055 )
2056
2057 def load_collection_from_subq_existing_row(state, dict_, row):
2058 if self.key not in dict_:
2059 load_collection_from_subq(state, dict_, row)
2060
2061 populators["new"].append((self.key, load_collection_from_subq))
2062 populators["existing"].append(
2063 (self.key, load_collection_from_subq_existing_row)
2064 )
2065
2066 if context.invoke_all_eagers:
2067 populators["eager"].append((self.key, collections.loader))
2068
2069 def _create_scalar_loader(
2070 self, context, result, collections, local_cols, populators
2071 ):
2072 tuple_getter = result._tuple_getter(local_cols)
2073
2074 def load_scalar_from_subq(state, dict_, row):
2075 collection = collections.get(tuple_getter(row), (None,))
2076 if len(collection) > 1:
2077 util.warn(
2078 "Multiple rows returned with "
2079 "uselist=False for eagerly-loaded attribute '%s' " % self
2080 )
2081
2082 scalar = collection[0]
2083 state.get_impl(self.key).set_committed_value(state, dict_, scalar)
2084
2085 def load_scalar_from_subq_existing_row(state, dict_, row):
2086 if self.key not in dict_:
2087 load_scalar_from_subq(state, dict_, row)
2088
2089 populators["new"].append((self.key, load_scalar_from_subq))
2090 populators["existing"].append(
2091 (self.key, load_scalar_from_subq_existing_row)
2092 )
2093 if context.invoke_all_eagers:
2094 populators["eager"].append((self.key, collections.loader))
2095
2096
2097@log.class_logger
2098@relationships.RelationshipProperty.strategy_for(lazy="joined")
2099@relationships.RelationshipProperty.strategy_for(lazy=False)
2100class JoinedLoader(AbstractRelationshipLoader):
2101 """Provide loading behavior for a :class:`.Relationship`
2102 using joined eager loading.
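
    A minimal usage sketch; ``User`` is a hypothetical mapped class
    with a ``User.addresses`` relationship::

        from sqlalchemy import select
        from sqlalchemy.orm import joinedload

        # joined eager loading emits a single SELECT with a LEFT OUTER
        # JOIN (or an INNER JOIN when innerjoin=True is configured)
        # against the related table
        stmt = select(User).options(joinedload(User.addresses))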
2103
2104 """
2105
2106 __slots__ = "join_depth"
2107
2108 def __init__(self, parent, strategy_key):
2109 super().__init__(parent, strategy_key)
2110 self.join_depth = self.parent_property.join_depth
2111
2112 def init_class_attribute(self, mapper):
2113 self.parent_property._get_strategy(
2114 (("lazy", "select"),)
2115 ).init_class_attribute(mapper)
2116
2117 def setup_query(
2118 self,
2119 compile_state,
2120 query_entity,
2121 path,
2122 loadopt,
2123 adapter,
2124 column_collection=None,
2125 parentmapper=None,
2126 chained_from_outerjoin=False,
2127 **kwargs,
2128 ):
2129 """Add a left outer join to the statement that's being constructed."""
2130
2131 if not compile_state.compile_options._enable_eagerloads:
2132 return
2133 elif self.uselist:
2134 compile_state.multi_row_eager_loaders = True
2135
2136 path = path[self.parent_property]
2137
2138 with_polymorphic = None
2139
2140 user_defined_adapter = (
2141 self._init_user_defined_eager_proc(
2142 loadopt, compile_state, compile_state.attributes
2143 )
2144 if loadopt
2145 else False
2146 )
2147
2148 if user_defined_adapter is not False:
            # set up an adapter but don't create any JOIN; assume it's
            # already present in the query
2151 (
2152 clauses,
2153 adapter,
2154 add_to_collection,
2155 ) = self._setup_query_on_user_defined_adapter(
2156 compile_state,
2157 query_entity,
2158 path,
2159 adapter,
2160 user_defined_adapter,
2161 )
2162
            # since no JOIN is being added here, there's no need to wrap a
            # limited/distinct SELECT in order to put the JOIN on the
            # outside.
2166
2167 else:
2168 # if not via query option, check for
2169 # a cycle
2170 if not path.contains(compile_state.attributes, "loader"):
2171 if self.join_depth:
2172 if path.length / 2 > self.join_depth:
2173 return
2174 elif path.contains_mapper(self.mapper):
2175 return
2176
2177 # add the JOIN and create an adapter
2178 (
2179 clauses,
2180 adapter,
2181 add_to_collection,
2182 chained_from_outerjoin,
2183 ) = self._generate_row_adapter(
2184 compile_state,
2185 query_entity,
2186 path,
2187 loadopt,
2188 adapter,
2189 column_collection,
2190 parentmapper,
2191 chained_from_outerjoin,
2192 )
2193
2194 # for multi-row, we want to wrap limited/distinct SELECT,
2195 # because we want to put the JOIN on the outside.
2196 compile_state.eager_adding_joins = True
2197
2198 with_poly_entity = path.get(
2199 compile_state.attributes, "path_with_polymorphic", None
2200 )
2201 if with_poly_entity is not None:
2202 with_polymorphic = inspect(
2203 with_poly_entity
2204 ).with_polymorphic_mappers
2205 else:
2206 with_polymorphic = None
2207
2208 path = path[self.entity]
2209
2210 loading._setup_entity_query(
2211 compile_state,
2212 self.mapper,
2213 query_entity,
2214 path,
2215 clauses,
2216 add_to_collection,
2217 with_polymorphic=with_polymorphic,
2218 parentmapper=self.mapper,
2219 chained_from_outerjoin=chained_from_outerjoin,
2220 )
2221
2222 has_nones = util.NONE_SET.intersection(compile_state.secondary_columns)
2223
2224 if has_nones:
2225 if with_poly_entity is not None:
2226 raise sa_exc.InvalidRequestError(
2227 "Detected unaliased columns when generating joined "
2228 "load. Make sure to use aliased=True or flat=True "
2229 "when using joined loading with with_polymorphic()."
2230 )
2231 else:
2232 compile_state.secondary_columns = [
2233 c for c in compile_state.secondary_columns if c is not None
2234 ]
2235
2236 def _init_user_defined_eager_proc(
2237 self, loadopt, compile_state, target_attributes
2238 ):
2239 # check if the opt applies at all
2240 if "eager_from_alias" not in loadopt.local_opts:
2241 # nope
2242 return False
2243
2244 path = loadopt.path.parent
2245
2246 # the option applies. check if the "user_defined_eager_row_processor"
2247 # has been built up.
2248 adapter = path.get(
2249 compile_state.attributes, "user_defined_eager_row_processor", False
2250 )
2251 if adapter is not False:
2252 # just return it
2253 return adapter
2254
2255 # otherwise figure it out.
2256 alias = loadopt.local_opts["eager_from_alias"]
2257 root_mapper, prop = path[-2:]
2258
2259 if alias is not None:
2260 if isinstance(alias, str):
2261 alias = prop.target.alias(alias)
2262 adapter = orm_util.ORMAdapter(
2263 orm_util._TraceAdaptRole.JOINEDLOAD_USER_DEFINED_ALIAS,
2264 prop.mapper,
2265 selectable=alias,
2266 equivalents=prop.mapper._equivalent_columns,
2267 limit_on_entity=False,
2268 )
2269 else:
2270 if path.contains(
2271 compile_state.attributes, "path_with_polymorphic"
2272 ):
2273 with_poly_entity = path.get(
2274 compile_state.attributes, "path_with_polymorphic"
2275 )
2276 adapter = orm_util.ORMAdapter(
2277 orm_util._TraceAdaptRole.JOINEDLOAD_PATH_WITH_POLYMORPHIC,
2278 with_poly_entity,
2279 equivalents=prop.mapper._equivalent_columns,
2280 )
2281 else:
2282 adapter = compile_state._polymorphic_adapters.get(
2283 prop.mapper, None
2284 )
2285 path.set(
2286 target_attributes,
2287 "user_defined_eager_row_processor",
2288 adapter,
2289 )
2290
2291 return adapter
2292
2293 def _setup_query_on_user_defined_adapter(
2294 self, context, entity, path, adapter, user_defined_adapter
2295 ):
2296 # apply some more wrapping to the "user defined adapter"
2297 # if we are setting up the query for SQL render.
2298 adapter = entity._get_entity_clauses(context)
2299
2300 if adapter and user_defined_adapter:
2301 user_defined_adapter = user_defined_adapter.wrap(adapter)
2302 path.set(
2303 context.attributes,
2304 "user_defined_eager_row_processor",
2305 user_defined_adapter,
2306 )
2307 elif adapter:
2308 user_defined_adapter = adapter
2309 path.set(
2310 context.attributes,
2311 "user_defined_eager_row_processor",
2312 user_defined_adapter,
2313 )
2314
2315 add_to_collection = context.primary_columns
2316 return user_defined_adapter, adapter, add_to_collection
2317
2318 def _generate_row_adapter(
2319 self,
2320 compile_state,
2321 entity,
2322 path,
2323 loadopt,
2324 adapter,
2325 column_collection,
2326 parentmapper,
2327 chained_from_outerjoin,
2328 ):
2329 with_poly_entity = path.get(
2330 compile_state.attributes, "path_with_polymorphic", None
2331 )
2332 if with_poly_entity:
2333 to_adapt = with_poly_entity
2334 else:
2335 insp = inspect(self.entity)
2336 if insp.is_aliased_class:
2337 alt_selectable = insp.selectable
2338 else:
2339 alt_selectable = None
2340
2341 to_adapt = orm_util.AliasedClass(
2342 self.mapper,
2343 alias=(
2344 alt_selectable._anonymous_fromclause(flat=True)
2345 if alt_selectable is not None
2346 else None
2347 ),
2348 flat=True,
2349 use_mapper_path=True,
2350 )
2351
2352 to_adapt_insp = inspect(to_adapt)
2353
2354 clauses = to_adapt_insp._memo(
2355 ("joinedloader_ormadapter", self),
2356 orm_util.ORMAdapter,
2357 orm_util._TraceAdaptRole.JOINEDLOAD_MEMOIZED_ADAPTER,
2358 to_adapt_insp,
2359 equivalents=self.mapper._equivalent_columns,
2360 adapt_required=True,
2361 allow_label_resolve=False,
2362 anonymize_labels=True,
2363 )
2364
2365 assert clauses.is_aliased_class
2366
2367 innerjoin = (
2368 loadopt.local_opts.get("innerjoin", self.parent_property.innerjoin)
2369 if loadopt is not None
2370 else self.parent_property.innerjoin
2371 )
2372
2373 if not innerjoin:
2374 # if this is an outer join, all non-nested eager joins from
2375 # this path must also be outer joins
2376 chained_from_outerjoin = True
2377
2378 compile_state.create_eager_joins.append(
2379 (
2380 self._create_eager_join,
2381 entity,
2382 path,
2383 adapter,
2384 parentmapper,
2385 clauses,
2386 innerjoin,
2387 chained_from_outerjoin,
2388 loadopt._extra_criteria if loadopt else (),
2389 )
2390 )
2391
2392 add_to_collection = compile_state.secondary_columns
2393 path.set(compile_state.attributes, "eager_row_processor", clauses)
2394
2395 return clauses, adapter, add_to_collection, chained_from_outerjoin
2396
2397 def _create_eager_join(
2398 self,
2399 compile_state,
2400 query_entity,
2401 path,
2402 adapter,
2403 parentmapper,
2404 clauses,
2405 innerjoin,
2406 chained_from_outerjoin,
2407 extra_criteria,
2408 ):
2409 if parentmapper is None:
2410 localparent = query_entity.mapper
2411 else:
2412 localparent = parentmapper
2413
2414 # whether or not the Query will wrap the selectable in a subquery,
2415 # and then attach eager load joins to that (i.e., in the case of
2416 # LIMIT/OFFSET etc.)
2417 should_nest_selectable = (
2418 compile_state.multi_row_eager_loaders
2419 and compile_state._should_nest_selectable
2420 )
2421
2422 query_entity_key = None
2423
2424 if (
2425 query_entity not in compile_state.eager_joins
2426 and not should_nest_selectable
2427 and compile_state.from_clauses
2428 ):
2429 indexes = sql_util.find_left_clause_that_matches_given(
2430 compile_state.from_clauses, query_entity.selectable
2431 )
2432
2433 if len(indexes) > 1:
2434 # for the eager load case, I can't reproduce this right
2435 # now. For query.join() I can.
2436 raise sa_exc.InvalidRequestError(
                    "Can't identify which query entity to join this eager "
                    "load from.  Please use an exact match when "
                    "specifying the join path."
2440 )
2441
2442 if indexes:
2443 clause = compile_state.from_clauses[indexes[0]]
2444 # join to an existing FROM clause on the query.
2445 # key it to its list index in the eager_joins dict.
2446 # Query._compile_context will adapt as needed and
2447 # append to the FROM clause of the select().
2448 query_entity_key, default_towrap = indexes[0], clause
2449
2450 if query_entity_key is None:
2451 query_entity_key, default_towrap = (
2452 query_entity,
2453 query_entity.selectable,
2454 )
2455
2456 towrap = compile_state.eager_joins.setdefault(
2457 query_entity_key, default_towrap
2458 )
2459
2460 if adapter:
2461 if getattr(adapter, "is_aliased_class", False):
2462 # joining from an adapted entity. The adapted entity
2463 # might be a "with_polymorphic", so resolve that to our
2464 # specific mapper's entity before looking for our attribute
2465 # name on it.
2466 efm = adapter.aliased_insp._entity_for_mapper(
2467 localparent
2468 if localparent.isa(self.parent)
2469 else self.parent
2470 )
2471
2472 # look for our attribute on the adapted entity, else fall back
2473 # to our straight property
2474 onclause = getattr(efm.entity, self.key, self.parent_property)
2475 else:
2476 onclause = getattr(
2477 orm_util.AliasedClass(
2478 self.parent, adapter.selectable, use_mapper_path=True
2479 ),
2480 self.key,
2481 self.parent_property,
2482 )
2483
2484 else:
2485 onclause = self.parent_property
2486
2487 assert clauses.is_aliased_class
2488
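        # decide whether to attach the new JOIN to the outside of the
        # existing join structure; when chained inner joins use
        # innerjoin="nested", the join is instead spliced into the
        # existing _ORMJoin structure below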
2489 attach_on_outside = (
2490 not chained_from_outerjoin
2491 or not innerjoin
2492 or innerjoin == "unnested"
2493 or query_entity.entity_zero.represents_outer_join
2494 )
2495
2496 extra_join_criteria = extra_criteria
2497 additional_entity_criteria = compile_state.global_attributes.get(
2498 ("additional_entity_criteria", self.mapper), ()
2499 )
2500 if additional_entity_criteria:
2501 extra_join_criteria += tuple(
2502 ae._resolve_where_criteria(self.mapper)
2503 for ae in additional_entity_criteria
2504 if ae.propagate_to_loaders
2505 )
2506
2507 if attach_on_outside:
2508 # this is the "classic" eager join case.
2509 eagerjoin = orm_util._ORMJoin(
2510 towrap,
2511 clauses.aliased_insp,
2512 onclause,
2513 isouter=not innerjoin
2514 or query_entity.entity_zero.represents_outer_join
2515 or (chained_from_outerjoin and isinstance(towrap, sql.Join)),
2516 _left_memo=self.parent,
2517 _right_memo=path[self.mapper],
2518 _extra_criteria=extra_join_criteria,
2519 )
2520 else:
2521 # all other cases are innerjoin=='nested' approach
2522 eagerjoin = self._splice_nested_inner_join(
2523 path, path[-2], towrap, clauses, onclause, extra_join_criteria
2524 )
2525
2526 compile_state.eager_joins[query_entity_key] = eagerjoin
2527
2528 # send a hint to the Query as to where it may "splice" this join
2529 eagerjoin.stop_on = query_entity.selectable
2530
2531 if not parentmapper:
            # for parentclause that is the non-eager end of the join,
            # ensure all the parent cols in the primaryjoin are actually
            # in the columns clause (i.e. are not deferred), so that
            # aliasing applied by the Query propagates those columns
            # outward.  This has the effect of "undefering" those columns.
2539 for col in sql_util._find_columns(
2540 self.parent_property.primaryjoin
2541 ):
2542 if localparent.persist_selectable.c.contains_column(col):
2543 if adapter:
2544 col = adapter.columns[col]
2545 compile_state._append_dedupe_col_collection(
2546 col, compile_state.primary_columns
2547 )
2548
2549 if self.parent_property.order_by:
2550 compile_state.eager_order_by += tuple(
2551 (eagerjoin._target_adapter.copy_and_process)(
2552 util.to_list(self.parent_property.order_by)
2553 )
2554 )
2555
2556 def _splice_nested_inner_join(
2557 self,
2558 path,
2559 entity_we_want_to_splice_onto,
2560 join_obj,
2561 clauses,
2562 onclause,
2563 extra_criteria,
2564 entity_inside_join_structure: Union[
2565 Mapper, None, Literal[False]
2566 ] = False,
2567 detected_existing_path: Optional[path_registry.PathRegistry] = None,
2568 ):
2569 # recursive fn to splice a nested join into an existing one.
2570 # entity_inside_join_structure=False means this is the outermost call,
2571 # and it should return a value. entity_inside_join_structure=<mapper>
2572 # indicates we've descended into a join and are looking at a FROM
2573 # clause representing this mapper; if this is not
2574 # entity_we_want_to_splice_onto then return None to end the recursive
2575 # branch
2576
2577 assert entity_we_want_to_splice_onto is path[-2]
2578
2579 if entity_inside_join_structure is False:
2580 assert isinstance(join_obj, orm_util._ORMJoin)
2581
2582 if isinstance(join_obj, sql.selectable.FromGrouping):
2583 # FromGrouping - continue descending into the structure
2584 return self._splice_nested_inner_join(
2585 path,
2586 entity_we_want_to_splice_onto,
2587 join_obj.element,
2588 clauses,
2589 onclause,
2590 extra_criteria,
2591 entity_inside_join_structure,
2592 )
2593 elif isinstance(join_obj, orm_util._ORMJoin):
2594 # _ORMJoin - continue descending into the structure
2595
2596 join_right_path = join_obj._right_memo
2597
2598 # see if right side of join is viable
2599 target_join = self._splice_nested_inner_join(
2600 path,
2601 entity_we_want_to_splice_onto,
2602 join_obj.right,
2603 clauses,
2604 onclause,
2605 extra_criteria,
2606 entity_inside_join_structure=(
2607 join_right_path[-1].mapper
2608 if join_right_path is not None
2609 else None
2610 ),
2611 )
2612
2613 if target_join is not None:
2614 # for a right splice, attempt to flatten out
2615 # a JOIN b JOIN c JOIN .. to avoid needless
2616 # parenthesis nesting
2617 if not join_obj.isouter and not target_join.isouter:
2618 eagerjoin = join_obj._splice_into_center(target_join)
2619 else:
2620 eagerjoin = orm_util._ORMJoin(
2621 join_obj.left,
2622 target_join,
2623 join_obj.onclause,
2624 isouter=join_obj.isouter,
2625 _left_memo=join_obj._left_memo,
2626 )
2627
2628 eagerjoin._target_adapter = target_join._target_adapter
2629 return eagerjoin
2630
2631 else:
2632 # see if left side of join is viable
2633 target_join = self._splice_nested_inner_join(
2634 path,
2635 entity_we_want_to_splice_onto,
2636 join_obj.left,
2637 clauses,
2638 onclause,
2639 extra_criteria,
2640 entity_inside_join_structure=join_obj._left_memo,
2641 detected_existing_path=join_right_path,
2642 )
2643
2644 if target_join is not None:
2645 eagerjoin = orm_util._ORMJoin(
2646 target_join,
2647 join_obj.right,
2648 join_obj.onclause,
2649 isouter=join_obj.isouter,
2650 _right_memo=join_obj._right_memo,
2651 )
2652 eagerjoin._target_adapter = target_join._target_adapter
2653 return eagerjoin
2654
2655 # neither side viable, return None, or fail if this was the top
2656 # most call
2657 if entity_inside_join_structure is False:
2658 assert (
2659 False
2660 ), "assertion failed attempting to produce joined eager loads"
2661 return None
2662
2663 # reached an endpoint (e.g. a table that's mapped, or an alias of that
2664 # table). determine if we can use this endpoint to splice onto
2665
2666 # is this the entity we want to splice onto in the first place?
2667 if not entity_we_want_to_splice_onto.isa(entity_inside_join_structure):
2668 return None
2669
        # path check.  if we know the path by which this join endpoint got
        # here, look at the path we are trying to satisfy and see if we're
        # in the wrong place.  This is specifically for when our entity may
        # appear more than once in the path, issue #11449
2674 if detected_existing_path:
            # this assertion is currently based on how this call is made,
            # where given a join_obj, the call will pass either
            # entity_inside_join_structure=join_obj._left_memo
            # or entity_inside_join_structure=join_obj._right_memo[-1].mapper
2679 assert detected_existing_path[-3] is entity_inside_join_structure
2680
2681 # from that, see if the path we are targeting matches the
2682 # "existing" path of this join all the way up to the midpoint
2683 # of this join object (e.g. the relationship).
2684 # if not, then this is not our target
2685 #
2686 # a test condition where this test is false looks like:
2687 #
2688 # desired splice: Node->kind->Kind
2689 # path of desired splice: NodeGroup->nodes->Node->kind
2690 # path we've located: NodeGroup->nodes->Node->common_node->Node
2691 #
2692 # above, because we want to splice kind->Kind onto
2693 # NodeGroup->nodes->Node, this is not our path because it actually
2694 # goes more steps than we want into self-referential
2695 # ->common_node->Node
2696 #
2697 # a test condition where this test is true looks like:
2698 #
2699 # desired splice: B->c2s->C2
2700 # path of desired splice: A->bs->B->c2s
2701 # path we've located: A->bs->B->c1s->C1
2702 #
2703 # above, we want to splice c2s->C2 onto B, and the located path
2704 # shows that the join ends with B->c1s->C1. so we will
2705 # add another join onto that, which would create a "branch" that
2706 # we might represent in a pseudopath as:
2707 #
2708 # B->c1s->C1
2709 # ->c2s->C2
2710 #
2711 # i.e. A JOIN B ON <bs> JOIN C1 ON <c1s>
2712 # JOIN C2 ON <c2s>
2713 #
2714
2715 if detected_existing_path[0:-2] != path.path[0:-1]:
2716 return None
2717
2718 return orm_util._ORMJoin(
2719 join_obj,
2720 clauses.aliased_insp,
2721 onclause,
2722 isouter=False,
2723 _left_memo=entity_inside_join_structure,
2724 _right_memo=path[path[-1].mapper],
2725 _extra_criteria=extra_criteria,
2726 )
2727
2728 def _create_eager_adapter(self, context, result, adapter, path, loadopt):
2729 compile_state = context.compile_state
2730
2731 user_defined_adapter = (
2732 self._init_user_defined_eager_proc(
2733 loadopt, compile_state, context.attributes
2734 )
2735 if loadopt
2736 else False
2737 )
2738
2739 if user_defined_adapter is not False:
2740 decorator = user_defined_adapter
2741 # user defined eagerloads are part of the "primary"
2742 # portion of the load.
2743 # the adapters applied to the Query should be honored.
2744 if compile_state.compound_eager_adapter and decorator:
2745 decorator = decorator.wrap(
2746 compile_state.compound_eager_adapter
2747 )
2748 elif compile_state.compound_eager_adapter:
2749 decorator = compile_state.compound_eager_adapter
2750 else:
2751 decorator = path.get(
2752 compile_state.attributes, "eager_row_processor"
2753 )
2754 if decorator is None:
2755 return False
2756
2757 if self.mapper._result_has_identity_key(result, decorator):
2758 return decorator
2759 else:
2760 # no identity key - don't return a row
2761 # processor, will cause a degrade to lazy
2762 return False
2763
2764 def create_row_processor(
2765 self,
2766 context,
2767 query_entity,
2768 path,
2769 loadopt,
2770 mapper,
2771 result,
2772 adapter,
2773 populators,
2774 ):
2775
2776 if not context.compile_state.compile_options._enable_eagerloads:
2777 return
2778
2779 if not self.parent.class_manager[self.key].impl.supports_population:
2780 raise sa_exc.InvalidRequestError(
2781 "'%s' does not support object "
2782 "population - eager loading cannot be applied." % self
2783 )
2784
2785 if self.uselist:
2786 context.loaders_require_uniquing = True
2787
2788 our_path = path[self.parent_property]
2789
2790 eager_adapter = self._create_eager_adapter(
2791 context, result, adapter, our_path, loadopt
2792 )
2793
2794 if eager_adapter is not False:
2795 key = self.key
2796
2797 _instance = loading._instance_processor(
2798 query_entity,
2799 self.mapper,
2800 context,
2801 result,
2802 our_path[self.entity],
2803 eager_adapter,
2804 )
2805
2806 if not self.uselist:
2807 self._create_scalar_loader(context, key, _instance, populators)
2808 else:
2809 self._create_collection_loader(
2810 context, key, _instance, populators
2811 )
2812 else:
2813 self.parent_property._get_strategy(
2814 (("lazy", "select"),)
2815 ).create_row_processor(
2816 context,
2817 query_entity,
2818 path,
2819 loadopt,
2820 mapper,
2821 result,
2822 adapter,
2823 populators,
2824 )
2825
2826 def _create_collection_loader(self, context, key, _instance, populators):
2827 def load_collection_from_joined_new_row(state, dict_, row):
2828 # note this must unconditionally clear out any existing collection.
2829 # an existing collection would be present only in the case of
2830 # populate_existing().
2831 collection = attributes.init_state_collection(state, dict_, key)
2832 result_list = util.UniqueAppender(
2833 collection, "append_without_event"
2834 )
2835 context.attributes[(state, key)] = result_list
2836 inst = _instance(row)
2837 if inst is not None:
2838 result_list.append(inst)
2839
2840 def load_collection_from_joined_existing_row(state, dict_, row):
2841 if (state, key) in context.attributes:
2842 result_list = context.attributes[(state, key)]
2843 else:
2844 # appender_key can be absent from context.attributes
2845 # with isnew=False when self-referential eager loading
2846 # is used; the same instance may be present in two
2847 # distinct sets of result columns
2848 collection = attributes.init_state_collection(
2849 state, dict_, key
2850 )
2851 result_list = util.UniqueAppender(
2852 collection, "append_without_event"
2853 )
2854 context.attributes[(state, key)] = result_list
2855 inst = _instance(row)
2856 if inst is not None:
2857 result_list.append(inst)
2858
2859 def load_collection_from_joined_exec(state, dict_, row):
2860 _instance(row)
2861
2862 populators["new"].append(
2863 (self.key, load_collection_from_joined_new_row)
2864 )
2865 populators["existing"].append(
2866 (self.key, load_collection_from_joined_existing_row)
2867 )
2868 if context.invoke_all_eagers:
2869 populators["eager"].append(
2870 (self.key, load_collection_from_joined_exec)
2871 )
2872
2873 def _create_scalar_loader(self, context, key, _instance, populators):
2874 def load_scalar_from_joined_new_row(state, dict_, row):
2875 # set a scalar object instance directly on the parent
2876 # object, bypassing InstrumentedAttribute event handlers.
2877 dict_[key] = _instance(row)
2878
2879 def load_scalar_from_joined_existing_row(state, dict_, row):
2880 # call _instance on the row, even though the object has
2881 # been created, so that we further descend into properties
2882 existing = _instance(row)
2883
2884 # conflicting value already loaded, this shouldn't happen
2885 if key in dict_:
2886 if existing is not dict_[key]:
2887 util.warn(
2888 "Multiple rows returned with "
2889 "uselist=False for eagerly-loaded attribute '%s' "
2890 % self
2891 )
2892 else:
2893 # this case is when one row has multiple loads of the
2894 # same entity (e.g. via aliasing), one has an attribute
2895 # that the other doesn't.
2896 dict_[key] = existing
2897
2898 def load_scalar_from_joined_exec(state, dict_, row):
2899 _instance(row)
2900
2901 populators["new"].append((self.key, load_scalar_from_joined_new_row))
2902 populators["existing"].append(
2903 (self.key, load_scalar_from_joined_existing_row)
2904 )
2905 if context.invoke_all_eagers:
2906 populators["eager"].append(
2907 (self.key, load_scalar_from_joined_exec)
2908 )
2909
2910
2911@log.class_logger
2912@relationships.RelationshipProperty.strategy_for(lazy="selectin")
2913class SelectInLoader(PostLoader, util.MemoizedSlots):
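    """Provide loading behavior for a :class:`.Relationship` using
    "selectin" eager loading.

    After the lead objects of a query are loaded, a second SELECT is
    emitted for the related objects, filtered via an IN expression
    against a "primary_keys" bound parameter (see ``_load_for_path``
    below) in chunks of ``_chunksize`` keys at a time.

    A minimal usage sketch; ``User`` is a hypothetical mapped class
    with a ``User.addresses`` relationship::

        from sqlalchemy import select
        from sqlalchemy.orm import selectinload

        stmt = select(User).options(selectinload(User.addresses))

    """
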
2914 __slots__ = (
2915 "join_depth",
2916 "omit_join",
2917 "_parent_alias",
2918 "_query_info",
2919 "_fallback_query_info",
2920 )
2921
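    # query_info describes the SELECT this loader will emit:
    #
    # load_only_child - True for the many-to-one "omit join" case, where
    #     related rows are looked up by their own primary key
    # load_with_join - True when the SELECT joins from an alias of the
    #     parent entity to the target entity (the non-"omit join" case)
    # in_expr - column or tuple expression compared via IN against the
    #     "primary_keys" bound parameter
    # pk_cols - the key columns placed in the "pk" Bundle, used to match
    #     loaded rows back to parent states
    # zero_idx - True when there is a single key column, so each key is
    #     passed as a scalar rather than a one-element tuple
    # child_lookup_cols - for the many-to-one case, the parent-side columns
    #     from which the related identity is read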
2922 query_info = collections.namedtuple(
2923 "queryinfo",
2924 [
2925 "load_only_child",
2926 "load_with_join",
2927 "in_expr",
2928 "pk_cols",
2929 "zero_idx",
2930 "child_lookup_cols",
2931 ],
2932 )
2933
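    # number of key values sent per SELECT; _load_via_parent and
    # _load_via_child below slice their states into chunks of this size so
    # that very large IN expressions are split across multiple statements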
2934 _chunksize = 500
2935
2936 def __init__(self, parent, strategy_key):
2937 super().__init__(parent, strategy_key)
2938 self.join_depth = self.parent_property.join_depth
2939 is_m2o = self.parent_property.direction is interfaces.MANYTOONE
2940
2941 if self.parent_property.omit_join is not None:
2942 self.omit_join = self.parent_property.omit_join
2943 else:
2944 lazyloader = self.parent_property._get_strategy(
2945 (("lazy", "select"),)
2946 )
2947 if is_m2o:
2948 self.omit_join = lazyloader.use_get
2949 else:
2950 self.omit_join = self.parent._get_clause[0].compare(
2951 lazyloader._rev_lazywhere,
2952 use_proxies=True,
2953 compare_keys=False,
2954 equivalents=self.parent._equivalent_columns,
2955 )
2956
2957 if self.omit_join:
2958 if is_m2o:
2959 self._query_info = self._init_for_omit_join_m2o()
2960 self._fallback_query_info = self._init_for_join()
2961 else:
2962 self._query_info = self._init_for_omit_join()
2963 else:
2964 self._query_info = self._init_for_join()
2965
2966 def _init_for_omit_join(self):
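        # the one-to-many "omit join" case: the foreign key columns on the
        # related table line up with the parent's primary key, so the
        # second SELECT can filter the related table directly with an IN
        # against those foreign key columns, without joining back to the
        # parent table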
2967 pk_to_fk = dict(
2968 self.parent_property._join_condition.local_remote_pairs
2969 )
2970 pk_to_fk.update(
2971 (equiv, pk_to_fk[k])
2972 for k in list(pk_to_fk)
2973 for equiv in self.parent._equivalent_columns.get(k, ())
2974 )
2975
2976 pk_cols = fk_cols = [
2977 pk_to_fk[col] for col in self.parent.primary_key if col in pk_to_fk
2978 ]
2979 if len(fk_cols) > 1:
2980 in_expr = sql.tuple_(*fk_cols)
2981 zero_idx = False
2982 else:
2983 in_expr = fk_cols[0]
2984 zero_idx = True
2985
2986 return self.query_info(False, False, in_expr, pk_cols, zero_idx, None)
2987
2988 def _init_for_omit_join_m2o(self):
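        # the many-to-one "omit join" case: related objects are loaded by
        # their own primary key; child_lookup_cols holds the parent-side
        # foreign key columns from which those identities are read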
2989 pk_cols = self.mapper.primary_key
2990 if len(pk_cols) > 1:
2991 in_expr = sql.tuple_(*pk_cols)
2992 zero_idx = False
2993 else:
2994 in_expr = pk_cols[0]
2995 zero_idx = True
2996
2997 lazyloader = self.parent_property._get_strategy((("lazy", "select"),))
2998 lookup_cols = [lazyloader._equated_columns[pk] for pk in pk_cols]
2999
3000 return self.query_info(
3001 True, False, in_expr, pk_cols, zero_idx, lookup_cols
3002 )
3003
3004 def _init_for_join(self):
3005 self._parent_alias = AliasedClass(self.parent.class_)
3006 pa_insp = inspect(self._parent_alias)
3007 pk_cols = [
3008 pa_insp._adapt_element(col) for col in self.parent.primary_key
3009 ]
3010 if len(pk_cols) > 1:
3011 in_expr = sql.tuple_(*pk_cols)
3012 zero_idx = False
3013 else:
3014 in_expr = pk_cols[0]
3015 zero_idx = True
3016 return self.query_info(False, True, in_expr, pk_cols, zero_idx, None)
3017
3018 def init_class_attribute(self, mapper):
3019 self.parent_property._get_strategy(
3020 (("lazy", "select"),)
3021 ).init_class_attribute(mapper)
3022
3023 def create_row_processor(
3024 self,
3025 context,
3026 query_entity,
3027 path,
3028 loadopt,
3029 mapper,
3030 result,
3031 adapter,
3032 populators,
3033 ):
3034 if context.refresh_state:
3035 return self._immediateload_create_row_processor(
3036 context,
3037 query_entity,
3038 path,
3039 loadopt,
3040 mapper,
3041 result,
3042 adapter,
3043 populators,
3044 )
3045
3046 (
3047 effective_path,
3048 run_loader,
3049 execution_options,
3050 recursion_depth,
3051 ) = self._setup_for_recursion(
3052 context, path, loadopt, join_depth=self.join_depth
3053 )
3054
3055 if not run_loader:
3056 return
3057
3058 if not context.compile_state.compile_options._enable_eagerloads:
3059 return
3060
3061 if not self.parent.class_manager[self.key].impl.supports_population:
3062 raise sa_exc.InvalidRequestError(
3063 "'%s' does not support object "
3064 "population - eager loading cannot be applied." % self
3065 )
3066
        # a little dance here, as the "path" still only semi-tracks the
        # exact series of things we are loading; it doesn't tell us about
        # with_polymorphic() and similar constructs when it's at the root.
        # the initial MapperEntity is more accurate for this case.
3071 if len(path) == 1:
3072 if not orm_util._entity_isa(query_entity.entity_zero, self.parent):
3073 return
3074 elif not orm_util._entity_isa(path[-1], self.parent):
3075 return
3076
3077 selectin_path = effective_path
3078
3079 path_w_prop = path[self.parent_property]
3080
3081 # build up a path indicating the path from the leftmost
3082 # entity to the thing we're subquery loading.
3083 with_poly_entity = path_w_prop.get(
3084 context.attributes, "path_with_polymorphic", None
3085 )
3086 if with_poly_entity is not None:
3087 effective_entity = inspect(with_poly_entity)
3088 else:
3089 effective_entity = self.entity
3090
3091 loading.PostLoad.callable_for_path(
3092 context,
3093 selectin_path,
3094 self.parent,
3095 self.parent_property,
3096 self._load_for_path,
3097 effective_entity,
3098 loadopt,
3099 recursion_depth,
3100 execution_options,
3101 )
3102
3103 def _load_for_path(
3104 self,
3105 context,
3106 path,
3107 states,
3108 load_only,
3109 effective_entity,
3110 loadopt,
3111 recursion_depth,
3112 execution_options,
3113 ):
3114 if load_only and self.key not in load_only:
3115 return
3116
3117 query_info = self._query_info
3118
3119 if query_info.load_only_child:
3120 our_states = collections.defaultdict(list)
3121 none_states = []
3122
3123 mapper = self.parent
3124
3125 for state, overwrite in states:
3126 state_dict = state.dict
3127 related_ident = tuple(
3128 mapper._get_state_attr_by_column(
3129 state,
3130 state_dict,
3131 lk,
3132 passive=attributes.PASSIVE_NO_FETCH,
3133 )
3134 for lk in query_info.child_lookup_cols
3135 )
3136 # if the loaded parent objects do not have the foreign key
3137 # to the related item loaded, then degrade into the joined
3138 # version of selectinload
3139 if LoaderCallableStatus.PASSIVE_NO_RESULT in related_ident:
3140 query_info = self._fallback_query_info
3141 break
3142
3143 # organize states into lists keyed to particular foreign
3144 # key values.
3145 if None not in related_ident:
3146 our_states[related_ident].append(
3147 (state, state_dict, overwrite)
3148 )
3149 else:
3150 # For FK values that have None, add them to a
3151 # separate collection that will be populated separately
3152 none_states.append((state, state_dict, overwrite))
3153
3154 # note the above conditional may have changed query_info
3155 if not query_info.load_only_child:
3156 our_states = [
3157 (state.key[1], state, state.dict, overwrite)
3158 for state, overwrite in states
3159 ]
3160
3161 pk_cols = query_info.pk_cols
3162 in_expr = query_info.in_expr
3163
3164 if not query_info.load_with_join:
3165 # in "omit join" mode, the primary key column and the
3166 # "in" expression are in terms of the related entity. So
3167 # if the related entity is polymorphic or otherwise aliased,
3168 # we need to adapt our "pk_cols" and "in_expr" to that
3169 # entity. in non-"omit join" mode, these are against the
3170 # parent entity and do not need adaption.
3171 if effective_entity.is_aliased_class:
3172 pk_cols = [
3173 effective_entity._adapt_element(col) for col in pk_cols
3174 ]
3175 in_expr = effective_entity._adapt_element(in_expr)
3176
3177 bundle_ent = orm_util.Bundle("pk", *pk_cols)
3178 bundle_sql = bundle_ent.__clause_element__()
3179
3180 entity_sql = effective_entity.__clause_element__()
3181 q = Select._create_raw_select(
3182 _raw_columns=[bundle_sql, entity_sql],
3183 _label_style=LABEL_STYLE_TABLENAME_PLUS_COL,
3184 _compile_options=ORMCompileState.default_compile_options,
3185 _propagate_attrs={
3186 "compile_state_plugin": "orm",
3187 "plugin_subject": effective_entity,
3188 },
3189 )
3190
3191 if not query_info.load_with_join:
3192 # the Bundle we have in the "omit_join" case is against raw, non
3193 # annotated columns, so to ensure the Query knows its primary
3194 # entity, we add it explicitly. If we made the Bundle against
3195 # annotated columns, we hit a performance issue in this specific
3196 # case, which is detailed in issue #4347.
3197 q = q.select_from(effective_entity)
3198 else:
3199 # in the non-omit_join case, the Bundle is against the annotated/
3200 # mapped column of the parent entity, but the #4347 issue does not
3201 # occur in this case.
3202 q = q.select_from(self._parent_alias).join(
3203 getattr(self._parent_alias, self.parent_property.key).of_type(
3204 effective_entity
3205 )
3206 )
3207
3208 q = q.filter(in_expr.in_(sql.bindparam("primary_keys")))
3209
3210 # a test which exercises what these comments talk about is
3211 # test_selectin_relations.py -> test_twolevel_selectin_w_polymorphic
3212 #
3213 # effective_entity above is given to us in terms of the cached
3214 # statement, namely this one:
3215 orig_query = context.compile_state.select_statement
3216
3217 # the actual statement that was requested is this one:
3218 # context_query = context.query
3219 #
3220 # that's not the cached one, however. So while it is of the identical
3221 # structure, if it has entities like AliasedInsp, which we get from
3222 # aliased() or with_polymorphic(), the AliasedInsp will likely be a
3223 # different object identity each time, and will not match up
3224 # hashing-wise to the corresponding AliasedInsp that's in the
3225 # cached query, meaning it won't match on paths and loader lookups
3226 # and loaders like this one will be skipped if it is used in options.
3227 #
        # as it turns out, standard loader options like selectinload() and
        # lazyload() that have a path need to come from the cached query,
        # so that the AliasedInsp etc. objects that are in the query line
        # up with the object that's in the path of the strategy object.
        # however, for other options like with_loader_criteria() that don't
        # have a path (they have a fixed entity) and need access to the
        # latest closure state in order to be correct, we need to use the
        # uncached one.
        #
        # as of #8399 we let the loader option itself figure out what it
        # wants to do given the cached and uncached versions of itself.
3239
3240 effective_path = path[self.parent_property]
3241
3242 if orig_query is context.query:
3243 new_options = orig_query._with_options
3244 else:
3245 cached_options = orig_query._with_options
3246 uncached_options = context.query._with_options
3247
3248 # propagate compile state options from the original query,
3249 # updating their "extra_criteria" as necessary.
3250 # note this will create a different cache key than
3251 # "orig" options if extra_criteria is present, because the copy
3252 # of extra_criteria will have different boundparam than that of
3253 # the QueryableAttribute in the path
3254 new_options = [
3255 orig_opt._adapt_cached_option_to_uncached_option(
3256 context, uncached_opt
3257 )
3258 for orig_opt, uncached_opt in zip(
3259 cached_options, uncached_options
3260 )
3261 ]
3262
3263 if loadopt and loadopt._extra_criteria:
3264 new_options += (
3265 orm_util.LoaderCriteriaOption(
3266 effective_entity,
3267 loadopt._generate_extra_criteria(context),
3268 ),
3269 )
3270
3271 if recursion_depth is not None:
3272 effective_path = effective_path._truncate_recursive()
3273
3274 q = q.options(*new_options)
3275
3276 q = q._update_compile_options({"_current_path": effective_path})
3277 if context.populate_existing:
3278 q = q.execution_options(populate_existing=True)
3279
3280 if self.parent_property.order_by:
3281 if not query_info.load_with_join:
3282 eager_order_by = self.parent_property.order_by
3283 if effective_entity.is_aliased_class:
3284 eager_order_by = [
3285 effective_entity._adapt_element(elem)
3286 for elem in eager_order_by
3287 ]
3288 q = q.order_by(*eager_order_by)
3289 else:
3290
3291 def _setup_outermost_orderby(compile_context):
3292 compile_context.eager_order_by += tuple(
3293 util.to_list(self.parent_property.order_by)
3294 )
3295
3296 q = q._add_context_option(
3297 _setup_outermost_orderby, self.parent_property
3298 )
3299
3300 if query_info.load_only_child:
3301 self._load_via_child(
3302 our_states,
3303 none_states,
3304 query_info,
3305 q,
3306 context,
3307 execution_options,
3308 )
3309 else:
3310 self._load_via_parent(
3311 our_states, query_info, q, context, execution_options
3312 )
3313
3314 def _load_via_child(
3315 self,
3316 our_states,
3317 none_states,
3318 query_info,
3319 q,
3320 context,
3321 execution_options,
3322 ):
3323 uselist = self.uselist
3324
3325 # this sort is really for the benefit of the unit tests
3326 our_keys = sorted(our_states)
3327 while our_keys:
3328 chunk = our_keys[0 : self._chunksize]
3329 our_keys = our_keys[self._chunksize :]
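            # each result row is (key tuple from the "pk" Bundle, related
            # object); build a lookup of related objects keyed by identity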
3330 data = {
3331 k: v
3332 for k, v in context.session.execute(
3333 q,
3334 params={
3335 "primary_keys": [
3336 key[0] if query_info.zero_idx else key
3337 for key in chunk
3338 ]
3339 },
3340 execution_options=execution_options,
3341 ).unique()
3342 }
3343
3344 for key in chunk:
3345 # for a real foreign key and no concurrent changes to the
3346 # DB while running this method, "key" is always present in
3347 # data. However, for primaryjoins without real foreign keys
3348 # a non-None primaryjoin condition may still refer to no
3349 # related object.
3350 related_obj = data.get(key, None)
3351 for state, dict_, overwrite in our_states[key]:
3352 if not overwrite and self.key in dict_:
3353 continue
3354
3355 state.get_impl(self.key).set_committed_value(
3356 state,
3357 dict_,
3358 related_obj if not uselist else [related_obj],
3359 )
3360 # populate none states with empty value / collection
3361 for state, dict_, overwrite in none_states:
3362 if not overwrite and self.key in dict_:
3363 continue
3364
3365 # note it's OK if this is a uselist=True attribute, the empty
3366 # collection will be populated
3367 state.get_impl(self.key).set_committed_value(state, dict_, None)
3368
3369 def _load_via_parent(
3370 self, our_states, query_info, q, context, execution_options
3371 ):
3372 uselist = self.uselist
3373 _empty_result = () if uselist else None
3374
3375 while our_states:
3376 chunk = our_states[0 : self._chunksize]
3377 our_states = our_states[self._chunksize :]
3378
3379 primary_keys = [
3380 key[0] if query_info.zero_idx else key
3381 for key, state, state_dict, overwrite in chunk
3382 ]
3383
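            # each result row is (key tuple from the "pk" Bundle, related
            # object); group the related objects by parent key so they can
            # be assigned to their parent states below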
3384 data = collections.defaultdict(list)
3385 for k, v in itertools.groupby(
3386 context.session.execute(
3387 q,
3388 params={"primary_keys": primary_keys},
3389 execution_options=execution_options,
3390 ).unique(),
3391 lambda x: x[0],
3392 ):
3393 data[k].extend(vv[1] for vv in v)
3394
3395 for key, state, state_dict, overwrite in chunk:
3396 if not overwrite and self.key in state_dict:
3397 continue
3398
3399 collection = data.get(key, _empty_result)
3400
3401 if not uselist and collection:
3402 if len(collection) > 1:
3403 util.warn(
3404 "Multiple rows returned with "
3405 "uselist=False for eagerly-loaded "
3406 "attribute '%s' " % self
3407 )
3408 state.get_impl(self.key).set_committed_value(
3409 state, state_dict, collection[0]
3410 )
3411 else:
3412 # note that empty tuple set on uselist=False sets the
3413 # value to None
3414 state.get_impl(self.key).set_committed_value(
3415 state, state_dict, collection
3416 )
3417
3418
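# illustrative sketch (not part of the API) of the condition this validator
# guards against, assuming hypothetical Parent / Child classes whose
# Parent.children relationship is configured with single_parent=True:
#
#     child = Child()
#     p1 = Parent(children=[child])
#     p2 = Parent(children=[child])   # raises InvalidRequestError (bbf1)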
3419def single_parent_validator(desc, prop):
3420 def _do_check(state, value, oldvalue, initiator):
3421 if value is not None and initiator.key == prop.key:
3422 hasparent = initiator.hasparent(attributes.instance_state(value))
3423 if hasparent and oldvalue is not value:
3424 raise sa_exc.InvalidRequestError(
3425 "Instance %s is already associated with an instance "
3426 "of %s via its %s attribute, and is only allowed a "
3427 "single parent."
3428 % (orm_util.instance_str(value), state.class_, prop),
3429 code="bbf1",
3430 )
3431 return value
3432
3433 def append(state, value, initiator):
3434 return _do_check(state, value, None, initiator)
3435
3436 def set_(state, value, oldvalue, initiator):
3437 return _do_check(state, value, oldvalue, initiator)
3438
3439 event.listen(
3440 desc, "append", append, raw=True, retval=True, active_history=True
3441 )
3442 event.listen(desc, "set", set_, raw=True, retval=True, active_history=True)