1# orm/strategies.py
2# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
3# <see AUTHORS file>
4#
5# This module is part of SQLAlchemy and is released under
6# the MIT License: https://www.opensource.org/licenses/mit-license.php
7# mypy: ignore-errors
8
9
10"""sqlalchemy.orm.interfaces.LoaderStrategy
11 implementations, and related MapperOptions."""
12
13from __future__ import annotations
14
15import collections
16import itertools
17from typing import Any
18from typing import Dict
19from typing import Optional
20from typing import Tuple
21from typing import TYPE_CHECKING
22from typing import Union
23
24from . import attributes
25from . import exc as orm_exc
26from . import interfaces
27from . import loading
28from . import path_registry
29from . import properties
30from . import query
31from . import relationships
32from . import unitofwork
33from . import util as orm_util
34from .base import _DEFER_FOR_STATE
35from .base import _RAISE_FOR_STATE
36from .base import _SET_DEFERRED_EXPIRED
37from .base import ATTR_WAS_SET
38from .base import LoaderCallableStatus
39from .base import PASSIVE_OFF
40from .base import PassiveFlag
41from .context import _column_descriptions
42from .context import ORMCompileState
43from .context import ORMSelectCompileState
44from .context import QueryContext
45from .interfaces import LoaderStrategy
46from .interfaces import StrategizedProperty
47from .session import _state_session
48from .state import InstanceState
49from .strategy_options import Load
50from .util import _none_set
51from .util import AliasedClass
52from .. import event
53from .. import exc as sa_exc
54from .. import inspect
55from .. import log
56from .. import sql
57from .. import util
58from ..sql import util as sql_util
59from ..sql import visitors
60from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
61from ..sql.selectable import Select
62from ..util.typing import Literal
63
64if TYPE_CHECKING:
65 from .mapper import Mapper
66 from .relationships import RelationshipProperty
67 from ..sql.elements import ColumnElement
68
69
70def _register_attribute(
71 prop,
72 mapper,
73 useobject,
74 compare_function=None,
75 typecallable=None,
76 callable_=None,
77 proxy_property=None,
78 active_history=False,
79 impl_class=None,
80 **kw,
81):
82 listen_hooks = []
83
84 uselist = useobject and prop.uselist
85
86 if useobject and prop.single_parent:
87 listen_hooks.append(single_parent_validator)
88
89 if prop.key in prop.parent.validators:
90 fn, opts = prop.parent.validators[prop.key]
91 listen_hooks.append(
92 lambda desc, prop: orm_util._validator_events(
93 desc, prop.key, fn, **opts
94 )
95 )
96
97 if useobject:
98 listen_hooks.append(unitofwork.track_cascade_events)
99
    # need to assemble backref listeners
    # after the single_parent_validator and any mapper-level validators
102 if useobject:
103 backref = prop.back_populates
104 if backref and prop._effective_sync_backref:
105 listen_hooks.append(
106 lambda desc, prop: attributes.backref_listeners(
107 desc, backref, uselist
108 )
109 )
110
111 # a single MapperProperty is shared down a class inheritance
112 # hierarchy, so we set up attribute instrumentation and backref event
113 # for each mapper down the hierarchy.
114
    # typically, "mapper" is the same as prop.parent, due to the way
    # the configure_mappers() process runs; however, this is not strongly
    # enforced, and in the case of a second configure_mappers() run the
    # mapper here might not be prop.parent.  Also, a subclass mapper may
    # be called here before a superclass mapper.  That is, we can't rely
    # on mappers not already being set up, so we have to check each one.
121
122 for m in mapper.self_and_descendants:
123 if prop is m._props.get(
124 prop.key
125 ) and not m.class_manager._attr_has_impl(prop.key):
126 desc = attributes.register_attribute_impl(
127 m.class_,
128 prop.key,
129 parent_token=prop,
130 uselist=uselist,
131 compare_function=compare_function,
132 useobject=useobject,
133 trackparent=useobject
134 and (
135 prop.single_parent
136 or prop.direction is interfaces.ONETOMANY
137 ),
138 typecallable=typecallable,
139 callable_=callable_,
140 active_history=active_history,
141 impl_class=impl_class,
142 send_modified_events=not useobject or not prop.viewonly,
143 doc=prop.doc,
144 **kw,
145 )
146
147 for hook in listen_hooks:
148 hook(desc, prop)
149
150
151@properties.ColumnProperty.strategy_for(instrument=False, deferred=False)
152class UninstrumentedColumnLoader(LoaderStrategy):
153 """Represent a non-instrumented MapperProperty.
154
155 The polymorphic_on argument of mapper() often results in this,
156 if the argument is against the with_polymorphic selectable.
157
158 """
159
160 __slots__ = ("columns",)
161
162 def __init__(self, parent, strategy_key):
163 super().__init__(parent, strategy_key)
164 self.columns = self.parent_property.columns
165
166 def setup_query(
167 self,
168 compile_state,
169 query_entity,
170 path,
171 loadopt,
172 adapter,
173 column_collection=None,
174 **kwargs,
175 ):
176 for c in self.columns:
177 if adapter:
178 c = adapter.columns[c]
179 compile_state._append_dedupe_col_collection(c, column_collection)
180
181 def create_row_processor(
182 self,
183 context,
184 query_entity,
185 path,
186 loadopt,
187 mapper,
188 result,
189 adapter,
190 populators,
191 ):
192 pass
193
194
195@log.class_logger
196@properties.ColumnProperty.strategy_for(instrument=True, deferred=False)
197class ColumnLoader(LoaderStrategy):
198 """Provide loading behavior for a :class:`.ColumnProperty`."""
199
200 __slots__ = "columns", "is_composite"
201
202 def __init__(self, parent, strategy_key):
203 super().__init__(parent, strategy_key)
204 self.columns = self.parent_property.columns
205 self.is_composite = hasattr(self.parent_property, "composite_class")
206
207 def setup_query(
208 self,
209 compile_state,
210 query_entity,
211 path,
212 loadopt,
213 adapter,
214 column_collection,
215 memoized_populators,
216 check_for_adapt=False,
217 **kwargs,
218 ):
219 for c in self.columns:
220 if adapter:
221 if check_for_adapt:
222 c = adapter.adapt_check_present(c)
223 if c is None:
224 return
225 else:
226 c = adapter.columns[c]
227
228 compile_state._append_dedupe_col_collection(c, column_collection)
229
230 fetch = self.columns[0]
231 if adapter:
232 fetch = adapter.columns[fetch]
233 if fetch is None:
234 # None happens here only for dml bulk_persistence cases
235 # when context.DMLReturningColFilter is used
236 return
237
238 memoized_populators[self.parent_property] = fetch
239
240 def init_class_attribute(self, mapper):
241 self.is_class_level = True
242 coltype = self.columns[0].type
        # TODO: check all columns?  check for foreign key as well?
244 active_history = (
245 self.parent_property.active_history
246 or self.columns[0].primary_key
247 or (
248 mapper.version_id_col is not None
249 and mapper._columntoproperty.get(mapper.version_id_col, None)
250 is self.parent_property
251 )
252 )
253
254 _register_attribute(
255 self.parent_property,
256 mapper,
257 useobject=False,
258 compare_function=coltype.compare_values,
259 active_history=active_history,
260 )
261
262 def create_row_processor(
263 self,
264 context,
265 query_entity,
266 path,
267 loadopt,
268 mapper,
269 result,
270 adapter,
271 populators,
272 ):
273 # look through list of columns represented here
274 # to see which, if any, is present in the row.
275
276 for col in self.columns:
277 if adapter:
278 col = adapter.columns[col]
279 getter = result._getter(col, False)
280 if getter:
281 populators["quick"].append((self.key, getter))
282 break
283 else:
284 populators["expire"].append((self.key, True))
285
286
287@log.class_logger
288@properties.ColumnProperty.strategy_for(query_expression=True)
289class ExpressionColumnLoader(ColumnLoader):
290 def __init__(self, parent, strategy_key):
291 super().__init__(parent, strategy_key)
292
293 # compare to the "default" expression that is mapped in
294 # the column. If it's sql.null, we don't need to render
295 # unless an expr is passed in the options.
296 null = sql.null().label(None)
297 self._have_default_expression = any(
298 not c.compare(null) for c in self.parent_property.columns
299 )
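
    # A rough, illustrative sketch only: assuming a hypothetical ``Author``
    # mapping that declares ``book_count = query_expression()``, the
    # attribute is populated only when with_expression() supplies an
    # expression; otherwise the mapped default expression (if any) applies::
    #
    #     from sqlalchemy import func, select
    #     from sqlalchemy.orm import with_expression
    #
    #     book_count = (
    #         select(func.count(Book.id))
    #         .where(Book.author_id == Author.id)
    #         .scalar_subquery()
    #     )
    #     stmt = select(Author).options(
    #         with_expression(Author.book_count, book_count)
    #     )
    #     authors = session.scalars(stmt).all()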
300
301 def setup_query(
302 self,
303 compile_state,
304 query_entity,
305 path,
306 loadopt,
307 adapter,
308 column_collection,
309 memoized_populators,
310 **kwargs,
311 ):
312 columns = None
313 if loadopt and loadopt._extra_criteria:
314 columns = loadopt._extra_criteria
315
316 elif self._have_default_expression:
317 columns = self.parent_property.columns
318
319 if columns is None:
320 return
321
322 for c in columns:
323 if adapter:
324 c = adapter.columns[c]
325 compile_state._append_dedupe_col_collection(c, column_collection)
326
327 fetch = columns[0]
328 if adapter:
329 fetch = adapter.columns[fetch]
330 if fetch is None:
                # None is not expected to be the result of any
                # adapter implementation here; however, there may be
                # theoretical usages of returning() with
                # context.DMLReturningColFilter
334 return
335
336 memoized_populators[self.parent_property] = fetch
337
338 def create_row_processor(
339 self,
340 context,
341 query_entity,
342 path,
343 loadopt,
344 mapper,
345 result,
346 adapter,
347 populators,
348 ):
349 # look through list of columns represented here
350 # to see which, if any, is present in the row.
351 if loadopt and loadopt._extra_criteria:
352 columns = loadopt._extra_criteria
353
354 for col in columns:
355 if adapter:
356 col = adapter.columns[col]
357 getter = result._getter(col, False)
358 if getter:
359 populators["quick"].append((self.key, getter))
360 break
361 else:
362 populators["expire"].append((self.key, True))
363
364 def init_class_attribute(self, mapper):
365 self.is_class_level = True
366
367 _register_attribute(
368 self.parent_property,
369 mapper,
370 useobject=False,
371 compare_function=self.columns[0].type.compare_values,
372 accepts_scalar_loader=False,
373 )
374
375
376@log.class_logger
377@properties.ColumnProperty.strategy_for(deferred=True, instrument=True)
378@properties.ColumnProperty.strategy_for(
379 deferred=True, instrument=True, raiseload=True
380)
381@properties.ColumnProperty.strategy_for(do_nothing=True)
382class DeferredColumnLoader(LoaderStrategy):
383 """Provide loading behavior for a deferred :class:`.ColumnProperty`."""
384
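    # A minimal usage sketch of the behavior this strategy implements,
    # assuming a hypothetical ``Book`` mapping with a large ``summary``
    # column; illustration only::
    #
    #     from sqlalchemy import select
    #     from sqlalchemy.orm import defer, undefer
    #
    #     # whether mapped as deferred up front or deferred per-query
    #     # via the defer() option:
    #     book = session.scalars(
    #         select(Book).options(defer(Book.summary))
    #     ).first()
    #
    #     # accessing the attribute now emits a second SELECT via
    #     # DeferredColumnLoader._load_for_state()
    #     print(book.summary)
    #
    #     # undefer() restores loading of the column up front
    #     books = session.scalars(
    #         select(Book).options(undefer(Book.summary))
    #     ).all()
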
385 __slots__ = "columns", "group", "raiseload"
386
387 def __init__(self, parent, strategy_key):
388 super().__init__(parent, strategy_key)
389 if hasattr(self.parent_property, "composite_class"):
390 raise NotImplementedError(
391 "Deferred loading for composite types not implemented yet"
392 )
393 self.raiseload = self.strategy_opts.get("raiseload", False)
394 self.columns = self.parent_property.columns
395 self.group = self.parent_property.group
396
397 def create_row_processor(
398 self,
399 context,
400 query_entity,
401 path,
402 loadopt,
403 mapper,
404 result,
405 adapter,
406 populators,
407 ):
        # for a DeferredColumnLoader, this method is only used during a
        # "row processor only" query; see test_deferred.py ->
        # tests with "rowproc_only" in their name.  As of the 1.0 series,
        # loading._instance_processor doesn't use a "row processing"
        # function to populate columns; instead it uses data in the
        # "populators" dictionary.  Normally, DeferredColumnLoader.setup_query()
        # sets up that data in the "memoized_populators" dictionary
        # and "create_row_processor()" here is never invoked.
416
417 if (
418 context.refresh_state
419 and context.query._compile_options._only_load_props
420 and self.key in context.query._compile_options._only_load_props
421 ):
422 self.parent_property._get_strategy(
423 (("deferred", False), ("instrument", True))
424 ).create_row_processor(
425 context,
426 query_entity,
427 path,
428 loadopt,
429 mapper,
430 result,
431 adapter,
432 populators,
433 )
434
435 elif not self.is_class_level:
436 if self.raiseload:
437 set_deferred_for_local_state = (
438 self.parent_property._raise_column_loader
439 )
440 else:
441 set_deferred_for_local_state = (
442 self.parent_property._deferred_column_loader
443 )
444 populators["new"].append((self.key, set_deferred_for_local_state))
445 else:
446 populators["expire"].append((self.key, False))
447
448 def init_class_attribute(self, mapper):
449 self.is_class_level = True
450
451 _register_attribute(
452 self.parent_property,
453 mapper,
454 useobject=False,
455 compare_function=self.columns[0].type.compare_values,
456 callable_=self._load_for_state,
457 load_on_unexpire=False,
458 )
459
460 def setup_query(
461 self,
462 compile_state,
463 query_entity,
464 path,
465 loadopt,
466 adapter,
467 column_collection,
468 memoized_populators,
469 only_load_props=None,
470 **kw,
471 ):
472 if (
473 (
474 compile_state.compile_options._render_for_subquery
475 and self.parent_property._renders_in_subqueries
476 )
477 or (
478 loadopt
479 and set(self.columns).intersection(
480 self.parent._should_undefer_in_wildcard
481 )
482 )
483 or (
484 loadopt
485 and self.group
486 and loadopt.local_opts.get(
487 "undefer_group_%s" % self.group, False
488 )
489 )
490 or (only_load_props and self.key in only_load_props)
491 ):
492 self.parent_property._get_strategy(
493 (("deferred", False), ("instrument", True))
494 ).setup_query(
495 compile_state,
496 query_entity,
497 path,
498 loadopt,
499 adapter,
500 column_collection,
501 memoized_populators,
502 **kw,
503 )
504 elif self.is_class_level:
505 memoized_populators[self.parent_property] = _SET_DEFERRED_EXPIRED
506 elif not self.raiseload:
507 memoized_populators[self.parent_property] = _DEFER_FOR_STATE
508 else:
509 memoized_populators[self.parent_property] = _RAISE_FOR_STATE
510
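    # An illustrative sketch only: the "undefer_group_<name>" check above is
    # what makes the undefer_group() option take effect, assuming columns on
    # a hypothetical ``Book`` mapped with deferred(..., group="photos")::
    #
    #     from sqlalchemy import select
    #     from sqlalchemy.orm import undefer_group
    #
    #     books = session.scalars(
    #         select(Book).options(undefer_group("photos"))
    #     ).all()
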
511 def _load_for_state(self, state, passive):
512 if not state.key:
513 return LoaderCallableStatus.ATTR_EMPTY
514
515 if not passive & PassiveFlag.SQL_OK:
516 return LoaderCallableStatus.PASSIVE_NO_RESULT
517
518 localparent = state.manager.mapper
519
520 if self.group:
521 toload = [
522 p.key
523 for p in localparent.iterate_properties
524 if isinstance(p, StrategizedProperty)
525 and isinstance(p.strategy, DeferredColumnLoader)
526 and p.group == self.group
527 ]
528 else:
529 toload = [self.key]
530
531 # narrow the keys down to just those which have no history
532 group = [k for k in toload if k in state.unmodified]
533
534 session = _state_session(state)
535 if session is None:
536 raise orm_exc.DetachedInstanceError(
537 "Parent instance %s is not bound to a Session; "
538 "deferred load operation of attribute '%s' cannot proceed"
539 % (orm_util.state_str(state), self.key)
540 )
541
542 if self.raiseload:
543 self._invoke_raise_load(state, passive, "raise")
544
545 loading.load_scalar_attributes(
546 state.mapper, state, set(group), PASSIVE_OFF
547 )
548
549 return LoaderCallableStatus.ATTR_WAS_SET
550
551 def _invoke_raise_load(self, state, passive, lazy):
552 raise sa_exc.InvalidRequestError(
553 "'%s' is not available due to raiseload=True" % (self,)
554 )
555
556
557class LoadDeferredColumns:
558 """serializable loader object used by DeferredColumnLoader"""
559
560 def __init__(self, key: str, raiseload: bool = False):
561 self.key = key
562 self.raiseload = raiseload
563
564 def __call__(self, state, passive=attributes.PASSIVE_OFF):
565 key = self.key
566
567 localparent = state.manager.mapper
568 prop = localparent._props[key]
569 if self.raiseload:
570 strategy_key = (
571 ("deferred", True),
572 ("instrument", True),
573 ("raiseload", True),
574 )
575 else:
576 strategy_key = (("deferred", True), ("instrument", True))
577 strategy = prop._get_strategy(strategy_key)
578 return strategy._load_for_state(state, passive)
579
580
581class AbstractRelationshipLoader(LoaderStrategy):
    """LoaderStrategies which deal with related objects."""
583
584 __slots__ = "mapper", "target", "uselist", "entity"
585
586 def __init__(self, parent, strategy_key):
587 super().__init__(parent, strategy_key)
588 self.mapper = self.parent_property.mapper
589 self.entity = self.parent_property.entity
590 self.target = self.parent_property.target
591 self.uselist = self.parent_property.uselist
592
593 def _immediateload_create_row_processor(
594 self,
595 context,
596 query_entity,
597 path,
598 loadopt,
599 mapper,
600 result,
601 adapter,
602 populators,
603 ):
604 return self.parent_property._get_strategy(
605 (("lazy", "immediate"),)
606 ).create_row_processor(
607 context,
608 query_entity,
609 path,
610 loadopt,
611 mapper,
612 result,
613 adapter,
614 populators,
615 )
616
617
618@log.class_logger
619@relationships.RelationshipProperty.strategy_for(do_nothing=True)
620class DoNothingLoader(LoaderStrategy):
621 """Relationship loader that makes no change to the object's state.
622
623 Compared to NoLoader, this loader does not initialize the
624 collection/attribute to empty/none; the usual default LazyLoader will
625 take effect.
626
627 """
628
629
630@log.class_logger
631@relationships.RelationshipProperty.strategy_for(lazy="noload")
632@relationships.RelationshipProperty.strategy_for(lazy=None)
633class NoLoader(AbstractRelationshipLoader):
634 """Provide loading behavior for a :class:`.Relationship`
635 with "lazy=None".
636
637 """
638
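    # An illustrative sketch only, using a hypothetical ``Parent.children``
    # relationship: lazy="noload" (or the noload() option) leaves the
    # attribute as an empty collection / None rather than emitting SQL,
    # whereas the default lazy loading would emit a SELECT and raiseload()
    # would raise on access::
    #
    #     from sqlalchemy import select
    #     from sqlalchemy.orm import noload
    #
    #     parent = session.scalars(
    #         select(Parent).options(noload(Parent.children))
    #     ).first()
    #     parent.children   # [] -- no SQL is emitted
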
639 __slots__ = ()
640
641 def init_class_attribute(self, mapper):
642 self.is_class_level = True
643
644 _register_attribute(
645 self.parent_property,
646 mapper,
647 useobject=True,
648 typecallable=self.parent_property.collection_class,
649 )
650
651 def create_row_processor(
652 self,
653 context,
654 query_entity,
655 path,
656 loadopt,
657 mapper,
658 result,
659 adapter,
660 populators,
661 ):
662 def invoke_no_load(state, dict_, row):
663 if self.uselist:
664 attributes.init_state_collection(state, dict_, self.key)
665 else:
666 dict_[self.key] = None
667
668 populators["new"].append((self.key, invoke_no_load))
669
670
671@log.class_logger
672@relationships.RelationshipProperty.strategy_for(lazy=True)
673@relationships.RelationshipProperty.strategy_for(lazy="select")
674@relationships.RelationshipProperty.strategy_for(lazy="raise")
675@relationships.RelationshipProperty.strategy_for(lazy="raise_on_sql")
676@relationships.RelationshipProperty.strategy_for(lazy="baked_select")
677class LazyLoader(
678 AbstractRelationshipLoader, util.MemoizedSlots, log.Identified
679):
680 """Provide loading behavior for a :class:`.Relationship`
    with "lazy=True", that is, loads when first accessed.
682
683 """
684
685 __slots__ = (
686 "_lazywhere",
687 "_rev_lazywhere",
688 "_lazyload_reverse_option",
689 "_order_by",
690 "use_get",
691 "is_aliased_class",
692 "_bind_to_col",
693 "_equated_columns",
694 "_rev_bind_to_col",
695 "_rev_equated_columns",
696 "_simple_lazy_clause",
697 "_raise_always",
698 "_raise_on_sql",
699 )
700
701 _lazywhere: ColumnElement[bool]
702 _bind_to_col: Dict[str, ColumnElement[Any]]
703 _rev_lazywhere: ColumnElement[bool]
704 _rev_bind_to_col: Dict[str, ColumnElement[Any]]
705
706 parent_property: RelationshipProperty[Any]
707
708 def __init__(
709 self, parent: RelationshipProperty[Any], strategy_key: Tuple[Any, ...]
710 ):
711 super().__init__(parent, strategy_key)
712 self._raise_always = self.strategy_opts["lazy"] == "raise"
713 self._raise_on_sql = self.strategy_opts["lazy"] == "raise_on_sql"
714
715 self.is_aliased_class = inspect(self.entity).is_aliased_class
716
717 join_condition = self.parent_property._join_condition
718 (
719 self._lazywhere,
720 self._bind_to_col,
721 self._equated_columns,
722 ) = join_condition.create_lazy_clause()
723
724 (
725 self._rev_lazywhere,
726 self._rev_bind_to_col,
727 self._rev_equated_columns,
728 ) = join_condition.create_lazy_clause(reverse_direction=True)
729
730 if self.parent_property.order_by:
731 self._order_by = [
732 sql_util._deep_annotate(elem, {"_orm_adapt": True})
733 for elem in util.to_list(self.parent_property.order_by)
734 ]
735 else:
736 self._order_by = None
737
738 self.logger.info("%s lazy loading clause %s", self, self._lazywhere)
739
740 # determine if our "lazywhere" clause is the same as the mapper's
741 # get() clause. then we can just use mapper.get()
742 #
        # TODO: the "not self.uselist" can be taken out entirely; a m2o
        # load that populates for a list (very unusual, but possible with
        # the API) can still be set to "None" and the attribute system
        # will populate it as an empty list.
747 self.use_get = (
748 not self.is_aliased_class
749 and not self.uselist
750 and self.entity._get_clause[0].compare(
751 self._lazywhere,
752 use_proxies=True,
753 compare_keys=False,
754 equivalents=self.mapper._equivalent_columns,
755 )
756 )
757
758 if self.use_get:
759 for col in list(self._equated_columns):
760 if col in self.mapper._equivalent_columns:
761 for c in self.mapper._equivalent_columns[col]:
762 self._equated_columns[c] = self._equated_columns[col]
763
764 self.logger.info(
765 "%s will use Session.get() to optimize instance loads", self
766 )
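
        # An illustrative sketch only, assuming a hypothetical many-to-one
        # ``Order.customer`` relationship joining to the primary key of
        # ``Customer``: "use_get" means the lazy load can be satisfied from
        # the identity map (or a Session.get()) instead of rendering the
        # lazy clause::
        #
        #     order = session.get(Order, 5)
        #     customer = session.get(Customer, 7)
        #
        #     # assuming order.customer_id == 7, no new SELECT is emitted;
        #     # the identity-map lookup in _load_for_state() returns the
        #     # already-loaded Customer
        #     assert order.customer is customer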
767
768 def init_class_attribute(self, mapper):
769 self.is_class_level = True
770
771 _legacy_inactive_history_style = (
772 self.parent_property._legacy_inactive_history_style
773 )
774
775 if self.parent_property.active_history:
776 active_history = True
777 _deferred_history = False
778
779 elif (
780 self.parent_property.direction is not interfaces.MANYTOONE
781 or not self.use_get
782 ):
783 if _legacy_inactive_history_style:
784 active_history = True
785 _deferred_history = False
786 else:
787 active_history = False
788 _deferred_history = True
789 else:
790 active_history = _deferred_history = False
791
792 _register_attribute(
793 self.parent_property,
794 mapper,
795 useobject=True,
796 callable_=self._load_for_state,
797 typecallable=self.parent_property.collection_class,
798 active_history=active_history,
799 _deferred_history=_deferred_history,
800 )
801
802 def _memoized_attr__simple_lazy_clause(self):
803 lazywhere = sql_util._deep_annotate(
804 self._lazywhere, {"_orm_adapt": True}
805 )
806
807 criterion, bind_to_col = (lazywhere, self._bind_to_col)
808
809 params = []
810
811 def visit_bindparam(bindparam):
812 bindparam.unique = False
813
814 visitors.traverse(criterion, {}, {"bindparam": visit_bindparam})
815
816 def visit_bindparam(bindparam):
817 if bindparam._identifying_key in bind_to_col:
818 params.append(
819 (
820 bindparam.key,
821 bind_to_col[bindparam._identifying_key],
822 None,
823 )
824 )
825 elif bindparam.callable is None:
826 params.append((bindparam.key, None, bindparam.value))
827
828 criterion = visitors.cloned_traverse(
829 criterion, {}, {"bindparam": visit_bindparam}
830 )
831
832 return criterion, params
833
834 def _generate_lazy_clause(self, state, passive):
835 criterion, param_keys = self._simple_lazy_clause
836
837 if state is None:
838 return sql_util.adapt_criterion_to_null(
839 criterion, [key for key, ident, value in param_keys]
840 )
841
842 mapper = self.parent_property.parent
843
844 o = state.obj() # strong ref
845 dict_ = attributes.instance_dict(o)
846
847 if passive & PassiveFlag.INIT_OK:
848 passive ^= PassiveFlag.INIT_OK
849
850 params = {}
851 for key, ident, value in param_keys:
852 if ident is not None:
853 if passive and passive & PassiveFlag.LOAD_AGAINST_COMMITTED:
854 value = mapper._get_committed_state_attr_by_column(
855 state, dict_, ident, passive
856 )
857 else:
858 value = mapper._get_state_attr_by_column(
859 state, dict_, ident, passive
860 )
861
862 params[key] = value
863
864 return criterion, params
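
    # A rough sketch only, assuming a hypothetical ``Parent.children``
    # relationship joining on ``child.parent_id == parent.id``: the simple
    # lazy clause and the per-state parameters produced above look roughly
    # like::
    #
    #     criterion  -->  child.parent_id = :param_1
    #     params     -->  {"param_1": <value of parent.id for this state>}
    #
    # _emit_lazyload() then attaches this criterion as the WHERE clause of
    # the SELECT it builds for the related rows.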
865
866 def _invoke_raise_load(self, state, passive, lazy):
867 raise sa_exc.InvalidRequestError(
868 "'%s' is not available due to lazy='%s'" % (self, lazy)
869 )
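
    # An illustrative sketch only: lazy="raise" (or the raiseload() option)
    # routes attribute access into _invoke_raise_load() above, while
    # lazy="raise_on_sql" allows identity-map hits but raises before any
    # SQL would be emitted::
    #
    #     from sqlalchemy import select
    #     from sqlalchemy.orm import raiseload
    #
    #     parent = session.scalars(
    #         select(Parent).options(raiseload(Parent.children))
    #     ).first()
    #     parent.children   # raises sqlalchemy.exc.InvalidRequestError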
870
871 def _load_for_state(
872 self,
873 state,
874 passive,
875 loadopt=None,
876 extra_criteria=(),
877 extra_options=(),
878 alternate_effective_path=None,
879 execution_options=util.EMPTY_DICT,
880 ):
881 if not state.key and (
882 (
883 not self.parent_property.load_on_pending
884 and not state._load_pending
885 )
886 or not state.session_id
887 ):
888 return LoaderCallableStatus.ATTR_EMPTY
889
890 pending = not state.key
891 primary_key_identity = None
892
893 use_get = self.use_get and (not loadopt or not loadopt._extra_criteria)
894
895 if (not passive & PassiveFlag.SQL_OK and not use_get) or (
896 not passive & attributes.NON_PERSISTENT_OK and pending
897 ):
898 return LoaderCallableStatus.PASSIVE_NO_RESULT
899
900 if (
901 # we were given lazy="raise"
902 self._raise_always
903 # the no_raise history-related flag was not passed
904 and not passive & PassiveFlag.NO_RAISE
905 and (
906 # if we are use_get and related_object_ok is disabled,
907 # which means we are at most looking in the identity map
908 # for history purposes or otherwise returning
909 # PASSIVE_NO_RESULT, don't raise. This is also a
910 # history-related flag
911 not use_get
912 or passive & PassiveFlag.RELATED_OBJECT_OK
913 )
914 ):
915 self._invoke_raise_load(state, passive, "raise")
916
917 session = _state_session(state)
918 if not session:
919 if passive & PassiveFlag.NO_RAISE:
920 return LoaderCallableStatus.PASSIVE_NO_RESULT
921
922 raise orm_exc.DetachedInstanceError(
923 "Parent instance %s is not bound to a Session; "
924 "lazy load operation of attribute '%s' cannot proceed"
925 % (orm_util.state_str(state), self.key)
926 )
927
928 # if we have a simple primary key load, check the
929 # identity map without generating a Query at all
930 if use_get:
931 primary_key_identity = self._get_ident_for_use_get(
932 session, state, passive
933 )
934 if LoaderCallableStatus.PASSIVE_NO_RESULT in primary_key_identity:
935 return LoaderCallableStatus.PASSIVE_NO_RESULT
936 elif LoaderCallableStatus.NEVER_SET in primary_key_identity:
937 return LoaderCallableStatus.NEVER_SET
938
939 if _none_set.issuperset(primary_key_identity):
940 return None
941
942 if (
943 self.key in state.dict
944 and not passive & PassiveFlag.DEFERRED_HISTORY_LOAD
945 ):
946 return LoaderCallableStatus.ATTR_WAS_SET
947
948 # look for this identity in the identity map. Delegate to the
949 # Query class in use, as it may have special rules for how it
950 # does this, including how it decides what the correct
951 # identity_token would be for this identity.
952
953 instance = session._identity_lookup(
954 self.entity,
955 primary_key_identity,
956 passive=passive,
957 lazy_loaded_from=state,
958 )
959
960 if instance is not None:
961 if instance is LoaderCallableStatus.PASSIVE_CLASS_MISMATCH:
962 return None
963 else:
964 return instance
965 elif (
966 not passive & PassiveFlag.SQL_OK
967 or not passive & PassiveFlag.RELATED_OBJECT_OK
968 ):
969 return LoaderCallableStatus.PASSIVE_NO_RESULT
970
971 return self._emit_lazyload(
972 session,
973 state,
974 primary_key_identity,
975 passive,
976 loadopt,
977 extra_criteria,
978 extra_options,
979 alternate_effective_path,
980 execution_options,
981 )
982
983 def _get_ident_for_use_get(self, session, state, passive):
984 instance_mapper = state.manager.mapper
985
986 if passive & PassiveFlag.LOAD_AGAINST_COMMITTED:
987 get_attr = instance_mapper._get_committed_state_attr_by_column
988 else:
989 get_attr = instance_mapper._get_state_attr_by_column
990
991 dict_ = state.dict
992
993 return [
994 get_attr(state, dict_, self._equated_columns[pk], passive=passive)
995 for pk in self.mapper.primary_key
996 ]
997
998 @util.preload_module("sqlalchemy.orm.strategy_options")
999 def _emit_lazyload(
1000 self,
1001 session,
1002 state,
1003 primary_key_identity,
1004 passive,
1005 loadopt,
1006 extra_criteria,
1007 extra_options,
1008 alternate_effective_path,
1009 execution_options,
1010 ):
1011 strategy_options = util.preloaded.orm_strategy_options
1012
1013 clauseelement = self.entity.__clause_element__()
1014 stmt = Select._create_raw_select(
1015 _raw_columns=[clauseelement],
1016 _propagate_attrs=clauseelement._propagate_attrs,
1017 _label_style=LABEL_STYLE_TABLENAME_PLUS_COL,
1018 _compile_options=ORMCompileState.default_compile_options,
1019 )
1020 load_options = QueryContext.default_load_options
1021
1022 load_options += {
1023 "_invoke_all_eagers": False,
1024 "_lazy_loaded_from": state,
1025 }
1026
1027 if self.parent_property.secondary is not None:
1028 stmt = stmt.select_from(
1029 self.mapper, self.parent_property.secondary
1030 )
1031
1032 pending = not state.key
1033
1034 # don't autoflush on pending
1035 if pending or passive & attributes.NO_AUTOFLUSH:
1036 stmt._execution_options = util.immutabledict({"autoflush": False})
1037
1038 use_get = self.use_get
1039
1040 if state.load_options or (loadopt and loadopt._extra_criteria):
1041 if alternate_effective_path is None:
1042 effective_path = state.load_path[self.parent_property]
1043 else:
1044 effective_path = alternate_effective_path[self.parent_property]
1045
1046 opts = state.load_options
1047
1048 if loadopt and loadopt._extra_criteria:
1049 use_get = False
1050 opts += (
1051 orm_util.LoaderCriteriaOption(self.entity, extra_criteria),
1052 )
1053
1054 stmt._with_options = opts
1055 elif alternate_effective_path is None:
1056 # this path is used if there are not already any options
1057 # in the query, but an event may want to add them
1058 effective_path = state.mapper._path_registry[self.parent_property]
1059 else:
1060 # added by immediateloader
1061 effective_path = alternate_effective_path[self.parent_property]
1062
1063 if extra_options:
1064 stmt._with_options += extra_options
1065
1066 stmt._compile_options += {"_current_path": effective_path}
1067
1068 if use_get:
1069 if self._raise_on_sql and not passive & PassiveFlag.NO_RAISE:
1070 self._invoke_raise_load(state, passive, "raise_on_sql")
1071
1072 return loading.load_on_pk_identity(
1073 session,
1074 stmt,
1075 primary_key_identity,
1076 load_options=load_options,
1077 execution_options=execution_options,
1078 )
1079
1080 if self._order_by:
1081 stmt._order_by_clauses = self._order_by
1082
1083 def _lazyload_reverse(compile_context):
1084 for rev in self.parent_property._reverse_property:
1085 # reverse props that are MANYTOONE are loading *this*
1086 # object from get(), so don't need to eager out to those.
1087 if (
1088 rev.direction is interfaces.MANYTOONE
1089 and rev._use_get
1090 and not isinstance(rev.strategy, LazyLoader)
1091 ):
1092 strategy_options.Load._construct_for_existing_path(
1093 compile_context.compile_options._current_path[
1094 rev.parent
1095 ]
1096 ).lazyload(rev).process_compile_state(compile_context)
1097
1098 stmt._with_context_options += (
1099 (_lazyload_reverse, self.parent_property),
1100 )
1101
1102 lazy_clause, params = self._generate_lazy_clause(state, passive)
1103
1104 if execution_options:
1105 execution_options = util.EMPTY_DICT.merge_with(
1106 execution_options,
1107 {
1108 "_sa_orm_load_options": load_options,
1109 },
1110 )
1111 else:
1112 execution_options = {
1113 "_sa_orm_load_options": load_options,
1114 }
1115
1116 if (
1117 self.key in state.dict
1118 and not passive & PassiveFlag.DEFERRED_HISTORY_LOAD
1119 ):
1120 return LoaderCallableStatus.ATTR_WAS_SET
1121
1122 if pending:
1123 if util.has_intersection(orm_util._none_set, params.values()):
1124 return None
1125
1126 elif util.has_intersection(orm_util._never_set, params.values()):
1127 return None
1128
1129 if self._raise_on_sql and not passive & PassiveFlag.NO_RAISE:
1130 self._invoke_raise_load(state, passive, "raise_on_sql")
1131
1132 stmt._where_criteria = (lazy_clause,)
1133
1134 result = session.execute(
1135 stmt, params, execution_options=execution_options
1136 )
1137
1138 result = result.unique().scalars().all()
1139
1140 if self.uselist:
1141 return result
1142 else:
1143 l = len(result)
1144 if l:
1145 if l > 1:
1146 util.warn(
1147 "Multiple rows returned with "
1148 "uselist=False for lazily-loaded attribute '%s' "
1149 % self.parent_property
1150 )
1151
1152 return result[0]
1153 else:
1154 return None
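
    # A rough sketch only: for a non-"use get" lazy load of a hypothetical
    # ``Parent.children`` collection, the statement emitted above
    # corresponds roughly to::
    #
    #     SELECT child.id, child.parent_id, ...
    #     FROM child
    #     WHERE child.parent_id = :param_1
    #     [ORDER BY ...]
    #
    # executed with parameters bound from the parent row via
    # _generate_lazy_clause(), with unique()/scalars() applied to the
    # result before returning either the collection or a single object.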
1155
1156 def create_row_processor(
1157 self,
1158 context,
1159 query_entity,
1160 path,
1161 loadopt,
1162 mapper,
1163 result,
1164 adapter,
1165 populators,
1166 ):
1167 key = self.key
1168
1169 if (
1170 context.load_options._is_user_refresh
1171 and context.query._compile_options._only_load_props
1172 and self.key in context.query._compile_options._only_load_props
1173 ):
1174 return self._immediateload_create_row_processor(
1175 context,
1176 query_entity,
1177 path,
1178 loadopt,
1179 mapper,
1180 result,
1181 adapter,
1182 populators,
1183 )
1184
1185 if not self.is_class_level or (loadopt and loadopt._extra_criteria):
1186 # we are not the primary manager for this attribute
1187 # on this class - set up a
1188 # per-instance lazyloader, which will override the
1189 # class-level behavior.
1190 # this currently only happens when using a
1191 # "lazyload" option on a "no load"
1192 # attribute - "eager" attributes always have a
1193 # class-level lazyloader installed.
1194 set_lazy_callable = (
1195 InstanceState._instance_level_callable_processor
1196 )(
1197 mapper.class_manager,
1198 LoadLazyAttribute(
1199 key,
1200 self,
1201 loadopt,
1202 (
1203 loadopt._generate_extra_criteria(context)
1204 if loadopt._extra_criteria
1205 else None
1206 ),
1207 ),
1208 key,
1209 )
1210
1211 populators["new"].append((self.key, set_lazy_callable))
1212 elif context.populate_existing or mapper.always_refresh:
1213
1214 def reset_for_lazy_callable(state, dict_, row):
1215 # we are the primary manager for this attribute on
1216 # this class - reset its
1217 # per-instance attribute state, so that the class-level
1218 # lazy loader is
1219 # executed when next referenced on this instance.
1220 # this is needed in
1221 # populate_existing() types of scenarios to reset
1222 # any existing state.
1223 state._reset(dict_, key)
1224
1225 populators["new"].append((self.key, reset_for_lazy_callable))
1226
1227
1228class LoadLazyAttribute:
1229 """semi-serializable loader object used by LazyLoader
1230
1231 Historically, this object would be carried along with instances that
1232 needed to run lazyloaders, so it had to be serializable to support
1233 cached instances.
1234
    This is no longer a general requirement, and the one case where this
    object is still used is exactly the case where we can't really
    serialize easily: when extra criteria are present in the loader
    option.

    We can't reliably serialize that criteria, as it refers to mapped
    entities and AliasedClass objects that are local to the current
    process; these would need to be matched up on deserialization, e.g.
    via the sqlalchemy.ext.serializer approach.
1243
1244 """
1245
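    # An illustrative sketch only: this object ends up in an instance's
    # state when a lazyload() option with extra criteria is in play, e.g.::
    #
    #     from sqlalchemy import select
    #     from sqlalchemy.orm import lazyload
    #
    #     stmt = select(Parent).options(
    #         lazyload(Parent.children.and_(Child.flag == True))
    #     )
    #
    # pickling such an instance before the attribute has loaded emits the
    # warning in __getstate__() below and drops the extra criteria.
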
1246 def __init__(self, key, initiating_strategy, loadopt, extra_criteria):
1247 self.key = key
1248 self.strategy_key = initiating_strategy.strategy_key
1249 self.loadopt = loadopt
1250 self.extra_criteria = extra_criteria
1251
1252 def __getstate__(self):
1253 if self.extra_criteria is not None:
1254 util.warn(
1255 "Can't reliably serialize a lazyload() option that "
1256 "contains additional criteria; please use eager loading "
1257 "for this case"
1258 )
1259 return {
1260 "key": self.key,
1261 "strategy_key": self.strategy_key,
1262 "loadopt": self.loadopt,
1263 "extra_criteria": (),
1264 }
1265
1266 def __call__(self, state, passive=attributes.PASSIVE_OFF):
1267 key = self.key
1268 instance_mapper = state.manager.mapper
1269 prop = instance_mapper._props[key]
1270 strategy = prop._strategies[self.strategy_key]
1271
1272 return strategy._load_for_state(
1273 state,
1274 passive,
1275 loadopt=self.loadopt,
1276 extra_criteria=self.extra_criteria,
1277 )
1278
1279
1280class PostLoader(AbstractRelationshipLoader):
1281 """A relationship loader that emits a second SELECT statement."""
1282
1283 __slots__ = ()
1284
1285 def _setup_for_recursion(self, context, path, loadopt, join_depth=None):
1286 effective_path = (
1287 context.compile_state.current_path or orm_util.PathRegistry.root
1288 ) + path
1289
1290 top_level_context = context._get_top_level_context()
1291 execution_options = util.immutabledict(
1292 {"sa_top_level_orm_context": top_level_context}
1293 )
1294
1295 if loadopt:
1296 recursion_depth = loadopt.local_opts.get("recursion_depth", None)
1297 unlimited_recursion = recursion_depth == -1
1298 else:
1299 recursion_depth = None
1300 unlimited_recursion = False
1301
1302 if recursion_depth is not None:
1303 if not self.parent_property._is_self_referential:
1304 raise sa_exc.InvalidRequestError(
1305 f"recursion_depth option on relationship "
1306 f"{self.parent_property} not valid for "
1307 "non-self-referential relationship"
1308 )
1309 recursion_depth = context.execution_options.get(
1310 f"_recursion_depth_{id(self)}", recursion_depth
1311 )
1312
1313 if not unlimited_recursion and recursion_depth < 0:
1314 return (
1315 effective_path,
1316 False,
1317 execution_options,
1318 recursion_depth,
1319 )
1320
1321 if not unlimited_recursion:
1322 execution_options = execution_options.union(
1323 {
1324 f"_recursion_depth_{id(self)}": recursion_depth - 1,
1325 }
1326 )
1327
1328 if loading.PostLoad.path_exists(
1329 context, effective_path, self.parent_property
1330 ):
1331 return effective_path, False, execution_options, recursion_depth
1332
1333 path_w_prop = path[self.parent_property]
1334 effective_path_w_prop = effective_path[self.parent_property]
1335
1336 if not path_w_prop.contains(context.attributes, "loader"):
1337 if join_depth:
1338 if effective_path_w_prop.length / 2 > join_depth:
1339 return (
1340 effective_path,
1341 False,
1342 execution_options,
1343 recursion_depth,
1344 )
1345 elif effective_path_w_prop.contains_mapper(self.mapper):
1346 return (
1347 effective_path,
1348 False,
1349 execution_options,
1350 recursion_depth,
1351 )
1352
1353 return effective_path, True, execution_options, recursion_depth
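
    # An illustrative sketch only: the recursion_depth handling above
    # supports self-referential eager loads such as, for a hypothetical
    # ``Node.children`` relationship::
    #
    #     from sqlalchemy import select
    #     from sqlalchemy.orm import selectinload
    #
    #     stmt = select(Node).options(
    #         selectinload(Node.children, recursion_depth=3)
    #     )
    #
    # each nested load decrements the depth stored in execution_options
    # until it goes negative (or runs without limit for -1).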
1354
1355
1356@relationships.RelationshipProperty.strategy_for(lazy="immediate")
1357class ImmediateLoader(PostLoader):
1358 __slots__ = ("join_depth",)
1359
1360 def __init__(self, parent, strategy_key):
1361 super().__init__(parent, strategy_key)
1362 self.join_depth = self.parent_property.join_depth
1363
1364 def init_class_attribute(self, mapper):
1365 self.parent_property._get_strategy(
1366 (("lazy", "select"),)
1367 ).init_class_attribute(mapper)
1368
1369 def create_row_processor(
1370 self,
1371 context,
1372 query_entity,
1373 path,
1374 loadopt,
1375 mapper,
1376 result,
1377 adapter,
1378 populators,
1379 ):
1380 (
1381 effective_path,
1382 run_loader,
1383 execution_options,
1384 recursion_depth,
1385 ) = self._setup_for_recursion(context, path, loadopt, self.join_depth)
1386 if not run_loader:
            # this will not emit SQL and will only emit for a many-to-one
            # "use get" load.  the "_RELATED" part means it may return
            # the instance even if it's expired, since this is a
            # mutually-recursive load operation.
1391 flags = attributes.PASSIVE_NO_FETCH_RELATED | PassiveFlag.NO_RAISE
1392 else:
1393 flags = attributes.PASSIVE_OFF | PassiveFlag.NO_RAISE
1394
1395 loading.PostLoad.callable_for_path(
1396 context,
1397 effective_path,
1398 self.parent,
1399 self.parent_property,
1400 self._load_for_path,
1401 loadopt,
1402 flags,
1403 recursion_depth,
1404 execution_options,
1405 )
1406
1407 def _load_for_path(
1408 self,
1409 context,
1410 path,
1411 states,
1412 load_only,
1413 loadopt,
1414 flags,
1415 recursion_depth,
1416 execution_options,
1417 ):
1418 if recursion_depth:
1419 new_opt = Load(loadopt.path.entity)
1420 new_opt.context = (
1421 loadopt,
1422 loadopt._recurse(),
1423 )
1424 alternate_effective_path = path._truncate_recursive()
1425 extra_options = (new_opt,)
1426 else:
1427 new_opt = None
1428 alternate_effective_path = path
1429 extra_options = ()
1430
1431 key = self.key
1432 lazyloader = self.parent_property._get_strategy((("lazy", "select"),))
1433 for state, overwrite in states:
1434 dict_ = state.dict
1435
1436 if overwrite or key not in dict_:
1437 value = lazyloader._load_for_state(
1438 state,
1439 flags,
1440 extra_options=extra_options,
1441 alternate_effective_path=alternate_effective_path,
1442 execution_options=execution_options,
1443 )
1444 if value not in (
1445 ATTR_WAS_SET,
1446 LoaderCallableStatus.PASSIVE_NO_RESULT,
1447 ):
1448 state.get_impl(key).set_committed_value(
1449 state, dict_, value
1450 )
1451
1452
1453@log.class_logger
1454@relationships.RelationshipProperty.strategy_for(lazy="subquery")
1455class SubqueryLoader(PostLoader):
1456 __slots__ = ("join_depth",)
1457
1458 def __init__(self, parent, strategy_key):
1459 super().__init__(parent, strategy_key)
1460 self.join_depth = self.parent_property.join_depth
1461
1462 def init_class_attribute(self, mapper):
1463 self.parent_property._get_strategy(
1464 (("lazy", "select"),)
1465 ).init_class_attribute(mapper)
1466
1467 def _get_leftmost(
1468 self,
1469 orig_query_entity_index,
1470 subq_path,
1471 current_compile_state,
1472 is_root,
1473 ):
1474 given_subq_path = subq_path
1475 subq_path = subq_path.path
1476 subq_mapper = orm_util._class_to_mapper(subq_path[0])
1477
1478 # determine attributes of the leftmost mapper
1479 if (
1480 self.parent.isa(subq_mapper)
1481 and self.parent_property is subq_path[1]
1482 ):
1483 leftmost_mapper, leftmost_prop = self.parent, self.parent_property
1484 else:
1485 leftmost_mapper, leftmost_prop = subq_mapper, subq_path[1]
1486
1487 if is_root:
            # the subq_path is also coming from cached state, so when we
            # start building up this path, it has to also be converted to
            # be in terms of the current state.  this is for the specific
            # case where the entity is an AliasedClass against a subquery
            # that's not otherwise going to adapt
1493 new_subq_path = current_compile_state._entities[
1494 orig_query_entity_index
1495 ].entity_zero._path_registry[leftmost_prop]
1496 additional = len(subq_path) - len(new_subq_path)
1497 if additional:
1498 new_subq_path += path_registry.PathRegistry.coerce(
1499 subq_path[-additional:]
1500 )
1501 else:
1502 new_subq_path = given_subq_path
1503
1504 leftmost_cols = leftmost_prop.local_columns
1505
1506 leftmost_attr = [
1507 getattr(
1508 new_subq_path.path[0].entity,
1509 leftmost_mapper._columntoproperty[c].key,
1510 )
1511 for c in leftmost_cols
1512 ]
1513
1514 return leftmost_mapper, leftmost_attr, leftmost_prop, new_subq_path
1515
1516 def _generate_from_original_query(
1517 self,
1518 orig_compile_state,
1519 orig_query,
1520 leftmost_mapper,
1521 leftmost_attr,
1522 leftmost_relationship,
1523 orig_entity,
1524 ):
1525 # reformat the original query
1526 # to look only for significant columns
1527 q = orig_query._clone().correlate(None)
1528
1529 # LEGACY: make a Query back from the select() !!
1530 # This suits at least two legacy cases:
1531 # 1. applications which expect before_compile() to be called
1532 # below when we run .subquery() on this query (Keystone)
1533 # 2. applications which are doing subqueryload with complex
1534 # from_self() queries, as query.subquery() / .statement
1535 # has to do the full compile context for multiply-nested
1536 # from_self() (Neutron) - see test_subqload_from_self
1537 # for demo.
1538 q2 = query.Query.__new__(query.Query)
1539 q2.__dict__.update(q.__dict__)
1540 q = q2
1541
1542 # set the query's "FROM" list explicitly to what the
1543 # FROM list would be in any case, as we will be limiting
1544 # the columns in the SELECT list which may no longer include
1545 # all entities mentioned in things like WHERE, JOIN, etc.
1546 if not q._from_obj:
1547 q._enable_assertions = False
1548 q.select_from.non_generative(
1549 q,
1550 *{
1551 ent["entity"]
1552 for ent in _column_descriptions(
1553 orig_query, compile_state=orig_compile_state
1554 )
1555 if ent["entity"] is not None
1556 },
1557 )
1558
        # select from the identity columns of the outer (specifically, these
        # are the 'local_cols' of the property).  This will remove other
        # columns from the query that might suggest the right entity, which
        # is why we set select_from above.  The attributes we have are
1563 # coerced and adapted using the original query's adapter, which is
1564 # needed only for the case of adapting a subclass column to
1565 # that of a polymorphic selectable, e.g. we have
1566 # Engineer.primary_language and the entity is Person. All other
1567 # adaptations, e.g. from_self, select_entity_from(), will occur
1568 # within the new query when it compiles, as the compile_state we are
1569 # using here is only a partial one. If the subqueryload is from a
1570 # with_polymorphic() or other aliased() object, left_attr will already
1571 # be the correct attributes so no adaptation is needed.
1572 target_cols = orig_compile_state._adapt_col_list(
1573 [
1574 sql.coercions.expect(sql.roles.ColumnsClauseRole, o)
1575 for o in leftmost_attr
1576 ],
1577 orig_compile_state._get_current_adapter(),
1578 )
1579 q._raw_columns = target_cols
1580
1581 distinct_target_key = leftmost_relationship.distinct_target_key
1582
1583 if distinct_target_key is True:
1584 q._distinct = True
1585 elif distinct_target_key is None:
1586 # if target_cols refer to a non-primary key or only
1587 # part of a composite primary key, set the q as distinct
1588 for t in {c.table for c in target_cols}:
1589 if not set(target_cols).issuperset(t.primary_key):
1590 q._distinct = True
1591 break
1592
1593 # don't need ORDER BY if no limit/offset
1594 if not q._has_row_limiting_clause:
1595 q._order_by_clauses = ()
1596
1597 if q._distinct is True and q._order_by_clauses:
1598 # the logic to automatically add the order by columns to the query
1599 # when distinct is True is deprecated in the query
1600 to_add = sql_util.expand_column_list_from_order_by(
1601 target_cols, q._order_by_clauses
1602 )
1603 if to_add:
1604 q._set_entities(target_cols + to_add)
1605
1606 # the original query now becomes a subquery
1607 # which we'll join onto.
1608 # LEGACY: as "q" is a Query, the before_compile() event is invoked
1609 # here.
1610 embed_q = q.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL).subquery()
1611 left_alias = orm_util.AliasedClass(
1612 leftmost_mapper, embed_q, use_mapper_path=True
1613 )
1614 return left_alias
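
    # A rough sketch only: for ``session.scalars(select(Parent)
    # .options(subqueryload(Parent.children)))``, the left_alias built above
    # wraps the original query reduced to the parent's local columns, so the
    # eventual eager-load statement looks roughly like::
    #
    #     SELECT child.*, anon_1.parent_id
    #     FROM (SELECT parent.id AS parent_id FROM parent) AS anon_1
    #     JOIN child ON anon_1.parent_id = child.parent_id
    #
    # with rows grouped per-parent by the _SubqCollections helper further
    # below.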
1615
1616 def _prep_for_joins(self, left_alias, subq_path):
1617 # figure out what's being joined. a.k.a. the fun part
1618 to_join = []
1619 pairs = list(subq_path.pairs())
1620
1621 for i, (mapper, prop) in enumerate(pairs):
1622 if i > 0:
1623 # look at the previous mapper in the chain -
1624 # if it is as or more specific than this prop's
1625 # mapper, use that instead.
1626 # note we have an assumption here that
1627 # the non-first element is always going to be a mapper,
1628 # not an AliasedClass
1629
1630 prev_mapper = pairs[i - 1][1].mapper
1631 to_append = prev_mapper if prev_mapper.isa(mapper) else mapper
1632 else:
1633 to_append = mapper
1634
1635 to_join.append((to_append, prop.key))
1636
1637 # determine the immediate parent class we are joining from,
1638 # which needs to be aliased.
1639
1640 if len(to_join) < 2:
1641 # in the case of a one level eager load, this is the
1642 # leftmost "left_alias".
1643 parent_alias = left_alias
1644 else:
1645 info = inspect(to_join[-1][0])
1646 if info.is_aliased_class:
1647 parent_alias = info.entity
1648 else:
1649 # alias a plain mapper as we may be
1650 # joining multiple times
1651 parent_alias = orm_util.AliasedClass(
1652 info.entity, use_mapper_path=True
1653 )
1654
1655 local_cols = self.parent_property.local_columns
1656
1657 local_attr = [
1658 getattr(parent_alias, self.parent._columntoproperty[c].key)
1659 for c in local_cols
1660 ]
1661 return to_join, local_attr, parent_alias
1662
1663 def _apply_joins(
1664 self, q, to_join, left_alias, parent_alias, effective_entity
1665 ):
1666 ltj = len(to_join)
1667 if ltj == 1:
1668 to_join = [
1669 getattr(left_alias, to_join[0][1]).of_type(effective_entity)
1670 ]
1671 elif ltj == 2:
1672 to_join = [
1673 getattr(left_alias, to_join[0][1]).of_type(parent_alias),
1674 getattr(parent_alias, to_join[-1][1]).of_type(
1675 effective_entity
1676 ),
1677 ]
1678 elif ltj > 2:
1679 middle = [
1680 (
1681 (
1682 orm_util.AliasedClass(item[0])
1683 if not inspect(item[0]).is_aliased_class
1684 else item[0].entity
1685 ),
1686 item[1],
1687 )
1688 for item in to_join[1:-1]
1689 ]
1690 inner = []
1691
1692 while middle:
1693 item = middle.pop(0)
1694 attr = getattr(item[0], item[1])
1695 if middle:
1696 attr = attr.of_type(middle[0][0])
1697 else:
1698 attr = attr.of_type(parent_alias)
1699
1700 inner.append(attr)
1701
1702 to_join = (
1703 [getattr(left_alias, to_join[0][1]).of_type(inner[0].parent)]
1704 + inner
1705 + [
1706 getattr(parent_alias, to_join[-1][1]).of_type(
1707 effective_entity
1708 )
1709 ]
1710 )
1711
1712 for attr in to_join:
1713 q = q.join(attr)
1714
1715 return q
1716
1717 def _setup_options(
1718 self,
1719 context,
1720 q,
1721 subq_path,
1722 rewritten_path,
1723 orig_query,
1724 effective_entity,
1725 loadopt,
1726 ):
        # note that because the subqueryload object does not re-use the
        # cached query, and instead always makes use of the currently
        # invoked query, the two queries we have here (orig and
        # context.query) are both non-cached queries, so we can transfer
        # the options as-is without adjusting for new criteria.  Some
        # work on #6881 / #6889 brought this into question.
1734 new_options = orig_query._with_options
1735
1736 if loadopt and loadopt._extra_criteria:
1737 new_options += (
1738 orm_util.LoaderCriteriaOption(
1739 self.entity,
1740 loadopt._generate_extra_criteria(context),
1741 ),
1742 )
1743
1744 # propagate loader options etc. to the new query.
1745 # these will fire relative to subq_path.
1746 q = q._with_current_path(rewritten_path)
1747 q = q.options(*new_options)
1748
1749 return q
1750
1751 def _setup_outermost_orderby(self, q):
1752 if self.parent_property.order_by:
1753
1754 def _setup_outermost_orderby(compile_context):
1755 compile_context.eager_order_by += tuple(
1756 util.to_list(self.parent_property.order_by)
1757 )
1758
1759 q = q._add_context_option(
1760 _setup_outermost_orderby, self.parent_property
1761 )
1762
1763 return q
1764
1765 class _SubqCollections:
1766 """Given a :class:`_query.Query` used to emit the "subquery load",
1767 provide a load interface that executes the query at the
1768 first moment a value is needed.
1769
1770 """
1771
1772 __slots__ = (
1773 "session",
1774 "execution_options",
1775 "load_options",
1776 "params",
1777 "subq",
1778 "_data",
1779 )
1780
1781 def __init__(self, context, subq):
            # avoid creating a reference cycle by not storing the context
            # itself, even though having it here would be preferable
1784 self.session = context.session
1785 self.execution_options = context.execution_options
1786 self.load_options = context.load_options
1787 self.params = context.params or {}
1788 self.subq = subq
1789 self._data = None
1790
1791 def get(self, key, default):
1792 if self._data is None:
1793 self._load()
1794 return self._data.get(key, default)
1795
1796 def _load(self):
1797 self._data = collections.defaultdict(list)
1798
1799 q = self.subq
1800 assert q.session is None
1801
1802 q = q.with_session(self.session)
1803
1804 if self.load_options._populate_existing:
1805 q = q.populate_existing()
1806 # to work with baked query, the parameters may have been
1807 # updated since this query was created, so take these into account
1808
1809 rows = list(q.params(self.params))
1810 for k, v in itertools.groupby(rows, lambda x: x[1:]):
1811 self._data[k].extend(vv[0] for vv in v)
1812
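        # An illustrative sketch only: each row produced by the subquery
        # load is a tuple of (related_object, *parent_key_cols); the
        # groupby above keys on the parent columns, so roughly::
        #
        #     rows = [(child1, 1), (child2, 1), (child3, 2)]
        #     self._data  -->  {(1,): [child1, child2], (2,): [child3]}
        #
        # the loaders installed by _create_collection_loader() /
        # _create_scalar_loader() then pull each parent's entry via
        # tuple_getter(row).
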
1813 def loader(self, state, dict_, row):
1814 if self._data is None:
1815 self._load()
1816
1817 def _setup_query_from_rowproc(
1818 self,
1819 context,
1820 query_entity,
1821 path,
1822 entity,
1823 loadopt,
1824 adapter,
1825 ):
1826 compile_state = context.compile_state
1827 if (
1828 not compile_state.compile_options._enable_eagerloads
1829 or compile_state.compile_options._for_refresh_state
1830 ):
1831 return
1832
1833 orig_query_entity_index = compile_state._entities.index(query_entity)
1834 context.loaders_require_buffering = True
1835
1836 path = path[self.parent_property]
1837
1838 # build up a path indicating the path from the leftmost
1839 # entity to the thing we're subquery loading.
1840 with_poly_entity = path.get(
1841 compile_state.attributes, "path_with_polymorphic", None
1842 )
1843 if with_poly_entity is not None:
1844 effective_entity = with_poly_entity
1845 else:
1846 effective_entity = self.entity
1847
1848 subq_path, rewritten_path = context.query._execution_options.get(
1849 ("subquery_paths", None),
1850 (orm_util.PathRegistry.root, orm_util.PathRegistry.root),
1851 )
1852 is_root = subq_path is orm_util.PathRegistry.root
1853 subq_path = subq_path + path
1854 rewritten_path = rewritten_path + path
1855
        # use the current query being invoked, not the compile state
        # one.  this is so that we get the current parameters.  however,
        # it means we can't use the existing compile state; we have to
        # make a new one.  other approaches include possibly using the
        # compiled query but swapping the params, which seems only
        # marginally faster but more complicated
1862 orig_query = context.query._execution_options.get(
1863 ("orig_query", SubqueryLoader), context.query
1864 )
1865
1866 # make a new compile_state for the query that's probably cached, but
1867 # we're sort of undoing a bit of that caching :(
1868 compile_state_cls = ORMCompileState._get_plugin_class_for_plugin(
1869 orig_query, "orm"
1870 )
1871
1872 if orig_query._is_lambda_element:
1873 if context.load_options._lazy_loaded_from is None:
1874 util.warn(
1875 'subqueryloader for "%s" must invoke lambda callable '
1876 "at %r in "
1877 "order to produce a new query, decreasing the efficiency "
1878 "of caching for this statement. Consider using "
1879 "selectinload() for more effective full-lambda caching"
1880 % (self, orig_query)
1881 )
1882 orig_query = orig_query._resolved
1883
        # this is the more "quick" version, however it's not clear how
        # much of this we need.  in particular I can't get a test to fail
        # if the "set_base_alias" is missing, and it's not clear why that is.
1887 orig_compile_state = compile_state_cls._create_entities_collection(
1888 orig_query, legacy=False
1889 )
1890
1891 (
1892 leftmost_mapper,
1893 leftmost_attr,
1894 leftmost_relationship,
1895 rewritten_path,
1896 ) = self._get_leftmost(
1897 orig_query_entity_index,
1898 rewritten_path,
1899 orig_compile_state,
1900 is_root,
1901 )
1902
1903 # generate a new Query from the original, then
1904 # produce a subquery from it.
1905 left_alias = self._generate_from_original_query(
1906 orig_compile_state,
1907 orig_query,
1908 leftmost_mapper,
1909 leftmost_attr,
1910 leftmost_relationship,
1911 entity,
1912 )
1913
1914 # generate another Query that will join the
1915 # left alias to the target relationships.
1916 # basically doing a longhand
1917 # "from_self()". (from_self() itself not quite industrial
1918 # strength enough for all contingencies...but very close)
1919
1920 q = query.Query(effective_entity)
1921
1922 q._execution_options = context.query._execution_options.merge_with(
1923 context.execution_options,
1924 {
1925 ("orig_query", SubqueryLoader): orig_query,
1926 ("subquery_paths", None): (subq_path, rewritten_path),
1927 },
1928 )
1929
1930 q = q._set_enable_single_crit(False)
1931 to_join, local_attr, parent_alias = self._prep_for_joins(
1932 left_alias, subq_path
1933 )
1934
1935 q = q.add_columns(*local_attr)
1936 q = self._apply_joins(
1937 q, to_join, left_alias, parent_alias, effective_entity
1938 )
1939
1940 q = self._setup_options(
1941 context,
1942 q,
1943 subq_path,
1944 rewritten_path,
1945 orig_query,
1946 effective_entity,
1947 loadopt,
1948 )
1949 q = self._setup_outermost_orderby(q)
1950
1951 return q
1952
1953 def create_row_processor(
1954 self,
1955 context,
1956 query_entity,
1957 path,
1958 loadopt,
1959 mapper,
1960 result,
1961 adapter,
1962 populators,
1963 ):
1964 if context.refresh_state:
1965 return self._immediateload_create_row_processor(
1966 context,
1967 query_entity,
1968 path,
1969 loadopt,
1970 mapper,
1971 result,
1972 adapter,
1973 populators,
1974 )
1975
1976 _, run_loader, _, _ = self._setup_for_recursion(
1977 context, path, loadopt, self.join_depth
1978 )
1979 if not run_loader:
1980 return
1981
1982 if not isinstance(context.compile_state, ORMSelectCompileState):
1983 # issue 7505 - subqueryload() in 1.3 and previous would silently
1984 # degrade for from_statement() without warning. this behavior
1985 # is restored here
1986 return
1987
1988 if not self.parent.class_manager[self.key].impl.supports_population:
1989 raise sa_exc.InvalidRequestError(
1990 "'%s' does not support object "
1991 "population - eager loading cannot be applied." % self
1992 )
1993
        # a little dance here, as the "path" is still something that only
        # semi-tracks the exact series of things we are loading; it still
        # does not tell us about with_polymorphic() and the like when it's
        # at the root.  the initial MapperEntity is more accurate for this
        # case.
1998 if len(path) == 1:
1999 if not orm_util._entity_isa(query_entity.entity_zero, self.parent):
2000 return
2001 elif not orm_util._entity_isa(path[-1], self.parent):
2002 return
2003
2004 subq = self._setup_query_from_rowproc(
2005 context,
2006 query_entity,
2007 path,
2008 path[-1],
2009 loadopt,
2010 adapter,
2011 )
2012
2013 if subq is None:
2014 return
2015
2016 assert subq.session is None
2017
2018 path = path[self.parent_property]
2019
2020 local_cols = self.parent_property.local_columns
2021
2022 # cache the loaded collections in the context
2023 # so that inheriting mappers don't re-load when they
2024 # call upon create_row_processor again
2025 collections = path.get(context.attributes, "collections")
2026 if collections is None:
2027 collections = self._SubqCollections(context, subq)
2028 path.set(context.attributes, "collections", collections)
2029
2030 if adapter:
2031 local_cols = [adapter.columns[c] for c in local_cols]
2032
2033 if self.uselist:
2034 self._create_collection_loader(
2035 context, result, collections, local_cols, populators
2036 )
2037 else:
2038 self._create_scalar_loader(
2039 context, result, collections, local_cols, populators
2040 )
2041
2042 def _create_collection_loader(
2043 self, context, result, collections, local_cols, populators
2044 ):
2045 tuple_getter = result._tuple_getter(local_cols)
2046
2047 def load_collection_from_subq(state, dict_, row):
2048 collection = collections.get(tuple_getter(row), ())
2049 state.get_impl(self.key).set_committed_value(
2050 state, dict_, collection
2051 )
2052
2053 def load_collection_from_subq_existing_row(state, dict_, row):
2054 if self.key not in dict_:
2055 load_collection_from_subq(state, dict_, row)
2056
2057 populators["new"].append((self.key, load_collection_from_subq))
2058 populators["existing"].append(
2059 (self.key, load_collection_from_subq_existing_row)
2060 )
2061
2062 if context.invoke_all_eagers:
2063 populators["eager"].append((self.key, collections.loader))
2064
2065 def _create_scalar_loader(
2066 self, context, result, collections, local_cols, populators
2067 ):
2068 tuple_getter = result._tuple_getter(local_cols)
2069
2070 def load_scalar_from_subq(state, dict_, row):
2071 collection = collections.get(tuple_getter(row), (None,))
2072 if len(collection) > 1:
2073 util.warn(
2074 "Multiple rows returned with "
2075 "uselist=False for eagerly-loaded attribute '%s' " % self
2076 )
2077
2078 scalar = collection[0]
2079 state.get_impl(self.key).set_committed_value(state, dict_, scalar)
2080
2081 def load_scalar_from_subq_existing_row(state, dict_, row):
2082 if self.key not in dict_:
2083 load_scalar_from_subq(state, dict_, row)
2084
2085 populators["new"].append((self.key, load_scalar_from_subq))
2086 populators["existing"].append(
2087 (self.key, load_scalar_from_subq_existing_row)
2088 )
2089 if context.invoke_all_eagers:
2090 populators["eager"].append((self.key, collections.loader))
2091
2092
2093@log.class_logger
2094@relationships.RelationshipProperty.strategy_for(lazy="joined")
2095@relationships.RelationshipProperty.strategy_for(lazy=False)
2096class JoinedLoader(AbstractRelationshipLoader):
2097 """Provide loading behavior for a :class:`.Relationship`
2098 using joined eager loading.
2099
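    This strategy is normally invoked via the :func:`_orm.joinedload`
    loader option; a minimal usage sketch (``User`` and ``User.addresses``
    are illustrative names only)::

        from sqlalchemy import select
        from sqlalchemy.orm import joinedload

        stmt = select(User).options(joinedload(User.addresses))

    The relationship is rendered as a LEFT OUTER JOIN (by default) added
    to the parent statement.
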
2100 """
2101
2102 __slots__ = "join_depth"
2103
2104 def __init__(self, parent, strategy_key):
2105 super().__init__(parent, strategy_key)
2106 self.join_depth = self.parent_property.join_depth
2107
2108 def init_class_attribute(self, mapper):
2109 self.parent_property._get_strategy(
2110 (("lazy", "select"),)
2111 ).init_class_attribute(mapper)
2112
2113 def setup_query(
2114 self,
2115 compile_state,
2116 query_entity,
2117 path,
2118 loadopt,
2119 adapter,
2120 column_collection=None,
2121 parentmapper=None,
2122 chained_from_outerjoin=False,
2123 **kwargs,
2124 ):
2125 """Add a left outer join to the statement that's being constructed."""
2126
2127 if not compile_state.compile_options._enable_eagerloads:
2128 return
2129 elif self.uselist:
2130 compile_state.multi_row_eager_loaders = True
2131
2132 path = path[self.parent_property]
2133
2134 with_polymorphic = None
2135
2136 user_defined_adapter = (
2137 self._init_user_defined_eager_proc(
2138 loadopt, compile_state, compile_state.attributes
2139 )
2140 if loadopt
2141 else False
2142 )
2143
2144 if user_defined_adapter is not False:
            # set up an adapter but don't create any JOIN; assume the
            # JOIN is already present in the query
2147 (
2148 clauses,
2149 adapter,
2150 add_to_collection,
2151 ) = self._setup_query_on_user_defined_adapter(
2152 compile_state,
2153 query_entity,
2154 path,
2155 adapter,
2156 user_defined_adapter,
2157 )
2158
            # don't do the "wrap" for multi-row here; since the JOIN is
            # assumed to be in the query already, there's no need to wrap
            # a limited/distinct SELECT in order to put the JOIN on the
            # outside.
2162
2163 else:
2164 # if not via query option, check for
2165 # a cycle
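            # e.g. (illustrative): for a self-referential Node.children
            # relationship configured with join_depth=2, the chain of
            # eager JOINs stops after two levels of Node; with no
            # join_depth configured, finding self.mapper already present
            # in the path stops the chain at the first repeat.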
2166 if not path.contains(compile_state.attributes, "loader"):
2167 if self.join_depth:
2168 if path.length / 2 > self.join_depth:
2169 return
2170 elif path.contains_mapper(self.mapper):
2171 return
2172
2173 # add the JOIN and create an adapter
2174 (
2175 clauses,
2176 adapter,
2177 add_to_collection,
2178 chained_from_outerjoin,
2179 ) = self._generate_row_adapter(
2180 compile_state,
2181 query_entity,
2182 path,
2183 loadopt,
2184 adapter,
2185 column_collection,
2186 parentmapper,
2187 chained_from_outerjoin,
2188 )
2189
2190 # for multi-row, we want to wrap limited/distinct SELECT,
2191 # because we want to put the JOIN on the outside.
2192 compile_state.eager_adding_joins = True
2193
2194 with_poly_entity = path.get(
2195 compile_state.attributes, "path_with_polymorphic", None
2196 )
2197 if with_poly_entity is not None:
2198 with_polymorphic = inspect(
2199 with_poly_entity
2200 ).with_polymorphic_mappers
2201 else:
2202 with_polymorphic = None
2203
2204 path = path[self.entity]
2205
2206 loading._setup_entity_query(
2207 compile_state,
2208 self.mapper,
2209 query_entity,
2210 path,
2211 clauses,
2212 add_to_collection,
2213 with_polymorphic=with_polymorphic,
2214 parentmapper=self.mapper,
2215 chained_from_outerjoin=chained_from_outerjoin,
2216 )
2217
2218 has_nones = util.NONE_SET.intersection(compile_state.secondary_columns)
2219
2220 if has_nones:
2221 if with_poly_entity is not None:
2222 raise sa_exc.InvalidRequestError(
2223 "Detected unaliased columns when generating joined "
2224 "load. Make sure to use aliased=True or flat=True "
2225 "when using joined loading with with_polymorphic()."
2226 )
2227 else:
2228 compile_state.secondary_columns = [
2229 c for c in compile_state.secondary_columns if c is not None
2230 ]
2231
2232 def _init_user_defined_eager_proc(
2233 self, loadopt, compile_state, target_attributes
2234 ):
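        # note: "eager_from_alias" is present in loadopt.local_opts when
        # the loader option in use is contains_eager() rather than
        # joinedload(), e.g. (illustrative names only):
        #
        #   q = (
        #       session.query(User)
        #       .join(User.addresses)
        #       .options(contains_eager(User.addresses))
        #   )
        #
        # where the user has set up the JOIN themselves and this strategy
        # only needs a row adapter against it, not a new JOIN.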
2235 # check if the opt applies at all
2236 if "eager_from_alias" not in loadopt.local_opts:
2237 # nope
2238 return False
2239
2240 path = loadopt.path.parent
2241
2242 # the option applies. check if the "user_defined_eager_row_processor"
2243 # has been built up.
2244 adapter = path.get(
2245 compile_state.attributes, "user_defined_eager_row_processor", False
2246 )
2247 if adapter is not False:
2248 # just return it
2249 return adapter
2250
2251 # otherwise figure it out.
2252 alias = loadopt.local_opts["eager_from_alias"]
2253 root_mapper, prop = path[-2:]
2254
2255 if alias is not None:
2256 if isinstance(alias, str):
2257 alias = prop.target.alias(alias)
2258 adapter = orm_util.ORMAdapter(
2259 orm_util._TraceAdaptRole.JOINEDLOAD_USER_DEFINED_ALIAS,
2260 prop.mapper,
2261 selectable=alias,
2262 equivalents=prop.mapper._equivalent_columns,
2263 limit_on_entity=False,
2264 )
2265 else:
2266 if path.contains(
2267 compile_state.attributes, "path_with_polymorphic"
2268 ):
2269 with_poly_entity = path.get(
2270 compile_state.attributes, "path_with_polymorphic"
2271 )
2272 adapter = orm_util.ORMAdapter(
2273 orm_util._TraceAdaptRole.JOINEDLOAD_PATH_WITH_POLYMORPHIC,
2274 with_poly_entity,
2275 equivalents=prop.mapper._equivalent_columns,
2276 )
2277 else:
2278 adapter = compile_state._polymorphic_adapters.get(
2279 prop.mapper, None
2280 )
2281 path.set(
2282 target_attributes,
2283 "user_defined_eager_row_processor",
2284 adapter,
2285 )
2286
2287 return adapter
2288
2289 def _setup_query_on_user_defined_adapter(
2290 self, context, entity, path, adapter, user_defined_adapter
2291 ):
2292 # apply some more wrapping to the "user defined adapter"
2293 # if we are setting up the query for SQL render.
2294 adapter = entity._get_entity_clauses(context)
2295
2296 if adapter and user_defined_adapter:
2297 user_defined_adapter = user_defined_adapter.wrap(adapter)
2298 path.set(
2299 context.attributes,
2300 "user_defined_eager_row_processor",
2301 user_defined_adapter,
2302 )
2303 elif adapter:
2304 user_defined_adapter = adapter
2305 path.set(
2306 context.attributes,
2307 "user_defined_eager_row_processor",
2308 user_defined_adapter,
2309 )
2310
2311 add_to_collection = context.primary_columns
2312 return user_defined_adapter, adapter, add_to_collection
2313
2314 def _generate_row_adapter(
2315 self,
2316 compile_state,
2317 entity,
2318 path,
2319 loadopt,
2320 adapter,
2321 column_collection,
2322 parentmapper,
2323 chained_from_outerjoin,
2324 ):
2325 with_poly_entity = path.get(
2326 compile_state.attributes, "path_with_polymorphic", None
2327 )
2328 if with_poly_entity:
2329 to_adapt = with_poly_entity
2330 else:
2331 insp = inspect(self.entity)
2332 if insp.is_aliased_class:
2333 alt_selectable = insp.selectable
2334 else:
2335 alt_selectable = None
2336
2337 to_adapt = orm_util.AliasedClass(
2338 self.mapper,
2339 alias=(
2340 alt_selectable._anonymous_fromclause(flat=True)
2341 if alt_selectable is not None
2342 else None
2343 ),
2344 flat=True,
2345 use_mapper_path=True,
2346 )
2347
2348 to_adapt_insp = inspect(to_adapt)
2349
2350 clauses = to_adapt_insp._memo(
2351 ("joinedloader_ormadapter", self),
2352 orm_util.ORMAdapter,
2353 orm_util._TraceAdaptRole.JOINEDLOAD_MEMOIZED_ADAPTER,
2354 to_adapt_insp,
2355 equivalents=self.mapper._equivalent_columns,
2356 adapt_required=True,
2357 allow_label_resolve=False,
2358 anonymize_labels=True,
2359 )
2360
2361 assert clauses.is_aliased_class
2362
2363 innerjoin = (
2364 loadopt.local_opts.get("innerjoin", self.parent_property.innerjoin)
2365 if loadopt is not None
2366 else self.parent_property.innerjoin
2367 )
2368
2369 if not innerjoin:
2370 # if this is an outer join, all non-nested eager joins from
2371 # this path must also be outer joins
2372 chained_from_outerjoin = True
2373
2374 compile_state.create_eager_joins.append(
2375 (
2376 self._create_eager_join,
2377 entity,
2378 path,
2379 adapter,
2380 parentmapper,
2381 clauses,
2382 innerjoin,
2383 chained_from_outerjoin,
2384 loadopt._extra_criteria if loadopt else (),
2385 )
2386 )
2387
2388 add_to_collection = compile_state.secondary_columns
2389 path.set(compile_state.attributes, "eager_row_processor", clauses)
2390
2391 return clauses, adapter, add_to_collection, chained_from_outerjoin
2392
2393 def _create_eager_join(
2394 self,
2395 compile_state,
2396 query_entity,
2397 path,
2398 adapter,
2399 parentmapper,
2400 clauses,
2401 innerjoin,
2402 chained_from_outerjoin,
2403 extra_criteria,
2404 ):
2405 if parentmapper is None:
2406 localparent = query_entity.mapper
2407 else:
2408 localparent = parentmapper
2409
2410 # whether or not the Query will wrap the selectable in a subquery,
2411 # and then attach eager load joins to that (i.e., in the case of
2412 # LIMIT/OFFSET etc.)
2413 should_nest_selectable = (
2414 compile_state.multi_row_eager_loaders
2415 and compile_state._should_nest_selectable
2416 )
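        # e.g. (roughly, with illustrative names): for a LIMITed parent
        # query, the eager JOIN is applied outside of a subquery so that
        # the limit applies to parent rows only:
        #
        #   SELECT anon_1.users_id, ..., addresses_1.*
        #   FROM (SELECT users.id AS users_id, ... FROM users LIMIT :n)
        #       AS anon_1
        #   LEFT OUTER JOIN addresses AS addresses_1
        #       ON anon_1.users_id = addresses_1.user_id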
2417
2418 query_entity_key = None
2419
2420 if (
2421 query_entity not in compile_state.eager_joins
2422 and not should_nest_selectable
2423 and compile_state.from_clauses
2424 ):
2425 indexes = sql_util.find_left_clause_that_matches_given(
2426 compile_state.from_clauses, query_entity.selectable
2427 )
2428
2429 if len(indexes) > 1:
2430 # for the eager load case, I can't reproduce this right
2431 # now. For query.join() I can.
                raise sa_exc.InvalidRequestError(
                    "Can't identify which query entity from which to "
                    "perform this joined eager load.  Please use an "
                    "exact match when specifying the join path."
                )
2437
2438 if indexes:
2439 clause = compile_state.from_clauses[indexes[0]]
2440 # join to an existing FROM clause on the query.
2441 # key it to its list index in the eager_joins dict.
2442 # Query._compile_context will adapt as needed and
2443 # append to the FROM clause of the select().
2444 query_entity_key, default_towrap = indexes[0], clause
2445
2446 if query_entity_key is None:
2447 query_entity_key, default_towrap = (
2448 query_entity,
2449 query_entity.selectable,
2450 )
2451
2452 towrap = compile_state.eager_joins.setdefault(
2453 query_entity_key, default_towrap
2454 )
2455
2456 if adapter:
2457 if getattr(adapter, "is_aliased_class", False):
2458 # joining from an adapted entity. The adapted entity
2459 # might be a "with_polymorphic", so resolve that to our
2460 # specific mapper's entity before looking for our attribute
2461 # name on it.
2462 efm = adapter.aliased_insp._entity_for_mapper(
2463 localparent
2464 if localparent.isa(self.parent)
2465 else self.parent
2466 )
2467
2468 # look for our attribute on the adapted entity, else fall back
2469 # to our straight property
2470 onclause = getattr(efm.entity, self.key, self.parent_property)
2471 else:
2472 onclause = getattr(
2473 orm_util.AliasedClass(
2474 self.parent, adapter.selectable, use_mapper_path=True
2475 ),
2476 self.key,
2477 self.parent_property,
2478 )
2479
2480 else:
2481 onclause = self.parent_property
2482
2483 assert clauses.is_aliased_class
2484
2485 attach_on_outside = (
2486 not chained_from_outerjoin
2487 or not innerjoin
2488 or innerjoin == "unnested"
2489 or query_entity.entity_zero.represents_outer_join
2490 )
2491
2492 extra_join_criteria = extra_criteria
2493 additional_entity_criteria = compile_state.global_attributes.get(
2494 ("additional_entity_criteria", self.mapper), ()
2495 )
2496 if additional_entity_criteria:
2497 extra_join_criteria += tuple(
2498 ae._resolve_where_criteria(self.mapper)
2499 for ae in additional_entity_criteria
2500 if ae.propagate_to_loaders
2501 )
2502
2503 if attach_on_outside:
2504 # this is the "classic" eager join case.
2505 eagerjoin = orm_util._ORMJoin(
2506 towrap,
2507 clauses.aliased_insp,
2508 onclause,
2509 isouter=not innerjoin
2510 or query_entity.entity_zero.represents_outer_join
2511 or (chained_from_outerjoin and isinstance(towrap, sql.Join)),
2512 _left_memo=self.parent,
2513 _right_memo=path[self.mapper],
2514 _extra_criteria=extra_join_criteria,
2515 )
2516 else:
            # all other cases use the innerjoin="nested" approach
2518 eagerjoin = self._splice_nested_inner_join(
2519 path, path[-2], towrap, clauses, onclause, extra_join_criteria
2520 )
2521
2522 compile_state.eager_joins[query_entity_key] = eagerjoin
2523
2524 # send a hint to the Query as to where it may "splice" this join
2525 eagerjoin.stop_on = query_entity.selectable
2526
2527 if not parentmapper:
            # for a parentclause that is the non-eager end of the join,
            # ensure all the parent cols in the primaryjoin are actually
            # in the columns clause (i.e. are not deferred), so that
            # aliasing applied by the Query propagates those columns
            # outward.  This has the effect of "undefering" those
            # columns.
2535 for col in sql_util._find_columns(
2536 self.parent_property.primaryjoin
2537 ):
2538 if localparent.persist_selectable.c.contains_column(col):
2539 if adapter:
2540 col = adapter.columns[col]
2541 compile_state._append_dedupe_col_collection(
2542 col, compile_state.primary_columns
2543 )
2544
2545 if self.parent_property.order_by:
2546 compile_state.eager_order_by += tuple(
2547 (eagerjoin._target_adapter.copy_and_process)(
2548 util.to_list(self.parent_property.order_by)
2549 )
2550 )
2551
2552 def _splice_nested_inner_join(
2553 self,
2554 path,
2555 entity_we_want_to_splice_onto,
2556 join_obj,
2557 clauses,
2558 onclause,
2559 extra_criteria,
2560 entity_inside_join_structure: Union[
2561 Mapper, None, Literal[False]
2562 ] = False,
2563 detected_existing_path: Optional[path_registry.PathRegistry] = None,
2564 ):
2565 # recursive fn to splice a nested join into an existing one.
2566 # entity_inside_join_structure=False means this is the outermost call,
2567 # and it should return a value. entity_inside_join_structure=<mapper>
2568 # indicates we've descended into a join and are looking at a FROM
2569 # clause representing this mapper; if this is not
2570 # entity_we_want_to_splice_onto then return None to end the recursive
2571 # branch
2572
2573 assert entity_we_want_to_splice_onto is path[-2]
2574
2575 if entity_inside_join_structure is False:
2576 assert isinstance(join_obj, orm_util._ORMJoin)
2577
2578 if isinstance(join_obj, sql.selectable.FromGrouping):
2579 # FromGrouping - continue descending into the structure
2580 return self._splice_nested_inner_join(
2581 path,
2582 entity_we_want_to_splice_onto,
2583 join_obj.element,
2584 clauses,
2585 onclause,
2586 extra_criteria,
2587 entity_inside_join_structure,
2588 )
2589 elif isinstance(join_obj, orm_util._ORMJoin):
2590 # _ORMJoin - continue descending into the structure
2591
2592 join_right_path = join_obj._right_memo
2593
2594 # see if right side of join is viable
2595 target_join = self._splice_nested_inner_join(
2596 path,
2597 entity_we_want_to_splice_onto,
2598 join_obj.right,
2599 clauses,
2600 onclause,
2601 extra_criteria,
2602 entity_inside_join_structure=(
2603 join_right_path[-1].mapper
2604 if join_right_path is not None
2605 else None
2606 ),
2607 )
2608
2609 if target_join is not None:
2610 # for a right splice, attempt to flatten out
2611 # a JOIN b JOIN c JOIN .. to avoid needless
2612 # parenthesis nesting
2613 if not join_obj.isouter and not target_join.isouter:
2614 eagerjoin = join_obj._splice_into_center(target_join)
2615 else:
2616 eagerjoin = orm_util._ORMJoin(
2617 join_obj.left,
2618 target_join,
2619 join_obj.onclause,
2620 isouter=join_obj.isouter,
2621 _left_memo=join_obj._left_memo,
2622 )
2623
2624 eagerjoin._target_adapter = target_join._target_adapter
2625 return eagerjoin
2626
2627 else:
2628 # see if left side of join is viable
2629 target_join = self._splice_nested_inner_join(
2630 path,
2631 entity_we_want_to_splice_onto,
2632 join_obj.left,
2633 clauses,
2634 onclause,
2635 extra_criteria,
2636 entity_inside_join_structure=join_obj._left_memo,
2637 detected_existing_path=join_right_path,
2638 )
2639
2640 if target_join is not None:
2641 eagerjoin = orm_util._ORMJoin(
2642 target_join,
2643 join_obj.right,
2644 join_obj.onclause,
2645 isouter=join_obj.isouter,
2646 _right_memo=join_obj._right_memo,
2647 )
2648 eagerjoin._target_adapter = target_join._target_adapter
2649 return eagerjoin
2650
        # neither side is viable; return None, or fail if this was the
        # topmost call
2653 if entity_inside_join_structure is False:
2654 assert (
2655 False
2656 ), "assertion failed attempting to produce joined eager loads"
2657 return None
2658
2659 # reached an endpoint (e.g. a table that's mapped, or an alias of that
2660 # table). determine if we can use this endpoint to splice onto
2661
2662 # is this the entity we want to splice onto in the first place?
2663 if not entity_we_want_to_splice_onto.isa(entity_inside_join_structure):
2664 return None
2665
        # path check.  if we know the path by which this join endpoint got
        # here, let's look at the path we are satisfying and see if we're
        # in the wrong place.  This is specifically for when our entity
        # may appear more than once in the path, issue #11449
2670 if detected_existing_path:
2671 # this assertion is currently based on how this call is made,
2672 # where given a join_obj, the call will have these parameters as
2673 # entity_inside_join_structure=join_obj._left_memo
2674 # and entity_inside_join_structure=join_obj._right_memo.mapper
2675 assert detected_existing_path[-3] is entity_inside_join_structure
2676
2677 # from that, see if the path we are targeting matches the
2678 # "existing" path of this join all the way up to the midpoint
2679 # of this join object (e.g. the relationship).
2680 # if not, then this is not our target
2681 #
2682 # a test condition where this test is false looks like:
2683 #
2684 # desired splice: Node->kind->Kind
2685 # path of desired splice: NodeGroup->nodes->Node->kind
2686 # path we've located: NodeGroup->nodes->Node->common_node->Node
2687 #
2688 # above, because we want to splice kind->Kind onto
2689 # NodeGroup->nodes->Node, this is not our path because it actually
2690 # goes more steps than we want into self-referential
2691 # ->common_node->Node
2692 #
2693 # a test condition where this test is true looks like:
2694 #
2695 # desired splice: B->c2s->C2
2696 # path of desired splice: A->bs->B->c2s
2697 # path we've located: A->bs->B->c1s->C1
2698 #
2699 # above, we want to splice c2s->C2 onto B, and the located path
2700 # shows that the join ends with B->c1s->C1. so we will
2701 # add another join onto that, which would create a "branch" that
2702 # we might represent in a pseudopath as:
2703 #
2704 # B->c1s->C1
2705 # ->c2s->C2
2706 #
2707 # i.e. A JOIN B ON <bs> JOIN C1 ON <c1s>
2708 # JOIN C2 ON <c2s>
2709 #
2710
2711 if detected_existing_path[0:-2] != path.path[0:-1]:
2712 return None
2713
2714 return orm_util._ORMJoin(
2715 join_obj,
2716 clauses.aliased_insp,
2717 onclause,
2718 isouter=False,
2719 _left_memo=entity_inside_join_structure,
2720 _right_memo=path[path[-1].mapper],
2721 _extra_criteria=extra_criteria,
2722 )
2723
2724 def _create_eager_adapter(self, context, result, adapter, path, loadopt):
2725 compile_state = context.compile_state
2726
2727 user_defined_adapter = (
2728 self._init_user_defined_eager_proc(
2729 loadopt, compile_state, context.attributes
2730 )
2731 if loadopt
2732 else False
2733 )
2734
2735 if user_defined_adapter is not False:
2736 decorator = user_defined_adapter
2737 # user defined eagerloads are part of the "primary"
2738 # portion of the load.
2739 # the adapters applied to the Query should be honored.
2740 if compile_state.compound_eager_adapter and decorator:
2741 decorator = decorator.wrap(
2742 compile_state.compound_eager_adapter
2743 )
2744 elif compile_state.compound_eager_adapter:
2745 decorator = compile_state.compound_eager_adapter
2746 else:
2747 decorator = path.get(
2748 compile_state.attributes, "eager_row_processor"
2749 )
2750 if decorator is None:
2751 return False
2752
2753 if self.mapper._result_has_identity_key(result, decorator):
2754 return decorator
2755 else:
2756 # no identity key - don't return a row
2757 # processor, will cause a degrade to lazy
2758 return False
2759
2760 def create_row_processor(
2761 self,
2762 context,
2763 query_entity,
2764 path,
2765 loadopt,
2766 mapper,
2767 result,
2768 adapter,
2769 populators,
2770 ):
2771 if not self.parent.class_manager[self.key].impl.supports_population:
2772 raise sa_exc.InvalidRequestError(
2773 "'%s' does not support object "
2774 "population - eager loading cannot be applied." % self
2775 )
2776
2777 if self.uselist:
2778 context.loaders_require_uniquing = True
2779
2780 our_path = path[self.parent_property]
2781
2782 eager_adapter = self._create_eager_adapter(
2783 context, result, adapter, our_path, loadopt
2784 )
2785
2786 if eager_adapter is not False:
2787 key = self.key
2788
2789 _instance = loading._instance_processor(
2790 query_entity,
2791 self.mapper,
2792 context,
2793 result,
2794 our_path[self.entity],
2795 eager_adapter,
2796 )
2797
2798 if not self.uselist:
2799 self._create_scalar_loader(context, key, _instance, populators)
2800 else:
2801 self._create_collection_loader(
2802 context, key, _instance, populators
2803 )
2804 else:
2805 self.parent_property._get_strategy(
2806 (("lazy", "select"),)
2807 ).create_row_processor(
2808 context,
2809 query_entity,
2810 path,
2811 loadopt,
2812 mapper,
2813 result,
2814 adapter,
2815 populators,
2816 )
2817
2818 def _create_collection_loader(self, context, key, _instance, populators):
2819 def load_collection_from_joined_new_row(state, dict_, row):
2820 # note this must unconditionally clear out any existing collection.
2821 # an existing collection would be present only in the case of
2822 # populate_existing().
2823 collection = attributes.init_state_collection(state, dict_, key)
2824 result_list = util.UniqueAppender(
2825 collection, "append_without_event"
2826 )
2827 context.attributes[(state, key)] = result_list
2828 inst = _instance(row)
2829 if inst is not None:
2830 result_list.append(inst)
2831
2832 def load_collection_from_joined_existing_row(state, dict_, row):
2833 if (state, key) in context.attributes:
2834 result_list = context.attributes[(state, key)]
2835 else:
2836 # appender_key can be absent from context.attributes
2837 # with isnew=False when self-referential eager loading
2838 # is used; the same instance may be present in two
2839 # distinct sets of result columns
2840 collection = attributes.init_state_collection(
2841 state, dict_, key
2842 )
2843 result_list = util.UniqueAppender(
2844 collection, "append_without_event"
2845 )
2846 context.attributes[(state, key)] = result_list
2847 inst = _instance(row)
2848 if inst is not None:
2849 result_list.append(inst)
2850
2851 def load_collection_from_joined_exec(state, dict_, row):
2852 _instance(row)
2853
2854 populators["new"].append(
2855 (self.key, load_collection_from_joined_new_row)
2856 )
2857 populators["existing"].append(
2858 (self.key, load_collection_from_joined_existing_row)
2859 )
2860 if context.invoke_all_eagers:
2861 populators["eager"].append(
2862 (self.key, load_collection_from_joined_exec)
2863 )
2864
2865 def _create_scalar_loader(self, context, key, _instance, populators):
2866 def load_scalar_from_joined_new_row(state, dict_, row):
2867 # set a scalar object instance directly on the parent
2868 # object, bypassing InstrumentedAttribute event handlers.
2869 dict_[key] = _instance(row)
2870
2871 def load_scalar_from_joined_existing_row(state, dict_, row):
2872 # call _instance on the row, even though the object has
2873 # been created, so that we further descend into properties
2874 existing = _instance(row)
2875
2876 # conflicting value already loaded, this shouldn't happen
2877 if key in dict_:
2878 if existing is not dict_[key]:
2879 util.warn(
2880 "Multiple rows returned with "
2881 "uselist=False for eagerly-loaded attribute '%s' "
2882 % self
2883 )
2884 else:
                # this case arises when one row has multiple loads of the
                # same entity (e.g. via aliasing), and one load has an
                # attribute that the other doesn't.
2888 dict_[key] = existing
2889
2890 def load_scalar_from_joined_exec(state, dict_, row):
2891 _instance(row)
2892
2893 populators["new"].append((self.key, load_scalar_from_joined_new_row))
2894 populators["existing"].append(
2895 (self.key, load_scalar_from_joined_existing_row)
2896 )
2897 if context.invoke_all_eagers:
2898 populators["eager"].append(
2899 (self.key, load_scalar_from_joined_exec)
2900 )
2901
2902
2903@log.class_logger
2904@relationships.RelationshipProperty.strategy_for(lazy="selectin")
2905class SelectInLoader(PostLoader, util.MemoizedSlots):
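    """Provide loading behavior for a :class:`.Relationship` using
    "SELECT IN" eager loading, i.e. an additional SELECT statement per
    relationship which loads related rows using an IN criteria against
    primary (or foreign) key values of the parent rows.

    """
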
2906 __slots__ = (
2907 "join_depth",
2908 "omit_join",
2909 "_parent_alias",
2910 "_query_info",
2911 "_fallback_query_info",
2912 )
2913
2914 query_info = collections.namedtuple(
2915 "queryinfo",
2916 [
2917 "load_only_child",
2918 "load_with_join",
2919 "in_expr",
2920 "pk_cols",
2921 "zero_idx",
2922 "child_lookup_cols",
2923 ],
2924 )
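
    # meaning of query_info fields (a minimal summary; see the
    # _init_for_* methods below for how each is assembled):
    #
    #   load_only_child   - True for the many-to-one "omit join" case,
    #                       where rows are looked up by the child's
    #                       primary key directly
    #   load_with_join    - True when the SELECT joins from an aliased
    #                       parent entity (the non-"omit join" case)
    #   in_expr           - column or tuple_() expression that receives
    #                       the IN criteria
    #   pk_cols           - columns selected (via a Bundle) to key the
    #                       result rows back to waiting parent states
    #   zero_idx          - True when the key is a single column, so
    #                       scalar keys are used rather than tuples
    #   child_lookup_cols - parent-side columns used to compute the
    #                       related identity in the "load only child"
    #                       case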
2925
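    # number of parent key values per emitted SELECT; larger sets of
    # parent states are loaded in chunks of this size (see
    # _load_via_parent() and _load_via_child())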
2926 _chunksize = 500
2927
2928 def __init__(self, parent, strategy_key):
2929 super().__init__(parent, strategy_key)
2930 self.join_depth = self.parent_property.join_depth
2931 is_m2o = self.parent_property.direction is interfaces.MANYTOONE
2932
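        # "omit_join" means the IN criteria can be expressed directly
        # against columns of the related table (its foreign key to the
        # parent, or its own primary key in the many-to-one case), so the
        # SELECT IN statement does not need to JOIN back to the parent
        # table; otherwise the statement joins from an aliased parent
        # (see _init_for_join()).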
2933 if self.parent_property.omit_join is not None:
2934 self.omit_join = self.parent_property.omit_join
2935 else:
2936 lazyloader = self.parent_property._get_strategy(
2937 (("lazy", "select"),)
2938 )
2939 if is_m2o:
2940 self.omit_join = lazyloader.use_get
2941 else:
2942 self.omit_join = self.parent._get_clause[0].compare(
2943 lazyloader._rev_lazywhere,
2944 use_proxies=True,
2945 compare_keys=False,
2946 equivalents=self.parent._equivalent_columns,
2947 )
2948
2949 if self.omit_join:
2950 if is_m2o:
2951 self._query_info = self._init_for_omit_join_m2o()
2952 self._fallback_query_info = self._init_for_join()
2953 else:
2954 self._query_info = self._init_for_omit_join()
2955 else:
2956 self._query_info = self._init_for_join()
2957
2958 def _init_for_omit_join(self):
2959 pk_to_fk = dict(
2960 self.parent_property._join_condition.local_remote_pairs
2961 )
2962 pk_to_fk.update(
2963 (equiv, pk_to_fk[k])
2964 for k in list(pk_to_fk)
2965 for equiv in self.parent._equivalent_columns.get(k, ())
2966 )
2967
2968 pk_cols = fk_cols = [
2969 pk_to_fk[col] for col in self.parent.primary_key if col in pk_to_fk
2970 ]
2971 if len(fk_cols) > 1:
2972 in_expr = sql.tuple_(*fk_cols)
2973 zero_idx = False
2974 else:
2975 in_expr = fk_cols[0]
2976 zero_idx = True
2977
2978 return self.query_info(False, False, in_expr, pk_cols, zero_idx, None)
2979
2980 def _init_for_omit_join_m2o(self):
2981 pk_cols = self.mapper.primary_key
2982 if len(pk_cols) > 1:
2983 in_expr = sql.tuple_(*pk_cols)
2984 zero_idx = False
2985 else:
2986 in_expr = pk_cols[0]
2987 zero_idx = True
2988
2989 lazyloader = self.parent_property._get_strategy((("lazy", "select"),))
2990 lookup_cols = [lazyloader._equated_columns[pk] for pk in pk_cols]
2991
2992 return self.query_info(
2993 True, False, in_expr, pk_cols, zero_idx, lookup_cols
2994 )
2995
2996 def _init_for_join(self):
2997 self._parent_alias = AliasedClass(self.parent.class_)
2998 pa_insp = inspect(self._parent_alias)
2999 pk_cols = [
3000 pa_insp._adapt_element(col) for col in self.parent.primary_key
3001 ]
3002 if len(pk_cols) > 1:
3003 in_expr = sql.tuple_(*pk_cols)
3004 zero_idx = False
3005 else:
3006 in_expr = pk_cols[0]
3007 zero_idx = True
3008 return self.query_info(False, True, in_expr, pk_cols, zero_idx, None)
3009
3010 def init_class_attribute(self, mapper):
3011 self.parent_property._get_strategy(
3012 (("lazy", "select"),)
3013 ).init_class_attribute(mapper)
3014
3015 def create_row_processor(
3016 self,
3017 context,
3018 query_entity,
3019 path,
3020 loadopt,
3021 mapper,
3022 result,
3023 adapter,
3024 populators,
3025 ):
3026 if context.refresh_state:
3027 return self._immediateload_create_row_processor(
3028 context,
3029 query_entity,
3030 path,
3031 loadopt,
3032 mapper,
3033 result,
3034 adapter,
3035 populators,
3036 )
3037
3038 (
3039 effective_path,
3040 run_loader,
3041 execution_options,
3042 recursion_depth,
3043 ) = self._setup_for_recursion(
3044 context, path, loadopt, join_depth=self.join_depth
3045 )
3046
3047 if not run_loader:
3048 return
3049
3050 if not self.parent.class_manager[self.key].impl.supports_population:
3051 raise sa_exc.InvalidRequestError(
3052 "'%s' does not support object "
3053 "population - eager loading cannot be applied." % self
3054 )
3055
        # a little dance here, as the "path" still only semi-tracks the
        # exact series of things we are loading, and does not tell us
        # about with_polymorphic() and the like when it's at the root;
        # the initial MapperEntity is more accurate for this case.
3060 if len(path) == 1:
3061 if not orm_util._entity_isa(query_entity.entity_zero, self.parent):
3062 return
3063 elif not orm_util._entity_isa(path[-1], self.parent):
3064 return
3065
3066 selectin_path = effective_path
3067
3068 path_w_prop = path[self.parent_property]
3069
        # build up a path from the leftmost entity to the thing
        # we're loading here via SELECT IN.
3072 with_poly_entity = path_w_prop.get(
3073 context.attributes, "path_with_polymorphic", None
3074 )
3075 if with_poly_entity is not None:
3076 effective_entity = inspect(with_poly_entity)
3077 else:
3078 effective_entity = self.entity
3079
3080 loading.PostLoad.callable_for_path(
3081 context,
3082 selectin_path,
3083 self.parent,
3084 self.parent_property,
3085 self._load_for_path,
3086 effective_entity,
3087 loadopt,
3088 recursion_depth,
3089 execution_options,
3090 )
3091
3092 def _load_for_path(
3093 self,
3094 context,
3095 path,
3096 states,
3097 load_only,
3098 effective_entity,
3099 loadopt,
3100 recursion_depth,
3101 execution_options,
3102 ):
3103 if load_only and self.key not in load_only:
3104 return
3105
3106 query_info = self._query_info
3107
3108 if query_info.load_only_child:
3109 our_states = collections.defaultdict(list)
3110 none_states = []
3111
3112 mapper = self.parent
3113
3114 for state, overwrite in states:
3115 state_dict = state.dict
3116 related_ident = tuple(
3117 mapper._get_state_attr_by_column(
3118 state,
3119 state_dict,
3120 lk,
3121 passive=attributes.PASSIVE_NO_FETCH,
3122 )
3123 for lk in query_info.child_lookup_cols
3124 )
3125 # if the loaded parent objects do not have the foreign key
3126 # to the related item loaded, then degrade into the joined
3127 # version of selectinload
3128 if LoaderCallableStatus.PASSIVE_NO_RESULT in related_ident:
3129 query_info = self._fallback_query_info
3130 break
3131
3132 # organize states into lists keyed to particular foreign
3133 # key values.
3134 if None not in related_ident:
3135 our_states[related_ident].append(
3136 (state, state_dict, overwrite)
3137 )
3138 else:
                    # For FK values that contain None, add the states to
                    # a separate collection that will be populated with
                    # an empty value afterwards
3141 none_states.append((state, state_dict, overwrite))
3142
3143 # note the above conditional may have changed query_info
3144 if not query_info.load_only_child:
3145 our_states = [
3146 (state.key[1], state, state.dict, overwrite)
3147 for state, overwrite in states
3148 ]
3149
3150 pk_cols = query_info.pk_cols
3151 in_expr = query_info.in_expr
3152
3153 if not query_info.load_with_join:
            # in "omit join" mode, the primary key columns and the
            # "in" expression are in terms of the related entity, so
            # if the related entity is polymorphic or otherwise aliased,
            # we need to adapt our "pk_cols" and "in_expr" to that
            # entity.  in non-"omit join" mode, these are against the
            # parent entity and do not need adaptation.
3160 if effective_entity.is_aliased_class:
3161 pk_cols = [
3162 effective_entity._adapt_element(col) for col in pk_cols
3163 ]
3164 in_expr = effective_entity._adapt_element(in_expr)
3165
3166 bundle_ent = orm_util.Bundle("pk", *pk_cols)
3167 bundle_sql = bundle_ent.__clause_element__()
3168
3169 entity_sql = effective_entity.__clause_element__()
3170 q = Select._create_raw_select(
3171 _raw_columns=[bundle_sql, entity_sql],
3172 _label_style=LABEL_STYLE_TABLENAME_PLUS_COL,
3173 _compile_options=ORMCompileState.default_compile_options,
3174 _propagate_attrs={
3175 "compile_state_plugin": "orm",
3176 "plugin_subject": effective_entity,
3177 },
3178 )
3179
3180 if not query_info.load_with_join:
            # the Bundle we have in the "omit_join" case is against raw,
            # non-annotated columns, so to ensure the Query knows its
            # primary entity, we add it explicitly.  If we made the Bundle
            # against annotated columns, we hit a performance issue in
            # this specific case, which is detailed in issue #4347.
3186 q = q.select_from(effective_entity)
3187 else:
3188 # in the non-omit_join case, the Bundle is against the annotated/
3189 # mapped column of the parent entity, but the #4347 issue does not
3190 # occur in this case.
3191 q = q.select_from(self._parent_alias).join(
3192 getattr(self._parent_alias, self.parent_property.key).of_type(
3193 effective_entity
3194 )
3195 )
3196
3197 q = q.filter(in_expr.in_(sql.bindparam("primary_keys")))
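        # at this point the statement, roughly speaking and with
        # illustrative names, looks like the following for the common
        # "omit join" one-to-many case:
        #
        #   SELECT addresses.user_id, addresses.*
        #   FROM addresses
        #   WHERE addresses.user_id IN (__[POSTCOMPILE_primary_keys])
        #
        # and like the following when load_with_join is set:
        #
        #   SELECT users_1.id, addresses.*
        #   FROM users AS users_1
        #   JOIN addresses ON users_1.id = addresses.user_id
        #   WHERE users_1.id IN (__[POSTCOMPILE_primary_keys])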
3198
3199 # a test which exercises what these comments talk about is
3200 # test_selectin_relations.py -> test_twolevel_selectin_w_polymorphic
3201 #
3202 # effective_entity above is given to us in terms of the cached
3203 # statement, namely this one:
3204 orig_query = context.compile_state.select_statement
3205
3206 # the actual statement that was requested is this one:
3207 # context_query = context.query
3208 #
        # that's not the cached one, however.  So while it is of identical
        # structure, if it has entities like AliasedInsp, which we get from
        # aliased() or with_polymorphic(), the AliasedInsp will likely be a
        # different object identity each time, and will not match up
        # hashing-wise to the corresponding AliasedInsp that's in the
        # cached query, meaning it won't match on paths and loader lookups,
        # and loaders like this one will be skipped if used in options.
3216 #
        # as it turns out, standard loader options like selectinload() and
        # lazyload(), which have a path, need to come from the cached query
        # so that the AliasedInsp etc. objects that are in the query line
        # up with the object that's in the path of the strategy object.
        # however, for other options like with_loader_criteria(), which
        # don't have a path (they have a fixed entity instead) and need
        # access to the latest closure state in order to be correct, we
        # need to use the uncached one.
3225 #
3226 # as of #8399 we let the loader option itself figure out what it
3227 # wants to do given cached and uncached version of itself.
3228
3229 effective_path = path[self.parent_property]
3230
3231 if orig_query is context.query:
3232 new_options = orig_query._with_options
3233 else:
3234 cached_options = orig_query._with_options
3235 uncached_options = context.query._with_options
3236
3237 # propagate compile state options from the original query,
3238 # updating their "extra_criteria" as necessary.
3239 # note this will create a different cache key than
3240 # "orig" options if extra_criteria is present, because the copy
3241 # of extra_criteria will have different boundparam than that of
3242 # the QueryableAttribute in the path
3243 new_options = [
3244 orig_opt._adapt_cached_option_to_uncached_option(
3245 context, uncached_opt
3246 )
3247 for orig_opt, uncached_opt in zip(
3248 cached_options, uncached_options
3249 )
3250 ]
3251
3252 if loadopt and loadopt._extra_criteria:
3253 new_options += (
3254 orm_util.LoaderCriteriaOption(
3255 effective_entity,
3256 loadopt._generate_extra_criteria(context),
3257 ),
3258 )
3259
3260 if recursion_depth is not None:
3261 effective_path = effective_path._truncate_recursive()
3262
3263 q = q.options(*new_options)
3264
3265 q = q._update_compile_options({"_current_path": effective_path})
3266 if context.populate_existing:
3267 q = q.execution_options(populate_existing=True)
3268
3269 if self.parent_property.order_by:
3270 if not query_info.load_with_join:
3271 eager_order_by = self.parent_property.order_by
3272 if effective_entity.is_aliased_class:
3273 eager_order_by = [
3274 effective_entity._adapt_element(elem)
3275 for elem in eager_order_by
3276 ]
3277 q = q.order_by(*eager_order_by)
3278 else:
3279
3280 def _setup_outermost_orderby(compile_context):
3281 compile_context.eager_order_by += tuple(
3282 util.to_list(self.parent_property.order_by)
3283 )
3284
3285 q = q._add_context_option(
3286 _setup_outermost_orderby, self.parent_property
3287 )
3288
3289 if query_info.load_only_child:
3290 self._load_via_child(
3291 our_states,
3292 none_states,
3293 query_info,
3294 q,
3295 context,
3296 execution_options,
3297 )
3298 else:
3299 self._load_via_parent(
3300 our_states, query_info, q, context, execution_options
3301 )
3302
3303 def _load_via_child(
3304 self,
3305 our_states,
3306 none_states,
3307 query_info,
3308 q,
3309 context,
3310 execution_options,
3311 ):
3312 uselist = self.uselist
3313
3314 # this sort is really for the benefit of the unit tests
3315 our_keys = sorted(our_states)
3316 while our_keys:
3317 chunk = our_keys[0 : self._chunksize]
3318 our_keys = our_keys[self._chunksize :]
3319 data = {
3320 k: v
3321 for k, v in context.session.execute(
3322 q,
3323 params={
3324 "primary_keys": [
3325 key[0] if query_info.zero_idx else key
3326 for key in chunk
3327 ]
3328 },
3329 execution_options=execution_options,
3330 ).unique()
3331 }
3332
3333 for key in chunk:
3334 # for a real foreign key and no concurrent changes to the
3335 # DB while running this method, "key" is always present in
3336 # data. However, for primaryjoins without real foreign keys
3337 # a non-None primaryjoin condition may still refer to no
3338 # related object.
3339 related_obj = data.get(key, None)
3340 for state, dict_, overwrite in our_states[key]:
3341 if not overwrite and self.key in dict_:
3342 continue
3343
3344 state.get_impl(self.key).set_committed_value(
3345 state,
3346 dict_,
3347 related_obj if not uselist else [related_obj],
3348 )
3349 # populate none states with empty value / collection
3350 for state, dict_, overwrite in none_states:
3351 if not overwrite and self.key in dict_:
3352 continue
3353
3354 # note it's OK if this is a uselist=True attribute, the empty
3355 # collection will be populated
3356 state.get_impl(self.key).set_committed_value(state, dict_, None)
3357
3358 def _load_via_parent(
3359 self, our_states, query_info, q, context, execution_options
3360 ):
3361 uselist = self.uselist
3362 _empty_result = () if uselist else None
3363
3364 while our_states:
3365 chunk = our_states[0 : self._chunksize]
3366 our_states = our_states[self._chunksize :]
3367
3368 primary_keys = [
3369 key[0] if query_info.zero_idx else key
3370 for key, state, state_dict, overwrite in chunk
3371 ]
3372
3373 data = collections.defaultdict(list)
3374 for k, v in itertools.groupby(
3375 context.session.execute(
3376 q,
3377 params={"primary_keys": primary_keys},
3378 execution_options=execution_options,
3379 ).unique(),
3380 lambda x: x[0],
3381 ):
3382 data[k].extend(vv[1] for vv in v)
3383
3384 for key, state, state_dict, overwrite in chunk:
3385 if not overwrite and self.key in state_dict:
3386 continue
3387
3388 collection = data.get(key, _empty_result)
3389
3390 if not uselist and collection:
3391 if len(collection) > 1:
3392 util.warn(
3393 "Multiple rows returned with "
3394 "uselist=False for eagerly-loaded "
3395 "attribute '%s' " % self
3396 )
3397 state.get_impl(self.key).set_committed_value(
3398 state, state_dict, collection[0]
3399 )
3400 else:
3401 # note that empty tuple set on uselist=False sets the
3402 # value to None
3403 state.get_impl(self.key).set_committed_value(
3404 state, state_dict, collection
3405 )
3406
3407
3408def single_parent_validator(desc, prop):
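    """Set up "append" and "set" attribute listeners that enforce the
    single_parent=True contract of a relationship; installed as a
    listen hook by _register_attribute().
    """
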
3409 def _do_check(state, value, oldvalue, initiator):
3410 if value is not None and initiator.key == prop.key:
3411 hasparent = initiator.hasparent(attributes.instance_state(value))
3412 if hasparent and oldvalue is not value:
3413 raise sa_exc.InvalidRequestError(
3414 "Instance %s is already associated with an instance "
3415 "of %s via its %s attribute, and is only allowed a "
3416 "single parent."
3417 % (orm_util.instance_str(value), state.class_, prop),
3418 code="bbf1",
3419 )
3420 return value
3421
3422 def append(state, value, initiator):
3423 return _do_check(state, value, None, initiator)
3424
3425 def set_(state, value, oldvalue, initiator):
3426 return _do_check(state, value, oldvalue, initiator)
3427
3428 event.listen(
3429 desc, "append", append, raw=True, retval=True, active_history=True
3430 )
3431 event.listen(desc, "set", set_, raw=True, retval=True, active_history=True)