1# engine/result.py
2# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
3# <see AUTHORS file>
4#
5# This module is part of SQLAlchemy and is released under
6# the MIT License: https://www.opensource.org/licenses/mit-license.php
7
8"""Define generic result set constructs."""
9
10
11import functools
12import itertools
13import operator
14
15from .row import _baserow_usecext
16from .row import Row
17from .. import exc
18from .. import util
19from ..sql.base import _generative
20from ..sql.base import HasMemoized
21from ..sql.base import InPlaceGenerative
22from ..util import collections_abc
23from ..util import py2k
24
25
26if _baserow_usecext:
27 from sqlalchemy.cresultproxy import tuplegetter
28
29 _row_as_tuple = tuplegetter
30else:
31
32 def tuplegetter(*indexes):
33 it = operator.itemgetter(*indexes)
34
35 if len(indexes) > 1:
36 return it
37 else:
38 return lambda row: (it(row),)
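    # e.g. tuplegetter(1, 2)(("a", "b", "c")) returns ("b", "c"),
    # and tuplegetter(1)(("a", "b", "c")) returns ("b",)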
39
40 def _row_as_tuple(*indexes):
41 # circumvent LegacyRow.__getitem__ pointing to
42 # _get_by_key_impl_mapping for now. otherwise we could
43 # use itemgetter
44 getters = [
45 operator.methodcaller("_get_by_int_impl", index)
46 for index in indexes
47 ]
48 return lambda rec: tuple([getter(rec) for getter in getters])
49
50
51class ResultMetaData(object):
52 """Base for metadata about result rows."""
53
54 __slots__ = ()
55
56 _tuplefilter = None
57 _translated_indexes = None
58 _unique_filters = None
59
60 @property
61 def keys(self):
62 return RMKeyView(self)
63
64 def _has_key(self, key):
65 raise NotImplementedError()
66
67 def _for_freeze(self):
68 raise NotImplementedError()
69
70 def _key_fallback(self, key, err, raiseerr=True):
71 assert raiseerr
72 util.raise_(KeyError(key), replace_context=err)
73
74 def _warn_for_nonint(self, key):
75 util.warn_deprecated_20(
76 "Retrieving row members using strings or other non-integers is "
77 "deprecated; use row._mapping for a dictionary interface "
78 "to the row"
79 )
80
81 def _raise_for_nonint(self, key):
82 raise TypeError(
83 "TypeError: tuple indices must be integers or slices, not %s"
84 % type(key).__name__
85 )
86
87 def _index_for_key(self, keys, raiseerr):
88 raise NotImplementedError()
89
90 def _metadata_for_keys(self, key):
91 raise NotImplementedError()
92
93 def _reduce(self, keys):
94 raise NotImplementedError()
95
96 def _getter(self, key, raiseerr=True):
97
98 index = self._index_for_key(key, raiseerr)
99
100 if index is not None:
101 return operator.itemgetter(index)
102 else:
103 return None
104
105 def _row_as_tuple_getter(self, keys):
106 indexes = self._indexes_for_keys(keys)
107 return _row_as_tuple(*indexes)
108
109
110class RMKeyView(collections_abc.KeysView):
111 __slots__ = ("_parent", "_keys")
112
113 def __init__(self, parent):
114 self._parent = parent
115 self._keys = [k for k in parent._keys if k is not None]
116
117 def __len__(self):
118 return len(self._keys)
119
120 def __repr__(self):
121 return "{0.__class__.__name__}({0._keys!r})".format(self)
122
123 def __iter__(self):
124 return iter(self._keys)
125
126 def __contains__(self, item):
127 if not _baserow_usecext and isinstance(item, int):
128 return False
129
130 # note this also includes special key fallback behaviors
131 # which also don't seem to be tested in test_resultset right now
132 return self._parent._has_key(item)
133
134 def __eq__(self, other):
135 return list(other) == list(self)
136
137 def __ne__(self, other):
138 return list(other) != list(self)
139
140
141class SimpleResultMetaData(ResultMetaData):
142 """result metadata for in-memory collections."""
143
144 __slots__ = (
145 "_keys",
146 "_keymap",
147 "_processors",
148 "_tuplefilter",
149 "_translated_indexes",
150 "_unique_filters",
151 )
152
153 def __init__(
154 self,
155 keys,
156 extra=None,
157 _processors=None,
158 _tuplefilter=None,
159 _translated_indexes=None,
160 _unique_filters=None,
161 ):
162 self._keys = list(keys)
163 self._tuplefilter = _tuplefilter
164 self._translated_indexes = _translated_indexes
165 self._unique_filters = _unique_filters
166
167 if extra:
168 recs_names = [
169 (
170 (name,) + (extras if extras else ()),
171 (index, name, extras),
172 )
173 for index, (name, extras) in enumerate(zip(self._keys, extra))
174 ]
175 else:
176 recs_names = [
177 ((name,), (index, name, ()))
178 for index, name in enumerate(self._keys)
179 ]
180
181 self._keymap = {key: rec for keys, rec in recs_names for key in keys}
182
183 self._processors = _processors
184
185 def _has_key(self, key):
186 return key in self._keymap
187
188 def _for_freeze(self):
189 unique_filters = self._unique_filters
190 if unique_filters and self._tuplefilter:
191 unique_filters = self._tuplefilter(unique_filters)
192
193 # TODO: are we freezing the result with or without uniqueness
194 # applied?
195 return SimpleResultMetaData(
196 self._keys,
197 extra=[self._keymap[key][2] for key in self._keys],
198 _unique_filters=unique_filters,
199 )
200
201 def __getstate__(self):
202 return {
203 "_keys": self._keys,
204 "_translated_indexes": self._translated_indexes,
205 }
206
207 def __setstate__(self, state):
208 if state["_translated_indexes"]:
209 _translated_indexes = state["_translated_indexes"]
210 _tuplefilter = tuplegetter(*_translated_indexes)
211 else:
212 _translated_indexes = _tuplefilter = None
213 self.__init__(
214 state["_keys"],
215 _translated_indexes=_translated_indexes,
216 _tuplefilter=_tuplefilter,
217 )
218
219 def _contains(self, value, row):
220 return value in row._data
221
222 def _index_for_key(self, key, raiseerr=True):
223 if int in key.__class__.__mro__:
224 key = self._keys[key]
225 try:
226 rec = self._keymap[key]
227 except KeyError as ke:
228 rec = self._key_fallback(key, ke, raiseerr)
229
230 return rec[0]
231
232 def _indexes_for_keys(self, keys):
233 return [self._keymap[key][0] for key in keys]
234
235 def _metadata_for_keys(self, keys):
236 for key in keys:
237 if int in key.__class__.__mro__:
238 key = self._keys[key]
239
240 try:
241 rec = self._keymap[key]
242 except KeyError as ke:
243 rec = self._key_fallback(key, ke, True)
244
245 yield rec
246
247 def _reduce(self, keys):
248 try:
249 metadata_for_keys = [
250 self._keymap[
251 self._keys[key] if int in key.__class__.__mro__ else key
252 ]
253 for key in keys
254 ]
255 except KeyError as ke:
256 self._key_fallback(ke.args[0], ke, True)
257
258 indexes, new_keys, extra = zip(*metadata_for_keys)
259
260 if self._translated_indexes:
261 indexes = [self._translated_indexes[idx] for idx in indexes]
262
263 tup = tuplegetter(*indexes)
264
265 new_metadata = SimpleResultMetaData(
266 new_keys,
267 extra=extra,
268 _tuplefilter=tup,
269 _translated_indexes=indexes,
270 _processors=self._processors,
271 _unique_filters=self._unique_filters,
272 )
273
274 return new_metadata
275
276
277def result_tuple(fields, extra=None):
278 parent = SimpleResultMetaData(fields, extra)
279 return functools.partial(
280 Row, parent, parent._processors, parent._keymap, Row._default_key_style
281 )
282
283
284# a symbol that indicates to internal Result methods that
285# "no row is returned". We can't use None for those cases where a scalar
286# filter is applied to rows.
287_NO_ROW = util.symbol("NO_ROW")
288
289
290class ResultInternal(InPlaceGenerative):
291 _real_result = None
292 _generate_rows = True
293 _unique_filter_state = None
294 _post_creational_filter = None
295 _is_cursor = False
296
297 @HasMemoized.memoized_attribute
298 def _row_getter(self):
299 real_result = self._real_result if self._real_result else self
300
301 if real_result._source_supports_scalars:
302 if not self._generate_rows:
303 return None
304 else:
305 _proc = real_result._process_row
306
307 def process_row(
308 metadata, processors, keymap, key_style, scalar_obj
309 ):
310 return _proc(
311 metadata, processors, keymap, key_style, (scalar_obj,)
312 )
313
314 else:
315 process_row = real_result._process_row
316
317 key_style = real_result._process_row._default_key_style
318 metadata = self._metadata
319
320 keymap = metadata._keymap
321 processors = metadata._processors
322 tf = metadata._tuplefilter
323
324 if tf and not real_result._source_supports_scalars:
325 if processors:
326 processors = tf(processors)
327
328 _make_row_orig = functools.partial(
329 process_row, metadata, processors, keymap, key_style
330 )
331
332 def make_row(row):
333 return _make_row_orig(tf(row))
334
335 else:
336 make_row = functools.partial(
337 process_row, metadata, processors, keymap, key_style
338 )
339
        if real_result._row_logging_fn:
            fns = (real_result._row_logging_fn,)
        else:
            fns = ()
346
347 if fns:
348 _make_row = make_row
349
350 def make_row(row):
351 row = _make_row(row)
352 for fn in fns:
353 row = fn(row)
354 return row
355
356 return make_row
357
358 @HasMemoized.memoized_attribute
359 def _iterator_getter(self):
360
361 make_row = self._row_getter
362
363 post_creational_filter = self._post_creational_filter
364
365 if self._unique_filter_state:
366 uniques, strategy = self._unique_strategy
367
368 def iterrows(self):
369 for row in self._fetchiter_impl():
370 obj = make_row(row) if make_row else row
371 hashed = strategy(obj) if strategy else obj
372 if hashed in uniques:
373 continue
374 uniques.add(hashed)
375 if post_creational_filter:
376 obj = post_creational_filter(obj)
377 yield obj
378
379 else:
380
381 def iterrows(self):
382 for row in self._fetchiter_impl():
383 row = make_row(row) if make_row else row
384 if post_creational_filter:
385 row = post_creational_filter(row)
386 yield row
387
388 return iterrows
389
390 def _raw_all_rows(self):
391 make_row = self._row_getter
392 rows = self._fetchall_impl()
393 return [make_row(row) for row in rows]
394
395 def _allrows(self):
396
397 post_creational_filter = self._post_creational_filter
398
399 make_row = self._row_getter
400
401 rows = self._fetchall_impl()
402 if make_row:
403 made_rows = [make_row(row) for row in rows]
404 else:
405 made_rows = rows
406
407 if self._unique_filter_state:
408 uniques, strategy = self._unique_strategy
409
410 rows = [
411 made_row
412 for made_row, sig_row in [
413 (
414 made_row,
415 strategy(made_row) if strategy else made_row,
416 )
417 for made_row in made_rows
418 ]
419 if sig_row not in uniques and not uniques.add(sig_row)
420 ]
421 else:
422 rows = made_rows
423
424 if post_creational_filter:
425 rows = [post_creational_filter(row) for row in rows]
426 return rows
427
428 @HasMemoized.memoized_attribute
429 def _onerow_getter(self):
430 make_row = self._row_getter
431
432 post_creational_filter = self._post_creational_filter
433
434 if self._unique_filter_state:
435 uniques, strategy = self._unique_strategy
436
437 def onerow(self):
438 _onerow = self._fetchone_impl
439 while True:
440 row = _onerow()
441 if row is None:
442 return _NO_ROW
443 else:
444 obj = make_row(row) if make_row else row
445 hashed = strategy(obj) if strategy else obj
446 if hashed in uniques:
447 continue
448 else:
449 uniques.add(hashed)
450 if post_creational_filter:
451 obj = post_creational_filter(obj)
452 return obj
453
454 else:
455
456 def onerow(self):
457 row = self._fetchone_impl()
458 if row is None:
459 return _NO_ROW
460 else:
461 row = make_row(row) if make_row else row
462 if post_creational_filter:
463 row = post_creational_filter(row)
464 return row
465
466 return onerow
467
468 @HasMemoized.memoized_attribute
469 def _manyrow_getter(self):
470 make_row = self._row_getter
471
472 post_creational_filter = self._post_creational_filter
473
474 if self._unique_filter_state:
475 uniques, strategy = self._unique_strategy
476
477 def filterrows(make_row, rows, strategy, uniques):
478 if make_row:
479 rows = [make_row(row) for row in rows]
480
481 if strategy:
482 made_rows = (
483 (made_row, strategy(made_row)) for made_row in rows
484 )
485 else:
486 made_rows = ((made_row, made_row) for made_row in rows)
487 return [
488 made_row
489 for made_row, sig_row in made_rows
490 if sig_row not in uniques and not uniques.add(sig_row)
491 ]
492
493 def manyrows(self, num):
494 collect = []
495
496 _manyrows = self._fetchmany_impl
497
498 if num is None:
499 # if None is passed, we don't know the default
500 # manyrows number, DBAPI has this as cursor.arraysize
501 # different DBAPIs / fetch strategies may be different.
502 # do a fetch to find what the number is. if there are
503 # only fewer rows left, then it doesn't matter.
504 real_result = (
505 self._real_result if self._real_result else self
506 )
507 if real_result._yield_per:
508 num_required = num = real_result._yield_per
509 else:
510 rows = _manyrows(num)
511 num = len(rows)
512 collect.extend(
513 filterrows(make_row, rows, strategy, uniques)
514 )
515 num_required = num - len(collect)
516 else:
517 num_required = num
518
519 while num_required:
520 rows = _manyrows(num_required)
521 if not rows:
522 break
523
524 collect.extend(
525 filterrows(make_row, rows, strategy, uniques)
526 )
527 num_required = num - len(collect)
528
529 if post_creational_filter:
530 collect = [post_creational_filter(row) for row in collect]
531 return collect
532
533 else:
534
535 def manyrows(self, num):
536 if num is None:
537 real_result = (
538 self._real_result if self._real_result else self
539 )
540 num = real_result._yield_per
541
542 rows = self._fetchmany_impl(num)
543 if make_row:
544 rows = [make_row(row) for row in rows]
545 if post_creational_filter:
546 rows = [post_creational_filter(row) for row in rows]
547 return rows
548
549 return manyrows
550
551 def _only_one_row(
552 self,
553 raise_for_second_row,
554 raise_for_none,
555 scalar,
556 ):
557 onerow = self._fetchone_impl
558
559 row = onerow(hard_close=True)
560 if row is None:
561 if raise_for_none:
562 raise exc.NoResultFound(
563 "No row was found when one was required"
564 )
565 else:
566 return None
567
568 if scalar and self._source_supports_scalars:
569 self._generate_rows = False
570 make_row = None
571 else:
572 make_row = self._row_getter
573
574 try:
575 row = make_row(row) if make_row else row
576 except:
577 self._soft_close(hard=True)
578 raise
579
580 if raise_for_second_row:
581 if self._unique_filter_state:
582 # for no second row but uniqueness, need to essentially
583 # consume the entire result :(
584 uniques, strategy = self._unique_strategy
585
586 existing_row_hash = strategy(row) if strategy else row
587
588 while True:
589 next_row = onerow(hard_close=True)
590 if next_row is None:
591 next_row = _NO_ROW
592 break
593
594 try:
595 next_row = make_row(next_row) if make_row else next_row
596
597 if strategy:
598 if existing_row_hash == strategy(next_row):
599 continue
600 elif row == next_row:
601 continue
602 # here, we have a row and it's different
603 break
604 except:
605 self._soft_close(hard=True)
606 raise
607 else:
608 next_row = onerow(hard_close=True)
609 if next_row is None:
610 next_row = _NO_ROW
611
612 if next_row is not _NO_ROW:
613 self._soft_close(hard=True)
614 raise exc.MultipleResultsFound(
615 "Multiple rows were found when exactly one was required"
616 if raise_for_none
617 else "Multiple rows were found when one or none "
618 "was required"
619 )
620 else:
621 next_row = _NO_ROW
622 # if we checked for second row then that would have
623 # closed us :)
624 self._soft_close(hard=True)
625
626 if not scalar:
627 post_creational_filter = self._post_creational_filter
628 if post_creational_filter:
629 row = post_creational_filter(row)
630
631 if scalar and make_row:
632 return row[0]
633 else:
634 return row
635
636 def _iter_impl(self):
637 return self._iterator_getter(self)
638
639 def _next_impl(self):
640 row = self._onerow_getter(self)
641 if row is _NO_ROW:
642 raise StopIteration()
643 else:
644 return row
645
646 @_generative
647 def _column_slices(self, indexes):
648 real_result = self._real_result if self._real_result else self
649
650 if real_result._source_supports_scalars and len(indexes) == 1:
651 util.warn_deprecated(
652 "The Result.columns() method has a bug in SQLAlchemy 1.4 that "
653 "is causing it to yield scalar values, rather than Row "
654 "objects, in the case where a single index is passed and the "
655 "result is against ORM mapped objects. In SQLAlchemy 2.0, "
656 "Result will continue yield Row objects in this scenario. "
657 "Use the Result.scalars() method to yield scalar values.",
658 "2.0",
659 )
660 self._generate_rows = False
661 else:
662 self._generate_rows = True
663 self._metadata = self._metadata._reduce(indexes)
664
665 @HasMemoized.memoized_attribute
666 def _unique_strategy(self):
667 uniques, strategy = self._unique_filter_state
668
669 real_result = (
670 self._real_result if self._real_result is not None else self
671 )
672
673 if not strategy and self._metadata._unique_filters:
674 if (
675 real_result._source_supports_scalars
676 and not self._generate_rows
677 ):
678 strategy = self._metadata._unique_filters[0]
679 else:
680 filters = self._metadata._unique_filters
681 if self._metadata._tuplefilter:
682 filters = self._metadata._tuplefilter(filters)
683
684 strategy = operator.methodcaller("_filter_on_values", filters)
685 return uniques, strategy
686
687
688class _WithKeys(object):
689 # used mainly to share documentation on the keys method.
690 # py2k does not allow overriding the __doc__ attribute.
691 def keys(self):
692 """Return an iterable view which yields the string keys that would
693 be represented by each :class:`_engine.Row`.
694
695 The keys can represent the labels of the columns returned by a core
696 statement or the names of the orm classes returned by an orm
697 execution.
698
The view can also be tested for key containment using the Python
``in`` operator, which will test both for the string keys represented
in the view as well as for alternate keys such as column objects.
702
703 .. versionchanged:: 1.4 a key view object is returned rather than a
704 plain list.
705
706
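For example, a brief sketch, assuming a connection ``conn`` and a
statement that selects columns named ``x`` and ``y`` (illustrative
names only)::

    result = conn.execute(text("SELECT x, y FROM some_table"))

    list(result.keys())     # ["x", "y"]
    "x" in result.keys()    # True
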
707 """
708 return self._metadata.keys
709
710
711class Result(_WithKeys, ResultInternal):
712 """Represent a set of database results.
713
714 .. versionadded:: 1.4 The :class:`_engine.Result` object provides a
715 completely updated usage model and calling facade for SQLAlchemy
716 Core and SQLAlchemy ORM. In Core, it forms the basis of the
717 :class:`_engine.CursorResult` object which replaces the previous
718 :class:`_engine.ResultProxy` interface. When using the ORM, a
719 higher level object called :class:`_engine.ChunkedIteratorResult`
720 is normally used.
721
722 .. note:: In SQLAlchemy 1.4 and above, this object is
723 used for ORM results returned by :meth:`_orm.Session.execute`, which can
724 yield instances of ORM mapped objects either individually or within
725 tuple-like rows. Note that the :class:`_engine.Result` object does not
726 deduplicate instances or rows automatically as is the case with the
727 legacy :class:`_orm.Query` object. For in-Python de-duplication of
728 instances or rows, use the :meth:`_engine.Result.unique` modifier
729 method.
730
731 .. seealso::
732
733 :ref:`tutorial_fetching_rows` - in the :doc:`/tutorial/index`
734
735 """
736
737 _process_row = Row
738
739 _row_logging_fn = None
740
741 _source_supports_scalars = False
742
743 _yield_per = None
744
745 _attributes = util.immutabledict()
746
747 def __init__(self, cursor_metadata):
748 self._metadata = cursor_metadata
749
750 def _soft_close(self, hard=False):
751 raise NotImplementedError()
752
753 def close(self):
754 """close this :class:`_engine.Result`.
755
756 The behavior of this method is implementation specific, and is
757 not implemented by default. The method should generally end
758 the resources in use by the result object and also cause any
759 subsequent iteration or row fetching to raise
760 :class:`.ResourceClosedError`.
761
762 .. versionadded:: 1.4.27 - ``.close()`` was previously not generally
763 available for all :class:`_engine.Result` classes, instead only
764 being available on the :class:`_engine.CursorResult` returned for
765 Core statement executions. As most other result objects, namely the
766 ones used by the ORM, are proxying a :class:`_engine.CursorResult`
767 in any case, this allows the underlying cursor result to be closed
768 from the outside facade for the case when the ORM query is using
769 the ``yield_per`` execution option where it does not immediately
770 exhaust and autoclose the database cursor.
771
772 """
773 self._soft_close(hard=True)
774
775 @property
776 def _soft_closed(self):
777 raise NotImplementedError()
778
779 @property
780 def closed(self):
781 """return ``True`` if this :class:`_engine.Result` reports .closed
782
783 .. versionadded:: 1.4.43
784
785 """
786 raise NotImplementedError()
787
788 @_generative
789 def yield_per(self, num):
790 """Configure the row-fetching strategy to fetch ``num`` rows at a time.
791
792 This impacts the underlying behavior of the result when iterating over
793 the result object, or otherwise making use of methods such as
794 :meth:`_engine.Result.fetchone` that return one row at a time. Data
795 from the underlying cursor or other data source will be buffered up to
796 this many rows in memory, and the buffered collection will then be
yielded out one row at a time or as many rows as are requested. Each
time the buffer is cleared, it will be refilled with this many rows, or
with as many rows as remain if fewer remain.
800
801 The :meth:`_engine.Result.yield_per` method is generally used in
802 conjunction with the
803 :paramref:`_engine.Connection.execution_options.stream_results`
804 execution option, which will allow the database dialect in use to make
805 use of a server side cursor, if the DBAPI supports a specific "server
806 side cursor" mode separate from its default mode of operation.
807
808 .. tip::
809
810 Consider using the
811 :paramref:`_engine.Connection.execution_options.yield_per`
812 execution option, which will simultaneously set
813 :paramref:`_engine.Connection.execution_options.stream_results`
814 to ensure the use of server side cursors, as well as automatically
815 invoke the :meth:`_engine.Result.yield_per` method to establish
816 a fixed row buffer size at once.
817
818 The :paramref:`_engine.Connection.execution_options.yield_per`
819 execution option is available for ORM operations, with
820 :class:`_orm.Session`-oriented use described at
821 :ref:`orm_queryguide_yield_per`. The Core-only version which works
822 with :class:`_engine.Connection` is new as of SQLAlchemy 1.4.40.
823
824 .. versionadded:: 1.4
825
826 :param num: number of rows to fetch each time the buffer is refilled.
827 If set to a value below 1, fetches all rows for the next buffer.
828
829 .. seealso::
830
831 :ref:`engine_stream_results` - describes Core behavior for
832 :meth:`_engine.Result.yield_per`
833
834 :ref:`orm_queryguide_yield_per` - in the :ref:`queryguide_toplevel`
835
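For example, a minimal sketch, assuming an existing
:class:`_engine.Connection` ``conn`` and a hypothetical ``measurements``
table::

    result = (
        conn.execution_options(stream_results=True)
        .execute(text("SELECT id, value FROM measurements"))
        .yield_per(100)
    )

    for row in result:
        ...  # rows are buffered from the cursor 100 at a time
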
836 """
837 self._yield_per = num
838
839 @_generative
840 def unique(self, strategy=None):
841 """Apply unique filtering to the objects returned by this
842 :class:`_engine.Result`.
843
When this filter is applied with no arguments, the rows or objects
returned will be filtered such that each row is returned uniquely. The
algorithm used to determine this uniqueness is by default the Python
hashing identity of the whole tuple. In some cases a specialized
per-entity hashing scheme may be used; for example, when using the ORM,
a scheme is applied which works against the primary key identity of the
returned objects.
851
852 The unique filter is applied **after all other filters**, which means
853 if the columns returned have been refined using a method such as the
854 :meth:`_engine.Result.columns` or :meth:`_engine.Result.scalars`
855 method, the uniquing is applied to **only the column or columns
856 returned**. This occurs regardless of the order in which these
857 methods have been called upon the :class:`_engine.Result` object.
858
859 The unique filter also changes the calculus used for methods like
860 :meth:`_engine.Result.fetchmany` and :meth:`_engine.Result.partitions`.
861 When using :meth:`_engine.Result.unique`, these methods will continue
862 to yield the number of rows or objects requested, after uniquing
863 has been applied. However, this necessarily impacts the buffering
864 behavior of the underlying cursor or datasource, such that multiple
underlying calls to ``cursor.fetchmany()`` may be necessary in order
to accumulate enough objects to provide a unique collection of the
requested size.
868
869 :param strategy: a callable that will be applied to rows or objects
870 being iterated, which should return an object that represents the
871 unique value of the row. A Python ``set()`` is used to store
872 these identities. If not passed, a default uniqueness strategy
873 is used which may have been assembled by the source of this
874 :class:`_engine.Result` object.
875
876 """
877 self._unique_filter_state = (set(), strategy)
878
879 def columns(self, *col_expressions):
880 r"""Establish the columns that should be returned in each row.
881
882 This method may be used to limit the columns returned as well
883 as to reorder them. The given list of expressions are normally
884 a series of integers or string key names. They may also be
885 appropriate :class:`.ColumnElement` objects which correspond to
886 a given statement construct.
887
888 E.g.::
889
890 statement = select(table.c.x, table.c.y, table.c.z)
891 result = connection.execute(statement)
892
893 for z, y in result.columns('z', 'y'):
894 # ...
895
896
897 Example of using the column objects from the statement itself::
898
899 for z, y in result.columns(
900 statement.selected_columns.c.z,
901 statement.selected_columns.c.y
902 ):
903 # ...
904
905 .. versionadded:: 1.4
906
907 :param \*col_expressions: indicates columns to be returned. Elements
908 may be integer row indexes, string column names, or appropriate
909 :class:`.ColumnElement` objects corresponding to a select construct.
910
911 :return: this :class:`_engine.Result` object with the modifications
912 given.
913
914 """
915 return self._column_slices(col_expressions)
916
917 def scalars(self, index=0):
918 """Return a :class:`_engine.ScalarResult` filtering object which
will return single elements rather than :class:`_engine.Row` objects.
920
921 E.g.::
922
923 >>> result = conn.execute(text("select int_id from table"))
924 >>> result.scalars().all()
925 [1, 2, 3]
926
When results are fetched from the :class:`_engine.ScalarResult`
filtering object, the single column that would be delivered in each
:class:`_engine.Row` is instead returned as that column's value.
930
931 .. versionadded:: 1.4
932
933 :param index: integer or row key indicating the column to be fetched
934 from each row, defaults to ``0`` indicating the first column.
935
936 :return: a new :class:`_engine.ScalarResult` filtering object referring
937 to this :class:`_engine.Result` object.
938
939 """
940 return ScalarResult(self, index)
941
942 def _getter(self, key, raiseerr=True):
943 """return a callable that will retrieve the given key from a
944 :class:`_engine.Row`.
945
946 """
947 if self._source_supports_scalars:
948 raise NotImplementedError(
949 "can't use this function in 'only scalars' mode"
950 )
951 return self._metadata._getter(key, raiseerr)
952
953 def _tuple_getter(self, keys):
954 """return a callable that will retrieve the given keys from a
955 :class:`_engine.Row`.
956
957 """
958 if self._source_supports_scalars:
959 raise NotImplementedError(
960 "can't use this function in 'only scalars' mode"
961 )
962 return self._metadata._row_as_tuple_getter(keys)
963
964 def mappings(self):
965 """Apply a mappings filter to returned rows, returning an instance of
966 :class:`_engine.MappingResult`.
967
968 When this filter is applied, fetching rows will return
969 :class:`_engine.RowMapping` objects instead of :class:`_engine.Row`
970 objects.
971
972 .. versionadded:: 1.4
973
974 :return: a new :class:`_engine.MappingResult` filtering object
975 referring to this :class:`_engine.Result` object.
976
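For example, a brief sketch, assuming a connection ``conn`` and a
hypothetical ``user`` table::

    result = conn.execute(text("SELECT id, name FROM user"))

    for row_mapping in result.mappings():
        print(row_mapping["id"], row_mapping["name"])
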
977 """
978
979 return MappingResult(self)
980
981 def _raw_row_iterator(self):
982 """Return a safe iterator that yields raw row data.
983
984 This is used by the :meth:`_engine.Result.merge` method
985 to merge multiple compatible results together.
986
987 """
988 raise NotImplementedError()
989
990 def _fetchiter_impl(self):
991 raise NotImplementedError()
992
993 def _fetchone_impl(self, hard_close=False):
994 raise NotImplementedError()
995
996 def _fetchall_impl(self):
997 raise NotImplementedError()
998
999 def _fetchmany_impl(self, size=None):
1000 raise NotImplementedError()
1001
1002 def __iter__(self):
1003 return self._iter_impl()
1004
1005 def __next__(self):
1006 return self._next_impl()
1007
1008 if py2k:
1009
1010 def next(self): # noqa
1011 return self._next_impl()
1012
1013 def partitions(self, size=None):
1014 """Iterate through sub-lists of rows of the size given.
1015
1016 Each list will be of the size given, excluding the last list to
be yielded, which may have a smaller number of rows. No empty
1018 lists will be yielded.
1019
1020 The result object is automatically closed when the iterator
1021 is fully consumed.
1022
1023 Note that the backend driver will usually buffer the entire result
1024 ahead of time unless the
1025 :paramref:`.Connection.execution_options.stream_results` execution
1026 option is used indicating that the driver should not pre-buffer
1027 results, if possible. Not all drivers support this option and
1028 the option is silently ignored for those who do not.
1029
1030 When using the ORM, the :meth:`_engine.Result.partitions` method
1031 is typically more effective from a memory perspective when it is
1032 combined with use of the
1033 :ref:`yield_per execution option <orm_queryguide_yield_per>`,
1034 which instructs both the DBAPI driver to use server side cursors,
1035 if available, as well as instructs the ORM loading internals to only
1036 build a certain amount of ORM objects from a result at a time before
1037 yielding them out.
1038
1039 .. versionadded:: 1.4
1040
1041 :param size: indicate the maximum number of rows to be present
1042 in each list yielded. If None, makes use of the value set by
the :meth:`_engine.Result.yield_per` method, if it was called,
or the :paramref:`_engine.Connection.execution_options.yield_per`
execution option, which is equivalent in this regard. If
yield_per was not set, it makes use of the
1047 :meth:`_engine.Result.fetchmany` default, which may be backend
1048 specific and not well defined.
1049
1050 :return: iterator of lists
1051
1052 .. seealso::
1053
1054 :ref:`engine_stream_results`
1055
1056 :ref:`orm_queryguide_yield_per` - in the :ref:`queryguide_toplevel`
1057
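For example, a minimal sketch, assuming a connection ``conn`` with
``stream_results`` enabled and a hypothetical ``process_batch``
function::

    result = conn.execution_options(stream_results=True).execute(
        text("SELECT id, value FROM measurements")
    )

    for partition in result.partitions(500):
        # each partition is a list of at most 500 Row objects
        process_batch(partition)
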
1058 """
1059
1060 getter = self._manyrow_getter
1061
1062 while True:
1063 partition = getter(self, size)
1064 if partition:
1065 yield partition
1066 else:
1067 break
1068
1069 def fetchall(self):
1070 """A synonym for the :meth:`_engine.Result.all` method."""
1071
1072 return self._allrows()
1073
1074 def fetchone(self):
1075 """Fetch one row.
1076
1077 When all rows are exhausted, returns None.
1078
1079 This method is provided for backwards compatibility with
1080 SQLAlchemy 1.x.x.
1081
1082 To fetch the first row of a result only, use the
1083 :meth:`_engine.Result.first` method. To iterate through all
1084 rows, iterate the :class:`_engine.Result` object directly.
1085
1086 :return: a :class:`_engine.Row` object if no filters are applied,
1087 or ``None`` if no rows remain.
1088
1089 """
1090 row = self._onerow_getter(self)
1091 if row is _NO_ROW:
1092 return None
1093 else:
1094 return row
1095
1096 def fetchmany(self, size=None):
1097 """Fetch many rows.
1098
1099 When all rows are exhausted, returns an empty list.
1100
1101 This method is provided for backwards compatibility with
1102 SQLAlchemy 1.x.x.
1103
1104 To fetch rows in groups, use the :meth:`_engine.Result.partitions`
1105 method.
1106
1107 :return: a list of :class:`_engine.Row` objects.
1108
1109 .. seealso::
1110
1111 :meth:`_engine.Result.partitions`
1112
1113 """
1114
1115 return self._manyrow_getter(self, size)
1116
1117 def all(self):
1118 """Return all rows in a list.
1119
1120 Closes the result set after invocation. Subsequent invocations
1121 will return an empty list.
1122
1123 .. versionadded:: 1.4
1124
1125 :return: a list of :class:`_engine.Row` objects.
1126
1127 """
1128
1129 return self._allrows()
1130
1131 def first(self):
1132 """Fetch the first row or ``None`` if no row is present.
1133
1134 Closes the result set and discards remaining rows.
1135
1136 .. note:: This method returns one **row**, e.g. tuple, by default.
1137 To return exactly one single scalar value, that is, the first
1138 column of the first row, use the
1139 :meth:`_engine.Result.scalar` method,
1140 or combine :meth:`_engine.Result.scalars` and
1141 :meth:`_engine.Result.first`.
1142
1143 Additionally, in contrast to the behavior of the legacy ORM
1144 :meth:`_orm.Query.first` method, **no limit is applied** to the
1145 SQL query which was invoked to produce this
1146 :class:`_engine.Result`;
1147 for a DBAPI driver that buffers results in memory before yielding
1148 rows, all rows will be sent to the Python process and all but
1149 the first row will be discarded.
1150
1151 .. seealso::
1152
1153 :ref:`migration_20_unify_select`
1154
1155 :return: a :class:`_engine.Row` object, or None
1156 if no rows remain.
1157
1158 .. seealso::
1159
1160 :meth:`_engine.Result.scalar`
1161
1162 :meth:`_engine.Result.one`
1163
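For example, a brief sketch, assuming a connection ``conn`` and a
hypothetical ``user`` table::

    row = conn.execute(
        text("SELECT id, name FROM user ORDER BY id")
    ).first()

    if row is not None:
        id_value, name = row  # a full Row, not a scalar
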
1164 """
1165
1166 return self._only_one_row(
1167 raise_for_second_row=False, raise_for_none=False, scalar=False
1168 )
1169
1170 def one_or_none(self):
1171 """Return at most one result or raise an exception.
1172
1173 Returns ``None`` if the result has no rows.
1174 Raises :class:`.MultipleResultsFound`
1175 if multiple rows are returned.
1176
1177 .. versionadded:: 1.4
1178
1179 :return: The first :class:`_engine.Row` or ``None`` if no row
1180 is available.
1181
1182 :raises: :class:`.MultipleResultsFound`
1183
1184 .. seealso::
1185
1186 :meth:`_engine.Result.first`
1187
1188 :meth:`_engine.Result.one`
1189
1190 """
1191 return self._only_one_row(
1192 raise_for_second_row=True, raise_for_none=False, scalar=False
1193 )
1194
1195 def scalar_one(self):
1196 """Return exactly one scalar result or raise an exception.
1197
1198 This is equivalent to calling :meth:`_engine.Result.scalars` and
1199 then :meth:`_engine.Result.one`.
1200
1201 .. seealso::
1202
1203 :meth:`_engine.Result.one`
1204
1205 :meth:`_engine.Result.scalars`
1206
1207 """
1208 return self._only_one_row(
1209 raise_for_second_row=True, raise_for_none=True, scalar=True
1210 )
1211
1212 def scalar_one_or_none(self):
1213 """Return exactly one scalar result or ``None``.
1214
1215 This is equivalent to calling :meth:`_engine.Result.scalars` and
1216 then :meth:`_engine.Result.one_or_none`.
1217
1218 .. seealso::
1219
1220 :meth:`_engine.Result.one_or_none`
1221
1222 :meth:`_engine.Result.scalars`
1223
1224 """
1225 return self._only_one_row(
1226 raise_for_second_row=True, raise_for_none=False, scalar=True
1227 )
1228
1229 def one(self):
1230 """Return exactly one row or raise an exception.
1231
1232 Raises :class:`.NoResultFound` if the result returns no
1233 rows, or :class:`.MultipleResultsFound` if multiple rows
1234 would be returned.
1235
1236 .. note:: This method returns one **row**, e.g. tuple, by default.
1237 To return exactly one single scalar value, that is, the first
1238 column of the first row, use the
1239 :meth:`_engine.Result.scalar_one` method, or combine
1240 :meth:`_engine.Result.scalars` and
1241 :meth:`_engine.Result.one`.
1242
1243 .. versionadded:: 1.4
1244
1245 :return: The first :class:`_engine.Row`.
1246
1247 :raises: :class:`.MultipleResultsFound`, :class:`.NoResultFound`
1248
1249 .. seealso::
1250
1251 :meth:`_engine.Result.first`
1252
1253 :meth:`_engine.Result.one_or_none`
1254
1255 :meth:`_engine.Result.scalar_one`
1256
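For example, a brief sketch, assuming a connection ``conn`` and a query
expected to match exactly one row::

    from sqlalchemy import exc

    try:
        row = conn.execute(
            text("SELECT id FROM user WHERE name = :name"),
            {"name": "spongebob"},
        ).one()
    except exc.NoResultFound:
        ...  # no rows matched
    except exc.MultipleResultsFound:
        ...  # more than one row matched
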
1257 """
1258 return self._only_one_row(
1259 raise_for_second_row=True, raise_for_none=True, scalar=False
1260 )
1261
1262 def scalar(self):
1263 """Fetch the first column of the first row, and close the result set.
1264
1265 Returns ``None`` if there are no rows to fetch.
1266
1267 No validation is performed to test if additional rows remain.
1268
1269 After calling this method, the object is fully closed,
1270 e.g. the :meth:`_engine.CursorResult.close`
1271 method will have been called.
1272
1273 :return: a Python scalar value, or ``None`` if no rows remain.
1274
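For example, a minimal sketch, assuming a connection ``conn`` and a
hypothetical ``user`` table::

    user_count = conn.execute(
        text("SELECT count(*) FROM user")
    ).scalar()
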
1275 """
1276 return self._only_one_row(
1277 raise_for_second_row=False, raise_for_none=False, scalar=True
1278 )
1279
1280 def freeze(self):
1281 """Return a callable object that will produce copies of this
1282 :class:`_engine.Result` when invoked.
1283
1284 The callable object returned is an instance of
1285 :class:`_engine.FrozenResult`.
1286
1287 This is used for result set caching. The method must be called
1288 on the result when it has been unconsumed, and calling the method
1289 will consume the result fully. When the :class:`_engine.FrozenResult`
1290 is retrieved from a cache, it can be called any number of times where
1291 it will produce a new :class:`_engine.Result` object each time
1292 against its stored set of rows.
1293
1294 .. seealso::
1295
1296 :ref:`do_orm_execute_re_executing` - example usage within the
1297 ORM to implement a result-set cache.
1298
1299 """
1300
1301 return FrozenResult(self)
1302
1303 def merge(self, *others):
1304 """Merge this :class:`_engine.Result` with other compatible result
1305 objects.
1306
1307 The object returned is an instance of :class:`_engine.MergedResult`,
1308 which will be composed of iterators from the given result
1309 objects.
1310
1311 The new result will use the metadata from this result object.
1312 The subsequent result objects must be against an identical
1313 set of result / cursor metadata, otherwise the behavior is
1314 undefined.
1315
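For example, a brief sketch, assuming two results produced from
statements with the same column layout::

    r1 = conn.execute(text("SELECT id FROM user WHERE id < 10"))
    r2 = conn.execute(text("SELECT id FROM user WHERE id >= 10"))

    all_ids = r1.merge(r2).scalars().all()
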
1316 """
1317 return MergedResult(self._metadata, (self,) + others)
1318
1319
1320class FilterResult(ResultInternal):
1321 """A wrapper for a :class:`_engine.Result` that returns objects other than
1322 :class:`_engine.Row` objects, such as dictionaries or scalar objects.
1323
1324 :class:`_engine.FilterResult` is the common base for additional result
1325 APIs including :class:`_engine.MappingResult`,
1326 :class:`_engine.ScalarResult` and :class:`_engine.AsyncResult`.
1327
1328 """
1329
1330 _post_creational_filter = None
1331
1332 @_generative
1333 def yield_per(self, num):
1334 """Configure the row-fetching strategy to fetch ``num`` rows at a time.
1335
1336 The :meth:`_engine.FilterResult.yield_per` method is a pass through
1337 to the :meth:`_engine.Result.yield_per` method. See that method's
1338 documentation for usage notes.
1339
1340 .. versionadded:: 1.4.40 - added :meth:`_engine.FilterResult.yield_per`
1341 so that the method is available on all result set implementations
1342
1343 .. seealso::
1344
1345 :ref:`engine_stream_results` - describes Core behavior for
1346 :meth:`_engine.Result.yield_per`
1347
1348 :ref:`orm_queryguide_yield_per` - in the :ref:`queryguide_toplevel`
1349
1350 """
1351 self._real_result = self._real_result.yield_per(num)
1352
1353 def _soft_close(self, hard=False):
1354 self._real_result._soft_close(hard=hard)
1355
1356 @property
1357 def _soft_closed(self):
1358 return self._real_result._soft_closed
1359
1360 @property
1361 def closed(self):
1362 """Return ``True`` if the underlying :class:`_engine.Result` reports
1363 closed
1364
1365 .. versionadded:: 1.4.43
1366
1367 """
1368 return self._real_result.closed # type: ignore
1369
1370 def close(self):
1371 """Close this :class:`_engine.FilterResult`.
1372
1373 .. versionadded:: 1.4.43
1374
1375 """
1376 self._real_result.close()
1377
1378 @property
1379 def _attributes(self):
1380 return self._real_result._attributes
1381
1382 def _fetchiter_impl(self):
1383 return self._real_result._fetchiter_impl()
1384
1385 def _fetchone_impl(self, hard_close=False):
1386 return self._real_result._fetchone_impl(hard_close=hard_close)
1387
1388 def _fetchall_impl(self):
1389 return self._real_result._fetchall_impl()
1390
1391 def _fetchmany_impl(self, size=None):
1392 return self._real_result._fetchmany_impl(size=size)
1393
1394
1395class ScalarResult(FilterResult):
1396 """A wrapper for a :class:`_engine.Result` that returns scalar values
rather than :class:`_engine.Row` values.
1398
1399 The :class:`_engine.ScalarResult` object is acquired by calling the
1400 :meth:`_engine.Result.scalars` method.
1401
1402 A special limitation of :class:`_engine.ScalarResult` is that it has
1403 no ``fetchone()`` method; since the semantics of ``fetchone()`` are that
1404 the ``None`` value indicates no more results, this is not compatible
1405 with :class:`_engine.ScalarResult` since there is no way to distinguish
1406 between ``None`` as a row value versus ``None`` as an indicator. Use
1407 ``next(result)`` to receive values individually.
1408
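For example, a brief sketch, assuming a connection ``conn`` and a
non-empty ``user`` table::

    scalars = conn.execute(text("SELECT id FROM user")).scalars()

    first_id = next(scalars)   # fetch values one at a time
    rest = scalars.all()       # or fetch the remainder at once
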
1409 """
1410
1411 _generate_rows = False
1412
1413 def __init__(self, real_result, index):
1414 self._real_result = real_result
1415
1416 if real_result._source_supports_scalars:
1417 self._metadata = real_result._metadata
1418 self._post_creational_filter = None
1419 else:
1420 self._metadata = real_result._metadata._reduce([index])
1421 self._post_creational_filter = operator.itemgetter(0)
1422
1423 self._unique_filter_state = real_result._unique_filter_state
1424
1425 def unique(self, strategy=None):
1426 """Apply unique filtering to the objects returned by this
1427 :class:`_engine.ScalarResult`.
1428
1429 See :meth:`_engine.Result.unique` for usage details.
1430
1431 """
1432 self._unique_filter_state = (set(), strategy)
1433 return self
1434
1435 def partitions(self, size=None):
1436 """Iterate through sub-lists of elements of the size given.
1437
1438 Equivalent to :meth:`_engine.Result.partitions` except that
1439 scalar values, rather than :class:`_engine.Row` objects,
1440 are returned.
1441
1442 """
1443
1444 getter = self._manyrow_getter
1445
1446 while True:
1447 partition = getter(self, size)
1448 if partition:
1449 yield partition
1450 else:
1451 break
1452
1453 def fetchall(self):
1454 """A synonym for the :meth:`_engine.ScalarResult.all` method."""
1455
1456 return self._allrows()
1457
1458 def fetchmany(self, size=None):
1459 """Fetch many objects.
1460
1461 Equivalent to :meth:`_engine.Result.fetchmany` except that
1462 scalar values, rather than :class:`_engine.Row` objects,
1463 are returned.
1464
1465 """
1466 return self._manyrow_getter(self, size)
1467
1468 def all(self):
1469 """Return all scalar values in a list.
1470
1471 Equivalent to :meth:`_engine.Result.all` except that
1472 scalar values, rather than :class:`_engine.Row` objects,
1473 are returned.
1474
1475 """
1476 return self._allrows()
1477
1478 def __iter__(self):
1479 return self._iter_impl()
1480
1481 def __next__(self):
1482 return self._next_impl()
1483
1484 if py2k:
1485
1486 def next(self): # noqa
1487 return self._next_impl()
1488
1489 def first(self):
1490 """Fetch the first object or ``None`` if no object is present.
1491
1492 Equivalent to :meth:`_engine.Result.first` except that
1493 scalar values, rather than :class:`_engine.Row` objects,
1494 are returned.
1495
1496
1497 """
1498 return self._only_one_row(
1499 raise_for_second_row=False, raise_for_none=False, scalar=False
1500 )
1501
1502 def one_or_none(self):
1503 """Return at most one object or raise an exception.
1504
1505 Equivalent to :meth:`_engine.Result.one_or_none` except that
1506 scalar values, rather than :class:`_engine.Row` objects,
1507 are returned.
1508
1509 """
1510 return self._only_one_row(
1511 raise_for_second_row=True, raise_for_none=False, scalar=False
1512 )
1513
1514 def one(self):
1515 """Return exactly one object or raise an exception.
1516
1517 Equivalent to :meth:`_engine.Result.one` except that
1518 scalar values, rather than :class:`_engine.Row` objects,
1519 are returned.
1520
1521 """
1522 return self._only_one_row(
1523 raise_for_second_row=True, raise_for_none=True, scalar=False
1524 )
1525
1526
1527class MappingResult(_WithKeys, FilterResult):
1528 """A wrapper for a :class:`_engine.Result` that returns dictionary values
1529 rather than :class:`_engine.Row` values.
1530
1531 The :class:`_engine.MappingResult` object is acquired by calling the
1532 :meth:`_engine.Result.mappings` method.
1533
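For example, a brief sketch, assuming a connection ``conn`` and a
hypothetical ``user`` table::

    mappings = conn.execute(text("SELECT id, name FROM user")).mappings()

    for row_mapping in mappings:
        # each element is a RowMapping, a read-only dict-like object
        print(dict(row_mapping))
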
1534 """
1535
1536 _generate_rows = True
1537
1538 _post_creational_filter = operator.attrgetter("_mapping")
1539
1540 def __init__(self, result):
1541 self._real_result = result
1542 self._unique_filter_state = result._unique_filter_state
1543 self._metadata = result._metadata
1544 if result._source_supports_scalars:
1545 self._metadata = self._metadata._reduce([0])
1546
1547 def unique(self, strategy=None):
1548 """Apply unique filtering to the objects returned by this
1549 :class:`_engine.MappingResult`.
1550
1551 See :meth:`_engine.Result.unique` for usage details.
1552
1553 """
1554 self._unique_filter_state = (set(), strategy)
1555 return self
1556
1557 def columns(self, *col_expressions):
1558 r"""Establish the columns that should be returned in each row."""
1559 return self._column_slices(col_expressions)
1560
1561 def partitions(self, size=None):
1562 """Iterate through sub-lists of elements of the size given.
1563
1564 Equivalent to :meth:`_engine.Result.partitions` except that
1565 :class:`_engine.RowMapping` values, rather than :class:`_engine.Row`
1566 objects, are returned.
1567
1568 """
1569
1570 getter = self._manyrow_getter
1571
1572 while True:
1573 partition = getter(self, size)
1574 if partition:
1575 yield partition
1576 else:
1577 break
1578
1579 def fetchall(self):
1580 """A synonym for the :meth:`_engine.MappingResult.all` method."""
1581
1582 return self._allrows()
1583
1584 def fetchone(self):
1585 """Fetch one object.
1586
1587 Equivalent to :meth:`_engine.Result.fetchone` except that
1588 :class:`_engine.RowMapping` values, rather than :class:`_engine.Row`
1589 objects, are returned.
1590
1591 """
1592
1593 row = self._onerow_getter(self)
1594 if row is _NO_ROW:
1595 return None
1596 else:
1597 return row
1598
1599 def fetchmany(self, size=None):
1600 """Fetch many objects.
1601
1602 Equivalent to :meth:`_engine.Result.fetchmany` except that
1603 :class:`_engine.RowMapping` values, rather than :class:`_engine.Row`
1604 objects, are returned.
1605
1606 """
1607
1608 return self._manyrow_getter(self, size)
1609
1610 def all(self):
1611 """Return all scalar values in a list.
1612
1613 Equivalent to :meth:`_engine.Result.all` except that
1614 :class:`_engine.RowMapping` values, rather than :class:`_engine.Row`
1615 objects, are returned.
1616
1617 """
1618
1619 return self._allrows()
1620
1621 def __iter__(self):
1622 return self._iter_impl()
1623
1624 def __next__(self):
1625 return self._next_impl()
1626
1627 if py2k:
1628
1629 def next(self): # noqa
1630 return self._next_impl()
1631
1632 def first(self):
1633 """Fetch the first object or ``None`` if no object is present.
1634
1635 Equivalent to :meth:`_engine.Result.first` except that
1636 :class:`_engine.RowMapping` values, rather than :class:`_engine.Row`
1637 objects, are returned.
1638
1639
1640 """
1641 return self._only_one_row(
1642 raise_for_second_row=False, raise_for_none=False, scalar=False
1643 )
1644
1645 def one_or_none(self):
1646 """Return at most one object or raise an exception.
1647
1648 Equivalent to :meth:`_engine.Result.one_or_none` except that
1649 :class:`_engine.RowMapping` values, rather than :class:`_engine.Row`
1650 objects, are returned.
1651
1652 """
1653 return self._only_one_row(
1654 raise_for_second_row=True, raise_for_none=False, scalar=False
1655 )
1656
1657 def one(self):
1658 """Return exactly one object or raise an exception.
1659
1660 Equivalent to :meth:`_engine.Result.one` except that
1661 :class:`_engine.RowMapping` values, rather than :class:`_engine.Row`
1662 objects, are returned.
1663
1664 """
1665 return self._only_one_row(
1666 raise_for_second_row=True, raise_for_none=True, scalar=False
1667 )
1668
1669
1670class FrozenResult(object):
1671 """Represents a :class:`_engine.Result` object in a "frozen" state suitable
1672 for caching.
1673
1674 The :class:`_engine.FrozenResult` object is returned from the
1675 :meth:`_engine.Result.freeze` method of any :class:`_engine.Result`
1676 object.
1677
1678 A new iterable :class:`_engine.Result` object is generated from a fixed
1679 set of data each time the :class:`_engine.FrozenResult` is invoked as
1680 a callable::
1681
1682
1683 result = connection.execute(query)
1684
1685 frozen = result.freeze()
1686
1687 unfrozen_result_one = frozen()
1688
1689 for row in unfrozen_result_one:
1690 print(row)
1691
1692 unfrozen_result_two = frozen()
1693 rows = unfrozen_result_two.all()
1694
1695 # ... etc
1696
1697 .. versionadded:: 1.4
1698
1699 .. seealso::
1700
1701 :ref:`do_orm_execute_re_executing` - example usage within the
1702 ORM to implement a result-set cache.
1703
1704 :func:`_orm.loading.merge_frozen_result` - ORM function to merge
1705 a frozen result back into a :class:`_orm.Session`.
1706
1707 """
1708
1709 def __init__(self, result):
1710 self.metadata = result._metadata._for_freeze()
1711 self._source_supports_scalars = result._source_supports_scalars
1712 self._attributes = result._attributes
1713
1714 if self._source_supports_scalars:
1715 self.data = list(result._raw_row_iterator())
1716 else:
1717 self.data = result.fetchall()
1718
1719 def rewrite_rows(self):
1720 if self._source_supports_scalars:
1721 return [[elem] for elem in self.data]
1722 else:
1723 return [list(row) for row in self.data]
1724
1725 def with_new_rows(self, tuple_data):
1726 fr = FrozenResult.__new__(FrozenResult)
1727 fr.metadata = self.metadata
1728 fr._attributes = self._attributes
1729 fr._source_supports_scalars = self._source_supports_scalars
1730
1731 if self._source_supports_scalars:
1732 fr.data = [d[0] for d in tuple_data]
1733 else:
1734 fr.data = tuple_data
1735 return fr
1736
1737 def __call__(self):
1738 result = IteratorResult(self.metadata, iter(self.data))
1739 result._attributes = self._attributes
1740 result._source_supports_scalars = self._source_supports_scalars
1741 return result
1742
1743
1744class IteratorResult(Result):
1745 """A :class:`_engine.Result` that gets data from a Python iterator of
1746 :class:`_engine.Row` objects or similar row-like data.
1747
1748 .. versionadded:: 1.4
1749
1750 """
1751
1752 _hard_closed = False
1753 _soft_closed = False
1754
1755 def __init__(
1756 self,
1757 cursor_metadata,
1758 iterator,
1759 raw=None,
1760 _source_supports_scalars=False,
1761 ):
1762 self._metadata = cursor_metadata
1763 self.iterator = iterator
1764 self.raw = raw
1765 self._source_supports_scalars = _source_supports_scalars
1766
1767 @property
1768 def closed(self):
1769 """Return ``True`` if this :class:`_engine.IteratorResult` has
1770 been closed
1771
1772 .. versionadded:: 1.4.43
1773
1774 """
1775 return self._hard_closed
1776
1777 def _soft_close(self, hard=False, **kw):
1778 if hard:
1779 self._hard_closed = True
1780 if self.raw is not None:
1781 self.raw._soft_close(hard=hard, **kw)
1782 self.iterator = iter([])
1783 self._reset_memoizations()
1784 self._soft_closed = True
1785
1786 def _raise_hard_closed(self):
1787 raise exc.ResourceClosedError("This result object is closed.")
1788
1789 def _raw_row_iterator(self):
1790 return self.iterator
1791
1792 def _fetchiter_impl(self):
1793 if self._hard_closed:
1794 self._raise_hard_closed()
1795 return self.iterator
1796
1797 def _fetchone_impl(self, hard_close=False):
1798 if self._hard_closed:
1799 self._raise_hard_closed()
1800
1801 row = next(self.iterator, _NO_ROW)
1802 if row is _NO_ROW:
1803 self._soft_close(hard=hard_close)
1804 return None
1805 else:
1806 return row
1807
1808 def _fetchall_impl(self):
1809 if self._hard_closed:
1810 self._raise_hard_closed()
1811
1812 try:
1813 return list(self.iterator)
1814 finally:
1815 self._soft_close()
1816
1817 def _fetchmany_impl(self, size=None):
1818 if self._hard_closed:
1819 self._raise_hard_closed()
1820
1821 return list(itertools.islice(self.iterator, 0, size))
1822
1823
1824def null_result():
1825 return IteratorResult(SimpleResultMetaData([]), iter([]))
1826
1827
1828class ChunkedIteratorResult(IteratorResult):
1829 """An :class:`_engine.IteratorResult` that works from an
1830 iterator-producing callable.
1831
1832 The given ``chunks`` argument is a function that is given a number of rows
1833 to return in each chunk, or ``None`` for all rows. The function should
1834 then return an un-consumed iterator of lists, each list of the requested
1835 size.
1836
The function can be called again at any time, in which case it should
continue from the same result set but adjust the chunk size as given.
1839
1840 .. versionadded:: 1.4
1841
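For example, a minimal, illustrative sketch of a conforming ``chunks``
callable, assuming an in-memory list of raw row tuples::

    import itertools

    raw_rows = [(1, "a"), (2, "b"), (3, "c")]
    iterator = iter(raw_rows)

    def chunks(size):
        # continue from the shared iterator on each call, honoring
        # the requested chunk size (None means all remaining rows)
        while True:
            chunk = list(itertools.islice(iterator, size))
            if not chunk:
                break
            yield chunk

    result = ChunkedIteratorResult(
        SimpleResultMetaData(["id", "value"]), chunks
    )
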
1842 """
1843
1844 def __init__(
1845 self,
1846 cursor_metadata,
1847 chunks,
1848 source_supports_scalars=False,
1849 raw=None,
1850 dynamic_yield_per=False,
1851 ):
1852 self._metadata = cursor_metadata
1853 self.chunks = chunks
1854 self._source_supports_scalars = source_supports_scalars
1855 self.raw = raw
1856 self.iterator = itertools.chain.from_iterable(self.chunks(None))
1857 self.dynamic_yield_per = dynamic_yield_per
1858
1859 @_generative
1860 def yield_per(self, num):
1861 # TODO: this throws away the iterator which may be holding
1862 # onto a chunk. the yield_per cannot be changed once any
1863 # rows have been fetched. either find a way to enforce this,
1864 # or we can't use itertools.chain and will instead have to
1865 # keep track.
1866
1867 self._yield_per = num
1868 self.iterator = itertools.chain.from_iterable(self.chunks(num))
1869
1870 def _soft_close(self, **kw):
1871 super(ChunkedIteratorResult, self)._soft_close(**kw)
1872 self.chunks = lambda size: []
1873
1874 def _fetchmany_impl(self, size=None):
1875 if self.dynamic_yield_per:
1876 self.iterator = itertools.chain.from_iterable(self.chunks(size))
1877 return super(ChunkedIteratorResult, self)._fetchmany_impl(size=size)
1878
1879
1880class MergedResult(IteratorResult):
1881 """A :class:`_engine.Result` that is merged from any number of
1882 :class:`_engine.Result` objects.
1883
1884 Returned by the :meth:`_engine.Result.merge` method.
1885
1886 .. versionadded:: 1.4
1887
1888 """
1889
1890 closed = False
1891
1892 def __init__(self, cursor_metadata, results):
1893 self._results = results
1894 super(MergedResult, self).__init__(
1895 cursor_metadata,
1896 itertools.chain.from_iterable(
1897 r._raw_row_iterator() for r in results
1898 ),
1899 )
1900
1901 self._unique_filter_state = results[0]._unique_filter_state
1902 self._yield_per = results[0]._yield_per
1903
1904 # going to try something w/ this in next rev
1905 self._source_supports_scalars = results[0]._source_supports_scalars
1906
1907 self._attributes = self._attributes.merge_with(
1908 *[r._attributes for r in results]
1909 )
1910
1911 def _soft_close(self, hard=False, **kw):
1912 for r in self._results:
1913 r._soft_close(hard=hard, **kw)
1914 if hard:
1915 self.closed = True