Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/sqlalchemy/engine/result.py: 30%
675 statements
coverage.py v7.0.1, created at 2022-12-25 06:11 +0000
1# engine/result.py
2# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
3# <see AUTHORS file>
4#
5# This module is part of SQLAlchemy and is released under
6# the MIT License: https://www.opensource.org/licenses/mit-license.php
8"""Define generic result set constructs."""
11import functools
12import itertools
13import operator
15from .row import _baserow_usecext
16from .row import Row
17from .. import exc
18from .. import util
19from ..sql.base import _generative
20from ..sql.base import HasMemoized
21from ..sql.base import InPlaceGenerative
22from ..util import collections_abc
23from ..util import py2k
26if _baserow_usecext:
27 from sqlalchemy.cresultproxy import tuplegetter
29 _row_as_tuple = tuplegetter
30else:
32 def tuplegetter(*indexes):
33 it = operator.itemgetter(*indexes)
35 if len(indexes) > 1:
36 return it
37 else:
38 return lambda row: (it(row),)
40 def _row_as_tuple(*indexes):
41 # circumvent LegacyRow.__getitem__ pointing to
42 # _get_by_key_impl_mapping for now. otherwise we could
43 # use itemgetter
44 getters = [
45 operator.methodcaller("_get_by_int_impl", index)
46 for index in indexes
47 ]
48 return lambda rec: tuple([getter(rec) for getter in getters])
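# Illustrative sketch (not part of the original module): the tuplegetter()
# fallback defined above always produces a tuple, even for a single index,
# unlike a bare operator.itemgetter().
def _demo_tuplegetter():  # pragma: no cover - illustrative only
    row = ("x", "y", "z")
    assert tuplegetter(1, 2)(row) == ("y", "z")
    assert tuplegetter(0)(row) == ("x",)  # single index is still wrapped in a tuple
    assert operator.itemgetter(0)(row) == "x"  # plain itemgetter would not wrap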
51class ResultMetaData(object):
52 """Base for metadata about result rows."""
54 __slots__ = ()
56 _tuplefilter = None
57 _translated_indexes = None
58 _unique_filters = None
60 @property
61 def keys(self):
62 return RMKeyView(self)
64 def _has_key(self, key):
65 raise NotImplementedError()
67 def _for_freeze(self):
68 raise NotImplementedError()
70 def _key_fallback(self, key, err, raiseerr=True):
71 assert raiseerr
72 util.raise_(KeyError(key), replace_context=err)
74 def _warn_for_nonint(self, key):
75 util.warn_deprecated_20(
76 "Retrieving row members using strings or other non-integers is "
77 "deprecated; use row._mapping for a dictionary interface "
78 "to the row"
79 )
81 def _raise_for_nonint(self, key):
82 raise TypeError(
83 "TypeError: tuple indices must be integers or slices, not %s"
84 % type(key).__name__
85 )
87 def _index_for_key(self, keys, raiseerr):
88 raise NotImplementedError()
90 def _metadata_for_keys(self, key):
91 raise NotImplementedError()
93 def _reduce(self, keys):
94 raise NotImplementedError()
96 def _getter(self, key, raiseerr=True):
98 index = self._index_for_key(key, raiseerr)
100 if index is not None:
101 return operator.itemgetter(index)
102 else:
103 return None
105 def _row_as_tuple_getter(self, keys):
106 indexes = self._indexes_for_keys(keys)
107 return _row_as_tuple(*indexes)
110class RMKeyView(collections_abc.KeysView):
111 __slots__ = ("_parent", "_keys")
113 def __init__(self, parent):
114 self._parent = parent
115 self._keys = [k for k in parent._keys if k is not None]
117 def __len__(self):
118 return len(self._keys)
120 def __repr__(self):
121 return "{0.__class__.__name__}({0._keys!r})".format(self)
123 def __iter__(self):
124 return iter(self._keys)
126 def __contains__(self, item):
127 if not _baserow_usecext and isinstance(item, int):
128 return False
130 # note this also includes special key fallback behaviors
131 # which also don't seem to be tested in test_resultset right now
132 return self._parent._has_key(item)
134 def __eq__(self, other):
135 return list(other) == list(self)
137 def __ne__(self, other):
138 return list(other) != list(self)
141class SimpleResultMetaData(ResultMetaData):
142 """result metadata for in-memory collections."""
144 __slots__ = (
145 "_keys",
146 "_keymap",
147 "_processors",
148 "_tuplefilter",
149 "_translated_indexes",
150 "_unique_filters",
151 )
153 def __init__(
154 self,
155 keys,
156 extra=None,
157 _processors=None,
158 _tuplefilter=None,
159 _translated_indexes=None,
160 _unique_filters=None,
161 ):
162 self._keys = list(keys)
163 self._tuplefilter = _tuplefilter
164 self._translated_indexes = _translated_indexes
165 self._unique_filters = _unique_filters
167 if extra:
168 recs_names = [
169 (
170 (name,) + (extras if extras else ()),
171 (index, name, extras),
172 )
173 for index, (name, extras) in enumerate(zip(self._keys, extra))
174 ]
175 else:
176 recs_names = [
177 ((name,), (index, name, ()))
178 for index, name in enumerate(self._keys)
179 ]
181 self._keymap = {key: rec for keys, rec in recs_names for key in keys}
183 self._processors = _processors
185 def _has_key(self, key):
186 return key in self._keymap
188 def _for_freeze(self):
189 unique_filters = self._unique_filters
190 if unique_filters and self._tuplefilter:
191 unique_filters = self._tuplefilter(unique_filters)
193 # TODO: are we freezing the result with or without uniqueness
194 # applied?
195 return SimpleResultMetaData(
196 self._keys,
197 extra=[self._keymap[key][2] for key in self._keys],
198 _unique_filters=unique_filters,
199 )
201 def __getstate__(self):
202 return {
203 "_keys": self._keys,
204 "_translated_indexes": self._translated_indexes,
205 }
207 def __setstate__(self, state):
208 if state["_translated_indexes"]:
209 _translated_indexes = state["_translated_indexes"]
210 _tuplefilter = tuplegetter(*_translated_indexes)
211 else:
212 _translated_indexes = _tuplefilter = None
213 self.__init__(
214 state["_keys"],
215 _translated_indexes=_translated_indexes,
216 _tuplefilter=_tuplefilter,
217 )
219 def _contains(self, value, row):
220 return value in row._data
222 def _index_for_key(self, key, raiseerr=True):
223 if int in key.__class__.__mro__:
224 key = self._keys[key]
225 try:
226 rec = self._keymap[key]
227 except KeyError as ke:
228 rec = self._key_fallback(key, ke, raiseerr)
230 return rec[0]
232 def _indexes_for_keys(self, keys):
233 return [self._keymap[key][0] for key in keys]
235 def _metadata_for_keys(self, keys):
236 for key in keys:
237 if int in key.__class__.__mro__:
238 key = self._keys[key]
240 try:
241 rec = self._keymap[key]
242 except KeyError as ke:
243 rec = self._key_fallback(key, ke, True)
245 yield rec
247 def _reduce(self, keys):
248 try:
249 metadata_for_keys = [
250 self._keymap[
251 self._keys[key] if int in key.__class__.__mro__ else key
252 ]
253 for key in keys
254 ]
255 except KeyError as ke:
256 self._key_fallback(ke.args[0], ke, True)
258 indexes, new_keys, extra = zip(*metadata_for_keys)
260 if self._translated_indexes:
261 indexes = [self._translated_indexes[idx] for idx in indexes]
263 tup = tuplegetter(*indexes)
265 new_metadata = SimpleResultMetaData(
266 new_keys,
267 extra=extra,
268 _tuplefilter=tup,
269 _translated_indexes=indexes,
270 _processors=self._processors,
271 _unique_filters=self._unique_filters,
272 )
274 return new_metadata
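# Illustrative sketch (not part of the original module): how the ``extra``
# argument to SimpleResultMetaData feeds alternate lookup names into _keymap;
# the names used here are arbitrary examples.
def _demo_simple_metadata_keymap():  # pragma: no cover - illustrative only
    md = SimpleResultMetaData(["a", "b"], extra=[("alias_a",), ()])
    assert md._keymap["a"][0] == 0
    assert md._keymap["alias_a"][0] == 0  # extra name maps to the same index
    assert md._keymap["b"][0] == 1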
277def result_tuple(fields, extra=None):
278 parent = SimpleResultMetaData(fields, extra)
279 return functools.partial(
280 Row, parent, parent._processors, parent._keymap, Row._default_key_style
281 )
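# Illustrative sketch (not part of the original module): result_tuple()
# returns a factory that builds named-tuple-like Row objects for in-memory
# data; the field names and values here are arbitrary examples.
def _demo_result_tuple():  # pragma: no cover - illustrative only
    make_row = result_tuple(["id", "name"])
    row = make_row((1, "spongebob"))
    assert tuple(row) == (1, "spongebob")
    assert row._mapping["name"] == "spongebob"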
284# a symbol that indicates to internal Result methods that
285# "no row is returned". We can't use None for those cases where a scalar
286# filter is applied to rows.
287_NO_ROW = util.symbol("NO_ROW")
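# Illustrative sketch (not part of the original module) of why None cannot
# serve as the sentinel: a one-row result whose only value is None and a
# truly empty result both return None from scalar(), so a distinct internal
# marker is required.
def _demo_no_row_sentinel():  # pragma: no cover - illustrative only
    md = SimpleResultMetaData(["x"])
    assert IteratorResult(md, iter([(None,)])).scalar() is None  # a real NULL value
    assert IteratorResult(md, iter([])).scalar() is None         # no row at all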
290class ResultInternal(InPlaceGenerative):
291 _real_result = None
292 _generate_rows = True
293 _unique_filter_state = None
294 _post_creational_filter = None
295 _is_cursor = False
297 @HasMemoized.memoized_attribute
298 def _row_getter(self):
299 real_result = self._real_result if self._real_result else self
301 if real_result._source_supports_scalars:
302 if not self._generate_rows:
303 return None
304 else:
305 _proc = real_result._process_row
307 def process_row(
308 metadata, processors, keymap, key_style, scalar_obj
309 ):
310 return _proc(
311 metadata, processors, keymap, key_style, (scalar_obj,)
312 )
314 else:
315 process_row = real_result._process_row
317 key_style = real_result._process_row._default_key_style
318 metadata = self._metadata
320 keymap = metadata._keymap
321 processors = metadata._processors
322 tf = metadata._tuplefilter
324 if tf and not real_result._source_supports_scalars:
325 if processors:
326 processors = tf(processors)
328 _make_row_orig = functools.partial(
329 process_row, metadata, processors, keymap, key_style
330 )
332 def make_row(row):
333 return _make_row_orig(tf(row))
335 else:
336 make_row = functools.partial(
337 process_row, metadata, processors, keymap, key_style
338 )
340 fns = ()
342 if real_result._row_logging_fn:
343 fns = (real_result._row_logging_fn,)
344 else:
345 fns = ()
347 if fns:
348 _make_row = make_row
350 def make_row(row):
351 row = _make_row(row)
352 for fn in fns:
353 row = fn(row)
354 return row
356 return make_row
358 @HasMemoized.memoized_attribute
359 def _iterator_getter(self):
361 make_row = self._row_getter
363 post_creational_filter = self._post_creational_filter
365 if self._unique_filter_state:
366 uniques, strategy = self._unique_strategy
368 def iterrows(self):
369 for row in self._fetchiter_impl():
370 obj = make_row(row) if make_row else row
371 hashed = strategy(obj) if strategy else obj
372 if hashed in uniques:
373 continue
374 uniques.add(hashed)
375 if post_creational_filter:
376 obj = post_creational_filter(obj)
377 yield obj
379 else:
381 def iterrows(self):
382 for row in self._fetchiter_impl():
383 row = make_row(row) if make_row else row
384 if post_creational_filter:
385 row = post_creational_filter(row)
386 yield row
388 return iterrows
390 def _raw_all_rows(self):
391 make_row = self._row_getter
392 rows = self._fetchall_impl()
393 return [make_row(row) for row in rows]
395 def _allrows(self):
397 post_creational_filter = self._post_creational_filter
399 make_row = self._row_getter
401 rows = self._fetchall_impl()
402 if make_row:
403 made_rows = [make_row(row) for row in rows]
404 else:
405 made_rows = rows
407 if self._unique_filter_state:
408 uniques, strategy = self._unique_strategy
410 rows = [
411 made_row
412 for made_row, sig_row in [
413 (
414 made_row,
415 strategy(made_row) if strategy else made_row,
416 )
417 for made_row in made_rows
418 ]
419 if sig_row not in uniques and not uniques.add(sig_row)
420 ]
421 else:
422 rows = made_rows
424 if post_creational_filter:
425 rows = [post_creational_filter(row) for row in rows]
426 return rows
428 @HasMemoized.memoized_attribute
429 def _onerow_getter(self):
430 make_row = self._row_getter
432 post_creational_filter = self._post_creational_filter
434 if self._unique_filter_state:
435 uniques, strategy = self._unique_strategy
437 def onerow(self):
438 _onerow = self._fetchone_impl
439 while True:
440 row = _onerow()
441 if row is None:
442 return _NO_ROW
443 else:
444 obj = make_row(row) if make_row else row
445 hashed = strategy(obj) if strategy else obj
446 if hashed in uniques:
447 continue
448 else:
449 uniques.add(hashed)
450 if post_creational_filter:
451 obj = post_creational_filter(obj)
452 return obj
454 else:
456 def onerow(self):
457 row = self._fetchone_impl()
458 if row is None:
459 return _NO_ROW
460 else:
461 row = make_row(row) if make_row else row
462 if post_creational_filter:
463 row = post_creational_filter(row)
464 return row
466 return onerow
468 @HasMemoized.memoized_attribute
469 def _manyrow_getter(self):
470 make_row = self._row_getter
472 post_creational_filter = self._post_creational_filter
474 if self._unique_filter_state:
475 uniques, strategy = self._unique_strategy
477 def filterrows(make_row, rows, strategy, uniques):
478 if make_row:
479 rows = [make_row(row) for row in rows]
481 if strategy:
482 made_rows = (
483 (made_row, strategy(made_row)) for made_row in rows
484 )
485 else:
486 made_rows = ((made_row, made_row) for made_row in rows)
487 return [
488 made_row
489 for made_row, sig_row in made_rows
490 if sig_row not in uniques and not uniques.add(sig_row)
491 ]
493 def manyrows(self, num):
494 collect = []
496 _manyrows = self._fetchmany_impl
498 if num is None:
499 # if None is passed, we don't know the default
500 # manyrows number; the DBAPI has this as cursor.arraysize, and
501 # different DBAPIs / fetch strategies may differ.
502 # do a fetch to find what the number is. if there are
503 # only fewer rows left, then it doesn't matter.
504 real_result = (
505 self._real_result if self._real_result else self
506 )
507 if real_result._yield_per:
508 num_required = num = real_result._yield_per
509 else:
510 rows = _manyrows(num)
511 num = len(rows)
512 collect.extend(
513 filterrows(make_row, rows, strategy, uniques)
514 )
515 num_required = num - len(collect)
516 else:
517 num_required = num
519 while num_required:
520 rows = _manyrows(num_required)
521 if not rows:
522 break
524 collect.extend(
525 filterrows(make_row, rows, strategy, uniques)
526 )
527 num_required = num - len(collect)
529 if post_creational_filter:
530 collect = [post_creational_filter(row) for row in collect]
531 return collect
533 else:
535 def manyrows(self, num):
536 if num is None:
537 real_result = (
538 self._real_result if self._real_result else self
539 )
540 num = real_result._yield_per
542 rows = self._fetchmany_impl(num)
543 if make_row:
544 rows = [make_row(row) for row in rows]
545 if post_creational_filter:
546 rows = [post_creational_filter(row) for row in rows]
547 return rows
549 return manyrows
551 def _only_one_row(
552 self,
553 raise_for_second_row,
554 raise_for_none,
555 scalar,
556 ):
557 onerow = self._fetchone_impl
559 row = onerow(hard_close=True)
560 if row is None:
561 if raise_for_none:
562 raise exc.NoResultFound(
563 "No row was found when one was required"
564 )
565 else:
566 return None
568 if scalar and self._source_supports_scalars:
569 self._generate_rows = False
570 make_row = None
571 else:
572 make_row = self._row_getter
574 try:
575 row = make_row(row) if make_row else row
576 except:
577 self._soft_close(hard=True)
578 raise
580 if raise_for_second_row:
581 if self._unique_filter_state:
582 # for no second row but uniqueness, need to essentially
583 # consume the entire result :(
584 uniques, strategy = self._unique_strategy
586 existing_row_hash = strategy(row) if strategy else row
588 while True:
589 next_row = onerow(hard_close=True)
590 if next_row is None:
591 next_row = _NO_ROW
592 break
594 try:
595 next_row = make_row(next_row) if make_row else next_row
597 if strategy:
598 if existing_row_hash == strategy(next_row):
599 continue
600 elif row == next_row:
601 continue
602 # here, we have a row and it's different
603 break
604 except:
605 self._soft_close(hard=True)
606 raise
607 else:
608 next_row = onerow(hard_close=True)
609 if next_row is None:
610 next_row = _NO_ROW
612 if next_row is not _NO_ROW:
613 self._soft_close(hard=True)
614 raise exc.MultipleResultsFound(
615 "Multiple rows were found when exactly one was required"
616 if raise_for_none
617 else "Multiple rows were found when one or none "
618 "was required"
619 )
620 else:
621 next_row = _NO_ROW
622 # if we checked for second row then that would have
623 # closed us :)
624 self._soft_close(hard=True)
626 if not scalar:
627 post_creational_filter = self._post_creational_filter
628 if post_creational_filter:
629 row = post_creational_filter(row)
631 if scalar and make_row:
632 return row[0]
633 else:
634 return row
636 def _iter_impl(self):
637 return self._iterator_getter(self)
639 def _next_impl(self):
640 row = self._onerow_getter(self)
641 if row is _NO_ROW:
642 raise StopIteration()
643 else:
644 return row
646 @_generative
647 def _column_slices(self, indexes):
648 real_result = self._real_result if self._real_result else self
650 if real_result._source_supports_scalars and len(indexes) == 1:
651 util.warn_deprecated(
652 "The Result.columns() method has a bug in SQLAlchemy 1.4 that "
653 "is causing it to yield scalar values, rather than Row "
654 "objects, in the case where a single index is passed and the "
655 "result is against ORM mapped objects. In SQLAlchemy 2.0, "
656 "Result will continue yield Row objects in this scenario. "
657 "Use the Result.scalars() method to yield scalar values.",
658 "2.0",
659 )
660 self._generate_rows = False
661 else:
662 self._generate_rows = True
663 self._metadata = self._metadata._reduce(indexes)
665 @HasMemoized.memoized_attribute
666 def _unique_strategy(self):
667 uniques, strategy = self._unique_filter_state
669 real_result = (
670 self._real_result if self._real_result is not None else self
671 )
673 if not strategy and self._metadata._unique_filters:
674 if (
675 real_result._source_supports_scalars
676 and not self._generate_rows
677 ):
678 strategy = self._metadata._unique_filters[0]
679 else:
680 filters = self._metadata._unique_filters
681 if self._metadata._tuplefilter:
682 filters = self._metadata._tuplefilter(filters)
684 strategy = operator.methodcaller("_filter_on_values", filters)
685 return uniques, strategy
688class _WithKeys(object):
689 # used mainly to share documentation on the keys method.
690 # py2k does not allow overriding the __doc__ attribute.
691 def keys(self):
692 """Return an iterable view which yields the string keys that would
693 be represented by each :class:`_engine.Row`.
695 The keys can represent the labels of the columns returned by a Core
696 statement or the names of the ORM classes returned by an ORM
697 execution.
699 The view also can be tested for key containment using the Python
700 ``in`` operator, which will test both for the string keys represented
701 in the view, as well as for alternate keys such as column objects.
703 .. versionchanged:: 1.4 a key view object is returned rather than a
704 plain list.
707 """
708 return self._metadata.keys
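# Illustrative sketch (not part of the original module): the keys() view over
# an in-memory result, including the ``in`` containment test described above.
def _demo_keys_view():  # pragma: no cover - illustrative only
    res = IteratorResult(SimpleResultMetaData(["a", "b"]), iter([(1, 2)]))
    assert list(res.keys()) == ["a", "b"]
    assert "a" in res.keys()
    assert "nope" not in res.keys()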
711class Result(_WithKeys, ResultInternal):
712 """Represent a set of database results.
714 .. versionadded:: 1.4 The :class:`_engine.Result` object provides a
715 completely updated usage model and calling facade for SQLAlchemy
716 Core and SQLAlchemy ORM. In Core, it forms the basis of the
717 :class:`_engine.CursorResult` object which replaces the previous
718 :class:`_engine.ResultProxy` interface. When using the ORM, a
719 higher level object called :class:`_engine.ChunkedIteratorResult`
720 is normally used.
722 .. note:: In SQLAlchemy 1.4 and above, this object is
723 used for ORM results returned by :meth:`_orm.Session.execute`, which can
724 yield instances of ORM mapped objects either individually or within
725 tuple-like rows. Note that the :class:`_engine.Result` object does not
726 deduplicate instances or rows automatically as is the case with the
727 legacy :class:`_orm.Query` object. For in-Python de-duplication of
728 instances or rows, use the :meth:`_engine.Result.unique` modifier
729 method.
731 .. seealso::
733 :ref:`tutorial_fetching_rows` - in the :doc:`/tutorial/index`
735 """
737 _process_row = Row
739 _row_logging_fn = None
741 _source_supports_scalars = False
743 _yield_per = None
745 _attributes = util.immutabledict()
747 def __init__(self, cursor_metadata):
748 self._metadata = cursor_metadata
750 def _soft_close(self, hard=False):
751 raise NotImplementedError()
753 def close(self):
754 """close this :class:`_engine.Result`.
756 The behavior of this method is implementation specific, and is
757 not implemented by default. The method should generally end
758 the resources in use by the result object and also cause any
759 subsequent iteration or row fetching to raise
760 :class:`.ResourceClosedError`.
762 .. versionadded:: 1.4.27 - ``.close()`` was previously not generally
763 available for all :class:`_engine.Result` classes, instead only
764 being available on the :class:`_engine.CursorResult` returned for
765 Core statement executions. As most other result objects, namely the
766 ones used by the ORM, are proxying a :class:`_engine.CursorResult`
767 in any case, this allows the underlying cursor result to be closed
768 from the outside facade for the case when the ORM query is using
769 the ``yield_per`` execution option where it does not immediately
770 exhaust and autoclose the database cursor.
772 """
773 self._soft_close(hard=True)
775 @property
776 def _soft_closed(self):
777 raise NotImplementedError()
779 @property
780 def closed(self):
781 """return ``True`` if this :class:`_engine.Result` reports .closed
783 .. versionadded:: 1.4.43
785 """
786 raise NotImplementedError()
788 @_generative
789 def yield_per(self, num):
790 """Configure the row-fetching strategy to fetch ``num`` rows at a time.
792 This impacts the underlying behavior of the result when iterating over
793 the result object, or otherwise making use of methods such as
794 :meth:`_engine.Result.fetchone` that return one row at a time. Data
795 from the underlying cursor or other data source will be buffered up to
796 this many rows in memory, and the buffered collection will then be
797 yielded out one row at a time or as many rows as are requested. Each time
798 the buffer clears, it will be refreshed to this many rows, or as many
799 rows as remain if fewer remain.
801 The :meth:`_engine.Result.yield_per` method is generally used in
802 conjunction with the
803 :paramref:`_engine.Connection.execution_options.stream_results`
804 execution option, which will allow the database dialect in use to make
805 use of a server side cursor, if the DBAPI supports a specific "server
806 side cursor" mode separate from its default mode of operation.
808 .. tip::
810 Consider using the
811 :paramref:`_engine.Connection.execution_options.yield_per`
812 execution option, which will simultaneously set
813 :paramref:`_engine.Connection.execution_options.stream_results`
814 to ensure the use of server side cursors, as well as automatically
815 invoke the :meth:`_engine.Result.yield_per` method to establish
816 a fixed row buffer size at once.
818 The :paramref:`_engine.Connection.execution_options.yield_per`
819 execution option is available for ORM operations, with
820 :class:`_orm.Session`-oriented use described at
821 :ref:`orm_queryguide_yield_per`. The Core-only version which works
822 with :class:`_engine.Connection` is new as of SQLAlchemy 1.4.40.
824 .. versionadded:: 1.4
826 :param num: number of rows to fetch each time the buffer is refilled.
827 If set to a value below 1, fetches all rows for the next buffer.
829 .. seealso::
831 :ref:`engine_stream_results` - describes Core behavior for
832 :meth:`_engine.Result.yield_per`
834 :ref:`orm_queryguide_yield_per` - in the :ref:`queryguide_toplevel`
836 """
837 self._yield_per = num
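# Illustrative sketch (not part of the original module): yield_per() sets the
# default chunk size used when partitions() / fetchmany() are called without
# an explicit size, shown here against an in-memory IteratorResult rather
# than a real database cursor.
def _demo_yield_per():  # pragma: no cover - illustrative only
    res = IteratorResult(SimpleResultMetaData(["x"]), iter([(i,) for i in range(5)]))
    assert [len(p) for p in res.yield_per(2).partitions()] == [2, 2, 1]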
839 @_generative
840 def unique(self, strategy=None):
841 """Apply unique filtering to the objects returned by this
842 :class:`_engine.Result`.
844 When this filter is applied with no arguments, the rows or objects
845 returned will be filtered such that each row is returned uniquely. The
846 algorithm used to determine this uniqueness is by default the Python
847 hashing identity of the whole tuple. In some cases a specialized
848 per-entity hashing scheme may be used; for example, when using the ORM, a
849 scheme is applied which works against the primary key identity of
850 returned objects.
852 The unique filter is applied **after all other filters**, which means
853 if the columns returned have been refined using a method such as the
854 :meth:`_engine.Result.columns` or :meth:`_engine.Result.scalars`
855 method, the uniquing is applied to **only the column or columns
856 returned**. This occurs regardless of the order in which these
857 methods have been called upon the :class:`_engine.Result` object.
859 The unique filter also changes the calculus used for methods like
860 :meth:`_engine.Result.fetchmany` and :meth:`_engine.Result.partitions`.
861 When using :meth:`_engine.Result.unique`, these methods will continue
862 to yield the number of rows or objects requested, after uniquing
863 has been applied. However, this necessarily impacts the buffering
864 behavior of the underlying cursor or datasource, such that multiple
865 underlying calls to ``cursor.fetchmany()`` may be necessary in order
866 to accumulate enough objects to provide a unique collection
867 of the requested size.
869 :param strategy: a callable that will be applied to rows or objects
870 being iterated, which should return an object that represents the
871 unique value of the row. A Python ``set()`` is used to store
872 these identities. If not passed, a default uniqueness strategy
873 is used which may have been assembled by the source of this
874 :class:`_engine.Result` object.
876 """
877 self._unique_filter_state = (set(), strategy)
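# Illustrative sketch (not part of the original module): with no strategy
# argument, unique() de-duplicates on the hash of the whole row tuple.
def _demo_unique():  # pragma: no cover - illustrative only
    res = IteratorResult(SimpleResultMetaData(["x"]), iter([(1,), (1,), (2,)]))
    assert [row[0] for row in res.unique().all()] == [1, 2]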
879 def columns(self, *col_expressions):
880 r"""Establish the columns that should be returned in each row.
882 This method may be used to limit the columns returned as well
883 as to reorder them. The given list of expressions are normally
884 a series of integers or string key names. They may also be
885 appropriate :class:`.ColumnElement` objects which correspond to
886 a given statement construct.
888 E.g.::
890 statement = select(table.c.x, table.c.y, table.c.z)
891 result = connection.execute(statement)
893 for z, y in result.columns('z', 'y'):
894 # ...
897 Example of using the column objects from the statement itself::
899 for z, y in result.columns(
900 statement.selected_columns.c.z,
901 statement.selected_columns.c.y
902 ):
903 # ...
905 .. versionadded:: 1.4
907 :param \*col_expressions: indicates columns to be returned. Elements
908 may be integer row indexes, string column names, or appropriate
909 :class:`.ColumnElement` objects corresponding to a select construct.
911 :return: this :class:`_engine.Result` object with the modifications
912 given.
914 """
915 return self._column_slices(col_expressions)
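# Illustrative sketch (not part of the original module): columns() limits and
# reorders the fields delivered in each row, here by string key against
# in-memory metadata.
def _demo_columns():  # pragma: no cover - illustrative only
    res = IteratorResult(SimpleResultMetaData(["x", "y", "z"]), iter([(1, 2, 3)]))
    assert [tuple(row) for row in res.columns("z", "x")] == [(3, 1)]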
917 def scalars(self, index=0):
918 """Return a :class:`_engine.ScalarResult` filtering object which
919 will return single elements rather than :class:`_row.Row` objects.
921 E.g.::
923 >>> result = conn.execute(text("select int_id from table"))
924 >>> result.scalars().all()
925 [1, 2, 3]
927 When results are fetched from the :class:`_engine.ScalarResult`
928 filtering object, the single column-row that would be returned by the
929 :class:`_engine.Result` is instead returned as the column's value.
931 .. versionadded:: 1.4
933 :param index: integer or row key indicating the column to be fetched
934 from each row, defaults to ``0`` indicating the first column.
936 :return: a new :class:`_engine.ScalarResult` filtering object referring
937 to this :class:`_engine.Result` object.
939 """
940 return ScalarResult(self, index)
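# Illustrative sketch (not part of the original module): scalars() narrows
# each row down to a single column value; the index may be positional or a
# string key.
def _demo_scalars():  # pragma: no cover - illustrative only
    res = IteratorResult(SimpleResultMetaData(["x", "y"]), iter([(1, "a"), (2, "b")]))
    assert res.scalars("y").all() == ["a", "b"]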
942 def _getter(self, key, raiseerr=True):
943 """return a callable that will retrieve the given key from a
944 :class:`_engine.Row`.
946 """
947 if self._source_supports_scalars:
948 raise NotImplementedError(
949 "can't use this function in 'only scalars' mode"
950 )
951 return self._metadata._getter(key, raiseerr)
953 def _tuple_getter(self, keys):
954 """return a callable that will retrieve the given keys from a
955 :class:`_engine.Row`.
957 """
958 if self._source_supports_scalars:
959 raise NotImplementedError(
960 "can't use this function in 'only scalars' mode"
961 )
962 return self._metadata._row_as_tuple_getter(keys)
964 def mappings(self):
965 """Apply a mappings filter to returned rows, returning an instance of
966 :class:`_engine.MappingResult`.
968 When this filter is applied, fetching rows will return
969 :class:`_engine.RowMapping` objects instead of :class:`_engine.Row`
970 objects.
972 .. versionadded:: 1.4
974 :return: a new :class:`_engine.MappingResult` filtering object
975 referring to this :class:`_engine.Result` object.
977 """
979 return MappingResult(self)
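# Illustrative sketch (not part of the original module): mappings() delivers
# each row as a RowMapping, i.e. a read-only mapping keyed by column name.
def _demo_mappings():  # pragma: no cover - illustrative only
    res = IteratorResult(SimpleResultMetaData(["x", "y"]), iter([(1, 2)]))
    assert [dict(m) for m in res.mappings()] == [{"x": 1, "y": 2}]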
981 def _raw_row_iterator(self):
982 """Return a safe iterator that yields raw row data.
984 This is used by the :meth:`_engine.Result.merge` method
985 to merge multiple compatible results together.
987 """
988 raise NotImplementedError()
990 def _fetchiter_impl(self):
991 raise NotImplementedError()
993 def _fetchone_impl(self, hard_close=False):
994 raise NotImplementedError()
996 def _fetchall_impl(self):
997 raise NotImplementedError()
999 def _fetchmany_impl(self, size=None):
1000 raise NotImplementedError()
1002 def __iter__(self):
1003 return self._iter_impl()
1005 def __next__(self):
1006 return self._next_impl()
1008 if py2k:
1010 def next(self): # noqa
1011 return self._next_impl()
1013 def partitions(self, size=None):
1014 """Iterate through sub-lists of rows of the size given.
1016 Each list will be of the size given, excluding the last list to
1017 be yielded, which may have a smaller number of rows. No empty
1018 lists will be yielded.
1020 The result object is automatically closed when the iterator
1021 is fully consumed.
1023 Note that the backend driver will usually buffer the entire result
1024 ahead of time unless the
1025 :paramref:`.Connection.execution_options.stream_results` execution
1026 option is used indicating that the driver should not pre-buffer
1027 results, if possible. Not all drivers support this option and
1028 the option is silently ignored for those that do not.
1030 When using the ORM, the :meth:`_engine.Result.partitions` method
1031 is typically more effective from a memory perspective when it is
1032 combined with use of the
1033 :ref:`yield_per execution option <orm_queryguide_yield_per>`,
1034 which instructs both the DBAPI driver to use server side cursors,
1035 if available, as well as instructs the ORM loading internals to only
1036 build a certain amount of ORM objects from a result at a time before
1037 yielding them out.
1039 .. versionadded:: 1.4
1041 :param size: indicate the maximum number of rows to be present
1042 in each list yielded. If None, makes use of the value set by
1043 the :meth:`_engine.Result.yield_per` method, if it was called,
1044 or the :paramref:`_engine.Connection.execution_options.yield_per`
1045 execution option, which is equivalent in this regard. If
1046 yield_per was not set, it makes use of the
1047 :meth:`_engine.Result.fetchmany` default, which may be backend
1048 specific and not well defined.
1050 :return: iterator of lists
1052 .. seealso::
1054 :ref:`engine_stream_results`
1056 :ref:`orm_queryguide_yield_per` - in the :ref:`queryguide_toplevel`
1058 """
1060 getter = self._manyrow_getter
1062 while True:
1063 partition = getter(self, size)
1064 if partition:
1065 yield partition
1066 else:
1067 break
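# Illustrative sketch (not part of the original module): partitions() yields
# lists of at most ``size`` rows until the result is exhausted, never
# yielding an empty list.
def _demo_partitions():  # pragma: no cover - illustrative only
    res = IteratorResult(SimpleResultMetaData(["x"]), iter([(i,) for i in range(5)]))
    assert [len(p) for p in res.partitions(2)] == [2, 2, 1]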
1069 def fetchall(self):
1070 """A synonym for the :meth:`_engine.Result.all` method."""
1072 return self._allrows()
1074 def fetchone(self):
1075 """Fetch one row.
1077 When all rows are exhausted, returns None.
1079 This method is provided for backwards compatibility with
1080 SQLAlchemy 1.x.x.
1082 To fetch the first row of a result only, use the
1083 :meth:`_engine.Result.first` method. To iterate through all
1084 rows, iterate the :class:`_engine.Result` object directly.
1086 :return: a :class:`_engine.Row` object if no filters are applied,
1087 or ``None`` if no rows remain.
1089 """
1090 row = self._onerow_getter(self)
1091 if row is _NO_ROW:
1092 return None
1093 else:
1094 return row
1096 def fetchmany(self, size=None):
1097 """Fetch many rows.
1099 When all rows are exhausted, returns an empty list.
1101 This method is provided for backwards compatibility with
1102 SQLAlchemy 1.x.x.
1104 To fetch rows in groups, use the :meth:`_engine.Result.partitions`
1105 method.
1107 :return: a list of :class:`_engine.Row` objects.
1109 .. seealso::
1111 :meth:`_engine.Result.partitions`
1113 """
1115 return self._manyrow_getter(self, size)
1117 def all(self):
1118 """Return all rows in a list.
1120 Closes the result set after invocation. Subsequent invocations
1121 will return an empty list.
1123 .. versionadded:: 1.4
1125 :return: a list of :class:`_engine.Row` objects.
1127 """
1129 return self._allrows()
1131 def first(self):
1132 """Fetch the first row or ``None`` if no row is present.
1134 Closes the result set and discards remaining rows.
1136 .. note:: This method returns one **row**, e.g. tuple, by default.
1137 To return exactly one single scalar value, that is, the first
1138 column of the first row, use the
1139 :meth:`_engine.Result.scalar` method,
1140 or combine :meth:`_engine.Result.scalars` and
1141 :meth:`_engine.Result.first`.
1143 Additionally, in contrast to the behavior of the legacy ORM
1144 :meth:`_orm.Query.first` method, **no limit is applied** to the
1145 SQL query which was invoked to produce this
1146 :class:`_engine.Result`;
1147 for a DBAPI driver that buffers results in memory before yielding
1148 rows, all rows will be sent to the Python process and all but
1149 the first row will be discarded.
1151 .. seealso::
1153 :ref:`migration_20_unify_select`
1155 :return: a :class:`_engine.Row` object, or None
1156 if no rows remain.
1158 .. seealso::
1160 :meth:`_engine.Result.scalar`
1162 :meth:`_engine.Result.one`
1164 """
1166 return self._only_one_row(
1167 raise_for_second_row=False, raise_for_none=False, scalar=False
1168 )
1170 def one_or_none(self):
1171 """Return at most one result or raise an exception.
1173 Returns ``None`` if the result has no rows.
1174 Raises :class:`.MultipleResultsFound`
1175 if multiple rows are returned.
1177 .. versionadded:: 1.4
1179 :return: The first :class:`_engine.Row` or ``None`` if no row
1180 is available.
1182 :raises: :class:`.MultipleResultsFound`
1184 .. seealso::
1186 :meth:`_engine.Result.first`
1188 :meth:`_engine.Result.one`
1190 """
1191 return self._only_one_row(
1192 raise_for_second_row=True, raise_for_none=False, scalar=False
1193 )
1195 def scalar_one(self):
1196 """Return exactly one scalar result or raise an exception.
1198 This is equivalent to calling :meth:`_engine.Result.scalars` and
1199 then :meth:`_engine.Result.one`.
1201 .. seealso::
1203 :meth:`_engine.Result.one`
1205 :meth:`_engine.Result.scalars`
1207 """
1208 return self._only_one_row(
1209 raise_for_second_row=True, raise_for_none=True, scalar=True
1210 )
1212 def scalar_one_or_none(self):
1213 """Return exactly one scalar result or ``None``.
1215 This is equivalent to calling :meth:`_engine.Result.scalars` and
1216 then :meth:`_engine.Result.one_or_none`.
1218 .. seealso::
1220 :meth:`_engine.Result.one_or_none`
1222 :meth:`_engine.Result.scalars`
1224 """
1225 return self._only_one_row(
1226 raise_for_second_row=True, raise_for_none=False, scalar=True
1227 )
1229 def one(self):
1230 """Return exactly one row or raise an exception.
1232 Raises :class:`.NoResultFound` if the result returns no
1233 rows, or :class:`.MultipleResultsFound` if multiple rows
1234 would be returned.
1236 .. note:: This method returns one **row**, e.g. tuple, by default.
1237 To return exactly one single scalar value, that is, the first
1238 column of the first row, use the
1239 :meth:`_engine.Result.scalar_one` method, or combine
1240 :meth:`_engine.Result.scalars` and
1241 :meth:`_engine.Result.one`.
1243 .. versionadded:: 1.4
1245 :return: The first :class:`_engine.Row`.
1247 :raises: :class:`.MultipleResultsFound`, :class:`.NoResultFound`
1249 .. seealso::
1251 :meth:`_engine.Result.first`
1253 :meth:`_engine.Result.one_or_none`
1255 :meth:`_engine.Result.scalar_one`
1257 """
1258 return self._only_one_row(
1259 raise_for_second_row=True, raise_for_none=True, scalar=False
1260 )
1262 def scalar(self):
1263 """Fetch the first column of the first row, and close the result set.
1265 Returns ``None`` if there are no rows to fetch.
1267 No validation is performed to test if additional rows remain.
1269 After calling this method, the object is fully closed,
1270 e.g. the :meth:`_engine.CursorResult.close`
1271 method will have been called.
1273 :return: a Python scalar value, or ``None`` if no rows remain.
1275 """
1276 return self._only_one_row(
1277 raise_for_second_row=False, raise_for_none=False, scalar=True
1278 )
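# Illustrative sketch (not part of the original module) contrasting first(),
# one(), scalar_one() and scalar() against one-row and zero-row in-memory
# results.
def _demo_one_first_scalar():  # pragma: no cover - illustrative only
    def one_row():
        return IteratorResult(SimpleResultMetaData(["x"]), iter([(7,)]))

    assert tuple(one_row().first()) == (7,)
    assert tuple(one_row().one()) == (7,)
    assert one_row().scalar_one() == 7
    assert one_row().scalar() == 7
    assert IteratorResult(SimpleResultMetaData(["x"]), iter([])).first() is None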
1280 def freeze(self):
1281 """Return a callable object that will produce copies of this
1282 :class:`_engine.Result` when invoked.
1284 The callable object returned is an instance of
1285 :class:`_engine.FrozenResult`.
1287 This is used for result set caching. The method must be called
1288 on the result when it has been unconsumed, and calling the method
1289 will consume the result fully. When the :class:`_engine.FrozenResult`
1290 is retrieved from a cache, it can be called any number of times where
1291 it will produce a new :class:`_engine.Result` object each time
1292 against its stored set of rows.
1294 .. seealso::
1296 :ref:`do_orm_execute_re_executing` - example usage within the
1297 ORM to implement a result-set cache.
1299 """
1301 return FrozenResult(self)
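# Illustrative sketch (not part of the original module): freeze() consumes
# the result once and returns a callable that replays it any number of times.
def _demo_freeze():  # pragma: no cover - illustrative only
    res = IteratorResult(SimpleResultMetaData(["x"]), iter([(1,), (2,)]))
    frozen = res.freeze()
    assert frozen().scalars().all() == [1, 2]
    assert frozen().scalars().all() == [1, 2]  # replayed from the stored rows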
1303 def merge(self, *others):
1304 """Merge this :class:`_engine.Result` with other compatible result
1305 objects.
1307 The object returned is an instance of :class:`_engine.MergedResult`,
1308 which will be composed of iterators from the given result
1309 objects.
1311 The new result will use the metadata from this result object.
1312 The subsequent result objects must be against an identical
1313 set of result / cursor metadata, otherwise the behavior is
1314 undefined.
1316 """
1317 return MergedResult(self._metadata, (self,) + others)
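# Illustrative sketch (not part of the original module): merge() chains the
# raw rows of several results that share compatible metadata into a single
# MergedResult.
def _demo_merge():  # pragma: no cover - illustrative only
    md = SimpleResultMetaData(["x"])
    r1 = IteratorResult(md, iter([(1,)]))
    r2 = IteratorResult(md, iter([(2,)]))
    assert r1.merge(r2).scalars().all() == [1, 2]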
1320class FilterResult(ResultInternal):
1321 """A wrapper for a :class:`_engine.Result` that returns objects other than
1322 :class:`_engine.Row` objects, such as dictionaries or scalar objects.
1324 :class:`_engine.FilterResult` is the common base for additional result
1325 APIs including :class:`_engine.MappingResult`,
1326 :class:`_engine.ScalarResult` and :class:`_engine.AsyncResult`.
1328 """
1330 _post_creational_filter = None
1332 @_generative
1333 def yield_per(self, num):
1334 """Configure the row-fetching strategy to fetch ``num`` rows at a time.
1336 The :meth:`_engine.FilterResult.yield_per` method is a pass through
1337 to the :meth:`_engine.Result.yield_per` method. See that method's
1338 documentation for usage notes.
1340 .. versionadded:: 1.4.40 - added :meth:`_engine.FilterResult.yield_per`
1341 so that the method is available on all result set implementations
1343 .. seealso::
1345 :ref:`engine_stream_results` - describes Core behavior for
1346 :meth:`_engine.Result.yield_per`
1348 :ref:`orm_queryguide_yield_per` - in the :ref:`queryguide_toplevel`
1350 """
1351 self._real_result = self._real_result.yield_per(num)
1353 def _soft_close(self, hard=False):
1354 self._real_result._soft_close(hard=hard)
1356 @property
1357 def _soft_closed(self):
1358 return self._real_result._soft_closed
1360 @property
1361 def closed(self):
1362 """Return ``True`` if the underlying :class:`_engine.Result` reports
1363 closed
1365 .. versionadded:: 1.4.43
1367 """
1368 return self._real_result.closed # type: ignore
1370 def close(self):
1371 """Close this :class:`_engine.FilterResult`.
1373 .. versionadded:: 1.4.43
1375 """
1376 self._real_result.close()
1378 @property
1379 def _attributes(self):
1380 return self._real_result._attributes
1382 def _fetchiter_impl(self):
1383 return self._real_result._fetchiter_impl()
1385 def _fetchone_impl(self, hard_close=False):
1386 return self._real_result._fetchone_impl(hard_close=hard_close)
1388 def _fetchall_impl(self):
1389 return self._real_result._fetchall_impl()
1391 def _fetchmany_impl(self, size=None):
1392 return self._real_result._fetchmany_impl(size=size)
1395class ScalarResult(FilterResult):
1396 """A wrapper for a :class:`_engine.Result` that returns scalar values
1397 rather than :class:`_row.Row` values.
1399 The :class:`_engine.ScalarResult` object is acquired by calling the
1400 :meth:`_engine.Result.scalars` method.
1402 A special limitation of :class:`_engine.ScalarResult` is that it has
1403 no ``fetchone()`` method; since the semantics of ``fetchone()`` are that
1404 the ``None`` value indicates no more results, it is not compatible
1405 with :class:`_engine.ScalarResult`, as there is no way to distinguish
1406 between ``None`` as a row value and ``None`` as an end-of-results indicator. Use
1407 ``next(result)`` to receive values individually.
1409 """
1411 _generate_rows = False
1413 def __init__(self, real_result, index):
1414 self._real_result = real_result
1416 if real_result._source_supports_scalars:
1417 self._metadata = real_result._metadata
1418 self._post_creational_filter = None
1419 else:
1420 self._metadata = real_result._metadata._reduce([index])
1421 self._post_creational_filter = operator.itemgetter(0)
1423 self._unique_filter_state = real_result._unique_filter_state
1425 def unique(self, strategy=None):
1426 """Apply unique filtering to the objects returned by this
1427 :class:`_engine.ScalarResult`.
1429 See :meth:`_engine.Result.unique` for usage details.
1431 """
1432 self._unique_filter_state = (set(), strategy)
1433 return self
1435 def partitions(self, size=None):
1436 """Iterate through sub-lists of elements of the size given.
1438 Equivalent to :meth:`_engine.Result.partitions` except that
1439 scalar values, rather than :class:`_engine.Row` objects,
1440 are returned.
1442 """
1444 getter = self._manyrow_getter
1446 while True:
1447 partition = getter(self, size)
1448 if partition:
1449 yield partition
1450 else:
1451 break
1453 def fetchall(self):
1454 """A synonym for the :meth:`_engine.ScalarResult.all` method."""
1456 return self._allrows()
1458 def fetchmany(self, size=None):
1459 """Fetch many objects.
1461 Equivalent to :meth:`_engine.Result.fetchmany` except that
1462 scalar values, rather than :class:`_engine.Row` objects,
1463 are returned.
1465 """
1466 return self._manyrow_getter(self, size)
1468 def all(self):
1469 """Return all scalar values in a list.
1471 Equivalent to :meth:`_engine.Result.all` except that
1472 scalar values, rather than :class:`_engine.Row` objects,
1473 are returned.
1475 """
1476 return self._allrows()
1478 def __iter__(self):
1479 return self._iter_impl()
1481 def __next__(self):
1482 return self._next_impl()
1484 if py2k:
1486 def next(self): # noqa
1487 return self._next_impl()
1489 def first(self):
1490 """Fetch the first object or ``None`` if no object is present.
1492 Equivalent to :meth:`_engine.Result.first` except that
1493 scalar values, rather than :class:`_engine.Row` objects,
1494 are returned.
1497 """
1498 return self._only_one_row(
1499 raise_for_second_row=False, raise_for_none=False, scalar=False
1500 )
1502 def one_or_none(self):
1503 """Return at most one object or raise an exception.
1505 Equivalent to :meth:`_engine.Result.one_or_none` except that
1506 scalar values, rather than :class:`_engine.Row` objects,
1507 are returned.
1509 """
1510 return self._only_one_row(
1511 raise_for_second_row=True, raise_for_none=False, scalar=False
1512 )
1514 def one(self):
1515 """Return exactly one object or raise an exception.
1517 Equivalent to :meth:`_engine.Result.one` except that
1518 scalar values, rather than :class:`_engine.Row` objects,
1519 are returned.
1521 """
1522 return self._only_one_row(
1523 raise_for_second_row=True, raise_for_none=True, scalar=False
1524 )
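# Illustrative sketch (not part of the original module) of the fetchone()
# limitation noted in the ScalarResult docstring: ``None`` can be a
# legitimate scalar value, so values are consumed with ``next(result)`` and
# exhaustion raises StopIteration.
def _demo_scalarresult_next():  # pragma: no cover - illustrative only
    res = IteratorResult(SimpleResultMetaData(["x"]), iter([(None,), (2,)])).scalars()
    assert next(res) is None  # a real None value, not end-of-results
    assert next(res) == 2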
1527class MappingResult(_WithKeys, FilterResult):
1528 """A wrapper for a :class:`_engine.Result` that returns dictionary values
1529 rather than :class:`_engine.Row` values.
1531 The :class:`_engine.MappingResult` object is acquired by calling the
1532 :meth:`_engine.Result.mappings` method.
1534 """
1536 _generate_rows = True
1538 _post_creational_filter = operator.attrgetter("_mapping")
1540 def __init__(self, result):
1541 self._real_result = result
1542 self._unique_filter_state = result._unique_filter_state
1543 self._metadata = result._metadata
1544 if result._source_supports_scalars:
1545 self._metadata = self._metadata._reduce([0])
1547 def unique(self, strategy=None):
1548 """Apply unique filtering to the objects returned by this
1549 :class:`_engine.MappingResult`.
1551 See :meth:`_engine.Result.unique` for usage details.
1553 """
1554 self._unique_filter_state = (set(), strategy)
1555 return self
1557 def columns(self, *col_expressions):
1558 r"""Establish the columns that should be returned in each row."""
1559 return self._column_slices(col_expressions)
1561 def partitions(self, size=None):
1562 """Iterate through sub-lists of elements of the size given.
1564 Equivalent to :meth:`_engine.Result.partitions` except that
1565 :class:`_engine.RowMapping` values, rather than :class:`_engine.Row`
1566 objects, are returned.
1568 """
1570 getter = self._manyrow_getter
1572 while True:
1573 partition = getter(self, size)
1574 if partition:
1575 yield partition
1576 else:
1577 break
1579 def fetchall(self):
1580 """A synonym for the :meth:`_engine.MappingResult.all` method."""
1582 return self._allrows()
1584 def fetchone(self):
1585 """Fetch one object.
1587 Equivalent to :meth:`_engine.Result.fetchone` except that
1588 :class:`_engine.RowMapping` values, rather than :class:`_engine.Row`
1589 objects, are returned.
1591 """
1593 row = self._onerow_getter(self)
1594 if row is _NO_ROW:
1595 return None
1596 else:
1597 return row
1599 def fetchmany(self, size=None):
1600 """Fetch many objects.
1602 Equivalent to :meth:`_engine.Result.fetchmany` except that
1603 :class:`_engine.RowMapping` values, rather than :class:`_engine.Row`
1604 objects, are returned.
1606 """
1608 return self._manyrow_getter(self, size)
1610 def all(self):
1611 """Return all scalar values in a list.
1613 Equivalent to :meth:`_engine.Result.all` except that
1614 :class:`_engine.RowMapping` values, rather than :class:`_engine.Row`
1615 objects, are returned.
1617 """
1619 return self._allrows()
1621 def __iter__(self):
1622 return self._iter_impl()
1624 def __next__(self):
1625 return self._next_impl()
1627 if py2k:
1629 def next(self): # noqa
1630 return self._next_impl()
1632 def first(self):
1633 """Fetch the first object or ``None`` if no object is present.
1635 Equivalent to :meth:`_engine.Result.first` except that
1636 :class:`_engine.RowMapping` values, rather than :class:`_engine.Row`
1637 objects, are returned.
1640 """
1641 return self._only_one_row(
1642 raise_for_second_row=False, raise_for_none=False, scalar=False
1643 )
1645 def one_or_none(self):
1646 """Return at most one object or raise an exception.
1648 Equivalent to :meth:`_engine.Result.one_or_none` except that
1649 :class:`_engine.RowMapping` values, rather than :class:`_engine.Row`
1650 objects, are returned.
1652 """
1653 return self._only_one_row(
1654 raise_for_second_row=True, raise_for_none=False, scalar=False
1655 )
1657 def one(self):
1658 """Return exactly one object or raise an exception.
1660 Equivalent to :meth:`_engine.Result.one` except that
1661 :class:`_engine.RowMapping` values, rather than :class:`_engine.Row`
1662 objects, are returned.
1664 """
1665 return self._only_one_row(
1666 raise_for_second_row=True, raise_for_none=True, scalar=False
1667 )
1670class FrozenResult(object):
1671 """Represents a :class:`_engine.Result` object in a "frozen" state suitable
1672 for caching.
1674 The :class:`_engine.FrozenResult` object is returned from the
1675 :meth:`_engine.Result.freeze` method of any :class:`_engine.Result`
1676 object.
1678 A new iterable :class:`_engine.Result` object is generated from a fixed
1679 set of data each time the :class:`_engine.FrozenResult` is invoked as
1680 a callable::
1683 result = connection.execute(query)
1685 frozen = result.freeze()
1687 unfrozen_result_one = frozen()
1689 for row in unfrozen_result_one:
1690 print(row)
1692 unfrozen_result_two = frozen()
1693 rows = unfrozen_result_two.all()
1695 # ... etc
1697 .. versionadded:: 1.4
1699 .. seealso::
1701 :ref:`do_orm_execute_re_executing` - example usage within the
1702 ORM to implement a result-set cache.
1704 :func:`_orm.loading.merge_frozen_result` - ORM function to merge
1705 a frozen result back into a :class:`_orm.Session`.
1707 """
1709 def __init__(self, result):
1710 self.metadata = result._metadata._for_freeze()
1711 self._source_supports_scalars = result._source_supports_scalars
1712 self._attributes = result._attributes
1714 if self._source_supports_scalars:
1715 self.data = list(result._raw_row_iterator())
1716 else:
1717 self.data = result.fetchall()
1719 def rewrite_rows(self):
1720 if self._source_supports_scalars:
1721 return [[elem] for elem in self.data]
1722 else:
1723 return [list(row) for row in self.data]
1725 def with_new_rows(self, tuple_data):
1726 fr = FrozenResult.__new__(FrozenResult)
1727 fr.metadata = self.metadata
1728 fr._attributes = self._attributes
1729 fr._source_supports_scalars = self._source_supports_scalars
1731 if self._source_supports_scalars:
1732 fr.data = [d[0] for d in tuple_data]
1733 else:
1734 fr.data = tuple_data
1735 return fr
1737 def __call__(self):
1738 result = IteratorResult(self.metadata, iter(self.data))
1739 result._attributes = self._attributes
1740 result._source_supports_scalars = self._source_supports_scalars
1741 return result
1744class IteratorResult(Result):
1745 """A :class:`_engine.Result` that gets data from a Python iterator of
1746 :class:`_engine.Row` objects or similar row-like data.
1748 .. versionadded:: 1.4
1750 """
1752 _hard_closed = False
1753 _soft_closed = False
1755 def __init__(
1756 self,
1757 cursor_metadata,
1758 iterator,
1759 raw=None,
1760 _source_supports_scalars=False,
1761 ):
1762 self._metadata = cursor_metadata
1763 self.iterator = iterator
1764 self.raw = raw
1765 self._source_supports_scalars = _source_supports_scalars
1767 @property
1768 def closed(self):
1769 """Return ``True`` if this :class:`_engine.IteratorResult` has
1770 been closed
1772 .. versionadded:: 1.4.43
1774 """
1775 return self._hard_closed
1777 def _soft_close(self, hard=False, **kw):
1778 if hard:
1779 self._hard_closed = True
1780 if self.raw is not None:
1781 self.raw._soft_close(hard=hard, **kw)
1782 self.iterator = iter([])
1783 self._reset_memoizations()
1784 self._soft_closed = True
1786 def _raise_hard_closed(self):
1787 raise exc.ResourceClosedError("This result object is closed.")
1789 def _raw_row_iterator(self):
1790 return self.iterator
1792 def _fetchiter_impl(self):
1793 if self._hard_closed:
1794 self._raise_hard_closed()
1795 return self.iterator
1797 def _fetchone_impl(self, hard_close=False):
1798 if self._hard_closed:
1799 self._raise_hard_closed()
1801 row = next(self.iterator, _NO_ROW)
1802 if row is _NO_ROW:
1803 self._soft_close(hard=hard_close)
1804 return None
1805 else:
1806 return row
1808 def _fetchall_impl(self):
1809 if self._hard_closed:
1810 self._raise_hard_closed()
1812 try:
1813 return list(self.iterator)
1814 finally:
1815 self._soft_close()
1817 def _fetchmany_impl(self, size=None):
1818 if self._hard_closed:
1819 self._raise_hard_closed()
1821 return list(itertools.islice(self.iterator, 0, size))
1824def null_result():
1825 return IteratorResult(SimpleResultMetaData([]), iter([]))
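# Illustrative sketch (not part of the original module): null_result()
# produces an empty result with no columns and no rows.
def _demo_null_result():  # pragma: no cover - illustrative only
    assert null_result().all() == []
    assert null_result().first() is None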
1828class ChunkedIteratorResult(IteratorResult):
1829 """An :class:`_engine.IteratorResult` that works from an
1830 iterator-producing callable.
1832 The given ``chunks`` argument is a function that is given a number of rows
1833 to return in each chunk, or ``None`` for all rows. The function should
1834 then return an un-consumed iterator of lists, each list of the requested
1835 size.
1837 The function can be called again at any time, in which case it should
1838 continue from the same result set but adjust the chunk size as given.
1840 .. versionadded:: 1.4
1842 """
1844 def __init__(
1845 self,
1846 cursor_metadata,
1847 chunks,
1848 source_supports_scalars=False,
1849 raw=None,
1850 dynamic_yield_per=False,
1851 ):
1852 self._metadata = cursor_metadata
1853 self.chunks = chunks
1854 self._source_supports_scalars = source_supports_scalars
1855 self.raw = raw
1856 self.iterator = itertools.chain.from_iterable(self.chunks(None))
1857 self.dynamic_yield_per = dynamic_yield_per
1859 @_generative
1860 def yield_per(self, num):
1861 # TODO: this throws away the iterator which may be holding
1862 # onto a chunk. the yield_per cannot be changed once any
1863 # rows have been fetched. either find a way to enforce this,
1864 # or we can't use itertools.chain and will instead have to
1865 # keep track.
1867 self._yield_per = num
1868 self.iterator = itertools.chain.from_iterable(self.chunks(num))
1870 def _soft_close(self, **kw):
1871 super(ChunkedIteratorResult, self)._soft_close(**kw)
1872 self.chunks = lambda size: []
1874 def _fetchmany_impl(self, size=None):
1875 if self.dynamic_yield_per:
1876 self.iterator = itertools.chain.from_iterable(self.chunks(size))
1877 return super(ChunkedIteratorResult, self)._fetchmany_impl(size=size)
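# Illustrative sketch (not part of the original module) of the ``chunks``
# contract described in the ChunkedIteratorResult docstring: a callable that,
# given a chunk size (or None for everything), returns an iterator of lists
# of rows.
def _demo_chunked_iterator_result():  # pragma: no cover - illustrative only
    data = [(i,) for i in range(5)]

    def chunks(size):
        if size is None:
            return iter([list(data)])
        return iter([data[i:i + size] for i in range(0, len(data), size)])

    res = ChunkedIteratorResult(SimpleResultMetaData(["x"]), chunks)
    assert res.scalars().all() == [0, 1, 2, 3, 4]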
1880class MergedResult(IteratorResult):
1881 """A :class:`_engine.Result` that is merged from any number of
1882 :class:`_engine.Result` objects.
1884 Returned by the :meth:`_engine.Result.merge` method.
1886 .. versionadded:: 1.4
1888 """
1890 closed = False
1892 def __init__(self, cursor_metadata, results):
1893 self._results = results
1894 super(MergedResult, self).__init__(
1895 cursor_metadata,
1896 itertools.chain.from_iterable(
1897 r._raw_row_iterator() for r in results
1898 ),
1899 )
1901 self._unique_filter_state = results[0]._unique_filter_state
1902 self._yield_per = results[0]._yield_per
1904 # going to try something w/ this in next rev
1905 self._source_supports_scalars = results[0]._source_supports_scalars
1907 self._attributes = self._attributes.merge_with(
1908 *[r._attributes for r in results]
1909 )
1911 def _soft_close(self, hard=False, **kw):
1912 for r in self._results:
1913 r._soft_close(hard=hard, **kw)
1914 if hard:
1915 self.closed = True