# engine/result.py
# Copyright (C) 2005-2021 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""Define result set constructs including :class:`_engine.ResultProxy`
and :class:`.RowProxy`."""


import collections
import operator

from .. import exc
from .. import util
from ..sql import expression
from ..sql import sqltypes
from ..sql import util as sql_util


# This reconstructor is necessary so that pickles with the C extension or
# without use the same Binary format.
try:
    # We need a different reconstructor on the C extension so that we can
    # add extra checks that fields have correctly been initialized by
    # __setstate__.
    from sqlalchemy.cresultproxy import safe_rowproxy_reconstructor

    # The extra function embedding is needed so that the
    # reconstructor function has the same signature whether or not
    # the extension is present.
    def rowproxy_reconstructor(cls, state):
        return safe_rowproxy_reconstructor(cls, state)


except ImportError:

    def rowproxy_reconstructor(cls, state):
        obj = cls.__new__(cls)
        obj.__setstate__(state)
        return obj


try:
    from sqlalchemy.cresultproxy import BaseRowProxy

    _baserowproxy_usecext = True
except ImportError:
    _baserowproxy_usecext = False

    class BaseRowProxy(object):
        __slots__ = ("_parent", "_row", "_processors", "_keymap")

        def __init__(self, parent, row, processors, keymap):
            """RowProxy objects are constructed by ResultProxy objects."""

            self._parent = parent
            self._row = row
            self._processors = processors
            self._keymap = keymap

        def __reduce__(self):
            return (
                rowproxy_reconstructor,
                (self.__class__, self.__getstate__()),
            )

        def values(self):
            """Return the values represented by this RowProxy as a list."""
            return list(self)

        def __iter__(self):
            for processor, value in zip(self._processors, self._row):
                if processor is None:
                    yield value
                else:
                    yield processor(value)

        def __len__(self):
            return len(self._row)

        def __getitem__(self, key):
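            # records in _keymap are (processor, obj, index) tuples; an
            # unhashable key such as a slice raises TypeError from the
            # lookup, which is handled below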
            try:
                processor, obj, index = self._keymap[key]
            except KeyError as err:
                processor, obj, index = self._parent._key_fallback(key, err)
            except TypeError:
                if isinstance(key, slice):
                    l = []
                    for processor, value in zip(
                        self._processors[key], self._row[key]
                    ):
                        if processor is None:
                            l.append(value)
                        else:
                            l.append(processor(value))
                    return tuple(l)
                else:
                    raise
            if index is None:
                raise exc.InvalidRequestError(
                    "Ambiguous column name '%s' in "
                    "result set column descriptions" % obj
                )
            if processor is not None:
                return processor(self._row[index])
            else:
                return self._row[index]

        def __getattr__(self, name):
            try:
                return self[name]
            except KeyError as e:
                util.raise_(AttributeError(e.args[0]), replace_context=e)


class RowProxy(BaseRowProxy):
    """Represent a single result row.

    The :class:`.RowProxy` object is retrieved from a database result via
    the :class:`_engine.ResultProxy` object, using methods such as
    :meth:`_engine.ResultProxy.fetchall`.

    The :class:`.RowProxy` object seeks to act mostly like a Python named
    tuple, but also provides some Python dictionary behaviors at the same
    time.

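    For example, given a hypothetical ``users`` table and a
    :class:`_engine.Connection` named ``connection``::

        row = connection.execute(users.select()).fetchone()

        row[0]            # tuple-style access by position
        row["user_name"]  # mapping-style access by string key
        row.user_name     # attribute access by column name
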
    .. seealso::

        :ref:`coretutorial_selecting` - includes examples of selecting
        rows from SELECT statements.

    """

    __slots__ = ()

    def __contains__(self, key):
        return self._parent._has_key(key)

    def __getstate__(self):
        return {"_parent": self._parent, "_row": tuple(self)}

    def __setstate__(self, state):
        self._parent = parent = state["_parent"]
        self._row = state["_row"]
        self._processors = parent._processors
        self._keymap = parent._keymap

    __hash__ = None

    def _op(self, other, op):
        return (
            op(tuple(self), tuple(other))
            if isinstance(other, RowProxy)
            else op(tuple(self), other)
        )

    def __lt__(self, other):
        return self._op(other, operator.lt)

    def __le__(self, other):
        return self._op(other, operator.le)

    def __ge__(self, other):
        return self._op(other, operator.ge)

    def __gt__(self, other):
        return self._op(other, operator.gt)

    def __eq__(self, other):
        return self._op(other, operator.eq)

    def __ne__(self, other):
        return self._op(other, operator.ne)

    def __repr__(self):
        return repr(sql_util._repr_row(self))

    def has_key(self, key):
        """Return True if this :class:`.RowProxy` contains the given key.

        Through the SQLAlchemy 1.x series, the ``__contains__()`` method
        of :class:`.RowProxy` also links to :meth:`.RowProxy.has_key`, in
        that an expression such as::

            "some_col" in row

        will return True if the row contains a column named ``"some_col"``,
        in the way that a Python mapping works.

        However, it is planned that the 2.0 series of SQLAlchemy will
        reverse this behavior so that ``__contains__()`` will refer to a
        value being present in the row, in the way that a Python tuple
        works.

        """

        return self._parent._has_key(key)

    def items(self):
        """Return a list of tuples, each tuple containing a key/value pair.

        This method is analogous to the Python dictionary ``.items()``
        method, except that it returns a list, not an iterator.

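        E.g., with illustrative column names and values::

            >>> row.items()
            [('id', 1), ('name', 'ed')]
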
        """

        return [(key, self[key]) for key in self.keys()]

    def keys(self):
        """Return the list of keys as strings represented by this
        :class:`.RowProxy`.

        This method is analogous to the Python dictionary ``.keys()``
        method, except that it returns a list, not an iterator.

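        E.g., with illustrative column names::

            >>> row.keys()
            ['id', 'name']
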
        """

        return self._parent.keys

    def iterkeys(self):
        """Return an iterator against the :meth:`.RowProxy.keys` method.

        This method is analogous to the Python-2-only dictionary
        ``.iterkeys()`` method.

        """
        return iter(self._parent.keys)

    def itervalues(self):
        """Return an iterator against the :meth:`.RowProxy.values` method.

        This method is analogous to the Python-2-only dictionary
        ``.itervalues()`` method.

        """
        return iter(self)

    def values(self):
        """Return the values represented by this :class:`.RowProxy` as a
        list.

        This method is analogous to the Python dictionary ``.values()``
        method, except that it returns a list, not an iterator.

        """
        return super(RowProxy, self).values()


try:
    # Register RowProxy with Sequence,
    # so sequence protocol is implemented
    util.collections_abc.Sequence.register(RowProxy)
except ImportError:
    pass


class ResultMetaData(object):
    """Handle cursor.description, applying additional info from an execution
    context."""

    __slots__ = (
        "_keymap",
        "case_sensitive",
        "matched_on_name",
        "_processors",
        "keys",
        "_orig_processors",
    )

    def __init__(self, parent, cursor_description):
        context = parent.context
        dialect = context.dialect
        self.case_sensitive = dialect.case_sensitive
        self.matched_on_name = False
        self._orig_processors = None

        if context.result_column_struct:
            (
                result_columns,
                cols_are_ordered,
                textual_ordered,
            ) = context.result_column_struct
            num_ctx_cols = len(result_columns)
        else:
            result_columns = (
                cols_are_ordered
            ) = num_ctx_cols = textual_ordered = False

        # merge cursor.description with the column info
        # present in the compiled structure, if any
        raw = self._merge_cursor_description(
            context,
            cursor_description,
            result_columns,
            num_ctx_cols,
            cols_are_ordered,
            textual_ordered,
        )

        self._keymap = {}
        if not _baserowproxy_usecext:
            # keymap indexes by integer index: this is only used
            # in the pure Python BaseRowProxy.__getitem__
            # implementation to avoid an expensive
            # isinstance(key, util.int_types) in the most common
            # case path

            len_raw = len(raw)

            self._keymap.update(
                [(elem[0], (elem[3], elem[4], elem[0])) for elem in raw]
                + [
                    (elem[0] - len_raw, (elem[3], elem[4], elem[0]))
                    for elem in raw
                ]
            )

        # processors in key order for certain per-row
        # views like __iter__ and slices
        self._processors = [elem[3] for elem in raw]

        # keymap by primary string...
        by_key = dict([(elem[2], (elem[3], elem[4], elem[0])) for elem in raw])

        # for compiled SQL constructs, copy additional lookup keys into
        # the key lookup map, such as Column objects, labels,
        # column keys and other names
        if num_ctx_cols:

            # if the by-primary-string dictionary is smaller (or larger)
            # than the number of columns, assume we have dupes; rewrite
            # the dupe records with "None" for index, which results in an
            # ambiguous column exception when accessed.
            if len(by_key) != num_ctx_cols:
                seen = set()
                for rec in raw:
                    key = rec[1]
                    if key in seen:
                        # this is an "ambiguous" element, replacing
                        # the full record in the map
                        key = key.lower() if not self.case_sensitive else key
                        by_key[key] = (None, key, None)
                    seen.add(key)

                # copy secondary elements from compiled columns
                # into self._keymap, write in the potentially "ambiguous"
                # element
                self._keymap.update(
                    [
                        (obj_elem, by_key[elem[2]])
                        for elem in raw
                        if elem[4]
                        for obj_elem in elem[4]
                    ]
                )

                # if we did a pure positional match, then reset the
                # original "expression element" back to the "unambiguous"
                # entry.  This is a new behavior in 1.1 which impacts
                # TextAsFrom but also straight compiled SQL constructs.
                if not self.matched_on_name:
                    self._keymap.update(
                        [
                            (elem[4][0], (elem[3], elem[4], elem[0]))
                            for elem in raw
                            if elem[4]
                        ]
                    )
            else:
                # no dupes - copy secondary elements from compiled
                # columns into self._keymap
                self._keymap.update(
                    [
                        (obj_elem, (elem[3], elem[4], elem[0]))
                        for elem in raw
                        if elem[4]
                        for obj_elem in elem[4]
                    ]
                )

        # update keymap with primary string names taking
        # precedence
        self._keymap.update(by_key)

        # update keymap with "translated" names (sqlite-only thing)
        if not num_ctx_cols and context._translate_colname:
            self._keymap.update(
                [(elem[5], self._keymap[elem[2]]) for elem in raw if elem[5]]
            )

    def _merge_cursor_description(
        self,
        context,
        cursor_description,
        result_columns,
        num_ctx_cols,
        cols_are_ordered,
        textual_ordered,
    ):
        """Merge a cursor.description with compiled result column information.

        There are at least four separate strategies used here, selected
        depending on the type of SQL construct used to start with.

        The most common case is that of the compiled SQL expression construct,
        which generated the column names present in the raw SQL string and
        which has the identical number of columns as were reported by
        cursor.description.  In this case, we assume a 1-1 positional mapping
        between the entries in cursor.description and the compiled object.
        This is also the most performant case as we disregard extracting /
        decoding the column names present in cursor.description since we
        already have the desired name we generated in the compiled SQL
        construct.

        The next common case is that of the completely raw string SQL,
        such as passed to connection.execute().  In this case we have no
        compiled construct to work with, so we extract and decode the
        names from cursor.description and index those as the primary
        result row target keys.

        The remaining fairly common case is that of the textual SQL
        that includes at least partial column information; this is when
        we use a :class:`.TextAsFrom` construct.  This construct may have
        unordered or ordered column information.  In the ordered case, we
        merge the cursor.description and the compiled construct's information
        positionally, and warn if there are additional description names
        present, however we still decode the names in cursor.description
        as we don't have a guarantee that the names in the columns match
        on these.  In the unordered case, we match names in cursor.description
        to that of the compiled construct based on name matching.
        In both of these cases, the cursor.description names and the column
        expression objects and names are indexed as result row target keys.

        The final case is much less common, where we have a compiled
        non-textual SQL expression construct, but the number of columns
        in cursor.description doesn't match what's in the compiled
        construct.  We make the guess here that there might be textual
        column expressions in the compiled construct that themselves include
        a comma in them causing them to split.  We do the same name-matching
        as with textual non-ordered columns.

        The name-matched system of merging is the same as that used by
        SQLAlchemy for all cases up through the 0.9 series.  Positional
        matching for compiled SQL expressions was introduced in 1.0 as a
        major performance feature, and positional matching for textual
        :class:`.TextAsFrom` objects in 1.1.  As name matching is no longer
        a common case, it was acceptable to factor it into smaller
        generator-oriented methods that are easier to understand, but incur
        slightly more performance overhead.

        """

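        # each record produced below is a tuple of the form
        # (raw_index, compiled_key, string_key, processor, obj, untranslated);
        # __init__ above relies on this layout when building _keymap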
        case_sensitive = context.dialect.case_sensitive

        if (
            num_ctx_cols
            and cols_are_ordered
            and not textual_ordered
            and num_ctx_cols == len(cursor_description)
        ):
            self.keys = [elem[0] for elem in result_columns]
            # pure positional 1-1 case; doesn't need to read
            # the names from cursor.description
            return [
                (
                    idx,
                    key,
                    name.lower() if not case_sensitive else name,
                    context.get_result_processor(
                        type_, key, cursor_description[idx][1]
                    ),
                    obj,
                    None,
                )
                for idx, (key, name, obj, type_) in enumerate(result_columns)
            ]
        else:
            # name-based or text-positional cases, where we need
            # to read cursor.description names
            if textual_ordered:
                # textual positional case
                raw_iterator = self._merge_textual_cols_by_position(
                    context, cursor_description, result_columns
                )
            elif num_ctx_cols:
                # compiled SQL with a mismatch of description cols
                # vs. compiled cols, or textual w/ unordered columns
                raw_iterator = self._merge_cols_by_name(
                    context, cursor_description, result_columns
                )
            else:
                # no compiled SQL, just a raw string
                raw_iterator = self._merge_cols_by_none(
                    context, cursor_description
                )

            return [
                (
                    idx,
                    colname,
                    colname,
                    context.get_result_processor(
                        mapped_type, colname, coltype
                    ),
                    obj,
                    untranslated,
                )
                for (
                    idx,
                    colname,
                    mapped_type,
                    coltype,
                    obj,
                    untranslated,
                ) in raw_iterator
            ]

    def _colnames_from_description(self, context, cursor_description):
        """Extract column names and data types from a cursor.description.

        Applies unicode decoding, column translation, "normalization",
        and case sensitivity rules to the names based on the dialect.

        """

        dialect = context.dialect
        case_sensitive = dialect.case_sensitive
        translate_colname = context._translate_colname
        description_decoder = (
            dialect._description_decoder
            if dialect.description_encoding
            else None
        )
        normalize_name = (
            dialect.normalize_name if dialect.requires_name_normalize else None
        )
        untranslated = None

        self.keys = []

        for idx, rec in enumerate(cursor_description):
            colname = rec[0]
            coltype = rec[1]

            if description_decoder:
                colname = description_decoder(colname)

            if translate_colname:
                colname, untranslated = translate_colname(colname)

            if normalize_name:
                colname = normalize_name(colname)

            self.keys.append(colname)
            if not case_sensitive:
                colname = colname.lower()

            yield idx, colname, untranslated, coltype

    def _merge_textual_cols_by_position(
        self, context, cursor_description, result_columns
    ):
        num_ctx_cols = len(result_columns) if result_columns else None

        if num_ctx_cols > len(cursor_description):
            util.warn(
                "Number of columns in the result (%d) is "
                "smaller than the number of columns requested "
                "in textual SQL (%d)"
                % (len(cursor_description), num_ctx_cols)
            )
        seen = set()
        for (
            idx,
            colname,
            untranslated,
            coltype,
        ) in self._colnames_from_description(context, cursor_description):
            if idx < num_ctx_cols:
                ctx_rec = result_columns[idx]
                obj = ctx_rec[2]
                mapped_type = ctx_rec[3]
                if obj[0] in seen:
                    raise exc.InvalidRequestError(
                        "Duplicate column expression requested "
                        "in textual SQL: %r" % obj[0]
                    )
                seen.add(obj[0])
            else:
                mapped_type = sqltypes.NULLTYPE
                obj = None

            yield idx, colname, mapped_type, coltype, obj, untranslated

    def _merge_cols_by_name(self, context, cursor_description, result_columns):
        dialect = context.dialect
        case_sensitive = dialect.case_sensitive
        result_map = self._create_result_map(result_columns, case_sensitive)

        self.matched_on_name = True
        for (
            idx,
            colname,
            untranslated,
            coltype,
        ) in self._colnames_from_description(context, cursor_description):
            try:
                ctx_rec = result_map[colname]
            except KeyError:
                mapped_type = sqltypes.NULLTYPE
                obj = None
            else:
                obj = ctx_rec[1]
                mapped_type = ctx_rec[2]
            yield idx, colname, mapped_type, coltype, obj, untranslated

    def _merge_cols_by_none(self, context, cursor_description):
        for (
            idx,
            colname,
            untranslated,
            coltype,
        ) in self._colnames_from_description(context, cursor_description):
            yield idx, colname, sqltypes.NULLTYPE, coltype, None, untranslated

    @classmethod
    def _create_result_map(cls, result_columns, case_sensitive=True):
        d = {}
        for elem in result_columns:
            key, rec = elem[0], elem[1:]
            if not case_sensitive:
                key = key.lower()
            if key in d:
                # conflicting keyname, just double up the list
                # of objects.  this will cause an "ambiguous name"
                # error if an attempt is made by the result set to
                # access.
                e_name, e_obj, e_type = d[key]
                d[key] = e_name, e_obj + rec[1], e_type
            else:
                d[key] = rec
        return d

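    # records in _keymap are (processor, obj, index) tuples; _key_fallback
    # resolves keys that aren't present in the map directly, such as
    # ColumnElement objects matched by label or name, and caches the
    # result back into the map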
    def _key_fallback(self, key, err, raiseerr=True):
        map_ = self._keymap
        result = None
        if isinstance(key, util.string_types):
            result = map_.get(key if self.case_sensitive else key.lower())
        # fallback for targeting a ColumnElement to a textual expression
        # this is a rare use case which only occurs when matching text()
        # or column('name') constructs to ColumnElements, or after a
        # pickle/unpickle roundtrip
        elif isinstance(key, expression.ColumnElement):
            if (
                key._label
                and (key._label if self.case_sensitive else key._label.lower())
                in map_
            ):
                result = map_[
                    key._label if self.case_sensitive else key._label.lower()
                ]
            elif (
                hasattr(key, "name")
                and (key.name if self.case_sensitive else key.name.lower())
                in map_
            ):
                # match is only on name.
                result = map_[
                    key.name if self.case_sensitive else key.name.lower()
                ]
            # search extra hard to make sure this
            # isn't a column/label name overlap.
            # this check isn't currently available if the row
            # was unpickled.
            if result is not None and result[1] is not None:
                for obj in result[1]:
                    if key._compare_name_for_result(obj):
                        break
                else:
                    result = None
        if result is None:
            if raiseerr:
                util.raise_(
                    exc.NoSuchColumnError(
                        "Could not locate column in row for column '%s'"
                        % expression._string_or_unprintable(key)
                    ),
                    replace_context=err,
                )
            else:
                return None
        else:
            map_[key] = result
            return result

    def _has_key(self, key):
        if key in self._keymap:
            return True
        else:
            return self._key_fallback(key, None, False) is not None

    def _getter(self, key, raiseerr=True):
        if key in self._keymap:
            processor, obj, index = self._keymap[key]
        else:
            ret = self._key_fallback(key, None, raiseerr)
            if ret is None:
                return None
            processor, obj, index = ret

        if index is None:
            util.raise_(
                exc.InvalidRequestError(
                    "Ambiguous column name '%s' in "
                    "result set column descriptions" % obj
                ),
                from_=None,
            )

        return operator.itemgetter(index)

    def __getstate__(self):
        return {
            "_pickled_keymap": dict(
                (key, index)
                for key, (processor, obj, index) in self._keymap.items()
                if isinstance(key, util.string_types + util.int_types)
            ),
            "keys": self.keys,
            "case_sensitive": self.case_sensitive,
            "matched_on_name": self.matched_on_name,
        }

    def __setstate__(self, state):
        # the row has been processed at pickling time so we don't need any
        # processor anymore
        self._processors = [None for _ in range(len(state["keys"]))]
        self._keymap = keymap = {}
        for key, index in state["_pickled_keymap"].items():
            # not preserving "obj" here, unfortunately our
            # proxy comparison fails with the unpickle
            keymap[key] = (None, None, index)
        self.keys = state["keys"]
        self.case_sensitive = state["case_sensitive"]
        self.matched_on_name = state["matched_on_name"]


class ResultProxy(object):
    """A facade around a DBAPI cursor object.

    Returns database rows via the :class:`.RowProxy` class, which provides
    additional API features and behaviors on top of the raw data returned
    by the DBAPI.

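    A minimal illustration, assuming a hypothetical ``users`` table and a
    :class:`_engine.Connection` named ``connection``::

        result = connection.execute(users.select())
        for row in result:
            print(row["user_name"])
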
    .. seealso::

        :ref:`coretutorial_selecting` - introductory material for accessing
        :class:`_engine.ResultProxy` and :class:`.RowProxy` objects.

    """

    _process_row = RowProxy
    out_parameters = None
    _autoclose_connection = False
    _metadata = None
    _soft_closed = False
    closed = False

    def __init__(self, context):
        self.context = context
        self.dialect = context.dialect
        self.cursor = self._saved_cursor = context.cursor
        self.connection = context.root_connection
        self._echo = (
            self.connection._echo and context.engine._should_log_debug()
        )
        self._init_metadata()

    def _getter(self, key, raiseerr=True):
        try:
            getter = self._metadata._getter
        except AttributeError as err:
            return self._non_result(None, err)
        else:
            return getter(key, raiseerr)

    def _has_key(self, key):
        try:
            has_key = self._metadata._has_key
        except AttributeError as err:
            return self._non_result(None, err)
        else:
            return has_key(key)

    def _init_metadata(self):
        cursor_description = self._cursor_description()
        if cursor_description is not None:
            if (
                self.context.compiled
                and "compiled_cache" in self.context.execution_options
            ):
                if self.context.compiled._cached_metadata:
                    self._metadata = self.context.compiled._cached_metadata
                else:
                    self._metadata = (
                        self.context.compiled._cached_metadata
                    ) = ResultMetaData(self, cursor_description)
            else:
                self._metadata = ResultMetaData(self, cursor_description)
            if self._echo:
                self.context.engine.logger.debug(
                    "Col %r", tuple(x[0] for x in cursor_description)
                )

    def keys(self):
        """Return the list of string keys that would be represented by each
        :class:`.RowProxy`."""

        if self._metadata:
            return self._metadata.keys
        else:
            return []

    @util.memoized_property
    def rowcount(self):
        """Return the 'rowcount' for this result.

        The 'rowcount' reports the number of rows *matched*
        by the WHERE criterion of an UPDATE or DELETE statement.

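        E.g., with ``users`` as a hypothetical table::

            result = connection.execute(
                users.update()
                .where(users.c.name == "jack")
                .values(name="ed")
            )
            matched = result.rowcount
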
        .. note::

           Notes regarding :attr:`_engine.ResultProxy.rowcount`:


           * This attribute returns the number of rows *matched*,
             which is not necessarily the same as the number of rows
             that were actually *modified* - an UPDATE statement, for
             example, may have no net change on a given row if the SET
             values given are the same as those present in the row already.
             Such a row would be matched but not modified.
             On backends that feature both styles, such as MySQL,
             rowcount is configured by default to return the match
             count in all cases.

           * :attr:`_engine.ResultProxy.rowcount` is *only* useful in
             conjunction with an UPDATE or DELETE statement.  Contrary to
             what the Python DBAPI says, it does *not* return the
             number of rows available from the results of a SELECT statement
             as DBAPIs cannot support this functionality when rows are
             unbuffered.

           * :attr:`_engine.ResultProxy.rowcount` may not be fully
             implemented by all dialects.  In particular, most DBAPIs do
             not support an aggregate rowcount result from an executemany
             call.  The :meth:`_engine.ResultProxy.supports_sane_rowcount`
             and :meth:`_engine.ResultProxy.supports_sane_multi_rowcount`
             methods will report from the dialect if each usage is known
             to be supported.

           * Statements that use RETURNING may not return a correct
             rowcount.

        """
        try:
            return self.context.rowcount
        except BaseException as e:
            self.connection._handle_dbapi_exception(
                e, None, None, self.cursor, self.context
            )

    @property
    def lastrowid(self):
        """Return the 'lastrowid' accessor on the DBAPI cursor.

        This is a DBAPI-specific method and is only functional
        for those backends which support it, for statements
        where it is appropriate.  Its behavior is not
        consistent across backends.

        Usage of this method is normally unnecessary when
        using insert() expression constructs; the
        :attr:`~ResultProxy.inserted_primary_key` attribute provides a
        tuple of primary key values for a newly inserted row,
        regardless of database backend.

        """
        try:
            return self._saved_cursor.lastrowid
        except BaseException as e:
            self.connection._handle_dbapi_exception(
                e, None, None, self._saved_cursor, self.context
            )

    @property
    def returns_rows(self):
        """True if this :class:`_engine.ResultProxy` returns rows.

        I.e. if it is legal to call the methods
        :meth:`_engine.ResultProxy.fetchone`,
        :meth:`_engine.ResultProxy.fetchmany`, and
        :meth:`_engine.ResultProxy.fetchall`.

        """
        return self._metadata is not None

    @property
    def is_insert(self):
        """True if this :class:`_engine.ResultProxy` is the result
        of executing an expression language compiled
        :func:`_expression.insert` construct.

        When True, this implies that the
        :attr:`inserted_primary_key` attribute is accessible,
        assuming the statement did not include
        a user defined "returning" construct.

        """
        return self.context.isinsert

    def _cursor_description(self):
        """May be overridden by subclasses."""

        return self._saved_cursor.description

    def _soft_close(self):
        """Soft close this :class:`_engine.ResultProxy`.

        This releases all DBAPI cursor resources, but leaves the
        ResultProxy "open" from a semantic perspective, meaning the
        fetchXXX() methods will continue to return empty results.

        This method is called automatically when:

        * all result rows are exhausted using the fetchXXX() methods.
        * cursor.description is None.

        This method is **not public**, but is documented in order to clarify
        the "autoclose" process used.

        .. versionadded:: 1.0.0

        .. seealso::

            :meth:`_engine.ResultProxy.close`


        """
        if self._soft_closed:
            return
        self._soft_closed = True
        cursor = self.cursor
        self.connection._safe_close_cursor(cursor)
        if self._autoclose_connection:
            self.connection.close()
        self.cursor = None

    def close(self):
        """Close this ResultProxy.

        This closes out the underlying DBAPI cursor corresponding to the
        statement execution, if one is still present.  Note that the DBAPI
        cursor is automatically released when the
        :class:`_engine.ResultProxy` exhausts all available rows.
        :meth:`_engine.ResultProxy.close` is generally an optional method
        except in the case when discarding a :class:`_engine.ResultProxy`
        that still has additional rows pending for fetch.

        In the case of a result that is the product of
        :ref:`connectionless execution <dbengine_implicit>`,
        the underlying :class:`_engine.Connection` object is also closed,
        which :term:`releases` DBAPI connection resources.

        After this method is called, it is no longer valid to call upon
        the fetch methods, which will raise a :class:`.ResourceClosedError`
        on subsequent use.

        .. versionchanged:: 1.0.0 - the :meth:`_engine.ResultProxy.close`
           method has been separated out from the process that releases the
           underlying DBAPI cursor resource.  The "auto close" feature of
           the :class:`_engine.Connection` now performs a so-called "soft
           close", which releases the underlying DBAPI cursor, but allows
           the :class:`_engine.ResultProxy` to still behave as an
           open-but-exhausted result set; the actual
           :meth:`_engine.ResultProxy.close` method is never called.  It is
           still safe to discard a :class:`_engine.ResultProxy` that has
           been fully exhausted without calling this method.

        .. seealso::

            :ref:`connections_toplevel`

        """

        if not self.closed:
            self._soft_close()
            self.closed = True

    def __iter__(self):
        """Implement iteration protocol."""

        while True:
            row = self.fetchone()
            if row is None:
                return
            else:
                yield row

    def __next__(self):
        """Implement the Python next() protocol.

        This method, mirrored as both ``.next()`` and ``.__next__()``, is part
        of Python's API for producing iterator-like behavior.

        .. versionadded:: 1.2

        """
        row = self.fetchone()
        if row is None:
            raise StopIteration()
        else:
            return row

    next = __next__

    @util.memoized_property
    def inserted_primary_key(self):
        """Return the primary key for the row just inserted.

        The return value is a list of scalar values
        corresponding to the list of primary key columns
        in the target table.

        This only applies to single row :func:`_expression.insert`
        constructs which did not explicitly specify
        :meth:`_expression.Insert.returning`.

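        E.g., assuming a hypothetical ``users`` table with an integer
        primary key::

            result = connection.execute(users.insert().values(name="ed"))
            pk = result.inserted_primary_key  # e.g. [1]
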
        Note that primary key columns which specify a
        server_default clause, or otherwise do not qualify as
        "autoincrement" columns (see the notes at :class:`_schema.Column`),
        and were generated using the database-side default, will
        appear in this list as ``None`` unless the backend
        supports "returning" and the insert statement was executed
        with the "implicit returning" feature enabled.

        Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
        statement is not a compiled expression construct
        or is not an insert() construct.

        """

        if not self.context.compiled:
            raise exc.InvalidRequestError(
                "Statement is not a compiled expression construct."
            )
        elif not self.context.isinsert:
            raise exc.InvalidRequestError(
                "Statement is not an insert() expression construct."
            )
        elif self.context._is_explicit_returning:
            raise exc.InvalidRequestError(
                "Can't call inserted_primary_key "
                "when returning() is used."
            )

        return self.context.inserted_primary_key

    def last_updated_params(self):
        """Return the collection of updated parameters from this
        execution.

        Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
        statement is not a compiled expression construct
        or is not an update() construct.

        """
        if not self.context.compiled:
            raise exc.InvalidRequestError(
                "Statement is not a compiled expression construct."
            )
        elif not self.context.isupdate:
            raise exc.InvalidRequestError(
                "Statement is not an update() expression construct."
            )
        elif self.context.executemany:
            return self.context.compiled_parameters
        else:
            return self.context.compiled_parameters[0]

    def last_inserted_params(self):
        """Return the collection of inserted parameters from this
        execution.

        Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
        statement is not a compiled expression construct
        or is not an insert() construct.

        """
        if not self.context.compiled:
            raise exc.InvalidRequestError(
                "Statement is not a compiled expression construct."
            )
        elif not self.context.isinsert:
            raise exc.InvalidRequestError(
                "Statement is not an insert() expression construct."
            )
        elif self.context.executemany:
            return self.context.compiled_parameters
        else:
            return self.context.compiled_parameters[0]

    @property
    def returned_defaults(self):
        """Return the values of default columns that were fetched using
        the :meth:`.ValuesBase.return_defaults` feature.

        The value is an instance of :class:`.RowProxy`, or ``None``
        if :meth:`.ValuesBase.return_defaults` was not used or if the
        backend does not support RETURNING.

        .. versionadded:: 0.9.0

        .. seealso::

            :meth:`.ValuesBase.return_defaults`

        """
        return self.context.returned_defaults

    def lastrow_has_defaults(self):
        """Return ``lastrow_has_defaults()`` from the underlying
        :class:`.ExecutionContext`.

        See :class:`.ExecutionContext` for details.

        """

        return self.context.lastrow_has_defaults()

    def postfetch_cols(self):
        """Return ``postfetch_cols()`` from the underlying
        :class:`.ExecutionContext`.

        See :class:`.ExecutionContext` for details.

        Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
        statement is not a compiled expression construct
        or is not an insert() or update() construct.

        """

        if not self.context.compiled:
            raise exc.InvalidRequestError(
                "Statement is not a compiled expression construct."
            )
        elif not self.context.isinsert and not self.context.isupdate:
            raise exc.InvalidRequestError(
                "Statement is not an insert() or update() "
                "expression construct."
            )
        return self.context.postfetch_cols

    def prefetch_cols(self):
        """Return ``prefetch_cols()`` from the underlying
        :class:`.ExecutionContext`.

        See :class:`.ExecutionContext` for details.

        Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
        statement is not a compiled expression construct
        or is not an insert() or update() construct.

        """

        if not self.context.compiled:
            raise exc.InvalidRequestError(
                "Statement is not a compiled expression construct."
            )
        elif not self.context.isinsert and not self.context.isupdate:
            raise exc.InvalidRequestError(
                "Statement is not an insert() or update() "
                "expression construct."
            )
        return self.context.prefetch_cols

    def supports_sane_rowcount(self):
        """Return ``supports_sane_rowcount`` from the dialect.

        See :attr:`_engine.ResultProxy.rowcount` for background.

        """

        return self.dialect.supports_sane_rowcount

    def supports_sane_multi_rowcount(self):
        """Return ``supports_sane_multi_rowcount`` from the dialect.

        See :attr:`_engine.ResultProxy.rowcount` for background.

        """

        return self.dialect.supports_sane_multi_rowcount

    def _fetchone_impl(self):
        try:
            return self.cursor.fetchone()
        except AttributeError as err:
            return self._non_result(None, err)

    def _fetchmany_impl(self, size=None):
        try:
            if size is None:
                return self.cursor.fetchmany()
            else:
                return self.cursor.fetchmany(size)
        except AttributeError as err:
            return self._non_result([], err)

    def _fetchall_impl(self):
        try:
            return self.cursor.fetchall()
        except AttributeError as err:
            return self._non_result([], err)

    def _non_result(self, default, err=None):
        if self._metadata is None:
            util.raise_(
                exc.ResourceClosedError(
                    "This result object does not return rows. "
                    "It has been closed automatically."
                ),
                replace_context=err,
            )
        elif self.closed:
            util.raise_(
                exc.ResourceClosedError("This result object is closed."),
                replace_context=err,
            )
        else:
            return default

    def process_rows(self, rows):
        process_row = self._process_row
        metadata = self._metadata
        keymap = metadata._keymap
        processors = metadata._processors
        if self._echo:
            log = self.context.engine.logger.debug
            l = []
            for row in rows:
                log("Row %r", sql_util._repr_row(row))
                l.append(process_row(metadata, row, processors, keymap))
            return l
        else:
            return [
                process_row(metadata, row, processors, keymap) for row in rows
            ]

    def fetchall(self):
        """Fetch all rows, just like DB-API ``cursor.fetchall()``.

        After all rows have been exhausted, the underlying DBAPI
        cursor resource is released, and the object may be safely
        discarded.

        Subsequent calls to :meth:`_engine.ResultProxy.fetchall` will return
        an empty list.  After the :meth:`_engine.ResultProxy.close` method is
        called, the method will raise :class:`.ResourceClosedError`.

        :return: a list of :class:`.RowProxy` objects

        """

        try:
            l = self.process_rows(self._fetchall_impl())
            self._soft_close()
            return l
        except BaseException as e:
            self.connection._handle_dbapi_exception(
                e, None, None, self.cursor, self.context
            )

    def fetchmany(self, size=None):
        """Fetch many rows, just like DB-API
        ``cursor.fetchmany(size=cursor.arraysize)``.

        After all rows have been exhausted, the underlying DBAPI
        cursor resource is released, and the object may be safely
        discarded.

        Calls to :meth:`_engine.ResultProxy.fetchmany` after all rows have
        been exhausted will return an empty list.  After the
        :meth:`_engine.ResultProxy.close` method is called, the method will
        raise :class:`.ResourceClosedError`.

        :return: a list of :class:`.RowProxy` objects

        """

        try:
            l = self.process_rows(self._fetchmany_impl(size))
            if len(l) == 0:
                self._soft_close()
            return l
        except BaseException as e:
            self.connection._handle_dbapi_exception(
                e, None, None, self.cursor, self.context
            )

    def fetchone(self):
        """Fetch one row, just like DB-API ``cursor.fetchone()``.

        After all rows have been exhausted, the underlying DBAPI
        cursor resource is released, and the object may be safely
        discarded.

        Calls to :meth:`_engine.ResultProxy.fetchone` after all rows have
        been exhausted will return ``None``.  After the
        :meth:`_engine.ResultProxy.close` method is called, the method will
        raise :class:`.ResourceClosedError`.

        :return: a :class:`.RowProxy` object, or None if no rows remain

        """
        try:
            row = self._fetchone_impl()
            if row is not None:
                return self.process_rows([row])[0]
            else:
                self._soft_close()
                return None
        except BaseException as e:
            self.connection._handle_dbapi_exception(
                e, None, None, self.cursor, self.context
            )

    def first(self):
        """Fetch the first row and then close the result set unconditionally.

        After calling this method, the object is fully closed,
        e.g. the :meth:`_engine.ResultProxy.close`
        method will have been called.

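        E.g., with ``users`` as a hypothetical table::

            row = connection.execute(users.select()).first()
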
        :return: a :class:`.RowProxy` object, or None if no rows remain

        """
        if self._metadata is None:
            return self._non_result(None)

        try:
            row = self._fetchone_impl()
        except BaseException as e:
            self.connection._handle_dbapi_exception(
                e, None, None, self.cursor, self.context
            )

        try:
            if row is not None:
                return self.process_rows([row])[0]
            else:
                return None
        finally:
            self.close()

    def scalar(self):
        """Fetch the first column of the first row, and close the result set.

        After calling this method, the object is fully closed,
        e.g. the :meth:`_engine.ResultProxy.close`
        method will have been called.

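        E.g., a typical "SELECT count" idiom, with ``users`` as a
        hypothetical table (imports shown for illustration)::

            from sqlalchemy import func, select

            count = connection.execute(
                select([func.count()]).select_from(users)
            ).scalar()
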
        :return: a Python scalar value, or None if no rows remain

        """
        row = self.first()
        if row is not None:
            return row[0]
        else:
            return None


class BufferedRowResultProxy(ResultProxy):
    """A ResultProxy with row buffering behavior.

    ``ResultProxy`` that buffers the contents of a selection of rows
    before ``fetchone()`` is called.  This is to allow the results of
    ``cursor.description`` to be available immediately, when
    interfacing with a DB-API that requires rows to be consumed before
    this information is available (currently psycopg2, when used with
    server-side cursors).

    The pre-fetching behavior fetches only one row initially, and then
    grows its buffer size by a fixed amount with each successive need
    for additional rows up to a size of 1000.

    The size argument is configurable using the ``max_row_buffer``
    execution option::

        with psycopg2_engine.connect() as conn:

            result = conn.execution_options(
                stream_results=True, max_row_buffer=50
            ).execute("select * from table")

    .. versionadded:: 1.0.6 Added the ``max_row_buffer`` option.

    .. seealso::

        :ref:`psycopg2_execution_options`
    """

    def _init_metadata(self):
        self._max_row_buffer = self.context.execution_options.get(
            "max_row_buffer", None
        )
        self.__buffer_rows()
        super(BufferedRowResultProxy, self)._init_metadata()

    # this is a "growth chart" for the buffering of rows.
    # each successive __buffer_rows call will use the next
    # value in the list for the buffer size until the max
    # is reached
    size_growth = {
        1: 5,
        5: 10,
        10: 20,
        20: 50,
        50: 100,
        100: 250,
        250: 500,
        500: 1000,
    }
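    # e.g., with no max_row_buffer set, successive buffer loads request
    # 1, 5, 10, 20, 50, 100, 250, 500, then 1000 rows, and remain at 1000
    # thereafter, since .get() below returns the size unchanged once it's
    # past the chart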

    def __buffer_rows(self):
        if self.cursor is None:
            return
        size = getattr(self, "_bufsize", 1)
        self.__rowbuffer = collections.deque(self.cursor.fetchmany(size))
        self._bufsize = self.size_growth.get(size, size)
        if self._max_row_buffer is not None:
            self._bufsize = min(self._max_row_buffer, self._bufsize)

    def _soft_close(self, **kw):
        self.__rowbuffer.clear()
        super(BufferedRowResultProxy, self)._soft_close(**kw)

    def _fetchone_impl(self):
        if self.cursor is None:
            return self._non_result(None)
        if not self.__rowbuffer:
            self.__buffer_rows()
            if not self.__rowbuffer:
                return None
        return self.__rowbuffer.popleft()

    def _fetchmany_impl(self, size=None):
        if size is None:
            return self._fetchall_impl()
        result = []
        for x in range(0, size):
            row = self._fetchone_impl()
            if row is None:
                break
            result.append(row)
        return result

    def _fetchall_impl(self):
        if self.cursor is None:
            return self._non_result([])
        self.__rowbuffer.extend(self.cursor.fetchall())
        ret = self.__rowbuffer
        self.__rowbuffer = collections.deque()
        return ret


class FullyBufferedResultProxy(ResultProxy):
    """A result proxy that buffers rows fully upon creation.

    Used for operations where a result is to be delivered
    after the database conversation can not be continued,
    such as MSSQL INSERT...OUTPUT after an autocommit.

    """

    def _init_metadata(self):
        super(FullyBufferedResultProxy, self)._init_metadata()
        self.__rowbuffer = self._buffer_rows()

    def _buffer_rows(self):
        return collections.deque(self.cursor.fetchall())

    def _soft_close(self, **kw):
        self.__rowbuffer.clear()
        super(FullyBufferedResultProxy, self)._soft_close(**kw)

    def _fetchone_impl(self):
        if self.__rowbuffer:
            return self.__rowbuffer.popleft()
        else:
            return self._non_result(None)

    def _fetchmany_impl(self, size=None):
        if size is None:
            return self._fetchall_impl()
        result = []
        for x in range(0, size):
            row = self._fetchone_impl()
            if row is None:
                break
            result.append(row)
        return result

    def _fetchall_impl(self):
        if not self.cursor:
            return self._non_result([])
        ret = self.__rowbuffer
        self.__rowbuffer = collections.deque()
        return ret


class BufferedColumnRow(RowProxy):
    def __init__(self, parent, row, processors, keymap):
        # preprocess row
        row = list(row)
        # this is a tad faster than using enumerate
        index = 0
        for processor in parent._orig_processors:
            if processor is not None:
                row[index] = processor(row[index])
            index += 1
        row = tuple(row)
        super(BufferedColumnRow, self).__init__(
            parent, row, processors, keymap
        )


class BufferedColumnResultProxy(ResultProxy):
    """A ResultProxy with column buffering behavior.

    ``ResultProxy`` that loads all columns into memory each time
    fetchone() is called.  If fetchmany() or fetchall() are called,
    the full grid of results is fetched.  This is to operate with
    databases where result rows contain "live" results that fall out
    of scope unless explicitly fetched.

    .. versionchanged:: 1.2  This :class:`_engine.ResultProxy` is not used by
       any SQLAlchemy-included dialects.

    """

    _process_row = BufferedColumnRow

    def _init_metadata(self):
        super(BufferedColumnResultProxy, self)._init_metadata()

        metadata = self._metadata

        # don't double-replace the processors, in the case
        # of a cached ResultMetaData
        if metadata._orig_processors is None:
            # orig_processors will be used to preprocess each row when
            # they are constructed.
            metadata._orig_processors = metadata._processors
            # replace all of the type processors with None.
            metadata._processors = [None for _ in range(len(metadata.keys))]
            keymap = {}
            for k, (func, obj, index) in metadata._keymap.items():
                keymap[k] = (None, obj, index)
            metadata._keymap = keymap

    def fetchall(self):
        # can't call cursor.fetchall(), since rows must be
        # fully processed before requesting more from the DBAPI.
        l = []
        while True:
            row = self.fetchone()
            if row is None:
                break
            l.append(row)
        return l

    def fetchmany(self, size=None):
        # can't call cursor.fetchmany(), since rows must be
        # fully processed before requesting more from the DBAPI.
        if size is None:
            return self.fetchall()
        l = []
        for i in range(size):
            row = self.fetchone()
            if row is None:
                break
            l.append(row)
        return l