Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/sqlalchemy/sql/compiler.py: 19%
2183 statements
coverage.py v7.0.1, created at 2022-12-25 06:11 +0000
1# sql/compiler.py
2# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
3# <see AUTHORS file>
4#
5# This module is part of SQLAlchemy and is released under
6# the MIT License: https://www.opensource.org/licenses/mit-license.php
8"""Base SQL and DDL compiler implementations.
10Classes provided include:
12:class:`.compiler.SQLCompiler` - renders SQL
13strings
15:class:`.compiler.DDLCompiler` - renders DDL
16(data definition language) strings
18:class:`.compiler.GenericTypeCompiler` - renders
19type specification strings.
21To generate user-defined SQL strings, see
22:doc:`/ext/compiler`.
24"""
26import collections
27import contextlib
28import itertools
29import operator
30import re
32from . import base
33from . import coercions
34from . import crud
35from . import elements
36from . import functions
37from . import operators
38from . import schema
39from . import selectable
40from . import sqltypes
41from . import util as sql_util
42from .base import NO_ARG
43from .base import prefix_anon_map
44from .elements import quoted_name
45from .. import exc
46from .. import util
48RESERVED_WORDS = set(
49 [
50 "all",
51 "analyse",
52 "analyze",
53 "and",
54 "any",
55 "array",
56 "as",
57 "asc",
58 "asymmetric",
59 "authorization",
60 "between",
61 "binary",
62 "both",
63 "case",
64 "cast",
65 "check",
66 "collate",
67 "column",
68 "constraint",
69 "create",
70 "cross",
71 "current_date",
72 "current_role",
73 "current_time",
74 "current_timestamp",
75 "current_user",
76 "default",
77 "deferrable",
78 "desc",
79 "distinct",
80 "do",
81 "else",
82 "end",
83 "except",
84 "false",
85 "for",
86 "foreign",
87 "freeze",
88 "from",
89 "full",
90 "grant",
91 "group",
92 "having",
93 "ilike",
94 "in",
95 "initially",
96 "inner",
97 "intersect",
98 "into",
99 "is",
100 "isnull",
101 "join",
102 "leading",
103 "left",
104 "like",
105 "limit",
106 "localtime",
107 "localtimestamp",
108 "natural",
109 "new",
110 "not",
111 "notnull",
112 "null",
113 "off",
114 "offset",
115 "old",
116 "on",
117 "only",
118 "or",
119 "order",
120 "outer",
121 "overlaps",
122 "placing",
123 "primary",
124 "references",
125 "right",
126 "select",
127 "session_user",
128 "set",
129 "similar",
130 "some",
131 "symmetric",
132 "table",
133 "then",
134 "to",
135 "trailing",
136 "true",
137 "union",
138 "unique",
139 "user",
140 "using",
141 "verbose",
142 "when",
143 "where",
144 ]
145)
147LEGAL_CHARACTERS = re.compile(r"^[A-Z0-9_$]+$", re.I)
148LEGAL_CHARACTERS_PLUS_SPACE = re.compile(r"^[A-Z0-9_ $]+$", re.I)
149ILLEGAL_INITIAL_CHARACTERS = {str(x) for x in range(0, 10)}.union(["$"])
151FK_ON_DELETE = re.compile(
152 r"^(?:RESTRICT|CASCADE|SET NULL|NO ACTION|SET DEFAULT)$", re.I
153)
154FK_ON_UPDATE = re.compile(
155 r"^(?:RESTRICT|CASCADE|SET NULL|NO ACTION|SET DEFAULT)$", re.I
156)
157FK_INITIALLY = re.compile(r"^(?:DEFERRED|IMMEDIATE)$", re.I)
158BIND_PARAMS = re.compile(r"(?<![:\w\$\x5c]):([\w\$]+)(?![:\w\$])", re.UNICODE)
159BIND_PARAMS_ESC = re.compile(r"\x5c(:[\w\$]*)(?![:\w\$])", re.UNICODE)
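# Illustrative sketch (editor's addition, not part of the module): how the two
# regexes above behave on a text() fragment.  ":name" tokens are picked up as
# bind parameters, while a backslash-escaped "\:name" is skipped and later
# un-escaped by BIND_PARAMS_ESC (see visit_textclause below).
#
#     >>> sample = r"SELECT x FROM t WHERE a = :a AND b = 'x\:y'"
#     >>> BIND_PARAMS.findall(sample)
#     ['a']
#     >>> BIND_PARAMS_ESC.sub(lambda m: m.group(1), sample)
#     "SELECT x FROM t WHERE a = :a AND b = 'x:y'"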
161BIND_TEMPLATES = {
162 "pyformat": "%%(%(name)s)s",
163 "qmark": "?",
164 "format": "%%s",
165 "numeric": ":[_POSITION]",
166 "named": ":%(name)s",
167}
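# Illustrative sketch (editor's addition, not part of the module): each entry
# above is a plain %-format template keyed by DBAPI paramstyle; "qmark" and
# "format" are positional and ignore the parameter name.
#
#     >>> BIND_TEMPLATES["named"] % {"name": "user_id"}
#     ':user_id'
#     >>> BIND_TEMPLATES["pyformat"] % {"name": "user_id"}
#     '%(user_id)s'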
169_BIND_TRANSLATE_RE = re.compile(r"[%\(\):\[\] ]")
170_BIND_TRANSLATE_CHARS = dict(zip("%():[] ", "PAZC___"))
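# Illustrative sketch (editor's addition, not part of the module): these two
# tables rewrite characters in bind parameter names that some drivers cannot
# accept; renamed parameters are tracked via ``escaped_bind_names`` below.
#
#     >>> _BIND_TRANSLATE_RE.sub(
#     ...     lambda m: _BIND_TRANSLATE_CHARS[m.group(0)], "some (name)")
#     'some_AnameZ'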
172OPERATORS = {
173 # binary
174 operators.and_: " AND ",
175 operators.or_: " OR ",
176 operators.add: " + ",
177 operators.mul: " * ",
178 operators.sub: " - ",
179 operators.div: " / ",
180 operators.mod: " % ",
181 operators.truediv: " / ",
182 operators.neg: "-",
183 operators.lt: " < ",
184 operators.le: " <= ",
185 operators.ne: " != ",
186 operators.gt: " > ",
187 operators.ge: " >= ",
188 operators.eq: " = ",
189 operators.is_distinct_from: " IS DISTINCT FROM ",
190 operators.is_not_distinct_from: " IS NOT DISTINCT FROM ",
191 operators.concat_op: " || ",
192 operators.match_op: " MATCH ",
193 operators.not_match_op: " NOT MATCH ",
194 operators.in_op: " IN ",
195 operators.not_in_op: " NOT IN ",
196 operators.comma_op: ", ",
197 operators.from_: " FROM ",
198 operators.as_: " AS ",
199 operators.is_: " IS ",
200 operators.is_not: " IS NOT ",
201 operators.collate: " COLLATE ",
202 # unary
203 operators.exists: "EXISTS ",
204 operators.distinct_op: "DISTINCT ",
205 operators.inv: "NOT ",
206 operators.any_op: "ANY ",
207 operators.all_op: "ALL ",
208 # modifiers
209 operators.desc_op: " DESC",
210 operators.asc_op: " ASC",
211 operators.nulls_first_op: " NULLS FIRST",
212 operators.nulls_last_op: " NULLS LAST",
213}
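# Illustrative sketch (editor's addition, not part of the module): the
# OPERATORS map supplies the SQL text used when binary and unary expressions
# are compiled.
#
#     >>> from sqlalchemy import column
#     >>> print(column("a") == column("b"))
#     a = b
#     >>> print(column("a").is_(None))
#     a IS NULL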
215FUNCTIONS = {
216 functions.coalesce: "coalesce",
217 functions.current_date: "CURRENT_DATE",
218 functions.current_time: "CURRENT_TIME",
219 functions.current_timestamp: "CURRENT_TIMESTAMP",
220 functions.current_user: "CURRENT_USER",
221 functions.localtime: "LOCALTIME",
222 functions.localtimestamp: "LOCALTIMESTAMP",
223 functions.random: "random",
224 functions.sysdate: "sysdate",
225 functions.session_user: "SESSION_USER",
226 functions.user: "USER",
227 functions.cube: "CUBE",
228 functions.rollup: "ROLLUP",
229 functions.grouping_sets: "GROUPING SETS",
230}
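# Illustrative sketch (editor's addition, not part of the module): generic
# functions listed above render with these fixed names; anything else under
# ``func.<name>`` renders verbatim.  Bind parameter names shown are the
# anonymous defaults and may differ.
#
#     >>> from sqlalchemy import func, column
#     >>> print(func.current_timestamp())
#     CURRENT_TIMESTAMP
#     >>> print(func.coalesce(column("x"), 0))
#     coalesce(x, :coalesce_1)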
232EXTRACT_MAP = {
233 "month": "month",
234 "day": "day",
235 "year": "year",
236 "second": "second",
237 "hour": "hour",
238 "doy": "doy",
239 "minute": "minute",
240 "quarter": "quarter",
241 "dow": "dow",
242 "week": "week",
243 "epoch": "epoch",
244 "milliseconds": "milliseconds",
245 "microseconds": "microseconds",
246 "timezone_hour": "timezone_hour",
247 "timezone_minute": "timezone_minute",
248}
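# Illustrative sketch (editor's addition, not part of the module): EXTRACT_MAP
# lets dialects rename date/time fields; the generic compiler passes them
# through unchanged (see visit_extract below).
#
#     >>> from sqlalchemy import extract, column
#     >>> print(extract("year", column("ts")))
#     EXTRACT(year FROM ts)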
250COMPOUND_KEYWORDS = {
251 selectable.CompoundSelect.UNION: "UNION",
252 selectable.CompoundSelect.UNION_ALL: "UNION ALL",
253 selectable.CompoundSelect.EXCEPT: "EXCEPT",
254 selectable.CompoundSelect.EXCEPT_ALL: "EXCEPT ALL",
255 selectable.CompoundSelect.INTERSECT: "INTERSECT",
256 selectable.CompoundSelect.INTERSECT_ALL: "INTERSECT ALL",
257}
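# Illustrative sketch (editor's addition, not part of the module): the compound
# keyword is what joins the individual SELECTs of a CompoundSelect.
#
#     >>> from sqlalchemy import select, column, union_all
#     >>> print(union_all(select(column("a")), select(column("a"))))
#     SELECT a UNION ALL SELECT a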
260RM_RENDERED_NAME = 0
261RM_NAME = 1
262RM_OBJECTS = 2
263RM_TYPE = 3
266ExpandedState = collections.namedtuple(
267 "ExpandedState",
268 [
269 "statement",
270 "additional_parameters",
271 "processors",
272 "positiontup",
273 "parameter_expansion",
274 ],
275)
278NO_LINTING = util.symbol("NO_LINTING", "Disable all linting.", canonical=0)
280COLLECT_CARTESIAN_PRODUCTS = util.symbol(
281 "COLLECT_CARTESIAN_PRODUCTS",
282 "Collect data on FROMs and cartesian products and gather "
283 "into 'self.from_linter'",
284 canonical=1,
285)
287WARN_LINTING = util.symbol(
288 "WARN_LINTING", "Emit warnings for linters that find problems", canonical=2
289)
291FROM_LINTING = util.symbol(
292 "FROM_LINTING",
293 "Warn for cartesian products; "
294 "combines COLLECT_CARTESIAN_PRODUCTS and WARN_LINTING",
295 canonical=COLLECT_CARTESIAN_PRODUCTS | WARN_LINTING,
296)
299class FromLinter(collections.namedtuple("FromLinter", ["froms", "edges"])):
300 def lint(self, start=None):
301 froms = self.froms
302 if not froms:
303 return None, None
305 edges = set(self.edges)
306 the_rest = set(froms)
308 if start is not None:
309 start_with = start
310 the_rest.remove(start_with)
311 else:
312 start_with = the_rest.pop()
314 stack = collections.deque([start_with])
316 while stack and the_rest:
317 node = stack.popleft()
318 the_rest.discard(node)
320 # comparison of nodes in edges here is based on hash equality, as
321 # there are "annotated" elements that match the non-annotated ones.
322 # to remove the need for in-python hash() calls, use native
323 # containment routines (e.g. "node in edge", "edge.index(node)")
324 to_remove = {edge for edge in edges if node in edge}
326 # appendleft the node in each edge that is not
327 # the one that matched.
328 stack.extendleft(edge[not edge.index(node)] for edge in to_remove)
329 edges.difference_update(to_remove)
331 # FROMS left over? boom
332 if the_rest:
333 return the_rest, start_with
334 else:
335 return None, None
337 def warn(self):
338 the_rest, start_with = self.lint()
340 # FROMS left over? boom
341 if the_rest:
343 froms = the_rest
344 if froms:
345 template = (
346 "SELECT statement has a cartesian product between "
347 "FROM element(s) {froms} and "
348 'FROM element "{start}". Apply join condition(s) '
349 "between each element to resolve."
350 )
351 froms_str = ", ".join(
352 '"{elem}"'.format(elem=self.froms[from_])
353 for from_ in froms
354 )
355 message = template.format(
356 froms=froms_str, start=self.froms[start_with]
357 )
359 util.warn(message)
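# Illustrative sketch (editor's addition, not part of the module): the linter
# walks the graph of FROM elements connected by join-condition edges; any FROM
# element left unreachable indicates a cartesian product.  With SQLAlchemy
# 1.4's defaults (linting enabled on create_engine), executing such a SELECT
# emits the warning built above.
#
#     >>> from sqlalchemy import (
#     ...     MetaData, Table, Column, Integer, create_engine, select)
#     >>> m = MetaData()
#     >>> a = Table("a", m, Column("id", Integer))
#     >>> b = Table("b", m, Column("id", Integer))
#     >>> e = create_engine("sqlite://")
#     >>> m.create_all(e)
#     >>> with e.connect() as conn:
#     ...     conn.execute(select(a.c.id, b.c.id)).all()  # no join criteria -> warning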
362class Compiled(object):
364 """Represent a compiled SQL or DDL expression.
366 The ``__str__`` method of the ``Compiled`` object should produce
367 the actual text of the statement. ``Compiled`` objects are
368 specific to their underlying database dialect, and also may
369 or may not be specific to the columns referenced within a
370 particular set of bind parameters. In no case should the
371 ``Compiled`` object be dependent on the actual values of those
372 bind parameters, even though it may reference those values as
373 defaults.
374 """
376 _cached_metadata = None
378 _result_columns = None
380 schema_translate_map = None
382 execution_options = util.EMPTY_DICT
383 """
384 Execution options propagated from the statement. In some cases,
385 sub-elements of the statement can modify these.
386 """
388 _annotations = util.EMPTY_DICT
390 compile_state = None
391 """Optional :class:`.CompileState` object that maintains additional
392 state used by the compiler.
394 Major executable objects such as :class:`_expression.Insert`,
395 :class:`_expression.Update`, :class:`_expression.Delete`,
396 :class:`_expression.Select` will generate this
397 state when compiled in order to calculate additional information about the
398 object. For the top level object that is to be executed, the state can be
399 stored here where it can also have applicability towards result set
400 processing.
402 .. versionadded:: 1.4
404 """
406 dml_compile_state = None
407 """Optional :class:`.CompileState` assigned at the same point that
408 .isinsert, .isupdate, or .isdelete is assigned.
410 This will normally be the same object as .compile_state, with the
411 exception of cases like the :class:`.ORMFromStatementCompileState`
412 object.
414 .. versionadded:: 1.4.40
416 """
418 cache_key = None
419 _gen_time = None
421 def __init__(
422 self,
423 dialect,
424 statement,
425 schema_translate_map=None,
426 render_schema_translate=False,
427 compile_kwargs=util.immutabledict(),
428 ):
429 """Construct a new :class:`.Compiled` object.
431 :param dialect: :class:`.Dialect` to compile against.
433 :param statement: :class:`_expression.ClauseElement` to be compiled.
435 :param schema_translate_map: dictionary of schema names to be
436 translated when forming the resultant SQL
438 .. versionadded:: 1.1
440 .. seealso::
442 :ref:`schema_translating`
444 :param compile_kwargs: additional kwargs that will be
445 passed to the initial call to :meth:`.Compiled.process`.
448 """
450 self.dialect = dialect
451 self.preparer = self.dialect.identifier_preparer
452 if schema_translate_map:
453 self.schema_translate_map = schema_translate_map
454 self.preparer = self.preparer._with_schema_translate(
455 schema_translate_map
456 )
458 if statement is not None:
459 self.statement = statement
460 self.can_execute = statement.supports_execution
461 self._annotations = statement._annotations
462 if self.can_execute:
463 self.execution_options = statement._execution_options
464 self.string = self.process(self.statement, **compile_kwargs)
466 if render_schema_translate:
467 self.string = self.preparer._render_schema_translates(
468 self.string, schema_translate_map
469 )
470 self._gen_time = util.perf_counter()
472 def _execute_on_connection(
473 self, connection, multiparams, params, execution_options
474 ):
475 if self.can_execute:
476 return connection._execute_compiled(
477 self, multiparams, params, execution_options
478 )
479 else:
480 raise exc.ObjectNotExecutableError(self.statement)
482 def visit_unsupported_compilation(self, element, err):
483 util.raise_(
484 exc.UnsupportedCompilationError(self, type(element)),
485 replace_context=err,
486 )
488 @property
489 def sql_compiler(self):
490 """Return a Compiled that is capable of processing SQL expressions.
492 If this compiler is one, it would likely just return 'self'.
494 """
496 raise NotImplementedError()
498 def process(self, obj, **kwargs):
499 return obj._compiler_dispatch(self, **kwargs)
501 def __str__(self):
502 """Return the string text of the generated SQL or DDL."""
504 return self.string or ""
506 def construct_params(
507 self, params=None, extracted_parameters=None, escape_names=True
508 ):
509 """Return the bind params for this compiled object.
511 :param params: a dict of string/object pairs whose values will
512 override bind values compiled into the
513 statement.
514 """
516 raise NotImplementedError()
518 @property
519 def params(self):
520 """Return the bind params for this compiled object."""
521 return self.construct_params()
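# Illustrative sketch (editor's addition, not part of the module): a Compiled
# object is normally obtained via ClauseElement.compile(); str() gives the SQL
# text and .params the compiled-in bind values.  Output shown for the default
# string-compilation dialect.
#
#     >>> from sqlalchemy import select, column, bindparam
#     >>> compiled = select(column("x")).where(
#     ...     column("x") == bindparam("val", 5)).compile()
#     >>> print(compiled)
#     SELECT x
#     WHERE x = :val
#     >>> compiled.params
#     {'val': 5}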
524class TypeCompiler(util.with_metaclass(util.EnsureKWArgType, object)):
525 """Produces DDL specification for TypeEngine objects."""
527 ensure_kwarg = r"visit_\w+"
529 def __init__(self, dialect):
530 self.dialect = dialect
532 def process(self, type_, **kw):
533 return type_._compiler_dispatch(self, **kw)
535 def visit_unsupported_compilation(self, element, err, **kw):
536 util.raise_(
537 exc.UnsupportedCompilationError(self, element),
538 replace_context=err,
539 )
542# this was a Visitable, but to allow accurate detection of
543# column elements this is actually a column element
544class _CompileLabel(elements.ColumnElement):
546 """lightweight label object which acts as an expression.Label."""
548 __visit_name__ = "label"
549 __slots__ = "element", "name"
551 def __init__(self, col, name, alt_names=()):
552 self.element = col
553 self.name = name
554 self._alt_names = (col,) + alt_names
556 @property
557 def proxy_set(self):
558 return self.element.proxy_set
560 @property
561 def type(self):
562 return self.element.type
564 def self_group(self, **kw):
565 return self
568class SQLCompiler(Compiled):
569 """Default implementation of :class:`.Compiled`.
571 Compiles :class:`_expression.ClauseElement` objects into SQL strings.
573 """
575 extract_map = EXTRACT_MAP
577 compound_keywords = COMPOUND_KEYWORDS
579 isdelete = isinsert = isupdate = False
580 """class-level defaults which can be set at the instance
581 level to define if this Compiled instance represents
582 INSERT/UPDATE/DELETE
583 """
585 isplaintext = False
587 returning = None
588 """holds the "returning" collection of columns if
589 the statement is CRUD and defines returning columns
590 either implicitly or explicitly
591 """
593 returning_precedes_values = False
594 """set to True classwide to generate RETURNING
595 clauses before the VALUES or WHERE clause (e.g. MSSQL)
596 """
598 render_table_with_column_in_update_from = False
599 """set to True classwide to indicate the SET clause
600 in a multi-table UPDATE statement should qualify
601 columns with the table name (i.e. MySQL only)
602 """
604 ansi_bind_rules = False
605 """SQL 92 doesn't allow bind parameters to be used
606 in the columns clause of a SELECT, nor does it allow
607 ambiguous expressions like "? = ?". A compiler
608 subclass can set this flag to True if the target
609 driver/DB enforces these rules.
610 """
612 _textual_ordered_columns = False
613 """tell the result object that the column names as rendered are important,
614 and that they are "ordered" with respect to what is in the compiled object here.
616 As of 1.4.42 this condition is only present when the statement is a
617 TextualSelect, e.g. text("....").columns(...), where it is required
618 that the columns are considered positionally and not by name.
620 """
622 _ad_hoc_textual = False
623 """tell the result that we encountered text() or '*' constructs in the
624 middle of the result columns, but we also have compiled columns, so
625 if the number of columns in cursor.description does not match how many
626 expressions we have, that means we can't rely on positional at all and
627 should match on name.
629 """
631 _ordered_columns = True
632 """
633 if False, means we can't be sure the list of entries
634 in _result_columns is actually the rendered order. Usually
635 True unless using an unordered TextualSelect.
636 """
638 _loose_column_name_matching = False
639 """tell the result object that the SQL statement is textual, wants to match
640 up to Column objects, and may be using the ._tq_label in the SELECT rather
641 than the base name.
643 """
645 _numeric_binds = False
646 """
647 True if paramstyle is "numeric". This paramstyle is trickier than
648 all the others.
650 """
652 _render_postcompile = False
653 """
654 whether to render out POSTCOMPILE params during the compile phase.
656 """
658 insert_single_values_expr = None
659 """When an INSERT is compiled with a single set of parameters inside
660 a VALUES expression, the string is assigned here, where it can be
661 used for insert batching schemes to rewrite the VALUES expression.
663 .. versionadded:: 1.3.8
665 """
667 literal_execute_params = frozenset()
668 """bindparameter objects that are rendered as literal values at statement
669 execution time.
671 """
673 post_compile_params = frozenset()
674 """bindparameter objects that are rendered as bound parameter placeholders
675 at statement execution time.
677 """
679 escaped_bind_names = util.EMPTY_DICT
680 """Late escaping of bound parameter names that has to be converted
681 to the original name when looking in the parameter dictionary.
683 """
685 has_out_parameters = False
686 """if True, there are bindparam() objects that have the isoutparam
687 flag set."""
689 insert_prefetch = update_prefetch = ()
691 postfetch_lastrowid = False
692 """if True, and this in insert, use cursor.lastrowid to populate
693 result.inserted_primary_key. """
695 _cache_key_bind_match = None
696 """a mapping that will relate the BindParameter object we compile
697 to those that are part of the extracted collection of parameters
698 in the cache key, if we were given a cache key.
700 """
702 positiontup = None
703 """for a compiled construct that uses a positional paramstyle, will be
704 a sequence of strings, indicating the names of bound parameters in order.
706 This is used in order to render bound parameters in their correct order,
707 and is combined with the :attr:`_sql.Compiled.params` dictionary to
708 render parameters.
710 .. seealso::
712 :ref:`faq_sql_expression_string` - includes a usage example for
713 debugging use cases.
715 """
716 positiontup_level = None
718 inline = False
720 def __init__(
721 self,
722 dialect,
723 statement,
724 cache_key=None,
725 column_keys=None,
726 for_executemany=False,
727 linting=NO_LINTING,
728 **kwargs
729 ):
730 """Construct a new :class:`.SQLCompiler` object.
732 :param dialect: :class:`.Dialect` to be used
734 :param statement: :class:`_expression.ClauseElement` to be compiled
736 :param column_keys: a list of column names to be compiled into an
737 INSERT or UPDATE statement.
739 :param for_executemany: whether INSERT / UPDATE statements should
740 expect that they are to be invoked in an "executemany" style,
741 which may impact how the statement will be expected to return the
742 values of defaults and autoincrement / sequences and similar.
743 Depending on the backend and driver in use, support for retrieving
744 these values may be disabled which means SQL expressions may
745 be rendered inline, RETURNING may not be rendered, etc.
747 :param kwargs: additional keyword arguments to be consumed by the
748 superclass.
750 """
751 self.column_keys = column_keys
753 self.cache_key = cache_key
755 if cache_key:
756 self._cache_key_bind_match = ckbm = {
757 b.key: b for b in cache_key[1]
758 }
759 ckbm.update({b: [b] for b in cache_key[1]})
761 # compile INSERT/UPDATE defaults/sequences to expect executemany
762 # style execution, which may mean no pre-execute of defaults,
763 # or no RETURNING
764 self.for_executemany = for_executemany
766 self.linting = linting
768 # a dictionary of bind parameter keys to BindParameter
769 # instances.
770 self.binds = {}
772 # a dictionary of BindParameter instances to "compiled" names
773 # that are actually present in the generated SQL
774 self.bind_names = util.column_dict()
776 # stack which keeps track of nested SELECT statements
777 self.stack = []
779 # relates label names in the final SQL to a tuple of local
780 # column/label name, ColumnElement object (if any) and
781 # TypeEngine. CursorResult uses this for type processing and
782 # column targeting
783 self._result_columns = []
785 # true if the paramstyle is positional
786 self.positional = dialect.positional
787 if self.positional:
788 self.positiontup_level = {}
789 self.positiontup = []
790 self._numeric_binds = dialect.paramstyle == "numeric"
791 self.bindtemplate = BIND_TEMPLATES[dialect.paramstyle]
793 self.ctes = None
795 self.label_length = (
796 dialect.label_length or dialect.max_identifier_length
797 )
799 # a map which tracks "anonymous" identifiers that are created on
800 # the fly here
801 self.anon_map = prefix_anon_map()
803 # a map which tracks "truncated" names based on
804 # dialect.label_length or dialect.max_identifier_length
805 self.truncated_names = {}
807 Compiled.__init__(self, dialect, statement, **kwargs)
809 if self.isinsert or self.isupdate or self.isdelete:
810 if statement._returning:
811 self.returning = statement._returning
813 if self.isinsert or self.isupdate:
814 if statement._inline:
815 self.inline = True
816 elif self.for_executemany and (
817 not self.isinsert
818 or (
819 self.dialect.insert_executemany_returning
820 and statement._return_defaults
821 )
822 ):
823 self.inline = True
825 if self.positional and self._numeric_binds:
826 self._apply_numbered_params()
828 if self._render_postcompile:
829 self._process_parameters_for_postcompile(_populate_self=True)
831 @property
832 def current_executable(self):
833 """Return the current 'executable' that is being compiled.
835 This is currently the :class:`_sql.Select`, :class:`_sql.Insert`,
836 :class:`_sql.Update`, :class:`_sql.Delete`,
837 :class:`_sql.CompoundSelect` object that is being compiled.
838 Specifically it's assigned to the ``self.stack`` list of elements.
840 When a statement like the above is being compiled, it normally
841 is also assigned to the ``.statement`` attribute of the
842 :class:`_sql.Compiler` object. However, all SQL constructs are
843 ultimately nestable, and this attribute should never be consulted
844 by a ``visit_`` method, as it is not guaranteed to be assigned
845 nor guaranteed to correspond to the current statement being compiled.
847 .. versionadded:: 1.3.21
849 For compatibility with previous versions, use the following
850 recipe::
852 statement = getattr(self, "current_executable", False)
853 if statement is False:
854 statement = self.stack[-1]["selectable"]
856 For versions 1.4 and above, ensure only .current_executable
857 is used; the format of "self.stack" may change.
860 """
861 try:
862 return self.stack[-1]["selectable"]
863 except IndexError as ie:
864 util.raise_(
865 IndexError("Compiler does not have a stack entry"),
866 replace_context=ie,
867 )
869 @property
870 def prefetch(self):
871 return list(self.insert_prefetch + self.update_prefetch)
873 @util.memoized_property
874 def _global_attributes(self):
875 return {}
877 @util.memoized_instancemethod
878 def _init_cte_state(self):
879 """Initialize collections related to CTEs only if
880 a CTE is located, to save on the overhead of
881 these collections otherwise.
883 """
884 # collect CTEs to tack on top of a SELECT
885 # To store the query to print - Dict[cte, text_query]
886 self.ctes = util.OrderedDict()
888 # Detect same CTE references - Dict[(level, name), cte]
889 # Level is required for supporting nesting
890 self.ctes_by_level_name = {}
892 # To retrieve key/level in ctes_by_level_name -
893 # Dict[cte_reference, (level, cte_name)]
894 self.level_name_by_cte = {}
896 self.ctes_recursive = False
897 if self.positional:
898 self.cte_positional = {}
899 self.cte_level = {}
900 self.cte_order = collections.defaultdict(list)
902 @contextlib.contextmanager
903 def _nested_result(self):
904 """special API to support the use case of 'nested result sets'"""
905 result_columns, ordered_columns = (
906 self._result_columns,
907 self._ordered_columns,
908 )
909 self._result_columns, self._ordered_columns = [], False
911 try:
912 if self.stack:
913 entry = self.stack[-1]
914 entry["need_result_map_for_nested"] = True
915 else:
916 entry = None
917 yield self._result_columns, self._ordered_columns
918 finally:
919 if entry:
920 entry.pop("need_result_map_for_nested")
921 self._result_columns, self._ordered_columns = (
922 result_columns,
923 ordered_columns,
924 )
926 def _apply_numbered_params(self):
927 poscount = itertools.count(1)
928 self.string = re.sub(
929 r"\[_POSITION\]", lambda m: str(util.next(poscount)), self.string
930 )
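# Illustrative sketch (editor's addition, not part of the module): with the
# "numeric" paramstyle the BIND_TEMPLATES entry emits ":[_POSITION]" tokens,
# which this method then numbers left to right.
#
#     >>> import itertools, re
#     >>> count = itertools.count(1)
#     >>> re.sub(r"\[_POSITION\]", lambda m: str(next(count)),
#     ...        "x = :[_POSITION] AND y = :[_POSITION]")
#     'x = :1 AND y = :2'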
932 @util.memoized_property
933 def _bind_processors(self):
935 return dict(
936 (
937 key,
938 value,
939 )
940 for key, value in (
941 (
942 self.bind_names[bindparam],
943 bindparam.type._cached_bind_processor(self.dialect)
944 if not bindparam.type._is_tuple_type
945 else tuple(
946 elem_type._cached_bind_processor(self.dialect)
947 for elem_type in bindparam.type.types
948 ),
949 )
950 for bindparam in self.bind_names
951 )
952 if value is not None
953 )
955 def is_subquery(self):
956 return len(self.stack) > 1
958 @property
959 def sql_compiler(self):
960 return self
962 def construct_params(
963 self,
964 params=None,
965 _group_number=None,
966 _check=True,
967 extracted_parameters=None,
968 escape_names=True,
969 ):
970 """return a dictionary of bind parameter keys and values"""
972 has_escaped_names = escape_names and bool(self.escaped_bind_names)
974 if extracted_parameters:
975 # related the bound parameters collected in the original cache key
976 # to those collected in the incoming cache key. They will not have
977 # matching names but they will line up positionally in the same
978 # way. The parameters present in self.bind_names may be clones of
979 # these original cache key params in the case of DML but the .key
980 # will be guaranteed to match.
981 try:
982 orig_extracted = self.cache_key[1]
983 except TypeError as err:
984 util.raise_(
985 exc.CompileError(
986 "This compiled object has no original cache key; "
987 "can't pass extracted_parameters to construct_params"
988 ),
989 replace_context=err,
990 )
992 ckbm = self._cache_key_bind_match
993 resolved_extracted = {
994 bind: extracted
995 for b, extracted in zip(orig_extracted, extracted_parameters)
996 for bind in ckbm[b]
997 }
998 else:
999 resolved_extracted = None
1001 if params:
1002 pd = {}
1003 for bindparam, name in self.bind_names.items():
1004 escaped_name = (
1005 self.escaped_bind_names.get(name, name)
1006 if has_escaped_names
1007 else name
1008 )
1010 if bindparam.key in params:
1011 pd[escaped_name] = params[bindparam.key]
1012 elif name in params:
1013 pd[escaped_name] = params[name]
1015 elif _check and bindparam.required:
1016 if _group_number:
1017 raise exc.InvalidRequestError(
1018 "A value is required for bind parameter %r, "
1019 "in parameter group %d"
1020 % (bindparam.key, _group_number),
1021 code="cd3x",
1022 )
1023 else:
1024 raise exc.InvalidRequestError(
1025 "A value is required for bind parameter %r"
1026 % bindparam.key,
1027 code="cd3x",
1028 )
1029 else:
1030 if resolved_extracted:
1031 value_param = resolved_extracted.get(
1032 bindparam, bindparam
1033 )
1034 else:
1035 value_param = bindparam
1037 if bindparam.callable:
1038 pd[escaped_name] = value_param.effective_value
1039 else:
1040 pd[escaped_name] = value_param.value
1041 return pd
1042 else:
1043 pd = {}
1044 for bindparam, name in self.bind_names.items():
1045 escaped_name = (
1046 self.escaped_bind_names.get(name, name)
1047 if has_escaped_names
1048 else name
1049 )
1051 if _check and bindparam.required:
1052 if _group_number:
1053 raise exc.InvalidRequestError(
1054 "A value is required for bind parameter %r, "
1055 "in parameter group %d"
1056 % (bindparam.key, _group_number),
1057 code="cd3x",
1058 )
1059 else:
1060 raise exc.InvalidRequestError(
1061 "A value is required for bind parameter %r"
1062 % bindparam.key,
1063 code="cd3x",
1064 )
1066 if resolved_extracted:
1067 value_param = resolved_extracted.get(bindparam, bindparam)
1068 else:
1069 value_param = bindparam
1071 if bindparam.callable:
1072 pd[escaped_name] = value_param.effective_value
1073 else:
1074 pd[escaped_name] = value_param.value
1075 return pd
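# Illustrative sketch (editor's addition, not part of the module): the
# ``params`` argument takes precedence over values compiled into the
# statement; a ``required`` bindparam with no value raises
# InvalidRequestError, per the checks above.
#
#     >>> from sqlalchemy import select, column, bindparam
#     >>> c = select(column("x")).where(
#     ...     column("x") == bindparam("val", 5)).compile()
#     >>> c.construct_params()
#     {'val': 5}
#     >>> c.construct_params(params={"val": 10})
#     {'val': 10}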
1077 @util.memoized_instancemethod
1078 def _get_set_input_sizes_lookup(
1079 self, include_types=None, exclude_types=None
1080 ):
1081 if not hasattr(self, "bind_names"):
1082 return None
1084 dialect = self.dialect
1085 dbapi = self.dialect.dbapi
1087 # _unwrapped_dialect_impl() is necessary so that we get the
1088 # correct dialect type for a custom TypeDecorator, or a Variant,
1089 # which is also a TypeDecorator. Special types like Interval,
1090 # that use TypeDecorator but also might be mapped directly
1091 # for a dialect impl, also subclass Emulated first which overrides
1092 # this behavior in those cases to behave like the default.
1094 if include_types is None and exclude_types is None:
1096 def _lookup_type(typ):
1097 dbtype = typ.dialect_impl(dialect).get_dbapi_type(dbapi)
1098 return dbtype
1100 else:
1102 def _lookup_type(typ):
1103 # note we get dbtype from the possibly TypeDecorator-wrapped
1104 # dialect_impl, but the dialect_impl itself that we use for
1105 # include/exclude is the unwrapped version.
1107 dialect_impl = typ._unwrapped_dialect_impl(dialect)
1109 dbtype = typ.dialect_impl(dialect).get_dbapi_type(dbapi)
1111 if (
1112 dbtype is not None
1113 and (
1114 exclude_types is None
1115 or dbtype not in exclude_types
1116 and type(dialect_impl) not in exclude_types
1117 )
1118 and (
1119 include_types is None
1120 or dbtype in include_types
1121 or type(dialect_impl) in include_types
1122 )
1123 ):
1124 return dbtype
1125 else:
1126 return None
1128 inputsizes = {}
1129 literal_execute_params = self.literal_execute_params
1131 for bindparam in self.bind_names:
1132 if bindparam in literal_execute_params:
1133 continue
1135 if bindparam.type._is_tuple_type:
1136 inputsizes[bindparam] = [
1137 _lookup_type(typ) for typ in bindparam.type.types
1138 ]
1139 else:
1140 inputsizes[bindparam] = _lookup_type(bindparam.type)
1142 return inputsizes
1144 @property
1145 def params(self):
1146 """Return the bind param dictionary embedded into this
1147 compiled object, for those values that are present.
1149 .. seealso::
1151 :ref:`faq_sql_expression_string` - includes a usage example for
1152 debugging use cases.
1154 """
1155 return self.construct_params(_check=False)
1157 def _process_parameters_for_postcompile(
1158 self, parameters=None, _populate_self=False
1159 ):
1160 """handle special post compile parameters.
1162 These include:
1164 * "expanding" parameters -typically IN tuples that are rendered
1165 on a per-parameter basis for an otherwise fixed SQL statement string.
1167 * literal_binds compiled with the literal_execute flag. Used for
1168 things like SQL Server "TOP N" where the driver does not accommodate
1169 N as a bound parameter.
1171 """
1173 if parameters is None:
1174 parameters = self.construct_params(escape_names=False)
1176 expanded_parameters = {}
1177 if self.positional:
1178 positiontup = []
1179 else:
1180 positiontup = None
1182 processors = self._bind_processors
1184 new_processors = {}
1186 if self.positional and self._numeric_binds:
1187 # I'm not familiar with any DBAPI that uses 'numeric'.
1188 # strategy would likely be to make use of numbers greater than
1189 # the highest number present; then for expanding parameters,
1190 # append them to the end of the parameter list. that way
1191 # we avoid having to renumber all the existing parameters.
1192 raise NotImplementedError(
1193 "'post-compile' bind parameters are not supported with "
1194 "the 'numeric' paramstyle at this time."
1195 )
1197 replacement_expressions = {}
1198 to_update_sets = {}
1200 # notes:
1201 # *unescaped* parameter names in:
1202 # self.bind_names, self.binds, self._bind_processors
1203 #
1204 # *escaped* parameter names in:
1205 # construct_params(), replacement_expressions
1207 for name in (
1208 self.positiontup if self.positional else self.bind_names.values()
1209 ):
1210 escaped_name = (
1211 self.escaped_bind_names.get(name, name)
1212 if self.escaped_bind_names
1213 else name
1214 )
1216 parameter = self.binds[name]
1217 if parameter in self.literal_execute_params:
1218 if escaped_name not in replacement_expressions:
1219 value = parameters.pop(name)
1221 replacement_expressions[
1222 escaped_name
1223 ] = self.render_literal_bindparam(
1224 parameter,
1225 render_literal_value=value,
1226 )
1227 continue
1229 if parameter in self.post_compile_params:
1230 if escaped_name in replacement_expressions:
1231 to_update = to_update_sets[escaped_name]
1232 else:
1233 # we are removing the parameter from parameters
1234 # because it is a list value, which is not expected by
1235 # TypeEngine objects that would otherwise be asked to
1236 # process it. the single name is being replaced with
1237 # individual numbered parameters for each value in the
1238 # param.
1239 #
1240 # note we are also inserting *escaped* parameter names
1241 # into the given dictionary. default dialect will
1242 # use these param names directly as they will not be
1243 # in the escaped_bind_names dictionary.
1244 values = parameters.pop(name)
1246 leep = self._literal_execute_expanding_parameter
1247 to_update, replacement_expr = leep(
1248 escaped_name, parameter, values
1249 )
1251 to_update_sets[escaped_name] = to_update
1252 replacement_expressions[escaped_name] = replacement_expr
1254 if not parameter.literal_execute:
1255 parameters.update(to_update)
1256 if parameter.type._is_tuple_type:
1257 new_processors.update(
1258 (
1259 "%s_%s_%s" % (name, i, j),
1260 processors[name][j - 1],
1261 )
1262 for i, tuple_element in enumerate(values, 1)
1263 for j, value in enumerate(tuple_element, 1)
1264 if name in processors
1265 and processors[name][j - 1] is not None
1266 )
1267 else:
1268 new_processors.update(
1269 (key, processors[name])
1270 for key, value in to_update
1271 if name in processors
1272 )
1273 if self.positional:
1274 positiontup.extend(name for name, value in to_update)
1275 expanded_parameters[name] = [
1276 expand_key for expand_key, value in to_update
1277 ]
1278 elif self.positional:
1279 positiontup.append(name)
1281 def process_expanding(m):
1282 key = m.group(1)
1283 expr = replacement_expressions[key]
1285 # if POSTCOMPILE included a bind_expression, render that
1286 # around each element
1287 if m.group(2):
1288 tok = m.group(2).split("~~")
1289 be_left, be_right = tok[1], tok[3]
1290 expr = ", ".join(
1291 "%s%s%s" % (be_left, exp, be_right)
1292 for exp in expr.split(", ")
1293 )
1294 return expr
1296 statement = re.sub(
1297 r"__\[POSTCOMPILE_(\S+?)(~~.+?~~)?\]",
1298 process_expanding,
1299 self.string,
1300 )
1302 expanded_state = ExpandedState(
1303 statement,
1304 parameters,
1305 new_processors,
1306 positiontup,
1307 expanded_parameters,
1308 )
1310 if _populate_self:
1311 # this is for the "render_postcompile" flag, which is not
1312 # otherwise used internally and is for end-user debugging and
1313 # special use cases.
1314 self.string = expanded_state.statement
1315 self._bind_processors.update(expanded_state.processors)
1316 self.positiontup = expanded_state.positiontup
1317 self.post_compile_params = frozenset()
1318 for key in expanded_state.parameter_expansion:
1319 bind = self.binds.pop(key)
1320 self.bind_names.pop(bind)
1321 for value, expanded_key in zip(
1322 bind.value, expanded_state.parameter_expansion[key]
1323 ):
1324 self.binds[expanded_key] = new_param = bind._with_value(
1325 value
1326 )
1327 self.bind_names[new_param] = expanded_key
1329 return expanded_state
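# Illustrative sketch (editor's addition, not part of the module): "expanding"
# IN parameters are rendered as __[POSTCOMPILE_...] tokens and expanded by the
# method above; the ``render_postcompile`` compile flag runs the expansion up
# front, which is handy for debugging.  Exact parameter names may vary.
#
#     >>> from sqlalchemy import select, column
#     >>> stmt = select(column("x")).where(column("x").in_([1, 2, 3]))
#     >>> print(stmt.compile(compile_kwargs={"render_postcompile": True}))
#     SELECT x
#     WHERE x IN (:x_1_1, :x_1_2, :x_1_3)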
1331 @util.preload_module("sqlalchemy.engine.cursor")
1332 def _create_result_map(self):
1333 """utility method used for unit tests only."""
1334 cursor = util.preloaded.engine_cursor
1335 return cursor.CursorResultMetaData._create_description_match_map(
1336 self._result_columns
1337 )
1339 @util.memoized_property
1340 def _within_exec_param_key_getter(self):
1341 getter = self._key_getters_for_crud_column[2]
1342 return getter
1344 @util.memoized_property
1345 @util.preload_module("sqlalchemy.engine.result")
1346 def _inserted_primary_key_from_lastrowid_getter(self):
1347 result = util.preloaded.engine_result
1349 param_key_getter = self._within_exec_param_key_getter
1350 table = self.statement.table
1352 getters = [
1353 (operator.methodcaller("get", param_key_getter(col), None), col)
1354 for col in table.primary_key
1355 ]
1357 autoinc_col = table._autoincrement_column
1358 if autoinc_col is not None:
1359 # apply type post processors to the lastrowid
1360 proc = autoinc_col.type._cached_result_processor(
1361 self.dialect, None
1362 )
1363 else:
1364 proc = None
1366 row_fn = result.result_tuple([col.key for col in table.primary_key])
1368 def get(lastrowid, parameters):
1369 """given cursor.lastrowid value and the parameters used for INSERT,
1370 return a "row" that represents the primary key, either by
1371 using the "lastrowid" or by extracting values from the parameters
1372 that were sent along with the INSERT.
1374 """
1375 if proc is not None:
1376 lastrowid = proc(lastrowid)
1378 if lastrowid is None:
1379 return row_fn(getter(parameters) for getter, col in getters)
1380 else:
1381 return row_fn(
1382 lastrowid if col is autoinc_col else getter(parameters)
1383 for getter, col in getters
1384 )
1386 return get
1388 @util.memoized_property
1389 @util.preload_module("sqlalchemy.engine.result")
1390 def _inserted_primary_key_from_returning_getter(self):
1391 result = util.preloaded.engine_result
1393 param_key_getter = self._within_exec_param_key_getter
1394 table = self.statement.table
1396 ret = {col: idx for idx, col in enumerate(self.returning)}
1398 getters = [
1399 (operator.itemgetter(ret[col]), True)
1400 if col in ret
1401 else (
1402 operator.methodcaller("get", param_key_getter(col), None),
1403 False,
1404 )
1405 for col in table.primary_key
1406 ]
1408 row_fn = result.result_tuple([col.key for col in table.primary_key])
1410 def get(row, parameters):
1411 return row_fn(
1412 getter(row) if use_row else getter(parameters)
1413 for getter, use_row in getters
1414 )
1416 return get
1418 def default_from(self):
1419 """Called when a SELECT statement has no froms, and no FROM clause is
1420 to be appended.
1422 Gives Oracle a chance to tack on a ``FROM DUAL`` to the string output.
1424 """
1425 return ""
1427 def visit_grouping(self, grouping, asfrom=False, **kwargs):
1428 return "(" + grouping.element._compiler_dispatch(self, **kwargs) + ")"
1430 def visit_select_statement_grouping(self, grouping, **kwargs):
1431 return "(" + grouping.element._compiler_dispatch(self, **kwargs) + ")"
1433 def visit_label_reference(
1434 self, element, within_columns_clause=False, **kwargs
1435 ):
1436 if self.stack and self.dialect.supports_simple_order_by_label:
1437 compile_state = self.stack[-1]["compile_state"]
1439 (
1440 with_cols,
1441 only_froms,
1442 only_cols,
1443 ) = compile_state._label_resolve_dict
1444 if within_columns_clause:
1445 resolve_dict = only_froms
1446 else:
1447 resolve_dict = only_cols
1449 # this can be None in the case that a _label_reference()
1450 # were subject to a replacement operation, in which case
1451 # the replacement of the Label element may have changed
1452 # to something else like a ColumnClause expression.
1453 order_by_elem = element.element._order_by_label_element
1455 if (
1456 order_by_elem is not None
1457 and order_by_elem.name in resolve_dict
1458 and order_by_elem.shares_lineage(
1459 resolve_dict[order_by_elem.name]
1460 )
1461 ):
1462 kwargs[
1463 "render_label_as_label"
1464 ] = element.element._order_by_label_element
1465 return self.process(
1466 element.element,
1467 within_columns_clause=within_columns_clause,
1468 **kwargs
1469 )
1471 def visit_textual_label_reference(
1472 self, element, within_columns_clause=False, **kwargs
1473 ):
1474 if not self.stack:
1475 # compiling the element outside of the context of a SELECT
1476 return self.process(element._text_clause)
1478 compile_state = self.stack[-1]["compile_state"]
1479 with_cols, only_froms, only_cols = compile_state._label_resolve_dict
1480 try:
1481 if within_columns_clause:
1482 col = only_froms[element.element]
1483 else:
1484 col = with_cols[element.element]
1485 except KeyError as err:
1486 coercions._no_text_coercion(
1487 element.element,
1488 extra=(
1489 "Can't resolve label reference for ORDER BY / "
1490 "GROUP BY / DISTINCT etc."
1491 ),
1492 exc_cls=exc.CompileError,
1493 err=err,
1494 )
1495 else:
1496 kwargs["render_label_as_label"] = col
1497 return self.process(
1498 col, within_columns_clause=within_columns_clause, **kwargs
1499 )
1501 def visit_label(
1502 self,
1503 label,
1504 add_to_result_map=None,
1505 within_label_clause=False,
1506 within_columns_clause=False,
1507 render_label_as_label=None,
1508 result_map_targets=(),
1509 **kw
1510 ):
1511 # only render labels within the columns clause
1512 # or ORDER BY clause of a select. dialect-specific compilers
1513 # can modify this behavior.
1514 render_label_with_as = (
1515 within_columns_clause and not within_label_clause
1516 )
1517 render_label_only = render_label_as_label is label
1519 if render_label_only or render_label_with_as:
1520 if isinstance(label.name, elements._truncated_label):
1521 labelname = self._truncated_identifier("colident", label.name)
1522 else:
1523 labelname = label.name
1525 if render_label_with_as:
1526 if add_to_result_map is not None:
1527 add_to_result_map(
1528 labelname,
1529 label.name,
1530 (label, labelname) + label._alt_names + result_map_targets,
1531 label.type,
1532 )
1533 return (
1534 label.element._compiler_dispatch(
1535 self,
1536 within_columns_clause=True,
1537 within_label_clause=True,
1538 **kw
1539 )
1540 + OPERATORS[operators.as_]
1541 + self.preparer.format_label(label, labelname)
1542 )
1543 elif render_label_only:
1544 return self.preparer.format_label(label, labelname)
1545 else:
1546 return label.element._compiler_dispatch(
1547 self, within_columns_clause=False, **kw
1548 )
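# Illustrative sketch (editor's addition, not part of the module): a label
# renders with AS in the columns clause and by bare name elsewhere, e.g. in
# ORDER BY, per visit_label() and the label-reference visitors above.
#
#     >>> from sqlalchemy import select, column
#     >>> print(select((column("a") + column("b")).label("total")).order_by("total"))
#     SELECT a + b AS total
#     ORDER BY total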
1550 def _fallback_column_name(self, column):
1551 raise exc.CompileError(
1552 "Cannot compile Column object until " "its 'name' is assigned."
1553 )
1555 def visit_lambda_element(self, element, **kw):
1556 sql_element = element._resolved
1557 return self.process(sql_element, **kw)
1559 def visit_column(
1560 self,
1561 column,
1562 add_to_result_map=None,
1563 include_table=True,
1564 result_map_targets=(),
1565 **kwargs
1566 ):
1567 name = orig_name = column.name
1568 if name is None:
1569 name = self._fallback_column_name(column)
1571 is_literal = column.is_literal
1572 if not is_literal and isinstance(name, elements._truncated_label):
1573 name = self._truncated_identifier("colident", name)
1575 if add_to_result_map is not None:
1576 targets = (column, name, column.key) + result_map_targets
1577 if column._tq_label:
1578 targets += (column._tq_label,)
1580 add_to_result_map(name, orig_name, targets, column.type)
1582 if is_literal:
1583 # note we are not currently accommodating for
1584 # literal_column(quoted_name('ident', True)) here
1585 name = self.escape_literal_column(name)
1586 else:
1587 name = self.preparer.quote(name)
1588 table = column.table
1589 if table is None or not include_table or not table.named_with_column:
1590 return name
1591 else:
1592 effective_schema = self.preparer.schema_for_object(table)
1594 if effective_schema:
1595 schema_prefix = (
1596 self.preparer.quote_schema(effective_schema) + "."
1597 )
1598 else:
1599 schema_prefix = ""
1600 tablename = table.name
1601 if isinstance(tablename, elements._truncated_label):
1602 tablename = self._truncated_identifier("alias", tablename)
1604 return schema_prefix + self.preparer.quote(tablename) + "." + name
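# Illustrative sketch (editor's addition, not part of the module): column
# references are table- and schema-qualified here when the owning table
# carries that information.
#
#     >>> from sqlalchemy import MetaData, Table, Column, Integer, select
#     >>> t = Table("t", MetaData(schema="s"), Column("id", Integer))
#     >>> print(select(t.c.id))
#     SELECT s.t.id
#     FROM s.t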
1606 def visit_collation(self, element, **kw):
1607 return self.preparer.format_collation(element.collation)
1609 def visit_fromclause(self, fromclause, **kwargs):
1610 return fromclause.name
1612 def visit_index(self, index, **kwargs):
1613 return index.name
1615 def visit_typeclause(self, typeclause, **kw):
1616 kw["type_expression"] = typeclause
1617 kw["identifier_preparer"] = self.preparer
1618 return self.dialect.type_compiler.process(typeclause.type, **kw)
1620 def post_process_text(self, text):
1621 if self.preparer._double_percents:
1622 text = text.replace("%", "%%")
1623 return text
1625 def escape_literal_column(self, text):
1626 if self.preparer._double_percents:
1627 text = text.replace("%", "%%")
1628 return text
1630 def visit_textclause(self, textclause, add_to_result_map=None, **kw):
1631 def do_bindparam(m):
1632 name = m.group(1)
1633 if name in textclause._bindparams:
1634 return self.process(textclause._bindparams[name], **kw)
1635 else:
1636 return self.bindparam_string(name, **kw)
1638 if not self.stack:
1639 self.isplaintext = True
1641 if add_to_result_map:
1642 # text() object is present in the columns clause of a
1643 # select(). Add a no-name entry to the result map so that
1644 # row[text()] produces a result
1645 add_to_result_map(None, None, (textclause,), sqltypes.NULLTYPE)
1647 # un-escape any \:params
1648 return BIND_PARAMS_ESC.sub(
1649 lambda m: m.group(1),
1650 BIND_PARAMS.sub(
1651 do_bindparam, self.post_process_text(textclause.text)
1652 ),
1653 )
1655 def visit_textual_select(
1656 self, taf, compound_index=None, asfrom=False, **kw
1657 ):
1659 toplevel = not self.stack
1660 entry = self._default_stack_entry if toplevel else self.stack[-1]
1662 new_entry = {
1663 "correlate_froms": set(),
1664 "asfrom_froms": set(),
1665 "selectable": taf,
1666 }
1667 self.stack.append(new_entry)
1669 if taf._independent_ctes:
1670 for cte in taf._independent_ctes:
1671 cte._compiler_dispatch(self, **kw)
1673 populate_result_map = (
1674 toplevel
1675 or (
1676 compound_index == 0
1677 and entry.get("need_result_map_for_compound", False)
1678 )
1679 or entry.get("need_result_map_for_nested", False)
1680 )
1682 if populate_result_map:
1683 self._ordered_columns = (
1684 self._textual_ordered_columns
1685 ) = taf.positional
1687 # enable looser result column matching when the SQL text links to
1688 # Column objects by name only
1689 self._loose_column_name_matching = not taf.positional and bool(
1690 taf.column_args
1691 )
1693 for c in taf.column_args:
1694 self.process(
1695 c,
1696 within_columns_clause=True,
1697 add_to_result_map=self._add_to_result_map,
1698 )
1700 text = self.process(taf.element, **kw)
1701 if self.ctes:
1702 nesting_level = len(self.stack) if not toplevel else None
1703 text = (
1704 self._render_cte_clause(
1705 nesting_level=nesting_level,
1706 visiting_cte=kw.get("visiting_cte"),
1707 )
1708 + text
1709 )
1711 self.stack.pop(-1)
1713 return text
1715 def visit_null(self, expr, **kw):
1716 return "NULL"
1718 def visit_true(self, expr, **kw):
1719 if self.dialect.supports_native_boolean:
1720 return "true"
1721 else:
1722 return "1"
1724 def visit_false(self, expr, **kw):
1725 if self.dialect.supports_native_boolean:
1726 return "false"
1727 else:
1728 return "0"
1730 def _generate_delimited_list(self, elements, separator, **kw):
1731 return separator.join(
1732 s
1733 for s in (c._compiler_dispatch(self, **kw) for c in elements)
1734 if s
1735 )
1737 def _generate_delimited_and_list(self, clauses, **kw):
1739 lcc, clauses = elements.BooleanClauseList._process_clauses_for_boolean(
1740 operators.and_,
1741 elements.True_._singleton,
1742 elements.False_._singleton,
1743 clauses,
1744 )
1745 if lcc == 1:
1746 return clauses[0]._compiler_dispatch(self, **kw)
1747 else:
1748 separator = OPERATORS[operators.and_]
1749 return separator.join(
1750 s
1751 for s in (c._compiler_dispatch(self, **kw) for c in clauses)
1752 if s
1753 )
1755 def visit_tuple(self, clauselist, **kw):
1756 return "(%s)" % self.visit_clauselist(clauselist, **kw)
1758 def visit_clauselist(self, clauselist, **kw):
1759 sep = clauselist.operator
1760 if sep is None:
1761 sep = " "
1762 else:
1763 sep = OPERATORS[clauselist.operator]
1765 return self._generate_delimited_list(clauselist.clauses, sep, **kw)
1767 def visit_case(self, clause, **kwargs):
1768 x = "CASE "
1769 if clause.value is not None:
1770 x += clause.value._compiler_dispatch(self, **kwargs) + " "
1771 for cond, result in clause.whens:
1772 x += (
1773 "WHEN "
1774 + cond._compiler_dispatch(self, **kwargs)
1775 + " THEN "
1776 + result._compiler_dispatch(self, **kwargs)
1777 + " "
1778 )
1779 if clause.else_ is not None:
1780 x += (
1781 "ELSE " + clause.else_._compiler_dispatch(self, **kwargs) + " "
1782 )
1783 x += "END"
1784 return x
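# Illustrative sketch (editor's addition, not part of the module): CASE
# rendering; the bind parameter names shown are the anonymous defaults and may
# differ.
#
#     >>> from sqlalchemy import case, column
#     >>> print(case((column("x") > 0, "pos"), else_="neg"))
#     CASE WHEN (x > :x_1) THEN :param_1 ELSE :param_2 END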
1786 def visit_type_coerce(self, type_coerce, **kw):
1787 return type_coerce.typed_expression._compiler_dispatch(self, **kw)
1789 def visit_cast(self, cast, **kwargs):
1790 return "CAST(%s AS %s)" % (
1791 cast.clause._compiler_dispatch(self, **kwargs),
1792 cast.typeclause._compiler_dispatch(self, **kwargs),
1793 )
1795 def _format_frame_clause(self, range_, **kw):
1797 return "%s AND %s" % (
1798 "UNBOUNDED PRECEDING"
1799 if range_[0] is elements.RANGE_UNBOUNDED
1800 else "CURRENT ROW"
1801 if range_[0] is elements.RANGE_CURRENT
1802 else "%s PRECEDING"
1803 % (self.process(elements.literal(abs(range_[0])), **kw),)
1804 if range_[0] < 0
1805 else "%s FOLLOWING"
1806 % (self.process(elements.literal(range_[0]), **kw),),
1807 "UNBOUNDED FOLLOWING"
1808 if range_[1] is elements.RANGE_UNBOUNDED
1809 else "CURRENT ROW"
1810 if range_[1] is elements.RANGE_CURRENT
1811 else "%s PRECEDING"
1812 % (self.process(elements.literal(abs(range_[1])), **kw),)
1813 if range_[1] < 0
1814 else "%s FOLLOWING"
1815 % (self.process(elements.literal(range_[1]), **kw),),
1816 )
1818 def visit_over(self, over, **kwargs):
1819 text = over.element._compiler_dispatch(self, **kwargs)
1820 if over.range_:
1821 range_ = "RANGE BETWEEN %s" % self._format_frame_clause(
1822 over.range_, **kwargs
1823 )
1824 elif over.rows:
1825 range_ = "ROWS BETWEEN %s" % self._format_frame_clause(
1826 over.rows, **kwargs
1827 )
1828 else:
1829 range_ = None
1831 return "%s OVER (%s)" % (
1832 text,
1833 " ".join(
1834 [
1835 "%s BY %s"
1836 % (word, clause._compiler_dispatch(self, **kwargs))
1837 for word, clause in (
1838 ("PARTITION", over.partition_by),
1839 ("ORDER", over.order_by),
1840 )
1841 if clause is not None and len(clause)
1842 ]
1843 + ([range_] if range_ else [])
1844 ),
1845 )
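# Illustrative sketch (editor's addition, not part of the module): window
# function rendering via visit_over().
#
#     >>> from sqlalchemy import func, column
#     >>> print(func.row_number().over(
#     ...     partition_by=column("dept"), order_by=column("salary")))
#     row_number() OVER (PARTITION BY dept ORDER BY salary)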
1847 def visit_withingroup(self, withingroup, **kwargs):
1848 return "%s WITHIN GROUP (ORDER BY %s)" % (
1849 withingroup.element._compiler_dispatch(self, **kwargs),
1850 withingroup.order_by._compiler_dispatch(self, **kwargs),
1851 )
1853 def visit_funcfilter(self, funcfilter, **kwargs):
1854 return "%s FILTER (WHERE %s)" % (
1855 funcfilter.func._compiler_dispatch(self, **kwargs),
1856 funcfilter.criterion._compiler_dispatch(self, **kwargs),
1857 )
1859 def visit_extract(self, extract, **kwargs):
1860 field = self.extract_map.get(extract.field, extract.field)
1861 return "EXTRACT(%s FROM %s)" % (
1862 field,
1863 extract.expr._compiler_dispatch(self, **kwargs),
1864 )
1866 def visit_scalar_function_column(self, element, **kw):
1867 compiled_fn = self.visit_function(element.fn, **kw)
1868 compiled_col = self.visit_column(element, **kw)
1869 return "(%s).%s" % (compiled_fn, compiled_col)
1871 def visit_function(self, func, add_to_result_map=None, **kwargs):
1872 if add_to_result_map is not None:
1873 add_to_result_map(func.name, func.name, (), func.type)
1875 disp = getattr(self, "visit_%s_func" % func.name.lower(), None)
1876 if disp:
1877 text = disp(func, **kwargs)
1878 else:
1879 name = FUNCTIONS.get(func._deannotate().__class__, None)
1880 if name:
1881 if func._has_args:
1882 name += "%(expr)s"
1883 else:
1884 name = func.name
1885 name = (
1886 self.preparer.quote(name)
1887 if self.preparer._requires_quotes_illegal_chars(name)
1888 or isinstance(name, elements.quoted_name)
1889 else name
1890 )
1891 name = name + "%(expr)s"
1892 text = ".".join(
1893 [
1894 (
1895 self.preparer.quote(tok)
1896 if self.preparer._requires_quotes_illegal_chars(tok)
1897 or isinstance(name, elements.quoted_name)
1898 else tok
1899 )
1900 for tok in func.packagenames
1901 ]
1902 + [name]
1903 ) % {"expr": self.function_argspec(func, **kwargs)}
1905 if func._with_ordinality:
1906 text += " WITH ORDINALITY"
1907 return text
1909 def visit_next_value_func(self, next_value, **kw):
1910 return self.visit_sequence(next_value.sequence)
1912 def visit_sequence(self, sequence, **kw):
1913 raise NotImplementedError(
1914 "Dialect '%s' does not support sequence increments."
1915 % self.dialect.name
1916 )
1918 def function_argspec(self, func, **kwargs):
1919 return func.clause_expr._compiler_dispatch(self, **kwargs)
1921 def visit_compound_select(
1922 self, cs, asfrom=False, compound_index=None, **kwargs
1923 ):
1924 toplevel = not self.stack
1926 compile_state = cs._compile_state_factory(cs, self, **kwargs)
1928 if toplevel and not self.compile_state:
1929 self.compile_state = compile_state
1931 compound_stmt = compile_state.statement
1933 entry = self._default_stack_entry if toplevel else self.stack[-1]
1934 need_result_map = toplevel or (
1935 not compound_index
1936 and entry.get("need_result_map_for_compound", False)
1937 )
1939 # indicates there is already a CompoundSelect in play
1940 if compound_index == 0:
1941 entry["select_0"] = cs
1943 self.stack.append(
1944 {
1945 "correlate_froms": entry["correlate_froms"],
1946 "asfrom_froms": entry["asfrom_froms"],
1947 "selectable": cs,
1948 "compile_state": compile_state,
1949 "need_result_map_for_compound": need_result_map,
1950 }
1951 )
1953 if compound_stmt._independent_ctes:
1954 for cte in compound_stmt._independent_ctes:
1955 cte._compiler_dispatch(self, **kwargs)
1957 keyword = self.compound_keywords.get(cs.keyword)
1959 text = (" " + keyword + " ").join(
1960 (
1961 c._compiler_dispatch(
1962 self, asfrom=asfrom, compound_index=i, **kwargs
1963 )
1964 for i, c in enumerate(cs.selects)
1965 )
1966 )
1968 kwargs["include_table"] = False
1969 text += self.group_by_clause(cs, **dict(asfrom=asfrom, **kwargs))
1970 text += self.order_by_clause(cs, **kwargs)
1971 if cs._has_row_limiting_clause:
1972 text += self._row_limit_clause(cs, **kwargs)
1974 if self.ctes:
1975 nesting_level = len(self.stack) if not toplevel else None
1976 text = (
1977 self._render_cte_clause(
1978 nesting_level=nesting_level,
1979 include_following_stack=True,
1980 visiting_cte=kwargs.get("visiting_cte"),
1981 )
1982 + text
1983 )
1985 self.stack.pop(-1)
1986 return text
1988 def _row_limit_clause(self, cs, **kwargs):
1989 if cs._fetch_clause is not None:
1990 return self.fetch_clause(cs, **kwargs)
1991 else:
1992 return self.limit_clause(cs, **kwargs)
1994 def _get_operator_dispatch(self, operator_, qualifier1, qualifier2):
1995 attrname = "visit_%s_%s%s" % (
1996 operator_.__name__,
1997 qualifier1,
1998 "_" + qualifier2 if qualifier2 else "",
1999 )
2000 return getattr(self, attrname, None)
2002 def visit_unary(
2003 self, unary, add_to_result_map=None, result_map_targets=(), **kw
2004 ):
2006 if add_to_result_map is not None:
2007 result_map_targets += (unary,)
2008 kw["add_to_result_map"] = add_to_result_map
2009 kw["result_map_targets"] = result_map_targets
2011 if unary.operator:
2012 if unary.modifier:
2013 raise exc.CompileError(
2014 "Unary expression does not support operator "
2015 "and modifier simultaneously"
2016 )
2017 disp = self._get_operator_dispatch(
2018 unary.operator, "unary", "operator"
2019 )
2020 if disp:
2021 return disp(unary, unary.operator, **kw)
2022 else:
2023 return self._generate_generic_unary_operator(
2024 unary, OPERATORS[unary.operator], **kw
2025 )
2026 elif unary.modifier:
2027 disp = self._get_operator_dispatch(
2028 unary.modifier, "unary", "modifier"
2029 )
2030 if disp:
2031 return disp(unary, unary.modifier, **kw)
2032 else:
2033 return self._generate_generic_unary_modifier(
2034 unary, OPERATORS[unary.modifier], **kw
2035 )
2036 else:
2037 raise exc.CompileError(
2038 "Unary expression has no operator or modifier"
2039 )
2041 def visit_is_true_unary_operator(self, element, operator, **kw):
2042 if (
2043 element._is_implicitly_boolean
2044 or self.dialect.supports_native_boolean
2045 ):
2046 return self.process(element.element, **kw)
2047 else:
2048 return "%s = 1" % self.process(element.element, **kw)
2050 def visit_is_false_unary_operator(self, element, operator, **kw):
2051 if (
2052 element._is_implicitly_boolean
2053 or self.dialect.supports_native_boolean
2054 ):
2055 return "NOT %s" % self.process(element.element, **kw)
2056 else:
2057 return "%s = 0" % self.process(element.element, **kw)
2059 def visit_not_match_op_binary(self, binary, operator, **kw):
2060 return "NOT %s" % self.visit_binary(
2061 binary, override_operator=operators.match_op
2062 )
2064 def visit_not_in_op_binary(self, binary, operator, **kw):
2065 # The brackets are required in the NOT IN operation because the empty
2066 # case is handled using the form "(col NOT IN (null) OR 1 = 1)".
2067 # The presence of the OR makes the brackets required.
2068 return "(%s)" % self._generate_generic_binary(
2069 binary, OPERATORS[operator], **kw
2070 )
2072 def visit_empty_set_op_expr(self, type_, expand_op):
2073 if expand_op is operators.not_in_op:
2074 if len(type_) > 1:
2075 return "(%s)) OR (1 = 1" % (
2076 ", ".join("NULL" for element in type_)
2077 )
2078 else:
2079 return "NULL) OR (1 = 1"
2080 elif expand_op is operators.in_op:
2081 if len(type_) > 1:
2082 return "(%s)) AND (1 != 1" % (
2083 ", ".join("NULL" for element in type_)
2084 )
2085 else:
2086 return "NULL) AND (1 != 1"
2087 else:
2088 return self.visit_empty_set_expr(type_)
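# illustrative sketch: the odd-looking fragments returned above are spliced
# into the parenthesis already emitted for the IN clause, so that, roughly,
# an empty col.in_([]) renders as
#
#     x IN (NULL) AND (1 != 1)
#
# and an empty not_in([]) as "(x NOT IN (NULL) OR (1 = 1))"; the expression
# stays syntactically valid while always evaluating false (or true).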
2090 def visit_empty_set_expr(self, element_types):
2091 raise NotImplementedError(
2092 "Dialect '%s' does not support empty set expression."
2093 % self.dialect.name
2094 )
2096 def _literal_execute_expanding_parameter_literal_binds(
2097 self, parameter, values
2098 ):
2100 typ_dialect_impl = parameter.type._unwrapped_dialect_impl(self.dialect)
2102 if not values:
2103 if typ_dialect_impl._is_tuple_type:
2104 replacement_expression = (
2105 "VALUES " if self.dialect.tuple_in_values else ""
2106 ) + self.visit_empty_set_op_expr(
2107 parameter.type.types, parameter.expand_op
2108 )
2110 else:
2111 replacement_expression = self.visit_empty_set_op_expr(
2112 [parameter.type], parameter.expand_op
2113 )
2115 elif typ_dialect_impl._is_tuple_type or (
2116 typ_dialect_impl._isnull
2117 and isinstance(values[0], util.collections_abc.Sequence)
2118 and not isinstance(
2119 values[0], util.string_types + util.binary_types
2120 )
2121 ):
2123 replacement_expression = (
2124 "VALUES " if self.dialect.tuple_in_values else ""
2125 ) + ", ".join(
2126 "(%s)"
2127 % (
2128 ", ".join(
2129 self.render_literal_value(value, param_type)
2130 for value, param_type in zip(
2131 tuple_element, parameter.type.types
2132 )
2133 )
2134 )
2135 for i, tuple_element in enumerate(values)
2136 )
2137 else:
2138 replacement_expression = ", ".join(
2139 self.render_literal_value(value, parameter.type)
2140 for value in values
2141 )
2143 return (), replacement_expression
2145 def _literal_execute_expanding_parameter(self, name, parameter, values):
2147 if parameter.literal_execute:
2148 return self._literal_execute_expanding_parameter_literal_binds(
2149 parameter, values
2150 )
2152 typ_dialect_impl = parameter.type._unwrapped_dialect_impl(self.dialect)
2154 if not values:
2155 to_update = []
2156 if typ_dialect_impl._is_tuple_type:
2158 replacement_expression = self.visit_empty_set_op_expr(
2159 parameter.type.types, parameter.expand_op
2160 )
2161 else:
2162 replacement_expression = self.visit_empty_set_op_expr(
2163 [parameter.type], parameter.expand_op
2164 )
2166 elif typ_dialect_impl._is_tuple_type or (
2167 typ_dialect_impl._isnull
2168 and isinstance(values[0], util.collections_abc.Sequence)
2169 and not isinstance(
2170 values[0], util.string_types + util.binary_types
2171 )
2172 ):
2173 assert not typ_dialect_impl._is_array
2174 to_update = [
2175 ("%s_%s_%s" % (name, i, j), value)
2176 for i, tuple_element in enumerate(values, 1)
2177 for j, value in enumerate(tuple_element, 1)
2178 ]
2179 replacement_expression = (
2180 "VALUES " if self.dialect.tuple_in_values else ""
2181 ) + ", ".join(
2182 "(%s)"
2183 % (
2184 ", ".join(
2185 self.bindtemplate
2186 % {"name": to_update[i * len(tuple_element) + j][0]}
2187 for j, value in enumerate(tuple_element)
2188 )
2189 )
2190 for i, tuple_element in enumerate(values)
2191 )
2192 else:
2193 to_update = [
2194 ("%s_%s" % (name, i), value)
2195 for i, value in enumerate(values, 1)
2196 ]
2197 replacement_expression = ", ".join(
2198 self.bindtemplate % {"name": key} for key, value in to_update
2199 )
2201 return to_update, replacement_expression
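# illustrative sketch, assuming a named paramstyle: for a non-tuple expanding
# parameter named "x_1" with values [10, 20, 30], the branch above yields
# to_update = [("x_1_1", 10), ("x_1_2", 20), ("x_1_3", 30)] and a
# replacement expression of roughly ":x_1_1, :x_1_2, :x_1_3", which later
# takes the place of the __[POSTCOMPILE_...] marker emitted by
# bindparam_string().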
2203 def visit_binary(
2204 self,
2205 binary,
2206 override_operator=None,
2207 eager_grouping=False,
2208 from_linter=None,
2209 lateral_from_linter=None,
2210 **kw
2211 ):
2212 if from_linter and operators.is_comparison(binary.operator):
2213 if lateral_from_linter is not None:
2214 enclosing_lateral = kw["enclosing_lateral"]
2215 lateral_from_linter.edges.update(
2216 itertools.product(
2217 binary.left._from_objects + [enclosing_lateral],
2218 binary.right._from_objects + [enclosing_lateral],
2219 )
2220 )
2221 else:
2222 from_linter.edges.update(
2223 itertools.product(
2224 binary.left._from_objects, binary.right._from_objects
2225 )
2226 )
2228 # don't allow "? = ?" to render
2229 if (
2230 self.ansi_bind_rules
2231 and isinstance(binary.left, elements.BindParameter)
2232 and isinstance(binary.right, elements.BindParameter)
2233 ):
2234 kw["literal_execute"] = True
2236 operator_ = override_operator or binary.operator
2237 disp = self._get_operator_dispatch(operator_, "binary", None)
2238 if disp:
2239 return disp(binary, operator_, **kw)
2240 else:
2241 try:
2242 opstring = OPERATORS[operator_]
2243 except KeyError as err:
2244 util.raise_(
2245 exc.UnsupportedCompilationError(self, operator_),
2246 replace_context=err,
2247 )
2248 else:
2249 return self._generate_generic_binary(
2250 binary,
2251 opstring,
2252 from_linter=from_linter,
2253 lateral_from_linter=lateral_from_linter,
2254 **kw
2255 )
2257 def visit_function_as_comparison_op_binary(self, element, operator, **kw):
2258 return self.process(element.sql_function, **kw)
2260 def visit_mod_binary(self, binary, operator, **kw):
2261 if self.preparer._double_percents:
2262 return (
2263 self.process(binary.left, **kw)
2264 + " %% "
2265 + self.process(binary.right, **kw)
2266 )
2267 else:
2268 return (
2269 self.process(binary.left, **kw)
2270 + " % "
2271 + self.process(binary.right, **kw)
2272 )
2274 def visit_custom_op_binary(self, element, operator, **kw):
2275 kw["eager_grouping"] = operator.eager_grouping
2276 return self._generate_generic_binary(
2277 element,
2278 " " + self.escape_literal_column(operator.opstring) + " ",
2279 **kw
2280 )
2282 def visit_custom_op_unary_operator(self, element, operator, **kw):
2283 return self._generate_generic_unary_operator(
2284 element, self.escape_literal_column(operator.opstring) + " ", **kw
2285 )
2287 def visit_custom_op_unary_modifier(self, element, operator, **kw):
2288 return self._generate_generic_unary_modifier(
2289 element, " " + self.escape_literal_column(operator.opstring), **kw
2290 )
2292 def _generate_generic_binary(
2293 self, binary, opstring, eager_grouping=False, **kw
2294 ):
2296 _in_binary = kw.get("_in_binary", False)
2298 kw["_in_binary"] = True
2299 kw["_binary_op"] = binary.operator
2300 text = (
2301 binary.left._compiler_dispatch(
2302 self, eager_grouping=eager_grouping, **kw
2303 )
2304 + opstring
2305 + binary.right._compiler_dispatch(
2306 self, eager_grouping=eager_grouping, **kw
2307 )
2308 )
2310 if _in_binary and eager_grouping:
2311 text = "(%s)" % text
2312 return text
2314 def _generate_generic_unary_operator(self, unary, opstring, **kw):
2315 return opstring + unary.element._compiler_dispatch(self, **kw)
2317 def _generate_generic_unary_modifier(self, unary, opstring, **kw):
2318 return unary.element._compiler_dispatch(self, **kw) + opstring
2320 @util.memoized_property
2321 def _like_percent_literal(self):
2322 return elements.literal_column("'%'", type_=sqltypes.STRINGTYPE)
2324 def visit_contains_op_binary(self, binary, operator, **kw):
2325 binary = binary._clone()
2326 percent = self._like_percent_literal
2327 binary.right = percent.concat(binary.right).concat(percent)
2328 return self.visit_like_op_binary(binary, operator, **kw)
2330 def visit_not_contains_op_binary(self, binary, operator, **kw):
2331 binary = binary._clone()
2332 percent = self._like_percent_literal
2333 binary.right = percent.concat(binary.right).concat(percent)
2334 return self.visit_not_like_op_binary(binary, operator, **kw)
2336 def visit_startswith_op_binary(self, binary, operator, **kw):
2337 binary = binary._clone()
2338 percent = self._like_percent_literal
2339 binary.right = percent._rconcat(binary.right)
2340 return self.visit_like_op_binary(binary, operator, **kw)
2342 def visit_not_startswith_op_binary(self, binary, operator, **kw):
2343 binary = binary._clone()
2344 percent = self._like_percent_literal
2345 binary.right = percent._rconcat(binary.right)
2346 return self.visit_not_like_op_binary(binary, operator, **kw)
2348 def visit_endswith_op_binary(self, binary, operator, **kw):
2349 binary = binary._clone()
2350 percent = self._like_percent_literal
2351 binary.right = percent.concat(binary.right)
2352 return self.visit_like_op_binary(binary, operator, **kw)
2354 def visit_not_endswith_op_binary(self, binary, operator, **kw):
2355 binary = binary._clone()
2356 percent = self._like_percent_literal
2357 binary.right = percent.concat(binary.right)
2358 return self.visit_not_like_op_binary(binary, operator, **kw)
2360 def visit_like_op_binary(self, binary, operator, **kw):
2361 escape = binary.modifiers.get("escape", None)
2363 # TODO: use ternary here, not "and" / "or"
2364 return "%s LIKE %s" % (
2365 binary.left._compiler_dispatch(self, **kw),
2366 binary.right._compiler_dispatch(self, **kw),
2367 ) + (
2368 " ESCAPE " + self.render_literal_value(escape, sqltypes.STRINGTYPE)
2369 if escape
2370 else ""
2371 )
2373 def visit_not_like_op_binary(self, binary, operator, **kw):
2374 escape = binary.modifiers.get("escape", None)
2375 return "%s NOT LIKE %s" % (
2376 binary.left._compiler_dispatch(self, **kw),
2377 binary.right._compiler_dispatch(self, **kw),
2378 ) + (
2379 " ESCAPE " + self.render_literal_value(escape, sqltypes.STRINGTYPE)
2380 if escape
2381 else ""
2382 )
2384 def visit_ilike_op_binary(self, binary, operator, **kw):
2385 escape = binary.modifiers.get("escape", None)
2386 return "lower(%s) LIKE lower(%s)" % (
2387 binary.left._compiler_dispatch(self, **kw),
2388 binary.right._compiler_dispatch(self, **kw),
2389 ) + (
2390 " ESCAPE " + self.render_literal_value(escape, sqltypes.STRINGTYPE)
2391 if escape
2392 else ""
2393 )
2395 def visit_not_ilike_op_binary(self, binary, operator, **kw):
2396 escape = binary.modifiers.get("escape", None)
2397 return "lower(%s) NOT LIKE lower(%s)" % (
2398 binary.left._compiler_dispatch(self, **kw),
2399 binary.right._compiler_dispatch(self, **kw),
2400 ) + (
2401 " ESCAPE " + self.render_literal_value(escape, sqltypes.STRINGTYPE)
2402 if escape
2403 else ""
2404 )
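# illustrative sketch, assuming the default dialect: the contains/startswith/
# endswith visitors above rewrite the right-hand side with '%' concatenation
# and then defer to the LIKE visitors, so something like
#
#     from sqlalchemy import column
#     print(column("x").contains("abc"))
#
# renders roughly as "x LIKE '%' || :x_1 || '%'"; dialects may override the
# concatenation operator or supply a native ILIKE instead of lower()/LIKE.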
2406 def visit_between_op_binary(self, binary, operator, **kw):
2407 symmetric = binary.modifiers.get("symmetric", False)
2408 return self._generate_generic_binary(
2409 binary, " BETWEEN SYMMETRIC " if symmetric else " BETWEEN ", **kw
2410 )
2412 def visit_not_between_op_binary(self, binary, operator, **kw):
2413 symmetric = binary.modifiers.get("symmetric", False)
2414 return self._generate_generic_binary(
2415 binary,
2416 " NOT BETWEEN SYMMETRIC " if symmetric else " NOT BETWEEN ",
2417 **kw
2418 )
2420 def visit_regexp_match_op_binary(self, binary, operator, **kw):
2421 raise exc.CompileError(
2422 "%s dialect does not support regular expressions"
2423 % self.dialect.name
2424 )
2426 def visit_not_regexp_match_op_binary(self, binary, operator, **kw):
2427 raise exc.CompileError(
2428 "%s dialect does not support regular expressions"
2429 % self.dialect.name
2430 )
2432 def visit_regexp_replace_op_binary(self, binary, operator, **kw):
2433 raise exc.CompileError(
2434 "%s dialect does not support regular expression replacements"
2435 % self.dialect.name
2436 )
2438 def visit_bindparam(
2439 self,
2440 bindparam,
2441 within_columns_clause=False,
2442 literal_binds=False,
2443 skip_bind_expression=False,
2444 literal_execute=False,
2445 render_postcompile=False,
2446 **kwargs
2447 ):
2448 if not skip_bind_expression:
2449 impl = bindparam.type.dialect_impl(self.dialect)
2450 if impl._has_bind_expression:
2451 bind_expression = impl.bind_expression(bindparam)
2452 wrapped = self.process(
2453 bind_expression,
2454 skip_bind_expression=True,
2455 within_columns_clause=within_columns_clause,
2456 literal_binds=literal_binds,
2457 literal_execute=literal_execute,
2458 render_postcompile=render_postcompile,
2459 **kwargs
2460 )
2461 if bindparam.expanding:
2462 # for postcompile w/ expanding, move the "wrapped" part
2463 # of this into the inside
2464 m = re.match(
2465 r"^(.*)\(__\[POSTCOMPILE_(\S+?)\]\)(.*)$", wrapped
2466 )
2467 wrapped = "(__[POSTCOMPILE_%s~~%s~~REPL~~%s~~])" % (
2468 m.group(2),
2469 m.group(1),
2470 m.group(3),
2471 )
2472 return wrapped
2474 if not literal_binds:
2475 literal_execute = (
2476 literal_execute
2477 or bindparam.literal_execute
2478 or (within_columns_clause and self.ansi_bind_rules)
2479 )
2480 post_compile = literal_execute or bindparam.expanding
2481 else:
2482 post_compile = False
2484 if literal_binds:
2485 ret = self.render_literal_bindparam(
2486 bindparam, within_columns_clause=True, **kwargs
2487 )
2488 if bindparam.expanding:
2489 ret = "(%s)" % ret
2490 return ret
2492 name = self._truncate_bindparam(bindparam)
2494 if name in self.binds:
2495 existing = self.binds[name]
2496 if existing is not bindparam:
2497 if (
2498 (existing.unique or bindparam.unique)
2499 and not existing.proxy_set.intersection(
2500 bindparam.proxy_set
2501 )
2502 and not existing._cloned_set.intersection(
2503 bindparam._cloned_set
2504 )
2505 ):
2506 raise exc.CompileError(
2507 "Bind parameter '%s' conflicts with "
2508 "unique bind parameter of the same name" % name
2509 )
2510 elif existing.expanding != bindparam.expanding:
2511 raise exc.CompileError(
2512 "Can't reuse bound parameter name '%s' in both "
2513 "'expanding' (e.g. within an IN expression) and "
2514 "non-expanding contexts. If this parameter is to "
2515 "receive a list/array value, set 'expanding=True' on "
2516 "it for expressions that aren't IN, otherwise use "
2517 "a different parameter name." % (name,)
2518 )
2519 elif existing._is_crud or bindparam._is_crud:
2520 raise exc.CompileError(
2521 "bindparam() name '%s' is reserved "
2522 "for automatic usage in the VALUES or SET "
2523 "clause of this "
2524 "insert/update statement. Please use a "
2525 "name other than column name when using bindparam() "
2526 "with insert() or update() (for example, 'b_%s')."
2527 % (bindparam.key, bindparam.key)
2528 )
2530 self.binds[bindparam.key] = self.binds[name] = bindparam
2532 # if we are given a cache key that we're going to match against,
2533 # relate the bindparam here to one that is most likely present
2534 # in the "extracted params" portion of the cache key. this is used
2535 # to set up a positional mapping that is used to determine the
2536 # correct parameters for a subsequent use of this compiled with
2537 # a different set of parameter values. here, we accommodate
2538 # parameters that may have been cloned both before and after the cache
2539 # key was generated.
2540 ckbm = self._cache_key_bind_match
2541 if ckbm:
2542 for bp in bindparam._cloned_set:
2543 if bp.key in ckbm:
2544 cb = ckbm[bp.key]
2545 ckbm[cb].append(bindparam)
2547 if bindparam.isoutparam:
2548 self.has_out_parameters = True
2550 if post_compile:
2551 if render_postcompile:
2552 self._render_postcompile = True
2554 if literal_execute:
2555 self.literal_execute_params |= {bindparam}
2556 else:
2557 self.post_compile_params |= {bindparam}
2559 ret = self.bindparam_string(
2560 name,
2561 post_compile=post_compile,
2562 expanding=bindparam.expanding,
2563 **kwargs
2564 )
2566 if bindparam.expanding:
2567 ret = "(%s)" % ret
2568 return ret
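# illustrative sketch: an expanding parameter such as column("x").in_([1, 2, 3])
# initially compiles to a placeholder along the lines of
#
#     x IN (__[POSTCOMPILE_x_1])
#
# the __[POSTCOMPILE_...] token is replaced with concrete bound-parameter
# names (or literals, in the literal_execute case) just before execution,
# once the actual number of values is known.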
2570 def render_literal_bindparam(
2571 self, bindparam, render_literal_value=NO_ARG, **kw
2572 ):
2573 if render_literal_value is not NO_ARG:
2574 value = render_literal_value
2575 else:
2576 if bindparam.value is None and bindparam.callable is None:
2577 op = kw.get("_binary_op", None)
2578 if op and op not in (operators.is_, operators.is_not):
2579 util.warn_limited(
2580 "Bound parameter '%s' rendering literal NULL in a SQL "
2581 "expression; comparisons to NULL should not use "
2582 "operators outside of 'is' or 'is not'",
2583 (bindparam.key,),
2584 )
2585 return self.process(sqltypes.NULLTYPE, **kw)
2586 value = bindparam.effective_value
2588 if bindparam.expanding:
2589 leep = self._literal_execute_expanding_parameter_literal_binds
2590 to_update, replacement_expr = leep(bindparam, value)
2591 return replacement_expr
2592 else:
2593 return self.render_literal_value(value, bindparam.type)
2595 def render_literal_value(self, value, type_):
2596 """Render the value of a bind parameter as a quoted literal.
2598 This is used for statement sections that do not accept bind parameters
2599 on the target driver/database.
2601 This should be implemented by subclasses using the quoting services
2602 of the DBAPI.
2604 """
2606 processor = type_._cached_literal_processor(self.dialect)
2607 if processor:
2608 try:
2609 return processor(value)
2610 except Exception as e:
2611 util.raise_(
2612 exc.CompileError(
2613 "Could not render literal value "
2614 '"%s" '
2615 "with datatype "
2616 "%s; see parent stack trace for "
2617 "more detail."
2618 % (
2619 sql_util._repr_single_value(value),
2620 type_,
2621 )
2622 ),
2623 from_=e,
2624 )
2626 else:
2627 raise exc.CompileError(
2628 "No literal value renderer is available for literal value "
2629 '"%s" with datatype %s'
2630 % (sql_util._repr_single_value(value), type_)
2631 )
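# illustrative note: render_literal_value() is what ultimately runs when a
# statement is stringified with inline literals, e.g.
#
#     print(stmt.compile(compile_kwargs={"literal_binds": True}))
#
# datatypes that provide no literal processor hit the CompileError above and
# cannot be rendered inline.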
2633 def _truncate_bindparam(self, bindparam):
2634 if bindparam in self.bind_names:
2635 return self.bind_names[bindparam]
2637 bind_name = bindparam.key
2638 if isinstance(bind_name, elements._truncated_label):
2639 bind_name = self._truncated_identifier("bindparam", bind_name)
2641 # add to bind_names for translation
2642 self.bind_names[bindparam] = bind_name
2644 return bind_name
2646 def _truncated_identifier(self, ident_class, name):
2647 if (ident_class, name) in self.truncated_names:
2648 return self.truncated_names[(ident_class, name)]
2650 anonname = name.apply_map(self.anon_map)
2652 if len(anonname) > self.label_length - 6:
2653 counter = self.truncated_names.get(ident_class, 1)
2654 truncname = (
2655 anonname[0 : max(self.label_length - 6, 0)]
2656 + "_"
2657 + hex(counter)[2:]
2658 )
2659 self.truncated_names[ident_class] = counter + 1
2660 else:
2661 truncname = anonname
2662 self.truncated_names[(ident_class, name)] = truncname
2663 return truncname
2665 def _anonymize(self, name):
2666 return name % self.anon_map
2668 def bindparam_string(
2669 self,
2670 name,
2671 positional_names=None,
2672 post_compile=False,
2673 expanding=False,
2674 escaped_from=None,
2675 **kw
2676 ):
2678 if self.positional:
2679 if positional_names is not None:
2680 positional_names.append(name)
2681 else:
2682 self.positiontup.append(name)
2683 self.positiontup_level[name] = len(self.stack)
2684 if not escaped_from:
2686 if _BIND_TRANSLATE_RE.search(name):
2687 # not quite the translate use case as we want to
2688 # also get a quick boolean if we even found
2689 # unusual characters in the name
2690 new_name = _BIND_TRANSLATE_RE.sub(
2691 lambda m: _BIND_TRANSLATE_CHARS[m.group(0)],
2692 name,
2693 )
2694 escaped_from = name
2695 name = new_name
2697 if escaped_from:
2698 if not self.escaped_bind_names:
2699 self.escaped_bind_names = {}
2700 self.escaped_bind_names[escaped_from] = name
2701 if post_compile:
2702 return "__[POSTCOMPILE_%s]" % name
2703 else:
2704 return self.bindtemplate % {"name": name}
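# illustrative note: bind names containing characters that are unsafe for the
# driver's paramstyle are rewritten above via _BIND_TRANSLATE_RE /
# _BIND_TRANSLATE_CHARS, and the original name is recorded in
# self.escaped_bind_names so that execution-time parameter dictionaries can
# be translated to the escaped names.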
2706 def visit_cte(
2707 self,
2708 cte,
2709 asfrom=False,
2710 ashint=False,
2711 fromhints=None,
2712 visiting_cte=None,
2713 from_linter=None,
2714 **kwargs
2715 ):
2716 self._init_cte_state()
2718 kwargs["visiting_cte"] = cte
2720 cte_name = cte.name
2722 if isinstance(cte_name, elements._truncated_label):
2723 cte_name = self._truncated_identifier("alias", cte_name)
2725 is_new_cte = True
2726 embedded_in_current_named_cte = False
2728 _reference_cte = cte._get_reference_cte()
2730 if _reference_cte in self.level_name_by_cte:
2731 cte_level, _ = self.level_name_by_cte[_reference_cte]
2732 assert _ == cte_name
2733 else:
2734 cte_level = len(self.stack) if cte.nesting else 1
2736 cte_level_name = (cte_level, cte_name)
2737 if cte_level_name in self.ctes_by_level_name:
2738 existing_cte = self.ctes_by_level_name[cte_level_name]
2739 embedded_in_current_named_cte = visiting_cte is existing_cte
2741 # we've generated a same-named CTE that we are enclosed in,
2742 # or this is the same CTE. just return the name.
2743 if cte is existing_cte._restates or cte is existing_cte:
2744 is_new_cte = False
2745 elif existing_cte is cte._restates:
2746 # we've generated a same-named CTE that is
2747 # enclosed in us - we take precedence, so
2748 # discard the text for the "inner".
2749 del self.ctes[existing_cte]
2751 existing_cte_reference_cte = existing_cte._get_reference_cte()
2753 # TODO: determine if these assertions are correct. they
2754 # pass for current test cases
2755 # assert existing_cte_reference_cte is _reference_cte
2756 # assert existing_cte_reference_cte is existing_cte
2758 del self.level_name_by_cte[existing_cte_reference_cte]
2759 else:
2760 # if the two CTEs are deep-copy identical, consider them
2761 # the same, **if** they are clones, that is, they came from
2762 # the ORM or other visit method
2763 if (
2764 cte._is_clone_of is not None
2765 or existing_cte._is_clone_of is not None
2766 ) and cte.compare(existing_cte):
2767 is_new_cte = False
2768 else:
2769 raise exc.CompileError(
2770 "Multiple, unrelated CTEs found with "
2771 "the same name: %r" % cte_name
2772 )
2774 if not asfrom and not is_new_cte:
2775 return None
2777 if cte._cte_alias is not None:
2778 pre_alias_cte = cte._cte_alias
2779 cte_pre_alias_name = cte._cte_alias.name
2780 if isinstance(cte_pre_alias_name, elements._truncated_label):
2781 cte_pre_alias_name = self._truncated_identifier(
2782 "alias", cte_pre_alias_name
2783 )
2784 else:
2785 pre_alias_cte = cte
2786 cte_pre_alias_name = None
2788 if is_new_cte:
2789 self.ctes_by_level_name[cte_level_name] = cte
2790 self.level_name_by_cte[_reference_cte] = cte_level_name
2792 if (
2793 "autocommit" in cte.element._execution_options
2794 and "autocommit" not in self.execution_options
2795 ):
2796 self.execution_options = self.execution_options.union(
2797 {
2798 "autocommit": cte.element._execution_options[
2799 "autocommit"
2800 ]
2801 }
2802 )
2803 if self.positional:
2804 self.cte_level[cte] = cte_level
2806 if pre_alias_cte not in self.ctes:
2807 self.visit_cte(pre_alias_cte, **kwargs)
2809 if not cte_pre_alias_name and cte not in self.ctes:
2810 if cte.recursive:
2811 self.ctes_recursive = True
2812 text = self.preparer.format_alias(cte, cte_name)
2813 if cte.recursive:
2814 if isinstance(cte.element, selectable.Select):
2815 col_source = cte.element
2816 elif isinstance(cte.element, selectable.CompoundSelect):
2817 col_source = cte.element.selects[0]
2818 else:
2819 assert False, "cte should only be against SelectBase"
2821 # TODO: can we get at the .columns_plus_names collection
2822 # that is already (or will be?) generated for the SELECT
2823 # rather than calling twice?
2824 recur_cols = [
2825 # TODO: proxy_name is not technically safe,
2826 # see test_cte->
2827 # test_with_recursive_no_name_currently_buggy. not
2828 # clear what should be done with such a case
2829 fallback_label_name or proxy_name
2830 for (
2831 _,
2832 proxy_name,
2833 fallback_label_name,
2834 c,
2835 repeated,
2836 ) in (col_source._generate_columns_plus_names(True))
2837 if not repeated
2838 ]
2840 text += "(%s)" % (
2841 ", ".join(
2842 self.preparer.format_label_name(
2843 ident, anon_map=self.anon_map
2844 )
2845 for ident in recur_cols
2846 )
2847 )
2849 if self.positional:
2850 kwargs["positional_names"] = self.cte_positional[cte] = []
2852 assert kwargs.get("subquery", False) is False
2854 if not self.stack:
2855 # toplevel, this is a stringify of the
2856 # cte directly. just compile the inner
2857 # the way alias() does.
2858 return cte.element._compiler_dispatch(
2859 self, asfrom=asfrom, **kwargs
2860 )
2861 else:
2862 prefixes = self._generate_prefixes(
2863 cte, cte._prefixes, **kwargs
2864 )
2865 inner = cte.element._compiler_dispatch(
2866 self, asfrom=True, **kwargs
2867 )
2869 text += " AS %s\n(%s)" % (prefixes, inner)
2871 if cte._suffixes:
2872 text += " " + self._generate_prefixes(
2873 cte, cte._suffixes, **kwargs
2874 )
2876 self.ctes[cte] = text
2878 if asfrom:
2879 if from_linter:
2880 from_linter.froms[cte] = cte_name
2882 if not is_new_cte and embedded_in_current_named_cte:
2883 return self.preparer.format_alias(cte, cte_name)
2885 if cte_pre_alias_name:
2886 text = self.preparer.format_alias(cte, cte_pre_alias_name)
2887 if self.preparer._requires_quotes(cte_name):
2888 cte_name = self.preparer.quote(cte_name)
2889 text += self.get_render_as_alias_suffix(cte_name)
2890 return text
2891 else:
2892 return self.preparer.format_alias(cte, cte_name)
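# illustrative sketch, assuming the default dialect: a rough end-to-end
# example of the CTE handling above:
#
#     from sqlalchemy import column, select, table
#     t = table("orders", column("region"), column("amount"))
#     regional = select(t.c.region).cte("regional")
#     print(select(regional.c.region))
#     # roughly: WITH regional AS (SELECT orders.region FROM orders)
#     #          SELECT regional.region FROM regional
#
# the rendered body is stored in self.ctes here and prepended to the final
# statement later by _render_cte_clause().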
2894 def visit_table_valued_alias(self, element, **kw):
2895 if element.joins_implicitly:
2896 kw["from_linter"] = None
2897 if element._is_lateral:
2898 return self.visit_lateral(element, **kw)
2899 else:
2900 return self.visit_alias(element, **kw)
2902 def visit_table_valued_column(self, element, **kw):
2903 return self.visit_column(element, **kw)
2905 def visit_alias(
2906 self,
2907 alias,
2908 asfrom=False,
2909 ashint=False,
2910 iscrud=False,
2911 fromhints=None,
2912 subquery=False,
2913 lateral=False,
2914 enclosing_alias=None,
2915 from_linter=None,
2916 **kwargs
2917 ):
2919 if lateral:
2920 if "enclosing_lateral" not in kwargs:
2921 # if lateral is set and enclosing_lateral is not
2922 # present, we assume we are being called directly
2923 # from visit_lateral() and we need to set enclosing_lateral.
2924 assert alias._is_lateral
2925 kwargs["enclosing_lateral"] = alias
2927 # for lateral objects, we track a second from_linter that is...
2928 # lateral! to the level above us.
2929 if (
2930 from_linter
2931 and "lateral_from_linter" not in kwargs
2932 and "enclosing_lateral" in kwargs
2933 ):
2934 kwargs["lateral_from_linter"] = from_linter
2936 if enclosing_alias is not None and enclosing_alias.element is alias:
2937 inner = alias.element._compiler_dispatch(
2938 self,
2939 asfrom=asfrom,
2940 ashint=ashint,
2941 iscrud=iscrud,
2942 fromhints=fromhints,
2943 lateral=lateral,
2944 enclosing_alias=alias,
2945 **kwargs
2946 )
2947 if subquery and (asfrom or lateral):
2948 inner = "(%s)" % (inner,)
2949 return inner
2950 else:
2951 enclosing_alias = kwargs["enclosing_alias"] = alias
2953 if asfrom or ashint:
2954 if isinstance(alias.name, elements._truncated_label):
2955 alias_name = self._truncated_identifier("alias", alias.name)
2956 else:
2957 alias_name = alias.name
2959 if ashint:
2960 return self.preparer.format_alias(alias, alias_name)
2961 elif asfrom:
2962 if from_linter:
2963 from_linter.froms[alias] = alias_name
2965 inner = alias.element._compiler_dispatch(
2966 self, asfrom=True, lateral=lateral, **kwargs
2967 )
2968 if subquery:
2969 inner = "(%s)" % (inner,)
2971 ret = inner + self.get_render_as_alias_suffix(
2972 self.preparer.format_alias(alias, alias_name)
2973 )
2975 if alias._supports_derived_columns and alias._render_derived:
2976 ret += "(%s)" % (
2977 ", ".join(
2978 "%s%s"
2979 % (
2980 self.preparer.quote(col.name),
2981 " %s"
2982 % self.dialect.type_compiler.process(
2983 col.type, **kwargs
2984 )
2985 if alias._render_derived_w_types
2986 else "",
2987 )
2988 for col in alias.c
2989 )
2990 )
2992 if fromhints and alias in fromhints:
2993 ret = self.format_from_hint_text(
2994 ret, alias, fromhints[alias], iscrud
2995 )
2997 return ret
2998 else:
2999 # note we cancel the "subquery" flag here as well
3000 return alias.element._compiler_dispatch(
3001 self, lateral=lateral, **kwargs
3002 )
3004 def visit_subquery(self, subquery, **kw):
3005 kw["subquery"] = True
3006 return self.visit_alias(subquery, **kw)
3008 def visit_lateral(self, lateral_, **kw):
3009 kw["lateral"] = True
3010 return "LATERAL %s" % self.visit_alias(lateral_, **kw)
3012 def visit_tablesample(self, tablesample, asfrom=False, **kw):
3013 text = "%s TABLESAMPLE %s" % (
3014 self.visit_alias(tablesample, asfrom=True, **kw),
3015 tablesample._get_method()._compiler_dispatch(self, **kw),
3016 )
3018 if tablesample.seed is not None:
3019 text += " REPEATABLE (%s)" % (
3020 tablesample.seed._compiler_dispatch(self, **kw)
3021 )
3023 return text
3025 def visit_values(self, element, asfrom=False, from_linter=None, **kw):
3026 kw.setdefault("literal_binds", element.literal_binds)
3027 v = "VALUES %s" % ", ".join(
3028 self.process(
3029 elements.Tuple(
3030 types=element._column_types, *elem
3031 ).self_group(),
3032 **kw
3033 )
3034 for chunk in element._data
3035 for elem in chunk
3036 )
3038 if isinstance(element.name, elements._truncated_label):
3039 name = self._truncated_identifier("values", element.name)
3040 else:
3041 name = element.name
3043 if element._is_lateral:
3044 lateral = "LATERAL "
3045 else:
3046 lateral = ""
3048 if asfrom:
3049 if from_linter:
3050 from_linter.froms[element] = (
3051 name if name is not None else "(unnamed VALUES element)"
3052 )
3054 if name:
3055 v = "%s(%s)%s (%s)" % (
3056 lateral,
3057 v,
3058 self.get_render_as_alias_suffix(self.preparer.quote(name)),
3059 (
3060 ", ".join(
3061 c._compiler_dispatch(
3062 self, include_table=False, **kw
3063 )
3064 for c in element.columns
3065 )
3066 ),
3067 )
3068 else:
3069 v = "%s(%s)" % (lateral, v)
3070 return v
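# illustrative sketch: the construct handled above is the 1.4-style values()
# expression, roughly
#
#     from sqlalchemy import Integer, String, column, values
#     v = values(
#         column("id", Integer), column("name", String), name="ids"
#     ).data([(1, "a"), (2, "b")])
#
# which, when used in a FROM clause, renders along the lines of
# "(VALUES (...), (...)) AS ids (id, name)".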
3072 def get_render_as_alias_suffix(self, alias_name_text):
3073 return " AS " + alias_name_text
3075 def _add_to_result_map(self, keyname, name, objects, type_):
3076 if keyname is None or keyname == "*":
3077 self._ordered_columns = False
3078 self._ad_hoc_textual = True
3079 if type_._is_tuple_type:
3080 raise exc.CompileError(
3081 "Most backends don't support SELECTing "
3082 "from a tuple() object. If this is an ORM query, "
3083 "consider using the Bundle object."
3084 )
3085 self._result_columns.append((keyname, name, objects, type_))
3087 def _label_returning_column(
3088 self, stmt, column, column_clause_args=None, **kw
3089 ):
3090 """Render a column with necessary labels inside of a RETURNING clause.
3092 This method is provided for individual dialects in place of calling
3093 the _label_select_column method directly, so that the two use cases
3094 of RETURNING vs. SELECT can be disambiguated going forward.
3096 .. versionadded:: 1.4.21
3098 """
3099 return self._label_select_column(
3100 None,
3101 column,
3102 True,
3103 False,
3104 {} if column_clause_args is None else column_clause_args,
3105 **kw
3106 )
3108 def _label_select_column(
3109 self,
3110 select,
3111 column,
3112 populate_result_map,
3113 asfrom,
3114 column_clause_args,
3115 name=None,
3116 proxy_name=None,
3117 fallback_label_name=None,
3118 within_columns_clause=True,
3119 column_is_repeated=False,
3120 need_column_expressions=False,
3121 ):
3122 """produce labeled columns present in a select()."""
3123 impl = column.type.dialect_impl(self.dialect)
3125 if impl._has_column_expression and (
3126 need_column_expressions or populate_result_map
3127 ):
3128 col_expr = impl.column_expression(column)
3129 else:
3130 col_expr = column
3132 if populate_result_map:
3133 # pass an "add_to_result_map" callable into the compilation
3134 # of embedded columns. this collects information about the
3135 # column as it will be fetched in the result and is coordinated
3136 # with cursor.description when the query is executed.
3137 add_to_result_map = self._add_to_result_map
3139 # if the SELECT statement told us this column is a repeat,
3140 # wrap the callable with one that prevents the addition of the
3141 # targets
3142 if column_is_repeated:
3143 _add_to_result_map = add_to_result_map
3145 def add_to_result_map(keyname, name, objects, type_):
3146 _add_to_result_map(keyname, name, (), type_)
3148 # if we redefined col_expr for type expressions, wrap the
3149 # callable with one that adds the original column to the targets
3150 elif col_expr is not column:
3151 _add_to_result_map = add_to_result_map
3153 def add_to_result_map(keyname, name, objects, type_):
3154 _add_to_result_map(
3155 keyname, name, (column,) + objects, type_
3156 )
3158 else:
3159 add_to_result_map = None
3161 # this method is used by some of the dialects for RETURNING,
3162 # which has different inputs. _label_returning_column was added
3163 # as the better target for this now however for 1.4 we will keep
3164 # _label_select_column directly compatible with this use case.
3165 # these assertions right now set up the current expected inputs
3166 assert within_columns_clause, (
3167 "_label_select_column is only relevant within "
3168 "the columns clause of a SELECT or RETURNING"
3169 )
3170 if isinstance(column, elements.Label):
3171 if col_expr is not column:
3172 result_expr = _CompileLabel(
3173 col_expr, column.name, alt_names=(column.element,)
3174 )
3175 else:
3176 result_expr = col_expr
3178 elif name:
3179 # here, _columns_plus_names has determined there's an explicit
3180 # label name we need to use. this is the default for
3181 # tablenames_plus_columnnames as well as when columns are being
3182 # deduplicated on name
3184 assert (
3185 proxy_name is not None
3186 ), "proxy_name is required if 'name' is passed"
3188 result_expr = _CompileLabel(
3189 col_expr,
3190 name,
3191 alt_names=(
3192 proxy_name,
3193 # this is a hack to allow legacy result column lookups
3194 # to work as they did before; this goes away in 2.0.
3195 # TODO: this only seems to be tested indirectly
3196 # via test/orm/test_deprecations.py. should be a
3197 # resultset test for this
3198 column._tq_label,
3199 ),
3200 )
3201 else:
3202 # determine here whether this column should be rendered in
3203 # a labelled context or not, as we were given no required label
3204 # name from the caller. Here we apply heuristics based on the kind
3205 # of SQL expression involved.
3207 if col_expr is not column:
3208 # type-specific expression wrapping the given column,
3209 # so we render a label
3210 render_with_label = True
3211 elif isinstance(column, elements.ColumnClause):
3212 # table-bound column, we render its name as a label if we are
3213 # inside of a subquery only
3214 render_with_label = (
3215 asfrom
3216 and not column.is_literal
3217 and column.table is not None
3218 )
3219 elif isinstance(column, elements.TextClause):
3220 render_with_label = False
3221 elif isinstance(column, elements.UnaryExpression):
3222 render_with_label = column.wraps_column_expression or asfrom
3223 elif (
3224 # general class of expressions that don't have a SQL-column
3225 # addressible name. includes scalar selects, bind parameters,
3226 # SQL functions, others
3227 not isinstance(column, elements.NamedColumn)
3228 # deeper check that indicates there's no natural "name" to
3229 # this element, which accommodates for custom SQL constructs
3230 # that might have a ".name" attribute (but aren't SQL
3231 # functions) but are not implementing this more recently added
3232 # base class. in theory the "NamedColumn" check should be
3233 # enough, however here we seek to maintain legacy behaviors
3234 # as well.
3235 and column._non_anon_label is None
3236 ):
3237 render_with_label = True
3238 else:
3239 render_with_label = False
3241 if render_with_label:
3242 if not fallback_label_name:
3243 # used by the RETURNING case right now. we generate it
3244 # here as 3rd party dialects may be referring to
3245 # _label_select_column method directly instead of the
3246 # just-added _label_returning_column method
3247 assert not column_is_repeated
3248 fallback_label_name = column._anon_name_label
3250 fallback_label_name = (
3251 elements._truncated_label(fallback_label_name)
3252 if not isinstance(
3253 fallback_label_name, elements._truncated_label
3254 )
3255 else fallback_label_name
3256 )
3258 result_expr = _CompileLabel(
3259 col_expr, fallback_label_name, alt_names=(proxy_name,)
3260 )
3261 else:
3262 result_expr = col_expr
3264 column_clause_args.update(
3265 within_columns_clause=within_columns_clause,
3266 add_to_result_map=add_to_result_map,
3267 )
3268 return result_expr._compiler_dispatch(self, **column_clause_args)
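# illustrative sketch, assuming the default dialect, of the labeling
# heuristics above (output shown roughly):
#
#     select(t.c.x)                     -> SELECT t.x FROM t   (no label)
#     select(select(t.c.x).subquery())  -> inner column rendered as "t.x AS x"
#     select(func.count())              -> SELECT count(*) AS count_1
#
# type-level column_expression() wrappers and anonymous expressions are the
# main cases that force a rendered label.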
3270 def format_from_hint_text(self, sqltext, table, hint, iscrud):
3271 hinttext = self.get_from_hint_text(table, hint)
3272 if hinttext:
3273 sqltext += " " + hinttext
3274 return sqltext
3276 def get_select_hint_text(self, byfroms):
3277 return None
3279 def get_from_hint_text(self, table, text):
3280 return None
3282 def get_crud_hint_text(self, table, text):
3283 return None
3285 def get_statement_hint_text(self, hint_texts):
3286 return " ".join(hint_texts)
3288 _default_stack_entry = util.immutabledict(
3289 [("correlate_froms", frozenset()), ("asfrom_froms", frozenset())]
3290 )
3292 def _display_froms_for_select(
3293 self, select_stmt, asfrom, lateral=False, **kw
3294 ):
3295 # utility method to help external dialects
3296 # get the correct from list for a select.
3297 # specifically the oracle dialect needs this feature
3298 # right now.
3299 toplevel = not self.stack
3300 entry = self._default_stack_entry if toplevel else self.stack[-1]
3302 compile_state = select_stmt._compile_state_factory(select_stmt, self)
3304 correlate_froms = entry["correlate_froms"]
3305 asfrom_froms = entry["asfrom_froms"]
3307 if asfrom and not lateral:
3308 froms = compile_state._get_display_froms(
3309 explicit_correlate_froms=correlate_froms.difference(
3310 asfrom_froms
3311 ),
3312 implicit_correlate_froms=(),
3313 )
3314 else:
3315 froms = compile_state._get_display_froms(
3316 explicit_correlate_froms=correlate_froms,
3317 implicit_correlate_froms=asfrom_froms,
3318 )
3319 return froms
3321 translate_select_structure = None
3322 """if not ``None``, should be a callable which accepts ``(select_stmt,
3323 **kw)`` and returns a select object. this is used for structural changes
3324 mostly to accommodate LIMIT/OFFSET schemes
3326 """
3328 def visit_select(
3329 self,
3330 select_stmt,
3331 asfrom=False,
3332 insert_into=False,
3333 fromhints=None,
3334 compound_index=None,
3335 select_wraps_for=None,
3336 lateral=False,
3337 from_linter=None,
3338 **kwargs
3339 ):
3340 assert select_wraps_for is None, (
3341 "SQLAlchemy 1.4 requires use of "
3342 "the translate_select_structure hook for structural "
3343 "translations of SELECT objects"
3344 )
3346 # initial setup of SELECT. the compile_state_factory may now
3347 # be creating a totally different SELECT from the one that was
3348 # passed in. for ORM use this will convert from an ORM-state
3349 # SELECT to a regular "Core" SELECT. other composed operations
3350 # such as computation of joins will be performed.
3352 kwargs["within_columns_clause"] = False
3354 compile_state = select_stmt._compile_state_factory(
3355 select_stmt, self, **kwargs
3356 )
3357 select_stmt = compile_state.statement
3359 toplevel = not self.stack
3361 if toplevel and not self.compile_state:
3362 self.compile_state = compile_state
3364 is_embedded_select = compound_index is not None or insert_into
3366 # translate step for Oracle, SQL Server which often need to
3367 # restructure the SELECT to allow for LIMIT/OFFSET and possibly
3368 # other conditions
3369 if self.translate_select_structure:
3370 new_select_stmt = self.translate_select_structure(
3371 select_stmt, asfrom=asfrom, **kwargs
3372 )
3374 # if SELECT was restructured, maintain a link to the originals
3375 # and assemble a new compile state
3376 if new_select_stmt is not select_stmt:
3377 compile_state_wraps_for = compile_state
3378 select_wraps_for = select_stmt
3379 select_stmt = new_select_stmt
3381 compile_state = select_stmt._compile_state_factory(
3382 select_stmt, self, **kwargs
3383 )
3384 select_stmt = compile_state.statement
3386 entry = self._default_stack_entry if toplevel else self.stack[-1]
3388 populate_result_map = need_column_expressions = (
3389 toplevel
3390 or entry.get("need_result_map_for_compound", False)
3391 or entry.get("need_result_map_for_nested", False)
3392 )
3394 # indicates there is a CompoundSelect in play and we are not the
3395 # first select
3396 if compound_index:
3397 populate_result_map = False
3399 # this was first proposed as part of #3372; however, it is not
3400 # reached in current tests and could possibly be an assertion
3401 # instead.
3402 if not populate_result_map and "add_to_result_map" in kwargs:
3403 del kwargs["add_to_result_map"]
3405 froms = self._setup_select_stack(
3406 select_stmt, compile_state, entry, asfrom, lateral, compound_index
3407 )
3409 column_clause_args = kwargs.copy()
3410 column_clause_args.update(
3411 {"within_label_clause": False, "within_columns_clause": False}
3412 )
3414 text = "SELECT " # we're off to a good start!
3416 if select_stmt._hints:
3417 hint_text, byfrom = self._setup_select_hints(select_stmt)
3418 if hint_text:
3419 text += hint_text + " "
3420 else:
3421 byfrom = None
3423 if select_stmt._independent_ctes:
3424 for cte in select_stmt._independent_ctes:
3425 cte._compiler_dispatch(self, **kwargs)
3427 if select_stmt._prefixes:
3428 text += self._generate_prefixes(
3429 select_stmt, select_stmt._prefixes, **kwargs
3430 )
3432 text += self.get_select_precolumns(select_stmt, **kwargs)
3433 # the actual list of columns to print in the SELECT column list.
3434 inner_columns = [
3435 c
3436 for c in [
3437 self._label_select_column(
3438 select_stmt,
3439 column,
3440 populate_result_map,
3441 asfrom,
3442 column_clause_args,
3443 name=name,
3444 proxy_name=proxy_name,
3445 fallback_label_name=fallback_label_name,
3446 column_is_repeated=repeated,
3447 need_column_expressions=need_column_expressions,
3448 )
3449 for (
3450 name,
3451 proxy_name,
3452 fallback_label_name,
3453 column,
3454 repeated,
3455 ) in compile_state.columns_plus_names
3456 ]
3457 if c is not None
3458 ]
3460 if populate_result_map and select_wraps_for is not None:
3461 # if this select was generated from translate_select,
3462 # rewrite the targeted columns in the result map
3464 translate = dict(
3465 zip(
3466 [
3467 name
3468 for (
3469 key,
3470 proxy_name,
3471 fallback_label_name,
3472 name,
3473 repeated,
3474 ) in compile_state.columns_plus_names
3475 ],
3476 [
3477 name
3478 for (
3479 key,
3480 proxy_name,
3481 fallback_label_name,
3482 name,
3483 repeated,
3484 ) in compile_state_wraps_for.columns_plus_names
3485 ],
3486 )
3487 )
3489 self._result_columns = [
3490 (key, name, tuple(translate.get(o, o) for o in obj), type_)
3491 for key, name, obj, type_ in self._result_columns
3492 ]
3494 text = self._compose_select_body(
3495 text,
3496 select_stmt,
3497 compile_state,
3498 inner_columns,
3499 froms,
3500 byfrom,
3501 toplevel,
3502 kwargs,
3503 )
3505 if select_stmt._statement_hints:
3506 per_dialect = [
3507 ht
3508 for (dialect_name, ht) in select_stmt._statement_hints
3509 if dialect_name in ("*", self.dialect.name)
3510 ]
3511 if per_dialect:
3512 text += " " + self.get_statement_hint_text(per_dialect)
3514 # In compound query, CTEs are shared at the compound level
3515 if self.ctes and (not is_embedded_select or toplevel):
3516 nesting_level = len(self.stack) if not toplevel else None
3517 text = (
3518 self._render_cte_clause(
3519 nesting_level=nesting_level,
3520 visiting_cte=kwargs.get("visiting_cte"),
3521 )
3522 + text
3523 )
3525 if select_stmt._suffixes:
3526 text += " " + self._generate_prefixes(
3527 select_stmt, select_stmt._suffixes, **kwargs
3528 )
3530 self.stack.pop(-1)
3532 return text
3534 def _setup_select_hints(self, select):
3535 byfrom = dict(
3536 [
3537 (
3538 from_,
3539 hinttext
3540 % {"name": from_._compiler_dispatch(self, ashint=True)},
3541 )
3542 for (from_, dialect), hinttext in select._hints.items()
3543 if dialect in ("*", self.dialect.name)
3544 ]
3545 )
3546 hint_text = self.get_select_hint_text(byfrom)
3547 return hint_text, byfrom
3549 def _setup_select_stack(
3550 self, select, compile_state, entry, asfrom, lateral, compound_index
3551 ):
3552 correlate_froms = entry["correlate_froms"]
3553 asfrom_froms = entry["asfrom_froms"]
3555 if compound_index == 0:
3556 entry["select_0"] = select
3557 elif compound_index:
3558 select_0 = entry["select_0"]
3559 numcols = len(select_0._all_selected_columns)
3561 if len(compile_state.columns_plus_names) != numcols:
3562 raise exc.CompileError(
3563 "All selectables passed to "
3564 "CompoundSelect must have identical numbers of "
3565 "columns; select #%d has %d columns, select "
3566 "#%d has %d"
3567 % (
3568 1,
3569 numcols,
3570 compound_index + 1,
3571 len(select._all_selected_columns),
3572 )
3573 )
3575 if asfrom and not lateral:
3576 froms = compile_state._get_display_froms(
3577 explicit_correlate_froms=correlate_froms.difference(
3578 asfrom_froms
3579 ),
3580 implicit_correlate_froms=(),
3581 )
3582 else:
3583 froms = compile_state._get_display_froms(
3584 explicit_correlate_froms=correlate_froms,
3585 implicit_correlate_froms=asfrom_froms,
3586 )
3588 new_correlate_froms = set(selectable._from_objects(*froms))
3589 all_correlate_froms = new_correlate_froms.union(correlate_froms)
3591 new_entry = {
3592 "asfrom_froms": new_correlate_froms,
3593 "correlate_froms": all_correlate_froms,
3594 "selectable": select,
3595 "compile_state": compile_state,
3596 }
3597 self.stack.append(new_entry)
3599 return froms
3601 def _compose_select_body(
3602 self,
3603 text,
3604 select,
3605 compile_state,
3606 inner_columns,
3607 froms,
3608 byfrom,
3609 toplevel,
3610 kwargs,
3611 ):
3612 text += ", ".join(inner_columns)
3614 if self.linting & COLLECT_CARTESIAN_PRODUCTS:
3615 from_linter = FromLinter({}, set())
3616 warn_linting = self.linting & WARN_LINTING
3617 if toplevel:
3618 self.from_linter = from_linter
3619 else:
3620 from_linter = None
3621 warn_linting = False
3623 if froms:
3624 text += " \nFROM "
3626 if select._hints:
3627 text += ", ".join(
3628 [
3629 f._compiler_dispatch(
3630 self,
3631 asfrom=True,
3632 fromhints=byfrom,
3633 from_linter=from_linter,
3634 **kwargs
3635 )
3636 for f in froms
3637 ]
3638 )
3639 else:
3640 text += ", ".join(
3641 [
3642 f._compiler_dispatch(
3643 self,
3644 asfrom=True,
3645 from_linter=from_linter,
3646 **kwargs
3647 )
3648 for f in froms
3649 ]
3650 )
3651 else:
3652 text += self.default_from()
3654 if select._where_criteria:
3655 t = self._generate_delimited_and_list(
3656 select._where_criteria, from_linter=from_linter, **kwargs
3657 )
3658 if t:
3659 text += " \nWHERE " + t
3661 if warn_linting:
3662 from_linter.warn()
3664 if select._group_by_clauses:
3665 text += self.group_by_clause(select, **kwargs)
3667 if select._having_criteria:
3668 t = self._generate_delimited_and_list(
3669 select._having_criteria, **kwargs
3670 )
3671 if t:
3672 text += " \nHAVING " + t
3674 if select._order_by_clauses:
3675 text += self.order_by_clause(select, **kwargs)
3677 if select._has_row_limiting_clause:
3678 text += self._row_limit_clause(select, **kwargs)
3680 if select._for_update_arg is not None:
3681 text += self.for_update_clause(select, **kwargs)
3683 return text
3685 def _generate_prefixes(self, stmt, prefixes, **kw):
3686 clause = " ".join(
3687 prefix._compiler_dispatch(self, **kw)
3688 for prefix, dialect_name in prefixes
3689 if dialect_name is None or dialect_name == self.dialect.name
3690 )
3691 if clause:
3692 clause += " "
3693 return clause
3695 def _render_cte_clause(
3696 self,
3697 nesting_level=None,
3698 include_following_stack=False,
3699 visiting_cte=None,
3700 ):
3701 """
3702 include_following_stack
3703 Also render the nesting CTEs on the next stack. Useful for
3704 SQL structures like UNION or INSERT that can wrap SELECT
3705 statements containing nesting CTEs.
3706 """
3707 if not self.ctes:
3708 return ""
3710 if nesting_level and nesting_level > 1:
3711 ctes = util.OrderedDict()
3712 for cte in list(self.ctes.keys()):
3713 cte_level, cte_name = self.level_name_by_cte[
3714 cte._get_reference_cte()
3715 ]
3716 is_rendered_level = cte_level == nesting_level or (
3717 include_following_stack and cte_level == nesting_level + 1
3718 )
3719 if not (cte.nesting and is_rendered_level):
3720 continue
3722 ctes[cte] = self.ctes[cte]
3724 else:
3725 ctes = self.ctes
3727 if not ctes:
3728 return ""
3729 ctes_recursive = any([cte.recursive for cte in ctes])
3731 if self.positional:
3732 self.cte_order[visiting_cte].extend(ctes)
3734 if visiting_cte is None and self.cte_order:
3735 assert self.positiontup is not None
3737 def get_nested_positional(cte):
3738 if cte in self.cte_order:
3739 children = self.cte_order.pop(cte)
3740 to_add = list(
3741 itertools.chain.from_iterable(
3742 get_nested_positional(child_cte)
3743 for child_cte in children
3744 )
3745 )
3746 if cte in self.cte_positional:
3747 return reorder_positional(
3748 self.cte_positional[cte],
3749 to_add,
3750 self.cte_level[children[0]],
3751 )
3752 else:
3753 return to_add
3754 else:
3755 return self.cte_positional.get(cte, [])
3757 def reorder_positional(pos, to_add, level):
3758 if not level:
3759 return to_add + pos
3760 index = 0
3761 for index, name in enumerate(reversed(pos)):
3762 if self.positiontup_level[name] < level: # type: ignore[index] # noqa: E501
3763 break
3764 return pos[:-index] + to_add + pos[-index:]
3766 to_add = get_nested_positional(None)
3767 self.positiontup = reorder_positional(
3768 self.positiontup, to_add, nesting_level
3769 )
3771 cte_text = self.get_cte_preamble(ctes_recursive) + " "
3772 cte_text += ", \n".join([txt for txt in ctes.values()])
3773 cte_text += "\n "
3775 if nesting_level and nesting_level > 1:
3776 for cte in list(ctes.keys()):
3777 cte_level, cte_name = self.level_name_by_cte[
3778 cte._get_reference_cte()
3779 ]
3780 del self.ctes[cte]
3781 del self.ctes_by_level_name[(cte_level, cte_name)]
3782 del self.level_name_by_cte[cte._get_reference_cte()]
3784 return cte_text
3786 def get_cte_preamble(self, recursive):
3787 if recursive:
3788 return "WITH RECURSIVE"
3789 else:
3790 return "WITH"
3792 def get_select_precolumns(self, select, **kw):
3793 """Called when building a ``SELECT`` statement, position is just
3794 before column list.
3796 """
3797 if select._distinct_on:
3798 util.warn_deprecated(
3799 "DISTINCT ON is currently supported only by the PostgreSQL "
3800 "dialect. Use of DISTINCT ON for other backends is currently "
3801 "silently ignored, however this usage is deprecated, and will "
3802 "raise CompileError in a future release for all backends "
3803 "that do not support this syntax.",
3804 version="1.4",
3805 )
3806 return "DISTINCT " if select._distinct else ""
3808 def group_by_clause(self, select, **kw):
3809 """allow dialects to customize how GROUP BY is rendered."""
3811 group_by = self._generate_delimited_list(
3812 select._group_by_clauses, OPERATORS[operators.comma_op], **kw
3813 )
3814 if group_by:
3815 return " GROUP BY " + group_by
3816 else:
3817 return ""
3819 def order_by_clause(self, select, **kw):
3820 """allow dialects to customize how ORDER BY is rendered."""
3822 order_by = self._generate_delimited_list(
3823 select._order_by_clauses, OPERATORS[operators.comma_op], **kw
3824 )
3826 if order_by:
3827 return " ORDER BY " + order_by
3828 else:
3829 return ""
3831 def for_update_clause(self, select, **kw):
3832 return " FOR UPDATE"
3834 def returning_clause(self, stmt, returning_cols):
3835 raise exc.CompileError(
3836 "RETURNING is not supported by this "
3837 "dialect's statement compiler."
3838 )
3840 def limit_clause(self, select, **kw):
3841 text = ""
3842 if select._limit_clause is not None:
3843 text += "\n LIMIT " + self.process(select._limit_clause, **kw)
3844 if select._offset_clause is not None:
3845 if select._limit_clause is None:
3846 text += "\n LIMIT -1"
3847 text += " OFFSET " + self.process(select._offset_clause, **kw)
3848 return text
3850 def fetch_clause(self, select, **kw):
3851 text = ""
3852 if select._offset_clause is not None:
3853 text += "\n OFFSET %s ROWS" % self.process(
3854 select._offset_clause, **kw
3855 )
3856 if select._fetch_clause is not None:
3857 text += "\n FETCH FIRST %s%s ROWS %s" % (
3858 self.process(select._fetch_clause, **kw),
3859 " PERCENT" if select._fetch_clause_options["percent"] else "",
3860 "WITH TIES"
3861 if select._fetch_clause_options["with_ties"]
3862 else "ONLY",
3863 )
3864 return text
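# illustrative sketch of the two row-limiting renderings above, roughly, with
# bound parameters elided:
#
#     select(t).limit(5).offset(10)  ->  ... LIMIT ... OFFSET ...
#     select(t).fetch(5).offset(10)  ->  ... OFFSET ... ROWS
#                                        FETCH FIRST ... ROWS ONLY
#
# _row_limit_clause() chooses fetch_clause() whenever a FETCH clause is
# present, otherwise limit_clause(); several dialects override both.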
3866 def visit_table(
3867 self,
3868 table,
3869 asfrom=False,
3870 iscrud=False,
3871 ashint=False,
3872 fromhints=None,
3873 use_schema=True,
3874 from_linter=None,
3875 **kwargs
3876 ):
3877 if from_linter:
3878 from_linter.froms[table] = table.fullname
3880 if asfrom or ashint:
3881 effective_schema = self.preparer.schema_for_object(table)
3883 if use_schema and effective_schema:
3884 ret = (
3885 self.preparer.quote_schema(effective_schema)
3886 + "."
3887 + self.preparer.quote(table.name)
3888 )
3889 else:
3890 ret = self.preparer.quote(table.name)
3891 if fromhints and table in fromhints:
3892 ret = self.format_from_hint_text(
3893 ret, table, fromhints[table], iscrud
3894 )
3895 return ret
3896 else:
3897 return ""
3899 def visit_join(self, join, asfrom=False, from_linter=None, **kwargs):
3900 if from_linter:
3901 from_linter.edges.update(
3902 itertools.product(
3903 join.left._from_objects, join.right._from_objects
3904 )
3905 )
3907 if join.full:
3908 join_type = " FULL OUTER JOIN "
3909 elif join.isouter:
3910 join_type = " LEFT OUTER JOIN "
3911 else:
3912 join_type = " JOIN "
3913 return (
3914 join.left._compiler_dispatch(
3915 self, asfrom=True, from_linter=from_linter, **kwargs
3916 )
3917 + join_type
3918 + join.right._compiler_dispatch(
3919 self, asfrom=True, from_linter=from_linter, **kwargs
3920 )
3921 + " ON "
3922 # TODO: likely need asfrom=True here?
3923 + join.onclause._compiler_dispatch(
3924 self, from_linter=from_linter, **kwargs
3925 )
3926 )
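# illustrative sketch: for a simple two-table join the visitor above yields
# something like
#
#     orders JOIN customers ON orders.customer_id = customers.id
#
# with isouter=True producing LEFT OUTER JOIN and full=True producing
# FULL OUTER JOIN; the from_linter edges recorded here feed the
# cartesian-product linting applied in _compose_select_body().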
3928 def _setup_crud_hints(self, stmt, table_text):
3929 dialect_hints = dict(
3930 [
3931 (table, hint_text)
3932 for (table, dialect), hint_text in stmt._hints.items()
3933 if dialect in ("*", self.dialect.name)
3934 ]
3935 )
3936 if stmt.table in dialect_hints:
3937 table_text = self.format_from_hint_text(
3938 table_text, stmt.table, dialect_hints[stmt.table], True
3939 )
3940 return dialect_hints, table_text
3942 def visit_insert(self, insert_stmt, **kw):
3944 compile_state = insert_stmt._compile_state_factory(
3945 insert_stmt, self, **kw
3946 )
3947 insert_stmt = compile_state.statement
3949 toplevel = not self.stack
3951 if toplevel:
3952 self.isinsert = True
3953 if not self.dml_compile_state:
3954 self.dml_compile_state = compile_state
3955 if not self.compile_state:
3956 self.compile_state = compile_state
3958 self.stack.append(
3959 {
3960 "correlate_froms": set(),
3961 "asfrom_froms": set(),
3962 "selectable": insert_stmt,
3963 }
3964 )
3966 crud_params = crud._get_crud_params(
3967 self, insert_stmt, compile_state, **kw
3968 )
3970 if (
3971 not crud_params
3972 and not self.dialect.supports_default_values
3973 and not self.dialect.supports_default_metavalue
3974 and not self.dialect.supports_empty_insert
3975 ):
3976 raise exc.CompileError(
3977 "The '%s' dialect with current database "
3978 "version settings does not support empty "
3979 "inserts." % self.dialect.name
3980 )
3982 if compile_state._has_multi_parameters:
3983 if not self.dialect.supports_multivalues_insert:
3984 raise exc.CompileError(
3985 "The '%s' dialect with current database "
3986 "version settings does not support "
3987 "in-place multirow inserts." % self.dialect.name
3988 )
3989 crud_params_single = crud_params[0]
3990 else:
3991 crud_params_single = crud_params
3993 preparer = self.preparer
3994 supports_default_values = self.dialect.supports_default_values
3996 text = "INSERT "
3998 if insert_stmt._prefixes:
3999 text += self._generate_prefixes(
4000 insert_stmt, insert_stmt._prefixes, **kw
4001 )
4003 text += "INTO "
4004 table_text = preparer.format_table(insert_stmt.table)
4006 if insert_stmt._hints:
4007 _, table_text = self._setup_crud_hints(insert_stmt, table_text)
4009 if insert_stmt._independent_ctes:
4010 for cte in insert_stmt._independent_ctes:
4011 cte._compiler_dispatch(self, **kw)
4013 text += table_text
4015 if crud_params_single or not supports_default_values:
4016 text += " (%s)" % ", ".join(
4017 [expr for c, expr, value in crud_params_single]
4018 )
4020 if self.returning or insert_stmt._returning:
4021 returning_clause = self.returning_clause(
4022 insert_stmt, self.returning or insert_stmt._returning
4023 )
4025 if self.returning_precedes_values:
4026 text += " " + returning_clause
4027 else:
4028 returning_clause = None
4030 if insert_stmt.select is not None:
4031 # placed here by crud.py
4032 select_text = self.process(
4033 self.stack[-1]["insert_from_select"], insert_into=True, **kw
4034 )
4036 if self.ctes and self.dialect.cte_follows_insert:
4037 nesting_level = len(self.stack) if not toplevel else None
4038 text += " %s%s" % (
4039 self._render_cte_clause(
4040 nesting_level=nesting_level,
4041 include_following_stack=True,
4042 visiting_cte=kw.get("visiting_cte"),
4043 ),
4044 select_text,
4045 )
4046 else:
4047 text += " %s" % select_text
4048 elif not crud_params and supports_default_values:
4049 text += " DEFAULT VALUES"
4050 elif compile_state._has_multi_parameters:
4051 text += " VALUES %s" % (
4052 ", ".join(
4053 "(%s)"
4054 % (", ".join(value for c, expr, value in crud_param_set))
4055 for crud_param_set in crud_params
4056 )
4057 )
4058 else:
4059 insert_single_values_expr = ", ".join(
4060 [value for c, expr, value in crud_params]
4061 )
4062 text += " VALUES (%s)" % insert_single_values_expr
4063 if toplevel:
4064 self.insert_single_values_expr = insert_single_values_expr
4066 if insert_stmt._post_values_clause is not None:
4067 post_values_clause = self.process(
4068 insert_stmt._post_values_clause, **kw
4069 )
4070 if post_values_clause:
4071 text += " " + post_values_clause
4073 if returning_clause and not self.returning_precedes_values:
4074 text += " " + returning_clause
4076 if self.ctes and not self.dialect.cte_follows_insert:
4077 nesting_level = len(self.stack) if not toplevel else None
4078 text = (
4079 self._render_cte_clause(
4080 nesting_level=nesting_level,
4081 include_following_stack=True,
4082 visiting_cte=kw.get("visiting_cte"),
4083 )
4084 + text
4085 )
4087 self.stack.pop(-1)
4089 return text
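# Illustrative usage sketch (not part of compiler.py): the text built by
# visit_insert() for a plain INSERT, using the default named-parameter
# dialect and a hypothetical table "t":
#
#     from sqlalchemy import table, column, insert
#     t = table("t", column("x"), column("y"))
#     print(insert(t))
#     # INSERT INTO t (x, y) VALUES (:x, :y)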
4091 def update_limit_clause(self, update_stmt):
4092 """Provide a hook for MySQL to add LIMIT to the UPDATE"""
4093 return None
4095 def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw):
4096 """Provide a hook to override the initial table clause
4097 in an UPDATE statement.
4099 MySQL overrides this.
4101 """
4102 kw["asfrom"] = True
4103 return from_table._compiler_dispatch(self, iscrud=True, **kw)
4105 def update_from_clause(
4106 self, update_stmt, from_table, extra_froms, from_hints, **kw
4107 ):
4108 """Provide a hook to override the generation of an
4109 UPDATE..FROM clause.
4111 MySQL and MSSQL override this.
4113 """
4114 raise NotImplementedError(
4115 "This backend does not support multiple-table "
4116 "criteria within UPDATE"
4117 )
4119 def visit_update(self, update_stmt, **kw):
4120 compile_state = update_stmt._compile_state_factory(
4121 update_stmt, self, **kw
4122 )
4123 update_stmt = compile_state.statement
4125 toplevel = not self.stack
4126 if toplevel:
4127 self.isupdate = True
4128 if not self.dml_compile_state:
4129 self.dml_compile_state = compile_state
4130 if not self.compile_state:
4131 self.compile_state = compile_state
4133 extra_froms = compile_state._extra_froms
4134 is_multitable = bool(extra_froms)
4136 if is_multitable:
4137 # main table might be a JOIN
4138 main_froms = set(selectable._from_objects(update_stmt.table))
4139 render_extra_froms = [
4140 f for f in extra_froms if f not in main_froms
4141 ]
4142 correlate_froms = main_froms.union(extra_froms)
4143 else:
4144 render_extra_froms = []
4145 correlate_froms = {update_stmt.table}
4147 self.stack.append(
4148 {
4149 "correlate_froms": correlate_froms,
4150 "asfrom_froms": correlate_froms,
4151 "selectable": update_stmt,
4152 }
4153 )
4155 text = "UPDATE "
4157 if update_stmt._prefixes:
4158 text += self._generate_prefixes(
4159 update_stmt, update_stmt._prefixes, **kw
4160 )
4162 table_text = self.update_tables_clause(
4163 update_stmt, update_stmt.table, render_extra_froms, **kw
4164 )
4165 crud_params = crud._get_crud_params(
4166 self, update_stmt, compile_state, **kw
4167 )
4169 if update_stmt._hints:
4170 dialect_hints, table_text = self._setup_crud_hints(
4171 update_stmt, table_text
4172 )
4173 else:
4174 dialect_hints = None
4176 if update_stmt._independent_ctes:
4177 for cte in update_stmt._independent_ctes:
4178 cte._compiler_dispatch(self, **kw)
4180 text += table_text
4182 text += " SET "
4183 text += ", ".join(expr + "=" + value for c, expr, value in crud_params)
4185 if self.returning or update_stmt._returning:
4186 if self.returning_precedes_values:
4187 text += " " + self.returning_clause(
4188 update_stmt, self.returning or update_stmt._returning
4189 )
4191 if extra_froms:
4192 extra_from_text = self.update_from_clause(
4193 update_stmt,
4194 update_stmt.table,
4195 render_extra_froms,
4196 dialect_hints,
4197 **kw
4198 )
4199 if extra_from_text:
4200 text += " " + extra_from_text
4202 if update_stmt._where_criteria:
4203 t = self._generate_delimited_and_list(
4204 update_stmt._where_criteria, **kw
4205 )
4206 if t:
4207 text += " WHERE " + t
4209 limit_clause = self.update_limit_clause(update_stmt)
4210 if limit_clause:
4211 text += " " + limit_clause
4213 if (
4214 self.returning or update_stmt._returning
4215 ) and not self.returning_precedes_values:
4216 text += " " + self.returning_clause(
4217 update_stmt, self.returning or update_stmt._returning
4218 )
4220 if self.ctes:
4221 nesting_level = len(self.stack) if not toplevel else None
4222 text = (
4223 self._render_cte_clause(
4224 nesting_level=nesting_level,
4225 visiting_cte=kw.get("visiting_cte"),
4226 )
4227 + text
4228 )
4230 self.stack.pop(-1)
4232 return text
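# Illustrative usage sketch (not part of compiler.py): visit_update()
# produces the UPDATE ... SET ... WHERE form; for a hypothetical table "t":
#
#     from sqlalchemy import table, column, update
#     t = table("t", column("x"), column("y"))
#     print(update(t).where(t.c.y == 5).values(x=7))
#     # UPDATE t SET x=:x WHERE t.y = :y_1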
4234 def delete_extra_from_clause(
4235 self, update_stmt, from_table, extra_froms, from_hints, **kw
4236 ):
4237 """Provide a hook to override the generation of an
4238 DELETE..FROM clause.
4240 This can be used to implement DELETE..USING for example.
4242 MySQL and MSSQL override this.
4244 """
4245 raise NotImplementedError(
4246 "This backend does not support multiple-table "
4247 "criteria within DELETE"
4248 )
4250 def delete_table_clause(self, delete_stmt, from_table, extra_froms):
4251 return from_table._compiler_dispatch(self, asfrom=True, iscrud=True)
4253 def visit_delete(self, delete_stmt, **kw):
4254 compile_state = delete_stmt._compile_state_factory(
4255 delete_stmt, self, **kw
4256 )
4257 delete_stmt = compile_state.statement
4259 toplevel = not self.stack
4260 if toplevel:
4261 self.isdelete = True
4262 if not self.dml_compile_state:
4263 self.dml_compile_state = compile_state
4264 if not self.compile_state:
4265 self.compile_state = compile_state
4267 extra_froms = compile_state._extra_froms
4269 correlate_froms = {delete_stmt.table}.union(extra_froms)
4270 self.stack.append(
4271 {
4272 "correlate_froms": correlate_froms,
4273 "asfrom_froms": correlate_froms,
4274 "selectable": delete_stmt,
4275 }
4276 )
4278 text = "DELETE "
4280 if delete_stmt._prefixes:
4281 text += self._generate_prefixes(
4282 delete_stmt, delete_stmt._prefixes, **kw
4283 )
4285 text += "FROM "
4286 table_text = self.delete_table_clause(
4287 delete_stmt, delete_stmt.table, extra_froms
4288 )
4290 if delete_stmt._hints:
4291 dialect_hints, table_text = self._setup_crud_hints(
4292 delete_stmt, table_text
4293 )
4294 else:
4295 dialect_hints = None
4297 if delete_stmt._independent_ctes:
4298 for cte in delete_stmt._independent_ctes:
4299 cte._compiler_dispatch(self, **kw)
4301 text += table_text
4303 if delete_stmt._returning:
4304 if self.returning_precedes_values:
4305 text += " " + self.returning_clause(
4306 delete_stmt, delete_stmt._returning
4307 )
4309 if extra_froms:
4310 extra_from_text = self.delete_extra_from_clause(
4311 delete_stmt,
4312 delete_stmt.table,
4313 extra_froms,
4314 dialect_hints,
4315 **kw
4316 )
4317 if extra_from_text:
4318 text += " " + extra_from_text
4320 if delete_stmt._where_criteria:
4321 t = self._generate_delimited_and_list(
4322 delete_stmt._where_criteria, **kw
4323 )
4324 if t:
4325 text += " WHERE " + t
4327 if delete_stmt._returning and not self.returning_precedes_values:
4328 text += " " + self.returning_clause(
4329 delete_stmt, delete_stmt._returning
4330 )
4332 if self.ctes:
4333 nesting_level = len(self.stack) if not toplevel else None
4334 text = (
4335 self._render_cte_clause(
4336 nesting_level=nesting_level,
4337 visiting_cte=kw.get("visiting_cte"),
4338 )
4339 + text
4340 )
4342 self.stack.pop(-1)
4344 return text
4346 def visit_savepoint(self, savepoint_stmt):
4347 return "SAVEPOINT %s" % self.preparer.format_savepoint(savepoint_stmt)
4349 def visit_rollback_to_savepoint(self, savepoint_stmt):
4350 return "ROLLBACK TO SAVEPOINT %s" % self.preparer.format_savepoint(
4351 savepoint_stmt
4352 )
4354 def visit_release_savepoint(self, savepoint_stmt):
4355 return "RELEASE SAVEPOINT %s" % self.preparer.format_savepoint(
4356 savepoint_stmt
4357 )
4360class StrSQLCompiler(SQLCompiler):
4361 """A :class:`.SQLCompiler` subclass which allows a small selection
4362 of non-standard SQL features to render into a string value.
4364 The :class:`.StrSQLCompiler` is invoked whenever a Core expression
4365 element is directly stringified without calling upon the
4366 :meth:`_expression.ClauseElement.compile` method.
4367 It can render a limited set
4368 of non-standard SQL constructs to assist in basic stringification;
4369 however, for more substantial custom or dialect-specific SQL constructs,
4370 it will be necessary to make use of
4371 :meth:`_expression.ClauseElement.compile`
4372 directly.
4374 .. seealso::
4376 :ref:`faq_sql_expression_string`
4378 """
4380 def _fallback_column_name(self, column):
4381 return "<name unknown>"
4383 @util.preload_module("sqlalchemy.engine.url")
4384 def visit_unsupported_compilation(self, element, err, **kw):
4385 if element.stringify_dialect != "default":
4386 url = util.preloaded.engine_url
4387 dialect = url.URL.create(element.stringify_dialect).get_dialect()()
4389 compiler = dialect.statement_compiler(dialect, None)
4390 if not isinstance(compiler, StrSQLCompiler):
4391 return compiler.process(element)
4393 return super(StrSQLCompiler, self).visit_unsupported_compilation(
4394 element, err
4395 )
4397 def visit_getitem_binary(self, binary, operator, **kw):
4398 return "%s[%s]" % (
4399 self.process(binary.left, **kw),
4400 self.process(binary.right, **kw),
4401 )
4403 def visit_json_getitem_op_binary(self, binary, operator, **kw):
4404 return self.visit_getitem_binary(binary, operator, **kw)
4406 def visit_json_path_getitem_op_binary(self, binary, operator, **kw):
4407 return self.visit_getitem_binary(binary, operator, **kw)
4409 def visit_sequence(self, seq, **kw):
4410 return "<next sequence value: %s>" % self.preparer.format_sequence(seq)
4412 def returning_clause(self, stmt, returning_cols):
4413 columns = [
4414 self._label_select_column(
4415 None, c, True, False, {}, fallback_label_name=c._non_anon_label
4416 )
4417 for c in base._select_iterables(returning_cols)
4418 ]
4420 return "RETURNING " + ", ".join(columns)
4422 def update_from_clause(
4423 self, update_stmt, from_table, extra_froms, from_hints, **kw
4424 ):
4425 kw["asfrom"] = True
4426 return "FROM " + ", ".join(
4427 t._compiler_dispatch(self, fromhints=from_hints, **kw)
4428 for t in extra_froms
4429 )
4431 def delete_extra_from_clause(
4432 self, update_stmt, from_table, extra_froms, from_hints, **kw
4433 ):
4434 kw["asfrom"] = True
4435 return ", " + ", ".join(
4436 t._compiler_dispatch(self, fromhints=from_hints, **kw)
4437 for t in extra_froms
4438 )
4440 def visit_empty_set_expr(self, type_):
4441 return "SELECT 1 WHERE 1!=1"
4443 def get_from_hint_text(self, table, text):
4444 return "[%s]" % text
4446 def visit_regexp_match_op_binary(self, binary, operator, **kw):
4447 return self._generate_generic_binary(binary, " <regexp> ", **kw)
4449 def visit_not_regexp_match_op_binary(self, binary, operator, **kw):
4450 return self._generate_generic_binary(binary, " <not regexp> ", **kw)
4452 def visit_regexp_replace_op_binary(self, binary, operator, **kw):
4453 replacement = binary.modifiers["replacement"]
4454 return "<regexp replace>(%s, %s, %s)" % (
4455 binary.left._compiler_dispatch(self, **kw),
4456 binary.right._compiler_dispatch(self, **kw),
4457 replacement._compiler_dispatch(self, **kw),
4458 )
4461class DDLCompiler(Compiled):
4462 @util.memoized_property
4463 def sql_compiler(self):
4464 return self.dialect.statement_compiler(
4465 self.dialect, None, schema_translate_map=self.schema_translate_map
4466 )
4468 @util.memoized_property
4469 def type_compiler(self):
4470 return self.dialect.type_compiler
4472 def construct_params(
4473 self, params=None, extracted_parameters=None, escape_names=True
4474 ):
4475 return None
4477 def visit_ddl(self, ddl, **kwargs):
4478 # table events can substitute table and schema name
4479 context = ddl.context
4480 if isinstance(ddl.target, schema.Table):
4481 context = context.copy()
4483 preparer = self.preparer
4484 path = preparer.format_table_seq(ddl.target)
4485 if len(path) == 1:
4486 table, sch = path[0], ""
4487 else:
4488 table, sch = path[-1], path[0]
4490 context.setdefault("table", table)
4491 context.setdefault("schema", sch)
4492 context.setdefault("fullname", preparer.format_table(ddl.target))
4494 return self.sql_compiler.post_process_text(ddl.statement % context)
4496 def visit_create_schema(self, create, **kw):
4497 schema = self.preparer.format_schema(create.element)
4498 return "CREATE SCHEMA " + schema
4500 def visit_drop_schema(self, drop, **kw):
4501 schema = self.preparer.format_schema(drop.element)
4502 text = "DROP SCHEMA " + schema
4503 if drop.cascade:
4504 text += " CASCADE"
4505 return text
4507 def visit_create_table(self, create, **kw):
4508 table = create.element
4509 preparer = self.preparer
4511 text = "\nCREATE "
4512 if table._prefixes:
4513 text += " ".join(table._prefixes) + " "
4515 text += "TABLE "
4516 if create.if_not_exists:
4517 text += "IF NOT EXISTS "
4519 text += preparer.format_table(table) + " "
4521 create_table_suffix = self.create_table_suffix(table)
4522 if create_table_suffix:
4523 text += create_table_suffix + " "
4525 text += "("
4527 separator = "\n"
4529 # if only one primary key, specify it along with the column
4530 first_pk = False
4531 for create_column in create.columns:
4532 column = create_column.element
4533 try:
4534 processed = self.process(
4535 create_column, first_pk=column.primary_key and not first_pk
4536 )
4537 if processed is not None:
4538 text += separator
4539 separator = ", \n"
4540 text += "\t" + processed
4541 if column.primary_key:
4542 first_pk = True
4543 except exc.CompileError as ce:
4544 util.raise_(
4545 exc.CompileError(
4546 util.u("(in table '%s', column '%s'): %s")
4547 % (table.description, column.name, ce.args[0])
4548 ),
4549 from_=ce,
4550 )
4552 const = self.create_table_constraints(
4553 table,
4554 _include_foreign_key_constraints=create.include_foreign_key_constraints, # noqa
4555 )
4556 if const:
4557 text += separator + "\t" + const
4559 text += "\n)%s\n\n" % self.post_create_table(table)
4560 return text
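# Illustrative usage sketch (not part of compiler.py): visit_create_table()
# is exercised by stringifying a CreateTable construct; a hypothetical
# two-column table renders roughly as:
#
#     from sqlalchemy import Table, Column, Integer, String, MetaData
#     from sqlalchemy.schema import CreateTable
#     t = Table("t", MetaData(),
#               Column("id", Integer, primary_key=True),
#               Column("name", String(50)))
#     print(CreateTable(t))
#     # CREATE TABLE t (
#     #     id INTEGER NOT NULL,
#     #     name VARCHAR(50),
#     #     PRIMARY KEY (id)
#     # )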
4562 def visit_create_column(self, create, first_pk=False, **kw):
4563 column = create.element
4565 if column.system:
4566 return None
4568 text = self.get_column_specification(column, first_pk=first_pk)
4569 const = " ".join(
4570 self.process(constraint) for constraint in column.constraints
4571 )
4572 if const:
4573 text += " " + const
4575 return text
4577 def create_table_constraints(
4578 self, table, _include_foreign_key_constraints=None, **kw
4579 ):
4581 # On some databases the order is significant: visit PK first, then the
4582 # other constraints (engine.ReflectionTest.testbasic failed on FB2)
4583 constraints = []
4584 if table.primary_key:
4585 constraints.append(table.primary_key)
4587 all_fkcs = table.foreign_key_constraints
4588 if _include_foreign_key_constraints is not None:
4589 omit_fkcs = all_fkcs.difference(_include_foreign_key_constraints)
4590 else:
4591 omit_fkcs = set()
4593 constraints.extend(
4594 [
4595 c
4596 for c in table._sorted_constraints
4597 if c is not table.primary_key and c not in omit_fkcs
4598 ]
4599 )
4601 return ", \n\t".join(
4602 p
4603 for p in (
4604 self.process(constraint)
4605 for constraint in constraints
4606 if (
4607 constraint._create_rule is None
4608 or constraint._create_rule(self)
4609 )
4610 and (
4611 not self.dialect.supports_alter
4612 or not getattr(constraint, "use_alter", False)
4613 )
4614 )
4615 if p is not None
4616 )
4618 def visit_drop_table(self, drop, **kw):
4619 text = "\nDROP TABLE "
4620 if drop.if_exists:
4621 text += "IF EXISTS "
4622 return text + self.preparer.format_table(drop.element)
4624 def visit_drop_view(self, drop, **kw):
4625 return "\nDROP VIEW " + self.preparer.format_table(drop.element)
4627 def _verify_index_table(self, index):
4628 if index.table is None:
4629 raise exc.CompileError(
4630 "Index '%s' is not associated " "with any table." % index.name
4631 )
4633 def visit_create_index(
4634 self, create, include_schema=False, include_table_schema=True, **kw
4635 ):
4636 index = create.element
4637 self._verify_index_table(index)
4638 preparer = self.preparer
4639 text = "CREATE "
4640 if index.unique:
4641 text += "UNIQUE "
4642 if index.name is None:
4643 raise exc.CompileError(
4644 "CREATE INDEX requires that the index have a name"
4645 )
4647 text += "INDEX "
4648 if create.if_not_exists:
4649 text += "IF NOT EXISTS "
4651 text += "%s ON %s (%s)" % (
4652 self._prepared_index_name(index, include_schema=include_schema),
4653 preparer.format_table(
4654 index.table, use_schema=include_table_schema
4655 ),
4656 ", ".join(
4657 self.sql_compiler.process(
4658 expr, include_table=False, literal_binds=True
4659 )
4660 for expr in index.expressions
4661 ),
4662 )
4663 return text
4665 def visit_drop_index(self, drop, **kw):
4666 index = drop.element
4668 if index.name is None:
4669 raise exc.CompileError(
4670 "DROP INDEX requires that the index have a name"
4671 )
4672 text = "\nDROP INDEX "
4673 if drop.if_exists:
4674 text += "IF EXISTS "
4676 return text + self._prepared_index_name(index, include_schema=True)
4678 def _prepared_index_name(self, index, include_schema=False):
4679 if index.table is not None:
4680 effective_schema = self.preparer.schema_for_object(index.table)
4681 else:
4682 effective_schema = None
4683 if include_schema and effective_schema:
4684 schema_name = self.preparer.quote_schema(effective_schema)
4685 else:
4686 schema_name = None
4688 index_name = self.preparer.format_index(index)
4690 if schema_name:
4691 index_name = schema_name + "." + index_name
4692 return index_name
4694 def visit_add_constraint(self, create, **kw):
4695 return "ALTER TABLE %s ADD %s" % (
4696 self.preparer.format_table(create.element.table),
4697 self.process(create.element),
4698 )
4700 def visit_set_table_comment(self, create, **kw):
4701 return "COMMENT ON TABLE %s IS %s" % (
4702 self.preparer.format_table(create.element),
4703 self.sql_compiler.render_literal_value(
4704 create.element.comment, sqltypes.String()
4705 ),
4706 )
4708 def visit_drop_table_comment(self, drop, **kw):
4709 return "COMMENT ON TABLE %s IS NULL" % self.preparer.format_table(
4710 drop.element
4711 )
4713 def visit_set_column_comment(self, create, **kw):
4714 return "COMMENT ON COLUMN %s IS %s" % (
4715 self.preparer.format_column(
4716 create.element, use_table=True, use_schema=True
4717 ),
4718 self.sql_compiler.render_literal_value(
4719 create.element.comment, sqltypes.String()
4720 ),
4721 )
4723 def visit_drop_column_comment(self, drop, **kw):
4724 return "COMMENT ON COLUMN %s IS NULL" % self.preparer.format_column(
4725 drop.element, use_table=True
4726 )
4728 def get_identity_options(self, identity_options):
4729 text = []
4730 if identity_options.increment is not None:
4731 text.append("INCREMENT BY %d" % identity_options.increment)
4732 if identity_options.start is not None:
4733 text.append("START WITH %d" % identity_options.start)
4734 if identity_options.minvalue is not None:
4735 text.append("MINVALUE %d" % identity_options.minvalue)
4736 if identity_options.maxvalue is not None:
4737 text.append("MAXVALUE %d" % identity_options.maxvalue)
4738 if identity_options.nominvalue is not None:
4739 text.append("NO MINVALUE")
4740 if identity_options.nomaxvalue is not None:
4741 text.append("NO MAXVALUE")
4742 if identity_options.cache is not None:
4743 text.append("CACHE %d" % identity_options.cache)
4744 if identity_options.order is not None:
4745 text.append("ORDER" if identity_options.order else "NO ORDER")
4746 if identity_options.cycle is not None:
4747 text.append("CYCLE" if identity_options.cycle else "NO CYCLE")
4748 return " ".join(text)
4750 def visit_create_sequence(self, create, prefix=None, **kw):
4751 text = "CREATE SEQUENCE %s" % self.preparer.format_sequence(
4752 create.element
4753 )
4754 if prefix:
4755 text += prefix
4756 if create.element.start is None:
4757 create.element.start = self.dialect.default_sequence_base
4758 options = self.get_identity_options(create.element)
4759 if options:
4760 text += " " + options
4761 return text
4763 def visit_drop_sequence(self, drop, **kw):
4764 return "DROP SEQUENCE %s" % self.preparer.format_sequence(drop.element)
4766 def visit_drop_constraint(self, drop, **kw):
4767 constraint = drop.element
4768 if constraint.name is not None:
4769 formatted_name = self.preparer.format_constraint(constraint)
4770 else:
4771 formatted_name = None
4773 if formatted_name is None:
4774 raise exc.CompileError(
4775 "Can't emit DROP CONSTRAINT for constraint %r; "
4776 "it has no name" % drop.element
4777 )
4778 return "ALTER TABLE %s DROP CONSTRAINT %s%s" % (
4779 self.preparer.format_table(drop.element.table),
4780 formatted_name,
4781 drop.cascade and " CASCADE" or "",
4782 )
4784 def get_column_specification(self, column, **kwargs):
4785 colspec = (
4786 self.preparer.format_column(column)
4787 + " "
4788 + self.dialect.type_compiler.process(
4789 column.type, type_expression=column
4790 )
4791 )
4792 default = self.get_column_default_string(column)
4793 if default is not None:
4794 colspec += " DEFAULT " + default
4796 if column.computed is not None:
4797 colspec += " " + self.process(column.computed)
4799 if (
4800 column.identity is not None
4801 and self.dialect.supports_identity_columns
4802 ):
4803 colspec += " " + self.process(column.identity)
4805 if not column.nullable and (
4806 not column.identity or not self.dialect.supports_identity_columns
4807 ):
4808 colspec += " NOT NULL"
4809 return colspec
4811 def create_table_suffix(self, table):
4812 return ""
4814 def post_create_table(self, table):
4815 return ""
4817 def get_column_default_string(self, column):
4818 if isinstance(column.server_default, schema.DefaultClause):
4819 if isinstance(column.server_default.arg, util.string_types):
4820 return self.sql_compiler.render_literal_value(
4821 column.server_default.arg, sqltypes.STRINGTYPE
4822 )
4823 else:
4824 return self.sql_compiler.process(
4825 column.server_default.arg, literal_binds=True
4826 )
4827 else:
4828 return None
4830 def visit_table_or_column_check_constraint(self, constraint, **kw):
4831 if constraint.is_column_level:
4832 return self.visit_column_check_constraint(constraint)
4833 else:
4834 return self.visit_check_constraint(constraint)
4836 def visit_check_constraint(self, constraint, **kw):
4837 text = ""
4838 if constraint.name is not None:
4839 formatted_name = self.preparer.format_constraint(constraint)
4840 if formatted_name is not None:
4841 text += "CONSTRAINT %s " % formatted_name
4842 text += "CHECK (%s)" % self.sql_compiler.process(
4843 constraint.sqltext, include_table=False, literal_binds=True
4844 )
4845 text += self.define_constraint_deferrability(constraint)
4846 return text
4848 def visit_column_check_constraint(self, constraint, **kw):
4849 text = ""
4850 if constraint.name is not None:
4851 formatted_name = self.preparer.format_constraint(constraint)
4852 if formatted_name is not None:
4853 text += "CONSTRAINT %s " % formatted_name
4854 text += "CHECK (%s)" % self.sql_compiler.process(
4855 constraint.sqltext, include_table=False, literal_binds=True
4856 )
4857 text += self.define_constraint_deferrability(constraint)
4858 return text
4860 def visit_primary_key_constraint(self, constraint, **kw):
4861 if len(constraint) == 0:
4862 return ""
4863 text = ""
4864 if constraint.name is not None:
4865 formatted_name = self.preparer.format_constraint(constraint)
4866 if formatted_name is not None:
4867 text += "CONSTRAINT %s " % formatted_name
4868 text += "PRIMARY KEY "
4869 text += "(%s)" % ", ".join(
4870 self.preparer.quote(c.name)
4871 for c in (
4872 constraint.columns_autoinc_first
4873 if constraint._implicit_generated
4874 else constraint.columns
4875 )
4876 )
4877 text += self.define_constraint_deferrability(constraint)
4878 return text
4880 def visit_foreign_key_constraint(self, constraint, **kw):
4881 preparer = self.preparer
4882 text = ""
4883 if constraint.name is not None:
4884 formatted_name = self.preparer.format_constraint(constraint)
4885 if formatted_name is not None:
4886 text += "CONSTRAINT %s " % formatted_name
4887 remote_table = list(constraint.elements)[0].column.table
4888 text += "FOREIGN KEY(%s) REFERENCES %s (%s)" % (
4889 ", ".join(
4890 preparer.quote(f.parent.name) for f in constraint.elements
4891 ),
4892 self.define_constraint_remote_table(
4893 constraint, remote_table, preparer
4894 ),
4895 ", ".join(
4896 preparer.quote(f.column.name) for f in constraint.elements
4897 ),
4898 )
4899 text += self.define_constraint_match(constraint)
4900 text += self.define_constraint_cascades(constraint)
4901 text += self.define_constraint_deferrability(constraint)
4902 return text
4904 def define_constraint_remote_table(self, constraint, table, preparer):
4905 """Format the remote table clause of a CREATE CONSTRAINT clause."""
4907 return preparer.format_table(table)
4909 def visit_unique_constraint(self, constraint, **kw):
4910 if len(constraint) == 0:
4911 return ""
4912 text = ""
4913 if constraint.name is not None:
4914 formatted_name = self.preparer.format_constraint(constraint)
4915 if formatted_name is not None:
4916 text += "CONSTRAINT %s " % formatted_name
4917 text += "UNIQUE (%s)" % (
4918 ", ".join(self.preparer.quote(c.name) for c in constraint)
4919 )
4920 text += self.define_constraint_deferrability(constraint)
4921 return text
4923 def define_constraint_cascades(self, constraint):
4924 text = ""
4925 if constraint.ondelete is not None:
4926 text += " ON DELETE %s" % self.preparer.validate_sql_phrase(
4927 constraint.ondelete, FK_ON_DELETE
4928 )
4929 if constraint.onupdate is not None:
4930 text += " ON UPDATE %s" % self.preparer.validate_sql_phrase(
4931 constraint.onupdate, FK_ON_UPDATE
4932 )
4933 return text
4935 def define_constraint_deferrability(self, constraint):
4936 text = ""
4937 if constraint.deferrable is not None:
4938 if constraint.deferrable:
4939 text += " DEFERRABLE"
4940 else:
4941 text += " NOT DEFERRABLE"
4942 if constraint.initially is not None:
4943 text += " INITIALLY %s" % self.preparer.validate_sql_phrase(
4944 constraint.initially, FK_INITIALLY
4945 )
4946 return text
4948 def define_constraint_match(self, constraint):
4949 text = ""
4950 if constraint.match is not None:
4951 text += " MATCH %s" % constraint.match
4952 return text
4954 def visit_computed_column(self, generated, **kw):
4955 text = "GENERATED ALWAYS AS (%s)" % self.sql_compiler.process(
4956 generated.sqltext, include_table=False, literal_binds=True
4957 )
4958 if generated.persisted is True:
4959 text += " STORED"
4960 elif generated.persisted is False:
4961 text += " VIRTUAL"
4962 return text
4964 def visit_identity_column(self, identity, **kw):
4965 text = "GENERATED %s AS IDENTITY" % (
4966 "ALWAYS" if identity.always else "BY DEFAULT",
4967 )
4968 options = self.get_identity_options(identity)
4969 if options:
4970 text += " (%s)" % options
4971 return text
4974class GenericTypeCompiler(TypeCompiler):
4975 def visit_FLOAT(self, type_, **kw):
4976 return "FLOAT"
4978 def visit_REAL(self, type_, **kw):
4979 return "REAL"
4981 def visit_NUMERIC(self, type_, **kw):
4982 if type_.precision is None:
4983 return "NUMERIC"
4984 elif type_.scale is None:
4985 return "NUMERIC(%(precision)s)" % {"precision": type_.precision}
4986 else:
4987 return "NUMERIC(%(precision)s, %(scale)s)" % {
4988 "precision": type_.precision,
4989 "scale": type_.scale,
4990 }
4992 def visit_DECIMAL(self, type_, **kw):
4993 if type_.precision is None:
4994 return "DECIMAL"
4995 elif type_.scale is None:
4996 return "DECIMAL(%(precision)s)" % {"precision": type_.precision}
4997 else:
4998 return "DECIMAL(%(precision)s, %(scale)s)" % {
4999 "precision": type_.precision,
5000 "scale": type_.scale,
5001 }
5003 def visit_INTEGER(self, type_, **kw):
5004 return "INTEGER"
5006 def visit_SMALLINT(self, type_, **kw):
5007 return "SMALLINT"
5009 def visit_BIGINT(self, type_, **kw):
5010 return "BIGINT"
5012 def visit_TIMESTAMP(self, type_, **kw):
5013 return "TIMESTAMP"
5015 def visit_DATETIME(self, type_, **kw):
5016 return "DATETIME"
5018 def visit_DATE(self, type_, **kw):
5019 return "DATE"
5021 def visit_TIME(self, type_, **kw):
5022 return "TIME"
5024 def visit_CLOB(self, type_, **kw):
5025 return "CLOB"
5027 def visit_NCLOB(self, type_, **kw):
5028 return "NCLOB"
5030 def _render_string_type(self, type_, name):
5032 text = name
5033 if type_.length:
5034 text += "(%d)" % type_.length
5035 if type_.collation:
5036 text += ' COLLATE "%s"' % type_.collation
5037 return text
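# Illustrative usage sketch (not part of compiler.py): the visit_* methods
# above can be seen by compiling a type directly against the default
# dialect:
#
#     from sqlalchemy import String, Numeric
#     print(String(30).compile())         # VARCHAR(30)
#     print(Numeric(10, 2).compile())     # NUMERIC(10, 2)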
5039 def visit_CHAR(self, type_, **kw):
5040 return self._render_string_type(type_, "CHAR")
5042 def visit_NCHAR(self, type_, **kw):
5043 return self._render_string_type(type_, "NCHAR")
5045 def visit_VARCHAR(self, type_, **kw):
5046 return self._render_string_type(type_, "VARCHAR")
5048 def visit_NVARCHAR(self, type_, **kw):
5049 return self._render_string_type(type_, "NVARCHAR")
5051 def visit_TEXT(self, type_, **kw):
5052 return self._render_string_type(type_, "TEXT")
5054 def visit_BLOB(self, type_, **kw):
5055 return "BLOB"
5057 def visit_BINARY(self, type_, **kw):
5058 return "BINARY" + (type_.length and "(%d)" % type_.length or "")
5060 def visit_VARBINARY(self, type_, **kw):
5061 return "VARBINARY" + (type_.length and "(%d)" % type_.length or "")
5063 def visit_BOOLEAN(self, type_, **kw):
5064 return "BOOLEAN"
5066 def visit_large_binary(self, type_, **kw):
5067 return self.visit_BLOB(type_, **kw)
5069 def visit_boolean(self, type_, **kw):
5070 return self.visit_BOOLEAN(type_, **kw)
5072 def visit_time(self, type_, **kw):
5073 return self.visit_TIME(type_, **kw)
5075 def visit_datetime(self, type_, **kw):
5076 return self.visit_DATETIME(type_, **kw)
5078 def visit_date(self, type_, **kw):
5079 return self.visit_DATE(type_, **kw)
5081 def visit_big_integer(self, type_, **kw):
5082 return self.visit_BIGINT(type_, **kw)
5084 def visit_small_integer(self, type_, **kw):
5085 return self.visit_SMALLINT(type_, **kw)
5087 def visit_integer(self, type_, **kw):
5088 return self.visit_INTEGER(type_, **kw)
5090 def visit_real(self, type_, **kw):
5091 return self.visit_REAL(type_, **kw)
5093 def visit_float(self, type_, **kw):
5094 return self.visit_FLOAT(type_, **kw)
5096 def visit_numeric(self, type_, **kw):
5097 return self.visit_NUMERIC(type_, **kw)
5099 def visit_string(self, type_, **kw):
5100 return self.visit_VARCHAR(type_, **kw)
5102 def visit_unicode(self, type_, **kw):
5103 return self.visit_VARCHAR(type_, **kw)
5105 def visit_text(self, type_, **kw):
5106 return self.visit_TEXT(type_, **kw)
5108 def visit_unicode_text(self, type_, **kw):
5109 return self.visit_TEXT(type_, **kw)
5111 def visit_enum(self, type_, **kw):
5112 return self.visit_VARCHAR(type_, **kw)
5114 def visit_null(self, type_, **kw):
5115 raise exc.CompileError(
5116 "Can't generate DDL for %r; "
5117 "did you forget to specify a "
5118 "type on this Column?" % type_
5119 )
5121 def visit_type_decorator(self, type_, **kw):
5122 return self.process(type_.type_engine(self.dialect), **kw)
5124 def visit_user_defined(self, type_, **kw):
5125 return type_.get_col_spec(**kw)
5128class StrSQLTypeCompiler(GenericTypeCompiler):
5129 def process(self, type_, **kw):
5130 try:
5131 _compiler_dispatch = type_._compiler_dispatch
5132 except AttributeError:
5133 return self._visit_unknown(type_, **kw)
5134 else:
5135 return _compiler_dispatch(self, **kw)
5137 def __getattr__(self, key):
5138 if key.startswith("visit_"):
5139 return self._visit_unknown
5140 else:
5141 raise AttributeError(key)
5143 def _visit_unknown(self, type_, **kw):
5144 if type_.__class__.__name__ == type_.__class__.__name__.upper():
5145 return type_.__class__.__name__
5146 else:
5147 return repr(type_)
5149 def visit_null(self, type_, **kw):
5150 return "NULL"
5152 def visit_user_defined(self, type_, **kw):
5153 try:
5154 get_col_spec = type_.get_col_spec
5155 except AttributeError:
5156 return repr(type_)
5157 else:
5158 return get_col_spec(**kw)
5161class IdentifierPreparer(object):
5163 """Handle quoting and case-folding of identifiers based on options."""
5165 reserved_words = RESERVED_WORDS
5167 legal_characters = LEGAL_CHARACTERS
5169 illegal_initial_characters = ILLEGAL_INITIAL_CHARACTERS
5171 schema_for_object = operator.attrgetter("schema")
5172 """Return the .schema attribute for an object.
5174 For the default IdentifierPreparer, the schema for an object is always
5175 the value of the ".schema" attribute. If the preparer is replaced
5176 with one that has a non-empty schema_translate_map, the value of the
5177 ".schema" attribute is rendered as a symbol that will be converted to a
5178 real schema name from the mapping post-compile.
5180 """
5182 def __init__(
5183 self,
5184 dialect,
5185 initial_quote='"',
5186 final_quote=None,
5187 escape_quote='"',
5188 quote_case_sensitive_collations=True,
5189 omit_schema=False,
5190 ):
5191 """Construct a new ``IdentifierPreparer`` object.
5193 initial_quote
5194 Character that begins a delimited identifier.
5196 final_quote
5197 Character that ends a delimited identifier. Defaults to
5198 `initial_quote`.
5200 omit_schema
5201 Prevent prepending schema name. Useful for databases that do
5202 not support schemas.
5203 """
5205 self.dialect = dialect
5206 self.initial_quote = initial_quote
5207 self.final_quote = final_quote or self.initial_quote
5208 self.escape_quote = escape_quote
5209 self.escape_to_quote = self.escape_quote * 2
5210 self.omit_schema = omit_schema
5211 self.quote_case_sensitive_collations = quote_case_sensitive_collations
5212 self._strings = {}
5213 self._double_percents = self.dialect.paramstyle in (
5214 "format",
5215 "pyformat",
5216 )
5218 def _with_schema_translate(self, schema_translate_map):
5219 prep = self.__class__.__new__(self.__class__)
5220 prep.__dict__.update(self.__dict__)
5222 def symbol_getter(obj):
5223 name = obj.schema
5224 if name in schema_translate_map and obj._use_schema_map:
5225 if name is not None and ("[" in name or "]" in name):
5226 raise exc.CompileError(
5227 "Square bracket characters ([]) not supported "
5228 "in schema translate name '%s'" % name
5229 )
5230 return quoted_name(
5231 "__[SCHEMA_%s]" % (name or "_none"), quote=False
5232 )
5233 else:
5234 return obj.schema
5236 prep.schema_for_object = symbol_getter
5237 return prep
5239 def _render_schema_translates(self, statement, schema_translate_map):
5240 d = schema_translate_map
5241 if None in d:
5242 d["_none"] = d[None]
5244 def replace(m):
5245 name = m.group(2)
5246 effective_schema = d[name]
5247 if not effective_schema:
5248 effective_schema = self.dialect.default_schema_name
5249 if not effective_schema:
5250 # TODO: no coverage here
5251 raise exc.CompileError(
5252 "Dialect has no default schema name; can't "
5253 "use None as dynamic schema target."
5254 )
5255 return self.quote_schema(effective_schema)
5257 return re.sub(r"(__\[SCHEMA_([^\]]+)\])", replace, statement)
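# Illustrative usage sketch (not part of compiler.py): the "__[SCHEMA_...]"
# placeholders written under a schema translate map are substituted here at
# execution time, driven by the schema_translate_map execution option;
# "engine" and "my_table" below are assumed to exist already:
#
#     with engine.connect().execution_options(
#         schema_translate_map={None: "test_schema"}
#     ) as conn:
#         conn.execute(my_table.select())   # renders test_schema.my_table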
5259 def _escape_identifier(self, value):
5260 """Escape an identifier.
5262 Subclasses should override this to provide database-dependent
5263 escaping behavior.
5264 """
5266 value = value.replace(self.escape_quote, self.escape_to_quote)
5267 if self._double_percents:
5268 value = value.replace("%", "%%")
5269 return value
5271 def _unescape_identifier(self, value):
5272 """Canonicalize an escaped identifier.
5274 Subclasses should override this to provide database-dependent
5275 unescaping behavior that reverses _escape_identifier.
5276 """
5278 return value.replace(self.escape_to_quote, self.escape_quote)
5280 def validate_sql_phrase(self, element, reg):
5281 """keyword sequence filter.
5283 a filter for elements that are intended to represent keyword sequences,
5284 such as "INITIALLY", "INITIALLY DEFERRED", etc. no special characters
5285 should be present.
5287 .. versionadded:: 1.3
5289 """
5291 if element is not None and not reg.match(element):
5292 raise exc.CompileError(
5293 "Unexpected SQL phrase: %r (matching against %r)"
5294 % (element, reg.pattern)
5295 )
5296 return element
5298 def quote_identifier(self, value):
5299 """Quote an identifier.
5301 Subclasses should override this to provide database-dependent
5302 quoting behavior.
5303 """
5305 return (
5306 self.initial_quote
5307 + self._escape_identifier(value)
5308 + self.final_quote
5309 )
5311 def _requires_quotes(self, value):
5312 """Return True if the given identifier requires quoting."""
5313 lc_value = value.lower()
5314 return (
5315 lc_value in self.reserved_words
5316 or value[0] in self.illegal_initial_characters
5317 or not self.legal_characters.match(util.text_type(value))
5318 or (lc_value != value)
5319 )
5321 def _requires_quotes_illegal_chars(self, value):
5322 """Return True if the given identifier requires quoting, but
5323 not taking case convention into account."""
5324 return not self.legal_characters.match(util.text_type(value))
5326 def quote_schema(self, schema, force=None):
5327 """Conditionally quote a schema name.
5330 The name is quoted if it is a reserved word, contains quote-necessary
5331 characters, or is an instance of :class:`.quoted_name` which includes
5332 ``quote`` set to ``True``.
5334 Subclasses can override this to provide database-dependent
5335 quoting behavior for schema names.
5337 :param schema: string schema name
5338 :param force: unused
5340 .. deprecated:: 0.9
5342 The :paramref:`.IdentifierPreparer.quote_schema.force`
5343 parameter is deprecated and will be removed in a future
5344 release. This flag has no effect on the behavior of the
5345 :meth:`.IdentifierPreparer.quote` method; please refer to
5346 :class:`.quoted_name`.
5348 """
5349 if force is not None:
5350 # not using the util.deprecated_params() decorator in this
5351 # case because of the additional function call overhead on this
5352 # very performance-critical spot.
5353 util.warn_deprecated(
5354 "The IdentifierPreparer.quote_schema.force parameter is "
5355 "deprecated and will be removed in a future release. This "
5356 "flag has no effect on the behavior of the "
5357 "IdentifierPreparer.quote method; please refer to "
5358 "quoted_name().",
5359 # deprecated 0.9. warning from 1.3
5360 version="0.9",
5361 )
5363 return self.quote(schema)
5365 def quote(self, ident, force=None):
5366 """Conditionally quote an identifier.
5368 The identifier is quoted if it is a reserved word, contains
5369 quote-necessary characters, or is an instance of
5370 :class:`.quoted_name` which includes ``quote`` set to ``True``.
5372 Subclasses can override this to provide database-dependent
5373 quoting behavior for identifier names.
5375 :param ident: string identifier
5376 :param force: unused
5378 .. deprecated:: 0.9
5380 The :paramref:`.IdentifierPreparer.quote.force`
5381 parameter is deprecated and will be removed in a future
5382 release. This flag has no effect on the behavior of the
5383 :meth:`.IdentifierPreparer.quote` method; please refer to
5384 :class:`.quoted_name`.
5386 """
5387 if force is not None:
5388 # not using the util.deprecated_params() decorator in this
5389 # case because of the additional function call overhead on this
5390 # very performance-critical spot.
5391 util.warn_deprecated(
5392 "The IdentifierPreparer.quote.force parameter is "
5393 "deprecated and will be removed in a future release. This "
5394 "flag has no effect on the behavior of the "
5395 "IdentifierPreparer.quote method; please refer to "
5396 "quoted_name().",
5397 # deprecated 0.9. warning from 1.3
5398 version="0.9",
5399 )
5401 force = getattr(ident, "quote", None)
5403 if force is None:
5404 if ident in self._strings:
5405 return self._strings[ident]
5406 else:
5407 if self._requires_quotes(ident):
5408 self._strings[ident] = self.quote_identifier(ident)
5409 else:
5410 self._strings[ident] = ident
5411 return self._strings[ident]
5412 elif force:
5413 return self.quote_identifier(ident)
5414 else:
5415 return ident
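# Illustrative usage sketch (not part of compiler.py): conditional quoting
# with the generic default dialect and a few example identifiers:
#
#     from sqlalchemy.engine import default
#     from sqlalchemy.sql.compiler import IdentifierPreparer
#     prep = IdentifierPreparer(default.DefaultDialect())
#     prep.quote("account")    # 'account'     (no quoting needed)
#     prep.quote("select")     # '"select"'    (reserved word)
#     prep.quote("MixedCase")  # '"MixedCase"' (not case-convention safe)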
5417 def format_collation(self, collation_name):
5418 if self.quote_case_sensitive_collations:
5419 return self.quote(collation_name)
5420 else:
5421 return collation_name
5423 def format_sequence(self, sequence, use_schema=True):
5424 name = self.quote(sequence.name)
5426 effective_schema = self.schema_for_object(sequence)
5428 if (
5429 not self.omit_schema
5430 and use_schema
5431 and effective_schema is not None
5432 ):
5433 name = self.quote_schema(effective_schema) + "." + name
5434 return name
5436 def format_label(self, label, name=None):
5437 return self.quote(name or label.name)
5439 def format_alias(self, alias, name=None):
5440 return self.quote(name or alias.name)
5442 def format_savepoint(self, savepoint, name=None):
5443 # Running the savepoint name through quoting is unnecessary
5444 # for all known dialects. This is here to support potential
5445 # third-party use cases.
5446 ident = name or savepoint.ident
5447 if self._requires_quotes(ident):
5448 ident = self.quote_identifier(ident)
5449 return ident
5451 @util.preload_module("sqlalchemy.sql.naming")
5452 def format_constraint(self, constraint, _alembic_quote=True):
5453 naming = util.preloaded.sql_naming
5455 if constraint.name is elements._NONE_NAME:
5456 name = naming._constraint_name_for_table(
5457 constraint, constraint.table
5458 )
5460 if name is None:
5461 return None
5462 else:
5463 name = constraint.name
5465 if constraint.__visit_name__ == "index":
5466 return self.truncate_and_render_index_name(
5467 name, _alembic_quote=_alembic_quote
5468 )
5469 else:
5470 return self.truncate_and_render_constraint_name(
5471 name, _alembic_quote=_alembic_quote
5472 )
5474 def truncate_and_render_index_name(self, name, _alembic_quote=True):
5475 # calculate these at format time so that ad-hoc changes
5476 # to dialect.max_identifier_length etc. can be reflected
5477 # as IdentifierPreparer is long lived
5478 max_ = (
5479 self.dialect.max_index_name_length
5480 or self.dialect.max_identifier_length
5481 )
5482 return self._truncate_and_render_maxlen_name(
5483 name, max_, _alembic_quote
5484 )
5486 def truncate_and_render_constraint_name(self, name, _alembic_quote=True):
5487 # calculate these at format time so that ad-hoc changes
5488 # to dialect.max_identifier_length etc. can be reflected
5489 # as IdentifierPreparer is long lived
5490 max_ = (
5491 self.dialect.max_constraint_name_length
5492 or self.dialect.max_identifier_length
5493 )
5494 return self._truncate_and_render_maxlen_name(
5495 name, max_, _alembic_quote
5496 )
5498 def _truncate_and_render_maxlen_name(self, name, max_, _alembic_quote):
5499 if isinstance(name, elements._truncated_label):
5500 if len(name) > max_:
5501 name = name[0 : max_ - 8] + "_" + util.md5_hex(name)[-4:]
5502 else:
5503 self.dialect.validate_identifier(name)
5505 if not _alembic_quote:
5506 return name
5507 else:
5508 return self.quote(name)
5510 def format_index(self, index):
5511 return self.format_constraint(index)
5513 def format_table(self, table, use_schema=True, name=None):
5514 """Prepare a quoted table and schema name."""
5516 if name is None:
5517 name = table.name
5519 result = self.quote(name)
5521 effective_schema = self.schema_for_object(table)
5523 if not self.omit_schema and use_schema and effective_schema:
5524 result = self.quote_schema(effective_schema) + "." + result
5525 return result
5527 def format_schema(self, name):
5528 """Prepare a quoted schema name."""
5530 return self.quote(name)
5532 def format_label_name(
5533 self,
5534 name,
5535 anon_map=None,
5536 ):
5537 """Prepare a quoted column name."""
5539 if anon_map is not None and isinstance(
5540 name, elements._truncated_label
5541 ):
5542 name = name.apply_map(anon_map)
5544 return self.quote(name)
5546 def format_column(
5547 self,
5548 column,
5549 use_table=False,
5550 name=None,
5551 table_name=None,
5552 use_schema=False,
5553 anon_map=None,
5554 ):
5555 """Prepare a quoted column name."""
5557 if name is None:
5558 name = column.name
5560 if anon_map is not None and isinstance(
5561 name, elements._truncated_label
5562 ):
5563 name = name.apply_map(anon_map)
5565 if not getattr(column, "is_literal", False):
5566 if use_table:
5567 return (
5568 self.format_table(
5569 column.table, use_schema=use_schema, name=table_name
5570 )
5571 + "."
5572 + self.quote(name)
5573 )
5574 else:
5575 return self.quote(name)
5576 else:
5577 # literal textual elements get stuck into ColumnClause a lot,
5578 # which shouldn't get quoted
5580 if use_table:
5581 return (
5582 self.format_table(
5583 column.table, use_schema=use_schema, name=table_name
5584 )
5585 + "."
5586 + name
5587 )
5588 else:
5589 return name
5591 def format_table_seq(self, table, use_schema=True):
5592 """Format table name and schema as a tuple."""
5594 # Dialects with more levels in their fully qualified references
5595 # ('database', 'owner', etc.) could override this and return
5596 # a longer sequence.
5598 effective_schema = self.schema_for_object(table)
5600 if not self.omit_schema and use_schema and effective_schema:
5601 return (
5602 self.quote_schema(effective_schema),
5603 self.format_table(table, use_schema=False),
5604 )
5605 else:
5606 return (self.format_table(table, use_schema=False),)
5608 @util.memoized_property
5609 def _r_identifiers(self):
5610 initial, final, escaped_final = [
5611 re.escape(s)
5612 for s in (
5613 self.initial_quote,
5614 self.final_quote,
5615 self._escape_identifier(self.final_quote),
5616 )
5617 ]
5618 r = re.compile(
5619 r"(?:"
5620 r"(?:%(initial)s((?:%(escaped)s|[^%(final)s])+)%(final)s"
5621 r"|([^\.]+))(?=\.|$))+"
5622 % {"initial": initial, "final": final, "escaped": escaped_final}
5623 )
5624 return r
5626 def unformat_identifiers(self, identifiers):
5627 """Unpack 'schema.table.column'-like strings into components."""
5629 r = self._r_identifiers
5630 return [
5631 self._unescape_identifier(i)
5632 for i in [a or b for a, b in r.findall(identifiers)]
5633 ]
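# Illustrative usage sketch (not part of compiler.py): unformat_identifiers()
# splits a dotted, possibly-quoted name back into its components:
#
#     from sqlalchemy.engine import default
#     prep = default.DefaultDialect().identifier_preparer
#     prep.unformat_identifiers('"my schema".account.id')
#     # ['my schema', 'account', 'id']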