Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/sqlalchemy/sql/compiler.py: 19%
2197 statements
coverage.py v7.2.7, created at 2023-06-07 06:35 +0000
1# sql/compiler.py
2# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
3# <see AUTHORS file>
4#
5# This module is part of SQLAlchemy and is released under
6# the MIT License: https://www.opensource.org/licenses/mit-license.php
8"""Base SQL and DDL compiler implementations.
10Classes provided include:
12:class:`.compiler.SQLCompiler` - renders SQL
13strings
15:class:`.compiler.DDLCompiler` - renders DDL
16(data definition language) strings
18:class:`.compiler.GenericTypeCompiler` - renders
19type specification strings.
21To generate user-defined SQL strings, see
22:doc:`/ext/compiler`.
24"""
26import collections
27import contextlib
28import itertools
29import operator
30import re
32from . import base
33from . import coercions
34from . import crud
35from . import elements
36from . import functions
37from . import operators
38from . import schema
39from . import selectable
40from . import sqltypes
41from . import util as sql_util
42from .base import NO_ARG
43from .base import prefix_anon_map
44from .elements import quoted_name
45from .. import exc
46from .. import util
48RESERVED_WORDS = set(
49 [
50 "all",
51 "analyse",
52 "analyze",
53 "and",
54 "any",
55 "array",
56 "as",
57 "asc",
58 "asymmetric",
59 "authorization",
60 "between",
61 "binary",
62 "both",
63 "case",
64 "cast",
65 "check",
66 "collate",
67 "column",
68 "constraint",
69 "create",
70 "cross",
71 "current_date",
72 "current_role",
73 "current_time",
74 "current_timestamp",
75 "current_user",
76 "default",
77 "deferrable",
78 "desc",
79 "distinct",
80 "do",
81 "else",
82 "end",
83 "except",
84 "false",
85 "for",
86 "foreign",
87 "freeze",
88 "from",
89 "full",
90 "grant",
91 "group",
92 "having",
93 "ilike",
94 "in",
95 "initially",
96 "inner",
97 "intersect",
98 "into",
99 "is",
100 "isnull",
101 "join",
102 "leading",
103 "left",
104 "like",
105 "limit",
106 "localtime",
107 "localtimestamp",
108 "natural",
109 "new",
110 "not",
111 "notnull",
112 "null",
113 "off",
114 "offset",
115 "old",
116 "on",
117 "only",
118 "or",
119 "order",
120 "outer",
121 "overlaps",
122 "placing",
123 "primary",
124 "references",
125 "right",
126 "select",
127 "session_user",
128 "set",
129 "similar",
130 "some",
131 "symmetric",
132 "table",
133 "then",
134 "to",
135 "trailing",
136 "true",
137 "union",
138 "unique",
139 "user",
140 "using",
141 "verbose",
142 "when",
143 "where",
144 ]
145)
147LEGAL_CHARACTERS = re.compile(r"^[A-Z0-9_$]+$", re.I)
148LEGAL_CHARACTERS_PLUS_SPACE = re.compile(r"^[A-Z0-9_ $]+$", re.I)
149ILLEGAL_INITIAL_CHARACTERS = {str(x) for x in range(0, 10)}.union(["$"])
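# A rough, self-contained sketch of the kind of check these three constants
# support; the real logic lives in IdentifierPreparer._requires_quotes and is
# more involved (case handling, dialect-specific word lists), so treat this
# only as an approximation:
from sqlalchemy.sql.compiler import (
    ILLEGAL_INITIAL_CHARACTERS,
    LEGAL_CHARACTERS,
    RESERVED_WORDS,
)

def needs_quoting(name):
    return (
        name.lower() in RESERVED_WORDS            # e.g. "user", "select"
        or name[0] in ILLEGAL_INITIAL_CHARACTERS  # leading digit or "$"
        or not LEGAL_CHARACTERS.match(name)       # spaces, punctuation, ...
    )

assert needs_quoting("user") and needs_quoting("2fast")
assert not needs_quoting("my_table")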
151FK_ON_DELETE = re.compile(
152 r"^(?:RESTRICT|CASCADE|SET NULL|NO ACTION|SET DEFAULT)$", re.I
153)
154FK_ON_UPDATE = re.compile(
155 r"^(?:RESTRICT|CASCADE|SET NULL|NO ACTION|SET DEFAULT)$", re.I
156)
157FK_INITIALLY = re.compile(r"^(?:DEFERRED|IMMEDIATE)$", re.I)
158BIND_PARAMS = re.compile(r"(?<![:\w\$\x5c]):([\w\$]+)(?![:\w\$])", re.UNICODE)
159BIND_PARAMS_ESC = re.compile(r"\x5c(:[\w\$]*)(?![:\w\$])", re.UNICODE)
161BIND_TEMPLATES = {
162 "pyformat": "%%(%(name)s)s",
163 "qmark": "?",
164 "format": "%%s",
165 "numeric": ":[_POSITION]",
166 "named": ":%(name)s",
167}
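# A small, self-contained illustration of how these templates expand a bound
# parameter name with ordinary %-formatting (roughly how the compiler applies
# self.bindtemplate when rendering a bind; "user_id" is just a made-up name):
from sqlalchemy.sql.compiler import BIND_TEMPLATES

assert BIND_TEMPLATES["named"] % {"name": "user_id"} == ":user_id"
assert BIND_TEMPLATES["pyformat"] % {"name": "user_id"} == "%(user_id)s"
# "qmark" and "format" are purely positional, so the name is not interpolated.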
169_BIND_TRANSLATE_RE = re.compile(r"[%\(\):\[\] ]")
170_BIND_TRANSLATE_CHARS = dict(zip("%():[] ", "PAZC___"))
172OPERATORS = {
173 # binary
174 operators.and_: " AND ",
175 operators.or_: " OR ",
176 operators.add: " + ",
177 operators.mul: " * ",
178 operators.sub: " - ",
179 operators.div: " / ",
180 operators.mod: " % ",
181 operators.truediv: " / ",
182 operators.neg: "-",
183 operators.lt: " < ",
184 operators.le: " <= ",
185 operators.ne: " != ",
186 operators.gt: " > ",
187 operators.ge: " >= ",
188 operators.eq: " = ",
189 operators.is_distinct_from: " IS DISTINCT FROM ",
190 operators.is_not_distinct_from: " IS NOT DISTINCT FROM ",
191 operators.concat_op: " || ",
192 operators.match_op: " MATCH ",
193 operators.not_match_op: " NOT MATCH ",
194 operators.in_op: " IN ",
195 operators.not_in_op: " NOT IN ",
196 operators.comma_op: ", ",
197 operators.from_: " FROM ",
198 operators.as_: " AS ",
199 operators.is_: " IS ",
200 operators.is_not: " IS NOT ",
201 operators.collate: " COLLATE ",
202 # unary
203 operators.exists: "EXISTS ",
204 operators.distinct_op: "DISTINCT ",
205 operators.inv: "NOT ",
206 operators.any_op: "ANY ",
207 operators.all_op: "ALL ",
208 # modifiers
209 operators.desc_op: " DESC",
210 operators.asc_op: " ASC",
211 operators.nulls_first_op: " NULLS FIRST",
212 operators.nulls_last_op: " NULLS LAST",
213}
215FUNCTIONS = {
216 functions.coalesce: "coalesce",
217 functions.current_date: "CURRENT_DATE",
218 functions.current_time: "CURRENT_TIME",
219 functions.current_timestamp: "CURRENT_TIMESTAMP",
220 functions.current_user: "CURRENT_USER",
221 functions.localtime: "LOCALTIME",
222 functions.localtimestamp: "LOCALTIMESTAMP",
223 functions.random: "random",
224 functions.sysdate: "sysdate",
225 functions.session_user: "SESSION_USER",
226 functions.user: "USER",
227 functions.cube: "CUBE",
228 functions.rollup: "ROLLUP",
229 functions.grouping_sets: "GROUPING SETS",
230}
232EXTRACT_MAP = {
233 "month": "month",
234 "day": "day",
235 "year": "year",
236 "second": "second",
237 "hour": "hour",
238 "doy": "doy",
239 "minute": "minute",
240 "quarter": "quarter",
241 "dow": "dow",
242 "week": "week",
243 "epoch": "epoch",
244 "milliseconds": "milliseconds",
245 "microseconds": "microseconds",
246 "timezone_hour": "timezone_hour",
247 "timezone_minute": "timezone_minute",
248}
250COMPOUND_KEYWORDS = {
251 selectable.CompoundSelect.UNION: "UNION",
252 selectable.CompoundSelect.UNION_ALL: "UNION ALL",
253 selectable.CompoundSelect.EXCEPT: "EXCEPT",
254 selectable.CompoundSelect.EXCEPT_ALL: "EXCEPT ALL",
255 selectable.CompoundSelect.INTERSECT: "INTERSECT",
256 selectable.CompoundSelect.INTERSECT_ALL: "INTERSECT ALL",
257}
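# For example, a UNION ALL of two simple selects picks up the keyword mapped
# above (output shown approximately, as rendered by the default string
# compiler; whitespace and newlines differ):
from sqlalchemy import column, select, union_all

stmt = union_all(select(column("x")), select(column("y")))
print(stmt)  # SELECT x UNION ALL SELECT y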
260RM_RENDERED_NAME = 0
261RM_NAME = 1
262RM_OBJECTS = 2
263RM_TYPE = 3
266ExpandedState = collections.namedtuple(
267 "ExpandedState",
268 [
269 "statement",
270 "additional_parameters",
271 "processors",
272 "positiontup",
273 "parameter_expansion",
274 ],
275)
278NO_LINTING = util.symbol("NO_LINTING", "Disable all linting.", canonical=0)
280COLLECT_CARTESIAN_PRODUCTS = util.symbol(
281 "COLLECT_CARTESIAN_PRODUCTS",
282 "Collect data on FROMs and cartesian products and gather "
283 "into 'self.from_linter'",
284 canonical=1,
285)
287WARN_LINTING = util.symbol(
288 "WARN_LINTING", "Emit warnings for linters that find problems", canonical=2
289)
291FROM_LINTING = util.symbol(
292 "FROM_LINTING",
293 "Warn for cartesian products; "
294 "combines COLLECT_CARTESIAN_PRODUCTS and WARN_LINTING",
295 canonical=COLLECT_CARTESIAN_PRODUCTS | WARN_LINTING,
296)
299class FromLinter(collections.namedtuple("FromLinter", ["froms", "edges"])):
300 def lint(self, start=None):
301 froms = self.froms
302 if not froms:
303 return None, None
305 edges = set(self.edges)
306 the_rest = set(froms)
308 if start is not None:
309 start_with = start
310 the_rest.remove(start_with)
311 else:
312 start_with = the_rest.pop()
314 stack = collections.deque([start_with])
316 while stack and the_rest:
317 node = stack.popleft()
318 the_rest.discard(node)
320 # comparison of nodes in edges here is based on hash equality, as
321 # there are "annotated" elements that match the non-annotated ones.
322 # to remove the need for in-python hash() calls, use native
323 # containment routines (e.g. "node in edge", "edge.index(node)")
324 to_remove = {edge for edge in edges if node in edge}
326 # appendleft the node in each edge that is not
327 # the one that matched.
328 stack.extendleft(edge[not edge.index(node)] for edge in to_remove)
329 edges.difference_update(to_remove)
331 # FROMS left over? boom
332 if the_rest:
333 return the_rest, start_with
334 else:
335 return None, None
337 def warn(self):
338 the_rest, start_with = self.lint()
340 # FROMS left over? boom
341 if the_rest:
343 froms = the_rest
344 if froms:
345 template = (
346 "SELECT statement has a cartesian product between "
347 "FROM element(s) {froms} and "
348 'FROM element "{start}". Apply join condition(s) '
349 "between each element to resolve."
350 )
351 froms_str = ", ".join(
352 '"{elem}"'.format(elem=self.froms[from_])
353 for from_ in froms
354 )
355 message = template.format(
356 froms=froms_str, start=self.froms[start_with]
357 )
359 util.warn(message)
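# A toy run of the traversal above, using plain strings in place of real FROM
# objects and a single "join" edge; "c" is not reachable from "a", so it is
# reported as the leftover side of a cartesian product:
from sqlalchemy.sql.compiler import FromLinter

linter = FromLinter(
    froms={"a": "table_a", "b": "table_b", "c": "table_c"},
    edges={("a", "b")},
)
leftover, start = linter.lint(start="a")
assert leftover == {"c"} and start == "a"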
362class Compiled(object):
364 """Represent a compiled SQL or DDL expression.
366 The ``__str__`` method of the ``Compiled`` object should produce
367 the actual text of the statement. ``Compiled`` objects are
368 specific to their underlying database dialect, and also may
369 or may not be specific to the columns referenced within a
370 particular set of bind parameters. In no case should the
371 ``Compiled`` object be dependent on the actual values of those
372 bind parameters, even though it may reference those values as
373 defaults.
374 """
376 _cached_metadata = None
378 _result_columns = None
380 schema_translate_map = None
382 execution_options = util.EMPTY_DICT
383 """
384 Execution options propagated from the statement. In some cases,
385 sub-elements of the statement can modify these.
386 """
388 _annotations = util.EMPTY_DICT
390 compile_state = None
391 """Optional :class:`.CompileState` object that maintains additional
392 state used by the compiler.
394 Major executable objects such as :class:`_expression.Insert`,
395 :class:`_expression.Update`, :class:`_expression.Delete`,
396 :class:`_expression.Select` will generate this
397 state when compiled in order to calculate additional information about the
398 object. For the top level object that is to be executed, the state can be
399 stored here where it can also have applicability towards result set
400 processing.
402 .. versionadded:: 1.4
404 """
406 dml_compile_state = None
407 """Optional :class:`.CompileState` assigned at the same point that
408 .isinsert, .isupdate, or .isdelete is assigned.
410 This will normally be the same object as .compile_state, with the
411 exception of cases like the :class:`.ORMFromStatementCompileState`
412 object.
414 .. versionadded:: 1.4.40
416 """
418 cache_key = None
419 _gen_time = None
421 def __init__(
422 self,
423 dialect,
424 statement,
425 schema_translate_map=None,
426 render_schema_translate=False,
427 compile_kwargs=util.immutabledict(),
428 ):
429 """Construct a new :class:`.Compiled` object.
431 :param dialect: :class:`.Dialect` to compile against.
433 :param statement: :class:`_expression.ClauseElement` to be compiled.
435 :param schema_translate_map: dictionary of schema names to be
436 translated when forming the resultant SQL
438 .. versionadded:: 1.1
440 .. seealso::
442 :ref:`schema_translating`
444 :param compile_kwargs: additional kwargs that will be
445 passed to the initial call to :meth:`.Compiled.process`.
448 """
450 self.dialect = dialect
451 self.preparer = self.dialect.identifier_preparer
452 if schema_translate_map:
453 self.schema_translate_map = schema_translate_map
454 self.preparer = self.preparer._with_schema_translate(
455 schema_translate_map
456 )
458 if statement is not None:
459 self.statement = statement
460 self.can_execute = statement.supports_execution
461 self._annotations = statement._annotations
462 if self.can_execute:
463 self.execution_options = statement._execution_options
464 self.string = self.process(self.statement, **compile_kwargs)
466 if render_schema_translate:
467 self.string = self.preparer._render_schema_translates(
468 self.string, schema_translate_map
469 )
470 self._gen_time = util.perf_counter()
472 def _execute_on_connection(
473 self, connection, multiparams, params, execution_options
474 ):
475 if self.can_execute:
476 return connection._execute_compiled(
477 self, multiparams, params, execution_options
478 )
479 else:
480 raise exc.ObjectNotExecutableError(self.statement)
482 def visit_unsupported_compilation(self, element, err):
483 util.raise_(
484 exc.UnsupportedCompilationError(self, type(element)),
485 replace_context=err,
486 )
488 @property
489 def sql_compiler(self):
490 """Return a Compiled that is capable of processing SQL expressions.
492 If this compiler is one, it would likely just return 'self'.
494 """
496 raise NotImplementedError()
498 def process(self, obj, **kwargs):
499 return obj._compiler_dispatch(self, **kwargs)
501 def __str__(self):
502 """Return the string text of the generated SQL or DDL."""
504 return self.string or ""
506 def construct_params(
507 self, params=None, extracted_parameters=None, escape_names=True
508 ):
509 """Return the bind params for this compiled object.
511 :param params: a dict of string/object pairs whose values will
512 override bind values compiled in to the
513 statement.
514 """
516 raise NotImplementedError()
518 @property
519 def params(self):
520 """Return the bind params for this compiled object."""
521 return self.construct_params()
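# A short end-to-end illustration of the two sides of a Compiled object:
# str() gives the SQL text, .params the bound values.  Table and column names
# are made up, and the exact string depends on the dialect in use:
from sqlalchemy import column, select, table

stmt = select(column("x")).select_from(table("t")).where(column("x") == 5)
compiled = stmt.compile()
print(str(compiled))    # SELECT x FROM t WHERE x = :x_1   (modulo newlines)
print(compiled.params)  # {'x_1': 5}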
524class TypeCompiler(util.with_metaclass(util.EnsureKWArgType, object)):
525 """Produces DDL specification for TypeEngine objects."""
527 ensure_kwarg = r"visit_\w+"
529 def __init__(self, dialect):
530 self.dialect = dialect
532 def process(self, type_, **kw):
533 return type_._compiler_dispatch(self, **kw)
535 def visit_unsupported_compilation(self, element, err, **kw):
536 util.raise_(
537 exc.UnsupportedCompilationError(self, element),
538 replace_context=err,
539 )
542# this was a Visitable, but to allow accurate detection of
543# column elements this is actually a column element
544class _CompileLabel(elements.ColumnElement):
546 """lightweight label object which acts as an expression.Label."""
548 __visit_name__ = "label"
549 __slots__ = "element", "name"
551 def __init__(self, col, name, alt_names=()):
552 self.element = col
553 self.name = name
554 self._alt_names = (col,) + alt_names
556 @property
557 def proxy_set(self):
558 return self.element.proxy_set
560 @property
561 def type(self):
562 return self.element.type
564 def self_group(self, **kw):
565 return self
568class SQLCompiler(Compiled):
569 """Default implementation of :class:`.Compiled`.
571 Compiles :class:`_expression.ClauseElement` objects into SQL strings.
573 """
575 extract_map = EXTRACT_MAP
577 compound_keywords = COMPOUND_KEYWORDS
579 isdelete = isinsert = isupdate = False
580 """class-level defaults which can be set at the instance
581 level to define if this Compiled instance represents
582 INSERT/UPDATE/DELETE
583 """
585 isplaintext = False
587 returning = None
588 """holds the "returning" collection of columns if
589 the statement is CRUD and defines returning columns
590 either implicitly or explicitly
591 """
593 returning_precedes_values = False
594 """set to True classwide to generate RETURNING
595 clauses before the VALUES or WHERE clause (i.e. MSSQL)
596 """
598 render_table_with_column_in_update_from = False
599 """set to True classwide to indicate the SET clause
600 in a multi-table UPDATE statement should qualify
601 columns with the table name (i.e. MySQL only)
602 """
604 ansi_bind_rules = False
605 """SQL 92 doesn't allow bind parameters to be used
606 in the columns clause of a SELECT, nor does it allow
607 ambiguous expressions like "? = ?". A compiler
608 subclass can set this flag to True if the target
609 driver/DB enforces this
610 """
612 _textual_ordered_columns = False
613 """tell the result object that the column names as rendered are important,
614 but they are also "ordered" vs. what is in the compiled object here.
616 As of 1.4.42 this condition is only present when the statement is a
617 TextualSelect, e.g. text("....").columns(...), where it is required
618 that the columns are considered positionally and not by name.
620 """
622 _ad_hoc_textual = False
623 """tell the result that we encountered text() or '*' constructs in the
624 middle of the result columns, but we also have compiled columns, so
625 if the number of columns in cursor.description does not match how many
626 expressions we have, that means we can't rely on positional at all and
627 should match on name.
629 """
631 _ordered_columns = True
632 """
633 if False, means we can't be sure the list of entries
634 in _result_columns is actually the rendered order. Usually
635 True unless using an unordered TextualSelect.
636 """
638 _loose_column_name_matching = False
639 """tell the result object that the SQL statement is textual, wants to match
640 up to Column objects, and may be using the ._tq_label in the SELECT rather
641 than the base name.
643 """
645 _numeric_binds = False
646 """
647 True if paramstyle is "numeric". This paramstyle is trickier than
648 all the others.
650 """
652 _render_postcompile = False
653 """
654 whether to render out POSTCOMPILE params during the compile phase.
656 """
658 insert_single_values_expr = None
659 """When an INSERT is compiled with a single set of parameters inside
660 a VALUES expression, the string is assigned here, where it can be
661 used for insert batching schemes to rewrite the VALUES expression.
663 .. versionadded:: 1.3.8
665 """
667 literal_execute_params = frozenset()
668 """bindparameter objects that are rendered as literal values at statement
669 execution time.
671 """
673 post_compile_params = frozenset()
674 """bindparameter objects that are rendered as bound parameter placeholders
675 at statement execution time.
677 """
679 escaped_bind_names = util.EMPTY_DICT
680 """Late escaping of bound parameter names that has to be converted
681 to the original name when looking in the parameter dictionary.
683 """
685 has_out_parameters = False
686 """if True, there are bindparam() objects that have the isoutparam
687 flag set."""
689 insert_prefetch = update_prefetch = ()
691 postfetch_lastrowid = False
692 """if True, and this in insert, use cursor.lastrowid to populate
693 result.inserted_primary_key. """
695 _cache_key_bind_match = None
696 """a mapping that will relate the BindParameter object we compile
697 to those that are part of the extracted collection of parameters
698 in the cache key, if we were given a cache key.
700 """
702 _post_compile_pattern = re.compile(r"__\[POSTCOMPILE_(\S+?)(~~.+?~~)?\]")
704 positiontup = None
705 """for a compiled construct that uses a positional paramstyle, will be
706 a sequence of strings, indicating the names of bound parameters in order.
708 This is used in order to render bound parameters in their correct order,
709 and is combined with the :attr:`_sql.Compiled.params` dictionary to
710 render parameters.
712 .. seealso::
714 :ref:`faq_sql_expression_string` - includes a usage example for
715 debugging use cases.
717 """
718 positiontup_level = None
720 inline = False
722 def __init__(
723 self,
724 dialect,
725 statement,
726 cache_key=None,
727 column_keys=None,
728 for_executemany=False,
729 linting=NO_LINTING,
730 **kwargs
731 ):
732 """Construct a new :class:`.SQLCompiler` object.
734 :param dialect: :class:`.Dialect` to be used
736 :param statement: :class:`_expression.ClauseElement` to be compiled
738 :param column_keys: a list of column names to be compiled into an
739 INSERT or UPDATE statement.
741 :param for_executemany: whether INSERT / UPDATE statements should
742 expect that they are to be invoked in an "executemany" style,
743 which may impact how the statement will be expected to return the
744 values of defaults and autoincrement / sequences and similar.
745 Depending on the backend and driver in use, support for retrieving
746 these values may be disabled which means SQL expressions may
747 be rendered inline, RETURNING may not be rendered, etc.
749 :param kwargs: additional keyword arguments to be consumed by the
750 superclass.
752 """
753 self.column_keys = column_keys
755 self.cache_key = cache_key
757 if cache_key:
758 self._cache_key_bind_match = ckbm = {
759 b.key: b for b in cache_key[1]
760 }
761 ckbm.update({b: [b] for b in cache_key[1]})
763 # compile INSERT/UPDATE defaults/sequences to expect executemany
764 # style execution, which may mean no pre-execute of defaults,
765 # or no RETURNING
766 self.for_executemany = for_executemany
768 self.linting = linting
770 # a dictionary of bind parameter keys to BindParameter
771 # instances.
772 self.binds = {}
774 # a dictionary of BindParameter instances to "compiled" names
775 # that are actually present in the generated SQL
776 self.bind_names = util.column_dict()
778 # stack which keeps track of nested SELECT statements
779 self.stack = []
781 # relates label names in the final SQL to a tuple of local
782 # column/label name, ColumnElement object (if any) and
783 # TypeEngine. CursorResult uses this for type processing and
784 # column targeting
785 self._result_columns = []
787 # true if the paramstyle is positional
788 self.positional = dialect.positional
789 if self.positional:
790 self.positiontup_level = {}
791 self.positiontup = []
792 self._numeric_binds = dialect.paramstyle == "numeric"
793 self.bindtemplate = BIND_TEMPLATES[dialect.paramstyle]
795 self.ctes = None
797 self.label_length = (
798 dialect.label_length or dialect.max_identifier_length
799 )
801 # a map which tracks "anonymous" identifiers that are created on
802 # the fly here
803 self.anon_map = prefix_anon_map()
805 # a map which tracks "truncated" names based on
806 # dialect.label_length or dialect.max_identifier_length
807 self.truncated_names = {}
809 Compiled.__init__(self, dialect, statement, **kwargs)
811 if self.isinsert or self.isupdate or self.isdelete:
812 if statement._returning:
813 self.returning = statement._returning
815 if self.isinsert or self.isupdate:
816 if statement._inline:
817 self.inline = True
818 elif self.for_executemany and (
819 not self.isinsert
820 or (
821 self.dialect.insert_executemany_returning
822 and statement._return_defaults
823 )
824 ):
825 self.inline = True
827 if self.positional and self._numeric_binds:
828 self._apply_numbered_params()
830 if self._render_postcompile:
831 self._process_parameters_for_postcompile(_populate_self=True)
833 @property
834 def current_executable(self):
835 """Return the current 'executable' that is being compiled.
837 This is currently the :class:`_sql.Select`, :class:`_sql.Insert`,
838 :class:`_sql.Update`, :class:`_sql.Delete`,
839 :class:`_sql.CompoundSelect` object that is being compiled.
840 Specifically it's assigned to the ``self.stack`` list of elements.
842 When a statement like the above is being compiled, it normally
843 is also assigned to the ``.statement`` attribute of the
844 :class:`_sql.Compiler` object. However, all SQL constructs are
845 ultimately nestable, and this attribute should never be consulted
846 by a ``visit_`` method, as it is not guaranteed to be assigned
847 nor guaranteed to correspond to the current statement being compiled.
849 .. versionadded:: 1.3.21
851 For compatibility with previous versions, use the following
852 recipe::
854 statement = getattr(self, "current_executable", False)
855 if statement is False:
856 statement = self.stack[-1]["selectable"]
858 For versions 1.4 and above, ensure only .current_executable
859 is used; the format of "self.stack" may change.
862 """
863 try:
864 return self.stack[-1]["selectable"]
865 except IndexError as ie:
866 util.raise_(
867 IndexError("Compiler does not have a stack entry"),
868 replace_context=ie,
869 )
871 @property
872 def prefetch(self):
873 return list(self.insert_prefetch + self.update_prefetch)
875 @util.memoized_property
876 def _global_attributes(self):
877 return {}
879 @util.memoized_instancemethod
880 def _init_cte_state(self):
881 """Initialize collections related to CTEs only if
882 a CTE is located, to save on the overhead of
883 these collections otherwise.
885 """
886 # collect CTEs to tack on top of a SELECT
887 # To store the query to print - Dict[cte, text_query]
888 self.ctes = util.OrderedDict()
890 # Detect same CTE references - Dict[(level, name), cte]
891 # Level is required for supporting nesting
892 self.ctes_by_level_name = {}
894 # To retrieve key/level in ctes_by_level_name -
895 # Dict[cte_reference, (level, cte_name)]
896 self.level_name_by_cte = {}
898 self.ctes_recursive = False
899 if self.positional:
900 self.cte_positional = {}
901 self.cte_level = {}
902 self.cte_order = collections.defaultdict(list)
904 @contextlib.contextmanager
905 def _nested_result(self):
906 """special API to support the use case of 'nested result sets'"""
907 result_columns, ordered_columns = (
908 self._result_columns,
909 self._ordered_columns,
910 )
911 self._result_columns, self._ordered_columns = [], False
913 try:
914 if self.stack:
915 entry = self.stack[-1]
916 entry["need_result_map_for_nested"] = True
917 else:
918 entry = None
919 yield self._result_columns, self._ordered_columns
920 finally:
921 if entry:
922 entry.pop("need_result_map_for_nested")
923 self._result_columns, self._ordered_columns = (
924 result_columns,
925 ordered_columns,
926 )
928 def _apply_numbered_params(self):
929 poscount = itertools.count(1)
930 self.string = re.sub(
931 r"\[_POSITION\]", lambda m: str(util.next(poscount)), self.string
932 )
934 @util.memoized_property
935 def _bind_processors(self):
937 return dict(
938 (
939 key,
940 value,
941 )
942 for key, value in (
943 (
944 self.bind_names[bindparam],
945 bindparam.type._cached_bind_processor(self.dialect)
946 if not bindparam.type._is_tuple_type
947 else tuple(
948 elem_type._cached_bind_processor(self.dialect)
949 for elem_type in bindparam.type.types
950 ),
951 )
952 for bindparam in self.bind_names
953 )
954 if value is not None
955 )
957 def is_subquery(self):
958 return len(self.stack) > 1
960 @property
961 def sql_compiler(self):
962 return self
964 def construct_params(
965 self,
966 params=None,
967 _group_number=None,
968 _check=True,
969 extracted_parameters=None,
970 escape_names=True,
971 ):
972 """return a dictionary of bind parameter keys and values"""
974 has_escaped_names = escape_names and bool(self.escaped_bind_names)
976 if extracted_parameters:
977 # related the bound parameters collected in the original cache key
978 # to those collected in the incoming cache key. They will not have
979 # matching names but they will line up positionally in the same
980 # way. The parameters present in self.bind_names may be clones of
981 # these original cache key params in the case of DML but the .key
982 # will be guaranteed to match.
983 try:
984 orig_extracted = self.cache_key[1]
985 except TypeError as err:
986 util.raise_(
987 exc.CompileError(
988 "This compiled object has no original cache key; "
989 "can't pass extracted_parameters to construct_params"
990 ),
991 replace_context=err,
992 )
994 ckbm = self._cache_key_bind_match
995 resolved_extracted = {
996 bind: extracted
997 for b, extracted in zip(orig_extracted, extracted_parameters)
998 for bind in ckbm[b]
999 }
1000 else:
1001 resolved_extracted = None
1003 if params:
1004 pd = {}
1005 for bindparam, name in self.bind_names.items():
1006 escaped_name = (
1007 self.escaped_bind_names.get(name, name)
1008 if has_escaped_names
1009 else name
1010 )
1012 if bindparam.key in params:
1013 pd[escaped_name] = params[bindparam.key]
1014 elif name in params:
1015 pd[escaped_name] = params[name]
1017 elif _check and bindparam.required:
1018 if _group_number:
1019 raise exc.InvalidRequestError(
1020 "A value is required for bind parameter %r, "
1021 "in parameter group %d"
1022 % (bindparam.key, _group_number),
1023 code="cd3x",
1024 )
1025 else:
1026 raise exc.InvalidRequestError(
1027 "A value is required for bind parameter %r"
1028 % bindparam.key,
1029 code="cd3x",
1030 )
1031 else:
1032 if resolved_extracted:
1033 value_param = resolved_extracted.get(
1034 bindparam, bindparam
1035 )
1036 else:
1037 value_param = bindparam
1039 if bindparam.callable:
1040 pd[escaped_name] = value_param.effective_value
1041 else:
1042 pd[escaped_name] = value_param.value
1043 return pd
1044 else:
1045 pd = {}
1046 for bindparam, name in self.bind_names.items():
1047 escaped_name = (
1048 self.escaped_bind_names.get(name, name)
1049 if has_escaped_names
1050 else name
1051 )
1053 if _check and bindparam.required:
1054 if _group_number:
1055 raise exc.InvalidRequestError(
1056 "A value is required for bind parameter %r, "
1057 "in parameter group %d"
1058 % (bindparam.key, _group_number),
1059 code="cd3x",
1060 )
1061 else:
1062 raise exc.InvalidRequestError(
1063 "A value is required for bind parameter %r"
1064 % bindparam.key,
1065 code="cd3x",
1066 )
1068 if resolved_extracted:
1069 value_param = resolved_extracted.get(bindparam, bindparam)
1070 else:
1071 value_param = bindparam
1073 if bindparam.callable:
1074 pd[escaped_name] = value_param.effective_value
1075 else:
1076 pd[escaped_name] = value_param.value
1077 return pd
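# construct_params() overlays values supplied at execution time onto the ones
# compiled into the statement.  A quick sketch using an explicitly named
# bindparam ("xval"); anonymous binds get compiler-generated keys instead:
from sqlalchemy import bindparam, column, select, table

stmt = (
    select(column("x"))
    .select_from(table("t"))
    .where(column("x") == bindparam("xval", value=5))
)
compiled = stmt.compile()
assert compiled.construct_params() == {"xval": 5}
assert compiled.construct_params({"xval": 99}) == {"xval": 99}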
1079 @util.memoized_instancemethod
1080 def _get_set_input_sizes_lookup(
1081 self, include_types=None, exclude_types=None
1082 ):
1083 if not hasattr(self, "bind_names"):
1084 return None
1086 dialect = self.dialect
1087 dbapi = self.dialect.dbapi
1089 # _unwrapped_dialect_impl() is necessary so that we get the
1090 # correct dialect type for a custom TypeDecorator, or a Variant,
1091 # which is also a TypeDecorator. Special types like Interval,
1092 # that use TypeDecorator but also might be mapped directly
1093 # for a dialect impl, also subclass Emulated first which overrides
1094 # this behavior in those cases to behave like the default.
1096 if include_types is None and exclude_types is None:
1098 def _lookup_type(typ):
1099 dbtype = typ.dialect_impl(dialect).get_dbapi_type(dbapi)
1100 return dbtype
1102 else:
1104 def _lookup_type(typ):
1105 # note we get dbtype from the possibly TypeDecorator-wrapped
1106 # dialect_impl, but the dialect_impl itself that we use for
1107 # include/exclude is the unwrapped version.
1109 dialect_impl = typ._unwrapped_dialect_impl(dialect)
1111 dbtype = typ.dialect_impl(dialect).get_dbapi_type(dbapi)
1113 if (
1114 dbtype is not None
1115 and (
1116 exclude_types is None
1117 or dbtype not in exclude_types
1118 and type(dialect_impl) not in exclude_types
1119 )
1120 and (
1121 include_types is None
1122 or dbtype in include_types
1123 or type(dialect_impl) in include_types
1124 )
1125 ):
1126 return dbtype
1127 else:
1128 return None
1130 inputsizes = {}
1131 literal_execute_params = self.literal_execute_params
1133 for bindparam in self.bind_names:
1134 if bindparam in literal_execute_params:
1135 continue
1137 if bindparam.type._is_tuple_type:
1138 inputsizes[bindparam] = [
1139 _lookup_type(typ) for typ in bindparam.type.types
1140 ]
1141 else:
1142 inputsizes[bindparam] = _lookup_type(bindparam.type)
1144 return inputsizes
1146 @property
1147 def params(self):
1148 """Return the bind param dictionary embedded into this
1149 compiled object, for those values that are present.
1151 .. seealso::
1153 :ref:`faq_sql_expression_string` - includes a usage example for
1154 debugging use cases.
1156 """
1157 return self.construct_params(_check=False)
1159 def _process_parameters_for_postcompile(
1160 self, parameters=None, _populate_self=False
1161 ):
1162 """handle special post compile parameters.
1164 These include:
1166 * "expanding" parameters -typically IN tuples that are rendered
1167 on a per-parameter basis for an otherwise fixed SQL statement string.
1169 * literal_binds compiled with the literal_execute flag. Used for
1170 things like SQL Server "TOP N" where the driver does not accommodate
1171 N as a bound parameter.
1173 """
1175 if parameters is None:
1176 parameters = self.construct_params(escape_names=False)
1178 expanded_parameters = {}
1179 if self.positional:
1180 positiontup = []
1181 else:
1182 positiontup = None
1184 processors = self._bind_processors
1186 new_processors = {}
1188 if self.positional and self._numeric_binds:
1189 # I'm not familiar with any DBAPI that uses 'numeric'.
1190 # strategy would likely be to make use of numbers greater than
1191 # the highest number present; then for expanding parameters,
1192 # append them to the end of the parameter list. that way
1193 # we avoid having to renumber all the existing parameters.
1194 raise NotImplementedError(
1195 "'post-compile' bind parameters are not supported with "
1196 "the 'numeric' paramstyle at this time."
1197 )
1199 replacement_expressions = {}
1200 to_update_sets = {}
1202 # notes:
1203 # *unescaped* parameter names in:
1204 # self.bind_names, self.binds, self._bind_processors
1205 #
1206 # *escaped* parameter names in:
1207 # construct_params(), replacement_expressions
1209 for name in (
1210 self.positiontup if self.positional else self.bind_names.values()
1211 ):
1212 escaped_name = (
1213 self.escaped_bind_names.get(name, name)
1214 if self.escaped_bind_names
1215 else name
1216 )
1218 parameter = self.binds[name]
1219 if parameter in self.literal_execute_params:
1220 if escaped_name not in replacement_expressions:
1221 value = parameters.pop(name)
1223 replacement_expressions[
1224 escaped_name
1225 ] = self.render_literal_bindparam(
1226 parameter,
1227 render_literal_value=value,
1228 )
1229 continue
1231 if parameter in self.post_compile_params:
1232 if escaped_name in replacement_expressions:
1233 to_update = to_update_sets[escaped_name]
1234 else:
1235 # we are removing the parameter from parameters
1236 # because it is a list value, which is not expected by
1237 # TypeEngine objects that would otherwise be asked to
1238 # process it. the single name is being replaced with
1239 # individual numbered parameters for each value in the
1240 # param.
1241 #
1242 # note we are also inserting *escaped* parameter names
1243 # into the given dictionary. default dialect will
1244 # use these param names directly as they will not be
1245 # in the escaped_bind_names dictionary.
1246 values = parameters.pop(name)
1248 leep = self._literal_execute_expanding_parameter
1249 to_update, replacement_expr = leep(
1250 escaped_name, parameter, values
1251 )
1253 to_update_sets[escaped_name] = to_update
1254 replacement_expressions[escaped_name] = replacement_expr
1256 if not parameter.literal_execute:
1257 parameters.update(to_update)
1258 if parameter.type._is_tuple_type:
1259 new_processors.update(
1260 (
1261 "%s_%s_%s" % (name, i, j),
1262 processors[name][j - 1],
1263 )
1264 for i, tuple_element in enumerate(values, 1)
1265 for j, value in enumerate(tuple_element, 1)
1266 if name in processors
1267 and processors[name][j - 1] is not None
1268 )
1269 else:
1270 new_processors.update(
1271 (key, processors[name])
1272 for key, value in to_update
1273 if name in processors
1274 )
1275 if self.positional:
1276 positiontup.extend(name for name, value in to_update)
1277 expanded_parameters[name] = [
1278 expand_key for expand_key, value in to_update
1279 ]
1280 elif self.positional:
1281 positiontup.append(name)
1283 def process_expanding(m):
1284 key = m.group(1)
1285 expr = replacement_expressions[key]
1287 # if POSTCOMPILE included a bind_expression, render that
1288 # around each element
1289 if m.group(2):
1290 tok = m.group(2).split("~~")
1291 be_left, be_right = tok[1], tok[3]
1292 expr = ", ".join(
1293 "%s%s%s" % (be_left, exp, be_right)
1294 for exp in expr.split(", ")
1295 )
1296 return expr
1298 statement = re.sub(
1299 self._post_compile_pattern,
1300 process_expanding,
1301 self.string,
1302 )
1304 expanded_state = ExpandedState(
1305 statement,
1306 parameters,
1307 new_processors,
1308 positiontup,
1309 expanded_parameters,
1310 )
1312 if _populate_self:
1313 # this is for the "render_postcompile" flag, which is not
1314 # otherwise used internally and is for end-user debugging and
1315 # special use cases.
1316 self.string = expanded_state.statement
1317 self._bind_processors.update(expanded_state.processors)
1318 self.positiontup = expanded_state.positiontup
1319 self.post_compile_params = frozenset()
1320 for key in expanded_state.parameter_expansion:
1321 bind = self.binds.pop(key)
1322 self.bind_names.pop(bind)
1323 for value, expanded_key in zip(
1324 bind.value, expanded_state.parameter_expansion[key]
1325 ):
1326 self.binds[expanded_key] = new_param = bind._with_value(
1327 value
1328 )
1329 self.bind_names[new_param] = expanded_key
1331 return expanded_state
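# "Expanding" IN parameters stay as __[POSTCOMPILE_...] placeholders in the
# compiled string and are only expanded per execution; rendering them inline
# needs literal_binds.  A sketch (placeholder spelling is version-dependent):
from sqlalchemy import column, select

stmt = select(column("x")).where(column("x").in_([1, 2, 3]))
print(stmt)  # ... WHERE x IN (__[POSTCOMPILE_x_1])
print(stmt.compile(compile_kwargs={"literal_binds": True}))
# ... WHERE x IN (1, 2, 3)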
1333 @util.preload_module("sqlalchemy.engine.cursor")
1334 def _create_result_map(self):
1335 """utility method used for unit tests only."""
1336 cursor = util.preloaded.engine_cursor
1337 return cursor.CursorResultMetaData._create_description_match_map(
1338 self._result_columns
1339 )
1341 @util.memoized_property
1342 def _within_exec_param_key_getter(self):
1343 getter = self._key_getters_for_crud_column[2]
1344 return getter
1346 @util.memoized_property
1347 @util.preload_module("sqlalchemy.engine.result")
1348 def _inserted_primary_key_from_lastrowid_getter(self):
1349 result = util.preloaded.engine_result
1351 param_key_getter = self._within_exec_param_key_getter
1352 table = self.statement.table
1354 getters = [
1355 (operator.methodcaller("get", param_key_getter(col), None), col)
1356 for col in table.primary_key
1357 ]
1359 autoinc_col = table._autoincrement_column
1360 if autoinc_col is not None:
1361 # apply type post processors to the lastrowid
1362 proc = autoinc_col.type._cached_result_processor(
1363 self.dialect, None
1364 )
1365 else:
1366 proc = None
1368 row_fn = result.result_tuple([col.key for col in table.primary_key])
1370 def get(lastrowid, parameters):
1371 """given cursor.lastrowid value and the parameters used for INSERT,
1372 return a "row" that represents the primary key, either by
1373 using the "lastrowid" or by extracting values from the parameters
1374 that were sent along with the INSERT.
1376 """
1377 if proc is not None:
1378 lastrowid = proc(lastrowid)
1380 if lastrowid is None:
1381 return row_fn(getter(parameters) for getter, col in getters)
1382 else:
1383 return row_fn(
1384 lastrowid if col is autoinc_col else getter(parameters)
1385 for getter, col in getters
1386 )
1388 return get
1390 @util.memoized_property
1391 @util.preload_module("sqlalchemy.engine.result")
1392 def _inserted_primary_key_from_returning_getter(self):
1393 result = util.preloaded.engine_result
1395 param_key_getter = self._within_exec_param_key_getter
1396 table = self.statement.table
1398 ret = {col: idx for idx, col in enumerate(self.returning)}
1400 getters = [
1401 (operator.itemgetter(ret[col]), True)
1402 if col in ret
1403 else (
1404 operator.methodcaller("get", param_key_getter(col), None),
1405 False,
1406 )
1407 for col in table.primary_key
1408 ]
1410 row_fn = result.result_tuple([col.key for col in table.primary_key])
1412 def get(row, parameters):
1413 return row_fn(
1414 getter(row) if use_row else getter(parameters)
1415 for getter, use_row in getters
1416 )
1418 return get
1420 def default_from(self):
1421 """Called when a SELECT statement has no froms, and no FROM clause is
1422 to be appended.
1424 Gives Oracle a chance to tack on a ``FROM DUAL`` to the string output.
1426 """
1427 return ""
1429 def visit_grouping(self, grouping, asfrom=False, **kwargs):
1430 return "(" + grouping.element._compiler_dispatch(self, **kwargs) + ")"
1432 def visit_select_statement_grouping(self, grouping, **kwargs):
1433 return "(" + grouping.element._compiler_dispatch(self, **kwargs) + ")"
1435 def visit_label_reference(
1436 self, element, within_columns_clause=False, **kwargs
1437 ):
1438 if self.stack and self.dialect.supports_simple_order_by_label:
1439 compile_state = self.stack[-1]["compile_state"]
1441 (
1442 with_cols,
1443 only_froms,
1444 only_cols,
1445 ) = compile_state._label_resolve_dict
1446 if within_columns_clause:
1447 resolve_dict = only_froms
1448 else:
1449 resolve_dict = only_cols
1451 # this can be None in the case that a _label_reference()
1452 # was subject to a replacement operation, in which case
1453 # the replacement of the Label element may have changed
1454 # to something else like a ColumnClause expression.
1455 order_by_elem = element.element._order_by_label_element
1457 if (
1458 order_by_elem is not None
1459 and order_by_elem.name in resolve_dict
1460 and order_by_elem.shares_lineage(
1461 resolve_dict[order_by_elem.name]
1462 )
1463 ):
1464 kwargs[
1465 "render_label_as_label"
1466 ] = element.element._order_by_label_element
1467 return self.process(
1468 element.element,
1469 within_columns_clause=within_columns_clause,
1470 **kwargs
1471 )
1473 def visit_textual_label_reference(
1474 self, element, within_columns_clause=False, **kwargs
1475 ):
1476 if not self.stack:
1477 # compiling the element outside of the context of a SELECT
1478 return self.process(element._text_clause)
1480 compile_state = self.stack[-1]["compile_state"]
1481 with_cols, only_froms, only_cols = compile_state._label_resolve_dict
1482 try:
1483 if within_columns_clause:
1484 col = only_froms[element.element]
1485 else:
1486 col = with_cols[element.element]
1487 except KeyError as err:
1488 coercions._no_text_coercion(
1489 element.element,
1490 extra=(
1491 "Can't resolve label reference for ORDER BY / "
1492 "GROUP BY / DISTINCT etc."
1493 ),
1494 exc_cls=exc.CompileError,
1495 err=err,
1496 )
1497 else:
1498 kwargs["render_label_as_label"] = col
1499 return self.process(
1500 col, within_columns_clause=within_columns_clause, **kwargs
1501 )
1503 def visit_label(
1504 self,
1505 label,
1506 add_to_result_map=None,
1507 within_label_clause=False,
1508 within_columns_clause=False,
1509 render_label_as_label=None,
1510 result_map_targets=(),
1511 **kw
1512 ):
1513 # only render labels within the columns clause
1514 # or ORDER BY clause of a select. dialect-specific compilers
1515 # can modify this behavior.
1516 render_label_with_as = (
1517 within_columns_clause and not within_label_clause
1518 )
1519 render_label_only = render_label_as_label is label
1521 if render_label_only or render_label_with_as:
1522 if isinstance(label.name, elements._truncated_label):
1523 labelname = self._truncated_identifier("colident", label.name)
1524 else:
1525 labelname = label.name
1527 if render_label_with_as:
1528 if add_to_result_map is not None:
1529 add_to_result_map(
1530 labelname,
1531 label.name,
1532 (label, labelname) + label._alt_names + result_map_targets,
1533 label.type,
1534 )
1535 return (
1536 label.element._compiler_dispatch(
1537 self,
1538 within_columns_clause=True,
1539 within_label_clause=True,
1540 **kw
1541 )
1542 + OPERATORS[operators.as_]
1543 + self.preparer.format_label(label, labelname)
1544 )
1545 elif render_label_only:
1546 return self.preparer.format_label(label, labelname)
1547 else:
1548 return label.element._compiler_dispatch(
1549 self, within_columns_clause=False, **kw
1550 )
1552 def _fallback_column_name(self, column):
1553 raise exc.CompileError(
1554 "Cannot compile Column object until " "its 'name' is assigned."
1555 )
1557 def visit_lambda_element(self, element, **kw):
1558 sql_element = element._resolved
1559 return self.process(sql_element, **kw)
1561 def visit_column(
1562 self,
1563 column,
1564 add_to_result_map=None,
1565 include_table=True,
1566 result_map_targets=(),
1567 **kwargs
1568 ):
1569 name = orig_name = column.name
1570 if name is None:
1571 name = self._fallback_column_name(column)
1573 is_literal = column.is_literal
1574 if not is_literal and isinstance(name, elements._truncated_label):
1575 name = self._truncated_identifier("colident", name)
1577 if add_to_result_map is not None:
1578 targets = (column, name, column.key) + result_map_targets
1579 if column._tq_label:
1580 targets += (column._tq_label,)
1582 add_to_result_map(name, orig_name, targets, column.type)
1584 if is_literal:
1585 # note we are not currently accommodating for
1586 # literal_column(quoted_name('ident', True)) here
1587 name = self.escape_literal_column(name)
1588 else:
1589 name = self.preparer.quote(name)
1590 table = column.table
1591 if table is None or not include_table or not table.named_with_column:
1592 return name
1593 else:
1594 effective_schema = self.preparer.schema_for_object(table)
1596 if effective_schema:
1597 schema_prefix = (
1598 self.preparer.quote_schema(effective_schema) + "."
1599 )
1600 else:
1601 schema_prefix = ""
1602 tablename = table.name
1603 if isinstance(tablename, elements._truncated_label):
1604 tablename = self._truncated_identifier("alias", tablename)
1606 return schema_prefix + self.preparer.quote(tablename) + "." + name
1608 def visit_collation(self, element, **kw):
1609 return self.preparer.format_collation(element.collation)
1611 def visit_fromclause(self, fromclause, **kwargs):
1612 return fromclause.name
1614 def visit_index(self, index, **kwargs):
1615 return index.name
1617 def visit_typeclause(self, typeclause, **kw):
1618 kw["type_expression"] = typeclause
1619 kw["identifier_preparer"] = self.preparer
1620 return self.dialect.type_compiler.process(typeclause.type, **kw)
1622 def post_process_text(self, text):
1623 if self.preparer._double_percents:
1624 text = text.replace("%", "%%")
1625 return text
1627 def escape_literal_column(self, text):
1628 if self.preparer._double_percents:
1629 text = text.replace("%", "%%")
1630 return text
1632 def visit_textclause(self, textclause, add_to_result_map=None, **kw):
1633 def do_bindparam(m):
1634 name = m.group(1)
1635 if name in textclause._bindparams:
1636 return self.process(textclause._bindparams[name], **kw)
1637 else:
1638 return self.bindparam_string(name, **kw)
1640 if not self.stack:
1641 self.isplaintext = True
1643 if add_to_result_map:
1644 # text() object is present in the columns clause of a
1645 # select(). Add a no-name entry to the result map so that
1646 # row[text()] produces a result
1647 add_to_result_map(None, None, (textclause,), sqltypes.NULLTYPE)
1649 # un-escape any \:params
1650 return BIND_PARAMS_ESC.sub(
1651 lambda m: m.group(1),
1652 BIND_PARAMS.sub(
1653 do_bindparam, self.post_process_text(textclause.text)
1654 ),
1655 )
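# The BIND_PARAMS / BIND_PARAMS_ESC patterns near the top of this module are
# what let text() pick up ":name" style parameters here; a brief sketch:
from sqlalchemy import text

stmt = text("SELECT * FROM t WHERE name = :name")
print(stmt.bindparams(name="alice").compile().params)  # {'name': 'alice'}
# A literal colon can be shielded from parameter detection by escaping it as
# "\:"; BIND_PARAMS_ESC strips that escape when the clause is compiled.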
1657 def visit_textual_select(
1658 self, taf, compound_index=None, asfrom=False, **kw
1659 ):
1661 toplevel = not self.stack
1662 entry = self._default_stack_entry if toplevel else self.stack[-1]
1664 new_entry = {
1665 "correlate_froms": set(),
1666 "asfrom_froms": set(),
1667 "selectable": taf,
1668 }
1669 self.stack.append(new_entry)
1671 if taf._independent_ctes:
1672 for cte in taf._independent_ctes:
1673 cte._compiler_dispatch(self, **kw)
1675 populate_result_map = (
1676 toplevel
1677 or (
1678 compound_index == 0
1679 and entry.get("need_result_map_for_compound", False)
1680 )
1681 or entry.get("need_result_map_for_nested", False)
1682 )
1684 if populate_result_map:
1685 self._ordered_columns = (
1686 self._textual_ordered_columns
1687 ) = taf.positional
1689 # enable looser result column matching when the SQL text links to
1690 # Column objects by name only
1691 self._loose_column_name_matching = not taf.positional and bool(
1692 taf.column_args
1693 )
1695 for c in taf.column_args:
1696 self.process(
1697 c,
1698 within_columns_clause=True,
1699 add_to_result_map=self._add_to_result_map,
1700 )
1702 text = self.process(taf.element, **kw)
1703 if self.ctes:
1704 nesting_level = len(self.stack) if not toplevel else None
1705 text = (
1706 self._render_cte_clause(
1707 nesting_level=nesting_level,
1708 visiting_cte=kw.get("visiting_cte"),
1709 )
1710 + text
1711 )
1713 self.stack.pop(-1)
1715 return text
1717 def visit_null(self, expr, **kw):
1718 return "NULL"
1720 def visit_true(self, expr, **kw):
1721 if self.dialect.supports_native_boolean:
1722 return "true"
1723 else:
1724 return "1"
1726 def visit_false(self, expr, **kw):
1727 if self.dialect.supports_native_boolean:
1728 return "false"
1729 else:
1730 return "0"
1732 def _generate_delimited_list(self, elements, separator, **kw):
1733 return separator.join(
1734 s
1735 for s in (c._compiler_dispatch(self, **kw) for c in elements)
1736 if s
1737 )
1739 def _generate_delimited_and_list(self, clauses, **kw):
1741 lcc, clauses = elements.BooleanClauseList._process_clauses_for_boolean(
1742 operators.and_,
1743 elements.True_._singleton,
1744 elements.False_._singleton,
1745 clauses,
1746 )
1747 if lcc == 1:
1748 return clauses[0]._compiler_dispatch(self, **kw)
1749 else:
1750 separator = OPERATORS[operators.and_]
1751 return separator.join(
1752 s
1753 for s in (c._compiler_dispatch(self, **kw) for c in clauses)
1754 if s
1755 )
1757 def visit_tuple(self, clauselist, **kw):
1758 return "(%s)" % self.visit_clauselist(clauselist, **kw)
1760 def visit_clauselist(self, clauselist, **kw):
1761 sep = clauselist.operator
1762 if sep is None:
1763 sep = " "
1764 else:
1765 sep = OPERATORS[clauselist.operator]
1767 return self._generate_delimited_list(clauselist.clauses, sep, **kw)
1769 def visit_case(self, clause, **kwargs):
1770 x = "CASE "
1771 if clause.value is not None:
1772 x += clause.value._compiler_dispatch(self, **kwargs) + " "
1773 for cond, result in clause.whens:
1774 x += (
1775 "WHEN "
1776 + cond._compiler_dispatch(self, **kwargs)
1777 + " THEN "
1778 + result._compiler_dispatch(self, **kwargs)
1779 + " "
1780 )
1781 if clause.else_ is not None:
1782 x += (
1783 "ELSE " + clause.else_._compiler_dispatch(self, **kwargs) + " "
1784 )
1785 x += "END"
1786 return x
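# For example, a searched CASE expression renders through the loop above
# roughly as shown (labels such as "x_1", "param_1" and "anon_1" are
# compiler-generated and may differ):
from sqlalchemy import case, column, select

expr = case((column("x") > 0, "positive"), else_="non-positive")
print(select(expr))
# SELECT CASE WHEN (x > :x_1) THEN :param_1 ELSE :param_2 END AS anon_1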
1788 def visit_type_coerce(self, type_coerce, **kw):
1789 return type_coerce.typed_expression._compiler_dispatch(self, **kw)
1791 def visit_cast(self, cast, **kwargs):
1792 return "CAST(%s AS %s)" % (
1793 cast.clause._compiler_dispatch(self, **kwargs),
1794 cast.typeclause._compiler_dispatch(self, **kwargs),
1795 )
1797 def _format_frame_clause(self, range_, **kw):
1799 return "%s AND %s" % (
1800 "UNBOUNDED PRECEDING"
1801 if range_[0] is elements.RANGE_UNBOUNDED
1802 else "CURRENT ROW"
1803 if range_[0] is elements.RANGE_CURRENT
1804 else "%s PRECEDING"
1805 % (self.process(elements.literal(abs(range_[0])), **kw),)
1806 if range_[0] < 0
1807 else "%s FOLLOWING"
1808 % (self.process(elements.literal(range_[0]), **kw),),
1809 "UNBOUNDED FOLLOWING"
1810 if range_[1] is elements.RANGE_UNBOUNDED
1811 else "CURRENT ROW"
1812 if range_[1] is elements.RANGE_CURRENT
1813 else "%s PRECEDING"
1814 % (self.process(elements.literal(abs(range_[1])), **kw),)
1815 if range_[1] < 0
1816 else "%s FOLLOWING"
1817 % (self.process(elements.literal(range_[1]), **kw),),
1818 )
1820 def visit_over(self, over, **kwargs):
1821 text = over.element._compiler_dispatch(self, **kwargs)
1822 if over.range_:
1823 range_ = "RANGE BETWEEN %s" % self._format_frame_clause(
1824 over.range_, **kwargs
1825 )
1826 elif over.rows:
1827 range_ = "ROWS BETWEEN %s" % self._format_frame_clause(
1828 over.rows, **kwargs
1829 )
1830 else:
1831 range_ = None
1833 return "%s OVER (%s)" % (
1834 text,
1835 " ".join(
1836 [
1837 "%s BY %s"
1838 % (word, clause._compiler_dispatch(self, **kwargs))
1839 for word, clause in (
1840 ("PARTITION", over.partition_by),
1841 ("ORDER", over.order_by),
1842 )
1843 if clause is not None and len(clause)
1844 ]
1845 + ([range_] if range_ else [])
1846 ),
1847 )
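# For example, a window function with PARTITION BY / ORDER BY and no frame
# clause comes out of visit_over() roughly as follows ("dept" and "salary"
# are made-up column names):
from sqlalchemy import column, func, select

w = func.row_number().over(partition_by=column("dept"), order_by=column("salary"))
print(select(w))
# SELECT row_number() OVER (PARTITION BY dept ORDER BY salary) AS anon_1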
1849 def visit_withingroup(self, withingroup, **kwargs):
1850 return "%s WITHIN GROUP (ORDER BY %s)" % (
1851 withingroup.element._compiler_dispatch(self, **kwargs),
1852 withingroup.order_by._compiler_dispatch(self, **kwargs),
1853 )
1855 def visit_funcfilter(self, funcfilter, **kwargs):
1856 return "%s FILTER (WHERE %s)" % (
1857 funcfilter.func._compiler_dispatch(self, **kwargs),
1858 funcfilter.criterion._compiler_dispatch(self, **kwargs),
1859 )
1861 def visit_extract(self, extract, **kwargs):
1862 field = self.extract_map.get(extract.field, extract.field)
1863 return "EXTRACT(%s FROM %s)" % (
1864 field,
1865 extract.expr._compiler_dispatch(self, **kwargs),
1866 )
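# For example, the generic extract() construct routes its field name through
# EXTRACT_MAP above (output shown approximately for the default compiler):
from sqlalchemy import column, extract, select

print(select(extract("year", column("created_at"))))
# SELECT EXTRACT(year FROM created_at) AS anon_1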
1868 def visit_scalar_function_column(self, element, **kw):
1869 compiled_fn = self.visit_function(element.fn, **kw)
1870 compiled_col = self.visit_column(element, **kw)
1871 return "(%s).%s" % (compiled_fn, compiled_col)
1873 def visit_function(self, func, add_to_result_map=None, **kwargs):
1874 if add_to_result_map is not None:
1875 add_to_result_map(func.name, func.name, (), func.type)
1877 disp = getattr(self, "visit_%s_func" % func.name.lower(), None)
1878 if disp:
1879 text = disp(func, **kwargs)
1880 else:
1881 name = FUNCTIONS.get(func._deannotate().__class__, None)
1882 if name:
1883 if func._has_args:
1884 name += "%(expr)s"
1885 else:
1886 name = func.name
1887 name = (
1888 self.preparer.quote(name)
1889 if self.preparer._requires_quotes_illegal_chars(name)
1890 or isinstance(name, elements.quoted_name)
1891 else name
1892 )
1893 name = name + "%(expr)s"
1894 text = ".".join(
1895 [
1896 (
1897 self.preparer.quote(tok)
1898 if self.preparer._requires_quotes_illegal_chars(tok)
1899 or isinstance(name, elements.quoted_name)
1900 else tok
1901 )
1902 for tok in func.packagenames
1903 ]
1904 + [name]
1905 ) % {"expr": self.function_argspec(func, **kwargs)}
1907 if func._with_ordinality:
1908 text += " WITH ORDINALITY"
1909 return text
1911 def visit_next_value_func(self, next_value, **kw):
1912 return self.visit_sequence(next_value.sequence)
1914 def visit_sequence(self, sequence, **kw):
1915 raise NotImplementedError(
1916 "Dialect '%s' does not support sequence increments."
1917 % self.dialect.name
1918 )
1920 def function_argspec(self, func, **kwargs):
1921 return func.clause_expr._compiler_dispatch(self, **kwargs)
1923 def visit_compound_select(
1924 self, cs, asfrom=False, compound_index=None, **kwargs
1925 ):
1926 toplevel = not self.stack
1928 compile_state = cs._compile_state_factory(cs, self, **kwargs)
1930 if toplevel and not self.compile_state:
1931 self.compile_state = compile_state
1933 compound_stmt = compile_state.statement
1935 entry = self._default_stack_entry if toplevel else self.stack[-1]
1936 need_result_map = toplevel or (
1937 not compound_index
1938 and entry.get("need_result_map_for_compound", False)
1939 )
1941 # indicates there is already a CompoundSelect in play
1942 if compound_index == 0:
1943 entry["select_0"] = cs
1945 self.stack.append(
1946 {
1947 "correlate_froms": entry["correlate_froms"],
1948 "asfrom_froms": entry["asfrom_froms"],
1949 "selectable": cs,
1950 "compile_state": compile_state,
1951 "need_result_map_for_compound": need_result_map,
1952 }
1953 )
1955 if compound_stmt._independent_ctes:
1956 for cte in compound_stmt._independent_ctes:
1957 cte._compiler_dispatch(self, **kwargs)
1959 keyword = self.compound_keywords.get(cs.keyword)
1961 text = (" " + keyword + " ").join(
1962 (
1963 c._compiler_dispatch(
1964 self, asfrom=asfrom, compound_index=i, **kwargs
1965 )
1966 for i, c in enumerate(cs.selects)
1967 )
1968 )
1970 kwargs["include_table"] = False
1971 text += self.group_by_clause(cs, **dict(asfrom=asfrom, **kwargs))
1972 text += self.order_by_clause(cs, **kwargs)
1973 if cs._has_row_limiting_clause:
1974 text += self._row_limit_clause(cs, **kwargs)
1976 if self.ctes:
1977 nesting_level = len(self.stack) if not toplevel else None
1978 text = (
1979 self._render_cte_clause(
1980 nesting_level=nesting_level,
1981 include_following_stack=True,
1982 visiting_cte=kwargs.get("visiting_cte"),
1983 )
1984 + text
1985 )
1987 self.stack.pop(-1)
1988 return text
1990 def _row_limit_clause(self, cs, **kwargs):
1991 if cs._fetch_clause is not None:
1992 return self.fetch_clause(cs, **kwargs)
1993 else:
1994 return self.limit_clause(cs, **kwargs)
1996 def _get_operator_dispatch(self, operator_, qualifier1, qualifier2):
1997 attrname = "visit_%s_%s%s" % (
1998 operator_.__name__,
1999 qualifier1,
2000 "_" + qualifier2 if qualifier2 else "",
2001 )
2002 return getattr(self, attrname, None)
2004 def visit_unary(
2005 self, unary, add_to_result_map=None, result_map_targets=(), **kw
2006 ):
2008 if add_to_result_map is not None:
2009 result_map_targets += (unary,)
2010 kw["add_to_result_map"] = add_to_result_map
2011 kw["result_map_targets"] = result_map_targets
2013 if unary.operator:
2014 if unary.modifier:
2015 raise exc.CompileError(
2016 "Unary expression does not support operator "
2017 "and modifier simultaneously"
2018 )
2019 disp = self._get_operator_dispatch(
2020 unary.operator, "unary", "operator"
2021 )
2022 if disp:
2023 return disp(unary, unary.operator, **kw)
2024 else:
2025 return self._generate_generic_unary_operator(
2026 unary, OPERATORS[unary.operator], **kw
2027 )
2028 elif unary.modifier:
2029 disp = self._get_operator_dispatch(
2030 unary.modifier, "unary", "modifier"
2031 )
2032 if disp:
2033 return disp(unary, unary.modifier, **kw)
2034 else:
2035 return self._generate_generic_unary_modifier(
2036 unary, OPERATORS[unary.modifier], **kw
2037 )
2038 else:
2039 raise exc.CompileError(
2040 "Unary expression has no operator or modifier"
2041 )
2043 def visit_is_true_unary_operator(self, element, operator, **kw):
2044 if (
2045 element._is_implicitly_boolean
2046 or self.dialect.supports_native_boolean
2047 ):
2048 return self.process(element.element, **kw)
2049 else:
2050 return "%s = 1" % self.process(element.element, **kw)
2052 def visit_is_false_unary_operator(self, element, operator, **kw):
2053 if (
2054 element._is_implicitly_boolean
2055 or self.dialect.supports_native_boolean
2056 ):
2057 return "NOT %s" % self.process(element.element, **kw)
2058 else:
2059 return "%s = 0" % self.process(element.element, **kw)
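# Note (a sketch of observable behavior, not original source): on a dialect
# without native booleans, a non-implicitly-boolean expression under an
# IS TRUE / IS FALSE test is emulated as a comparison, roughly "expr = 1" or
# "expr = 0"; native-boolean backends pass the expression through unchanged
# (negated with NOT for the false case).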
2061 def visit_not_match_op_binary(self, binary, operator, **kw):
2062 return "NOT %s" % self.visit_binary(
2063 binary, override_operator=operators.match_op
2064 )
2066 def visit_not_in_op_binary(self, binary, operator, **kw):
2067 # The brackets are required in the NOT IN operation because the empty
2068 # case is handled using the form "(col NOT IN (null) OR 1 = 1)".
2069 # The presence of the OR makes the brackets required.
2070 return "(%s)" % self._generate_generic_binary(
2071 binary, OPERATORS[operator], **kw
2072 )
2074 def visit_empty_set_op_expr(self, type_, expand_op):
2075 if expand_op is operators.not_in_op:
2076 if len(type_) > 1:
2077 return "(%s)) OR (1 = 1" % (
2078 ", ".join("NULL" for element in type_)
2079 )
2080 else:
2081 return "NULL) OR (1 = 1"
2082 elif expand_op is operators.in_op:
2083 if len(type_) > 1:
2084 return "(%s)) AND (1 != 1" % (
2085 ", ".join("NULL" for element in type_)
2086 )
2087 else:
2088 return "NULL) AND (1 != 1"
2089 else:
2090 return self.visit_empty_set_expr(type_)
2092 def visit_empty_set_expr(self, element_types):
2093 raise NotImplementedError(
2094 "Dialect '%s' does not support empty set expression."
2095 % self.dialect.name
2096 )
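# Illustrative sketch (an assumption about rendered output, not original
# source): the partial strings above are completed by the parentheses that
# visit_bindparam adds around expanding parameters, so an empty IN renders
# roughly as:
#
#     from sqlalchemy import column, select
#     stmt = select(column("x")).where(column("x").in_([]))
#     print(stmt.compile(compile_kwargs={"literal_binds": True}))
#     # SELECT x WHERE x IN (NULL) AND (1 != 1)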
2098 def _literal_execute_expanding_parameter_literal_binds(
2099 self, parameter, values, bind_expression_template=None
2100 ):
2102 typ_dialect_impl = parameter.type._unwrapped_dialect_impl(self.dialect)
2104 if not values:
2105 # empty IN expression. note we don't need to use
2106 # bind_expression_template here because there are no
2107 # expressions to render.
2109 if typ_dialect_impl._is_tuple_type:
2110 replacement_expression = (
2111 "VALUES " if self.dialect.tuple_in_values else ""
2112 ) + self.visit_empty_set_op_expr(
2113 parameter.type.types, parameter.expand_op
2114 )
2116 else:
2117 replacement_expression = self.visit_empty_set_op_expr(
2118 [parameter.type], parameter.expand_op
2119 )
2121 elif typ_dialect_impl._is_tuple_type or (
2122 typ_dialect_impl._isnull
2123 and isinstance(values[0], util.collections_abc.Sequence)
2124 and not isinstance(
2125 values[0], util.string_types + util.binary_types
2126 )
2127 ):
2129 if typ_dialect_impl._has_bind_expression:
2130 raise NotImplementedError(
2131 "bind_expression() on TupleType not supported with "
2132 "literal_binds"
2133 )
2135 replacement_expression = (
2136 "VALUES " if self.dialect.tuple_in_values else ""
2137 ) + ", ".join(
2138 "(%s)"
2139 % (
2140 ", ".join(
2141 self.render_literal_value(value, param_type)
2142 for value, param_type in zip(
2143 tuple_element, parameter.type.types
2144 )
2145 )
2146 )
2147 for i, tuple_element in enumerate(values)
2148 )
2149 else:
2150 if bind_expression_template:
2151 post_compile_pattern = self._post_compile_pattern
2152 m = post_compile_pattern.search(bind_expression_template)
2153 assert m and m.group(
2154 2
2155 ), "unexpected format for expanding parameter"
2157 tok = m.group(2).split("~~")
2158 be_left, be_right = tok[1], tok[3]
2159 replacement_expression = ", ".join(
2160 "%s%s%s"
2161 % (
2162 be_left,
2163 self.render_literal_value(value, parameter.type),
2164 be_right,
2165 )
2166 for value in values
2167 )
2168 else:
2169 replacement_expression = ", ".join(
2170 self.render_literal_value(value, parameter.type)
2171 for value in values
2172 )
2174 return (), replacement_expression
2176 def _literal_execute_expanding_parameter(self, name, parameter, values):
2178 if parameter.literal_execute:
2179 return self._literal_execute_expanding_parameter_literal_binds(
2180 parameter, values
2181 )
2183 typ_dialect_impl = parameter.type._unwrapped_dialect_impl(self.dialect)
2185 if not values:
2186 to_update = []
2187 if typ_dialect_impl._is_tuple_type:
2189 replacement_expression = self.visit_empty_set_op_expr(
2190 parameter.type.types, parameter.expand_op
2191 )
2192 else:
2193 replacement_expression = self.visit_empty_set_op_expr(
2194 [parameter.type], parameter.expand_op
2195 )
2197 elif typ_dialect_impl._is_tuple_type or (
2198 typ_dialect_impl._isnull
2199 and isinstance(values[0], util.collections_abc.Sequence)
2200 and not isinstance(
2201 values[0], util.string_types + util.binary_types
2202 )
2203 ):
2204 assert not typ_dialect_impl._is_array
2205 to_update = [
2206 ("%s_%s_%s" % (name, i, j), value)
2207 for i, tuple_element in enumerate(values, 1)
2208 for j, value in enumerate(tuple_element, 1)
2209 ]
2210 replacement_expression = (
2211 "VALUES " if self.dialect.tuple_in_values else ""
2212 ) + ", ".join(
2213 "(%s)"
2214 % (
2215 ", ".join(
2216 self.bindtemplate
2217 % {"name": to_update[i * len(tuple_element) + j][0]}
2218 for j, value in enumerate(tuple_element)
2219 )
2220 )
2221 for i, tuple_element in enumerate(values)
2222 )
2223 else:
2224 to_update = [
2225 ("%s_%s" % (name, i), value)
2226 for i, value in enumerate(values, 1)
2227 ]
2228 replacement_expression = ", ".join(
2229 self.bindtemplate % {"name": key} for key, value in to_update
2230 )
2232 return to_update, replacement_expression
2234 def visit_binary(
2235 self,
2236 binary,
2237 override_operator=None,
2238 eager_grouping=False,
2239 from_linter=None,
2240 lateral_from_linter=None,
2241 **kw
2242 ):
2243 if from_linter and operators.is_comparison(binary.operator):
2244 if lateral_from_linter is not None:
2245 enclosing_lateral = kw["enclosing_lateral"]
2246 lateral_from_linter.edges.update(
2247 itertools.product(
2248 binary.left._from_objects + [enclosing_lateral],
2249 binary.right._from_objects + [enclosing_lateral],
2250 )
2251 )
2252 else:
2253 from_linter.edges.update(
2254 itertools.product(
2255 binary.left._from_objects, binary.right._from_objects
2256 )
2257 )
2259 # don't allow "? = ?" to render
2260 if (
2261 self.ansi_bind_rules
2262 and isinstance(binary.left, elements.BindParameter)
2263 and isinstance(binary.right, elements.BindParameter)
2264 ):
2265 kw["literal_execute"] = True
2267 operator_ = override_operator or binary.operator
2268 disp = self._get_operator_dispatch(operator_, "binary", None)
2269 if disp:
2270 return disp(binary, operator_, **kw)
2271 else:
2272 try:
2273 opstring = OPERATORS[operator_]
2274 except KeyError as err:
2275 util.raise_(
2276 exc.UnsupportedCompilationError(self, operator_),
2277 replace_context=err,
2278 )
2279 else:
2280 return self._generate_generic_binary(
2281 binary,
2282 opstring,
2283 from_linter=from_linter,
2284 lateral_from_linter=lateral_from_linter,
2285 **kw
2286 )
2288 def visit_function_as_comparison_op_binary(self, element, operator, **kw):
2289 return self.process(element.sql_function, **kw)
2291 def visit_mod_binary(self, binary, operator, **kw):
2292 if self.preparer._double_percents:
2293 return (
2294 self.process(binary.left, **kw)
2295 + " %% "
2296 + self.process(binary.right, **kw)
2297 )
2298 else:
2299 return (
2300 self.process(binary.left, **kw)
2301 + " % "
2302 + self.process(binary.right, **kw)
2303 )
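# Note (assumption): _double_percents is set for paramstyles where "%" is the
# DBAPI escape character (pyformat/format), so modulo is written as "%%" and
# un-escaped by the driver; the default named paramstyle keeps a single "%":
#
#     from sqlalchemy import column, select
#     print(select(column("a") % 5))
#     # renders roughly: SELECT a % :a_1 AS anon_1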
2305 def visit_custom_op_binary(self, element, operator, **kw):
2306 kw["eager_grouping"] = operator.eager_grouping
2307 return self._generate_generic_binary(
2308 element,
2309 " " + self.escape_literal_column(operator.opstring) + " ",
2310 **kw
2311 )
2313 def visit_custom_op_unary_operator(self, element, operator, **kw):
2314 return self._generate_generic_unary_operator(
2315 element, self.escape_literal_column(operator.opstring) + " ", **kw
2316 )
2318 def visit_custom_op_unary_modifier(self, element, operator, **kw):
2319 return self._generate_generic_unary_modifier(
2320 element, " " + self.escape_literal_column(operator.opstring), **kw
2321 )
2323 def _generate_generic_binary(
2324 self, binary, opstring, eager_grouping=False, **kw
2325 ):
2327 _in_binary = kw.get("_in_binary", False)
2329 kw["_in_binary"] = True
2330 kw["_binary_op"] = binary.operator
2331 text = (
2332 binary.left._compiler_dispatch(
2333 self, eager_grouping=eager_grouping, **kw
2334 )
2335 + opstring
2336 + binary.right._compiler_dispatch(
2337 self, eager_grouping=eager_grouping, **kw
2338 )
2339 )
2341 if _in_binary and eager_grouping:
2342 text = "(%s)" % text
2343 return text
2345 def _generate_generic_unary_operator(self, unary, opstring, **kw):
2346 return opstring + unary.element._compiler_dispatch(self, **kw)
2348 def _generate_generic_unary_modifier(self, unary, opstring, **kw):
2349 return unary.element._compiler_dispatch(self, **kw) + opstring
2351 @util.memoized_property
2352 def _like_percent_literal(self):
2353 return elements.literal_column("'%'", type_=sqltypes.STRINGTYPE)
2355 def visit_contains_op_binary(self, binary, operator, **kw):
2356 binary = binary._clone()
2357 percent = self._like_percent_literal
2358 binary.right = percent.concat(binary.right).concat(percent)
2359 return self.visit_like_op_binary(binary, operator, **kw)
2361 def visit_not_contains_op_binary(self, binary, operator, **kw):
2362 binary = binary._clone()
2363 percent = self._like_percent_literal
2364 binary.right = percent.concat(binary.right).concat(percent)
2365 return self.visit_not_like_op_binary(binary, operator, **kw)
2367 def visit_startswith_op_binary(self, binary, operator, **kw):
2368 binary = binary._clone()
2369 percent = self._like_percent_literal
2370 binary.right = percent._rconcat(binary.right)
2371 return self.visit_like_op_binary(binary, operator, **kw)
2373 def visit_not_startswith_op_binary(self, binary, operator, **kw):
2374 binary = binary._clone()
2375 percent = self._like_percent_literal
2376 binary.right = percent._rconcat(binary.right)
2377 return self.visit_not_like_op_binary(binary, operator, **kw)
2379 def visit_endswith_op_binary(self, binary, operator, **kw):
2380 binary = binary._clone()
2381 percent = self._like_percent_literal
2382 binary.right = percent.concat(binary.right)
2383 return self.visit_like_op_binary(binary, operator, **kw)
2385 def visit_not_endswith_op_binary(self, binary, operator, **kw):
2386 binary = binary._clone()
2387 percent = self._like_percent_literal
2388 binary.right = percent.concat(binary.right)
2389 return self.visit_not_like_op_binary(binary, operator, **kw)
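# Usage sketch (illustrative, not original source): these hooks clone the
# binary expression, splice '%' literals onto the right-hand side, and then
# reuse the LIKE visitors that follow, e.g.:
#
#     from sqlalchemy import column, select
#     print(select(column("x")).where(column("x").startswith("ab")))
#     # renders roughly: SELECT x WHERE x LIKE :x_1 || '%'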
2391 def visit_like_op_binary(self, binary, operator, **kw):
2392 escape = binary.modifiers.get("escape", None)
2394 # TODO: use ternary here, not "and"/ "or"
2395 return "%s LIKE %s" % (
2396 binary.left._compiler_dispatch(self, **kw),
2397 binary.right._compiler_dispatch(self, **kw),
2398 ) + (
2399 " ESCAPE " + self.render_literal_value(escape, sqltypes.STRINGTYPE)
2400 if escape
2401 else ""
2402 )
2404 def visit_not_like_op_binary(self, binary, operator, **kw):
2405 escape = binary.modifiers.get("escape", None)
2406 return "%s NOT LIKE %s" % (
2407 binary.left._compiler_dispatch(self, **kw),
2408 binary.right._compiler_dispatch(self, **kw),
2409 ) + (
2410 " ESCAPE " + self.render_literal_value(escape, sqltypes.STRINGTYPE)
2411 if escape
2412 else ""
2413 )
2415 def visit_ilike_op_binary(self, binary, operator, **kw):
2416 escape = binary.modifiers.get("escape", None)
2417 return "lower(%s) LIKE lower(%s)" % (
2418 binary.left._compiler_dispatch(self, **kw),
2419 binary.right._compiler_dispatch(self, **kw),
2420 ) + (
2421 " ESCAPE " + self.render_literal_value(escape, sqltypes.STRINGTYPE)
2422 if escape
2423 else ""
2424 )
2426 def visit_not_ilike_op_binary(self, binary, operator, **kw):
2427 escape = binary.modifiers.get("escape", None)
2428 return "lower(%s) NOT LIKE lower(%s)" % (
2429 binary.left._compiler_dispatch(self, **kw),
2430 binary.right._compiler_dispatch(self, **kw),
2431 ) + (
2432 " ESCAPE " + self.render_literal_value(escape, sqltypes.STRINGTYPE)
2433 if escape
2434 else ""
2435 )
2437 def visit_between_op_binary(self, binary, operator, **kw):
2438 symmetric = binary.modifiers.get("symmetric", False)
2439 return self._generate_generic_binary(
2440 binary, " BETWEEN SYMMETRIC " if symmetric else " BETWEEN ", **kw
2441 )
2443 def visit_not_between_op_binary(self, binary, operator, **kw):
2444 symmetric = binary.modifiers.get("symmetric", False)
2445 return self._generate_generic_binary(
2446 binary,
2447 " NOT BETWEEN SYMMETRIC " if symmetric else " NOT BETWEEN ",
2448 **kw
2449 )
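# Usage sketch (illustrative): BETWEEN goes through the generic binary path
# with an optional SYMMETRIC keyword, e.g.:
#
#     from sqlalchemy import column, select
#     print(select(column("x")).where(column("x").between(1, 5)))
#     # renders roughly: SELECT x WHERE x BETWEEN :x_1 AND :x_2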
2451 def visit_regexp_match_op_binary(self, binary, operator, **kw):
2452 raise exc.CompileError(
2453 "%s dialect does not support regular expressions"
2454 % self.dialect.name
2455 )
2457 def visit_not_regexp_match_op_binary(self, binary, operator, **kw):
2458 raise exc.CompileError(
2459 "%s dialect does not support regular expressions"
2460 % self.dialect.name
2461 )
2463 def visit_regexp_replace_op_binary(self, binary, operator, **kw):
2464 raise exc.CompileError(
2465 "%s dialect does not support regular expression replacements"
2466 % self.dialect.name
2467 )
2469 def visit_bindparam(
2470 self,
2471 bindparam,
2472 within_columns_clause=False,
2473 literal_binds=False,
2474 skip_bind_expression=False,
2475 literal_execute=False,
2476 render_postcompile=False,
2477 **kwargs
2478 ):
2479 if not skip_bind_expression:
2480 impl = bindparam.type.dialect_impl(self.dialect)
2481 if impl._has_bind_expression:
2482 bind_expression = impl.bind_expression(bindparam)
2483 wrapped = self.process(
2484 bind_expression,
2485 skip_bind_expression=True,
2486 within_columns_clause=within_columns_clause,
2487 literal_binds=literal_binds and not bindparam.expanding,
2488 literal_execute=literal_execute,
2489 render_postcompile=render_postcompile,
2490 **kwargs
2491 )
2492 if bindparam.expanding:
2493 # for postcompile w/ expanding, move the "wrapped" part
2494 # of this expression to the inside of the placeholder

2496 m = re.match(
2497 r"^(.*)\(__\[POSTCOMPILE_(\S+?)\]\)(.*)$", wrapped
2498 )
2499 assert m, "unexpected format for expanding parameter"
2500 wrapped = "(__[POSTCOMPILE_%s~~%s~~REPL~~%s~~])" % (
2501 m.group(2),
2502 m.group(1),
2503 m.group(3),
2504 )
2506 if literal_binds:
2507 ret = self.render_literal_bindparam(
2508 bindparam,
2509 within_columns_clause=True,
2510 bind_expression_template=wrapped,
2511 **kwargs
2512 )
2513 return "(%s)" % ret
2515 return wrapped
2517 if not literal_binds:
2518 literal_execute = (
2519 literal_execute
2520 or bindparam.literal_execute
2521 or (within_columns_clause and self.ansi_bind_rules)
2522 )
2523 post_compile = literal_execute or bindparam.expanding
2524 else:
2525 post_compile = False
2527 if literal_binds:
2528 ret = self.render_literal_bindparam(
2529 bindparam, within_columns_clause=True, **kwargs
2530 )
2531 if bindparam.expanding:
2532 ret = "(%s)" % ret
2533 return ret
2535 name = self._truncate_bindparam(bindparam)
2537 if name in self.binds:
2538 existing = self.binds[name]
2539 if existing is not bindparam:
2540 if (
2541 (existing.unique or bindparam.unique)
2542 and not existing.proxy_set.intersection(
2543 bindparam.proxy_set
2544 )
2545 and not existing._cloned_set.intersection(
2546 bindparam._cloned_set
2547 )
2548 ):
2549 raise exc.CompileError(
2550 "Bind parameter '%s' conflicts with "
2551 "unique bind parameter of the same name" % name
2552 )
2553 elif existing.expanding != bindparam.expanding:
2554 raise exc.CompileError(
2555 "Can't reuse bound parameter name '%s' in both "
2556 "'expanding' (e.g. within an IN expression) and "
2557 "non-expanding contexts. If this parameter is to "
2558 "receive a list/array value, set 'expanding=True' on "
2559 "it for expressions that aren't IN, otherwise use "
2560 "a different parameter name." % (name,)
2561 )
2562 elif existing._is_crud or bindparam._is_crud:
2563 raise exc.CompileError(
2564 "bindparam() name '%s' is reserved "
2565 "for automatic usage in the VALUES or SET "
2566 "clause of this "
2567 "insert/update statement. Please use a "
2568 "name other than column name when using bindparam() "
2569 "with insert() or update() (for example, 'b_%s')."
2570 % (bindparam.key, bindparam.key)
2571 )
2573 self.binds[bindparam.key] = self.binds[name] = bindparam
2575 # if we are given a cache key that we're going to match against,
2576 # relate the bindparam here to one that is most likely present
2577 # in the "extracted params" portion of the cache key. this is used
2578 # to set up a positional mapping that is used to determine the
2579 # correct parameters for a subsequent use of this compiled with
2580 # a different set of parameter values. here, we account for
2581 # parameters that may have been cloned both before and after the cache
2582 # key was generated.
2583 ckbm = self._cache_key_bind_match
2584 if ckbm:
2585 for bp in bindparam._cloned_set:
2586 if bp.key in ckbm:
2587 cb = ckbm[bp.key]
2588 ckbm[cb].append(bindparam)
2590 if bindparam.isoutparam:
2591 self.has_out_parameters = True
2593 if post_compile:
2594 if render_postcompile:
2595 self._render_postcompile = True
2597 if literal_execute:
2598 self.literal_execute_params |= {bindparam}
2599 else:
2600 self.post_compile_params |= {bindparam}
2602 ret = self.bindparam_string(
2603 name,
2604 post_compile=post_compile,
2605 expanding=bindparam.expanding,
2606 **kwargs
2607 )
2609 if bindparam.expanding:
2610 ret = "(%s)" % ret
2611 return ret
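# Usage sketch (an assumption about stringified output, not original source):
# an expanding IN parameter is emitted as a POSTCOMPILE placeholder and only
# becomes individual bound parameters when the statement is executed (or when
# render_postcompile is requested), e.g.:
#
#     from sqlalchemy import column, select
#     print(select(column("x")).where(column("x").in_([1, 2, 3])))
#     # renders roughly: SELECT x WHERE x IN (__[POSTCOMPILE_x_1])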
2613 def render_literal_bindparam(
2614 self,
2615 bindparam,
2616 render_literal_value=NO_ARG,
2617 bind_expression_template=None,
2618 **kw
2619 ):
2620 if render_literal_value is not NO_ARG:
2621 value = render_literal_value
2622 else:
2623 if bindparam.value is None and bindparam.callable is None:
2624 op = kw.get("_binary_op", None)
2625 if op and op not in (operators.is_, operators.is_not):
2626 util.warn_limited(
2627 "Bound parameter '%s' rendering literal NULL in a SQL "
2628 "expression; comparisons to NULL should not use "
2629 "operators outside of 'is' or 'is not'",
2630 (bindparam.key,),
2631 )
2632 return self.process(sqltypes.NULLTYPE, **kw)
2633 value = bindparam.effective_value
2635 if bindparam.expanding:
2636 leep = self._literal_execute_expanding_parameter_literal_binds
2637 to_update, replacement_expr = leep(
2638 bindparam,
2639 value,
2640 bind_expression_template=bind_expression_template,
2641 )
2642 return replacement_expr
2643 else:
2644 return self.render_literal_value(value, bindparam.type)
2646 def render_literal_value(self, value, type_):
2647 """Render the value of a bind parameter as a quoted literal.
2649 This is used for statement sections that do not accept bind parameters
2650 on the target driver/database.
2652 This should be implemented by subclasses using the quoting services
2653 of the DBAPI.
2655 """
2657 processor = type_._cached_literal_processor(self.dialect)
2658 if processor:
2659 try:
2660 return processor(value)
2661 except Exception as e:
2662 util.raise_(
2663 exc.CompileError(
2664 "Could not render literal value "
2665 '"%s" '
2666 "with datatype "
2667 "%s; see parent stack trace for "
2668 "more detail."
2669 % (
2670 sql_util._repr_single_value(value),
2671 type_,
2672 )
2673 ),
2674 from_=e,
2675 )
2677 else:
2678 raise exc.CompileError(
2679 "No literal value renderer is available for literal value "
2680 '"%s" with datatype %s'
2681 % (sql_util._repr_single_value(value), type_)
2682 )
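# Usage sketch (illustrative): this is the hook behind "literal_binds"
# compilation, which inlines values using each type's literal processor, e.g.:
#
#     from sqlalchemy import column, select
#     stmt = select(column("x")).where(column("x") == "it's")
#     print(stmt.compile(compile_kwargs={"literal_binds": True}))
#     # renders roughly: SELECT x WHERE x = 'it''s'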
2684 def _truncate_bindparam(self, bindparam):
2685 if bindparam in self.bind_names:
2686 return self.bind_names[bindparam]
2688 bind_name = bindparam.key
2689 if isinstance(bind_name, elements._truncated_label):
2690 bind_name = self._truncated_identifier("bindparam", bind_name)
2692 # add to bind_names for translation
2693 self.bind_names[bindparam] = bind_name
2695 return bind_name
2697 def _truncated_identifier(self, ident_class, name):
2698 if (ident_class, name) in self.truncated_names:
2699 return self.truncated_names[(ident_class, name)]
2701 anonname = name.apply_map(self.anon_map)
2703 if len(anonname) > self.label_length - 6:
2704 counter = self.truncated_names.get(ident_class, 1)
2705 truncname = (
2706 anonname[0 : max(self.label_length - 6, 0)]
2707 + "_"
2708 + hex(counter)[2:]
2709 )
2710 self.truncated_names[ident_class] = counter + 1
2711 else:
2712 truncname = anonname
2713 self.truncated_names[(ident_class, name)] = truncname
2714 return truncname
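# Note (a sketch of the algorithm, not original source): with a dialect
# label_length of 30, an anonymized name longer than 24 characters is cut to
# its first 24 characters and suffixed with "_" plus a per-class hex counter
# ("_1", "_2", ...), keeping generated identifiers within the dialect's limit
# while remaining unique for this compilation.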
2716 def _anonymize(self, name):
2717 return name % self.anon_map
2719 def bindparam_string(
2720 self,
2721 name,
2722 positional_names=None,
2723 post_compile=False,
2724 expanding=False,
2725 escaped_from=None,
2726 **kw
2727 ):
2729 if self.positional:
2730 if positional_names is not None:
2731 positional_names.append(name)
2732 else:
2733 self.positiontup.append(name)
2734 self.positiontup_level[name] = len(self.stack)
2735 if not escaped_from:
2737 if _BIND_TRANSLATE_RE.search(name):
2738 # not quite the translate use case as we want to
2739 # also get a quick boolean if we even found
2740 # unusual characters in the name
2741 new_name = _BIND_TRANSLATE_RE.sub(
2742 lambda m: _BIND_TRANSLATE_CHARS[m.group(0)],
2743 name,
2744 )
2745 escaped_from = name
2746 name = new_name
2748 if escaped_from:
2749 if not self.escaped_bind_names:
2750 self.escaped_bind_names = {}
2751 self.escaped_bind_names[escaped_from] = name
2752 if post_compile:
2753 return "__[POSTCOMPILE_%s]" % name
2754 else:
2755 return self.bindtemplate % {"name": name}
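# Note (assumption): bind names containing characters such as "%", "(", ")",
# ":" or spaces are rewritten via _BIND_TRANSLATE_RE before being substituted
# into the bind template, and the original-to-escaped mapping is recorded in
# self.escaped_bind_names so that execution-time parameter dictionaries can be
# translated back to the caller's keys.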
2757 def visit_cte(
2758 self,
2759 cte,
2760 asfrom=False,
2761 ashint=False,
2762 fromhints=None,
2763 visiting_cte=None,
2764 from_linter=None,
2765 **kwargs
2766 ):
2767 self._init_cte_state()
2769 kwargs["visiting_cte"] = cte
2771 cte_name = cte.name
2773 if isinstance(cte_name, elements._truncated_label):
2774 cte_name = self._truncated_identifier("alias", cte_name)
2776 is_new_cte = True
2777 embedded_in_current_named_cte = False
2779 _reference_cte = cte._get_reference_cte()
2781 if _reference_cte in self.level_name_by_cte:
2782 cte_level, _ = self.level_name_by_cte[_reference_cte]
2783 assert _ == cte_name
2784 else:
2785 cte_level = len(self.stack) if cte.nesting else 1
2787 cte_level_name = (cte_level, cte_name)
2788 if cte_level_name in self.ctes_by_level_name:
2789 existing_cte = self.ctes_by_level_name[cte_level_name]
2790 embedded_in_current_named_cte = visiting_cte is existing_cte
2792 # we've generated a same-named CTE that we are enclosed in,
2793 # or this is the same CTE. just return the name.
2794 if cte is existing_cte._restates or cte is existing_cte:
2795 is_new_cte = False
2796 elif existing_cte is cte._restates:
2797 # we've generated a same-named CTE that is
2798 # enclosed in us - we take precedence, so
2799 # discard the text for the "inner".
2800 del self.ctes[existing_cte]
2802 existing_cte_reference_cte = existing_cte._get_reference_cte()
2804 # TODO: determine if these assertions are correct. they
2805 # pass for current test cases
2806 # assert existing_cte_reference_cte is _reference_cte
2807 # assert existing_cte_reference_cte is existing_cte
2809 del self.level_name_by_cte[existing_cte_reference_cte]
2810 else:
2811 # if the two CTEs are deep-copy identical, consider them
2812 # the same, **if** they are clones, that is, they came from
2813 # the ORM or other visit method
2814 if (
2815 cte._is_clone_of is not None
2816 or existing_cte._is_clone_of is not None
2817 ) and cte.compare(existing_cte):
2818 is_new_cte = False
2819 else:
2820 raise exc.CompileError(
2821 "Multiple, unrelated CTEs found with "
2822 "the same name: %r" % cte_name
2823 )
2825 if not asfrom and not is_new_cte:
2826 return None
2828 if cte._cte_alias is not None:
2829 pre_alias_cte = cte._cte_alias
2830 cte_pre_alias_name = cte._cte_alias.name
2831 if isinstance(cte_pre_alias_name, elements._truncated_label):
2832 cte_pre_alias_name = self._truncated_identifier(
2833 "alias", cte_pre_alias_name
2834 )
2835 else:
2836 pre_alias_cte = cte
2837 cte_pre_alias_name = None
2839 if is_new_cte:
2840 self.ctes_by_level_name[cte_level_name] = cte
2841 self.level_name_by_cte[_reference_cte] = cte_level_name
2843 if (
2844 "autocommit" in cte.element._execution_options
2845 and "autocommit" not in self.execution_options
2846 ):
2847 self.execution_options = self.execution_options.union(
2848 {
2849 "autocommit": cte.element._execution_options[
2850 "autocommit"
2851 ]
2852 }
2853 )
2854 if self.positional:
2855 self.cte_level[cte] = cte_level
2857 if pre_alias_cte not in self.ctes:
2858 self.visit_cte(pre_alias_cte, **kwargs)
2860 if not cte_pre_alias_name and cte not in self.ctes:
2861 if cte.recursive:
2862 self.ctes_recursive = True
2863 text = self.preparer.format_alias(cte, cte_name)
2864 if cte.recursive:
2865 if isinstance(cte.element, selectable.Select):
2866 col_source = cte.element
2867 elif isinstance(cte.element, selectable.CompoundSelect):
2868 col_source = cte.element.selects[0]
2869 else:
2870 assert False, "cte should only be against SelectBase"
2872 # TODO: can we get at the .columns_plus_names collection
2873 # that is already (or will be?) generated for the SELECT
2874 # rather than calling twice?
2875 recur_cols = [
2876 # TODO: proxy_name is not technically safe,
2877 # see test_cte->
2878 # test_with_recursive_no_name_currently_buggy. not
2879 # clear what should be done with such a case
2880 fallback_label_name or proxy_name
2881 for (
2882 _,
2883 proxy_name,
2884 fallback_label_name,
2885 c,
2886 repeated,
2887 ) in (col_source._generate_columns_plus_names(True))
2888 if not repeated
2889 ]
2891 text += "(%s)" % (
2892 ", ".join(
2893 self.preparer.format_label_name(
2894 ident, anon_map=self.anon_map
2895 )
2896 for ident in recur_cols
2897 )
2898 )
2900 if self.positional:
2901 kwargs["positional_names"] = self.cte_positional[cte] = []
2903 assert kwargs.get("subquery", False) is False
2905 if not self.stack:
2906 # toplevel, this is a stringify of the
2907 # cte directly. just compile the inner
2908 # the way alias() does.
2909 return cte.element._compiler_dispatch(
2910 self, asfrom=asfrom, **kwargs
2911 )
2912 else:
2913 prefixes = self._generate_prefixes(
2914 cte, cte._prefixes, **kwargs
2915 )
2916 inner = cte.element._compiler_dispatch(
2917 self, asfrom=True, **kwargs
2918 )
2920 text += " AS %s\n(%s)" % (prefixes, inner)
2922 if cte._suffixes:
2923 text += " " + self._generate_prefixes(
2924 cte, cte._suffixes, **kwargs
2925 )
2927 self.ctes[cte] = text
2929 if asfrom:
2930 if from_linter:
2931 from_linter.froms[cte] = cte_name
2933 if not is_new_cte and embedded_in_current_named_cte:
2934 return self.preparer.format_alias(cte, cte_name)
2936 if cte_pre_alias_name:
2937 text = self.preparer.format_alias(cte, cte_pre_alias_name)
2938 if self.preparer._requires_quotes(cte_name):
2939 cte_name = self.preparer.quote(cte_name)
2940 text += self.get_render_as_alias_suffix(cte_name)
2941 return text
2942 else:
2943 return self.preparer.format_alias(cte, cte_name)
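# Usage sketch (illustrative, not original source): a CTE referenced as a FROM
# element registers its body in self.ctes and returns just its name here; the
# WITH clause itself is prepended later by _render_cte_clause, e.g.:
#
#     from sqlalchemy import column, select
#     stuff = select(column("x")).cte("stuff")
#     print(select(stuff.c.x))
#     # renders roughly: WITH stuff AS (SELECT x) SELECT stuff.x FROM stuff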
2945 def visit_table_valued_alias(self, element, **kw):
2946 if element.joins_implicitly:
2947 kw["from_linter"] = None
2948 if element._is_lateral:
2949 return self.visit_lateral(element, **kw)
2950 else:
2951 return self.visit_alias(element, **kw)
2953 def visit_table_valued_column(self, element, **kw):
2954 return self.visit_column(element, **kw)
2956 def visit_alias(
2957 self,
2958 alias,
2959 asfrom=False,
2960 ashint=False,
2961 iscrud=False,
2962 fromhints=None,
2963 subquery=False,
2964 lateral=False,
2965 enclosing_alias=None,
2966 from_linter=None,
2967 **kwargs
2968 ):
2970 if lateral:
2971 if "enclosing_lateral" not in kwargs:
2972 # if lateral is set and enclosing_lateral is not
2973 # present, we assume we are being called directly
2974 # from visit_lateral() and we need to set enclosing_lateral.
2975 assert alias._is_lateral
2976 kwargs["enclosing_lateral"] = alias
2978 # for lateral objects, we track a second from_linter that is...
2979 # lateral! to the level above us.
2980 if (
2981 from_linter
2982 and "lateral_from_linter" not in kwargs
2983 and "enclosing_lateral" in kwargs
2984 ):
2985 kwargs["lateral_from_linter"] = from_linter
2987 if enclosing_alias is not None and enclosing_alias.element is alias:
2988 inner = alias.element._compiler_dispatch(
2989 self,
2990 asfrom=asfrom,
2991 ashint=ashint,
2992 iscrud=iscrud,
2993 fromhints=fromhints,
2994 lateral=lateral,
2995 enclosing_alias=alias,
2996 **kwargs
2997 )
2998 if subquery and (asfrom or lateral):
2999 inner = "(%s)" % (inner,)
3000 return inner
3001 else:
3002 enclosing_alias = kwargs["enclosing_alias"] = alias
3004 if asfrom or ashint:
3005 if isinstance(alias.name, elements._truncated_label):
3006 alias_name = self._truncated_identifier("alias", alias.name)
3007 else:
3008 alias_name = alias.name
3010 if ashint:
3011 return self.preparer.format_alias(alias, alias_name)
3012 elif asfrom:
3013 if from_linter:
3014 from_linter.froms[alias] = alias_name
3016 inner = alias.element._compiler_dispatch(
3017 self, asfrom=True, lateral=lateral, **kwargs
3018 )
3019 if subquery:
3020 inner = "(%s)" % (inner,)
3022 ret = inner + self.get_render_as_alias_suffix(
3023 self.preparer.format_alias(alias, alias_name)
3024 )
3026 if alias._supports_derived_columns and alias._render_derived:
3027 ret += "(%s)" % (
3028 ", ".join(
3029 "%s%s"
3030 % (
3031 self.preparer.quote(col.name),
3032 " %s"
3033 % self.dialect.type_compiler.process(
3034 col.type, **kwargs
3035 )
3036 if alias._render_derived_w_types
3037 else "",
3038 )
3039 for col in alias.c
3040 )
3041 )
3043 if fromhints and alias in fromhints:
3044 ret = self.format_from_hint_text(
3045 ret, alias, fromhints[alias], iscrud
3046 )
3048 return ret
3049 else:
3050 # note we cancel the "subquery" flag here as well
3051 return alias.element._compiler_dispatch(
3052 self, lateral=lateral, **kwargs
3053 )
3055 def visit_subquery(self, subquery, **kw):
3056 kw["subquery"] = True
3057 return self.visit_alias(subquery, **kw)
3059 def visit_lateral(self, lateral_, **kw):
3060 kw["lateral"] = True
3061 return "LATERAL %s" % self.visit_alias(lateral_, **kw)
3063 def visit_tablesample(self, tablesample, asfrom=False, **kw):
3064 text = "%s TABLESAMPLE %s" % (
3065 self.visit_alias(tablesample, asfrom=True, **kw),
3066 tablesample._get_method()._compiler_dispatch(self, **kw),
3067 )
3069 if tablesample.seed is not None:
3070 text += " REPEATABLE (%s)" % (
3071 tablesample.seed._compiler_dispatch(self, **kw)
3072 )
3074 return text
3076 def visit_values(self, element, asfrom=False, from_linter=None, **kw):
3077 kw.setdefault("literal_binds", element.literal_binds)
3078 v = "VALUES %s" % ", ".join(
3079 self.process(
3080 elements.Tuple(
3081 types=element._column_types, *elem
3082 ).self_group(),
3083 **kw
3084 )
3085 for chunk in element._data
3086 for elem in chunk
3087 )
3089 if isinstance(element.name, elements._truncated_label):
3090 name = self._truncated_identifier("values", element.name)
3091 else:
3092 name = element.name
3094 if element._is_lateral:
3095 lateral = "LATERAL "
3096 else:
3097 lateral = ""
3099 if asfrom:
3100 if from_linter:
3101 from_linter.froms[element] = (
3102 name if name is not None else "(unnamed VALUES element)"
3103 )
3105 if name:
3106 v = "%s(%s)%s (%s)" % (
3107 lateral,
3108 v,
3109 self.get_render_as_alias_suffix(self.preparer.quote(name)),
3110 (
3111 ", ".join(
3112 c._compiler_dispatch(
3113 self, include_table=False, **kw
3114 )
3115 for c in element.columns
3116 )
3117 ),
3118 )
3119 else:
3120 v = "%s(%s)" % (lateral, v)
3121 return v
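# Usage sketch (an assumption about rendered text, not original source):
#
#     from sqlalchemy import Integer, column, select, values
#     nums = values(column("n", Integer), name="nums").data([(1,), (2,)])
#     print(select(nums))
#     # renders roughly:
#     # SELECT nums.n FROM (VALUES (:param_1), (:param_2)) AS nums (n)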
3123 def get_render_as_alias_suffix(self, alias_name_text):
3124 return " AS " + alias_name_text
3126 def _add_to_result_map(self, keyname, name, objects, type_):
3127 if keyname is None or keyname == "*":
3128 self._ordered_columns = False
3129 self._ad_hoc_textual = True
3130 if type_._is_tuple_type:
3131 raise exc.CompileError(
3132 "Most backends don't support SELECTing "
3133 "from a tuple() object. If this is an ORM query, "
3134 "consider using the Bundle object."
3135 )
3136 self._result_columns.append((keyname, name, objects, type_))
3138 def _label_returning_column(
3139 self, stmt, column, column_clause_args=None, **kw
3140 ):
3141 """Render a column with necessary labels inside of a RETURNING clause.
3143 This method is provided for individual dialects in place of calling
3144 the _label_select_column method directly, so that the two use cases
3145 of RETURNING vs. SELECT can be disambiguated going forward.
3147 .. versionadded:: 1.4.21
3149 """
3150 return self._label_select_column(
3151 None,
3152 column,
3153 True,
3154 False,
3155 {} if column_clause_args is None else column_clause_args,
3156 **kw
3157 )
3159 def _label_select_column(
3160 self,
3161 select,
3162 column,
3163 populate_result_map,
3164 asfrom,
3165 column_clause_args,
3166 name=None,
3167 proxy_name=None,
3168 fallback_label_name=None,
3169 within_columns_clause=True,
3170 column_is_repeated=False,
3171 need_column_expressions=False,
3172 ):
3173 """produce labeled columns present in a select()."""
3174 impl = column.type.dialect_impl(self.dialect)
3176 if impl._has_column_expression and (
3177 need_column_expressions or populate_result_map
3178 ):
3179 col_expr = impl.column_expression(column)
3180 else:
3181 col_expr = column
3183 if populate_result_map:
3184 # pass an "add_to_result_map" callable into the compilation
3185 # of embedded columns. this collects information about the
3186 # column as it will be fetched in the result and is coordinated
3187 # with cursor.description when the query is executed.
3188 add_to_result_map = self._add_to_result_map
3190 # if the SELECT statement told us this column is a repeat,
3191 # wrap the callable with one that prevents the addition of the
3192 # targets
3193 if column_is_repeated:
3194 _add_to_result_map = add_to_result_map
3196 def add_to_result_map(keyname, name, objects, type_):
3197 _add_to_result_map(keyname, name, (), type_)
3199 # if we redefined col_expr for type expressions, wrap the
3200 # callable with one that adds the original column to the targets
3201 elif col_expr is not column:
3202 _add_to_result_map = add_to_result_map
3204 def add_to_result_map(keyname, name, objects, type_):
3205 _add_to_result_map(
3206 keyname, name, (column,) + objects, type_
3207 )
3209 else:
3210 add_to_result_map = None
3212 # this method is used by some of the dialects for RETURNING,
3213 # which has different inputs. _label_returning_column was added
3214 # as the better target for this now however for 1.4 we will keep
3215 # _label_select_column directly compatible with this use case.
3216 # these assertions right now set up the current expected inputs
3217 assert within_columns_clause, (
3218 "_label_select_column is only relevant within "
3219 "the columns clause of a SELECT or RETURNING"
3220 )
3221 if isinstance(column, elements.Label):
3222 if col_expr is not column:
3223 result_expr = _CompileLabel(
3224 col_expr, column.name, alt_names=(column.element,)
3225 )
3226 else:
3227 result_expr = col_expr
3229 elif name:
3230 # here, _columns_plus_names has determined there's an explicit
3231 # label name we need to use. this is the default for
3232 # tablenames_plus_columnnames as well as when columns are being
3233 # deduplicated on name
3235 assert (
3236 proxy_name is not None
3237 ), "proxy_name is required if 'name' is passed"
3239 result_expr = _CompileLabel(
3240 col_expr,
3241 name,
3242 alt_names=(
3243 proxy_name,
3244 # this is a hack to allow legacy result column lookups
3245 # to work as they did before; this goes away in 2.0.
3246 # TODO: this only seems to be tested indirectly
3247 # via test/orm/test_deprecations.py. should be a
3248 # resultset test for this
3249 column._tq_label,
3250 ),
3251 )
3252 else:
3253 # determine here whether this column should be rendered in
3254 # a labelled context or not, as we were given no required label
3255 # name from the caller. Here we apply heuristics based on the kind
3256 # of SQL expression involved.
3258 if col_expr is not column:
3259 # type-specific expression wrapping the given column,
3260 # so we render a label
3261 render_with_label = True
3262 elif isinstance(column, elements.ColumnClause):
3263 # table-bound column, we render its name as a label if we are
3264 # inside of a subquery only
3265 render_with_label = (
3266 asfrom
3267 and not column.is_literal
3268 and column.table is not None
3269 )
3270 elif isinstance(column, elements.TextClause):
3271 render_with_label = False
3272 elif isinstance(column, elements.UnaryExpression):
3273 render_with_label = column.wraps_column_expression or asfrom
3274 elif (
3275 # general class of expressions that don't have a SQL-column
3276 # addressable name. includes scalar selects, bind parameters,
3277 # SQL functions, others
3278 not isinstance(column, elements.NamedColumn)
3279 # deeper check that indicates there's no natural "name" to
3280 # this element, which accommodates custom SQL constructs
3281 # that might have a ".name" attribute (but aren't SQL
3282 # functions) but are not implementing this more recently added
3283 # base class. in theory the "NamedColumn" check should be
3284 # enough, however here we seek to maintain legacy behaviors
3285 # as well.
3286 and column._non_anon_label is None
3287 ):
3288 render_with_label = True
3289 else:
3290 render_with_label = False
3292 if render_with_label:
3293 if not fallback_label_name:
3294 # used by the RETURNING case right now. we generate it
3295 # here as 3rd party dialects may be calling the
3296 # _label_select_column method directly instead of the
3297 # just-added _label_returning_column method
3298 assert not column_is_repeated
3299 fallback_label_name = column._anon_name_label
3301 fallback_label_name = (
3302 elements._truncated_label(fallback_label_name)
3303 if not isinstance(
3304 fallback_label_name, elements._truncated_label
3305 )
3306 else fallback_label_name
3307 )
3309 result_expr = _CompileLabel(
3310 col_expr, fallback_label_name, alt_names=(proxy_name,)
3311 )
3312 else:
3313 result_expr = col_expr
3315 column_clause_args.update(
3316 within_columns_clause=within_columns_clause,
3317 add_to_result_map=add_to_result_map,
3318 )
3319 return result_expr._compiler_dispatch(self, **column_clause_args)
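# Sketch of the heuristics above (not original source): a table-bound column
# is labeled only when the enclosing SELECT is itself used as a FROM element,
# while expressions without a natural name (functions, scalar selects, binds)
# receive an anonymous label, e.g.:
#
#     from sqlalchemy import column, func, select, table
#     t = table("t", column("a"))
#     print(select(t.c.a, func.count()))
#     # renders roughly: SELECT t.a, count(*) AS count_1 FROM t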
3321 def format_from_hint_text(self, sqltext, table, hint, iscrud):
3322 hinttext = self.get_from_hint_text(table, hint)
3323 if hinttext:
3324 sqltext += " " + hinttext
3325 return sqltext
3327 def get_select_hint_text(self, byfroms):
3328 return None
3330 def get_from_hint_text(self, table, text):
3331 return None
3333 def get_crud_hint_text(self, table, text):
3334 return None
3336 def get_statement_hint_text(self, hint_texts):
3337 return " ".join(hint_texts)
3339 _default_stack_entry = util.immutabledict(
3340 [("correlate_froms", frozenset()), ("asfrom_froms", frozenset())]
3341 )
3343 def _display_froms_for_select(
3344 self, select_stmt, asfrom, lateral=False, **kw
3345 ):
3346 # utility method to help external dialects
3347 # get the correct from list for a select.
3348 # specifically the oracle dialect needs this feature
3349 # right now.
3350 toplevel = not self.stack
3351 entry = self._default_stack_entry if toplevel else self.stack[-1]
3353 compile_state = select_stmt._compile_state_factory(select_stmt, self)
3355 correlate_froms = entry["correlate_froms"]
3356 asfrom_froms = entry["asfrom_froms"]
3358 if asfrom and not lateral:
3359 froms = compile_state._get_display_froms(
3360 explicit_correlate_froms=correlate_froms.difference(
3361 asfrom_froms
3362 ),
3363 implicit_correlate_froms=(),
3364 )
3365 else:
3366 froms = compile_state._get_display_froms(
3367 explicit_correlate_froms=correlate_froms,
3368 implicit_correlate_froms=asfrom_froms,
3369 )
3370 return froms
3372 translate_select_structure = None
3373 """if not ``None``, should be a callable which accepts ``(select_stmt,
3374 **kw)`` and returns a select object. this is used for structural changes
3375 mostly to accommodate LIMIT/OFFSET schemes
3377 """
3379 def visit_select(
3380 self,
3381 select_stmt,
3382 asfrom=False,
3383 insert_into=False,
3384 fromhints=None,
3385 compound_index=None,
3386 select_wraps_for=None,
3387 lateral=False,
3388 from_linter=None,
3389 **kwargs
3390 ):
3391 assert select_wraps_for is None, (
3392 "SQLAlchemy 1.4 requires use of "
3393 "the translate_select_structure hook for structural "
3394 "translations of SELECT objects"
3395 )
3397 # initial setup of SELECT. the compile_state_factory may now
3398 # be creating a totally different SELECT from the one that was
3399 # passed in. for ORM use this will convert from an ORM-state
3400 # SELECT to a regular "Core" SELECT. other composed operations
3401 # such as computation of joins will be performed.
3403 kwargs["within_columns_clause"] = False
3405 compile_state = select_stmt._compile_state_factory(
3406 select_stmt, self, **kwargs
3407 )
3408 select_stmt = compile_state.statement
3410 toplevel = not self.stack
3412 if toplevel and not self.compile_state:
3413 self.compile_state = compile_state
3415 is_embedded_select = compound_index is not None or insert_into
3417 # translate step for Oracle and SQL Server, which often need to
3418 # restructure the SELECT to allow for LIMIT/OFFSET and possibly
3419 # other conditions
3420 if self.translate_select_structure:
3421 new_select_stmt = self.translate_select_structure(
3422 select_stmt, asfrom=asfrom, **kwargs
3423 )
3425 # if SELECT was restructured, maintain a link to the originals
3426 # and assemble a new compile state
3427 if new_select_stmt is not select_stmt:
3428 compile_state_wraps_for = compile_state
3429 select_wraps_for = select_stmt
3430 select_stmt = new_select_stmt
3432 compile_state = select_stmt._compile_state_factory(
3433 select_stmt, self, **kwargs
3434 )
3435 select_stmt = compile_state.statement
3437 entry = self._default_stack_entry if toplevel else self.stack[-1]
3439 populate_result_map = need_column_expressions = (
3440 toplevel
3441 or entry.get("need_result_map_for_compound", False)
3442 or entry.get("need_result_map_for_nested", False)
3443 )
3445 # indicates there is a CompoundSelect in play and we are not the
3446 # first select
3447 if compound_index:
3448 populate_result_map = False
3450 # this was first proposed as part of #3372; however, it is not
3451 # reached in current tests and could possibly be an assertion
3452 # instead.
3453 if not populate_result_map and "add_to_result_map" in kwargs:
3454 del kwargs["add_to_result_map"]
3456 froms = self._setup_select_stack(
3457 select_stmt, compile_state, entry, asfrom, lateral, compound_index
3458 )
3460 column_clause_args = kwargs.copy()
3461 column_clause_args.update(
3462 {"within_label_clause": False, "within_columns_clause": False}
3463 )
3465 text = "SELECT " # we're off to a good start !
3467 if select_stmt._hints:
3468 hint_text, byfrom = self._setup_select_hints(select_stmt)
3469 if hint_text:
3470 text += hint_text + " "
3471 else:
3472 byfrom = None
3474 if select_stmt._independent_ctes:
3475 for cte in select_stmt._independent_ctes:
3476 cte._compiler_dispatch(self, **kwargs)
3478 if select_stmt._prefixes:
3479 text += self._generate_prefixes(
3480 select_stmt, select_stmt._prefixes, **kwargs
3481 )
3483 text += self.get_select_precolumns(select_stmt, **kwargs)
3484 # the actual list of columns to print in the SELECT column list.
3485 inner_columns = [
3486 c
3487 for c in [
3488 self._label_select_column(
3489 select_stmt,
3490 column,
3491 populate_result_map,
3492 asfrom,
3493 column_clause_args,
3494 name=name,
3495 proxy_name=proxy_name,
3496 fallback_label_name=fallback_label_name,
3497 column_is_repeated=repeated,
3498 need_column_expressions=need_column_expressions,
3499 )
3500 for (
3501 name,
3502 proxy_name,
3503 fallback_label_name,
3504 column,
3505 repeated,
3506 ) in compile_state.columns_plus_names
3507 ]
3508 if c is not None
3509 ]
3511 if populate_result_map and select_wraps_for is not None:
3512 # if this select was generated from translate_select,
3513 # rewrite the targeted columns in the result map
3515 translate = dict(
3516 zip(
3517 [
3518 name
3519 for (
3520 key,
3521 proxy_name,
3522 fallback_label_name,
3523 name,
3524 repeated,
3525 ) in compile_state.columns_plus_names
3526 ],
3527 [
3528 name
3529 for (
3530 key,
3531 proxy_name,
3532 fallback_label_name,
3533 name,
3534 repeated,
3535 ) in compile_state_wraps_for.columns_plus_names
3536 ],
3537 )
3538 )
3540 self._result_columns = [
3541 (key, name, tuple(translate.get(o, o) for o in obj), type_)
3542 for key, name, obj, type_ in self._result_columns
3543 ]
3545 text = self._compose_select_body(
3546 text,
3547 select_stmt,
3548 compile_state,
3549 inner_columns,
3550 froms,
3551 byfrom,
3552 toplevel,
3553 kwargs,
3554 )
3556 if select_stmt._statement_hints:
3557 per_dialect = [
3558 ht
3559 for (dialect_name, ht) in select_stmt._statement_hints
3560 if dialect_name in ("*", self.dialect.name)
3561 ]
3562 if per_dialect:
3563 text += " " + self.get_statement_hint_text(per_dialect)
3565 # In a compound query, CTEs are shared at the compound level
3566 if self.ctes and (not is_embedded_select or toplevel):
3567 nesting_level = len(self.stack) if not toplevel else None
3568 text = (
3569 self._render_cte_clause(
3570 nesting_level=nesting_level,
3571 visiting_cte=kwargs.get("visiting_cte"),
3572 )
3573 + text
3574 )
3576 if select_stmt._suffixes:
3577 text += " " + self._generate_prefixes(
3578 select_stmt, select_stmt._suffixes, **kwargs
3579 )
3581 self.stack.pop(-1)
3583 return text
3585 def _setup_select_hints(self, select):
3586 byfrom = dict(
3587 [
3588 (
3589 from_,
3590 hinttext
3591 % {"name": from_._compiler_dispatch(self, ashint=True)},
3592 )
3593 for (from_, dialect), hinttext in select._hints.items()
3594 if dialect in ("*", self.dialect.name)
3595 ]
3596 )
3597 hint_text = self.get_select_hint_text(byfrom)
3598 return hint_text, byfrom
3600 def _setup_select_stack(
3601 self, select, compile_state, entry, asfrom, lateral, compound_index
3602 ):
3603 correlate_froms = entry["correlate_froms"]
3604 asfrom_froms = entry["asfrom_froms"]
3606 if compound_index == 0:
3607 entry["select_0"] = select
3608 elif compound_index:
3609 select_0 = entry["select_0"]
3610 numcols = len(select_0._all_selected_columns)
3612 if len(compile_state.columns_plus_names) != numcols:
3613 raise exc.CompileError(
3614 "All selectables passed to "
3615 "CompoundSelect must have identical numbers of "
3616 "columns; select #%d has %d columns, select "
3617 "#%d has %d"
3618 % (
3619 1,
3620 numcols,
3621 compound_index + 1,
3622 len(select._all_selected_columns),
3623 )
3624 )
3626 if asfrom and not lateral:
3627 froms = compile_state._get_display_froms(
3628 explicit_correlate_froms=correlate_froms.difference(
3629 asfrom_froms
3630 ),
3631 implicit_correlate_froms=(),
3632 )
3633 else:
3634 froms = compile_state._get_display_froms(
3635 explicit_correlate_froms=correlate_froms,
3636 implicit_correlate_froms=asfrom_froms,
3637 )
3639 new_correlate_froms = set(selectable._from_objects(*froms))
3640 all_correlate_froms = new_correlate_froms.union(correlate_froms)
3642 new_entry = {
3643 "asfrom_froms": new_correlate_froms,
3644 "correlate_froms": all_correlate_froms,
3645 "selectable": select,
3646 "compile_state": compile_state,
3647 }
3648 self.stack.append(new_entry)
3650 return froms
3652 def _compose_select_body(
3653 self,
3654 text,
3655 select,
3656 compile_state,
3657 inner_columns,
3658 froms,
3659 byfrom,
3660 toplevel,
3661 kwargs,
3662 ):
3663 text += ", ".join(inner_columns)
3665 if self.linting & COLLECT_CARTESIAN_PRODUCTS:
3666 from_linter = FromLinter({}, set())
3667 warn_linting = self.linting & WARN_LINTING
3668 if toplevel:
3669 self.from_linter = from_linter
3670 else:
3671 from_linter = None
3672 warn_linting = False
3674 if froms:
3675 text += " \nFROM "
3677 if select._hints:
3678 text += ", ".join(
3679 [
3680 f._compiler_dispatch(
3681 self,
3682 asfrom=True,
3683 fromhints=byfrom,
3684 from_linter=from_linter,
3685 **kwargs
3686 )
3687 for f in froms
3688 ]
3689 )
3690 else:
3691 text += ", ".join(
3692 [
3693 f._compiler_dispatch(
3694 self,
3695 asfrom=True,
3696 from_linter=from_linter,
3697 **kwargs
3698 )
3699 for f in froms
3700 ]
3701 )
3702 else:
3703 text += self.default_from()
3705 if select._where_criteria:
3706 t = self._generate_delimited_and_list(
3707 select._where_criteria, from_linter=from_linter, **kwargs
3708 )
3709 if t:
3710 text += " \nWHERE " + t
3712 if warn_linting:
3713 from_linter.warn()
3715 if select._group_by_clauses:
3716 text += self.group_by_clause(select, **kwargs)
3718 if select._having_criteria:
3719 t = self._generate_delimited_and_list(
3720 select._having_criteria, **kwargs
3721 )
3722 if t:
3723 text += " \nHAVING " + t
3725 if select._order_by_clauses:
3726 text += self.order_by_clause(select, **kwargs)
3728 if select._has_row_limiting_clause:
3729 text += self._row_limit_clause(select, **kwargs)
3731 if select._for_update_arg is not None:
3732 text += self.for_update_clause(select, **kwargs)
3734 return text
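# Note (assumption about related configuration, not original source): the
# FromLinter wired in above is what produces the "cartesian product" warning
# when an engine is created with enable_from_linting=True and a SELECT such as
# "SELECT a.id, b.id FROM a, b" has no join criteria linking its FROM elements.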
3736 def _generate_prefixes(self, stmt, prefixes, **kw):
3737 clause = " ".join(
3738 prefix._compiler_dispatch(self, **kw)
3739 for prefix, dialect_name in prefixes
3740 if dialect_name is None or dialect_name == self.dialect.name
3741 )
3742 if clause:
3743 clause += " "
3744 return clause
3746 def _render_cte_clause(
3747 self,
3748 nesting_level=None,
3749 include_following_stack=False,
3750 visiting_cte=None,
3751 ):
3752 """
3753 include_following_stack
3754 Also render the nesting CTEs on the next stack. Useful for
3755 SQL structures like UNION or INSERT that can wrap SELECT
3756 statements containing nesting CTEs.
3757 """
3758 if not self.ctes:
3759 return ""
3761 if nesting_level and nesting_level > 1:
3762 ctes = util.OrderedDict()
3763 for cte in list(self.ctes.keys()):
3764 cte_level, cte_name = self.level_name_by_cte[
3765 cte._get_reference_cte()
3766 ]
3767 is_rendered_level = cte_level == nesting_level or (
3768 include_following_stack and cte_level == nesting_level + 1
3769 )
3770 if not (cte.nesting and is_rendered_level):
3771 continue
3773 ctes[cte] = self.ctes[cte]
3775 else:
3776 ctes = self.ctes
3778 if not ctes:
3779 return ""
3780 ctes_recursive = any([cte.recursive for cte in ctes])
3782 if self.positional:
3783 self.cte_order[visiting_cte].extend(ctes)
3785 if visiting_cte is None and self.cte_order:
3786 assert self.positiontup is not None
3788 def get_nested_positional(cte):
3789 if cte in self.cte_order:
3790 children = self.cte_order.pop(cte)
3791 to_add = list(
3792 itertools.chain.from_iterable(
3793 get_nested_positional(child_cte)
3794 for child_cte in children
3795 )
3796 )
3797 if cte in self.cte_positional:
3798 return reorder_positional(
3799 self.cte_positional[cte],
3800 to_add,
3801 self.cte_level[children[0]],
3802 )
3803 else:
3804 return to_add
3805 else:
3806 return self.cte_positional.get(cte, [])
3808 def reorder_positional(pos, to_add, level):
3809 if not level:
3810 return to_add + pos
3811 index = 0
3812 for index, name in enumerate(reversed(pos)):
3813 if self.positiontup_level[name] < level: # type: ignore[index] # noqa: E501
3814 break
3815 return pos[:-index] + to_add + pos[-index:]
3817 to_add = get_nested_positional(None)
3818 self.positiontup = reorder_positional(
3819 self.positiontup, to_add, nesting_level
3820 )
3822 cte_text = self.get_cte_preamble(ctes_recursive) + " "
3823 cte_text += ", \n".join([txt for txt in ctes.values()])
3824 cte_text += "\n "
3826 if nesting_level and nesting_level > 1:
3827 for cte in list(ctes.keys()):
3828 cte_level, cte_name = self.level_name_by_cte[
3829 cte._get_reference_cte()
3830 ]
3831 del self.ctes[cte]
3832 del self.ctes_by_level_name[(cte_level, cte_name)]
3833 del self.level_name_by_cte[cte._get_reference_cte()]
3835 return cte_text
3837 def get_cte_preamble(self, recursive):
3838 if recursive:
3839 return "WITH RECURSIVE"
3840 else:
3841 return "WITH"
3843 def get_select_precolumns(self, select, **kw):
3844 """Called when building a ``SELECT`` statement, position is just
3845 before column list.
3847 """
3848 if select._distinct_on:
3849 util.warn_deprecated(
3850 "DISTINCT ON is currently supported only by the PostgreSQL "
3851 "dialect. Use of DISTINCT ON for other backends is currently "
3852 "silently ignored, however this usage is deprecated, and will "
3853 "raise CompileError in a future release for all backends "
3854 "that do not support this syntax.",
3855 version="1.4",
3856 )
3857 return "DISTINCT " if select._distinct else ""
3859 def group_by_clause(self, select, **kw):
3860 """allow dialects to customize how GROUP BY is rendered."""
3862 group_by = self._generate_delimited_list(
3863 select._group_by_clauses, OPERATORS[operators.comma_op], **kw
3864 )
3865 if group_by:
3866 return " GROUP BY " + group_by
3867 else:
3868 return ""
3870 def order_by_clause(self, select, **kw):
3871 """allow dialects to customize how ORDER BY is rendered."""
3873 order_by = self._generate_delimited_list(
3874 select._order_by_clauses, OPERATORS[operators.comma_op], **kw
3875 )
3877 if order_by:
3878 return " ORDER BY " + order_by
3879 else:
3880 return ""
3882 def for_update_clause(self, select, **kw):
3883 return " FOR UPDATE"
3885 def returning_clause(self, stmt, returning_cols):
3886 raise exc.CompileError(
3887 "RETURNING is not supported by this "
3888 "dialect's statement compiler."
3889 )
3891 def limit_clause(self, select, **kw):
3892 text = ""
3893 if select._limit_clause is not None:
3894 text += "\n LIMIT " + self.process(select._limit_clause, **kw)
3895 if select._offset_clause is not None:
3896 if select._limit_clause is None:
3897 text += "\n LIMIT -1"
3898 text += " OFFSET " + self.process(select._offset_clause, **kw)
3899 return text
3901 def fetch_clause(self, select, **kw):
3902 text = ""
3903 if select._offset_clause is not None:
3904 text += "\n OFFSET %s ROWS" % self.process(
3905 select._offset_clause, **kw
3906 )
3907 if select._fetch_clause is not None:
3908 text += "\n FETCH FIRST %s%s ROWS %s" % (
3909 self.process(select._fetch_clause, **kw),
3910 " PERCENT" if select._fetch_clause_options["percent"] else "",
3911 "WITH TIES"
3912 if select._fetch_clause_options["with_ties"]
3913 else "ONLY",
3914 )
3915 return text
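# Usage sketch (illustrative, not original source): the generic row-limiting
# renderers above produce either LIMIT/OFFSET or FETCH FIRST syntax depending
# on which was used on the Select:
#
#     from sqlalchemy import column, select
#     print(select(column("x")).limit(5).offset(10))
#     # renders roughly: SELECT x LIMIT :param_1 OFFSET :param_2
#     print(select(column("x")).fetch(5, with_ties=True))
#     # renders roughly: SELECT x FETCH FIRST :param_1 ROWS WITH TIES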
3917 def visit_table(
3918 self,
3919 table,
3920 asfrom=False,
3921 iscrud=False,
3922 ashint=False,
3923 fromhints=None,
3924 use_schema=True,
3925 from_linter=None,
3926 **kwargs
3927 ):
3928 if from_linter:
3929 from_linter.froms[table] = table.fullname
3931 if asfrom or ashint:
3932 effective_schema = self.preparer.schema_for_object(table)
3934 if use_schema and effective_schema:
3935 ret = (
3936 self.preparer.quote_schema(effective_schema)
3937 + "."
3938 + self.preparer.quote(table.name)
3939 )
3940 else:
3941 ret = self.preparer.quote(table.name)
3942 if fromhints and table in fromhints:
3943 ret = self.format_from_hint_text(
3944 ret, table, fromhints[table], iscrud
3945 )
3946 return ret
3947 else:
3948 return ""
3950 def visit_join(self, join, asfrom=False, from_linter=None, **kwargs):
3951 if from_linter:
3952 from_linter.edges.update(
3953 itertools.product(
3954 join.left._from_objects, join.right._from_objects
3955 )
3956 )
3958 if join.full:
3959 join_type = " FULL OUTER JOIN "
3960 elif join.isouter:
3961 join_type = " LEFT OUTER JOIN "
3962 else:
3963 join_type = " JOIN "
3964 return (
3965 join.left._compiler_dispatch(
3966 self, asfrom=True, from_linter=from_linter, **kwargs
3967 )
3968 + join_type
3969 + join.right._compiler_dispatch(
3970 self, asfrom=True, from_linter=from_linter, **kwargs
3971 )
3972 + " ON "
3973 # TODO: likely need asfrom=True here?
3974 + join.onclause._compiler_dispatch(
3975 self, from_linter=from_linter, **kwargs
3976 )
3977 )
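# Usage sketch (illustrative):
#
#     from sqlalchemy import column, select, table
#     a = table("a", column("id"))
#     b = table("b", column("a_id"))
#     print(select(a.c.id).select_from(a.join(b, a.c.id == b.c.a_id)))
#     # renders roughly: SELECT a.id FROM a JOIN b ON a.id = b.a_id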
3979 def _setup_crud_hints(self, stmt, table_text):
3980 dialect_hints = dict(
3981 [
3982 (table, hint_text)
3983 for (table, dialect), hint_text in stmt._hints.items()
3984 if dialect in ("*", self.dialect.name)
3985 ]
3986 )
3987 if stmt.table in dialect_hints:
3988 table_text = self.format_from_hint_text(
3989 table_text, stmt.table, dialect_hints[stmt.table], True
3990 )
3991 return dialect_hints, table_text
3993 def visit_insert(self, insert_stmt, **kw):
3995 compile_state = insert_stmt._compile_state_factory(
3996 insert_stmt, self, **kw
3997 )
3998 insert_stmt = compile_state.statement
4000 toplevel = not self.stack
4002 if toplevel:
4003 self.isinsert = True
4004 if not self.dml_compile_state:
4005 self.dml_compile_state = compile_state
4006 if not self.compile_state:
4007 self.compile_state = compile_state
4009 self.stack.append(
4010 {
4011 "correlate_froms": set(),
4012 "asfrom_froms": set(),
4013 "selectable": insert_stmt,
4014 }
4015 )
4017 crud_params = crud._get_crud_params(
4018 self, insert_stmt, compile_state, **kw
4019 )
4021 if (
4022 not crud_params
4023 and not self.dialect.supports_default_values
4024 and not self.dialect.supports_default_metavalue
4025 and not self.dialect.supports_empty_insert
4026 ):
4027 raise exc.CompileError(
4028 "The '%s' dialect with current database "
4029 "version settings does not support empty "
4030 "inserts." % self.dialect.name
4031 )
4033 if compile_state._has_multi_parameters:
4034 if not self.dialect.supports_multivalues_insert:
4035 raise exc.CompileError(
4036 "The '%s' dialect with current database "
4037 "version settings does not support "
4038 "in-place multirow inserts." % self.dialect.name
4039 )
4040 crud_params_single = crud_params[0]
4041 else:
4042 crud_params_single = crud_params
4044 preparer = self.preparer
4045 supports_default_values = self.dialect.supports_default_values
4047 text = "INSERT "
4049 if insert_stmt._prefixes:
4050 text += self._generate_prefixes(
4051 insert_stmt, insert_stmt._prefixes, **kw
4052 )
4054 text += "INTO "
4055 table_text = preparer.format_table(insert_stmt.table)
4057 if insert_stmt._hints:
4058 _, table_text = self._setup_crud_hints(insert_stmt, table_text)
4060 if insert_stmt._independent_ctes:
4061 for cte in insert_stmt._independent_ctes:
4062 cte._compiler_dispatch(self, **kw)
4064 text += table_text
4066 if crud_params_single or not supports_default_values:
4067 text += " (%s)" % ", ".join(
4068 [expr for c, expr, value in crud_params_single]
4069 )
4071 if self.returning or insert_stmt._returning:
4072 returning_clause = self.returning_clause(
4073 insert_stmt, self.returning or insert_stmt._returning
4074 )
4076 if self.returning_precedes_values:
4077 text += " " + returning_clause
4078 else:
4079 returning_clause = None
4081 if insert_stmt.select is not None:
4082 # placed here by crud.py
4083 select_text = self.process(
4084 self.stack[-1]["insert_from_select"], insert_into=True, **kw
4085 )
4087 if self.ctes and self.dialect.cte_follows_insert:
4088 nesting_level = len(self.stack) if not toplevel else None
4089 text += " %s%s" % (
4090 self._render_cte_clause(
4091 nesting_level=nesting_level,
4092 include_following_stack=True,
4093 visiting_cte=kw.get("visiting_cte"),
4094 ),
4095 select_text,
4096 )
4097 else:
4098 text += " %s" % select_text
4099 elif not crud_params and supports_default_values:
4100 text += " DEFAULT VALUES"
4101 elif compile_state._has_multi_parameters:
4102 text += " VALUES %s" % (
4103 ", ".join(
4104 "(%s)"
4105 % (", ".join(value for c, expr, value in crud_param_set))
4106 for crud_param_set in crud_params
4107 )
4108 )
4109 else:
4110 insert_single_values_expr = ", ".join(
4111 [value for c, expr, value in crud_params]
4112 )
4113 text += " VALUES (%s)" % insert_single_values_expr
4114 if toplevel:
4115 self.insert_single_values_expr = insert_single_values_expr
4117 if insert_stmt._post_values_clause is not None:
4118 post_values_clause = self.process(
4119 insert_stmt._post_values_clause, **kw
4120 )
4121 if post_values_clause:
4122 text += " " + post_values_clause
4124 if returning_clause and not self.returning_precedes_values:
4125 text += " " + returning_clause
4127 if self.ctes and not self.dialect.cte_follows_insert:
4128 nesting_level = len(self.stack) if not toplevel else None
4129 text = (
4130 self._render_cte_clause(
4131 nesting_level=nesting_level,
4132 include_following_stack=True,
4133 visiting_cte=kw.get("visiting_cte"),
4134 )
4135 + text
4136 )
4138 self.stack.pop(-1)
4140 return text
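# Illustrative sketch, not part of compiler.py: visit_insert() assembles the
# column list and the VALUES clause from the crud parameter list.  Table and
# column names are assumptions for the example.
from sqlalchemy import Column, Integer, MetaData, String, Table, insert

t = Table(
    "user_account",
    MetaData(),
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
)

# roughly: INSERT INTO user_account (name) VALUES (:name)
print(insert(t).values(name="spongebob"))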
4142 def update_limit_clause(self, update_stmt):
4143 """Provide a hook for MySQL to add LIMIT to the UPDATE"""
4144 return None
4146 def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw):
4147 """Provide a hook to override the initial table clause
4148 in an UPDATE statement.
4150 MySQL overrides this.
4152 """
4153 kw["asfrom"] = True
4154 return from_table._compiler_dispatch(self, iscrud=True, **kw)
4156 def update_from_clause(
4157 self, update_stmt, from_table, extra_froms, from_hints, **kw
4158 ):
4159 """Provide a hook to override the generation of an
4160 UPDATE..FROM clause.
4162 MySQL and MSSQL override this.
4164 """
4165 raise NotImplementedError(
4166 "This backend does not support multiple-table "
4167 "criteria within UPDATE"
4168 )
4170 def visit_update(self, update_stmt, **kw):
4171 compile_state = update_stmt._compile_state_factory(
4172 update_stmt, self, **kw
4173 )
4174 update_stmt = compile_state.statement
4176 toplevel = not self.stack
4177 if toplevel:
4178 self.isupdate = True
4179 if not self.dml_compile_state:
4180 self.dml_compile_state = compile_state
4181 if not self.compile_state:
4182 self.compile_state = compile_state
4184 extra_froms = compile_state._extra_froms
4185 is_multitable = bool(extra_froms)
4187 if is_multitable:
4188 # main table might be a JOIN
4189 main_froms = set(selectable._from_objects(update_stmt.table))
4190 render_extra_froms = [
4191 f for f in extra_froms if f not in main_froms
4192 ]
4193 correlate_froms = main_froms.union(extra_froms)
4194 else:
4195 render_extra_froms = []
4196 correlate_froms = {update_stmt.table}
4198 self.stack.append(
4199 {
4200 "correlate_froms": correlate_froms,
4201 "asfrom_froms": correlate_froms,
4202 "selectable": update_stmt,
4203 }
4204 )
4206 text = "UPDATE "
4208 if update_stmt._prefixes:
4209 text += self._generate_prefixes(
4210 update_stmt, update_stmt._prefixes, **kw
4211 )
4213 table_text = self.update_tables_clause(
4214 update_stmt, update_stmt.table, render_extra_froms, **kw
4215 )
4216 crud_params = crud._get_crud_params(
4217 self, update_stmt, compile_state, **kw
4218 )
4220 if update_stmt._hints:
4221 dialect_hints, table_text = self._setup_crud_hints(
4222 update_stmt, table_text
4223 )
4224 else:
4225 dialect_hints = None
4227 if update_stmt._independent_ctes:
4228 for cte in update_stmt._independent_ctes:
4229 cte._compiler_dispatch(self, **kw)
4231 text += table_text
4233 text += " SET "
4234 text += ", ".join(expr + "=" + value for c, expr, value in crud_params)
4236 if self.returning or update_stmt._returning:
4237 if self.returning_precedes_values:
4238 text += " " + self.returning_clause(
4239 update_stmt, self.returning or update_stmt._returning
4240 )
4242 if extra_froms:
4243 extra_from_text = self.update_from_clause(
4244 update_stmt,
4245 update_stmt.table,
4246 render_extra_froms,
4247 dialect_hints,
4248 **kw
4249 )
4250 if extra_from_text:
4251 text += " " + extra_from_text
4253 if update_stmt._where_criteria:
4254 t = self._generate_delimited_and_list(
4255 update_stmt._where_criteria, **kw
4256 )
4257 if t:
4258 text += " WHERE " + t
4260 limit_clause = self.update_limit_clause(update_stmt)
4261 if limit_clause:
4262 text += " " + limit_clause
4264 if (
4265 self.returning or update_stmt._returning
4266 ) and not self.returning_precedes_values:
4267 text += " " + self.returning_clause(
4268 update_stmt, self.returning or update_stmt._returning
4269 )
4271 if self.ctes:
4272 nesting_level = len(self.stack) if not toplevel else None
4273 text = (
4274 self._render_cte_clause(
4275 nesting_level=nesting_level,
4276 visiting_cte=kw.get("visiting_cte"),
4277 )
4278 + text
4279 )
4281 self.stack.pop(-1)
4283 return text
4285 def delete_extra_from_clause(
4286 self, update_stmt, from_table, extra_froms, from_hints, **kw
4287 ):
4288 """Provide a hook to override the generation of an
4289 DELETE..FROM clause.
4291 This can be used to implement DELETE..USING for example.
4293 MySQL and MSSQL override this.
4295 """
4296 raise NotImplementedError(
4297 "This backend does not support multiple-table "
4298 "criteria within DELETE"
4299 )
4301 def delete_table_clause(self, delete_stmt, from_table, extra_froms):
4302 return from_table._compiler_dispatch(self, asfrom=True, iscrud=True)
4304 def visit_delete(self, delete_stmt, **kw):
4305 compile_state = delete_stmt._compile_state_factory(
4306 delete_stmt, self, **kw
4307 )
4308 delete_stmt = compile_state.statement
4310 toplevel = not self.stack
4311 if toplevel:
4312 self.isdelete = True
4313 if not self.dml_compile_state:
4314 self.dml_compile_state = compile_state
4315 if not self.compile_state:
4316 self.compile_state = compile_state
4318 extra_froms = compile_state._extra_froms
4320 correlate_froms = {delete_stmt.table}.union(extra_froms)
4321 self.stack.append(
4322 {
4323 "correlate_froms": correlate_froms,
4324 "asfrom_froms": correlate_froms,
4325 "selectable": delete_stmt,
4326 }
4327 )
4329 text = "DELETE "
4331 if delete_stmt._prefixes:
4332 text += self._generate_prefixes(
4333 delete_stmt, delete_stmt._prefixes, **kw
4334 )
4336 text += "FROM "
4337 table_text = self.delete_table_clause(
4338 delete_stmt, delete_stmt.table, extra_froms
4339 )
4341 if delete_stmt._hints:
4342 dialect_hints, table_text = self._setup_crud_hints(
4343 delete_stmt, table_text
4344 )
4345 else:
4346 dialect_hints = None
4348 if delete_stmt._independent_ctes:
4349 for cte in delete_stmt._independent_ctes:
4350 cte._compiler_dispatch(self, **kw)
4352 text += table_text
4354 if delete_stmt._returning:
4355 if self.returning_precedes_values:
4356 text += " " + self.returning_clause(
4357 delete_stmt, delete_stmt._returning
4358 )
4360 if extra_froms:
4361 extra_from_text = self.delete_extra_from_clause(
4362 delete_stmt,
4363 delete_stmt.table,
4364 extra_froms,
4365 dialect_hints,
4366 **kw
4367 )
4368 if extra_from_text:
4369 text += " " + extra_from_text
4371 if delete_stmt._where_criteria:
4372 t = self._generate_delimited_and_list(
4373 delete_stmt._where_criteria, **kw
4374 )
4375 if t:
4376 text += " WHERE " + t
4378 if delete_stmt._returning and not self.returning_precedes_values:
4379 text += " " + self.returning_clause(
4380 delete_stmt, delete_stmt._returning
4381 )
4383 if self.ctes:
4384 nesting_level = len(self.stack) if not toplevel else None
4385 text = (
4386 self._render_cte_clause(
4387 nesting_level=nesting_level,
4388 visiting_cte=kw.get("visiting_cte"),
4389 )
4390 + text
4391 )
4393 self.stack.pop(-1)
4395 return text
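# Illustrative sketch, not part of compiler.py: visit_update() and
# visit_delete() share the WHERE-criteria, hint and RETURNING machinery shown
# above.  The table below is an assumption made up for the example.
from sqlalchemy import Column, Integer, MetaData, String, Table, delete, update

t = Table(
    "user_account",
    MetaData(),
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
)

# roughly: UPDATE user_account SET name=:name WHERE user_account.id = :id_1
print(update(t).where(t.c.id == 5).values(name="patrick"))

# roughly: DELETE FROM user_account WHERE user_account.id = :id_1
print(delete(t).where(t.c.id == 5))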
4397 def visit_savepoint(self, savepoint_stmt):
4398 return "SAVEPOINT %s" % self.preparer.format_savepoint(savepoint_stmt)
4400 def visit_rollback_to_savepoint(self, savepoint_stmt):
4401 return "ROLLBACK TO SAVEPOINT %s" % self.preparer.format_savepoint(
4402 savepoint_stmt
4403 )
4405 def visit_release_savepoint(self, savepoint_stmt):
4406 return "RELEASE SAVEPOINT %s" % self.preparer.format_savepoint(
4407 savepoint_stmt
4408 )
4411class StrSQLCompiler(SQLCompiler):
4412 """A :class:`.SQLCompiler` subclass which allows a small selection
4413 of non-standard SQL features to render into a string value.
4415 The :class:`.StrSQLCompiler` is invoked whenever a Core expression
4416 element is directly stringified without calling upon the
4417 :meth:`_expression.ClauseElement.compile` method.
4418 It can render a limited set
4419 of non-standard SQL constructs to assist in basic stringification;
4420 however, for more substantial custom or dialect-specific SQL constructs,
4421 it will be necessary to make use of
4422 :meth:`_expression.ClauseElement.compile`
4423 directly.
4425 .. seealso::
4427 :ref:`faq_sql_expression_string`
4429 """
4431 def _fallback_column_name(self, column):
4432 return "<name unknown>"
4434 @util.preload_module("sqlalchemy.engine.url")
4435 def visit_unsupported_compilation(self, element, err, **kw):
4436 if element.stringify_dialect != "default":
4437 url = util.preloaded.engine_url
4438 dialect = url.URL.create(element.stringify_dialect).get_dialect()()
4440 compiler = dialect.statement_compiler(dialect, None)
4441 if not isinstance(compiler, StrSQLCompiler):
4442 return compiler.process(element)
4444 return super(StrSQLCompiler, self).visit_unsupported_compilation(
4445 element, err
4446 )
4448 def visit_getitem_binary(self, binary, operator, **kw):
4449 return "%s[%s]" % (
4450 self.process(binary.left, **kw),
4451 self.process(binary.right, **kw),
4452 )
4454 def visit_json_getitem_op_binary(self, binary, operator, **kw):
4455 return self.visit_getitem_binary(binary, operator, **kw)
4457 def visit_json_path_getitem_op_binary(self, binary, operator, **kw):
4458 return self.visit_getitem_binary(binary, operator, **kw)
4460 def visit_sequence(self, seq, **kw):
4461 return "<next sequence value: %s>" % self.preparer.format_sequence(seq)
4463 def returning_clause(self, stmt, returning_cols):
4464 columns = [
4465 self._label_select_column(
4466 None, c, True, False, {}, fallback_label_name=c._non_anon_label
4467 )
4468 for c in base._select_iterables(returning_cols)
4469 ]
4471 return "RETURNING " + ", ".join(columns)
4473 def update_from_clause(
4474 self, update_stmt, from_table, extra_froms, from_hints, **kw
4475 ):
4476 kw["asfrom"] = True
4477 return "FROM " + ", ".join(
4478 t._compiler_dispatch(self, fromhints=from_hints, **kw)
4479 for t in extra_froms
4480 )
4482 def delete_extra_from_clause(
4483 self, update_stmt, from_table, extra_froms, from_hints, **kw
4484 ):
4485 kw["asfrom"] = True
4486 return ", " + ", ".join(
4487 t._compiler_dispatch(self, fromhints=from_hints, **kw)
4488 for t in extra_froms
4489 )
4491 def visit_empty_set_expr(self, type_):
4492 return "SELECT 1 WHERE 1!=1"
4494 def get_from_hint_text(self, table, text):
4495 return "[%s]" % text
4497 def visit_regexp_match_op_binary(self, binary, operator, **kw):
4498 return self._generate_generic_binary(binary, " <regexp> ", **kw)
4500 def visit_not_regexp_match_op_binary(self, binary, operator, **kw):
4501 return self._generate_generic_binary(binary, " <not regexp> ", **kw)
4503 def visit_regexp_replace_op_binary(self, binary, operator, **kw):
4504 replacement = binary.modifiers["replacement"]
4505 return "<regexp replace>(%s, %s, %s)" % (
4506 binary.left._compiler_dispatch(self, **kw),
4507 binary.right._compiler_dispatch(self, **kw),
4508 replacement._compiler_dispatch(self, **kw),
4509 )
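# Illustrative sketch, not part of compiler.py: StrSQLCompiler is what runs
# when an expression is str()'d without binding to a real dialect, so
# non-standard constructs get placeholder renderings such as "<regexp>".
from sqlalchemy import column

# roughly: name <regexp> :name_1
print(column("name").regexp_match("^spo"))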
4512class DDLCompiler(Compiled):
4513 @util.memoized_property
4514 def sql_compiler(self):
4515 return self.dialect.statement_compiler(
4516 self.dialect, None, schema_translate_map=self.schema_translate_map
4517 )
4519 @util.memoized_property
4520 def type_compiler(self):
4521 return self.dialect.type_compiler
4523 def construct_params(
4524 self, params=None, extracted_parameters=None, escape_names=True
4525 ):
4526 return None
4528 def visit_ddl(self, ddl, **kwargs):
4529 # table events can substitute table and schema name
4530 context = ddl.context
4531 if isinstance(ddl.target, schema.Table):
4532 context = context.copy()
4534 preparer = self.preparer
4535 path = preparer.format_table_seq(ddl.target)
4536 if len(path) == 1:
4537 table, sch = path[0], ""
4538 else:
4539 table, sch = path[-1], path[0]
4541 context.setdefault("table", table)
4542 context.setdefault("schema", sch)
4543 context.setdefault("fullname", preparer.format_table(ddl.target))
4545 return self.sql_compiler.post_process_text(ddl.statement % context)
4547 def visit_create_schema(self, create, **kw):
4548 schema = self.preparer.format_schema(create.element)
4549 return "CREATE SCHEMA " + schema
4551 def visit_drop_schema(self, drop, **kw):
4552 schema = self.preparer.format_schema(drop.element)
4553 text = "DROP SCHEMA " + schema
4554 if drop.cascade:
4555 text += " CASCADE"
4556 return text
4558 def visit_create_table(self, create, **kw):
4559 table = create.element
4560 preparer = self.preparer
4562 text = "\nCREATE "
4563 if table._prefixes:
4564 text += " ".join(table._prefixes) + " "
4566 text += "TABLE "
4567 if create.if_not_exists:
4568 text += "IF NOT EXISTS "
4570 text += preparer.format_table(table) + " "
4572 create_table_suffix = self.create_table_suffix(table)
4573 if create_table_suffix:
4574 text += create_table_suffix + " "
4576 text += "("
4578 separator = "\n"
4580 # if only one primary key, specify it along with the column
4581 first_pk = False
4582 for create_column in create.columns:
4583 column = create_column.element
4584 try:
4585 processed = self.process(
4586 create_column, first_pk=column.primary_key and not first_pk
4587 )
4588 if processed is not None:
4589 text += separator
4590 separator = ", \n"
4591 text += "\t" + processed
4592 if column.primary_key:
4593 first_pk = True
4594 except exc.CompileError as ce:
4595 util.raise_(
4596 exc.CompileError(
4597 util.u("(in table '%s', column '%s'): %s")
4598 % (table.description, column.name, ce.args[0])
4599 ),
4600 from_=ce,
4601 )
4603 const = self.create_table_constraints(
4604 table,
4605 _include_foreign_key_constraints=create.include_foreign_key_constraints, # noqa
4606 )
4607 if const:
4608 text += separator + "\t" + const
4610 text += "\n)%s\n\n" % self.post_create_table(table)
4611 return text
4613 def visit_create_column(self, create, first_pk=False, **kw):
4614 column = create.element
4616 if column.system:
4617 return None
4619 text = self.get_column_specification(column, first_pk=first_pk)
4620 const = " ".join(
4621 self.process(constraint) for constraint in column.constraints
4622 )
4623 if const:
4624 text += " " + const
4626 return text
4628 def create_table_constraints(
4629 self, table, _include_foreign_key_constraints=None, **kw
4630 ):
4632 # On some databases the order is significant: visit the PK first, then the
4633 # other constraints (engine.ReflectionTest.testbasic failed on FB2)
4634 constraints = []
4635 if table.primary_key:
4636 constraints.append(table.primary_key)
4638 all_fkcs = table.foreign_key_constraints
4639 if _include_foreign_key_constraints is not None:
4640 omit_fkcs = all_fkcs.difference(_include_foreign_key_constraints)
4641 else:
4642 omit_fkcs = set()
4644 constraints.extend(
4645 [
4646 c
4647 for c in table._sorted_constraints
4648 if c is not table.primary_key and c not in omit_fkcs
4649 ]
4650 )
4652 return ", \n\t".join(
4653 p
4654 for p in (
4655 self.process(constraint)
4656 for constraint in constraints
4657 if (
4658 constraint._create_rule is None
4659 or constraint._create_rule(self)
4660 )
4661 and (
4662 not self.dialect.supports_alter
4663 or not getattr(constraint, "use_alter", False)
4664 )
4665 )
4666 if p is not None
4667 )
4669 def visit_drop_table(self, drop, **kw):
4670 text = "\nDROP TABLE "
4671 if drop.if_exists:
4672 text += "IF EXISTS "
4673 return text + self.preparer.format_table(drop.element)
4675 def visit_drop_view(self, drop, **kw):
4676 return "\nDROP VIEW " + self.preparer.format_table(drop.element)
4678 def _verify_index_table(self, index):
4679 if index.table is None:
4680 raise exc.CompileError(
4681 "Index '%s' is not associated " "with any table." % index.name
4682 )
4684 def visit_create_index(
4685 self, create, include_schema=False, include_table_schema=True, **kw
4686 ):
4687 index = create.element
4688 self._verify_index_table(index)
4689 preparer = self.preparer
4690 text = "CREATE "
4691 if index.unique:
4692 text += "UNIQUE "
4693 if index.name is None:
4694 raise exc.CompileError(
4695 "CREATE INDEX requires that the index have a name"
4696 )
4698 text += "INDEX "
4699 if create.if_not_exists:
4700 text += "IF NOT EXISTS "
4702 text += "%s ON %s (%s)" % (
4703 self._prepared_index_name(index, include_schema=include_schema),
4704 preparer.format_table(
4705 index.table, use_schema=include_table_schema
4706 ),
4707 ", ".join(
4708 self.sql_compiler.process(
4709 expr, include_table=False, literal_binds=True
4710 )
4711 for expr in index.expressions
4712 ),
4713 )
4714 return text
4716 def visit_drop_index(self, drop, **kw):
4717 index = drop.element
4719 if index.name is None:
4720 raise exc.CompileError(
4721 "DROP INDEX requires that the index have a name"
4722 )
4723 text = "\nDROP INDEX "
4724 if drop.if_exists:
4725 text += "IF EXISTS "
4727 return text + self._prepared_index_name(index, include_schema=True)
4729 def _prepared_index_name(self, index, include_schema=False):
4730 if index.table is not None:
4731 effective_schema = self.preparer.schema_for_object(index.table)
4732 else:
4733 effective_schema = None
4734 if include_schema and effective_schema:
4735 schema_name = self.preparer.quote_schema(effective_schema)
4736 else:
4737 schema_name = None
4739 index_name = self.preparer.format_index(index)
4741 if schema_name:
4742 index_name = schema_name + "." + index_name
4743 return index_name
4745 def visit_add_constraint(self, create, **kw):
4746 return "ALTER TABLE %s ADD %s" % (
4747 self.preparer.format_table(create.element.table),
4748 self.process(create.element),
4749 )
4751 def visit_set_table_comment(self, create, **kw):
4752 return "COMMENT ON TABLE %s IS %s" % (
4753 self.preparer.format_table(create.element),
4754 self.sql_compiler.render_literal_value(
4755 create.element.comment, sqltypes.String()
4756 ),
4757 )
4759 def visit_drop_table_comment(self, drop, **kw):
4760 return "COMMENT ON TABLE %s IS NULL" % self.preparer.format_table(
4761 drop.element
4762 )
4764 def visit_set_column_comment(self, create, **kw):
4765 return "COMMENT ON COLUMN %s IS %s" % (
4766 self.preparer.format_column(
4767 create.element, use_table=True, use_schema=True
4768 ),
4769 self.sql_compiler.render_literal_value(
4770 create.element.comment, sqltypes.String()
4771 ),
4772 )
4774 def visit_drop_column_comment(self, drop, **kw):
4775 return "COMMENT ON COLUMN %s IS NULL" % self.preparer.format_column(
4776 drop.element, use_table=True
4777 )
4779 def get_identity_options(self, identity_options):
4780 text = []
4781 if identity_options.increment is not None:
4782 text.append("INCREMENT BY %d" % identity_options.increment)
4783 if identity_options.start is not None:
4784 text.append("START WITH %d" % identity_options.start)
4785 if identity_options.minvalue is not None:
4786 text.append("MINVALUE %d" % identity_options.minvalue)
4787 if identity_options.maxvalue is not None:
4788 text.append("MAXVALUE %d" % identity_options.maxvalue)
4789 if identity_options.nominvalue is not None:
4790 text.append("NO MINVALUE")
4791 if identity_options.nomaxvalue is not None:
4792 text.append("NO MAXVALUE")
4793 if identity_options.cache is not None:
4794 text.append("CACHE %d" % identity_options.cache)
4795 if identity_options.order is not None:
4796 text.append("ORDER" if identity_options.order else "NO ORDER")
4797 if identity_options.cycle is not None:
4798 text.append("CYCLE" if identity_options.cycle else "NO CYCLE")
4799 return " ".join(text)
4801 def visit_create_sequence(self, create, prefix=None, **kw):
4802 text = "CREATE SEQUENCE %s" % self.preparer.format_sequence(
4803 create.element
4804 )
4805 if prefix:
4806 text += prefix
4807 if create.element.start is None:
4808 create.element.start = self.dialect.default_sequence_base
4809 options = self.get_identity_options(create.element)
4810 if options:
4811 text += " " + options
4812 return text
4814 def visit_drop_sequence(self, drop, **kw):
4815 return "DROP SEQUENCE %s" % self.preparer.format_sequence(drop.element)
4817 def visit_drop_constraint(self, drop, **kw):
4818 constraint = drop.element
4819 if constraint.name is not None:
4820 formatted_name = self.preparer.format_constraint(constraint)
4821 else:
4822 formatted_name = None
4824 if formatted_name is None:
4825 raise exc.CompileError(
4826 "Can't emit DROP CONSTRAINT for constraint %r; "
4827 "it has no name" % drop.element
4828 )
4829 return "ALTER TABLE %s DROP CONSTRAINT %s%s" % (
4830 self.preparer.format_table(drop.element.table),
4831 formatted_name,
4832 drop.cascade and " CASCADE" or "",
4833 )
4835 def get_column_specification(self, column, **kwargs):
4836 colspec = (
4837 self.preparer.format_column(column)
4838 + " "
4839 + self.dialect.type_compiler.process(
4840 column.type, type_expression=column
4841 )
4842 )
4843 default = self.get_column_default_string(column)
4844 if default is not None:
4845 colspec += " DEFAULT " + default
4847 if column.computed is not None:
4848 colspec += " " + self.process(column.computed)
4850 if (
4851 column.identity is not None
4852 and self.dialect.supports_identity_columns
4853 ):
4854 colspec += " " + self.process(column.identity)
4856 if not column.nullable and (
4857 not column.identity or not self.dialect.supports_identity_columns
4858 ):
4859 colspec += " NOT NULL"
4860 return colspec
4862 def create_table_suffix(self, table):
4863 return ""
4865 def post_create_table(self, table):
4866 return ""
4868 def get_column_default_string(self, column):
4869 if isinstance(column.server_default, schema.DefaultClause):
4870 if isinstance(column.server_default.arg, util.string_types):
4871 return self.sql_compiler.render_literal_value(
4872 column.server_default.arg, sqltypes.STRINGTYPE
4873 )
4874 else:
4875 return self.sql_compiler.process(
4876 column.server_default.arg, literal_binds=True
4877 )
4878 else:
4879 return None
4881 def visit_table_or_column_check_constraint(self, constraint, **kw):
4882 if constraint.is_column_level:
4883 return self.visit_column_check_constraint(constraint)
4884 else:
4885 return self.visit_check_constraint(constraint)
4887 def visit_check_constraint(self, constraint, **kw):
4888 text = ""
4889 if constraint.name is not None:
4890 formatted_name = self.preparer.format_constraint(constraint)
4891 if formatted_name is not None:
4892 text += "CONSTRAINT %s " % formatted_name
4893 text += "CHECK (%s)" % self.sql_compiler.process(
4894 constraint.sqltext, include_table=False, literal_binds=True
4895 )
4896 text += self.define_constraint_deferrability(constraint)
4897 return text
4899 def visit_column_check_constraint(self, constraint, **kw):
4900 text = ""
4901 if constraint.name is not None:
4902 formatted_name = self.preparer.format_constraint(constraint)
4903 if formatted_name is not None:
4904 text += "CONSTRAINT %s " % formatted_name
4905 text += "CHECK (%s)" % self.sql_compiler.process(
4906 constraint.sqltext, include_table=False, literal_binds=True
4907 )
4908 text += self.define_constraint_deferrability(constraint)
4909 return text
4911 def visit_primary_key_constraint(self, constraint, **kw):
4912 if len(constraint) == 0:
4913 return ""
4914 text = ""
4915 if constraint.name is not None:
4916 formatted_name = self.preparer.format_constraint(constraint)
4917 if formatted_name is not None:
4918 text += "CONSTRAINT %s " % formatted_name
4919 text += "PRIMARY KEY "
4920 text += "(%s)" % ", ".join(
4921 self.preparer.quote(c.name)
4922 for c in (
4923 constraint.columns_autoinc_first
4924 if constraint._implicit_generated
4925 else constraint.columns
4926 )
4927 )
4928 text += self.define_constraint_deferrability(constraint)
4929 return text
4931 def visit_foreign_key_constraint(self, constraint, **kw):
4932 preparer = self.preparer
4933 text = ""
4934 if constraint.name is not None:
4935 formatted_name = self.preparer.format_constraint(constraint)
4936 if formatted_name is not None:
4937 text += "CONSTRAINT %s " % formatted_name
4938 remote_table = list(constraint.elements)[0].column.table
4939 text += "FOREIGN KEY(%s) REFERENCES %s (%s)" % (
4940 ", ".join(
4941 preparer.quote(f.parent.name) for f in constraint.elements
4942 ),
4943 self.define_constraint_remote_table(
4944 constraint, remote_table, preparer
4945 ),
4946 ", ".join(
4947 preparer.quote(f.column.name) for f in constraint.elements
4948 ),
4949 )
4950 text += self.define_constraint_match(constraint)
4951 text += self.define_constraint_cascades(constraint)
4952 text += self.define_constraint_deferrability(constraint)
4953 return text
4955 def define_constraint_remote_table(self, constraint, table, preparer):
4956 """Format the remote table clause of a CREATE CONSTRAINT clause."""
4958 return preparer.format_table(table)
4960 def visit_unique_constraint(self, constraint, **kw):
4961 if len(constraint) == 0:
4962 return ""
4963 text = ""
4964 if constraint.name is not None:
4965 formatted_name = self.preparer.format_constraint(constraint)
4966 if formatted_name is not None:
4967 text += "CONSTRAINT %s " % formatted_name
4968 text += "UNIQUE (%s)" % (
4969 ", ".join(self.preparer.quote(c.name) for c in constraint)
4970 )
4971 text += self.define_constraint_deferrability(constraint)
4972 return text
4974 def define_constraint_cascades(self, constraint):
4975 text = ""
4976 if constraint.ondelete is not None:
4977 text += " ON DELETE %s" % self.preparer.validate_sql_phrase(
4978 constraint.ondelete, FK_ON_DELETE
4979 )
4980 if constraint.onupdate is not None:
4981 text += " ON UPDATE %s" % self.preparer.validate_sql_phrase(
4982 constraint.onupdate, FK_ON_UPDATE
4983 )
4984 return text
4986 def define_constraint_deferrability(self, constraint):
4987 text = ""
4988 if constraint.deferrable is not None:
4989 if constraint.deferrable:
4990 text += " DEFERRABLE"
4991 else:
4992 text += " NOT DEFERRABLE"
4993 if constraint.initially is not None:
4994 text += " INITIALLY %s" % self.preparer.validate_sql_phrase(
4995 constraint.initially, FK_INITIALLY
4996 )
4997 return text
4999 def define_constraint_match(self, constraint):
5000 text = ""
5001 if constraint.match is not None:
5002 text += " MATCH %s" % constraint.match
5003 return text
5005 def visit_computed_column(self, generated, **kw):
5006 text = "GENERATED ALWAYS AS (%s)" % self.sql_compiler.process(
5007 generated.sqltext, include_table=False, literal_binds=True
5008 )
5009 if generated.persisted is True:
5010 text += " STORED"
5011 elif generated.persisted is False:
5012 text += " VIRTUAL"
5013 return text
5015 def visit_identity_column(self, identity, **kw):
5016 text = "GENERATED %s AS IDENTITY" % (
5017 "ALWAYS" if identity.always else "BY DEFAULT",
5018 )
5019 options = self.get_identity_options(identity)
5020 if options:
5021 text += " (%s)" % options
5022 return text
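# Illustrative sketch, not part of compiler.py: DDLCompiler drives the
# stringification of CreateTable and related DDL constructs.  Table and
# column names are assumptions for the example.
from sqlalchemy import Column, Integer, MetaData, String, Table
from sqlalchemy.schema import CreateTable

t = Table(
    "account",
    MetaData(),
    Column("id", Integer, primary_key=True),
    Column("email", String(100), nullable=False),
)

# roughly:
# CREATE TABLE account (
#     id INTEGER NOT NULL,
#     email VARCHAR(100) NOT NULL,
#     PRIMARY KEY (id)
# )
print(CreateTable(t))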
5025class GenericTypeCompiler(TypeCompiler):
5026 def visit_FLOAT(self, type_, **kw):
5027 return "FLOAT"
5029 def visit_REAL(self, type_, **kw):
5030 return "REAL"
5032 def visit_NUMERIC(self, type_, **kw):
5033 if type_.precision is None:
5034 return "NUMERIC"
5035 elif type_.scale is None:
5036 return "NUMERIC(%(precision)s)" % {"precision": type_.precision}
5037 else:
5038 return "NUMERIC(%(precision)s, %(scale)s)" % {
5039 "precision": type_.precision,
5040 "scale": type_.scale,
5041 }
5043 def visit_DECIMAL(self, type_, **kw):
5044 if type_.precision is None:
5045 return "DECIMAL"
5046 elif type_.scale is None:
5047 return "DECIMAL(%(precision)s)" % {"precision": type_.precision}
5048 else:
5049 return "DECIMAL(%(precision)s, %(scale)s)" % {
5050 "precision": type_.precision,
5051 "scale": type_.scale,
5052 }
5054 def visit_INTEGER(self, type_, **kw):
5055 return "INTEGER"
5057 def visit_SMALLINT(self, type_, **kw):
5058 return "SMALLINT"
5060 def visit_BIGINT(self, type_, **kw):
5061 return "BIGINT"
5063 def visit_TIMESTAMP(self, type_, **kw):
5064 return "TIMESTAMP"
5066 def visit_DATETIME(self, type_, **kw):
5067 return "DATETIME"
5069 def visit_DATE(self, type_, **kw):
5070 return "DATE"
5072 def visit_TIME(self, type_, **kw):
5073 return "TIME"
5075 def visit_CLOB(self, type_, **kw):
5076 return "CLOB"
5078 def visit_NCLOB(self, type_, **kw):
5079 return "NCLOB"
5081 def _render_string_type(self, type_, name):
5083 text = name
5084 if type_.length:
5085 text += "(%d)" % type_.length
5086 if type_.collation:
5087 text += ' COLLATE "%s"' % type_.collation
5088 return text
5090 def visit_CHAR(self, type_, **kw):
5091 return self._render_string_type(type_, "CHAR")
5093 def visit_NCHAR(self, type_, **kw):
5094 return self._render_string_type(type_, "NCHAR")
5096 def visit_VARCHAR(self, type_, **kw):
5097 return self._render_string_type(type_, "VARCHAR")
5099 def visit_NVARCHAR(self, type_, **kw):
5100 return self._render_string_type(type_, "NVARCHAR")
5102 def visit_TEXT(self, type_, **kw):
5103 return self._render_string_type(type_, "TEXT")
5105 def visit_BLOB(self, type_, **kw):
5106 return "BLOB"
5108 def visit_BINARY(self, type_, **kw):
5109 return "BINARY" + (type_.length and "(%d)" % type_.length or "")
5111 def visit_VARBINARY(self, type_, **kw):
5112 return "VARBINARY" + (type_.length and "(%d)" % type_.length or "")
5114 def visit_BOOLEAN(self, type_, **kw):
5115 return "BOOLEAN"
5117 def visit_large_binary(self, type_, **kw):
5118 return self.visit_BLOB(type_, **kw)
5120 def visit_boolean(self, type_, **kw):
5121 return self.visit_BOOLEAN(type_, **kw)
5123 def visit_time(self, type_, **kw):
5124 return self.visit_TIME(type_, **kw)
5126 def visit_datetime(self, type_, **kw):
5127 return self.visit_DATETIME(type_, **kw)
5129 def visit_date(self, type_, **kw):
5130 return self.visit_DATE(type_, **kw)
5132 def visit_big_integer(self, type_, **kw):
5133 return self.visit_BIGINT(type_, **kw)
5135 def visit_small_integer(self, type_, **kw):
5136 return self.visit_SMALLINT(type_, **kw)
5138 def visit_integer(self, type_, **kw):
5139 return self.visit_INTEGER(type_, **kw)
5141 def visit_real(self, type_, **kw):
5142 return self.visit_REAL(type_, **kw)
5144 def visit_float(self, type_, **kw):
5145 return self.visit_FLOAT(type_, **kw)
5147 def visit_numeric(self, type_, **kw):
5148 return self.visit_NUMERIC(type_, **kw)
5150 def visit_string(self, type_, **kw):
5151 return self.visit_VARCHAR(type_, **kw)
5153 def visit_unicode(self, type_, **kw):
5154 return self.visit_VARCHAR(type_, **kw)
5156 def visit_text(self, type_, **kw):
5157 return self.visit_TEXT(type_, **kw)
5159 def visit_unicode_text(self, type_, **kw):
5160 return self.visit_TEXT(type_, **kw)
5162 def visit_enum(self, type_, **kw):
5163 return self.visit_VARCHAR(type_, **kw)
5165 def visit_null(self, type_, **kw):
5166 raise exc.CompileError(
5167 "Can't generate DDL for %r; "
5168 "did you forget to specify a "
5169 "type on this Column?" % type_
5170 )
5172 def visit_type_decorator(self, type_, **kw):
5173 return self.process(type_.type_engine(self.dialect), **kw)
5175 def visit_user_defined(self, type_, **kw):
5176 return type_.get_col_spec(**kw)
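# Illustrative sketch, not part of compiler.py: the generic type compiler maps
# SQLAlchemy types onto standard DDL keywords; dialects override individual
# visit_* methods for their own spellings.
from sqlalchemy import Numeric, String
from sqlalchemy.engine import default

dialect = default.DefaultDialect()
print(dialect.type_compiler.process(Numeric(10, 2)))  # NUMERIC(10, 2)
print(dialect.type_compiler.process(String(30)))      # VARCHAR(30)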
5179class StrSQLTypeCompiler(GenericTypeCompiler):
5180 def process(self, type_, **kw):
5181 try:
5182 _compiler_dispatch = type_._compiler_dispatch
5183 except AttributeError:
5184 return self._visit_unknown(type_, **kw)
5185 else:
5186 return _compiler_dispatch(self, **kw)
5188 def __getattr__(self, key):
5189 if key.startswith("visit_"):
5190 return self._visit_unknown
5191 else:
5192 raise AttributeError(key)
5194 def _visit_unknown(self, type_, **kw):
5195 if type_.__class__.__name__ == type_.__class__.__name__.upper():
5196 return type_.__class__.__name__
5197 else:
5198 return repr(type_)
5200 def visit_null(self, type_, **kw):
5201 return "NULL"
5203 def visit_user_defined(self, type_, **kw):
5204 try:
5205 get_col_spec = type_.get_col_spec
5206 except AttributeError:
5207 return repr(type_)
5208 else:
5209 return get_col_spec(**kw)
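# Illustrative sketch, not part of compiler.py: StrSQLTypeCompiler never
# raises for unknown types; all-uppercase class names are rendered verbatim
# and anything else falls back to repr().  GEOMETRY is a hypothetical
# stand-in class with no SQLAlchemy dispatch information, and the dialect
# is not consulted for these simple visits.
from sqlalchemy import Integer
from sqlalchemy.sql import compiler


class GEOMETRY:
    pass


tc = compiler.StrSQLTypeCompiler(dialect=None)
print(tc.process(Integer()))    # INTEGER
print(tc.process(GEOMETRY()))   # GEOMETRY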
5212class IdentifierPreparer(object):
5214 """Handle quoting and case-folding of identifiers based on options."""
5216 reserved_words = RESERVED_WORDS
5218 legal_characters = LEGAL_CHARACTERS
5220 illegal_initial_characters = ILLEGAL_INITIAL_CHARACTERS
5222 schema_for_object = operator.attrgetter("schema")
5223 """Return the .schema attribute for an object.
5225 For the default IdentifierPreparer, the schema for an object is always
5226 the value of the ".schema" attribute. If the preparer is replaced
5227 with one that has a non-empty schema_translate_map, the value of the
5228 ".schema" attribute is rendered as a symbol that will be converted to a
5229 real schema name from the mapping post-compile.
5231 """
5233 def __init__(
5234 self,
5235 dialect,
5236 initial_quote='"',
5237 final_quote=None,
5238 escape_quote='"',
5239 quote_case_sensitive_collations=True,
5240 omit_schema=False,
5241 ):
5242 """Construct a new ``IdentifierPreparer`` object.
5244 initial_quote
5245 Character that begins a delimited identifier.
5247 final_quote
5248 Character that ends a delimited identifier. Defaults to
5249 `initial_quote`.
5251 omit_schema
5252 Prevent prepending schema name. Useful for databases that do
5253 not support schemas.
5254 """
5256 self.dialect = dialect
5257 self.initial_quote = initial_quote
5258 self.final_quote = final_quote or self.initial_quote
5259 self.escape_quote = escape_quote
5260 self.escape_to_quote = self.escape_quote * 2
5261 self.omit_schema = omit_schema
5262 self.quote_case_sensitive_collations = quote_case_sensitive_collations
5263 self._strings = {}
5264 self._double_percents = self.dialect.paramstyle in (
5265 "format",
5266 "pyformat",
5267 )
5269 def _with_schema_translate(self, schema_translate_map):
5270 prep = self.__class__.__new__(self.__class__)
5271 prep.__dict__.update(self.__dict__)
5273 def symbol_getter(obj):
5274 name = obj.schema
5275 if name in schema_translate_map and obj._use_schema_map:
5276 if name is not None and ("[" in name or "]" in name):
5277 raise exc.CompileError(
5278 "Square bracket characters ([]) not supported "
5279 "in schema translate name '%s'" % name
5280 )
5281 return quoted_name(
5282 "__[SCHEMA_%s]" % (name or "_none"), quote=False
5283 )
5284 else:
5285 return obj.schema
5287 prep.schema_for_object = symbol_getter
5288 return prep
5290 def _render_schema_translates(self, statement, schema_translate_map):
5291 d = schema_translate_map
5292 if None in d:
5293 d["_none"] = d[None]
5295 def replace(m):
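# group(2) is the symbol name captured from the "__[SCHEMA_<name>]" token
# that symbol_getter() embedded into the compiled string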
5296 name = m.group(2)
5297 effective_schema = d[name]
5298 if not effective_schema:
5299 effective_schema = self.dialect.default_schema_name
5300 if not effective_schema:
5301 # TODO: no coverage here
5302 raise exc.CompileError(
5303 "Dialect has no default schema name; can't "
5304 "use None as dynamic schema target."
5305 )
5306 return self.quote_schema(effective_schema)
5308 return re.sub(r"(__\[SCHEMA_([^\]]+)\])", replace, statement)
5310 def _escape_identifier(self, value):
5311 """Escape an identifier.
5313 Subclasses should override this to provide database-dependent
5314 escaping behavior.
5315 """
5317 value = value.replace(self.escape_quote, self.escape_to_quote)
5318 if self._double_percents:
5319 value = value.replace("%", "%%")
5320 return value
5322 def _unescape_identifier(self, value):
5323 """Canonicalize an escaped identifier.
5325 Subclasses should override this to provide database-dependent
5326 unescaping behavior that reverses _escape_identifier.
5327 """
5329 return value.replace(self.escape_to_quote, self.escape_quote)
5331 def validate_sql_phrase(self, element, reg):
5332 """keyword sequence filter.
5334 a filter for elements that are intended to represent keyword sequences,
5335 such as "INITIALLY", "INITIALLY DEFERRED", etc. no special characters
5336 should be present.
5338 .. versionadded:: 1.3
5340 """
5342 if element is not None and not reg.match(element):
5343 raise exc.CompileError(
5344 "Unexpected SQL phrase: %r (matching against %r)"
5345 % (element, reg.pattern)
5346 )
5347 return element
5349 def quote_identifier(self, value):
5350 """Quote an identifier.
5352 Subclasses should override this to provide database-dependent
5353 quoting behavior.
5354 """
5356 return (
5357 self.initial_quote
5358 + self._escape_identifier(value)
5359 + self.final_quote
5360 )
5362 def _requires_quotes(self, value):
5363 """Return True if the given identifier requires quoting."""
5364 lc_value = value.lower()
5365 return (
5366 lc_value in self.reserved_words
5367 or value[0] in self.illegal_initial_characters
5368 or not self.legal_characters.match(util.text_type(value))
5369 or (lc_value != value)
5370 )
5372 def _requires_quotes_illegal_chars(self, value):
5373 """Return True if the given identifier requires quoting, but
5374 not taking case convention into account."""
5375 return not self.legal_characters.match(util.text_type(value))
5377 def quote_schema(self, schema, force=None):
5378 """Conditionally quote a schema name.
5381 The name is quoted if it is a reserved word, contains quote-necessary
5382 characters, or is an instance of :class:`.quoted_name` which includes
5383 ``quote`` set to ``True``.
5385 Subclasses can override this to provide database-dependent
5386 quoting behavior for schema names.
5388 :param schema: string schema name
5389 :param force: unused
5391 .. deprecated:: 0.9
5393 The :paramref:`.IdentifierPreparer.quote_schema.force`
5394 parameter is deprecated and will be removed in a future
5395 release. This flag has no effect on the behavior of the
5396 :meth:`.IdentifierPreparer.quote` method; please refer to
5397 :class:`.quoted_name`.
5399 """
5400 if force is not None:
5401 # not using the util.deprecated_params() decorator in this
5402 # case because of the additional function call overhead on this
5403 # very performance-critical spot.
5404 util.warn_deprecated(
5405 "The IdentifierPreparer.quote_schema.force parameter is "
5406 "deprecated and will be removed in a future release. This "
5407 "flag has no effect on the behavior of the "
5408 "IdentifierPreparer.quote method; please refer to "
5409 "quoted_name().",
5410 # deprecated 0.9. warning from 1.3
5411 version="0.9",
5412 )
5414 return self.quote(schema)
5416 def quote(self, ident, force=None):
5417 """Conditionally quote an identifier.
5419 The identifier is quoted if it is a reserved word, contains
5420 quote-necessary characters, or is an instance of
5421 :class:`.quoted_name` which includes ``quote`` set to ``True``.
5423 Subclasses can override this to provide database-dependent
5424 quoting behavior for identifier names.
5426 :param ident: string identifier
5427 :param force: unused
5429 .. deprecated:: 0.9
5431 The :paramref:`.IdentifierPreparer.quote.force`
5432 parameter is deprecated and will be removed in a future
5433 release. This flag has no effect on the behavior of the
5434 :meth:`.IdentifierPreparer.quote` method; please refer to
5435 :class:`.quoted_name`.
5437 """
5438 if force is not None:
5439 # not using the util.deprecated_params() decorator in this
5440 # case because of the additional function call overhead on this
5441 # very performance-critical spot.
5442 util.warn_deprecated(
5443 "The IdentifierPreparer.quote.force parameter is "
5444 "deprecated and will be removed in a future release. This "
5445 "flag has no effect on the behavior of the "
5446 "IdentifierPreparer.quote method; please refer to "
5447 "quoted_name().",
5448 # deprecated 0.9. warning from 1.3
5449 version="0.9",
5450 )
5452 force = getattr(ident, "quote", None)
5454 if force is None:
5455 if ident in self._strings:
5456 return self._strings[ident]
5457 else:
5458 if self._requires_quotes(ident):
5459 self._strings[ident] = self.quote_identifier(ident)
5460 else:
5461 self._strings[ident] = ident
5462 return self._strings[ident]
5463 elif force:
5464 return self.quote_identifier(ident)
5465 else:
5466 return ident
5468 def format_collation(self, collation_name):
5469 if self.quote_case_sensitive_collations:
5470 return self.quote(collation_name)
5471 else:
5472 return collation_name
5474 def format_sequence(self, sequence, use_schema=True):
5475 name = self.quote(sequence.name)
5477 effective_schema = self.schema_for_object(sequence)
5479 if (
5480 not self.omit_schema
5481 and use_schema
5482 and effective_schema is not None
5483 ):
5484 name = self.quote_schema(effective_schema) + "." + name
5485 return name
5487 def format_label(self, label, name=None):
5488 return self.quote(name or label.name)
5490 def format_alias(self, alias, name=None):
5491 return self.quote(name or alias.name)
5493 def format_savepoint(self, savepoint, name=None):
5494 # Running the savepoint name through quoting is unnecessary
5495 # for all known dialects. This is here to support potential
5496 # third party use cases
5497 ident = name or savepoint.ident
5498 if self._requires_quotes(ident):
5499 ident = self.quote_identifier(ident)
5500 return ident
5502 @util.preload_module("sqlalchemy.sql.naming")
5503 def format_constraint(self, constraint, _alembic_quote=True):
5504 naming = util.preloaded.sql_naming
5506 if constraint.name is elements._NONE_NAME:
5507 name = naming._constraint_name_for_table(
5508 constraint, constraint.table
5509 )
5511 if name is None:
5512 return None
5513 else:
5514 name = constraint.name
5516 if constraint.__visit_name__ == "index":
5517 return self.truncate_and_render_index_name(
5518 name, _alembic_quote=_alembic_quote
5519 )
5520 else:
5521 return self.truncate_and_render_constraint_name(
5522 name, _alembic_quote=_alembic_quote
5523 )
5525 def truncate_and_render_index_name(self, name, _alembic_quote=True):
5526 # calculate these at format time so that ad-hoc changes
5527 # to dialect.max_identifier_length etc. can be reflected
5528 # since IdentifierPreparer is long-lived
5529 max_ = (
5530 self.dialect.max_index_name_length
5531 or self.dialect.max_identifier_length
5532 )
5533 return self._truncate_and_render_maxlen_name(
5534 name, max_, _alembic_quote
5535 )
5537 def truncate_and_render_constraint_name(self, name, _alembic_quote=True):
5538 # calculate these at format time so that ad-hoc changes
5539 # to dialect.max_identifier_length etc. can be reflected
5540 # since IdentifierPreparer is long-lived
5541 max_ = (
5542 self.dialect.max_constraint_name_length
5543 or self.dialect.max_identifier_length
5544 )
5545 return self._truncate_and_render_maxlen_name(
5546 name, max_, _alembic_quote
5547 )
5549 def _truncate_and_render_maxlen_name(self, name, max_, _alembic_quote):
5550 if isinstance(name, elements._truncated_label):
5551 if len(name) > max_:
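# keep the leading portion of the name and append a short md5 suffix so
# truncated names remain distinct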
5552 name = name[0 : max_ - 8] + "_" + util.md5_hex(name)[-4:]
5553 else:
5554 self.dialect.validate_identifier(name)
5556 if not _alembic_quote:
5557 return name
5558 else:
5559 return self.quote(name)
5561 def format_index(self, index):
5562 return self.format_constraint(index)
5564 def format_table(self, table, use_schema=True, name=None):
5565 """Prepare a quoted table and schema name."""
5567 if name is None:
5568 name = table.name
5570 result = self.quote(name)
5572 effective_schema = self.schema_for_object(table)
5574 if not self.omit_schema and use_schema and effective_schema:
5575 result = self.quote_schema(effective_schema) + "." + result
5576 return result
5578 def format_schema(self, name):
5579 """Prepare a quoted schema name."""
5581 return self.quote(name)
5583 def format_label_name(
5584 self,
5585 name,
5586 anon_map=None,
5587 ):
5588 """Prepare a quoted column name."""
5590 if anon_map is not None and isinstance(
5591 name, elements._truncated_label
5592 ):
5593 name = name.apply_map(anon_map)
5595 return self.quote(name)
5597 def format_column(
5598 self,
5599 column,
5600 use_table=False,
5601 name=None,
5602 table_name=None,
5603 use_schema=False,
5604 anon_map=None,
5605 ):
5606 """Prepare a quoted column name."""
5608 if name is None:
5609 name = column.name
5611 if anon_map is not None and isinstance(
5612 name, elements._truncated_label
5613 ):
5614 name = name.apply_map(anon_map)
5616 if not getattr(column, "is_literal", False):
5617 if use_table:
5618 return (
5619 self.format_table(
5620 column.table, use_schema=use_schema, name=table_name
5621 )
5622 + "."
5623 + self.quote(name)
5624 )
5625 else:
5626 return self.quote(name)
5627 else:
5628 # literal textual elements get stuck into ColumnClause a lot,
5629 # which shouldn't get quoted
5631 if use_table:
5632 return (
5633 self.format_table(
5634 column.table, use_schema=use_schema, name=table_name
5635 )
5636 + "."
5637 + name
5638 )
5639 else:
5640 return name
5642 def format_table_seq(self, table, use_schema=True):
5643 """Format table name and schema as a tuple."""
5645 # Dialects with more levels in their fully qualified references
5646 # ('database', 'owner', etc.) could override this and return
5647 # a longer sequence.
5649 effective_schema = self.schema_for_object(table)
5651 if not self.omit_schema and use_schema and effective_schema:
5652 return (
5653 self.quote_schema(effective_schema),
5654 self.format_table(table, use_schema=False),
5655 )
5656 else:
5657 return (self.format_table(table, use_schema=False),)
5659 @util.memoized_property
5660 def _r_identifiers(self):
5661 initial, final, escaped_final = [
5662 re.escape(s)
5663 for s in (
5664 self.initial_quote,
5665 self.final_quote,
5666 self._escape_identifier(self.final_quote),
5667 )
5668 ]
5669 r = re.compile(
5670 r"(?:"
5671 r"(?:%(initial)s((?:%(escaped)s|[^%(final)s])+)%(final)s"
5672 r"|([^\.]+))(?=\.|$))+"
5673 % {"initial": initial, "final": final, "escaped": escaped_final}
5674 )
5675 return r
5677 def unformat_identifiers(self, identifiers):
5678 """Unpack 'schema.table.column'-like strings into components."""
5680 r = self._r_identifiers
5681 return [
5682 self._unescape_identifier(i)
5683 for i in [a or b for a, b in r.findall(identifiers)]
5684 ]
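# Illustrative sketch, not part of compiler.py: IdentifierPreparer quoting in
# action, using the default dialect's preparer.
from sqlalchemy.engine import default

preparer = default.DefaultDialect().identifier_preparer

print(preparer.quote("select"))        # "select"  (reserved word, so quoted)
print(preparer.quote("user_account"))  # user_account  (no quoting needed)
print(preparer.unformat_identifiers('"my schema".my_table'))
# ['my schema', 'my_table']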