1# sql/ddl.py
2# Copyright (C) 2009-2025 the SQLAlchemy authors and contributors
3# <see AUTHORS file>
4#
5# This module is part of SQLAlchemy and is released under
6# the MIT License: https://www.opensource.org/licenses/mit-license.php
7# mypy: allow-untyped-defs, allow-untyped-calls
8
9"""
10Provides the hierarchy of DDL-defining schema items as well as routines
11to invoke them for a create/drop call.
12
13"""
14from __future__ import annotations
15
16import contextlib
17from enum import auto
18from enum import Flag
19import typing
20from typing import Any
21from typing import Callable
22from typing import Generic
23from typing import Iterable
24from typing import List
25from typing import Optional
26from typing import Protocol
27from typing import Sequence as typing_Sequence
28from typing import Tuple
29from typing import TypeVar
30from typing import Union
31
32from . import roles
33from .base import _generative
34from .base import Executable
35from .base import SchemaVisitor
36from .elements import ClauseElement
37from .. import exc
38from .. import util
39from ..util import topological
40from ..util.typing import Self
41
42if typing.TYPE_CHECKING:
43 from .compiler import Compiled
44 from .compiler import DDLCompiler
45 from .elements import BindParameter
46 from .schema import Column
47 from .schema import Constraint
48 from .schema import ForeignKeyConstraint
49 from .schema import Index
50 from .schema import SchemaItem
51 from .schema import Sequence as Sequence # noqa: F401
52 from .schema import Table
53 from .selectable import TableClause
54 from ..engine.base import Connection
55 from ..engine.interfaces import CacheStats
56 from ..engine.interfaces import CompiledCacheType
57 from ..engine.interfaces import Dialect
58 from ..engine.interfaces import SchemaTranslateMapType
59
# Type variable for the element wrapped by _CreateDropBase subclasses:
# either a SchemaItem, or a plain string name for schema-level CREATE/DROP.
_SI = TypeVar("_SI", bound=Union["SchemaItem", str])
61
62
class BaseDDLElement(ClauseElement):
    """The root of DDL constructs, including those that are sub-elements
    within the "create table" and other processes.

    .. versionadded:: 2.0

    """

    # DDL constructs do not participate in the compiled cache; suppress
    # the "can't cache" warnings ClauseElement subclasses would emit.
    _hierarchy_supports_caching = False
    """disable cache warnings for all _DDLCompiles subclasses. """

    def _compiler(self, dialect, **kw):
        """Return a compiler appropriate for this ClauseElement, given a
        Dialect."""

        # DDL uses the dialect's DDL compiler rather than the statement
        # (DML / DQL) compiler.
        return dialect.ddl_compiler(dialect, self, **kw)

    def _compile_w_cache(
        self,
        dialect: Dialect,
        *,
        compiled_cache: Optional[CompiledCacheType],
        column_keys: List[str],
        for_executemany: bool = False,
        schema_translate_map: Optional[SchemaTranslateMapType] = None,
        **kw: Any,
    ) -> Tuple[
        Compiled, Optional[typing_Sequence[BindParameter[Any]]], CacheStats
    ]:
        # cached compilation is not implemented for DDL; subclasses that
        # need it must override.
        raise NotImplementedError()
93
94
class DDLIfCallable(Protocol):
    """Call signature for user predicates passed as ``callable_`` to
    :meth:`.ExecutableDDLElement.execute_if` / :meth:`.SchemaItem.ddl_if`.

    A truthy return value allows the DDL to execute.
    """

    def __call__(
        self,
        ddl: BaseDDLElement,
        target: Union[SchemaItem, str],
        bind: Optional[Connection],
        tables: Optional[List[Table]] = None,
        state: Optional[Any] = None,
        *,
        dialect: Dialect,
        compiler: Optional[DDLCompiler] = ...,
        checkfirst: bool,
    ) -> bool: ...
108
109
class DDLIf(typing.NamedTuple):
    """Conditional-execution criteria attached to a DDL element.

    Produced by :meth:`.ExecutableDDLElement.execute_if` and
    :meth:`.SchemaItem.ddl_if`; consulted via :meth:`._should_execute`
    before the DDL is emitted.
    """

    # NOTE: annotation widened from ``Optional[str]``; _should_execute()
    # has always also accepted a tuple/list/set of dialect names.
    dialect: Union[str, Iterable[str], None]
    callable_: Optional[DDLIfCallable]  # user predicate; truthy -> execute
    state: Optional[Any]  # opaque value handed back to callable_

    def _should_execute(
        self,
        ddl: BaseDDLElement,
        target: Union[SchemaItem, str],
        bind: Optional[Connection],
        compiler: Optional[DDLCompiler] = None,
        **kw: Any,
    ) -> bool:
        """Return True if the given DDL element should be emitted.

        The dialect is resolved from ``bind`` when present, falling back
        to ``compiler``; at least one of the two must be provided.
        """
        if bind is not None:
            dialect = bind.dialect
        elif compiler is not None:
            dialect = compiler.dialect
        else:
            assert False, "compiler or dialect is required"

        # filter on dialect name: a single name, or a collection of names
        if isinstance(self.dialect, str):
            if self.dialect != dialect.name:
                return False
        elif isinstance(self.dialect, (tuple, list, set)):
            if dialect.name not in self.dialect:
                return False
        # then consult the user predicate, if any
        if self.callable_ is not None and not self.callable_(
            ddl,
            target,
            bind,
            state=self.state,
            dialect=dialect,
            compiler=compiler,
            **kw,
        ):
            return False

        return True
148
149
class ExecutableDDLElement(roles.DDLRole, Executable, BaseDDLElement):
    """Base class for standalone executable DDL expression constructs.

    This class is the base for the general purpose :class:`.DDL` class,
    as well as the various create/drop clause constructs such as
    :class:`.CreateTable`, :class:`.DropTable`, :class:`.AddConstraint`,
    etc.

    .. versionchanged:: 2.0 :class:`.ExecutableDDLElement` is renamed from
       :class:`.DDLElement`, which still exists for backwards compatibility.

    :class:`.ExecutableDDLElement` integrates closely with SQLAlchemy events,
    introduced in :ref:`event_toplevel`.  An instance of one is
    itself an event receiving callable::

        event.listen(
            users,
            "after_create",
            AddConstraint(constraint).execute_if(dialect="postgresql"),
        )

    .. seealso::

        :class:`.DDL`

        :class:`.DDLEvents`

        :ref:`event_toplevel`

        :ref:`schema_ddl_sequences`

    """

    # conditional-execution criteria established by execute_if();
    # None means "always execute"
    _ddl_if: Optional[DDLIf] = None

    # schema item (or string) this DDL applies to; assigned via against()
    target: Union[SchemaItem, str, None] = None

    def _execute_on_connection(
        self, connection, distilled_params, execution_options
    ):
        # route through the Connection's DDL-specific execution path
        return connection._execute_ddl(
            self, distilled_params, execution_options
        )

    @_generative
    def against(self, target: SchemaItem) -> Self:
        """Return a copy of this :class:`_schema.ExecutableDDLElement` which
        will include the given target.

        This essentially applies the given item to the ``.target`` attribute of
        the returned :class:`_schema.ExecutableDDLElement` object. This target
        is then usable by event handlers and compilation routines in order to
        provide services such as tokenization of a DDL string in terms of a
        particular :class:`_schema.Table`.

        When a :class:`_schema.ExecutableDDLElement` object is established as
        an event handler for the :meth:`_events.DDLEvents.before_create` or
        :meth:`_events.DDLEvents.after_create` events, and the event then
        occurs for a given target such as a :class:`_schema.Constraint` or
        :class:`_schema.Table`, that target is established with a copy of the
        :class:`_schema.ExecutableDDLElement` object using this method, which
        then proceeds to the :meth:`_schema.ExecutableDDLElement.execute`
        method in order to invoke the actual DDL instruction.

        :param target: a :class:`_schema.SchemaItem` that will be the subject
         of a DDL operation.

        :return: a copy of this :class:`_schema.ExecutableDDLElement` with the
         ``.target`` attribute assigned to the given
         :class:`_schema.SchemaItem`.

        .. seealso::

            :class:`_schema.DDL` - uses tokenization against the "target" when
            processing the DDL string.

        """
        # @_generative supplies the copy; the assignment here occurs on
        # that copy, not on the original instance
        self.target = target
        return self

    @_generative
    def execute_if(
        self,
        dialect: Optional[str] = None,
        callable_: Optional[DDLIfCallable] = None,
        state: Optional[Any] = None,
    ) -> Self:
        r"""Return a callable that will execute this
        :class:`_ddl.ExecutableDDLElement` conditionally within an event
        handler.

        Used to provide a wrapper for event listening::

            event.listen(
                metadata,
                "before_create",
                DDL("my_ddl").execute_if(dialect="postgresql"),
            )

        :param dialect: May be a string or tuple of strings.
          If a string, it will be compared to the name of the
          executing database dialect::

            DDL("something").execute_if(dialect="postgresql")

          If a tuple, specifies multiple dialect names::

            DDL("something").execute_if(dialect=("postgresql", "mysql"))

        :param callable\_: A callable, which will be invoked with
          three positional arguments as well as optional keyword
          arguments:

            :ddl:
              This DDL element.

            :target:
              The :class:`_schema.Table` or :class:`_schema.MetaData`
              object which is the
              target of this event. May be None if the DDL is executed
              explicitly.

            :bind:
              The :class:`_engine.Connection` being used for DDL execution.
              May be None if this construct is being created inline within
              a table, in which case ``compiler`` will be present.

            :tables:
              Optional keyword argument - a list of Table objects which are to
              be created/ dropped within a MetaData.create_all() or drop_all()
              method call.

            :dialect: keyword argument, but always present - the
              :class:`.Dialect` involved in the operation.

            :compiler: keyword argument.  Will be ``None`` for an engine
              level DDL invocation, but will refer to a :class:`.DDLCompiler`
              if this DDL element is being created inline within a table.

            :state:
              Optional keyword argument - will be the ``state`` argument
              passed to this function.

            :checkfirst:
              Keyword argument, will be True if the 'checkfirst' flag was
              set during the call to ``create()``, ``create_all()``,
              ``drop()``, ``drop_all()``.

          If the callable returns a True value, the DDL statement will be
          executed.

        :param state: any value which will be passed to the callable\_
          as the ``state`` keyword argument.

        .. seealso::

            :meth:`.SchemaItem.ddl_if`

            :class:`.DDLEvents`

            :ref:`event_toplevel`

        """
        # stored as a DDLIf; consulted by _should_execute() at invoke time
        self._ddl_if = DDLIf(dialect, callable_, state)
        return self

    def _should_execute(self, target, bind, **kw):
        # with no execute_if() criteria established, always execute
        if self._ddl_if is None:
            return True
        else:
            return self._ddl_if._should_execute(self, target, bind, **kw)

    def _invoke_with(self, bind):
        # execute on the given Connection, honoring execute_if() criteria;
        # returns None when the criteria reject execution
        if self._should_execute(self.target, bind):
            return bind.execute(self)

    def __call__(self, target, bind, **kw):
        """Execute the DDL as a ddl_listener."""

        # signature matches DDLEvents listeners; ``target`` is the
        # Table / MetaData that fired the event
        self.against(target)._invoke_with(bind)

    def _generate(self):
        # shallow copy; used by the @_generative decorator to produce
        # the new instance that against()/execute_if() mutate
        s = self.__class__.__new__(self.__class__)
        s.__dict__ = self.__dict__.copy()
        return s
334
335
# Backwards-compatibility alias; the class was renamed in SQLAlchemy 2.0.
DDLElement = ExecutableDDLElement
""":class:`.DDLElement` is renamed to :class:`.ExecutableDDLElement`."""
338
339
class DDL(ExecutableDDLElement):
    """A literal DDL statement.

    Specifies literal SQL DDL to be executed by the database.  DDL objects
    act as DDL event listeners and may be subscribed to those events listed
    in :class:`.DDLEvents`, using either :class:`_schema.Table` or
    :class:`_schema.MetaData` objects as targets.  Basic templating support
    allows a single DDL instance to handle repetitive tasks for multiple
    tables.

    Examples::

        from sqlalchemy import event, DDL

        tbl = Table("users", metadata, Column("uid", Integer))
        event.listen(tbl, "before_create", DDL("DROP TRIGGER users_trigger"))

        spow = DDL("ALTER TABLE %(table)s SET secretpowers TRUE")
        event.listen(tbl, "after_create", spow.execute_if(dialect="somedb"))

        drop_spow = DDL("ALTER TABLE users SET secretpowers FALSE")
        connection.execute(drop_spow)

    When operating on Table events, the following ``statement``
    string substitutions are available:

    .. sourcecode:: text

        %(table)s - the Table name, with any required quoting applied
        %(schema)s - the schema name, with any required quoting applied
        %(fullname)s - the Table name including schema, quoted if needed

    The DDL's "context", if any, is merged over the standard substitutions
    noted above; keys present in the context override the standard
    substitutions.

    """

    __visit_name__ = "ddl"

    def __init__(self, statement, context=None):
        """Create a DDL statement.

        :param statement:
          A string to be executed.  Statements are processed with Python's
          string formatting operator against a fixed set of substitutions,
          as well as additional substitutions provided by the optional
          :paramref:`.DDL.context` parameter.

          A literal '%' in a statement must be escaped as '%%'.

          SQL bind parameters are not available in DDL statements.

        :param context:
          Optional dictionary, defaults to None. These values will be
          available for use in string substitutions on the DDL statement.

        .. seealso::

            :class:`.DDLEvents`

            :ref:`event_toplevel`

        """
        # reject non-string statements up front
        if not isinstance(statement, str):
            raise exc.ArgumentError(
                "Expected a string or unicode SQL statement, got '%r'"
                % statement
            )
        self.statement = statement
        self.context = context or {}

    def __repr__(self):
        desc = repr(self.statement)
        if self.context:
            desc += f", context={self.context}"
        return f"<{type(self).__name__}@{id(self)}; {desc}>"
425
426
class _CreateDropBase(ExecutableDDLElement, Generic[_SI]):
    """Common base for DDL constructs that represent CREATE and DROP or
    equivalents.

    Each subclass wraps a single ``element`` attribute — the schema item
    (or string name) being created or dropped.

    """

    element: _SI

    def __init__(self, element: _SI) -> None:
        self.target = element
        self.element = element
        # carry over any ddl_if() criteria attached to the schema item
        self._ddl_if = getattr(element, "_ddl_if", None)

    @property
    def stringify_dialect(self):  # type: ignore[override]
        elem = self.element
        assert not isinstance(elem, str)
        return elem.create_drop_stringify_dialect

    def _create_rule_disable(self, compiler):
        """Callable form of "disable this _create_rule".

        Pass to _create_rule using
        util.portable_instancemethod(self._create_rule_disable)
        to retain serializability.

        """
        return False
457
458
class _CreateBase(_CreateDropBase[_SI]):
    """Adds ``IF NOT EXISTS`` support to the CREATE side of the hierarchy."""

    def __init__(self, element: _SI, if_not_exists: bool = False) -> None:
        self.if_not_exists = if_not_exists
        super().__init__(element)
463
464
class _DropBase(_CreateDropBase[_SI]):
    """Adds ``IF EXISTS`` support to the DROP side of the hierarchy."""

    def __init__(self, element: _SI, if_exists: bool = False) -> None:
        self.if_exists = if_exists
        super().__init__(element)
469
470
class CreateSchema(_CreateBase[str]):
    """Represent a CREATE SCHEMA statement.

    The element here is the string name of the schema to create.

    """

    __visit_name__ = "create_schema"

    stringify_dialect = "default"

    def __init__(self, name: str, if_not_exists: bool = False) -> None:
        """Create a new :class:`.CreateSchema` construct.

        :param name: string name of the schema
        :param if_not_exists: when True, render IF NOT EXISTS
        """
        super().__init__(element=name, if_not_exists=if_not_exists)
490
491
class DropSchema(_DropBase[str]):
    """Represent a DROP SCHEMA statement.

    The element here is the string name of the schema to drop.

    """

    __visit_name__ = "drop_schema"

    stringify_dialect = "default"

    def __init__(
        self, name: str, cascade: bool = False, if_exists: bool = False
    ) -> None:
        """Create a new :class:`.DropSchema` construct.

        :param name: string name of the schema
        :param cascade: when True, render a backend-specific CASCADE
        :param if_exists: when True, render IF EXISTS
        """
        self.cascade = cascade
        super().__init__(element=name, if_exists=if_exists)
513
514
class CreateTable(_CreateBase["Table"]):
    """Represent a CREATE TABLE statement."""

    __visit_name__ = "create_table"

    def __init__(
        self,
        element: Table,
        include_foreign_key_constraints: Optional[
            typing_Sequence[ForeignKeyConstraint]
        ] = None,
        if_not_exists: bool = False,
    ) -> None:
        """Create a :class:`.CreateTable` construct.

        :param element: the :class:`_schema.Table` that's the subject
         of the CREATE
        :param include_foreign_key_constraints: optional sequence of
         :class:`_schema.ForeignKeyConstraint` objects to render inline
         within the CREATE construct; when omitted, all foreign key
         constraints that do not specify use_alter=True are included.

        :param if_not_exists: when True, an IF NOT EXISTS operator is
         applied to the construct.

         .. versionadded:: 1.4.0b2

        """
        super().__init__(element, if_not_exists=if_not_exists)
        self.include_foreign_key_constraints = include_foreign_key_constraints
        # one CreateColumn per column, so that per-column custom
        # compilation (see CreateColumn) can hook each rendered column
        self.columns = [CreateColumn(col) for col in element.columns]
547
548
class _DropView(_DropBase["Table"]):
    """Semi-public 'DROP VIEW' construct.

    Used by the test suite for dialect-agnostic drops of views.
    This object will eventually be part of a public "view" API.

    """

    # visitor dispatch key consumed by the DDL compiler
    __visit_name__ = "drop_view"
558
559
class CreateConstraint(BaseDDLElement):
    """Sub-element DDL construct wrapping a :class:`.Constraint`.

    Unlike :class:`.AddConstraint`, this is not a standalone executable
    statement; it participates within larger CREATE constructs.
    """

    element: Constraint

    def __init__(self, element: Constraint) -> None:
        self.element = element
565
566
class CreateColumn(BaseDDLElement):
    """Represent a :class:`_schema.Column`
    as rendered in a CREATE TABLE statement,
    via the :class:`.CreateTable` construct.

    This is provided to support custom column DDL within the generation
    of CREATE TABLE statements, by using the
    compiler extension documented in :ref:`sqlalchemy.ext.compiler_toplevel`
    to extend :class:`.CreateColumn`.

    Typical integration is to examine the incoming :class:`_schema.Column`
    object, and to redirect compilation if a particular flag or condition
    is found::

        from sqlalchemy import schema
        from sqlalchemy.ext.compiler import compiles


        @compiles(schema.CreateColumn)
        def compile(element, compiler, **kw):
            column = element.element

            if "special" not in column.info:
                return compiler.visit_create_column(element, **kw)

            text = "%s SPECIAL DIRECTIVE %s" % (
                column.name,
                compiler.type_compiler.process(column.type),
            )
            default = compiler.get_column_default_string(column)
            if default is not None:
                text += " DEFAULT " + default

            if not column.nullable:
                text += " NOT NULL"

            if column.constraints:
                text += " ".join(
                    compiler.process(const) for const in column.constraints
                )
            return text

    The above construct can be applied to a :class:`_schema.Table`
    as follows::

        from sqlalchemy import Table, MetaData, Column, Integer, String
        from sqlalchemy import schema

        metadata = MetaData()

        table = Table(
            "mytable",
            metadata,
            Column("x", Integer, info={"special": True}, primary_key=True),
            Column("y", String(50)),
            Column("z", String(20), info={"special": True}),
        )

        metadata.create_all(conn)

    Above, the directives we've added to the :attr:`_schema.Column.info`
    collection
    will be detected by our custom compilation scheme:

    .. sourcecode:: sql

        CREATE TABLE mytable (
            x SPECIAL DIRECTIVE INTEGER NOT NULL,
            y VARCHAR(50),
            z SPECIAL DIRECTIVE VARCHAR(20),
            PRIMARY KEY (x)
        )

    The :class:`.CreateColumn` construct can also be used to skip certain
    columns when producing a ``CREATE TABLE``.  This is accomplished by
    creating a compilation rule that conditionally returns ``None``.
    This is essentially how to produce the same effect as using the
    ``system=True`` argument on :class:`_schema.Column`, which marks a column
    as an implicitly-present "system" column.

    For example, suppose we wish to produce a :class:`_schema.Table`
    which skips
    rendering of the PostgreSQL ``xmin`` column against the PostgreSQL
    backend, but on other backends does render it, in anticipation of a
    triggered rule.  A conditional compilation rule could skip this name only
    on PostgreSQL::

        from sqlalchemy.schema import CreateColumn


        @compiles(CreateColumn, "postgresql")
        def skip_xmin(element, compiler, **kw):
            if element.element.name == "xmin":
                return None
            else:
                return compiler.visit_create_column(element, **kw)


        my_table = Table(
            "mytable",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("xmin", Integer),
        )

    Above, a :class:`.CreateTable` construct will generate a ``CREATE TABLE``
    which only includes the ``id`` column in the string; the ``xmin`` column
    will be omitted, but only against the PostgreSQL backend.

    """

    # visitor dispatch key consumed by the DDL compiler
    __visit_name__ = "create_column"

    element: Column[Any]

    def __init__(self, element: Column[Any]) -> None:
        self.element = element
684
685
class DropTable(_DropBase["Table"]):
    """Represent a DROP TABLE statement."""

    __visit_name__ = "drop_table"

    def __init__(self, element: Table, if_exists: bool = False) -> None:
        """Create a :class:`.DropTable` construct.

        :param element: the :class:`_schema.Table` that's the subject
         of the DROP.
        :param if_exists: when True, an IF EXISTS operator is applied to
         the construct.

         .. versionadded:: 1.4.0b2

        """
        super().__init__(element, if_exists=if_exists)
704
705
class CreateSequence(_CreateBase["Sequence"]):
    """Represent a CREATE SEQUENCE statement."""

    # visitor dispatch key consumed by the DDL compiler
    __visit_name__ = "create_sequence"
710
711
class DropSequence(_DropBase["Sequence"]):
    """Represent a DROP SEQUENCE statement."""

    # visitor dispatch key consumed by the DDL compiler
    __visit_name__ = "drop_sequence"
716
717
class CreateIndex(_CreateBase["Index"]):
    """Represent a CREATE INDEX statement."""

    __visit_name__ = "create_index"

    def __init__(self, element: Index, if_not_exists: bool = False) -> None:
        """Create a :class:`.CreateIndex` construct.

        :param element: a :class:`_schema.Index` that's the subject
         of the CREATE.
        :param if_not_exists: if True, an IF NOT EXISTS operator will be
         applied to the construct.

         .. versionadded:: 1.4.0b2

        """
        super().__init__(element, if_not_exists=if_not_exists)
735
736
class DropIndex(_DropBase["Index"]):
    """Represent a DROP INDEX statement."""

    # visitor dispatch key consumed by the DDL compiler
    __visit_name__ = "drop_index"

    def __init__(self, element: Index, if_exists: bool = False) -> None:
        """Create a :class:`.DropIndex` construct.

        :param element: a :class:`_schema.Index` that's the subject
         of the DROP.
        :param if_exists: if True, an IF EXISTS operator will be applied to the
         construct.

         .. versionadded:: 1.4.0b2

        """
        super().__init__(element, if_exists=if_exists)
754
755
class AddConstraint(_CreateBase["Constraint"]):
    """Represent an ALTER TABLE ADD CONSTRAINT statement."""

    __visit_name__ = "add_constraint"

    def __init__(
        self,
        element: Constraint,
        *,
        isolate_from_table: bool = True,
    ) -> None:
        """Construct a new :class:`.AddConstraint` construct.

        :param element: a :class:`.Constraint` object

        :param isolate_from_table: optional boolean, defaults to True.
         When set, the incoming constraint is excluded from any subsequent
         CREATE TABLE rendering for its associated :class:`.Table`, since
         this construct will emit it via ALTER TABLE instead.

         .. versionadded:: 2.0.39 - added
            :paramref:`.AddConstraint.isolate_from_table`, defaulting
            to True.  Previously, the behavior of this parameter was
            implicitly turned on in all cases.

        """
        super().__init__(element)

        if not isolate_from_table:
            return
        # suppress inline rendering of this constraint within CREATE TABLE
        element._create_rule = self._create_rule_disable
786
787
class DropConstraint(_DropBase["Constraint"]):
    """Represent an ALTER TABLE DROP CONSTRAINT statement."""

    __visit_name__ = "drop_constraint"

    def __init__(
        self,
        element: Constraint,
        *,
        cascade: bool = False,
        if_exists: bool = False,
        isolate_from_table: bool = True,
        **kw: Any,
    ) -> None:
        """Construct a new :class:`.DropConstraint` construct.

        :param element: a :class:`.Constraint` object
        :param cascade: optional boolean; when True, a backend-specific
         "CASCADE CONSTRAINT" directive is rendered if available
        :param if_exists: optional boolean; when True, a backend-specific
         "IF EXISTS" directive is rendered if available
        :param isolate_from_table: optional boolean, defaults to True.
         When set, the incoming constraint is excluded from any subsequent
         CREATE TABLE rendering for its associated :class:`.Table`.

         .. versionadded:: 2.0.39 - added
            :paramref:`.DropConstraint.isolate_from_table`, defaulting
            to True.  Previously, the behavior of this parameter was
            implicitly turned on in all cases.

        """
        self.cascade = cascade
        super().__init__(element, if_exists=if_exists, **kw)

        if not isolate_from_table:
            return
        # suppress inline rendering of this constraint within CREATE TABLE
        element._create_rule = self._create_rule_disable
825
826
class SetTableComment(_CreateDropBase["Table"]):
    """Represent a COMMENT ON TABLE IS statement."""

    # visitor dispatch key consumed by the DDL compiler
    __visit_name__ = "set_table_comment"
831
832
class DropTableComment(_CreateDropBase["Table"]):
    """Represent a COMMENT ON TABLE '' statement.

    Note this varies a lot across database backends.

    """

    # visitor dispatch key consumed by the DDL compiler
    __visit_name__ = "drop_table_comment"
841
842
class SetColumnComment(_CreateDropBase["Column[Any]"]):
    """Represent a COMMENT ON COLUMN IS statement."""

    # visitor dispatch key consumed by the DDL compiler
    __visit_name__ = "set_column_comment"
847
848
class DropColumnComment(_CreateDropBase["Column[Any]"]):
    """Represent a COMMENT ON COLUMN IS NULL statement."""

    # visitor dispatch key consumed by the DDL compiler
    __visit_name__ = "drop_column_comment"
853
854
class SetConstraintComment(_CreateDropBase["Constraint"]):
    """Represent a COMMENT ON CONSTRAINT IS statement."""

    # visitor dispatch key consumed by the DDL compiler
    __visit_name__ = "set_constraint_comment"
859
860
class DropConstraintComment(_CreateDropBase["Constraint"]):
    """Represent a COMMENT ON CONSTRAINT IS NULL statement."""

    # visitor dispatch key consumed by the DDL compiler
    __visit_name__ = "drop_constraint_comment"
865
866
class InvokeDDLBase(SchemaVisitor):
    """Base for visitors that walk schema objects and emit DDL over a
    :class:`_engine.Connection`."""

    def __init__(self, connection, **kw):
        self.connection = connection
        # subclasses are expected to consume their keywords before calling
        # up; anything left over indicates a programming error
        assert not kw, f"Unexpected keywords: {kw.keys()}"

    @contextlib.contextmanager
    def with_ddl_events(self, target, **kw):
        """helper context manager that will apply appropriate DDL events
        to a CREATE or DROP operation."""

        # abstract; CREATE and DROP subclasses emit their respective
        # before/after event pairs
        raise NotImplementedError()
878
879
class InvokeCreateDDLBase(InvokeDDLBase):
    """CREATE-side visitor base; wraps operations in create events."""

    @contextlib.contextmanager
    def with_ddl_events(self, target, **kw):
        """Emit before_create / after_create events around the enclosed
        CREATE operation."""

        dispatch = target.dispatch
        dispatch.before_create(
            target, self.connection, _ddl_runner=self, **kw
        )
        yield
        dispatch.after_create(
            target, self.connection, _ddl_runner=self, **kw
        )
893
894
class InvokeDropDDLBase(InvokeDDLBase):
    """DROP-side visitor base; wraps operations in drop events."""

    @contextlib.contextmanager
    def with_ddl_events(self, target, **kw):
        """Emit before_drop / after_drop events around the enclosed
        DROP operation."""

        dispatch = target.dispatch
        dispatch.before_drop(
            target, self.connection, _ddl_runner=self, **kw
        )
        yield
        dispatch.after_drop(
            target, self.connection, _ddl_runner=self, **kw
        )
908
909
class CheckFirst(Flag):
    """Enumeration for the :paramref:`.MetaData.create_all.checkfirst`
    parameter passed to methods like :meth:`.MetaData.create_all`,
    :meth:`.MetaData.drop_all`, :meth:`.Table.create`, :meth:`.Table.drop` and
    others.

    Indicates which kinds of objects should be "checked" for existence
    with a separate query before CREATE or DROP is emitted for them.

    ``CheckFirst(bool_value)`` converts from a plain boolean.

    .. versionadded:: 2.1

    """

    NONE = 0  # equivalent to False
    """No items should be checked"""

    # value 1 is deliberately unused so that CheckFirst(True) never
    # matches a member by value and instead routes through _missing_()
    TABLES = 2
    """Check for tables"""

    INDEXES = 4
    """Check for indexes"""

    SEQUENCES = 8
    """Check for sequences"""

    TYPES = 16
    """Check for custom datatypes that are created server-side

    This is currently used by PostgreSQL.

    """

    ALL = TABLES | INDEXES | SEQUENCES | TYPES  # equivalent to True

    @classmethod
    def _missing_(cls, value: object) -> Any:
        # map plain booleans onto the all-or-nothing members
        if not isinstance(value, bool):
            return super()._missing_(value)
        return cls.ALL if value else cls.NONE
952
953
class SchemaGenerator(InvokeCreateDDLBase):
    """Schema visitor that emits CREATE statements for MetaData, Table,
    Sequence and Index objects, with dependency ordering and optional
    "check first" existence tests."""

    def __init__(
        self,
        dialect,
        connection,
        checkfirst=CheckFirst.NONE,
        tables=None,
        **kwargs,
    ):
        super().__init__(connection, **kwargs)
        # normalizes legacy booleans as well as CheckFirst flag values
        self.checkfirst = CheckFirst(checkfirst)
        # optional subset of tables to create; None means all tables
        self.tables = tables
        self.preparer = dialect.identifier_preparer
        self.dialect = dialect
        # NOTE(review): memo is not referenced in this visitor's visible
        # methods; presumably traversal bookkeeping — confirm in SchemaVisitor
        self.memo = {}

    def _can_create_table(self, table):
        """Return True if CREATE should proceed for the given table."""
        # raises if the table (or schema) name is an invalid identifier
        self.dialect.validate_identifier(table.name)
        effective_schema = self.connection.schema_for_object(table)
        if effective_schema:
            self.dialect.validate_identifier(effective_schema)
        # with TABLES checking enabled, skip tables that already exist
        return (
            not self.checkfirst & CheckFirst.TABLES
            or not self.dialect.has_table(
                self.connection, table.name, schema=effective_schema
            )
        )

    def _can_create_index(self, index):
        """Return True if CREATE should proceed for the given index."""
        effective_schema = self.connection.schema_for_object(index.table)
        if effective_schema:
            self.dialect.validate_identifier(effective_schema)
        # with INDEXES checking enabled, skip indexes that already exist
        return (
            not self.checkfirst & CheckFirst.INDEXES
            or not self.dialect.has_index(
                self.connection,
                index.table.name,
                index.name,
                schema=effective_schema,
            )
        )

    def _can_create_sequence(self, sequence):
        """Return True if CREATE should proceed for the given sequence."""
        effective_schema = self.connection.schema_for_object(sequence)

        # "optional" sequences are skipped on dialects that can do
        # without them (sequences_optional)
        return self.dialect.supports_sequences and (
            (not self.dialect.sequences_optional or not sequence.optional)
            and (
                not self.checkfirst & CheckFirst.SEQUENCES
                or not self.dialect.has_sequence(
                    self.connection, sequence.name, schema=effective_schema
                )
            )
        )

    def visit_metadata(self, metadata):
        if self.tables is not None:
            tables = self.tables
        else:
            tables = list(metadata.tables.values())

        # dependency-ordered (table, fk-constraints) pairs; a None table
        # carries constraints that must be added separately via ALTER
        collection = sort_tables_and_constraints(
            [t for t in tables if self._can_create_table(t)]
        )

        # standalone sequences only; column-attached sequences are
        # created along with their table
        seq_coll = [
            s
            for s in metadata._sequences.values()
            if s.column is None and self._can_create_sequence(s)
        ]

        event_collection = [t for (t, fks) in collection if t is not None]

        with self.with_ddl_events(
            metadata,
            tables=event_collection,
            checkfirst=self.checkfirst,
        ):
            for seq in seq_coll:
                self.traverse_single(seq, create_ok=True)

            for table, fkcs in collection:
                if table is not None:
                    self.traverse_single(
                        table,
                        create_ok=True,
                        include_foreign_key_constraints=fkcs,
                        _is_metadata_operation=True,
                    )
                else:
                    # constraints not renderable inline, e.g. participants
                    # in dependency cycles
                    for fkc in fkcs:
                        self.traverse_single(fkc)

    def visit_table(
        self,
        table,
        create_ok=False,
        include_foreign_key_constraints=None,
        _is_metadata_operation=False,
    ):
        if not create_ok and not self._can_create_table(table):
            return

        with self.with_ddl_events(
            table,
            checkfirst=self.checkfirst,
            _is_metadata_operation=_is_metadata_operation,
        ):
            # visit column default generators (e.g. sequences) first
            for column in table.columns:
                if column.default is not None:
                    self.traverse_single(column.default)

            if not self.dialect.supports_alter:
                # e.g., don't omit any foreign key constraints
                include_foreign_key_constraints = None

            CreateTable(
                table,
                include_foreign_key_constraints=(
                    include_foreign_key_constraints
                ),
            )._invoke_with(self.connection)

            if hasattr(table, "indexes"):
                for index in table.indexes:
                    self.traverse_single(index, create_ok=True)

            # comments are emitted as separate statements when the
            # dialect cannot render them inline within CREATE TABLE
            if (
                self.dialect.supports_comments
                and not self.dialect.inline_comments
            ):
                if table.comment is not None:
                    SetTableComment(table)._invoke_with(self.connection)

                for column in table.columns:
                    if column.comment is not None:
                        SetColumnComment(column)._invoke_with(self.connection)

                if self.dialect.supports_constraint_comments:
                    for constraint in table.constraints:
                        if constraint.comment is not None:
                            self.connection.execute(
                                SetConstraintComment(constraint)
                            )

    def visit_foreign_key_constraint(self, constraint):
        # standalone FK creation requires ALTER TABLE ADD CONSTRAINT
        if not self.dialect.supports_alter:
            return

        with self.with_ddl_events(constraint):
            AddConstraint(constraint)._invoke_with(self.connection)

    def visit_sequence(self, sequence, create_ok=False):
        if not create_ok and not self._can_create_sequence(sequence):
            return
        with self.with_ddl_events(sequence):
            CreateSequence(sequence)._invoke_with(self.connection)

    def visit_index(self, index, create_ok=False):
        if not create_ok and not self._can_create_index(index):
            return
        with self.with_ddl_events(index):
            CreateIndex(index)._invoke_with(self.connection)
1117
1118
class SchemaDropper(InvokeDropDDLBase):
    """Visitor that emits DROP statements for a :class:`_schema.MetaData`
    collection or for individual schema items, in reverse dependency
    order.

    """

    def __init__(
        self,
        dialect,
        connection,
        checkfirst=CheckFirst.NONE,
        tables=None,
        **kwargs,
    ):
        super().__init__(connection, **kwargs)
        # normalize bool / int values into the CheckFirst flag
        self.checkfirst = CheckFirst(checkfirst)
        self.tables = tables
        self.preparer = dialect.identifier_preparer
        self.dialect = dialect
        self.memo = {}

    def visit_metadata(self, metadata):
        """Drop all droppable tables and sequences of *metadata*.

        Tables are emitted in reverse dependency order; named foreign
        key constraints that would otherwise form a cycle are separated
        out and dropped first via DROP CONSTRAINT when the dialect
        supports ALTER.
        """
        if self.tables is not None:
            tables = self.tables
        else:
            tables = list(metadata.tables.values())

        try:
            unsorted_tables = [t for t in tables if self._can_drop_table(t)]
            collection = list(
                reversed(
                    sort_tables_and_constraints(
                        unsorted_tables,
                        # unnamed constraints, or any constraint on a
                        # no-ALTER backend, cannot be dropped separately
                        # and so must stay attached to their table
                        filter_fn=lambda constraint: (
                            False
                            if not self.dialect.supports_alter
                            or constraint.name is None
                            else None
                        ),
                    )
                )
            )
        except exc.CircularDependencyError as err2:
            if not self.dialect.supports_alter:
                # best effort: warn and drop in unsorted order
                util.warn(
                    "Can't sort tables for DROP; an "
                    "unresolvable foreign key "
                    "dependency exists between tables: %s; and backend does "
                    "not support ALTER. To restore at least a partial sort, "
                    "apply use_alter=True to ForeignKey and "
                    "ForeignKeyConstraint "
                    "objects involved in the cycle to mark these as known "
                    "cycles that will be ignored."
                    % (", ".join(sorted([t.fullname for t in err2.cycles])))
                )
                collection = [(t, ()) for t in unsorted_tables]
            else:
                raise exc.CircularDependencyError(
                    err2.args[0],
                    err2.cycles,
                    err2.edges,
                    msg="Can't sort tables for DROP; an "
                    "unresolvable foreign key "
                    "dependency exists between tables: %s. Please ensure "
                    "that the ForeignKey and ForeignKeyConstraint objects "
                    "involved in the cycle have "
                    "names so that they can be dropped using "
                    "DROP CONSTRAINT."
                    % (", ".join(sorted([t.fullname for t in err2.cycles]))),
                ) from err2

        seq_coll = [
            s
            for s in metadata._sequences.values()
            if self._can_drop_sequence(s)
        ]

        event_collection = [t for (t, fks) in collection if t is not None]

        with self.with_ddl_events(
            metadata,
            tables=event_collection,
            checkfirst=self.checkfirst,
        ):
            for table, fkcs in collection:
                if table is not None:
                    self.traverse_single(
                        table,
                        drop_ok=True,
                        _is_metadata_operation=True,
                        # sequences in seq_coll are dropped below, after
                        # all tables are gone
                        _ignore_sequences=seq_coll,
                    )
                else:
                    # a (None, [fkc, ...]) entry: constraints separated
                    # out of a dependency cycle, dropped via ALTER
                    for fkc in fkcs:
                        self.traverse_single(fkc)

            for seq in seq_coll:
                self.traverse_single(seq, drop_ok=seq.column is None)

    def _can_drop_table(self, table):
        """Return whether *table* should be dropped, consulting
        ``has_table()`` only when CheckFirst.TABLES is in effect.

        Also validates the table name (and effective schema name) as
        identifiers, raising if invalid.
        """
        self.dialect.validate_identifier(table.name)
        effective_schema = self.connection.schema_for_object(table)
        if effective_schema:
            self.dialect.validate_identifier(effective_schema)
        return (
            not self.checkfirst & CheckFirst.TABLES
            or self.dialect.has_table(
                self.connection, table.name, schema=effective_schema
            )
        )

    def _can_drop_index(self, index):
        """Return whether *index* should be dropped, consulting
        ``has_index()`` only when CheckFirst.INDEXES is in effect."""
        effective_schema = self.connection.schema_for_object(index.table)
        if effective_schema:
            self.dialect.validate_identifier(effective_schema)
        return (
            not self.checkfirst & CheckFirst.INDEXES
            or self.dialect.has_index(
                self.connection,
                index.table.name,
                index.name,
                schema=effective_schema,
            )
        )

    def _can_drop_sequence(self, sequence):
        """Return whether *sequence* should be dropped: the dialect must
        support sequences, the sequence must not be optional on a
        backend where optional sequences are skipped, and, when
        CheckFirst.SEQUENCES is in effect, it must actually exist.
        """
        effective_schema = self.connection.schema_for_object(sequence)
        return self.dialect.supports_sequences and (
            (not self.dialect.sequences_optional or not sequence.optional)
            and (
                not self.checkfirst & CheckFirst.SEQUENCES
                or self.dialect.has_sequence(
                    self.connection, sequence.name, schema=effective_schema
                )
            )
        )

    def visit_index(self, index, drop_ok=False):
        """Emit DROP INDEX for a single index."""
        if not drop_ok and not self._can_drop_index(index):
            return

        with self.with_ddl_events(index):
            # invoke via _invoke_with() for consistency with every other
            # visit_* method here and in SchemaGenerator; the previous
            # DDLElement.__call__-style invocation was equivalent but
            # legacy
            DropIndex(index)._invoke_with(self.connection)

    def visit_table(
        self,
        table,
        drop_ok=False,
        _is_metadata_operation=False,
        _ignore_sequences=(),
    ):
        """Emit DROP TABLE for a single table, then traverse its
        client-side column defaults.

        :param drop_ok: when True, skip the ``_can_drop_table()`` check.
        :param _is_metadata_operation: True when part of a metadata-wide
         drop; passed along to the DDL event context.
        :param _ignore_sequences: sequences being handled at the
         metadata level, which must not be dropped here.
        """
        if not drop_ok and not self._can_drop_table(table):
            return

        with self.with_ddl_events(
            table,
            checkfirst=self.checkfirst,
            _is_metadata_operation=_is_metadata_operation,
        ):
            DropTable(table)._invoke_with(self.connection)

            # traverse client side defaults which may refer to server-side
            # sequences. noting that some of these client side defaults may
            # also be set up as server side defaults
            # (see https://docs.sqlalchemy.org/en/
            # latest/core/defaults.html
            # #associating-a-sequence-as-the-server-side-
            # default), so have to be dropped after the table is dropped.
            for column in table.columns:
                if (
                    column.default is not None
                    and column.default not in _ignore_sequences
                ):
                    self.traverse_single(column.default)

    def visit_foreign_key_constraint(self, constraint):
        """Emit DROP CONSTRAINT for a single foreign key constraint;
        a no-op on dialects without ALTER support."""
        if not self.dialect.supports_alter:
            return
        with self.with_ddl_events(constraint):
            DropConstraint(constraint)._invoke_with(self.connection)

    def visit_sequence(self, sequence, drop_ok=False):
        """Emit DROP SEQUENCE for a single sequence."""
        if not drop_ok and not self._can_drop_sequence(sequence):
            return
        with self.with_ddl_events(sequence):
            DropSequence(sequence)._invoke_with(self.connection)
1300
1301
def sort_tables(
    tables: Iterable[TableClause],
    skip_fn: Optional[Callable[[ForeignKeyConstraint], bool]] = None,
    extra_dependencies: Optional[
        typing_Sequence[Tuple[TableClause, TableClause]]
    ] = None,
) -> List[Table]:
    """Sort a collection of :class:`_schema.Table` objects based on
    dependency.

    This is a dependency-ordered sort which will emit :class:`_schema.Table`
    objects such that they will follow their dependent :class:`_schema.Table`
    objects.
    Tables are dependent on another based on the presence of
    :class:`_schema.ForeignKeyConstraint`
    objects as well as explicit dependencies
    added by :meth:`_schema.Table.add_is_dependent_on`.

    .. warning::

        The :func:`._schema.sort_tables` function cannot by itself
        accommodate automatic resolution of dependency cycles between
        tables, which are usually caused by mutually dependent foreign key
        constraints. When these cycles are detected, the foreign keys
        of these tables are omitted from consideration in the sort.
        A warning is emitted when this condition occurs, which will be an
        exception raise in a future release. Tables which are not part
        of the cycle will still be returned in dependency order.

        To resolve these cycles, the
        :paramref:`_schema.ForeignKeyConstraint.use_alter` parameter may be
        applied to those constraints which create a cycle. Alternatively,
        the :func:`_schema.sort_tables_and_constraints` function will
        automatically return foreign key constraints in a separate
        collection when cycles are detected so that they may be applied
        to a schema separately.

    :param tables: a sequence of :class:`_schema.Table` objects.

    :param skip_fn: optional callable which will be passed a
     :class:`_schema.ForeignKey` object; if it returns True, this
     constraint will not be considered as a dependency. Note this is
     **different** from the same parameter in
     :func:`.sort_tables_and_constraints`, which is
     instead passed the owning :class:`_schema.ForeignKeyConstraint` object.

    :param extra_dependencies: a sequence of 2-tuples of tables which will
     also be considered as dependent on each other.

    .. seealso::

        :func:`.sort_tables_and_constraints`

        :attr:`_schema.MetaData.sorted_tables` - uses this function to sort


    """

    if skip_fn is not None:
        fixed_skip_fn = skip_fn

        # adapt the per-ForeignKey skip_fn into the per-constraint
        # filter_fn expected by sort_tables_and_constraints: True
        # (exclude) if any element is skipped, else None (no opinion)
        def _skip_fn(fkc):
            for fk in fkc.elements:
                if fixed_skip_fn(fk):
                    return True
            else:
                return None

    else:
        _skip_fn = None  # type: ignore

    return [
        t
        for (t, fkcs) in sort_tables_and_constraints(
            tables,
            filter_fn=_skip_fn,
            extra_dependencies=extra_dependencies,
            _warn_for_cycles=True,
        )
        if t is not None
    ]
1383
1384
def sort_tables_and_constraints(
    tables, filter_fn=None, extra_dependencies=None, _warn_for_cycles=False
):
    """Sort a collection of :class:`_schema.Table` /
    :class:`_schema.ForeignKeyConstraint`
    objects.

    This is a dependency-ordered sort which will emit tuples of
    ``(Table, [ForeignKeyConstraint, ...])`` such that each
    :class:`_schema.Table` follows its dependent :class:`_schema.Table`
    objects.
    Remaining :class:`_schema.ForeignKeyConstraint`
    objects that are separate due to
    dependency rules not satisfied by the sort are emitted afterwards
    as ``(None, [ForeignKeyConstraint ...])``.

    Tables are dependent on another based on the presence of
    :class:`_schema.ForeignKeyConstraint` objects, explicit dependencies
    added by :meth:`_schema.Table.add_is_dependent_on`,
    as well as dependencies
    stated here using the :paramref:`~.sort_tables_and_constraints.filter_fn`
    and/or :paramref:`~.sort_tables_and_constraints.extra_dependencies`
    parameters.

    :param tables: a sequence of :class:`_schema.Table` objects.

    :param filter_fn: optional callable which will be passed a
     :class:`_schema.ForeignKeyConstraint` object,
     and returns a value based on
     whether this constraint should definitely be included or excluded as
     an inline constraint, or neither. If it returns False, the constraint
     will definitely be included as a dependency that cannot be subject
     to ALTER; if True, it will **only** be included as an ALTER result at
     the end. Returning None means the constraint is included in the
     table-based result unless it is detected as part of a dependency cycle.

    :param extra_dependencies: a sequence of 2-tuples of tables which will
     also be considered as dependent on each other.

    .. seealso::

        :func:`.sort_tables`


    """

    # fixed dependencies can never be removed to break a cycle;
    # mutable dependencies (from FK constraints) can be
    fixed_dependencies = set()
    mutable_dependencies = set()

    if extra_dependencies is not None:
        fixed_dependencies.update(extra_dependencies)

    # constraints to be emitted separately in the trailing (None, [...])
    # entry rather than inline with their table
    remaining_fkcs = set()
    for table in tables:
        for fkc in table.foreign_key_constraints:
            if fkc.use_alter is True:
                # explicitly marked for ALTER; never a sort dependency
                remaining_fkcs.add(fkc)
                continue

            if filter_fn:
                filtered = filter_fn(fkc)

                if filtered is True:
                    remaining_fkcs.add(fkc)
                    continue

            # self-referential FKs don't create an ordering edge
            dependent_on = fkc.referred_table
            if dependent_on is not table:
                mutable_dependencies.add((dependent_on, table))

        fixed_dependencies.update(
            (parent, table) for parent in table._extra_dependencies
        )

    try:
        candidate_sort = list(
            topological.sort(
                fixed_dependencies.union(mutable_dependencies),
                tables,
            )
        )
    except exc.CircularDependencyError as err:
        if _warn_for_cycles:
            util.warn(
                "Cannot correctly sort tables; there are unresolvable cycles "
                'between tables "%s", which is usually caused by mutually '
                "dependent foreign key constraints. Foreign key constraints "
                "involving these tables will not be considered; this warning "
                "may raise an error in a future release."
                % (", ".join(sorted(t.fullname for t in err.cycles)),)
            )
        # break the cycle: for each table in the cycle, move its
        # removable FK constraints (those filter_fn does not pin as
        # inline) into remaining_fkcs and drop their edges, then re-sort
        for edge in err.edges:
            if edge in mutable_dependencies:
                table = edge[1]
                if table not in err.cycles:
                    continue
                can_remove = [
                    fkc
                    for fkc in table.foreign_key_constraints
                    if filter_fn is None or filter_fn(fkc) is not False
                ]
                remaining_fkcs.update(can_remove)
                for fkc in can_remove:
                    dependent_on = fkc.referred_table
                    if dependent_on is not table:
                        mutable_dependencies.discard((dependent_on, table))
        # re-run the sort with cycle edges removed; may still raise if
        # fixed dependencies themselves form a cycle
        candidate_sort = list(
            topological.sort(
                fixed_dependencies.union(mutable_dependencies),
                tables,
            )
        )

    return [
        (table, table.foreign_key_constraints.difference(remaining_fkcs))
        for table in candidate_sort
    ] + [(None, list(remaining_fkcs))]