# sql/ddl.py
# Copyright (C) 2009-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: allow-untyped-defs, allow-untyped-calls

"""
Provides the hierarchy of DDL-defining schema items as well as routines
to invoke them for a create/drop call.

"""
from __future__ import annotations

import contextlib
import typing
from typing import Any
from typing import Callable
from typing import Generic
from typing import Iterable
from typing import List
from typing import Optional
from typing import Sequence as typing_Sequence
from typing import Tuple
from typing import TypeVar
from typing import Union

from . import roles
from .base import _generative
from .base import Executable
from .base import SchemaVisitor
from .elements import ClauseElement
from .. import exc
from .. import util
from ..util import topological
from ..util.typing import Protocol
from ..util.typing import Self

if typing.TYPE_CHECKING:
    from .compiler import Compiled
    from .compiler import DDLCompiler
    from .elements import BindParameter
    from .schema import Column
    from .schema import Constraint
    from .schema import ForeignKeyConstraint
    from .schema import Index
    from .schema import SchemaItem
    from .schema import Sequence as Sequence  # noqa: F401
    from .schema import Table
    from .selectable import TableClause
    from ..engine.base import Connection
    from ..engine.interfaces import CacheStats
    from ..engine.interfaces import CompiledCacheType
    from ..engine.interfaces import Dialect
    from ..engine.interfaces import SchemaTranslateMapType

_SI = TypeVar("_SI", bound=Union["SchemaItem", str])


class BaseDDLElement(ClauseElement):
    """The root of DDL constructs, including those that are sub-elements
    within the "create table" and other processes.

    .. versionadded:: 2.0

    """

    _hierarchy_supports_caching = False
    """Disable cache warnings for all :class:`.BaseDDLElement` subclasses."""

    def _compiler(self, dialect, **kw):
        """Return a compiler appropriate for this ClauseElement, given a
        Dialect."""

        return dialect.ddl_compiler(dialect, self, **kw)

    def _compile_w_cache(
        self,
        dialect: Dialect,
        *,
        compiled_cache: Optional[CompiledCacheType],
        column_keys: List[str],
        for_executemany: bool = False,
        schema_translate_map: Optional[SchemaTranslateMapType] = None,
        **kw: Any,
    ) -> Tuple[
        Compiled, Optional[typing_Sequence[BindParameter[Any]]], CacheStats
    ]:
        raise NotImplementedError()


class DDLIfCallable(Protocol):
    def __call__(
        self,
        ddl: BaseDDLElement,
        target: Union[SchemaItem, str],
        bind: Optional[Connection],
        tables: Optional[List[Table]] = None,
        state: Optional[Any] = None,
        *,
        dialect: Dialect,
        compiler: Optional[DDLCompiler] = ...,
        checkfirst: bool,
    ) -> bool: ...


class DDLIf(typing.NamedTuple):
    dialect: Optional[str]
    callable_: Optional[DDLIfCallable]
    state: Optional[Any]

    def _should_execute(
        self,
        ddl: BaseDDLElement,
        target: Union[SchemaItem, str],
        bind: Optional[Connection],
        compiler: Optional[DDLCompiler] = None,
        **kw: Any,
    ) -> bool:
        if bind is not None:
            dialect = bind.dialect
        elif compiler is not None:
            dialect = compiler.dialect
        else:
            assert False, "compiler or dialect is required"

        if isinstance(self.dialect, str):
            if self.dialect != dialect.name:
                return False
        elif isinstance(self.dialect, (tuple, list, set)):
            if dialect.name not in self.dialect:
                return False
        if self.callable_ is not None and not self.callable_(
            ddl,
            target,
            bind,
            state=self.state,
            dialect=dialect,
            compiler=compiler,
            **kw,
        ):
            return False

        return True


class ExecutableDDLElement(roles.DDLRole, Executable, BaseDDLElement):
    """Base class for standalone executable DDL expression constructs.

    This class is the base for the general purpose :class:`.DDL` class,
    as well as the various create/drop clause constructs such as
    :class:`.CreateTable`, :class:`.DropTable`, :class:`.AddConstraint`,
    etc.

    .. versionchanged:: 2.0 :class:`.ExecutableDDLElement` is renamed from
       :class:`.DDLElement`, which still exists for backwards compatibility.

    :class:`.ExecutableDDLElement` integrates closely with SQLAlchemy events,
    introduced in :ref:`event_toplevel`.  An instance of one is
    itself an event receiving callable::

        event.listen(
            users,
            "after_create",
            AddConstraint(constraint).execute_if(dialect="postgresql"),
        )

    .. seealso::

        :class:`.DDL`

        :class:`.DDLEvents`

        :ref:`event_toplevel`

        :ref:`schema_ddl_sequences`

    """

    _ddl_if: Optional[DDLIf] = None
    target: Union[SchemaItem, str, None] = None

    def _execute_on_connection(
        self, connection, distilled_params, execution_options
    ):
        return connection._execute_ddl(
            self, distilled_params, execution_options
        )

    @_generative
    def against(self, target: SchemaItem) -> Self:
        """Return a copy of this :class:`_schema.ExecutableDDLElement` which
        will include the given target.

        This essentially applies the given item to the ``.target`` attribute
        of the returned :class:`_schema.ExecutableDDLElement` object.  This
        target is then usable by event handlers and compilation routines in
        order to provide services such as tokenization of a DDL string in
        terms of a particular :class:`_schema.Table`.

        When a :class:`_schema.ExecutableDDLElement` object is established as
        an event handler for the :meth:`_events.DDLEvents.before_create` or
        :meth:`_events.DDLEvents.after_create` events, and the event then
        occurs for a given target such as a :class:`_schema.Constraint` or
        :class:`_schema.Table`, that target is established with a copy of the
        :class:`_schema.ExecutableDDLElement` object using this method, which
        then proceeds to the :meth:`_schema.ExecutableDDLElement.execute`
        method in order to invoke the actual DDL instruction.

        :param target: a :class:`_schema.SchemaItem` that will be the subject
         of a DDL operation.

        :return: a copy of this :class:`_schema.ExecutableDDLElement` with the
         ``.target`` attribute assigned to the given
         :class:`_schema.SchemaItem`.

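        For example, a :class:`.DDL` string using the ``%(table)s`` token may
        be bound to a particular table up front (an illustrative sketch;
        ``users_table`` and ``engine`` are assumed to be an existing
        :class:`_schema.Table` and :class:`_engine.Engine`)::

            ddl = DDL("ALTER TABLE %(table)s ADD COLUMN flag INTEGER")

            # the returned copy carries users_table as its .target,
            # so %(table)s renders as the quoted table name
            bound_ddl = ddl.against(users_table)

            with engine.begin() as conn:
                conn.execute(bound_ddl)
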
        .. seealso::

            :class:`_schema.DDL` - uses tokenization against the "target" when
            processing the DDL string.

        """
        self.target = target
        return self

    @_generative
    def execute_if(
        self,
        dialect: Optional[str] = None,
        callable_: Optional[DDLIfCallable] = None,
        state: Optional[Any] = None,
    ) -> Self:
        r"""Return a callable that will execute this
        :class:`_ddl.ExecutableDDLElement` conditionally within an event
        handler.

        Used to provide a wrapper for event listening::

            event.listen(
                metadata,
                "before_create",
                DDL("my_ddl").execute_if(dialect="postgresql"),
            )

        :param dialect: May be a string or tuple of strings.
         If a string, it will be compared to the name of the
         executing database dialect::

            DDL("something").execute_if(dialect="postgresql")

         If a tuple, specifies multiple dialect names::

            DDL("something").execute_if(dialect=("postgresql", "mysql"))

        :param callable\_: A callable, which will be invoked with
         three positional arguments as well as optional keyword
         arguments:

            :ddl:
              This DDL element.

            :target:
              The :class:`_schema.Table` or :class:`_schema.MetaData`
              object which is the
              target of this event.  May be None if the DDL is executed
              explicitly.

            :bind:
              The :class:`_engine.Connection` being used for DDL execution.
              May be None if this construct is being created inline within
              a table, in which case ``compiler`` will be present.

            :tables:
              Optional keyword argument - a list of Table objects which are to
              be created/dropped within a MetaData.create_all() or drop_all()
              method call.

            :dialect: keyword argument, but always present - the
              :class:`.Dialect` involved in the operation.

            :compiler: keyword argument.  Will be ``None`` for an engine
              level DDL invocation, but will refer to a :class:`.DDLCompiler`
              if this DDL element is being created inline within a table.

            :state:
              Optional keyword argument - will be the ``state`` argument
              passed to this function.

            :checkfirst:
              Keyword argument, will be True if the 'checkfirst' flag was
              set during the call to ``create()``, ``create_all()``,
              ``drop()``, ``drop_all()``.

         If the callable returns a True value, the DDL statement will be
         executed.

        :param state: any value which will be passed to the callable\_
         as the ``state`` keyword argument.

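        For example, the ``callable_`` and ``state`` parameters may be
        combined so that a hypothetical function decides at event time
        whether the statement runs (an illustrative sketch; ``metadata`` is
        assumed to be an existing :class:`_schema.MetaData` and the SQL
        string is illustrative)::

            def state_allows(ddl, target, bind, state=None, **kw):
                # ``state`` receives the value passed to execute_if();
                # extra keyword arguments such as ``dialect`` and
                # ``compiler`` are absorbed by **kw
                return bool(state and state.get("enabled"))


            event.listen(
                metadata,
                "after_create",
                DDL("ANALYZE users").execute_if(
                    callable_=state_allows, state={"enabled": True}
                ),
            )
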
        .. seealso::

            :meth:`.SchemaItem.ddl_if`

            :class:`.DDLEvents`

            :ref:`event_toplevel`

        """
        self._ddl_if = DDLIf(dialect, callable_, state)
        return self

    def _should_execute(self, target, bind, **kw):
        if self._ddl_if is None:
            return True
        else:
            return self._ddl_if._should_execute(self, target, bind, **kw)

    def _invoke_with(self, bind):
        if self._should_execute(self.target, bind):
            return bind.execute(self)

    def __call__(self, target, bind, **kw):
        """Execute the DDL as a ddl_listener."""

        self.against(target)._invoke_with(bind)

    def _generate(self):
        s = self.__class__.__new__(self.__class__)
        s.__dict__ = self.__dict__.copy()
        return s


DDLElement = ExecutableDDLElement
""":class:`.DDLElement` is renamed to :class:`.ExecutableDDLElement`."""


class DDL(ExecutableDDLElement):
    """A literal DDL statement.

    Specifies literal SQL DDL to be executed by the database.  DDL objects
    function as DDL event listeners, and can be subscribed to those events
    listed in :class:`.DDLEvents`, using either :class:`_schema.Table` or
    :class:`_schema.MetaData` objects as targets.
    Basic templating support allows
    a single DDL instance to handle repetitive tasks for multiple tables.

    Examples::

        from sqlalchemy import event, DDL

        tbl = Table("users", metadata, Column("uid", Integer))
        event.listen(tbl, "before_create", DDL("DROP TRIGGER users_trigger"))

        spow = DDL("ALTER TABLE %(table)s SET secretpowers TRUE")
        event.listen(tbl, "after_create", spow.execute_if(dialect="somedb"))

        drop_spow = DDL("ALTER TABLE users SET secretpowers FALSE")
        connection.execute(drop_spow)

    When operating on Table events, the following ``statement``
    string substitutions are available:

    .. sourcecode:: text

        %(table)s - the Table name, with any required quoting applied
        %(schema)s - the schema name, with any required quoting applied
        %(fullname)s - the Table name including schema, quoted if needed

    The DDL's "context", if any, will be combined with the standard
    substitutions noted above.  Keys present in the context will override
    the standard substitutions.

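    For example, a user-supplied ``context`` entry may be combined with the
    built-in tokens (an illustrative sketch; the column name is
    illustrative)::

        add_col = DDL(
            "ALTER TABLE %(table)s ADD COLUMN %(colname)s INTEGER",
            context={"colname": "extra_data"},
        )
        event.listen(tbl, "after_create", add_col)
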
    """

    __visit_name__ = "ddl"

    def __init__(self, statement, context=None):
        """Create a DDL statement.

        :param statement:
          A string or unicode string to be executed.  Statements will be
          processed with Python's string formatting operator using
          a fixed set of string substitutions, as well as additional
          substitutions provided by the optional :paramref:`.DDL.context`
          parameter.

          A literal '%' in a statement must be escaped as '%%'.

          SQL bind parameters are not available in DDL statements.

        :param context:
          Optional dictionary, defaults to None.  These values will be
          available for use in string substitutions on the DDL statement.

        .. seealso::

            :class:`.DDLEvents`

            :ref:`event_toplevel`

        """

        if not isinstance(statement, str):
            raise exc.ArgumentError(
                "Expected a string or unicode SQL statement, got '%r'"
                % statement
            )

        self.statement = statement
        self.context = context or {}

    def __repr__(self):
        parts = [repr(self.statement)]
        if self.context:
            parts.append(f"context={self.context}")

        return "<%s@%s; %s>" % (
            type(self).__name__,
            id(self),
            ", ".join(parts),
        )


class _CreateDropBase(ExecutableDDLElement, Generic[_SI]):
    """Base class for DDL constructs that represent CREATE and DROP or
    equivalents.

    The common theme of _CreateDropBase is a single
    ``element`` attribute which refers to the element
    to be created or dropped.

    """

    def __init__(self, element: _SI) -> None:
        self.element = self.target = element
        self._ddl_if = getattr(element, "_ddl_if", None)

    @property
    def stringify_dialect(self):
        assert not isinstance(self.element, str)
        return self.element.create_drop_stringify_dialect

    def _create_rule_disable(self, compiler):
        """Allow disable of _create_rule using a callable.

        Pass to _create_rule using
        util.portable_instancemethod(self._create_rule_disable)
        to retain serializability.

        """
        return False


class _CreateBase(_CreateDropBase[_SI]):
    def __init__(self, element: _SI, if_not_exists: bool = False) -> None:
        super().__init__(element)
        self.if_not_exists = if_not_exists


class _DropBase(_CreateDropBase[_SI]):
    def __init__(self, element: _SI, if_exists: bool = False) -> None:
        super().__init__(element)
        self.if_exists = if_exists


class CreateSchema(_CreateBase[str]):
    """Represent a CREATE SCHEMA statement.

    The argument here is the string name of the schema.

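    For example, emitting the statement directly on a connection (an
    illustrative sketch; ``engine`` is assumed to be an existing
    :class:`_engine.Engine` whose backend supports schemas)::

        from sqlalchemy.schema import CreateSchema

        with engine.begin() as conn:
            conn.execute(CreateSchema("some_schema", if_not_exists=True))
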
    """

    __visit_name__ = "create_schema"

    stringify_dialect = "default"

    def __init__(
        self,
        name: str,
        if_not_exists: bool = False,
    ) -> None:
        """Create a new :class:`.CreateSchema` construct."""

        super().__init__(element=name, if_not_exists=if_not_exists)


class DropSchema(_DropBase[str]):
    """Represent a DROP SCHEMA statement.

    The argument here is the string name of the schema.

    """

    __visit_name__ = "drop_schema"

    stringify_dialect = "default"

    def __init__(
        self,
        name: str,
        cascade: bool = False,
        if_exists: bool = False,
    ) -> None:
        """Create a new :class:`.DropSchema` construct."""

        super().__init__(element=name, if_exists=if_exists)
        self.cascade = cascade


class CreateTable(_CreateBase["Table"]):
    """Represent a CREATE TABLE statement."""

    __visit_name__ = "create_table"

    def __init__(
        self,
        element: Table,
        include_foreign_key_constraints: Optional[
            typing_Sequence[ForeignKeyConstraint]
        ] = None,
        if_not_exists: bool = False,
    ) -> None:
        """Create a :class:`.CreateTable` construct.

        :param element: a :class:`_schema.Table` that's the subject
         of the CREATE
        :param include_foreign_key_constraints: optional sequence of
         :class:`_schema.ForeignKeyConstraint` objects that will be included
         inline within the CREATE construct; if omitted, all foreign key
         constraints that do not specify use_alter=True are included.

        :param if_not_exists: if True, an IF NOT EXISTS operator will be
         applied to the construct.

         .. versionadded:: 1.4.0b2

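        For example, the CREATE TABLE string for a table may be rendered
        without a database connection (an illustrative sketch;
        ``users_table`` is assumed to be an existing
        :class:`_schema.Table`)::

            from sqlalchemy.schema import CreateTable

            print(CreateTable(users_table))

        or emitted against a database (``engine`` assumed)::

            with engine.begin() as conn:
                conn.execute(CreateTable(users_table, if_not_exists=True))
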
        """
        super().__init__(element, if_not_exists=if_not_exists)
        self.columns = [CreateColumn(column) for column in element.columns]
        self.include_foreign_key_constraints = include_foreign_key_constraints


class _DropView(_DropBase["Table"]):
    """Semi-public 'DROP VIEW' construct.

    Used by the test suite for dialect-agnostic drops of views.
    This object will eventually be part of a public "view" API.

    """

    __visit_name__ = "drop_view"


class CreateConstraint(BaseDDLElement):
    element: Constraint

    def __init__(self, element: Constraint) -> None:
        self.element = element


class CreateColumn(BaseDDLElement):
    """Represent a :class:`_schema.Column`
    as rendered in a CREATE TABLE statement,
    via the :class:`.CreateTable` construct.

    This is provided to support custom column DDL within the generation
    of CREATE TABLE statements, by using the
    compiler extension documented in :ref:`sqlalchemy.ext.compiler_toplevel`
    to extend :class:`.CreateColumn`.

    Typical integration is to examine the incoming :class:`_schema.Column`
    object, and to redirect compilation if a particular flag or condition
    is found::

        from sqlalchemy import schema
        from sqlalchemy.ext.compiler import compiles


        @compiles(schema.CreateColumn)
        def compile(element, compiler, **kw):
            column = element.element

            if "special" not in column.info:
                return compiler.visit_create_column(element, **kw)

            text = "%s SPECIAL DIRECTIVE %s" % (
                column.name,
                compiler.type_compiler.process(column.type),
            )
            default = compiler.get_column_default_string(column)
            if default is not None:
                text += " DEFAULT " + default

            if not column.nullable:
                text += " NOT NULL"

            if column.constraints:
                text += " ".join(
                    compiler.process(const) for const in column.constraints
                )
            return text

    The above construct can be applied to a :class:`_schema.Table`
    as follows::

        from sqlalchemy import Table, MetaData, Column, Integer, String
        from sqlalchemy import schema

        metadata = MetaData()

        table = Table(
            "mytable",
            metadata,
            Column("x", Integer, info={"special": True}, primary_key=True),
            Column("y", String(50)),
            Column("z", String(20), info={"special": True}),
        )

        metadata.create_all(conn)

    Above, the directives we've added to the :attr:`_schema.Column.info`
    collection
    will be detected by our custom compilation scheme:

    .. sourcecode:: sql

        CREATE TABLE mytable (
            x SPECIAL DIRECTIVE INTEGER NOT NULL,
            y VARCHAR(50),
            z SPECIAL DIRECTIVE VARCHAR(20),
            PRIMARY KEY (x)
        )

    The :class:`.CreateColumn` construct can also be used to skip certain
    columns when producing a ``CREATE TABLE``.  This is accomplished by
    creating a compilation rule that conditionally returns ``None``.
    This is essentially how to produce the same effect as using the
    ``system=True`` argument on :class:`_schema.Column`, which marks a column
    as an implicitly-present "system" column.

    For example, suppose we wish to produce a :class:`_schema.Table`
    which skips
    rendering of the PostgreSQL ``xmin`` column against the PostgreSQL
    backend, but on other backends does render it, in anticipation of a
    triggered rule.  A conditional compilation rule could skip this name only
    on PostgreSQL::

        from sqlalchemy.schema import CreateColumn


        @compiles(CreateColumn, "postgresql")
        def skip_xmin(element, compiler, **kw):
            if element.element.name == "xmin":
                return None
            else:
                return compiler.visit_create_column(element, **kw)


        my_table = Table(
            "mytable",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("xmin", Integer),
        )

    Above, a :class:`.CreateTable` construct will generate a ``CREATE TABLE``
    which only includes the ``id`` column in the string; the ``xmin`` column
    will be omitted, but only against the PostgreSQL backend.

    """

    __visit_name__ = "create_column"

    element: Column[Any]

    def __init__(self, element: Column[Any]) -> None:
        self.element = element


class DropTable(_DropBase["Table"]):
    """Represent a DROP TABLE statement."""

    __visit_name__ = "drop_table"

    def __init__(self, element: Table, if_exists: bool = False) -> None:
        """Create a :class:`.DropTable` construct.

        :param element: a :class:`_schema.Table` that's the subject
         of the DROP.
        :param if_exists: if True, an IF EXISTS operator will be applied to
         the construct.

         .. versionadded:: 1.4.0b2

        """
        super().__init__(element, if_exists=if_exists)


class CreateSequence(_CreateBase["Sequence"]):
    """Represent a CREATE SEQUENCE statement."""

    __visit_name__ = "create_sequence"


class DropSequence(_DropBase["Sequence"]):
    """Represent a DROP SEQUENCE statement."""

    __visit_name__ = "drop_sequence"


class CreateIndex(_CreateBase["Index"]):
    """Represent a CREATE INDEX statement."""

    __visit_name__ = "create_index"

    def __init__(self, element: Index, if_not_exists: bool = False) -> None:
        """Create a :class:`.CreateIndex` construct.

        :param element: a :class:`_schema.Index` that's the subject
         of the CREATE.
        :param if_not_exists: if True, an IF NOT EXISTS operator will be
         applied to the construct.

         .. versionadded:: 1.4.0b2

        """
        super().__init__(element, if_not_exists=if_not_exists)


class DropIndex(_DropBase["Index"]):
    """Represent a DROP INDEX statement."""

    __visit_name__ = "drop_index"

    def __init__(self, element: Index, if_exists: bool = False) -> None:
        """Create a :class:`.DropIndex` construct.

        :param element: a :class:`_schema.Index` that's the subject
         of the DROP.
        :param if_exists: if True, an IF EXISTS operator will be applied to
         the construct.

         .. versionadded:: 1.4.0b2

        """
        super().__init__(element, if_exists=if_exists)


class AddConstraint(_CreateBase["Constraint"]):
    """Represent an ALTER TABLE ADD CONSTRAINT statement."""

    __visit_name__ = "add_constraint"

    def __init__(
        self,
        element: Constraint,
        *,
        isolate_from_table: bool = True,
    ) -> None:
        """Construct a new :class:`.AddConstraint` construct.

        :param element: a :class:`.Constraint` object

        :param isolate_from_table: optional boolean, defaults to True.  Has
         the effect of the incoming constraint being isolated from being
         included in a CREATE TABLE sequence when associated with a
         :class:`.Table`.

         .. versionadded:: 2.0.39 - added
            :paramref:`.AddConstraint.isolate_from_table`, defaulting
            to True.  Previously, the behavior of this parameter was
            implicitly turned on in all cases.

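        For example, a named constraint may be added to an existing table
        via ALTER (an illustrative sketch; ``users_table`` and ``engine``
        are assumed to be an existing :class:`_schema.Table` and
        :class:`_engine.Engine`)::

            from sqlalchemy import CheckConstraint
            from sqlalchemy.schema import AddConstraint

            # associate the constraint with the table, then emit it
            # separately as an ALTER TABLE ADD CONSTRAINT statement
            constraint = CheckConstraint("uid > 0", name="uid_positive")
            users_table.append_constraint(constraint)

            with engine.begin() as conn:
                conn.execute(AddConstraint(constraint))
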
        """
        super().__init__(element)

        if isolate_from_table:
            element._create_rule = util.portable_instancemethod(
                self._create_rule_disable
            )


class DropConstraint(_DropBase["Constraint"]):
    """Represent an ALTER TABLE DROP CONSTRAINT statement."""

    __visit_name__ = "drop_constraint"

    def __init__(
        self,
        element: Constraint,
        *,
        cascade: bool = False,
        if_exists: bool = False,
        isolate_from_table: bool = True,
        **kw: Any,
    ) -> None:
        """Construct a new :class:`.DropConstraint` construct.

        :param element: a :class:`.Constraint` object
        :param cascade: optional boolean, indicates backend-specific
         "CASCADE CONSTRAINT" directive should be rendered if available
        :param if_exists: optional boolean, indicates backend-specific
         "IF EXISTS" directive should be rendered if available
        :param isolate_from_table: optional boolean, defaults to True.  Has
         the effect of the incoming constraint being isolated from being
         included in a CREATE TABLE sequence when associated with a
         :class:`.Table`.

         .. versionadded:: 2.0.39 - added
            :paramref:`.DropConstraint.isolate_from_table`, defaulting
            to True.  Previously, the behavior of this parameter was
            implicitly turned on in all cases.

        """
        self.cascade = cascade
        super().__init__(element, if_exists=if_exists, **kw)

        if isolate_from_table:
            element._create_rule = util.portable_instancemethod(
                self._create_rule_disable
            )


class SetTableComment(_CreateDropBase["Table"]):
    """Represent a COMMENT ON TABLE IS statement."""

    __visit_name__ = "set_table_comment"


class DropTableComment(_CreateDropBase["Table"]):
    """Represent a COMMENT ON TABLE '' statement.

    Note this varies a lot across database backends.

    """

    __visit_name__ = "drop_table_comment"


class SetColumnComment(_CreateDropBase["Column[Any]"]):
    """Represent a COMMENT ON COLUMN IS statement."""

    __visit_name__ = "set_column_comment"


class DropColumnComment(_CreateDropBase["Column[Any]"]):
    """Represent a COMMENT ON COLUMN IS NULL statement."""

    __visit_name__ = "drop_column_comment"


class SetConstraintComment(_CreateDropBase["Constraint"]):
    """Represent a COMMENT ON CONSTRAINT IS statement."""

    __visit_name__ = "set_constraint_comment"


class DropConstraintComment(_CreateDropBase["Constraint"]):
    """Represent a COMMENT ON CONSTRAINT IS NULL statement."""

    __visit_name__ = "drop_constraint_comment"


class InvokeDDLBase(SchemaVisitor):
    def __init__(self, connection, **kw):
        self.connection = connection
        assert not kw, f"Unexpected keywords: {kw.keys()}"

    @contextlib.contextmanager
    def with_ddl_events(self, target, **kw):
        """helper context manager that will apply appropriate DDL events
        to a CREATE or DROP operation."""

        raise NotImplementedError()


class InvokeCreateDDLBase(InvokeDDLBase):
    @contextlib.contextmanager
    def with_ddl_events(self, target, **kw):
        """helper context manager that will apply appropriate DDL events
        to a CREATE or DROP operation."""

        target.dispatch.before_create(
            target, self.connection, _ddl_runner=self, **kw
        )
        yield
        target.dispatch.after_create(
            target, self.connection, _ddl_runner=self, **kw
        )


class InvokeDropDDLBase(InvokeDDLBase):
    @contextlib.contextmanager
    def with_ddl_events(self, target, **kw):
        """helper context manager that will apply appropriate DDL events
        to a CREATE or DROP operation."""

        target.dispatch.before_drop(
            target, self.connection, _ddl_runner=self, **kw
        )
        yield
        target.dispatch.after_drop(
            target, self.connection, _ddl_runner=self, **kw
        )


class SchemaGenerator(InvokeCreateDDLBase):
    def __init__(
        self, dialect, connection, checkfirst=False, tables=None, **kwargs
    ):
        super().__init__(connection, **kwargs)
        self.checkfirst = checkfirst
        self.tables = tables
        self.preparer = dialect.identifier_preparer
        self.dialect = dialect
        self.memo = {}

    def _can_create_table(self, table):
        self.dialect.validate_identifier(table.name)
        effective_schema = self.connection.schema_for_object(table)
        if effective_schema:
            self.dialect.validate_identifier(effective_schema)
        return not self.checkfirst or not self.dialect.has_table(
            self.connection, table.name, schema=effective_schema
        )

    def _can_create_index(self, index):
        effective_schema = self.connection.schema_for_object(index.table)
        if effective_schema:
            self.dialect.validate_identifier(effective_schema)
        return not self.checkfirst or not self.dialect.has_index(
            self.connection,
            index.table.name,
            index.name,
            schema=effective_schema,
        )

    def _can_create_sequence(self, sequence):
        effective_schema = self.connection.schema_for_object(sequence)

        return self.dialect.supports_sequences and (
            (not self.dialect.sequences_optional or not sequence.optional)
            and (
                not self.checkfirst
                or not self.dialect.has_sequence(
                    self.connection, sequence.name, schema=effective_schema
                )
            )
        )

    def visit_metadata(self, metadata):
        if self.tables is not None:
            tables = self.tables
        else:
            tables = list(metadata.tables.values())

        collection = sort_tables_and_constraints(
            [t for t in tables if self._can_create_table(t)]
        )

        seq_coll = [
            s
            for s in metadata._sequences.values()
            if s.column is None and self._can_create_sequence(s)
        ]

        event_collection = [t for (t, fks) in collection if t is not None]

        with self.with_ddl_events(
            metadata,
            tables=event_collection,
            checkfirst=self.checkfirst,
        ):
            for seq in seq_coll:
                self.traverse_single(seq, create_ok=True)

            for table, fkcs in collection:
                if table is not None:
                    self.traverse_single(
                        table,
                        create_ok=True,
                        include_foreign_key_constraints=fkcs,
                        _is_metadata_operation=True,
                    )
                else:
                    for fkc in fkcs:
                        self.traverse_single(fkc)

    def visit_table(
        self,
        table,
        create_ok=False,
        include_foreign_key_constraints=None,
        _is_metadata_operation=False,
    ):
        if not create_ok and not self._can_create_table(table):
            return

        with self.with_ddl_events(
            table,
            checkfirst=self.checkfirst,
            _is_metadata_operation=_is_metadata_operation,
        ):
            for column in table.columns:
                if column.default is not None:
                    self.traverse_single(column.default)

            if not self.dialect.supports_alter:
                # e.g., don't omit any foreign key constraints
                include_foreign_key_constraints = None

            CreateTable(
                table,
                include_foreign_key_constraints=(
                    include_foreign_key_constraints
                ),
            )._invoke_with(self.connection)

            if hasattr(table, "indexes"):
                for index in table.indexes:
                    self.traverse_single(index, create_ok=True)

            if (
                self.dialect.supports_comments
                and not self.dialect.inline_comments
            ):
                if table.comment is not None:
                    SetTableComment(table)._invoke_with(self.connection)

                for column in table.columns:
                    if column.comment is not None:
                        SetColumnComment(column)._invoke_with(self.connection)

                if self.dialect.supports_constraint_comments:
                    for constraint in table.constraints:
                        if constraint.comment is not None:
                            self.connection.execute(
                                SetConstraintComment(constraint)
                            )

    def visit_foreign_key_constraint(self, constraint):
        if not self.dialect.supports_alter:
            return

        with self.with_ddl_events(constraint):
            AddConstraint(constraint)._invoke_with(self.connection)

    def visit_sequence(self, sequence, create_ok=False):
        if not create_ok and not self._can_create_sequence(sequence):
            return
        with self.with_ddl_events(sequence):
            CreateSequence(sequence)._invoke_with(self.connection)

    def visit_index(self, index, create_ok=False):
        if not create_ok and not self._can_create_index(index):
            return
        with self.with_ddl_events(index):
            CreateIndex(index)._invoke_with(self.connection)


class SchemaDropper(InvokeDropDDLBase):
    def __init__(
        self, dialect, connection, checkfirst=False, tables=None, **kwargs
    ):
        super().__init__(connection, **kwargs)
        self.checkfirst = checkfirst
        self.tables = tables
        self.preparer = dialect.identifier_preparer
        self.dialect = dialect
        self.memo = {}

    def visit_metadata(self, metadata):
        if self.tables is not None:
            tables = self.tables
        else:
            tables = list(metadata.tables.values())

        try:
            unsorted_tables = [t for t in tables if self._can_drop_table(t)]
            collection = list(
                reversed(
                    sort_tables_and_constraints(
                        unsorted_tables,
                        filter_fn=lambda constraint: (
                            False
                            if not self.dialect.supports_alter
                            or constraint.name is None
                            else None
                        ),
                    )
                )
            )
        except exc.CircularDependencyError as err2:
            if not self.dialect.supports_alter:
                util.warn(
                    "Can't sort tables for DROP; an "
                    "unresolvable foreign key "
                    "dependency exists between tables: %s; and backend does "
                    "not support ALTER.  To restore at least a partial sort, "
                    "apply use_alter=True to ForeignKey and "
                    "ForeignKeyConstraint "
                    "objects involved in the cycle to mark these as known "
                    "cycles that will be ignored."
                    % (", ".join(sorted([t.fullname for t in err2.cycles])))
                )
                collection = [(t, ()) for t in unsorted_tables]
            else:
                raise exc.CircularDependencyError(
                    err2.args[0],
                    err2.cycles,
                    err2.edges,
                    msg="Can't sort tables for DROP; an "
                    "unresolvable foreign key "
                    "dependency exists between tables: %s.  Please ensure "
                    "that the ForeignKey and ForeignKeyConstraint objects "
                    "involved in the cycle have "
                    "names so that they can be dropped using "
                    "DROP CONSTRAINT."
                    % (", ".join(sorted([t.fullname for t in err2.cycles]))),
                ) from err2

        seq_coll = [
            s
            for s in metadata._sequences.values()
            if self._can_drop_sequence(s)
        ]

        event_collection = [t for (t, fks) in collection if t is not None]

        with self.with_ddl_events(
            metadata,
            tables=event_collection,
            checkfirst=self.checkfirst,
        ):
            for table, fkcs in collection:
                if table is not None:
                    self.traverse_single(
                        table,
                        drop_ok=True,
                        _is_metadata_operation=True,
                        _ignore_sequences=seq_coll,
                    )
                else:
                    for fkc in fkcs:
                        self.traverse_single(fkc)

            for seq in seq_coll:
                self.traverse_single(seq, drop_ok=seq.column is None)

    def _can_drop_table(self, table):
        self.dialect.validate_identifier(table.name)
        effective_schema = self.connection.schema_for_object(table)
        if effective_schema:
            self.dialect.validate_identifier(effective_schema)
        return not self.checkfirst or self.dialect.has_table(
            self.connection, table.name, schema=effective_schema
        )

    def _can_drop_index(self, index):
        effective_schema = self.connection.schema_for_object(index.table)
        if effective_schema:
            self.dialect.validate_identifier(effective_schema)
        return not self.checkfirst or self.dialect.has_index(
            self.connection,
            index.table.name,
            index.name,
            schema=effective_schema,
        )

    def _can_drop_sequence(self, sequence):
        effective_schema = self.connection.schema_for_object(sequence)
        return self.dialect.supports_sequences and (
            (not self.dialect.sequences_optional or not sequence.optional)
            and (
                not self.checkfirst
                or self.dialect.has_sequence(
                    self.connection, sequence.name, schema=effective_schema
                )
            )
        )

    def visit_index(self, index, drop_ok=False):
        if not drop_ok and not self._can_drop_index(index):
            return

        with self.with_ddl_events(index):
            DropIndex(index)(index, self.connection)

    def visit_table(
        self,
        table,
        drop_ok=False,
        _is_metadata_operation=False,
        _ignore_sequences=(),
    ):
        if not drop_ok and not self._can_drop_table(table):
            return

        with self.with_ddl_events(
            table,
            checkfirst=self.checkfirst,
            _is_metadata_operation=_is_metadata_operation,
        ):
            DropTable(table)._invoke_with(self.connection)

            # traverse client side defaults which may refer to server-side
            # sequences.  noting that some of these client side defaults may
            # also be set up as server side defaults
            # (see https://docs.sqlalchemy.org/en/
            # latest/core/defaults.html
            # #associating-a-sequence-as-the-server-side-
            # default), so have to be dropped after the table is dropped.
            for column in table.columns:
                if (
                    column.default is not None
                    and column.default not in _ignore_sequences
                ):
                    self.traverse_single(column.default)

    def visit_foreign_key_constraint(self, constraint):
        if not self.dialect.supports_alter:
            return
        with self.with_ddl_events(constraint):
            DropConstraint(constraint)._invoke_with(self.connection)

    def visit_sequence(self, sequence, drop_ok=False):
        if not drop_ok and not self._can_drop_sequence(sequence):
            return
        with self.with_ddl_events(sequence):
            DropSequence(sequence)._invoke_with(self.connection)


def sort_tables(
    tables: Iterable[TableClause],
    skip_fn: Optional[Callable[[ForeignKeyConstraint], bool]] = None,
    extra_dependencies: Optional[
        typing_Sequence[Tuple[TableClause, TableClause]]
    ] = None,
) -> List[Table]:
    """Sort a collection of :class:`_schema.Table` objects based on
    dependency.

    This is a dependency-ordered sort which will emit :class:`_schema.Table`
    objects such that they will follow their dependent :class:`_schema.Table`
    objects.
    Tables are dependent on another based on the presence of
    :class:`_schema.ForeignKeyConstraint`
    objects as well as explicit dependencies
    added by :meth:`_schema.Table.add_is_dependent_on`.

    .. warning::

        The :func:`._schema.sort_tables` function cannot by itself
        accommodate automatic resolution of dependency cycles between
        tables, which are usually caused by mutually dependent foreign key
        constraints.  When these cycles are detected, the foreign keys
        of these tables are omitted from consideration in the sort.
        A warning is emitted when this condition occurs, which will be an
        exception raise in a future release.  Tables which are not part
        of the cycle will still be returned in dependency order.

        To resolve these cycles, the
        :paramref:`_schema.ForeignKeyConstraint.use_alter` parameter may be
        applied to those constraints which create a cycle.  Alternatively,
        the :func:`_schema.sort_tables_and_constraints` function will
        automatically return foreign key constraints in a separate
        collection when cycles are detected so that they may be applied
        to a schema separately.

    .. versionchanged:: 1.3.17 - a warning is emitted when
       :func:`_schema.sort_tables` cannot perform a proper sort due to
       cyclical dependencies.  This will be an exception in a future
       release.  Additionally, the sort will continue to return
       other tables not involved in the cycle in dependency order
       which was not the case previously.

    :param tables: a sequence of :class:`_schema.Table` objects.

    :param skip_fn: optional callable which will be passed a
     :class:`_schema.ForeignKey` object; if it returns True, this
     constraint will not be considered as a dependency.  Note this is
     **different** from the same parameter in
     :func:`.sort_tables_and_constraints`, which is
     instead passed the owning :class:`_schema.ForeignKeyConstraint` object.

    :param extra_dependencies: a sequence of 2-tuples of tables which will
     also be considered as dependent on each other.

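    For example, given a hypothetical pair of tables where ``addresses``
    references ``users``, the sort emits ``users`` first (an illustrative
    sketch)::

        from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table
        from sqlalchemy.schema import sort_tables

        metadata = MetaData()
        users = Table(
            "users", metadata, Column("id", Integer, primary_key=True)
        )
        addresses = Table(
            "addresses",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("user_id", Integer, ForeignKey("users.id")),
        )

        print([t.name for t in sort_tables([addresses, users])])
        # ['users', 'addresses']
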
    .. seealso::

        :func:`.sort_tables_and_constraints`

        :attr:`_schema.MetaData.sorted_tables` - uses this function to sort


    """

    if skip_fn is not None:
        fixed_skip_fn = skip_fn

        def _skip_fn(fkc):
            for fk in fkc.elements:
                if fixed_skip_fn(fk):
                    return True
            else:
                return None

    else:
        _skip_fn = None  # type: ignore

    return [
        t
        for (t, fkcs) in sort_tables_and_constraints(
            tables,
            filter_fn=_skip_fn,
            extra_dependencies=extra_dependencies,
            _warn_for_cycles=True,
        )
        if t is not None
    ]


def sort_tables_and_constraints(
    tables, filter_fn=None, extra_dependencies=None, _warn_for_cycles=False
):
    """Sort a collection of :class:`_schema.Table` /
    :class:`_schema.ForeignKeyConstraint`
    objects.

    This is a dependency-ordered sort which will emit tuples of
    ``(Table, [ForeignKeyConstraint, ...])`` such that each
    :class:`_schema.Table` follows its dependent :class:`_schema.Table`
    objects.
    Remaining :class:`_schema.ForeignKeyConstraint`
    objects that are separate due to
    dependency rules not satisfied by the sort are emitted afterwards
    as ``(None, [ForeignKeyConstraint ...])``.

    Tables are dependent on another based on the presence of
    :class:`_schema.ForeignKeyConstraint` objects, explicit dependencies
    added by :meth:`_schema.Table.add_is_dependent_on`,
    as well as dependencies
    stated here using the :paramref:`~.sort_tables_and_constraints.filter_fn`
    and/or :paramref:`~.sort_tables_and_constraints.extra_dependencies`
    parameters.

    :param tables: a sequence of :class:`_schema.Table` objects.

    :param filter_fn: optional callable which will be passed a
     :class:`_schema.ForeignKeyConstraint` object,
     and returns a value based on
     whether this constraint should definitely be included or excluded as
     an inline constraint, or neither.  If it returns False, the constraint
     will definitely be included as a dependency that cannot be subject
     to ALTER; if True, it will **only** be included as an ALTER result at
     the end.  Returning None means the constraint is included in the
     table-based result unless it is detected as part of a dependency cycle.

    :param extra_dependencies: a sequence of 2-tuples of tables which will
     also be considered as dependent on each other.

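    For example, iterating the result to render CREATE TABLE statements
    followed by ALTER statements for any remaining constraints (an
    illustrative sketch; ``metadata`` is assumed to be a populated
    :class:`_schema.MetaData`)::

        from sqlalchemy.schema import (
            AddConstraint,
            CreateTable,
            sort_tables_and_constraints,
        )

        collection = sort_tables_and_constraints(metadata.tables.values())

        for table, fkcs in collection:
            if table is not None:
                # constraints in ``fkcs`` are safe to render inline
                print(
                    CreateTable(table, include_foreign_key_constraints=fkcs)
                )
            else:
                # leftover constraints must be emitted via ALTER
                for fkc in fkcs:
                    print(AddConstraint(fkc))
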
    .. seealso::

        :func:`.sort_tables`


    """

    fixed_dependencies = set()
    mutable_dependencies = set()

    if extra_dependencies is not None:
        fixed_dependencies.update(extra_dependencies)

    remaining_fkcs = set()
    for table in tables:
        for fkc in table.foreign_key_constraints:
            if fkc.use_alter is True:
                remaining_fkcs.add(fkc)
                continue

            if filter_fn:
                filtered = filter_fn(fkc)

                if filtered is True:
                    remaining_fkcs.add(fkc)
                    continue

            dependent_on = fkc.referred_table
            if dependent_on is not table:
                mutable_dependencies.add((dependent_on, table))

        fixed_dependencies.update(
            (parent, table) for parent in table._extra_dependencies
        )

    try:
        candidate_sort = list(
            topological.sort(
                fixed_dependencies.union(mutable_dependencies),
                tables,
            )
        )
    except exc.CircularDependencyError as err:
        if _warn_for_cycles:
            util.warn(
                "Cannot correctly sort tables; there are unresolvable cycles "
                'between tables "%s", which is usually caused by mutually '
                "dependent foreign key constraints.  Foreign key constraints "
                "involving these tables will not be considered; this warning "
                "may raise an error in a future release."
                % (", ".join(sorted(t.fullname for t in err.cycles)),)
            )
        for edge in err.edges:
            if edge in mutable_dependencies:
                table = edge[1]
                if table not in err.cycles:
                    continue
                can_remove = [
                    fkc
                    for fkc in table.foreign_key_constraints
                    if filter_fn is None or filter_fn(fkc) is not False
                ]
                remaining_fkcs.update(can_remove)
                for fkc in can_remove:
                    dependent_on = fkc.referred_table
                    if dependent_on is not table:
                        mutable_dependencies.discard((dependent_on, table))
        candidate_sort = list(
            topological.sort(
                fixed_dependencies.union(mutable_dependencies),
                tables,
            )
        )

    return [
        (table, table.foreign_key_constraints.difference(remaining_fkcs))
        for table in candidate_sort
    ] + [(None, list(remaining_fkcs))]