1# sql/ddl.py
2# Copyright (C) 2009-2024 the SQLAlchemy authors and contributors
3# <see AUTHORS file>
4#
5# This module is part of SQLAlchemy and is released under
6# the MIT License: https://www.opensource.org/licenses/mit-license.php
7# mypy: allow-untyped-defs, allow-untyped-calls
8
9"""
10Provides the hierarchy of DDL-defining schema items as well as routines
11to invoke them for a create/drop call.
12
13"""
14from __future__ import annotations
15
16import contextlib
17import typing
18from typing import Any
19from typing import Callable
20from typing import Iterable
21from typing import List
22from typing import Optional
23from typing import Protocol
24from typing import Sequence as typing_Sequence
25from typing import Tuple
26
27from . import roles
28from .base import _generative
29from .base import Executable
30from .base import SchemaVisitor
31from .elements import ClauseElement
32from .. import exc
33from .. import util
34from ..util import topological
35from ..util.typing import Self
36
37if typing.TYPE_CHECKING:
38 from .compiler import Compiled
39 from .compiler import DDLCompiler
40 from .elements import BindParameter
41 from .schema import Constraint
42 from .schema import ForeignKeyConstraint
43 from .schema import SchemaItem
44 from .schema import Sequence
45 from .schema import Table
46 from .selectable import TableClause
47 from ..engine.base import Connection
48 from ..engine.interfaces import CacheStats
49 from ..engine.interfaces import CompiledCacheType
50 from ..engine.interfaces import Dialect
51 from ..engine.interfaces import SchemaTranslateMapType
52
53
class BaseDDLElement(ClauseElement):
    """The root of DDL constructs, including those that are sub-elements
    within the "create table" and other processes.

    .. versionadded:: 2.0

    """

    _hierarchy_supports_caching = False
    """disable cache warnings for all _DDLCompiles subclasses. """

    def _compiler(self, dialect, **kw):
        """Return a compiler appropriate for this ClauseElement, given a
        Dialect."""

        # DDL constructs compile using the dialect's DDL compiler rather
        # than its SQL statement compiler
        return dialect.ddl_compiler(dialect, self, **kw)

    def _compile_w_cache(
        self,
        dialect: Dialect,
        *,
        compiled_cache: Optional[CompiledCacheType],
        column_keys: List[str],
        for_executemany: bool = False,
        schema_translate_map: Optional[SchemaTranslateMapType] = None,
        **kw: Any,
    ) -> Tuple[
        Compiled, Optional[typing_Sequence[BindParameter[Any]]], CacheStats
    ]:
        # compiled-cache support is not implemented for DDL constructs;
        # see also _hierarchy_supports_caching above
        raise NotImplementedError()
84
85
class DDLIfCallable(Protocol):
    """Typing protocol for the ``callable_`` accepted by
    :meth:`.ExecutableDDLElement.execute_if`.

    The callable is invoked by :meth:`.DDLIf._should_execute`; a True
    return value indicates the DDL statement should proceed.

    """

    def __call__(
        self,
        ddl: BaseDDLElement,
        target: SchemaItem,
        bind: Optional[Connection],
        tables: Optional[List[Table]] = None,
        state: Optional[Any] = None,
        *,
        dialect: Dialect,
        compiler: Optional[DDLCompiler] = ...,
        checkfirst: bool,
    ) -> bool: ...
99
100
class DDLIf(typing.NamedTuple):
    """Criteria used to conditionally filter DDL execution, as
    established by :meth:`.ExecutableDDLElement.execute_if`."""

    dialect: Optional[str]
    callable_: Optional[DDLIfCallable]
    state: Optional[Any]

    def _should_execute(
        self,
        ddl: BaseDDLElement,
        target: SchemaItem,
        bind: Optional[Connection],
        compiler: Optional[DDLCompiler] = None,
        **kw: Any,
    ) -> bool:
        """Return True if the given DDL should be invoked under these
        criteria."""

        # the effective dialect comes either from the connection or from
        # an inline DDL compiler
        if bind is not None:
            effective_dialect = bind.dialect
        elif compiler is not None:
            effective_dialect = compiler.dialect
        else:
            assert False, "compiler or dialect is required"

        criteria = self.dialect
        if isinstance(criteria, str):
            if effective_dialect.name != criteria:
                return False
        elif isinstance(criteria, (tuple, list, set)):
            if effective_dialect.name not in criteria:
                return False

        if self.callable_ is None:
            return True

        # the user-supplied callable has the final word
        return bool(
            self.callable_(
                ddl,
                target,
                bind,
                state=self.state,
                dialect=effective_dialect,
                compiler=compiler,
                **kw,
            )
        )
139
140
class ExecutableDDLElement(roles.DDLRole, Executable, BaseDDLElement):
    """Base class for standalone executable DDL expression constructs.

    This class is the base for the general purpose :class:`.DDL` class,
    as well as the various create/drop clause constructs such as
    :class:`.CreateTable`, :class:`.DropTable`, :class:`.AddConstraint`,
    etc.

    .. versionchanged:: 2.0 :class:`.ExecutableDDLElement` is renamed from
       :class:`.DDLElement`, which still exists for backwards compatibility.

    :class:`.ExecutableDDLElement` integrates closely with SQLAlchemy events,
    introduced in :ref:`event_toplevel`.  An instance of one is
    itself an event receiving callable::

        event.listen(
            users,
            'after_create',
            AddConstraint(constraint).execute_if(dialect='postgresql')
        )

    .. seealso::

        :class:`.DDL`

        :class:`.DDLEvents`

        :ref:`event_toplevel`

        :ref:`schema_ddl_sequences`

    """

    # conditional execution criteria established by execute_if();
    # consulted by _should_execute()
    _ddl_if: Optional[DDLIf] = None

    # the schema item this DDL acts upon; set via against()
    target: Optional[SchemaItem] = None

    def _execute_on_connection(
        self, connection, distilled_params, execution_options
    ):
        # route execution through the Connection's dedicated DDL path
        return connection._execute_ddl(
            self, distilled_params, execution_options
        )

    @_generative
    def against(self, target: SchemaItem) -> Self:
        """Return a copy of this :class:`_schema.ExecutableDDLElement` which
        will include the given target.

        This essentially applies the given item to the ``.target`` attribute of
        the returned :class:`_schema.ExecutableDDLElement` object.  This target
        is then usable by event handlers and compilation routines in order to
        provide services such as tokenization of a DDL string in terms of a
        particular :class:`_schema.Table`.

        When a :class:`_schema.ExecutableDDLElement` object is established as
        an event handler for the :meth:`_events.DDLEvents.before_create` or
        :meth:`_events.DDLEvents.after_create` events, and the event then
        occurs for a given target such as a :class:`_schema.Constraint` or
        :class:`_schema.Table`, that target is established with a copy of the
        :class:`_schema.ExecutableDDLElement` object using this method, which
        then proceeds to the :meth:`_schema.ExecutableDDLElement.execute`
        method in order to invoke the actual DDL instruction.

        :param target: a :class:`_schema.SchemaItem` that will be the subject
         of a DDL operation.

        :return: a copy of this :class:`_schema.ExecutableDDLElement` with the
         ``.target`` attribute assigned to the given
         :class:`_schema.SchemaItem`.

        .. seealso::

            :class:`_schema.DDL` - uses tokenization against the "target" when
            processing the DDL string.

        """
        # note: @_generative means this mutates and returns a copy made
        # by _generate(), not the original object
        self.target = target
        return self

    @_generative
    def execute_if(
        self,
        dialect: Optional[str] = None,
        callable_: Optional[DDLIfCallable] = None,
        state: Optional[Any] = None,
    ) -> Self:
        r"""Return a callable that will execute this
        :class:`_ddl.ExecutableDDLElement` conditionally within an event
        handler.

        Used to provide a wrapper for event listening::

            event.listen(
                metadata,
                'before_create',
                DDL("my_ddl").execute_if(dialect='postgresql')
            )

        :param dialect: May be a string or tuple of strings.
          If a string, it will be compared to the name of the
          executing database dialect::

            DDL('something').execute_if(dialect='postgresql')

          If a tuple, specifies multiple dialect names::

            DDL('something').execute_if(dialect=('postgresql', 'mysql'))

        :param callable\_: A callable, which will be invoked with
          three positional arguments as well as optional keyword
          arguments:

            :ddl:
              This DDL element.

            :target:
              The :class:`_schema.Table` or :class:`_schema.MetaData`
              object which is the
              target of this event. May be None if the DDL is executed
              explicitly.

            :bind:
              The :class:`_engine.Connection` being used for DDL execution.
              May be None if this construct is being created inline within
              a table, in which case ``compiler`` will be present.

            :tables:
              Optional keyword argument - a list of Table objects which are to
              be created/ dropped within a MetaData.create_all() or drop_all()
              method call.

            :dialect: keyword argument, but always present - the
              :class:`.Dialect` involved in the operation.

            :compiler: keyword argument.  Will be ``None`` for an engine
              level DDL invocation, but will refer to a :class:`.DDLCompiler`
              if this DDL element is being created inline within a table.

            :state:
              Optional keyword argument - will be the ``state`` argument
              passed to this function.

            :checkfirst:
              Keyword argument, will be True if the 'checkfirst' flag was
              set during the call to ``create()``, ``create_all()``,
              ``drop()``, ``drop_all()``.

          If the callable returns a True value, the DDL statement will be
          executed.

        :param state: any value which will be passed to the callable\_
          as the ``state`` keyword argument.

        .. seealso::

            :meth:`.SchemaItem.ddl_if`

            :class:`.DDLEvents`

            :ref:`event_toplevel`

        """
        # criteria are stored as a DDLIf and evaluated at invocation time
        # by _should_execute()
        self._ddl_if = DDLIf(dialect, callable_, state)
        return self

    def _should_execute(self, target, bind, **kw):
        # no criteria set means unconditional execution
        if self._ddl_if is None:
            return True
        else:
            return self._ddl_if._should_execute(self, target, bind, **kw)

    def _invoke_with(self, bind):
        # execute only when the DDLIf criteria (if any) pass
        if self._should_execute(self.target, bind):
            return bind.execute(self)

    def __call__(self, target, bind, **kw):
        """Execute the DDL as a ddl_listener."""

        # bind the event's target onto a copy of this element, then invoke
        self.against(target)._invoke_with(bind)

    def _generate(self):
        # shallow copy; used by the @_generative methods against() and
        # execute_if() so the original element is left unmodified
        s = self.__class__.__new__(self.__class__)
        s.__dict__ = self.__dict__.copy()
        return s
325
326
# backwards-compatibility alias; the class was renamed to
# ExecutableDDLElement in SQLAlchemy 2.0 (see its version note above)
DDLElement = ExecutableDDLElement
""":class:`.DDLElement` is renamed to :class:`.ExecutableDDLElement`."""
329
330
class DDL(ExecutableDDLElement):
    """A literal DDL statement.

    Specifies literal SQL DDL to be executed by the database.  DDL objects
    function as DDL event listeners, and can be subscribed to those events
    listed in :class:`.DDLEvents`, using either :class:`_schema.Table` or
    :class:`_schema.MetaData` objects as targets.
    Basic templating support allows
    a single DDL instance to handle repetitive tasks for multiple tables.

    Examples::

        from sqlalchemy import event, DDL

        tbl = Table('users', metadata, Column('uid', Integer))
        event.listen(tbl, 'before_create', DDL('DROP TRIGGER users_trigger'))

        spow = DDL('ALTER TABLE %(table)s SET secretpowers TRUE')
        event.listen(tbl, 'after_create', spow.execute_if(dialect='somedb'))

        drop_spow = DDL('ALTER TABLE users SET secretpowers FALSE')
        connection.execute(drop_spow)

    When operating on Table events, the following ``statement``
    string substitutions are available::

        %(table)s - the Table name, with any required quoting applied
        %(schema)s - the schema name, with any required quoting applied
        %(fullname)s - the Table name including schema, quoted if needed

    The DDL's "context", if any, will be combined with the standard
    substitutions noted above.  Keys present in the context will override
    the standard substitutions.

    """

    __visit_name__ = "ddl"

    def __init__(self, statement, context=None):
        """Create a DDL statement.

        :param statement:
          A string or unicode string to be executed.  Statements will be
          processed with Python's string formatting operator using
          a fixed set of string substitutions, as well as additional
          substitutions provided by the optional :paramref:`.DDL.context`
          parameter.

          A literal '%' in a statement must be escaped as '%%'.

          SQL bind parameters are not available in DDL statements.

        :param context:
          Optional dictionary, defaults to None.  These values will be
          available for use in string substitutions on the DDL statement.

        .. seealso::

            :class:`.DDLEvents`

            :ref:`event_toplevel`

        """

        # reject non-string statements up front rather than failing
        # obscurely at compile time
        if not isinstance(statement, str):
            raise exc.ArgumentError(
                "Expected a string or unicode SQL statement, got '%r'"
                % statement
            )

        self.statement = statement
        # an empty/None context normalizes to a fresh empty dict
        self.context = context if context else {}

    def __repr__(self):
        details = [repr(self.statement)]
        if self.context:
            details.append(f"context={self.context}")
        body = ", ".join(details)
        return f"<{type(self).__name__}@{id(self)}; {body}>"
414
415
class _CreateDropBase(ExecutableDDLElement):
    """Base class for DDL constructs that represent CREATE and DROP or
    equivalents.

    The common theme of _CreateDropBase is a single
    ``element`` attribute which refers to the element
    to be created or dropped.

    """

    def __init__(
        self,
        element,
    ):
        # the element is also the DDL target by default; any ddl_if
        # criteria present on the element are carried over so that
        # _should_execute() honors them
        self.element = self.target = element
        self._ddl_if = getattr(element, "_ddl_if", None)

    @property
    def stringify_dialect(self):
        # defer to the element to determine the dialect used for
        # str() output of this construct
        return self.element.create_drop_stringify_dialect

    def _create_rule_disable(self, compiler):
        """Allow disable of _create_rule using a callable.

        Pass to _create_rule using
        util.portable_instancemethod(self._create_rule_disable)
        to retain serializability.

        """
        return False
446
447
class _CreateBase(_CreateDropBase):
    """Adds the ``if_not_exists`` flag common to CREATE constructs."""

    def __init__(self, element, if_not_exists=False):
        super().__init__(element)
        # when True, an IF NOT EXISTS operator is applied to the construct
        self.if_not_exists = if_not_exists
452
453
class _DropBase(_CreateDropBase):
    """Adds the ``if_exists`` flag common to DROP constructs."""

    def __init__(self, element, if_exists=False):
        super().__init__(element)
        # when True, an IF EXISTS operator is applied to the construct
        self.if_exists = if_exists
458
459
class CreateSchema(_CreateBase):
    """Represent a CREATE SCHEMA statement.

    The argument here is the string name of the schema.

    """

    __visit_name__ = "create_schema"

    stringify_dialect = "default"

    def __init__(
        self,
        name,
        if_not_exists=False,
    ):
        """Create a new :class:`.CreateSchema` construct.

        :param name: string name of the schema to create.
        :param if_not_exists: if True, an IF NOT EXISTS operator will be
         applied to the construct.
        """

        super().__init__(element=name, if_not_exists=if_not_exists)
479
480
class DropSchema(_DropBase):
    """Represent a DROP SCHEMA statement.

    The argument here is the string name of the schema.

    """

    __visit_name__ = "drop_schema"

    stringify_dialect = "default"

    def __init__(
        self,
        name,
        cascade=False,
        if_exists=False,
    ):
        """Create a new :class:`.DropSchema` construct.

        :param name: string name of the schema to drop.
        :param cascade: flag stored for the DDL compiler; presumably
         renders a CASCADE clause where the dialect supports it — confirm
         per-dialect.
        :param if_exists: if True, an IF EXISTS operator will be applied
         to the construct.
        """

        super().__init__(element=name, if_exists=if_exists)
        self.cascade = cascade
502
503
class CreateTable(_CreateBase):
    """Represent a CREATE TABLE statement."""

    __visit_name__ = "create_table"

    def __init__(
        self,
        element: Table,
        include_foreign_key_constraints: Optional[
            typing_Sequence[ForeignKeyConstraint]
        ] = None,
        if_not_exists: bool = False,
    ):
        """Create a :class:`.CreateTable` construct.

        :param element: a :class:`_schema.Table` that's the subject
         of the CREATE
        :param include_foreign_key_constraints: optional sequence of
         :class:`_schema.ForeignKeyConstraint` objects that will be included
         inline within the CREATE construct; if omitted, all foreign key
         constraints that do not specify use_alter=True are included.

        :param if_not_exists: if True, an IF NOT EXISTS operator will be
         applied to the construct.

         .. versionadded:: 1.4.0b2

        """
        super().__init__(element, if_not_exists=if_not_exists)
        # pre-build CreateColumn sub-elements for each column, allowing
        # compiler extensions to process columns individually (see
        # CreateColumn)
        self.columns = [CreateColumn(column) for column in element.columns]
        self.include_foreign_key_constraints = include_foreign_key_constraints
536
537
class _DropView(_DropBase):
    """Semi-public 'DROP VIEW' construct.

    Used by the test suite for dialect-agnostic drops of views.
    This object will eventually be part of a public "view" API.

    """

    # dispatches to the DDL compiler via the "drop_view" visit name
    __visit_name__ = "drop_view"
547
548
class CreateConstraint(BaseDDLElement):
    """DDL sub-element representing a :class:`_schema.Constraint`.

    Subclasses :class:`.BaseDDLElement` rather than
    :class:`.ExecutableDDLElement`, i.e. it is a component of a larger
    CREATE construct rather than a standalone statement.
    """

    def __init__(self, element: Constraint):
        # the Constraint to be rendered
        self.element = element
552
553
class CreateColumn(BaseDDLElement):
    """Represent a :class:`_schema.Column`
    as rendered in a CREATE TABLE statement,
    via the :class:`.CreateTable` construct.

    This is provided to support custom column DDL within the generation
    of CREATE TABLE statements, by using the
    compiler extension documented in :ref:`sqlalchemy.ext.compiler_toplevel`
    to extend :class:`.CreateColumn`.

    Typical integration is to examine the incoming :class:`_schema.Column`
    object, and to redirect compilation if a particular flag or condition
    is found::

        from sqlalchemy import schema
        from sqlalchemy.ext.compiler import compiles

        @compiles(schema.CreateColumn)
        def compile(element, compiler, **kw):
            column = element.element

            if "special" not in column.info:
                return compiler.visit_create_column(element, **kw)

            text = "%s SPECIAL DIRECTIVE %s" % (
                column.name,
                compiler.type_compiler.process(column.type)
            )
            default = compiler.get_column_default_string(column)
            if default is not None:
                text += " DEFAULT " + default

            if not column.nullable:
                text += " NOT NULL"

            if column.constraints:
                text += " ".join(
                    compiler.process(const)
                    for const in column.constraints)
            return text

    The above construct can be applied to a :class:`_schema.Table`
    as follows::

        from sqlalchemy import Table, MetaData, Column, Integer, String
        from sqlalchemy import schema

        metadata = MetaData()

        table = Table('mytable', metadata,
                Column('x', Integer, info={"special":True}, primary_key=True),
                Column('y', String(50)),
                Column('z', String(20), info={"special":True})
            )

        metadata.create_all(conn)

    Above, the directives we've added to the :attr:`_schema.Column.info`
    collection
    will be detected by our custom compilation scheme::

        CREATE TABLE mytable (
                x SPECIAL DIRECTIVE INTEGER NOT NULL,
                y VARCHAR(50),
                z SPECIAL DIRECTIVE VARCHAR(20),
            PRIMARY KEY (x)
        )

    The :class:`.CreateColumn` construct can also be used to skip certain
    columns when producing a ``CREATE TABLE``.  This is accomplished by
    creating a compilation rule that conditionally returns ``None``.
    This is essentially how to produce the same effect as using the
    ``system=True`` argument on :class:`_schema.Column`, which marks a column
    as an implicitly-present "system" column.

    For example, suppose we wish to produce a :class:`_schema.Table`
    which skips
    rendering of the PostgreSQL ``xmin`` column against the PostgreSQL
    backend, but on other backends does render it, in anticipation of a
    triggered rule.  A conditional compilation rule could skip this name only
    on PostgreSQL::

        from sqlalchemy.schema import CreateColumn

        @compiles(CreateColumn, "postgresql")
        def skip_xmin(element, compiler, **kw):
            if element.element.name == 'xmin':
                return None
            else:
                return compiler.visit_create_column(element, **kw)


        my_table = Table('mytable', metadata,
                    Column('id', Integer, primary_key=True),
                    Column('xmin', Integer)
                )

    Above, a :class:`.CreateTable` construct will generate a ``CREATE TABLE``
    which only includes the ``id`` column in the string; the ``xmin`` column
    will be omitted, but only against the PostgreSQL backend.

    """

    __visit_name__ = "create_column"

    def __init__(self, element):
        # the Column to be rendered
        self.element = element
661
662
class DropTable(_DropBase):
    """Represent a DROP TABLE statement."""

    __visit_name__ = "drop_table"

    def __init__(self, element: Table, if_exists: bool = False):
        """Create a :class:`.DropTable` construct.

        :param element: a :class:`_schema.Table` that's the subject
         of the DROP.
        :param if_exists: if True, an IF EXISTS operator will be applied to the
         construct.

         .. versionadded:: 1.4.0b2

        """
        super().__init__(element, if_exists=if_exists)
681
682
class CreateSequence(_CreateBase):
    """Represent a CREATE SEQUENCE statement."""

    __visit_name__ = "create_sequence"

    def __init__(self, element: Sequence, if_not_exists: bool = False):
        """Create a :class:`.CreateSequence` construct.

        :param element: a :class:`_schema.Sequence` that's the subject
         of the CREATE.
        :param if_not_exists: if True, an IF NOT EXISTS operator will be
         applied to the construct.
        """
        super().__init__(element, if_not_exists=if_not_exists)
690
691
class DropSequence(_DropBase):
    """Represent a DROP SEQUENCE statement."""

    __visit_name__ = "drop_sequence"

    def __init__(self, element: Sequence, if_exists: bool = False):
        """Create a :class:`.DropSequence` construct.

        :param element: a :class:`_schema.Sequence` that's the subject
         of the DROP.
        :param if_exists: if True, an IF EXISTS operator will be applied
         to the construct.
        """
        super().__init__(element, if_exists=if_exists)
699
700
class CreateIndex(_CreateBase):
    """Represent a CREATE INDEX statement."""

    __visit_name__ = "create_index"

    def __init__(self, element, if_not_exists=False):
        """Create a :class:`.CreateIndex` construct.

        :param element: a :class:`_schema.Index` that's the subject
         of the CREATE.
        :param if_not_exists: if True, an IF NOT EXISTS operator will be
         applied to the construct.

         .. versionadded:: 1.4.0b2

        """
        super().__init__(element, if_not_exists=if_not_exists)
718
719
class DropIndex(_DropBase):
    """Represent a DROP INDEX statement."""

    __visit_name__ = "drop_index"

    def __init__(self, element, if_exists=False):
        """Create a :class:`.DropIndex` construct.

        :param element: a :class:`_schema.Index` that's the subject
         of the DROP.
        :param if_exists: if True, an IF EXISTS operator will be applied to the
         construct.

         .. versionadded:: 1.4.0b2

        """
        # the IF EXISTS flag is handled by _DropBase
        super().__init__(element, if_exists=if_exists)
737
738
class AddConstraint(_CreateBase):
    """Represent an ALTER TABLE ADD CONSTRAINT statement."""

    __visit_name__ = "add_constraint"

    def __init__(self, element):
        super().__init__(element)
        # install a _create_rule that returns False on the constraint;
        # presumably this prevents the constraint from also being rendered
        # inline elsewhere, since it will be emitted via ALTER here.
        # portable_instancemethod keeps the construct serializable
        # (see _create_rule_disable)
        element._create_rule = util.portable_instancemethod(
            self._create_rule_disable
        )
749
750
class DropConstraint(_DropBase):
    """Represent an ALTER TABLE DROP CONSTRAINT statement."""

    __visit_name__ = "drop_constraint"

    def __init__(self, element, cascade=False, if_exists=False, **kw):
        # whether a CASCADE clause should be rendered (dialect-dependent)
        self.cascade = cascade
        # NOTE(review): **kw is forwarded to _DropBase.__init__, which
        # accepts no extra keyword arguments — any unexpected kwarg will
        # raise TypeError here; confirm whether **kw is still needed
        super().__init__(element, if_exists=if_exists, **kw)
        # as with AddConstraint, install a _create_rule returning False;
        # portable_instancemethod retains serializability
        element._create_rule = util.portable_instancemethod(
            self._create_rule_disable
        )
762
763
class SetTableComment(_CreateDropBase):
    """Represent a COMMENT ON TABLE IS statement."""

    # ``element`` is the Table whose comment is being set
    __visit_name__ = "set_table_comment"
768
769
class DropTableComment(_CreateDropBase):
    """Represent a COMMENT ON TABLE '' statement.

    Note this varies a lot across database backends.

    """

    # ``element`` is presumably the Table whose comment is being removed
    __visit_name__ = "drop_table_comment"
778
779
class SetColumnComment(_CreateDropBase):
    """Represent a COMMENT ON COLUMN IS statement."""

    # ``element`` is the Column whose comment is being set
    __visit_name__ = "set_column_comment"
784
785
class DropColumnComment(_CreateDropBase):
    """Represent a COMMENT ON COLUMN IS NULL statement."""

    # ``element`` is presumably the Column whose comment is being removed
    __visit_name__ = "drop_column_comment"
790
791
class SetConstraintComment(_CreateDropBase):
    """Represent a COMMENT ON CONSTRAINT IS statement."""

    # ``element`` is the Constraint whose comment is being set
    __visit_name__ = "set_constraint_comment"
796
797
class DropConstraintComment(_CreateDropBase):
    """Represent a COMMENT ON CONSTRAINT IS NULL statement."""

    # ``element`` is presumably the Constraint whose comment is removed
    __visit_name__ = "drop_constraint_comment"
802
803
class InvokeDDLBase(SchemaVisitor):
    """Base for traversal visitors that emit DDL against a connection."""

    def __init__(self, connection):
        # the Connection against which all DDL is invoked
        self.connection = connection

    @contextlib.contextmanager
    def with_ddl_events(self, target, **kw):
        """helper context manager that will apply appropriate DDL events
        to a CREATE or DROP operation."""

        # abstract; subclasses emit the before/after create or drop events
        raise NotImplementedError()
814
815
class InvokeCreateDDLBase(InvokeDDLBase):
    @contextlib.contextmanager
    def with_ddl_events(self, target, **kw):
        """Context manager emitting ``before_create`` / ``after_create``
        events around a CREATE operation."""

        dispatch = target.dispatch
        dispatch.before_create(
            target, self.connection, _ddl_runner=self, **kw
        )
        yield
        dispatch.after_create(
            target, self.connection, _ddl_runner=self, **kw
        )
829
830
class InvokeDropDDLBase(InvokeDDLBase):
    @contextlib.contextmanager
    def with_ddl_events(self, target, **kw):
        """Context manager emitting ``before_drop`` / ``after_drop``
        events around a DROP operation."""

        dispatch = target.dispatch
        dispatch.before_drop(
            target, self.connection, _ddl_runner=self, **kw
        )
        yield
        dispatch.after_drop(
            target, self.connection, _ddl_runner=self, **kw
        )
844
845
class SchemaGenerator(InvokeCreateDDLBase):
    """Traversal visitor that emits CREATE statements for schema items."""

    def __init__(
        self, dialect, connection, checkfirst=False, tables=None, **kwargs
    ):
        super().__init__(connection, **kwargs)
        # when True, only create objects that don't already exist
        self.checkfirst = checkfirst
        # optional subset of tables to create; None means all in metadata
        self.tables = tables
        self.preparer = dialect.identifier_preparer
        self.dialect = dialect
        # scratch dict; presumably consumed by the visitor traversal
        # machinery — not referenced within this module
        self.memo = {}

    def _can_create_table(self, table):
        """Return True if CREATE should proceed for this table.

        Validates identifiers and, when ``checkfirst`` is set, skips
        tables that already exist per ``dialect.has_table``.
        """
        self.dialect.validate_identifier(table.name)
        effective_schema = self.connection.schema_for_object(table)
        if effective_schema:
            self.dialect.validate_identifier(effective_schema)
        return not self.checkfirst or not self.dialect.has_table(
            self.connection, table.name, schema=effective_schema
        )

    def _can_create_index(self, index):
        """Return True if CREATE should proceed for this index,
        applying the same ``checkfirst`` logic as tables."""
        effective_schema = self.connection.schema_for_object(index.table)
        if effective_schema:
            self.dialect.validate_identifier(effective_schema)
        return not self.checkfirst or not self.dialect.has_index(
            self.connection,
            index.table.name,
            index.name,
            schema=effective_schema,
        )

    def _can_create_sequence(self, sequence):
        """Return True if CREATE should proceed for this sequence.

        Requires dialect sequence support; optional sequences are skipped
        when the dialect can do without them.
        """
        effective_schema = self.connection.schema_for_object(sequence)

        return self.dialect.supports_sequences and (
            (not self.dialect.sequences_optional or not sequence.optional)
            and (
                not self.checkfirst
                or not self.dialect.has_sequence(
                    self.connection, sequence.name, schema=effective_schema
                )
            )
        )

    def visit_metadata(self, metadata):
        """Create all tables (and standalone sequences) for a MetaData,
        in dependency order."""
        if self.tables is not None:
            tables = self.tables
        else:
            tables = list(metadata.tables.values())

        # tables paired with the FK constraints to render with them;
        # (None, fkcs) entries carry constraints to be added separately
        collection = sort_tables_and_constraints(
            [t for t in tables if self._can_create_table(t)]
        )

        # only sequences not attached to a column are created standalone
        # here; column-attached ones are reached via column defaults in
        # visit_table
        seq_coll = [
            s
            for s in metadata._sequences.values()
            if s.column is None and self._can_create_sequence(s)
        ]

        event_collection = [t for (t, fks) in collection if t is not None]

        with self.with_ddl_events(
            metadata,
            tables=event_collection,
            checkfirst=self.checkfirst,
        ):
            for seq in seq_coll:
                self.traverse_single(seq, create_ok=True)

            for table, fkcs in collection:
                if table is not None:
                    self.traverse_single(
                        table,
                        create_ok=True,
                        include_foreign_key_constraints=fkcs,
                        _is_metadata_operation=True,
                    )
                else:
                    for fkc in fkcs:
                        self.traverse_single(fkc)

    def visit_table(
        self,
        table,
        create_ok=False,
        include_foreign_key_constraints=None,
        _is_metadata_operation=False,
    ):
        """Emit CREATE TABLE for a single table, along with its indexes
        and (where supported) comments.

        :param create_ok: if True, skip the _can_create_table() check;
         used when the caller has already filtered candidates.
        """
        if not create_ok and not self._can_create_table(table):
            return

        with self.with_ddl_events(
            table,
            checkfirst=self.checkfirst,
            _is_metadata_operation=_is_metadata_operation,
        ):
            # traverse column defaults first; these may be schema objects
            # (e.g. sequences) that must exist before the table
            for column in table.columns:
                if column.default is not None:
                    self.traverse_single(column.default)

            if not self.dialect.supports_alter:
                # without ALTER support, all foreign key constraints must
                # be rendered inline with CREATE TABLE — don't omit any
                include_foreign_key_constraints = None

            CreateTable(
                table,
                include_foreign_key_constraints=(
                    include_foreign_key_constraints
                ),
            )._invoke_with(self.connection)

            if hasattr(table, "indexes"):
                for index in table.indexes:
                    self.traverse_single(index, create_ok=True)

            # comments are emitted as separate statements only when the
            # dialect supports them but can't render them inline
            if (
                self.dialect.supports_comments
                and not self.dialect.inline_comments
            ):
                if table.comment is not None:
                    SetTableComment(table)._invoke_with(self.connection)

                for column in table.columns:
                    if column.comment is not None:
                        SetColumnComment(column)._invoke_with(self.connection)

                if self.dialect.supports_constraint_comments:
                    for constraint in table.constraints:
                        if constraint.comment is not None:
                            # NOTE(review): uses connection.execute()
                            # directly, unlike the _invoke_with() calls
                            # above which also consult ddl_if criteria —
                            # confirm whether this is intentional
                            self.connection.execute(
                                SetConstraintComment(constraint)
                            )

    def visit_foreign_key_constraint(self, constraint):
        """Emit ALTER TABLE ADD CONSTRAINT for a foreign key, if the
        backend supports ALTER."""
        if not self.dialect.supports_alter:
            return

        with self.with_ddl_events(constraint):
            AddConstraint(constraint)._invoke_with(self.connection)

    def visit_sequence(self, sequence, create_ok=False):
        """Emit CREATE SEQUENCE for a single sequence."""
        if not create_ok and not self._can_create_sequence(sequence):
            return
        with self.with_ddl_events(sequence):
            CreateSequence(sequence)._invoke_with(self.connection)

    def visit_index(self, index, create_ok=False):
        """Emit CREATE INDEX for a single index."""
        if not create_ok and not self._can_create_index(index):
            return
        with self.with_ddl_events(index):
            CreateIndex(index)._invoke_with(self.connection)
998
999
1000class SchemaDropper(InvokeDropDDLBase):
    def __init__(
        self, dialect, connection, checkfirst=False, tables=None, **kwargs
    ):
        super().__init__(connection, **kwargs)
        # when True, only drop objects that actually exist
        self.checkfirst = checkfirst
        # optional subset of tables to drop; None means all in metadata
        self.tables = tables
        self.preparer = dialect.identifier_preparer
        self.dialect = dialect
        # scratch dict; presumably consumed by the visitor traversal
        # machinery — not referenced within this module
        self.memo = {}
1010
    def visit_metadata(self, metadata):
        """Drop all tables (and droppable sequences) for a MetaData,
        in reverse dependency order."""
        if self.tables is not None:
            tables = self.tables
        else:
            tables = list(metadata.tables.values())

        try:
            unsorted_tables = [t for t in tables if self._can_drop_table(t)]
            # drop order is the reverse of create order; constraints that
            # cannot be dropped separately (unnamed ones, or any when the
            # backend lacks ALTER) are excluded via filter_fn returning
            # False, otherwise left to the default (None)
            collection = list(
                reversed(
                    sort_tables_and_constraints(
                        unsorted_tables,
                        filter_fn=lambda constraint: (
                            False
                            if not self.dialect.supports_alter
                            or constraint.name is None
                            else None
                        ),
                    )
                )
            )
        except exc.CircularDependencyError as err2:
            if not self.dialect.supports_alter:
                # without ALTER there is no DROP CONSTRAINT available to
                # break the cycle; warn and fall back to unsorted drops
                util.warn(
                    "Can't sort tables for DROP; an "
                    "unresolvable foreign key "
                    "dependency exists between tables: %s; and backend does "
                    "not support ALTER. To restore at least a partial sort, "
                    "apply use_alter=True to ForeignKey and "
                    "ForeignKeyConstraint "
                    "objects involved in the cycle to mark these as known "
                    "cycles that will be ignored."
                    % (", ".join(sorted([t.fullname for t in err2.cycles])))
                )
                collection = [(t, ()) for t in unsorted_tables]
            else:
                # the cycle could be broken with named constraints;
                # re-raise with guidance on how to fix the schema
                raise exc.CircularDependencyError(
                    err2.args[0],
                    err2.cycles,
                    err2.edges,
                    msg="Can't sort tables for DROP; an "
                    "unresolvable foreign key "
                    "dependency exists between tables: %s.  Please ensure "
                    "that the ForeignKey and ForeignKeyConstraint objects "
                    "involved in the cycle have "
                    "names so that they can be dropped using "
                    "DROP CONSTRAINT."
                    % (", ".join(sorted([t.fullname for t in err2.cycles]))),
                ) from err2

        seq_coll = [
            s
            for s in metadata._sequences.values()
            if self._can_drop_sequence(s)
        ]

        event_collection = [t for (t, fks) in collection if t is not None]

        with self.with_ddl_events(
            metadata,
            tables=event_collection,
            checkfirst=self.checkfirst,
        ):
            for table, fkcs in collection:
                if table is not None:
                    # sequences in seq_coll are dropped below, so the
                    # table traversal must not also drop them
                    self.traverse_single(
                        table,
                        drop_ok=True,
                        _is_metadata_operation=True,
                        _ignore_sequences=seq_coll,
                    )
                else:
                    # (None, fkcs) entries carry constraints to be dropped
                    # separately via ALTER
                    for fkc in fkcs:
                        self.traverse_single(fkc)

            # drop_ok is forced True only for standalone (non-column)
            # sequences
            for seq in seq_coll:
                self.traverse_single(seq, drop_ok=seq.column is None)
1088
1089 def _can_drop_table(self, table):
1090 self.dialect.validate_identifier(table.name)
1091 effective_schema = self.connection.schema_for_object(table)
1092 if effective_schema:
1093 self.dialect.validate_identifier(effective_schema)
1094 return not self.checkfirst or self.dialect.has_table(
1095 self.connection, table.name, schema=effective_schema
1096 )
1097
1098 def _can_drop_index(self, index):
1099 effective_schema = self.connection.schema_for_object(index.table)
1100 if effective_schema:
1101 self.dialect.validate_identifier(effective_schema)
1102 return not self.checkfirst or self.dialect.has_index(
1103 self.connection,
1104 index.table.name,
1105 index.name,
1106 schema=effective_schema,
1107 )
1108
1109 def _can_drop_sequence(self, sequence):
1110 effective_schema = self.connection.schema_for_object(sequence)
1111 return self.dialect.supports_sequences and (
1112 (not self.dialect.sequences_optional or not sequence.optional)
1113 and (
1114 not self.checkfirst
1115 or self.dialect.has_sequence(
1116 self.connection, sequence.name, schema=effective_schema
1117 )
1118 )
1119 )
1120
1121 def visit_index(self, index, drop_ok=False):
1122 if not drop_ok and not self._can_drop_index(index):
1123 return
1124
1125 with self.with_ddl_events(index):
1126 DropIndex(index)(index, self.connection)
1127
1128 def visit_table(
1129 self,
1130 table,
1131 drop_ok=False,
1132 _is_metadata_operation=False,
1133 _ignore_sequences=(),
1134 ):
1135 if not drop_ok and not self._can_drop_table(table):
1136 return
1137
1138 with self.with_ddl_events(
1139 table,
1140 checkfirst=self.checkfirst,
1141 _is_metadata_operation=_is_metadata_operation,
1142 ):
1143 DropTable(table)._invoke_with(self.connection)
1144
1145 # traverse client side defaults which may refer to server-side
1146 # sequences. noting that some of these client side defaults may
1147 # also be set up as server side defaults
1148 # (see https://docs.sqlalchemy.org/en/
1149 # latest/core/defaults.html
1150 # #associating-a-sequence-as-the-server-side-
1151 # default), so have to be dropped after the table is dropped.
1152 for column in table.columns:
1153 if (
1154 column.default is not None
1155 and column.default not in _ignore_sequences
1156 ):
1157 self.traverse_single(column.default)
1158
1159 def visit_foreign_key_constraint(self, constraint):
1160 if not self.dialect.supports_alter:
1161 return
1162 with self.with_ddl_events(constraint):
1163 DropConstraint(constraint)._invoke_with(self.connection)
1164
1165 def visit_sequence(self, sequence, drop_ok=False):
1166 if not drop_ok and not self._can_drop_sequence(sequence):
1167 return
1168 with self.with_ddl_events(sequence):
1169 DropSequence(sequence)._invoke_with(self.connection)
1170
1171
def sort_tables(
    tables: Iterable[TableClause],
    skip_fn: Optional[Callable[[ForeignKeyConstraint], bool]] = None,
    extra_dependencies: Optional[
        typing_Sequence[Tuple[TableClause, TableClause]]
    ] = None,
) -> List[Table]:
    """Sort a collection of :class:`_schema.Table` objects based on
    dependency.

    This is a dependency-ordered sort which will emit :class:`_schema.Table`
    objects such that they will follow their dependent :class:`_schema.Table`
    objects.
    Tables are dependent on another based on the presence of
    :class:`_schema.ForeignKeyConstraint`
    objects as well as explicit dependencies
    added by :meth:`_schema.Table.add_is_dependent_on`.

    .. warning::

        The :func:`._schema.sort_tables` function cannot by itself
        accommodate automatic resolution of dependency cycles between
        tables, which are usually caused by mutually dependent foreign key
        constraints. When these cycles are detected, the foreign keys
        of these tables are omitted from consideration in the sort.
        A warning is emitted when this condition occurs, which will be an
        exception raised in a future release.   Tables which are not part
        of the cycle will still be returned in dependency order.

        To resolve these cycles, the
        :paramref:`_schema.ForeignKeyConstraint.use_alter` parameter may be
        applied to those constraints which create a cycle.  Alternatively,
        the :func:`_schema.sort_tables_and_constraints` function will
        automatically return foreign key constraints in a separate
        collection when cycles are detected so that they may be applied
        to a schema separately.

    .. versionchanged:: 1.3.17 - a warning is emitted when
       :func:`_schema.sort_tables` cannot perform a proper sort due to
       cyclical dependencies.  This will be an exception in a future
       release.  Additionally, the sort will continue to return
       other tables not involved in the cycle in dependency order
       which was not the case previously.

    :param tables: a sequence of :class:`_schema.Table` objects.

    :param skip_fn: optional callable which will be passed a
     :class:`_schema.ForeignKey` object; if it returns True, this
     constraint will not be considered as a dependency.  Note this is
     **different** from the same parameter in
     :func:`.sort_tables_and_constraints`, which is
     instead passed the owning :class:`_schema.ForeignKeyConstraint` object.

    :param extra_dependencies: a sequence of 2-tuples of tables which will
     also be considered as dependent on each other.

    .. seealso::

        :func:`.sort_tables_and_constraints`

        :attr:`_schema.MetaData.sorted_tables` - uses this function to sort


    """

    if skip_fn is not None:
        fixed_skip_fn = skip_fn

        # adapt the per-ForeignKey skip_fn to the per-constraint
        # filter_fn signature used by sort_tables_and_constraints():
        # return True (force the constraint to the ALTER bucket) if any
        # of the constraint's ForeignKey elements is skipped, else None
        # ("no opinion"), so cycle resolution may still act on it.
        def _skip_fn(fkc):
            for fk in fkc.elements:
                if fixed_skip_fn(fk):
                    return True
            else:
                return None

    else:
        _skip_fn = None  # type: ignore

    # discard the (None, [fkcs]) trailing entry as well as each table's
    # inline-constraint collection; only the table ordering is returned
    return [
        t
        for (t, fkcs) in sort_tables_and_constraints(
            tables,
            filter_fn=_skip_fn,
            extra_dependencies=extra_dependencies,
            _warn_for_cycles=True,
        )
        if t is not None
    ]
1260
1261
def sort_tables_and_constraints(
    tables, filter_fn=None, extra_dependencies=None, _warn_for_cycles=False
):
    """Sort a collection of :class:`_schema.Table`  /
    :class:`_schema.ForeignKeyConstraint`
    objects.

    This is a dependency-ordered sort which will emit tuples of
    ``(Table, [ForeignKeyConstraint, ...])`` such that each
    :class:`_schema.Table` follows its dependent :class:`_schema.Table`
    objects.
    Remaining :class:`_schema.ForeignKeyConstraint`
    objects that are separate due to
    dependency rules not satisfied by the sort are emitted afterwards
    as ``(None, [ForeignKeyConstraint ...])``.

    Tables are dependent on another based on the presence of
    :class:`_schema.ForeignKeyConstraint` objects, explicit dependencies
    added by :meth:`_schema.Table.add_is_dependent_on`,
    as well as dependencies
    stated here using the :paramref:`~.sort_tables_and_constraints.skip_fn`
    and/or :paramref:`~.sort_tables_and_constraints.extra_dependencies`
    parameters.

    :param tables: a sequence of :class:`_schema.Table` objects.

    :param filter_fn: optional callable which will be passed a
     :class:`_schema.ForeignKeyConstraint` object,
     and returns a value based on
     whether this constraint should definitely be included or excluded as
     an inline constraint, or neither.  If it returns False, the constraint
     will definitely be included as a dependency that cannot be subject
     to ALTER; if True, it will **only** be included as an ALTER result at
     the end.   Returning None means the constraint is included in the
     table-based result unless it is detected as part of a dependency cycle.

    :param extra_dependencies: a sequence of 2-tuples of tables which will
     also be considered as dependent on each other.

    .. seealso::

        :func:`.sort_tables`


    """

    # (parent, child) edges that must always be honored: the caller's
    # extra_dependencies plus Table.add_is_dependent_on() entries.
    fixed_dependencies = set()
    # (parent, child) edges derived from inline FK constraints; these are
    # the only edges that may be discarded again during cycle resolution.
    mutable_dependencies = set()

    if extra_dependencies is not None:
        fixed_dependencies.update(extra_dependencies)

    # constraints deferred to the trailing (None, [fkcs...]) entry
    remaining_fkcs = set()
    for table in tables:
        for fkc in table.foreign_key_constraints:
            # use_alter constraints are always emitted separately
            if fkc.use_alter is True:
                remaining_fkcs.add(fkc)
                continue

            if filter_fn:
                filtered = filter_fn(fkc)

                # True -> force this constraint to the ALTER bucket;
                # False / None fall through and contribute an edge below
                if filtered is True:
                    remaining_fkcs.add(fkc)
                    continue

            # a self-referential FK imposes no inter-table ordering
            dependent_on = fkc.referred_table
            if dependent_on is not table:
                mutable_dependencies.add((dependent_on, table))

        fixed_dependencies.update(
            (parent, table) for parent in table._extra_dependencies
        )

    try:
        candidate_sort = list(
            topological.sort(
                fixed_dependencies.union(mutable_dependencies),
                tables,
            )
        )
    except exc.CircularDependencyError as err:
        if _warn_for_cycles:
            util.warn(
                "Cannot correctly sort tables; there are unresolvable cycles "
                'between tables "%s", which is usually caused by mutually '
                "dependent foreign key constraints. Foreign key constraints "
                "involving these tables will not be considered; this warning "
                "may raise an error in a future release."
                % (", ".join(sorted(t.fullname for t in err.cycles)),)
            )
        # break the cycle(s): for every FK-derived edge whose child table
        # participates in a cycle, move that table's removable constraints
        # into the ALTER bucket and discard the corresponding edges, then
        # re-run the sort without them.
        for edge in err.edges:
            if edge in mutable_dependencies:
                table = edge[1]
                if table not in err.cycles:
                    continue
                # a filter_fn result of False pins the constraint inline,
                # so it cannot be used to break the cycle
                can_remove = [
                    fkc
                    for fkc in table.foreign_key_constraints
                    if filter_fn is None or filter_fn(fkc) is not False
                ]
                remaining_fkcs.update(can_remove)
                for fkc in can_remove:
                    dependent_on = fkc.referred_table
                    if dependent_on is not table:
                        mutable_dependencies.discard((dependent_on, table))
        candidate_sort = list(
            topological.sort(
                fixed_dependencies.union(mutable_dependencies),
                tables,
            )
        )

    # each sorted table paired with its still-inline constraints, followed
    # by the deferred constraints grouped under a None table entry
    return [
        (table, table.foreign_key_constraints.difference(remaining_fkcs))
        for table in candidate_sort
    ] + [(None, list(remaining_fkcs))]