1# sql/ddl.py
2# Copyright (C) 2009-2021 the SQLAlchemy authors and contributors
3# <see AUTHORS file>
4#
5# This module is part of SQLAlchemy and is released under
6# the MIT License: http://www.opensource.org/licenses/mit-license.php
7"""
8Provides the hierarchy of DDL-defining schema items as well as routines
9to invoke them for a create/drop call.
10
11"""
12
13from .base import _bind_or_error
14from .base import _generative
15from .base import Executable
16from .base import SchemaVisitor
17from .elements import ClauseElement
18from .. import event
19from .. import exc
20from .. import util
21from ..util import topological
22
23
class _DDLCompiles(ClauseElement):
    def _compiler(self, dialect, **kw):
        """Return the DDL compiler appropriate for this element, given
        a Dialect."""

        compiler_cls = dialect.ddl_compiler
        return compiler_cls(dialect, self, **kw)
30
31
class DDLElement(Executable, _DDLCompiles):
    """Base class for DDL expression constructs.

    This class is the base for the general purpose :class:`.DDL` class,
    as well as the various create/drop clause constructs such as
    :class:`.CreateTable`, :class:`.DropTable`, :class:`.AddConstraint`,
    etc.

    :class:`.DDLElement` integrates closely with SQLAlchemy events,
    introduced in :ref:`event_toplevel`. An instance of one is
    itself an event receiving callable::

        event.listen(
            users,
            'after_create',
            AddConstraint(constraint).execute_if(dialect='postgresql')
        )

    .. seealso::

        :class:`.DDL`

        :class:`.DDLEvents`

        :ref:`event_toplevel`

        :ref:`schema_ddl_sequences`

    """

    # DDL statements participate in autocommit by default when executed
    # without an explicit transaction.
    _execution_options = Executable._execution_options.union(
        {"autocommit": True}
    )

    # schema item this DDL is "against"; populated by :meth:`.against`
    target = None
    # legacy filtering criteria from the deprecated ``on`` parameter
    on = None
    # dialect-name criteria set by :meth:`.execute_if`
    dialect = None
    # callable criteria set by :meth:`.execute_if`
    callable_ = None

    def _execute_on_connection(self, connection, multiparams, params):
        # dispatch hook invoked by Connection.execute(); routes this
        # element to the DDL-specific execution path.
        return connection._execute_ddl(self, multiparams, params)

    def execute(self, bind=None, target=None):
        """Execute this DDL immediately.

        Executes the DDL statement in isolation using the supplied
        :class:`.Connectable` or
        :class:`.Connectable` assigned to the ``.bind``
        property, if not supplied. If the DDL has a conditional ``on``
        criteria, it will be invoked with None as the event.

        :param bind:
          Optional, an ``Engine`` or ``Connection``. If not supplied, a valid
          :class:`.Connectable` must be present in the
          ``.bind`` property.

        :param target:
          Optional, defaults to None. The target :class:`_schema.SchemaItem`
          for the execute call. Will be passed to the ``on`` callable if any,
          and may also provide string expansion data for the statement.
          See ``execute_at`` for more information.

        """

        if bind is None:
            # no explicit bind given; fall back to the .bind property
            # (raises if that is also unset)
            bind = _bind_or_error(self)

        if self._should_execute(target, bind):
            return bind.execute(self.against(target))
        else:
            bind.engine.logger.info("DDL execution skipped, criteria not met.")

    @util.deprecated(
        "0.7",
        "The :meth:`.DDLElement.execute_at` method is deprecated and will "
        "be removed in a future release. Please use the :class:`.DDLEvents` "
        "listener interface in conjunction with the "
        ":meth:`.DDLElement.execute_if` method.",
    )
    def execute_at(self, event_name, target):
        """Link execution of this DDL to the DDL lifecycle of a SchemaItem.

        Links this ``DDLElement`` to a ``Table`` or ``MetaData`` instance,
        executing it when that schema item is created or dropped. The DDL
        statement will be executed using the same Connection and transactional
        context as the Table create/drop itself. The ``.bind`` property of
        this statement is ignored.

        :param event:
          One of the events defined in the schema item's ``.ddl_events``;
          e.g. 'before-create', 'after-create', 'before-drop' or 'after-drop'

        :param target:
          The Table or MetaData instance for which this DDLElement will
          be associated with.

        A DDLElement instance can be linked to any number of schema items.

        ``execute_at`` builds on the ``append_ddl_listener`` interface of
        :class:`_schema.MetaData` and :class:`_schema.Table` objects.

        Caveat: Creating or dropping a Table in isolation will also trigger
        any DDL set to ``execute_at`` that Table's MetaData. This may change
        in a future release.

        """

        def call_event(target, connection, **kw):
            # adapter: the modern event system does not pass the event
            # name, so it is closed over from execute_at()'s argument.
            if self._should_execute_deprecated(
                event_name, target, connection, **kw
            ):
                return connection.execute(self.against(target))

        # legacy event names are hyphenated ('after-create'); the event
        # system uses underscores ('after_create').
        event.listen(target, "" + event_name.replace("-", "_"), call_event)

    @_generative
    def against(self, target):
        """Return a copy of this DDL against a specific schema item."""

        # @_generative ensures this mutation applies to a copy, not to
        # the original element.
        self.target = target

    @_generative
    def execute_if(self, dialect=None, callable_=None, state=None):
        r"""Return a callable that will execute this
        DDLElement conditionally.

        Used to provide a wrapper for event listening::

            event.listen(
                metadata,
                'before_create',
                DDL("my_ddl").execute_if(dialect='postgresql')
            )

        :param dialect: May be a string or tuple of strings.
          If a string, it will be compared to the name of the
          executing database dialect::

            DDL('something').execute_if(dialect='postgresql')

          If a tuple, specifies multiple dialect names::

            DDL('something').execute_if(dialect=('postgresql', 'mysql'))

        :param callable\_: A callable, which will be invoked with
          four positional arguments as well as optional keyword
          arguments:

            :ddl:
              This DDL element.

            :target:
              The :class:`_schema.Table` or :class:`_schema.MetaData`
              object which is the
              target of this event. May be None if the DDL is executed
              explicitly.

            :bind:
              The :class:`_engine.Connection` being used for DDL execution

            :tables:
              Optional keyword argument - a list of Table objects which are to
              be created/dropped within a MetaData.create_all() or drop_all()
              method call.

            :state:
              Optional keyword argument - will be the ``state`` argument
              passed to this function.

            :checkfirst:
              Keyword argument, will be True if the 'checkfirst' flag was
              set during the call to ``create()``, ``create_all()``,
              ``drop()``, ``drop_all()``.

          If the callable returns a True value, the DDL statement will be
          executed.

        :param state: any value which will be passed to the callable\_
          as the ``state`` keyword argument.

        .. seealso::

            :class:`.DDLEvents`

            :ref:`event_toplevel`

        """
        # @_generative: these assignments occur on a copy of self.
        self.dialect = dialect
        self.callable_ = callable_
        self.state = state

    def _should_execute(self, target, bind, **kw):
        """Return True if all configured criteria permit execution."""
        # legacy 'on' criteria is honored first, if present
        if self.on is not None and not self._should_execute_deprecated(
            None, target, bind, **kw
        ):
            return False

        # dialect criteria: a single name, or a collection of names
        if isinstance(self.dialect, util.string_types):
            if self.dialect != bind.engine.name:
                return False
        elif isinstance(self.dialect, (tuple, list, set)):
            if bind.engine.name not in self.dialect:
                return False
        # user-supplied predicate has the final say
        if self.callable_ is not None and not self.callable_(
            self, target, bind, state=self.state, **kw
        ):
            return False

        return True

    def _should_execute_deprecated(self, event, target, bind, **kw):
        """Evaluate the deprecated ``on`` criteria: a dialect name,
        a collection of names, or a predicate callable."""
        if self.on is None:
            return True
        elif isinstance(self.on, util.string_types):
            return self.on == bind.engine.name
        elif isinstance(self.on, (tuple, list, set)):
            return bind.engine.name in self.on
        else:
            return self.on(self, event, target, bind, **kw)

    def __call__(self, target, bind, **kw):
        """Execute the DDL as a ddl_listener."""

        if self._should_execute(target, bind, **kw):
            return bind.execute(self.against(target))

    def _check_ddl_on(self, on):
        """Validate the deprecated ``on`` argument, raising
        :class:`.ArgumentError` for unsupported types."""
        if on is not None and (
            not isinstance(on, util.string_types + (tuple, list, set))
            and not util.callable(on)
        ):
            raise exc.ArgumentError(
                "Expected the name of a database dialect, a tuple "
                "of names, or a callable for "
                "'on' criteria, got type '%s'." % type(on).__name__
            )

    def bind(self):
        # NOTE(review): a falsy bind (not just None) also yields None here
        if self._bind:
            return self._bind

    def _set_bind(self, bind):
        self._bind = bind

    bind = property(bind, _set_bind)

    def _generate(self):
        """Return a shallow copy of this element; used by the
        @_generative decorator on :meth:`.against` / :meth:`.execute_if`."""
        s = self.__class__.__new__(self.__class__)
        s.__dict__ = self.__dict__.copy()
        return s
282
283
class DDL(DDLElement):
    """A literal DDL statement.

    Specifies literal SQL DDL to be executed by the database.  DDL objects
    act as DDL event listeners, and may be subscribed to the events
    listed in :class:`.DDLEvents`, using either :class:`_schema.Table`
    or :class:`_schema.MetaData` objects as targets.  Basic templating
    support allows one DDL instance to handle repetitive tasks for
    multiple tables.

    Examples::

        from sqlalchemy import event, DDL

        tbl = Table('users', metadata, Column('uid', Integer))
        event.listen(tbl, 'before_create', DDL('DROP TRIGGER users_trigger'))

        spow = DDL('ALTER TABLE %(table)s SET secretpowers TRUE')
        event.listen(tbl, 'after_create', spow.execute_if(dialect='somedb'))

        drop_spow = DDL('ALTER TABLE users SET secretpowers FALSE')
        connection.execute(drop_spow)

    When operating on Table events, the following ``statement``
    string substitutions are available::

        %(table)s - the Table name, with any required quoting applied
        %(schema)s - the schema name, with any required quoting applied
        %(fullname)s - the Table name including schema, quoted if needed

    The DDL's "context", if any, is combined with the standard
    substitutions noted above; keys present in the context take
    precedence over the standard substitutions.

    """

    __visit_name__ = "ddl"

    @util.deprecated_params(
        on=(
            "0.7",
            "The :paramref:`.DDL.on` parameter is deprecated and will be "
            "removed in a future release. Please refer to "
            ":meth:`.DDLElement.execute_if`.",
        )
    )
    def __init__(self, statement, on=None, context=None, bind=None):
        """Create a DDL statement.

        :param statement:
          A string or unicode string to be executed.  Statements will be
          processed with Python's string formatting operator; see the
          ``context`` argument and the ``execute_at`` method.

          A literal '%' in a statement must be escaped as '%%'.

          SQL bind parameters are not available in DDL statements.

        :param on:

          Optional filtering criteria; a string, tuple or callable
          predicate.  A string is compared to the name of the executing
          database dialect::

            DDL('something', on='postgresql')

          A tuple specifies multiple dialect names::

            DDL('something', on=('postgresql', 'mysql'))

          A callable is invoked with four positional arguments as well
          as optional keyword arguments:

            :ddl:
              This DDL element.

            :event:
              The name of the event that has triggered this DDL, such as
              'after-create'.  Will be None if the DDL is executed
              explicitly.

            :target:
              The ``Table`` or ``MetaData`` object which is the target of
              this event.  May be None if the DDL is executed explicitly.

            :connection:
              The ``Connection`` being used for DDL execution

            :tables:
              Optional keyword argument - a list of Table objects which
              are to be created/dropped within a MetaData.create_all()
              or drop_all() method call.

          If the callable returns a true value, the DDL statement will
          be executed.

        :param context:
          Optional dictionary, defaults to None.  Values here become
          available for string substitution on the DDL statement.

        :param bind:
          Optional. A :class:`.Connectable`, used by
          default when ``execute()`` is invoked without a bind argument.


        .. seealso::

            :class:`.DDLEvents`

            :ref:`event_toplevel`

        """

        # statement must be textual; anything else is rejected up front
        if not isinstance(statement, util.string_types):
            raise exc.ArgumentError(
                "Expected a string or unicode SQL statement, got '%r'"
                % statement
            )

        self.statement = statement
        self.context = context or {}

        self._check_ddl_on(on)
        self.on = on
        self._bind = bind

    def __repr__(self):
        # show the statement, plus 'on'/'context' only when truthy
        parts = [repr(self.statement)]
        for attr in ("on", "context"):
            value = getattr(self, attr)
            if value:
                parts.append("%s=%r" % (attr, value))
        return "<%s@%s; %s>" % (
            type(self).__name__,
            id(self),
            ", ".join(parts),
        )
423
424
class _CreateDropBase(DDLElement):
    """Base class for DDL constructs representing CREATE and DROP (or
    their equivalents).

    Each _CreateDropBase subclass carries a single ``element``
    attribute, referring to the schema item to be created or dropped.

    """

    def __init__(self, element, on=None, bind=None):
        self.element = element
        self._check_ddl_on(on)
        self.on = on
        self.bind = bind

    def _create_rule_disable(self, compiler):
        """Callable form of "disable the _create_rule".

        Passed to _create_rule via
        util.portable_instancemethod(self._create_rule_disable)
        so that the rule remains serializable.

        """
        return False
450
451
class CreateSchema(_CreateDropBase):
    """Represent a CREATE SCHEMA statement.

    The element here is simply the string name of the schema to create.

    """

    __visit_name__ = "create_schema"

    def __init__(self, name, quote=None, **kw):
        """Construct a :class:`.CreateSchema` for the given schema name."""

        super(CreateSchema, self).__init__(name, **kw)
        self.quote = quote
466
467
class DropSchema(_CreateDropBase):
    """Represent a DROP SCHEMA statement.

    The element here is simply the string name of the schema to drop.

    """

    __visit_name__ = "drop_schema"

    def __init__(self, name, quote=None, cascade=False, **kw):
        """Construct a :class:`.DropSchema` for the given schema name."""

        super(DropSchema, self).__init__(name, **kw)
        self.quote = quote
        self.cascade = cascade
483
484
class CreateTable(_CreateDropBase):
    """Represent a CREATE TABLE statement."""

    __visit_name__ = "create_table"

    def __init__(
        self, element, on=None, bind=None, include_foreign_key_constraints=None
    ):
        """Construct a :class:`.CreateTable` construct.

        :param element: the :class:`_schema.Table` that is the subject
         of the CREATE
        :param on: See the description for 'on' in :class:`.DDL`.
        :param bind: See the description for 'bind' in :class:`.DDL`.
        :param include_foreign_key_constraints: optional sequence of
         :class:`_schema.ForeignKeyConstraint` objects to render inline
         within the CREATE construct; when omitted, every foreign key
         constraint not marked use_alter=True is included.

         .. versionadded:: 1.0.0

        """
        super(CreateTable, self).__init__(element, on=on, bind=bind)
        # snapshot a CreateColumn construct for each column of the table
        self.columns = [CreateColumn(col) for col in element.columns]
        self.include_foreign_key_constraints = include_foreign_key_constraints
510
511
class _DropView(_CreateDropBase):
    """Semi-public 'DROP VIEW' construct.

    Currently used by the test suite to drop views in a
    dialect-agnostic fashion; intended to eventually become part of a
    public "view" API.

    """

    __visit_name__ = "drop_view"
521
522
class CreateColumn(_DDLCompiles):
    """Represent a :class:`_schema.Column`
    as rendered in a CREATE TABLE statement,
    via the :class:`.CreateTable` construct.

    This is provided to support custom column DDL within the generation
    of CREATE TABLE statements, by using the
    compiler extension documented in :ref:`sqlalchemy.ext.compiler_toplevel`
    to extend :class:`.CreateColumn`.

    Typical integration is to examine the incoming :class:`_schema.Column`
    object, and to redirect compilation if a particular flag or condition
    is found::

        from sqlalchemy import schema
        from sqlalchemy.ext.compiler import compiles

        @compiles(schema.CreateColumn)
        def compile(element, compiler, **kw):
            column = element.element

            if "special" not in column.info:
                return compiler.visit_create_column(element, **kw)

            text = "%s SPECIAL DIRECTIVE %s" % (
                column.name,
                compiler.type_compiler.process(column.type)
            )
            default = compiler.get_column_default_string(column)
            if default is not None:
                text += " DEFAULT " + default

            if not column.nullable:
                text += " NOT NULL"

            if column.constraints:
                text += " ".join(
                    compiler.process(const)
                    for const in column.constraints)
            return text

    The above construct can be applied to a :class:`_schema.Table`
    as follows::

        from sqlalchemy import Table, MetaData, Column, Integer, String
        from sqlalchemy import schema

        metadata = MetaData()

        table = Table('mytable', metadata,
                Column('x', Integer, info={"special":True}, primary_key=True),
                Column('y', String(50)),
                Column('z', String(20), info={"special":True})
            )

        metadata.create_all(conn)

    Above, the directives we've added to the :attr:`_schema.Column.info`
    collection
    will be detected by our custom compilation scheme::

        CREATE TABLE mytable (
                x SPECIAL DIRECTIVE INTEGER NOT NULL,
                y VARCHAR(50),
                z SPECIAL DIRECTIVE VARCHAR(20),
            PRIMARY KEY (x)
        )

    The :class:`.CreateColumn` construct can also be used to skip certain
    columns when producing a ``CREATE TABLE``.  This is accomplished by
    creating a compilation rule that conditionally returns ``None``.
    This is essentially how to produce the same effect as using the
    ``system=True`` argument on :class:`_schema.Column`, which marks a column
    as an implicitly-present "system" column.

    For example, suppose we wish to produce a :class:`_schema.Table`
    which skips
    rendering of the PostgreSQL ``xmin`` column against the PostgreSQL
    backend, but on other backends does render it, in anticipation of a
    triggered rule.  A conditional compilation rule could skip this name only
    on PostgreSQL::

        from sqlalchemy.schema import CreateColumn

        @compiles(CreateColumn, "postgresql")
        def skip_xmin(element, compiler, **kw):
            if element.element.name == 'xmin':
                return None
            else:
                return compiler.visit_create_column(element, **kw)


        my_table = Table('mytable', metadata,
                    Column('id', Integer, primary_key=True),
                    Column('xmin', Integer)
                )

    Above, a :class:`.CreateTable` construct will generate a ``CREATE TABLE``
    which only includes the ``id`` column in the string; the ``xmin`` column
    will be omitted, but only against the PostgreSQL backend.

    """

    __visit_name__ = "create_column"

    def __init__(self, element):
        # element is the Column to be rendered within CREATE TABLE
        self.element = element
630
631
class DropTable(_CreateDropBase):
    """Represent a DROP TABLE statement; the element is the
    :class:`_schema.Table` to be dropped."""

    __visit_name__ = "drop_table"
636
637
class CreateSequence(_CreateDropBase):
    """Represent a CREATE SEQUENCE statement; the element is the
    :class:`.Sequence` to be created."""

    __visit_name__ = "create_sequence"
642
643
class DropSequence(_CreateDropBase):
    """Represent a DROP SEQUENCE statement; the element is the
    :class:`.Sequence` to be dropped."""

    __visit_name__ = "drop_sequence"
648
649
class CreateIndex(_CreateDropBase):
    """Represent a CREATE INDEX statement; the element is the
    :class:`.Index` to be created."""

    __visit_name__ = "create_index"
654
655
class DropIndex(_CreateDropBase):
    """Represent a DROP INDEX statement; the element is the
    :class:`.Index` to be dropped."""

    __visit_name__ = "drop_index"
660
661
class AddConstraint(_CreateDropBase):
    """Represent an ALTER TABLE ADD CONSTRAINT statement."""

    __visit_name__ = "add_constraint"

    def __init__(self, element, *args, **kw):
        super(AddConstraint, self).__init__(element, *args, **kw)
        # once the constraint is associated with an explicit ADD
        # CONSTRAINT, suppress its inline rendering in CREATE TABLE
        element._create_rule = util.portable_instancemethod(
            self._create_rule_disable
        )
672
673
class DropConstraint(_CreateDropBase):
    """Represent an ALTER TABLE DROP CONSTRAINT statement."""

    __visit_name__ = "drop_constraint"

    def __init__(self, element, cascade=False, **kw):
        self.cascade = cascade
        super(DropConstraint, self).__init__(element, **kw)
        # the constraint is being dropped explicitly, so suppress its
        # inline rendering in CREATE TABLE
        element._create_rule = util.portable_instancemethod(
            self._create_rule_disable
        )
685
686
class SetTableComment(_CreateDropBase):
    """Represent a COMMENT ON TABLE IS statement; the element is the
    :class:`_schema.Table` whose comment is to be set."""

    __visit_name__ = "set_table_comment"
691
692
class DropTableComment(_CreateDropBase):
    """Represent a statement that removes a table's comment, e.g.
    ``COMMENT ON TABLE '' ``.

    Note the exact syntax varies a lot across database backends.

    """

    __visit_name__ = "drop_table_comment"
701
702
class SetColumnComment(_CreateDropBase):
    """Represent a COMMENT ON COLUMN IS statement; the element is the
    :class:`_schema.Column` whose comment is to be set."""

    __visit_name__ = "set_column_comment"
707
708
class DropColumnComment(_CreateDropBase):
    """Represent a COMMENT ON COLUMN IS NULL statement; the element is
    the :class:`_schema.Column` whose comment is to be removed."""

    __visit_name__ = "drop_column_comment"
713
714
class DDLBase(SchemaVisitor):
    """Common base for schema visitors which emit DDL over a
    connection."""

    def __init__(self, connection):
        self.connection = connection
718
719
class SchemaGenerator(DDLBase):
    """Schema visitor which emits CREATE statements for tables,
    sequences and indexes, either for a whole :class:`_schema.MetaData`
    collection or for individual schema items."""

    def __init__(
        self, dialect, connection, checkfirst=False, tables=None, **kwargs
    ):
        super(SchemaGenerator, self).__init__(connection, **kwargs)
        # when True, skip objects which the dialect reports as existing
        self.checkfirst = checkfirst
        # optional explicit subset of tables; None means all in metadata
        self.tables = tables
        self.preparer = dialect.identifier_preparer
        self.dialect = dialect
        # per-run scratch dict; presumably used by the visitor
        # traversal machinery — not read directly in this class
        self.memo = {}

    def _can_create_table(self, table):
        """Return True if ``table`` should be created.

        Always validates the table (and schema) identifiers; when
        ``checkfirst`` is set, also requires that the table does not
        already exist.
        """
        self.dialect.validate_identifier(table.name)
        effective_schema = self.connection.schema_for_object(table)
        if effective_schema:
            self.dialect.validate_identifier(effective_schema)
        return not self.checkfirst or not self.dialect.has_table(
            self.connection, table.name, schema=effective_schema
        )

    def _can_create_sequence(self, sequence):
        """Return True if ``sequence`` should be created, accounting
        for dialect support, optional sequences, and ``checkfirst``."""
        effective_schema = self.connection.schema_for_object(sequence)

        return self.dialect.supports_sequences and (
            (not self.dialect.sequences_optional or not sequence.optional)
            and (
                not self.checkfirst
                or not self.dialect.has_sequence(
                    self.connection, sequence.name, schema=effective_schema
                )
            )
        )

    def visit_metadata(self, metadata):
        """Emit CREATE statements for all creatable objects in
        ``metadata`` (or the configured subset), in dependency order,
        surrounding the work with before_create/after_create events."""
        if self.tables is not None:
            tables = self.tables
        else:
            tables = list(metadata.tables.values())

        # dependency-sorted (table, fk-constraints) pairs; a None table
        # entry carries constraints that must be added via ALTER later
        collection = sort_tables_and_constraints(
            [t for t in tables if self._can_create_table(t)]
        )

        # standalone sequences only; column-attached ones are created
        # along with their table
        seq_coll = [
            s
            for s in metadata._sequences.values()
            if s.column is None and self._can_create_sequence(s)
        ]

        event_collection = [t for (t, fks) in collection if t is not None]
        metadata.dispatch.before_create(
            metadata,
            self.connection,
            tables=event_collection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
        )

        # sequences first, so tables may reference them
        for seq in seq_coll:
            self.traverse_single(seq, create_ok=True)

        for table, fkcs in collection:
            if table is not None:
                self.traverse_single(
                    table,
                    create_ok=True,
                    include_foreign_key_constraints=fkcs,
                    _is_metadata_operation=True,
                )
            else:
                # cycle-breaking constraints emitted via ALTER TABLE
                for fkc in fkcs:
                    self.traverse_single(fkc)

        metadata.dispatch.after_create(
            metadata,
            self.connection,
            tables=event_collection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
        )

    def visit_table(
        self,
        table,
        create_ok=False,
        include_foreign_key_constraints=None,
        _is_metadata_operation=False,
    ):
        """Emit CREATE TABLE for ``table``, along with its column
        defaults, indexes and (if supported) comments, surrounded by
        before_create/after_create events."""
        if not create_ok and not self._can_create_table(table):
            return

        table.dispatch.before_create(
            table,
            self.connection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
            _is_metadata_operation=_is_metadata_operation,
        )

        # create any dependent client-side default objects first
        for column in table.columns:
            if column.default is not None:
                self.traverse_single(column.default)

        if not self.dialect.supports_alter:
            # backend cannot ADD CONSTRAINT afterwards, so render all
            # foreign key constraints inline within CREATE TABLE
            include_foreign_key_constraints = None

        self.connection.execute(
            # fmt: off
            CreateTable(
                table,
                include_foreign_key_constraints=  # noqa
                    include_foreign_key_constraints,  # noqa
            )
            # fmt: on
        )

        if hasattr(table, "indexes"):
            for index in table.indexes:
                self.traverse_single(index)

        # emit comments separately when the dialect cannot render them
        # inline within CREATE TABLE
        if self.dialect.supports_comments and not self.dialect.inline_comments:
            if table.comment is not None:
                self.connection.execute(SetTableComment(table))

            for column in table.columns:
                if column.comment is not None:
                    self.connection.execute(SetColumnComment(column))

        table.dispatch.after_create(
            table,
            self.connection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
            _is_metadata_operation=_is_metadata_operation,
        )

    def visit_foreign_key_constraint(self, constraint):
        """Emit ALTER TABLE ADD CONSTRAINT, if the dialect supports
        ALTER at all."""
        if not self.dialect.supports_alter:
            return
        self.connection.execute(AddConstraint(constraint))

    def visit_sequence(self, sequence, create_ok=False):
        """Emit CREATE SEQUENCE, subject to the creatability check."""
        if not create_ok and not self._can_create_sequence(sequence):
            return
        self.connection.execute(CreateSequence(sequence))

    def visit_index(self, index):
        """Emit CREATE INDEX unconditionally."""
        self.connection.execute(CreateIndex(index))
869
870
class SchemaDropper(DDLBase):
    """Schema visitor which emits DROP statements for tables, sequences
    and indexes, either for a whole :class:`_schema.MetaData` collection
    or for individual schema items."""

    def __init__(
        self, dialect, connection, checkfirst=False, tables=None, **kwargs
    ):
        super(SchemaDropper, self).__init__(connection, **kwargs)
        # when True, only drop objects which the dialect reports exist
        self.checkfirst = checkfirst
        # optional explicit subset of tables; None means all in metadata
        self.tables = tables
        self.preparer = dialect.identifier_preparer
        self.dialect = dialect
        # per-run scratch dict; presumably used by the visitor
        # traversal machinery — not read directly in this class
        self.memo = {}

    def visit_metadata(self, metadata):
        """Emit DROP statements for all droppable objects in
        ``metadata`` (or the configured subset), in reverse dependency
        order, surrounded by before_drop/after_drop events.

        Cyclic foreign key dependencies are tolerated (with a warning)
        only when the backend does not support ALTER; otherwise a
        :class:`.CircularDependencyError` is raised with guidance.
        """
        if self.tables is not None:
            tables = self.tables
        else:
            tables = list(metadata.tables.values())

        try:
            unsorted_tables = [t for t in tables if self._can_drop_table(t)]
            # reverse of the CREATE ordering; unnamed constraints (or
            # all constraints, when ALTER is unsupported) are filtered
            # from separate DROP CONSTRAINT handling
            collection = list(
                reversed(
                    sort_tables_and_constraints(
                        unsorted_tables,
                        filter_fn=lambda constraint: False
                        if not self.dialect.supports_alter
                        or constraint.name is None
                        else None,
                    )
                )
            )
        except exc.CircularDependencyError as err2:
            if not self.dialect.supports_alter:
                # cannot break the cycle via DROP CONSTRAINT; fall back
                # to an arbitrary order and warn
                util.warn(
                    "Can't sort tables for DROP; an "
                    "unresolvable foreign key "
                    "dependency exists between tables: %s; and backend does "
                    "not support ALTER. To restore at least a partial sort, "
                    "apply use_alter=True to ForeignKey and "
                    "ForeignKeyConstraint "
                    "objects involved in the cycle to mark these as known "
                    "cycles that will be ignored."
                    % (", ".join(sorted([t.fullname for t in err2.cycles])))
                )
                collection = [(t, ()) for t in unsorted_tables]
            else:
                util.raise_(
                    exc.CircularDependencyError(
                        err2.args[0],
                        err2.cycles,
                        err2.edges,
                        msg="Can't sort tables for DROP; an "
                        "unresolvable foreign key "
                        "dependency exists between tables: %s. Please ensure "
                        "that the ForeignKey and ForeignKeyConstraint objects "
                        "involved in the cycle have "
                        "names so that they can be dropped using "
                        "DROP CONSTRAINT."
                        % (
                            ", ".join(
                                sorted([t.fullname for t in err2.cycles])
                            )
                        ),
                    ),
                    from_=err2,
                )

        seq_coll = [
            s
            for s in metadata._sequences.values()
            if self._can_drop_sequence(s)
        ]

        event_collection = [t for (t, fks) in collection if t is not None]

        metadata.dispatch.before_drop(
            metadata,
            self.connection,
            tables=event_collection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
        )

        for table, fkcs in collection:
            if table is not None:
                self.traverse_single(
                    table,
                    drop_ok=True,
                    _is_metadata_operation=True,
                    _ignore_sequences=seq_coll,
                )
            else:
                # cycle-breaking constraints dropped via ALTER TABLE
                for fkc in fkcs:
                    self.traverse_single(fkc)

        # drop standalone sequences after the tables referencing them
        for seq in seq_coll:
            self.traverse_single(seq, drop_ok=seq.column is None)

        metadata.dispatch.after_drop(
            metadata,
            self.connection,
            tables=event_collection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
        )

    def _can_drop_table(self, table):
        """Return True if ``table`` should be dropped.

        Always validates the table (and schema) identifiers; when
        ``checkfirst`` is set, also requires that the table exists.
        """
        self.dialect.validate_identifier(table.name)
        effective_schema = self.connection.schema_for_object(table)
        if effective_schema:
            self.dialect.validate_identifier(effective_schema)
        return not self.checkfirst or self.dialect.has_table(
            self.connection, table.name, schema=effective_schema
        )

    def _can_drop_sequence(self, sequence):
        """Return True if ``sequence`` should be dropped, accounting
        for dialect support, optional sequences, and ``checkfirst``."""
        effective_schema = self.connection.schema_for_object(sequence)
        return self.dialect.supports_sequences and (
            (not self.dialect.sequences_optional or not sequence.optional)
            and (
                not self.checkfirst
                or self.dialect.has_sequence(
                    self.connection, sequence.name, schema=effective_schema
                )
            )
        )

    def visit_index(self, index):
        """Emit DROP INDEX unconditionally."""
        self.connection.execute(DropIndex(index))

    def visit_table(
        self,
        table,
        drop_ok=False,
        _is_metadata_operation=False,
        # fixed: was a mutable default ``[]``; the argument is only
        # read (membership test), so an immutable tuple is equivalent
        # and avoids the shared-mutable-default hazard
        _ignore_sequences=(),
    ):
        """Emit DROP TABLE for ``table``, then drop its client-side
        default objects, surrounded by before_drop/after_drop events.

        :param _ignore_sequences: sequences already scheduled for a
         separate drop by :meth:`.visit_metadata`; skipped here.
        """
        if not drop_ok and not self._can_drop_table(table):
            return

        table.dispatch.before_drop(
            table,
            self.connection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
            _is_metadata_operation=_is_metadata_operation,
        )

        self.connection.execute(DropTable(table))

        # traverse client side defaults which may refer to server-side
        # sequences. noting that some of these client side defaults may also be
        # set up as server side defaults (see http://docs.sqlalchemy.org/en/
        # latest/core/defaults.html#associating-a-sequence-as-the-server-side-
        # default), so have to be dropped after the table is dropped.
        for column in table.columns:
            if (
                column.default is not None
                and column.default not in _ignore_sequences
            ):
                self.traverse_single(column.default)

        table.dispatch.after_drop(
            table,
            self.connection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
            _is_metadata_operation=_is_metadata_operation,
        )

    def visit_foreign_key_constraint(self, constraint):
        """Emit ALTER TABLE DROP CONSTRAINT, if the dialect supports
        ALTER at all."""
        if not self.dialect.supports_alter:
            return
        self.connection.execute(DropConstraint(constraint))

    def visit_sequence(self, sequence, drop_ok=False):
        """Emit DROP SEQUENCE, subject to the droppability check."""
        if not drop_ok and not self._can_drop_sequence(sequence):
            return
        self.connection.execute(DropSequence(sequence))
1050
1051
def sort_tables(
    tables,
    skip_fn=None,
    extra_dependencies=None,
):
    """Sort a collection of :class:`_schema.Table` objects based on
    dependency.

    This is a dependency-ordered sort: each :class:`_schema.Table` is
    emitted after the tables upon which it depends.  A dependency is
    implied by the presence of :class:`_schema.ForeignKeyConstraint`
    objects, as well as by explicit dependencies added via
    :meth:`_schema.Table.add_is_dependent_on`.

    .. warning::

        The :func:`._schema.sort_tables` function cannot by itself
        accommodate automatic resolution of dependency cycles between
        tables, which are usually caused by mutually dependent foreign
        key constraints.  When such a cycle is detected, the foreign
        keys of the tables involved are omitted from consideration in
        the sort.  A warning is emitted when this occurs, which will
        become an exception raise in a future release.  Tables which
        are not part of the cycle are still returned in dependency
        order.

        To resolve such cycles, the
        :paramref:`_schema.ForeignKeyConstraint.use_alter` parameter
        may be applied to the constraints which create the cycle.
        Alternatively, the :func:`_schema.sort_tables_and_constraints`
        function automatically returns the cycle-involved foreign key
        constraints in a separate collection, so that they may be
        applied to a schema separately.

    .. versionchanged:: 1.3.17 - a warning is emitted when
       :func:`_schema.sort_tables` cannot perform a proper sort due to
       cyclical dependencies.  This will be an exception in a future
       release.  Additionally, the sort will continue to return
       other tables not involved in the cycle in dependency order
       which was not the case previously.

    :param tables: a sequence of :class:`_schema.Table` objects.

    :param skip_fn: optional callable which will be passed a
     :class:`_schema.ForeignKey` object; if it returns True, this
     constraint will not be considered as a dependency.  Note this is
     **different** from the same parameter in
     :func:`.sort_tables_and_constraints`, which is
     instead passed the owning :class:`_schema.ForeignKeyConstraint`
     object.

    :param extra_dependencies: a sequence of 2-tuples of tables which
     will also be considered as dependent on each other.

    .. seealso::

        :func:`.sort_tables_and_constraints`

        :attr:`_schema.MetaData.sorted_tables` - uses this function to sort


    """

    if skip_fn is None:
        _skip_fn = None
    else:
        # adapt the per-ForeignKey predicate to the per-constraint
        # filter_fn contract: True to skip, None for default handling
        def _skip_fn(fkc):
            if any(skip_fn(fk) for fk in fkc.elements):
                return True
            return None

    ordered = []
    for table, fkcs in sort_tables_and_constraints(
        tables,
        filter_fn=_skip_fn,
        extra_dependencies=extra_dependencies,
        _warn_for_cycles=True,
    ):
        # a None table is a placeholder carrying leftover constraints;
        # this function returns tables only
        if table is not None:
            ordered.append(table)
    return ordered
1137
1138
def sort_tables_and_constraints(
    tables, filter_fn=None, extra_dependencies=None, _warn_for_cycles=False
):
    """Sort a collection of :class:`_schema.Table` /
    :class:`_schema.ForeignKeyConstraint`
    objects.

    This is a dependency-ordered sort which emits tuples of
    ``(Table, [ForeignKeyConstraint, ...])`` such that each
    :class:`_schema.Table` follows the :class:`_schema.Table` objects
    it depends upon.  :class:`_schema.ForeignKeyConstraint` objects
    which had to be separated out because the sort could not satisfy
    their dependency rules are emitted afterwards as
    ``(None, [ForeignKeyConstraint ...])``.

    Tables are dependent on one another based on the presence of
    :class:`_schema.ForeignKeyConstraint` objects, explicit
    dependencies added by
    :meth:`_schema.Table.add_is_dependent_on`, as well as dependencies
    stated here using the
    :paramref:`~.sort_tables_and_constraints.skip_fn` and/or
    :paramref:`~.sort_tables_and_constraints.extra_dependencies`
    parameters.

    :param tables: a sequence of :class:`_schema.Table` objects.

    :param filter_fn: optional callable which will be passed a
     :class:`_schema.ForeignKeyConstraint` object, and returns a value
     based on whether this constraint should definitely be included or
     excluded as an inline constraint, or neither.  If it returns
     False, the constraint will definitely be included as a dependency
     that cannot be subject to ALTER; if True, it will **only** be
     included as an ALTER result at the end.  Returning None means the
     constraint is included in the table-based result unless it is
     detected as part of a dependency cycle.

    :param extra_dependencies: a sequence of 2-tuples of tables which
     will also be considered as dependent on each other.

    .. versionadded:: 1.0.0

    .. seealso::

        :func:`.sort_tables`


    """

    # edges that may never be removed, even to break a cycle
    hard_edges = set()
    # edges derived from plain foreign key constraints; these may be
    # dropped during cycle recovery below
    fk_edges = set()

    if extra_dependencies is not None:
        hard_edges.update(extra_dependencies)

    # constraints that will be emitted in the trailing ALTER collection
    alter_fkcs = set()
    for table in tables:
        for fkc in table.foreign_key_constraints:
            # use_alter constraints always go to the ALTER collection
            if fkc.use_alter is True:
                alter_fkcs.add(fkc)
                continue

            # a True return from filter_fn likewise forces the
            # constraint into the ALTER collection
            if filter_fn and filter_fn(fkc) is True:
                alter_fkcs.add(fkc)
                continue

            referred = fkc.referred_table
            # self-referential constraints contribute no edge
            if referred is not table:
                fk_edges.add((referred, table))

        hard_edges.update(
            (parent, table) for parent in table._extra_dependencies
        )

    try:
        ordered = list(
            topological.sort(
                hard_edges.union(fk_edges),
                tables,
                deterministic_order=True,
            )
        )
    except exc.CircularDependencyError as err:
        if _warn_for_cycles:
            util.warn(
                "Cannot correctly sort tables; there are unresolvable cycles "
                'between tables "%s", which is usually caused by mutually '
                "dependent foreign key constraints. Foreign key constraints "
                "involving these tables will not be considered; this warning "
                "may raise an error in a future release."
                % (", ".join(sorted(t.fullname for t in err.cycles)),)
            )
        for edge in err.edges:
            # only FK-derived edges may be removed; explicit
            # dependencies stay in place
            if edge not in fk_edges:
                continue
            table = edge[1]
            if table not in err.cycles:
                continue
            # pull out every removable constraint of the cyclic table;
            # a False from filter_fn pins a constraint inline
            removable = [
                fkc
                for fkc in table.foreign_key_constraints
                if filter_fn is None or filter_fn(fkc) is not False
            ]
            alter_fkcs.update(removable)
            for fkc in removable:
                referred = fkc.referred_table
                if referred is not table:
                    fk_edges.discard((referred, table))
        # retry the sort with the cyclic FK edges removed
        ordered = list(
            topological.sort(
                hard_edges.union(fk_edges),
                tables,
                deterministic_order=True,
            )
        )

    return [
        (table, table.foreign_key_constraints.difference(alter_fkcs))
        for table in ordered
    ] + [(None, list(alter_fkcs))]