Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/sqlalchemy/sql/ddl.py: 34%
336 statements
« prev ^ index » next coverage.py v7.0.1, created at 2022-12-25 06:11 +0000
1# sql/ddl.py
2# Copyright (C) 2009-2022 the SQLAlchemy authors and contributors
3# <see AUTHORS file>
4#
5# This module is part of SQLAlchemy and is released under
6# the MIT License: https://www.opensource.org/licenses/mit-license.php
7"""
8Provides the hierarchy of DDL-defining schema items as well as routines
9to invoke them for a create/drop call.
11"""
13from . import roles
14from .base import _bind_or_error
15from .base import _generative
16from .base import Executable
17from .base import SchemaVisitor
18from .elements import ClauseElement
19from .. import exc
20from .. import util
21from ..util import topological
class _DDLCompiles(ClauseElement):
    _hierarchy_supports_caching = False
    """disable cache warnings for all _DDLCompiles subclasses. """

    def _compiler(self, dialect, **kw):
        """Produce the DDL compiler for this element from the given
        dialect."""
        return dialect.ddl_compiler(dialect, self, **kw)

    def _compile_w_cache(self, *args, **kwargs):
        """DDL elements do not participate in compiled-statement caching."""
        raise NotImplementedError()
class DDLElement(roles.DDLRole, Executable, _DDLCompiles):
    """Base class for DDL expression constructs.

    This class is the base for the general purpose :class:`.DDL` class,
    as well as the various create/drop clause constructs such as
    :class:`.CreateTable`, :class:`.DropTable`, :class:`.AddConstraint`,
    etc.

    :class:`.DDLElement` integrates closely with SQLAlchemy events,
    introduced in :ref:`event_toplevel`.  An instance of one is
    itself an event receiving callable::

        event.listen(
            users,
            'after_create',
            AddConstraint(constraint).execute_if(dialect='postgresql')
        )

    .. seealso::

        :class:`.DDL`

        :class:`.DDLEvents`

        :ref:`event_toplevel`

        :ref:`schema_ddl_sequences`

    """

    # under legacy (pre-2.0) execution rules, DDL autocommits by default
    _execution_options = Executable._execution_options.union(
        {"autocommit": True}
    )

    # SchemaItem this DDL acts upon; assigned (on a generative copy) by
    # against()
    target = None
    # legacy conditional-execution criterion
    on = None
    # dialect-name criterion set by execute_if(); string or collection
    dialect = None
    # callable criterion set by execute_if()
    callable_ = None

    def _execute_on_connection(
        self, connection, multiparams, params, execution_options
    ):
        # Connection-level dispatch hook: DDL routes to the _execute_ddl
        # path rather than the DML/query execution path.
        return connection._execute_ddl(
            self, multiparams, params, execution_options
        )

    @util.deprecated_20(
        ":meth:`.DDLElement.execute`",
        alternative="All statement execution in SQLAlchemy 2.0 is performed "
        "by the :meth:`_engine.Connection.execute` method of "
        ":class:`_engine.Connection`, "
        "or in the ORM by the :meth:`.Session.execute` method of "
        ":class:`.Session`.",
    )
    def execute(self, bind=None, target=None):
        """Execute this DDL immediately.

        Executes the DDL statement in isolation using the supplied
        :class:`.Connectable` or
        :class:`.Connectable` assigned to the ``.bind``
        property, if not supplied.  If the DDL has a conditional ``on``
        criteria, it will be invoked with None as the event.

        :param bind:
          Optional, an ``Engine`` or ``Connection``.  If not supplied, a valid
          :class:`.Connectable` must be present in the
          ``.bind`` property.

        :param target:
          Optional, defaults to None.  The target :class:`_schema.SchemaItem`
          for the execute call.  This is equivalent to passing the
          :class:`_schema.SchemaItem` to the :meth:`.DDLElement.against`
          method and then invoking :meth:`_schema.DDLElement.execute`
          upon the resulting :class:`_schema.DDLElement` object.  See
          :meth:`.DDLElement.against` for further detail.

        """

        if bind is None:
            # raises if no bound engine/connection can be located
            bind = _bind_or_error(self)

        if self._should_execute(target, bind):
            return bind.execute(self.against(target))
        else:
            bind.engine.logger.info("DDL execution skipped, criteria not met.")

    @_generative
    def against(self, target):
        """Return a copy of this :class:`_schema.DDLElement` which will include
        the given target.

        This essentially applies the given item to the ``.target`` attribute
        of the returned :class:`_schema.DDLElement` object.  This target
        is then usable by event handlers and compilation routines in order to
        provide services such as tokenization of a DDL string in terms of a
        particular :class:`_schema.Table`.

        When a :class:`_schema.DDLElement` object is established as an event
        handler for the :meth:`_events.DDLEvents.before_create` or
        :meth:`_events.DDLEvents.after_create` events, and the event
        then occurs for a given target such as a :class:`_schema.Constraint`
        or :class:`_schema.Table`, that target is established with a copy
        of the :class:`_schema.DDLElement` object using this method, which
        then proceeds to the :meth:`_schema.DDLElement.execute` method
        in order to invoke the actual DDL instruction.

        :param target: a :class:`_schema.SchemaItem` that will be the subject
         of a DDL operation.

        :return: a copy of this :class:`_schema.DDLElement` with the
         ``.target`` attribute assigned to the given
         :class:`_schema.SchemaItem`.

        .. seealso::

            :class:`_schema.DDL` - uses tokenization against the "target" when
            processing the DDL string.

        """
        # @_generative: this assignment occurs on a shallow copy produced
        # by _generate(); the decorator returns that copy.
        self.target = target

    @_generative
    def execute_if(self, dialect=None, callable_=None, state=None):
        r"""Return a callable that will execute this
        :class:`_ddl.DDLElement` conditionally within an event handler.

        Used to provide a wrapper for event listening::

            event.listen(
                metadata,
                'before_create',
                DDL("my_ddl").execute_if(dialect='postgresql')
            )

        :param dialect: May be a string or tuple of strings.
          If a string, it will be compared to the name of the
          executing database dialect::

            DDL('something').execute_if(dialect='postgresql')

          If a tuple, specifies multiple dialect names::

            DDL('something').execute_if(dialect=('postgresql', 'mysql'))

        :param callable\_: A callable, which will be invoked with
          four positional arguments as well as optional keyword
          arguments:

            :ddl:
              This DDL element.

            :target:
              The :class:`_schema.Table` or :class:`_schema.MetaData`
              object which is the
              target of this event. May be None if the DDL is executed
              explicitly.

            :bind:
              The :class:`_engine.Connection` being used for DDL execution

            :tables:
              Optional keyword argument - a list of Table objects which are to
              be created/ dropped within a MetaData.create_all() or drop_all()
              method call.

            :state:
              Optional keyword argument - will be the ``state`` argument
              passed to this function.

            :checkfirst:
              Keyword argument, will be True if the 'checkfirst' flag was
              set during the call to ``create()``, ``create_all()``,
              ``drop()``, ``drop_all()``.

          If the callable returns a True value, the DDL statement will be
          executed.

        :param state: any value which will be passed to the callable\_
          as the ``state`` keyword argument.

        .. seealso::

            :class:`.DDLEvents`

            :ref:`event_toplevel`

        """
        # @_generative: assignments occur on a copy which is returned
        self.dialect = dialect
        self.callable_ = callable_
        self.state = state

    def _should_execute(self, target, bind, **kw):
        # dialect criterion: either a single dialect name, or a
        # collection of acceptable names
        if isinstance(self.dialect, util.string_types):
            if self.dialect != bind.engine.name:
                return False
        elif isinstance(self.dialect, (tuple, list, set)):
            if bind.engine.name not in self.dialect:
                return False
        # callable criterion: must return a truthy value for the DDL to run
        if self.callable_ is not None and not self.callable_(
            self, target, bind, state=self.state, **kw
        ):
            return False

        return True

    def __call__(self, target, bind, **kw):
        """Execute the DDL as a ddl_listener."""

        if self._should_execute(target, bind, **kw):
            return bind.execute(self.against(target))

    def bind(self):
        # getter half of the legacy ``bind`` property below
        if self._bind:
            return self._bind

    def _set_bind(self, bind):
        # setter half of the legacy ``bind`` property below
        self._bind = bind

    # legacy read/write ``bind`` attribute, deprecated in 2.0
    bind = property(bind, _set_bind)

    def _generate(self):
        # shallow copy used by the @_generative decorator; shares no
        # __dict__ with the original
        s = self.__class__.__new__(self.__class__)
        s.__dict__ = self.__dict__.copy()
        return s
class DDL(DDLElement):
    """A literal DDL statement.

    Represents a SQL DDL string to be emitted directly to the database.
    A :class:`.DDL` object acts as a DDL event listener and may be
    subscribed to the events described in :class:`.DDLEvents`, using
    either a :class:`_schema.Table` or a :class:`_schema.MetaData` object
    as the target.  Simple templating support allows one instance to
    serve repetitive tasks across many tables.

    Examples::

        from sqlalchemy import event, DDL

        tbl = Table('users', metadata, Column('uid', Integer))
        event.listen(tbl, 'before_create', DDL('DROP TRIGGER users_trigger'))

        spow = DDL('ALTER TABLE %(table)s SET secretpowers TRUE')
        event.listen(tbl, 'after_create', spow.execute_if(dialect='somedb'))

        drop_spow = DDL('ALTER TABLE users SET secretpowers FALSE')
        connection.execute(drop_spow)

    When operating on Table events, the following ``statement``
    string substitutions are available::

        %(table)s - the Table name, with any required quoting applied
        %(schema)s - the schema name, with any required quoting applied
        %(fullname)s - the Table name including schema, quoted if needed

    Any user-supplied "context" dictionary is merged over the standard
    substitutions above; on conflict, the context's keys win.

    """

    __visit_name__ = "ddl"

    @util.deprecated_params(
        bind=(
            "2.0",
            "The :paramref:`_ddl.DDL.bind` argument is deprecated and "
            "will be removed in SQLAlchemy 2.0.",
        ),
    )
    def __init__(self, statement, context=None, bind=None):
        """Create a DDL statement.

        :param statement:
          The SQL string to be executed.  It is interpolated with Python's
          string formatting operator against a fixed set of substitutions,
          plus any provided via the :paramref:`.DDL.context` parameter.
          A literal '%' must therefore be escaped as '%%'.  SQL bind
          parameters are not available in DDL statements.

        :param context:
          Optional dictionary, defaults to None.  Its values become
          available for string substitution on the DDL statement.

        :param bind:
          Optional.  A :class:`.Connectable` used by default when
          ``execute()`` is invoked without a bind argument.

        .. seealso::

            :class:`.DDLEvents`

            :ref:`event_toplevel`

        """
        # reject non-string input early, before any event wiring occurs
        if not isinstance(statement, util.string_types):
            raise exc.ArgumentError(
                "Expected a string or unicode SQL statement, got '%r'"
                % statement
            )

        self.statement = statement
        self.context = context or {}
        self._bind = bind

    def __repr__(self):
        # show the statement first, then any truthy criteria attributes
        parts = [repr(self.statement)]
        for key in ("on", "context"):
            value = getattr(self, key)
            if value:
                parts.append("%s=%r" % (key, value))
        return "<%s@%s; %s>" % (
            type(self).__name__,
            id(self),
            ", ".join(parts),
        )
class _CreateDropBase(DDLElement):
    """Common base for DDL constructs representing CREATE and DROP or
    their equivalents.

    Subclasses share a single ``element`` attribute, which refers to the
    schema item to be created or dropped.

    """

    @util.deprecated_params(
        bind=(
            "2.0",
            "The :paramref:`_ddl.DDLElement.bind` argument is "
            "deprecated and "
            "will be removed in SQLAlchemy 2.0.",
        ),
    )
    def __init__(
        self,
        element,
        bind=None,
        if_exists=False,
        if_not_exists=False,
        _legacy_bind=None,
    ):
        self.element = element
        # an explicit ``bind`` takes precedence over the legacy
        # pass-through value; when neither is set, leave the property
        # inherited from DDLElement untouched
        chosen_bind = bind or _legacy_bind
        if chosen_bind:
            self.bind = chosen_bind
        self.if_exists = if_exists
        self.if_not_exists = if_not_exists

    @property
    def stringify_dialect(self):
        """Dialect name used when stringifying this element without a
        bound engine."""
        return self.element.create_drop_stringify_dialect

    def _create_rule_disable(self, compiler):
        """Allow disable of _create_rule using a callable.

        Pass to _create_rule using
        util.portable_instancemethod(self._create_rule_disable)
        to retain serializability.

        """
        return False
class CreateSchema(_CreateDropBase):
    """Represent a CREATE SCHEMA statement.

    The argument here is the string name of the schema.

    """

    __visit_name__ = "create_schema"

    def __init__(self, name, quote=None, **kw):
        """Build a :class:`.CreateSchema` construct for schema ``name``."""
        super(CreateSchema, self).__init__(name, **kw)
        # optional explicit quoting preference for the schema name
        self.quote = quote
class DropSchema(_CreateDropBase):
    """Represent a DROP SCHEMA statement.

    The argument here is the string name of the schema.

    """

    __visit_name__ = "drop_schema"

    def __init__(self, name, quote=None, cascade=False, **kw):
        """Build a :class:`.DropSchema` construct for schema ``name``."""
        super(DropSchema, self).__init__(name, **kw)
        # optional explicit quoting preference for the schema name
        self.quote = quote
        # when True, render CASCADE so contained objects are dropped too
        self.cascade = cascade
class CreateTable(_CreateDropBase):
    """Represent a CREATE TABLE statement."""

    __visit_name__ = "create_table"

    @util.deprecated_params(
        bind=(
            "2.0",
            "The :paramref:`_ddl.CreateTable.bind` argument is deprecated and "
            "will be removed in SQLAlchemy 2.0.",
        ),
    )
    def __init__(
        self,
        element,
        bind=None,
        include_foreign_key_constraints=None,
        if_not_exists=False,
    ):
        """Construct a :class:`.CreateTable` element.

        :param element: the :class:`_schema.Table` that is the subject
         of the CREATE.
        :param bind: See the description for 'bind' in :class:`.DDL`.
        :param include_foreign_key_constraints: optional sequence of
         :class:`_schema.ForeignKeyConstraint` objects to be rendered
         inline within the CREATE construct; when omitted, every foreign
         key constraint that does not specify use_alter=True is included.

         .. versionadded:: 1.0.0

        :param if_not_exists: if True, an IF NOT EXISTS operator will be
         applied to the construct.

         .. versionadded:: 1.4.0b2

        """
        super(CreateTable, self).__init__(
            element, if_not_exists=if_not_exists, _legacy_bind=bind
        )
        self.include_foreign_key_constraints = include_foreign_key_constraints
        # one CreateColumn wrapper per column, in table definition order
        self.columns = [CreateColumn(col) for col in element.columns]
class _DropView(_CreateDropBase):
    """Semi-public 'DROP VIEW' construct.

    Used by the test suite for dialect-agnostic drops of views.
    This object will eventually be part of a public "view" API.

    """

    # dispatch key for the DDL compiler (visit_drop_view)
    __visit_name__ = "drop_view"
class CreateColumn(_DDLCompiles):
    """Represent a :class:`_schema.Column`
    as rendered in a CREATE TABLE statement,
    via the :class:`.CreateTable` construct.

    This is provided to support custom column DDL within the generation
    of CREATE TABLE statements, by using the
    compiler extension documented in :ref:`sqlalchemy.ext.compiler_toplevel`
    to extend :class:`.CreateColumn`.

    Typical integration is to examine the incoming :class:`_schema.Column`
    object, and to redirect compilation if a particular flag or condition
    is found::

        from sqlalchemy import schema
        from sqlalchemy.ext.compiler import compiles

        @compiles(schema.CreateColumn)
        def compile(element, compiler, **kw):
            column = element.element

            if "special" not in column.info:
                return compiler.visit_create_column(element, **kw)

            text = "%s SPECIAL DIRECTIVE %s" % (
                column.name,
                compiler.type_compiler.process(column.type)
            )
            default = compiler.get_column_default_string(column)
            if default is not None:
                text += " DEFAULT " + default

            if not column.nullable:
                text += " NOT NULL"

            if column.constraints:
                text += " ".join(
                    compiler.process(const)
                    for const in column.constraints)
            return text

    The above construct can be applied to a :class:`_schema.Table`
    as follows::

        from sqlalchemy import Table, MetaData, Column, Integer, String
        from sqlalchemy import schema

        metadata = MetaData()

        table = Table('mytable', MetaData(),
                Column('x', Integer, info={"special":True}, primary_key=True),
                Column('y', String(50)),
                Column('z', String(20), info={"special":True})
            )

        metadata.create_all(conn)

    Above, the directives we've added to the :attr:`_schema.Column.info`
    collection
    will be detected by our custom compilation scheme::

        CREATE TABLE mytable (
                x SPECIAL DIRECTIVE INTEGER NOT NULL,
                y VARCHAR(50),
                z SPECIAL DIRECTIVE VARCHAR(20),
            PRIMARY KEY (x)
        )

    The :class:`.CreateColumn` construct can also be used to skip certain
    columns when producing a ``CREATE TABLE``.  This is accomplished by
    creating a compilation rule that conditionally returns ``None``.
    This is essentially how to produce the same effect as using the
    ``system=True`` argument on :class:`_schema.Column`, which marks a column
    as an implicitly-present "system" column.

    For example, suppose we wish to produce a :class:`_schema.Table`
    which skips
    rendering of the PostgreSQL ``xmin`` column against the PostgreSQL
    backend, but on other backends does render it, in anticipation of a
    triggered rule.  A conditional compilation rule could skip this name only
    on PostgreSQL::

        from sqlalchemy.schema import CreateColumn

        @compiles(CreateColumn, "postgresql")
        def skip_xmin(element, compiler, **kw):
            if element.element.name == 'xmin':
                return None
            else:
                return compiler.visit_create_column(element, **kw)


        my_table = Table('mytable', metadata,
                    Column('id', Integer, primary_key=True),
                    Column('xmin', Integer)
                )

    Above, a :class:`.CreateTable` construct will generate a ``CREATE TABLE``
    which only includes the ``id`` column in the string; the ``xmin`` column
    will be omitted, but only against the PostgreSQL backend.

    """

    # dispatch key for the DDL compiler (visit_create_column)
    __visit_name__ = "create_column"

    def __init__(self, element):
        # element: the Column object this construct renders
        self.element = element
class DropTable(_CreateDropBase):
    """Represent a DROP TABLE statement."""

    __visit_name__ = "drop_table"

    @util.deprecated_params(
        bind=(
            "2.0",
            "The :paramref:`_ddl.DropTable.bind` argument is "
            "deprecated and "
            "will be removed in SQLAlchemy 2.0.",
        ),
    )
    def __init__(self, element, bind=None, if_exists=False):
        """Construct a :class:`.DropTable` element.

        :param element: the :class:`_schema.Table` that is the subject
         of the DROP.
        :param bind: See the description for 'bind' in :class:`.DDL`.
        :param if_exists: if True, an IF EXISTS operator will be applied
         to the construct.

         .. versionadded:: 1.4.0b2

        """
        super(DropTable, self).__init__(
            element, if_exists=if_exists, _legacy_bind=bind
        )
class CreateSequence(_CreateDropBase):
    """Represent a CREATE SEQUENCE statement."""

    # dispatch key for the DDL compiler (visit_create_sequence)
    __visit_name__ = "create_sequence"
class DropSequence(_CreateDropBase):
    """Represent a DROP SEQUENCE statement."""

    # dispatch key for the DDL compiler (visit_drop_sequence)
    __visit_name__ = "drop_sequence"
class CreateIndex(_CreateDropBase):
    """Represent a CREATE INDEX statement."""

    __visit_name__ = "create_index"

    @util.deprecated_params(
        bind=(
            "2.0",
            "The :paramref:`_ddl.CreateIndex.bind` argument is "
            "deprecated and "
            "will be removed in SQLAlchemy 2.0.",
        ),
    )
    def __init__(self, element, bind=None, if_not_exists=False):
        """Construct a :class:`.CreateIndex` element.

        :param element: the :class:`_schema.Index` that is the subject
         of the CREATE.
        :param bind: See the description for 'bind' in :class:`.DDL`.
        :param if_not_exists: if True, an IF NOT EXISTS operator will be
         applied to the construct.

         .. versionadded:: 1.4.0b2

        """
        super(CreateIndex, self).__init__(
            element, if_not_exists=if_not_exists, _legacy_bind=bind
        )
class DropIndex(_CreateDropBase):
    """Represent a DROP INDEX statement."""

    __visit_name__ = "drop_index"

    @util.deprecated_params(
        bind=(
            "2.0",
            "The :paramref:`_ddl.DropIndex.bind` argument is "
            "deprecated and "
            "will be removed in SQLAlchemy 2.0.",
        ),
    )
    def __init__(self, element, bind=None, if_exists=False):
        """Construct a :class:`.DropIndex` element.

        :param element: the :class:`_schema.Index` that is the subject
         of the DROP.
        :param bind: See the description for 'bind' in :class:`.DDL`.
        :param if_exists: if True, an IF EXISTS operator will be applied
         to the construct.

         .. versionadded:: 1.4.0b2

        """
        super(DropIndex, self).__init__(
            element, if_exists=if_exists, _legacy_bind=bind
        )
class AddConstraint(_CreateDropBase):
    """Represent an ALTER TABLE ADD CONSTRAINT statement."""

    __visit_name__ = "add_constraint"

    def __init__(self, element, *args, **kw):
        """Construct the element for the given constraint; additional
        arguments pass through to :class:`._CreateDropBase`."""
        super(AddConstraint, self).__init__(element, *args, **kw)
        # the constraint will be emitted via ALTER, so suppress its inline
        # rendering within the parent CREATE TABLE
        element._create_rule = util.portable_instancemethod(
            self._create_rule_disable
        )
class DropConstraint(_CreateDropBase):
    """Represent an ALTER TABLE DROP CONSTRAINT statement."""

    __visit_name__ = "drop_constraint"

    def __init__(self, element, cascade=False, **kw):
        """Construct the element; ``cascade`` controls rendering of the
        CASCADE keyword."""
        super(DropConstraint, self).__init__(element, **kw)
        self.cascade = cascade
        # the constraint is dropped via ALTER, so suppress its inline
        # rendering within the parent CREATE TABLE
        element._create_rule = util.portable_instancemethod(
            self._create_rule_disable
        )
class SetTableComment(_CreateDropBase):
    """Represent a COMMENT ON TABLE IS statement."""

    # dispatch key for the DDL compiler (visit_set_table_comment)
    __visit_name__ = "set_table_comment"
class DropTableComment(_CreateDropBase):
    """Represent a COMMENT ON TABLE '' statement.

    Note this varies a lot across database backends.

    """

    # dispatch key for the DDL compiler (visit_drop_table_comment)
    __visit_name__ = "drop_table_comment"
class SetColumnComment(_CreateDropBase):
    """Represent a COMMENT ON COLUMN IS statement."""

    # dispatch key for the DDL compiler (visit_set_column_comment)
    __visit_name__ = "set_column_comment"
class DropColumnComment(_CreateDropBase):
    """Represent a COMMENT ON COLUMN IS NULL statement."""

    # dispatch key for the DDL compiler (visit_drop_column_comment)
    __visit_name__ = "drop_column_comment"
class DDLBase(SchemaVisitor):
    """Common base for the CREATE/DROP schema visitors; holds the
    connection DDL is emitted on."""

    def __init__(self, connection):
        self.connection = connection
class SchemaGenerator(DDLBase):
    """Schema visitor that emits CREATE statements for metadata, tables,
    indexes, sequences and foreign key constraints.

    Traversal is driven by the ``visit_*`` methods, dispatched via the
    :class:`.SchemaVisitor` machinery (``traverse_single``).
    """

    def __init__(
        self, dialect, connection, checkfirst=False, tables=None, **kwargs
    ):
        # checkfirst: when True, probe the database and skip objects that
        # already exist
        # tables: optional explicit subset of tables; None means all
        # tables in the MetaData being visited
        super(SchemaGenerator, self).__init__(connection, **kwargs)
        self.checkfirst = checkfirst
        self.tables = tables
        self.preparer = dialect.identifier_preparer
        self.dialect = dialect
        # scratch space used during traversal
        self.memo = {}

    def _can_create_table(self, table):
        # validate names up front so errors surface before any DDL runs
        self.dialect.validate_identifier(table.name)
        effective_schema = self.connection.schema_for_object(table)
        if effective_schema:
            self.dialect.validate_identifier(effective_schema)
        # with checkfirst, only create the table if it's not already there
        return not self.checkfirst or not self.dialect.has_table(
            self.connection, table.name, schema=effective_schema
        )

    def _can_create_index(self, index):
        effective_schema = self.connection.schema_for_object(index.table)
        if effective_schema:
            self.dialect.validate_identifier(effective_schema)
        # with checkfirst, only create the index if it doesn't exist yet
        return not self.checkfirst or not self.dialect.has_index(
            self.connection,
            index.table.name,
            index.name,
            schema=effective_schema,
        )

    def _can_create_sequence(self, sequence):
        effective_schema = self.connection.schema_for_object(sequence)

        # skip sequences entirely on backends without sequence support;
        # skip "optional" sequences when the backend doesn't need them
        return self.dialect.supports_sequences and (
            (not self.dialect.sequences_optional or not sequence.optional)
            and (
                not self.checkfirst
                or not self.dialect.has_sequence(
                    self.connection, sequence.name, schema=effective_schema
                )
            )
        )

    def visit_metadata(self, metadata):
        if self.tables is not None:
            tables = self.tables
        else:
            tables = list(metadata.tables.values())

        # dependency-sorted (table, fk-constraints) pairs; a None table
        # entry carries constraints to be added separately via ALTER
        # (used to break dependency cycles)
        collection = sort_tables_and_constraints(
            [t for t in tables if self._can_create_table(t)]
        )

        # standalone sequences (not owned by a column) are created first
        seq_coll = [
            s
            for s in metadata._sequences.values()
            if s.column is None and self._can_create_sequence(s)
        ]

        event_collection = [t for (t, fks) in collection if t is not None]
        metadata.dispatch.before_create(
            metadata,
            self.connection,
            tables=event_collection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
        )

        for seq in seq_coll:
            self.traverse_single(seq, create_ok=True)

        for table, fkcs in collection:
            if table is not None:
                self.traverse_single(
                    table,
                    create_ok=True,
                    include_foreign_key_constraints=fkcs,
                    _is_metadata_operation=True,
                )
            else:
                # cycle-breaking constraints, emitted as ALTER statements
                for fkc in fkcs:
                    self.traverse_single(fkc)

        metadata.dispatch.after_create(
            metadata,
            self.connection,
            tables=event_collection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
        )

    def visit_table(
        self,
        table,
        create_ok=False,
        include_foreign_key_constraints=None,
        _is_metadata_operation=False,
    ):
        # create_ok=True means the existence check was already done by
        # visit_metadata
        if not create_ok and not self._can_create_table(table):
            return

        table.dispatch.before_create(
            table,
            self.connection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
            _is_metadata_operation=_is_metadata_operation,
        )

        # visit column defaults (e.g. sequences) before the table itself
        for column in table.columns:
            if column.default is not None:
                self.traverse_single(column.default)

        if not self.dialect.supports_alter:
            # e.g., don't omit any foreign key constraints
            include_foreign_key_constraints = None

        self.connection.execute(
            # fmt: off
            CreateTable(
                table,
                include_foreign_key_constraints=  # noqa
                    include_foreign_key_constraints,  # noqa
            )
            # fmt: on
        )

        if hasattr(table, "indexes"):
            for index in table.indexes:
                self.traverse_single(index, create_ok=True)

        # emit comments as separate statements when the backend supports
        # them but cannot render them inline with CREATE TABLE
        if self.dialect.supports_comments and not self.dialect.inline_comments:
            if table.comment is not None:
                self.connection.execute(SetTableComment(table))

            for column in table.columns:
                if column.comment is not None:
                    self.connection.execute(SetColumnComment(column))

        table.dispatch.after_create(
            table,
            self.connection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
            _is_metadata_operation=_is_metadata_operation,
        )

    def visit_foreign_key_constraint(self, constraint):
        # ALTER-based constraint creation; no-op if ALTER is unsupported
        if not self.dialect.supports_alter:
            return
        self.connection.execute(AddConstraint(constraint))

    def visit_sequence(self, sequence, create_ok=False):
        if not create_ok and not self._can_create_sequence(sequence):
            return
        self.connection.execute(CreateSequence(sequence))

    def visit_index(self, index, create_ok=False):
        if not create_ok and not self._can_create_index(index):
            return
        self.connection.execute(CreateIndex(index))
class SchemaDropper(DDLBase):
    """Schema visitor that emits DROP statements for metadata, tables,
    indexes, sequences and foreign key constraints, in reverse dependency
    order."""

    def __init__(
        self, dialect, connection, checkfirst=False, tables=None, **kwargs
    ):
        # checkfirst: when True, probe the database and only drop objects
        # that actually exist
        # tables: optional explicit subset of tables; None means all
        # tables in the MetaData being visited
        super(SchemaDropper, self).__init__(connection, **kwargs)
        self.checkfirst = checkfirst
        self.tables = tables
        self.preparer = dialect.identifier_preparer
        self.dialect = dialect
        # scratch space used during traversal
        self.memo = {}

    def visit_metadata(self, metadata):
        if self.tables is not None:
            tables = self.tables
        else:
            tables = list(metadata.tables.values())

        try:
            unsorted_tables = [t for t in tables if self._can_drop_table(t)]
            # reverse of the CREATE order; named constraints may be
            # separated out (filter_fn returning None) so they can be
            # dropped via ALTER first, breaking dependency cycles
            collection = list(
                reversed(
                    sort_tables_and_constraints(
                        unsorted_tables,
                        filter_fn=lambda constraint: False
                        if not self.dialect.supports_alter
                        or constraint.name is None
                        else None,
                    )
                )
            )
        except exc.CircularDependencyError as err2:
            if not self.dialect.supports_alter:
                # can't break the cycle via DROP CONSTRAINT; warn and
                # fall back to an unsorted drop
                util.warn(
                    "Can't sort tables for DROP; an "
                    "unresolvable foreign key "
                    "dependency exists between tables: %s; and backend does "
                    "not support ALTER. To restore at least a partial sort, "
                    "apply use_alter=True to ForeignKey and "
                    "ForeignKeyConstraint "
                    "objects involved in the cycle to mark these as known "
                    "cycles that will be ignored."
                    % (", ".join(sorted([t.fullname for t in err2.cycles])))
                )
                collection = [(t, ()) for t in unsorted_tables]
            else:
                # ALTER is available, so the fix is for the user to name
                # the constraints involved; re-raise with guidance
                util.raise_(
                    exc.CircularDependencyError(
                        err2.args[0],
                        err2.cycles,
                        err2.edges,
                        msg="Can't sort tables for DROP; an "
                        "unresolvable foreign key "
                        "dependency exists between tables: %s. Please ensure "
                        "that the ForeignKey and ForeignKeyConstraint objects "
                        "involved in the cycle have "
                        "names so that they can be dropped using "
                        "DROP CONSTRAINT."
                        % (
                            ", ".join(
                                sorted([t.fullname for t in err2.cycles])
                            )
                        ),
                    ),
                    from_=err2,
                )

        seq_coll = [
            s
            for s in metadata._sequences.values()
            if self._can_drop_sequence(s)
        ]

        event_collection = [t for (t, fks) in collection if t is not None]

        metadata.dispatch.before_drop(
            metadata,
            self.connection,
            tables=event_collection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
        )

        for table, fkcs in collection:
            if table is not None:
                self.traverse_single(
                    table,
                    drop_ok=True,
                    _is_metadata_operation=True,
                    _ignore_sequences=seq_coll,
                )
            else:
                # cycle-breaking constraints, dropped via ALTER
                for fkc in fkcs:
                    self.traverse_single(fkc)

        # sequences are dropped after the tables that may reference them
        for seq in seq_coll:
            self.traverse_single(seq, drop_ok=seq.column is None)

        metadata.dispatch.after_drop(
            metadata,
            self.connection,
            tables=event_collection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
        )

    def _can_drop_table(self, table):
        self.dialect.validate_identifier(table.name)
        effective_schema = self.connection.schema_for_object(table)
        if effective_schema:
            self.dialect.validate_identifier(effective_schema)
        # with checkfirst, only drop the table if it actually exists
        return not self.checkfirst or self.dialect.has_table(
            self.connection, table.name, schema=effective_schema
        )

    def _can_drop_index(self, index):
        effective_schema = self.connection.schema_for_object(index.table)
        if effective_schema:
            self.dialect.validate_identifier(effective_schema)
        # with checkfirst, only drop the index if it actually exists
        return not self.checkfirst or self.dialect.has_index(
            self.connection,
            index.table.name,
            index.name,
            schema=effective_schema,
        )

    def _can_drop_sequence(self, sequence):
        effective_schema = self.connection.schema_for_object(sequence)
        # skip on backends without sequences; skip "optional" sequences
        # the backend never created
        return self.dialect.supports_sequences and (
            (not self.dialect.sequences_optional or not sequence.optional)
            and (
                not self.checkfirst
                or self.dialect.has_sequence(
                    self.connection, sequence.name, schema=effective_schema
                )
            )
        )

    def visit_index(self, index, drop_ok=False):
        if not drop_ok and not self._can_drop_index(index):
            return

        self.connection.execute(DropIndex(index))

    def visit_table(
        self,
        table,
        drop_ok=False,
        _is_metadata_operation=False,
        _ignore_sequences=(),
    ):
        # drop_ok=True means the existence check was already done by
        # visit_metadata
        if not drop_ok and not self._can_drop_table(table):
            return

        table.dispatch.before_drop(
            table,
            self.connection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
            _is_metadata_operation=_is_metadata_operation,
        )

        self.connection.execute(DropTable(table))

        # traverse client side defaults which may refer to server-side
        # sequences. noting that some of these client side defaults may also be
        # set up as server side defaults (see https://docs.sqlalchemy.org/en/
        # latest/core/defaults.html#associating-a-sequence-as-the-server-side-
        # default), so have to be dropped after the table is dropped.
        for column in table.columns:
            if (
                column.default is not None
                and column.default not in _ignore_sequences
            ):
                self.traverse_single(column.default)

        table.dispatch.after_drop(
            table,
            self.connection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
            _is_metadata_operation=_is_metadata_operation,
        )

    def visit_foreign_key_constraint(self, constraint):
        # ALTER-based constraint drop; no-op if ALTER is unsupported
        if not self.dialect.supports_alter:
            return
        self.connection.execute(DropConstraint(constraint))

    def visit_sequence(self, sequence, drop_ok=False):

        if not drop_ok and not self._can_drop_sequence(sequence):
            return
        self.connection.execute(DropSequence(sequence))
def sort_tables(
    tables,
    skip_fn=None,
    extra_dependencies=None,
):
    """Sort a collection of :class:`_schema.Table` objects based on
    dependency.

    This is a dependency-ordered sort which will emit :class:`_schema.Table`
    objects such that they will follow their dependent :class:`_schema.Table`
    objects.
    Tables are dependent on another based on the presence of
    :class:`_schema.ForeignKeyConstraint`
    objects as well as explicit dependencies
    added by :meth:`_schema.Table.add_is_dependent_on`.

    .. warning::

        The :func:`._schema.sort_tables` function cannot by itself
        accommodate automatic resolution of dependency cycles between
        tables, which are usually caused by mutually dependent foreign key
        constraints.  When these cycles are detected, the foreign keys of
        these tables are omitted from consideration in the sort and a
        warning is emitted (which will become an exception in a future
        release); tables not part of the cycle are still returned in
        dependency order.  To resolve such cycles, apply
        :paramref:`_schema.ForeignKeyConstraint.use_alter` to the
        constraints creating the cycle, or use
        :func:`_schema.sort_tables_and_constraints`, which returns the
        cyclic foreign key constraints in a separate collection.

    .. versionchanged:: 1.3.17 - a warning is emitted when
       :func:`_schema.sort_tables` cannot perform a proper sort due to
       cyclical dependencies.  This will be an exception in a future
       release.  Additionally, the sort will continue to return
       other tables not involved in the cycle in dependency order
       which was not the case previously.

    :param tables: a sequence of :class:`_schema.Table` objects.

    :param skip_fn: optional callable which will be passed a
     :class:`_schema.ForeignKey` object; if it returns True, this
     constraint will not be considered as a dependency.  Note this is
     **different** from the same parameter in
     :func:`.sort_tables_and_constraints`, which is
     instead passed the owning :class:`_schema.ForeignKeyConstraint` object.

    :param extra_dependencies: a sequence of 2-tuples of tables which will
     also be considered as dependent on each other.

    .. seealso::

        :func:`.sort_tables_and_constraints`

        :attr:`_schema.MetaData.sorted_tables` - uses this function to sort

    """

    if skip_fn is None:
        fkc_filter = None
    else:

        def fkc_filter(fkc):
            # Adapt the per-ForeignKey ``skip_fn`` to the per-constraint
            # filter expected by sort_tables_and_constraints.  Return True
            # to skip the constraint, otherwise None — deliberately NOT
            # False, since False would mark the constraint as un-removable
            # during cycle resolution.
            if any(skip_fn(fk) for fk in fkc.elements):
                return True
            return None

    pairs = sort_tables_and_constraints(
        tables,
        filter_fn=fkc_filter,
        extra_dependencies=extra_dependencies,
        _warn_for_cycles=True,
    )
    # Drop the trailing (None, [fkcs]) entry; only tables are returned.
    return [table for table, _ in pairs if table is not None]
def sort_tables_and_constraints(
    tables, filter_fn=None, extra_dependencies=None, _warn_for_cycles=False
):
    """Sort a collection of :class:`_schema.Table` /
    :class:`_schema.ForeignKeyConstraint`
    objects.

    This is a dependency-ordered sort which will emit tuples of
    ``(Table, [ForeignKeyConstraint, ...])`` such that each
    :class:`_schema.Table` follows its dependent :class:`_schema.Table`
    objects.
    Remaining :class:`_schema.ForeignKeyConstraint`
    objects that are separate due to
    dependency rules not satisfied by the sort are emitted afterwards
    as ``(None, [ForeignKeyConstraint ...])``.

    Tables are dependent on another based on the presence of
    :class:`_schema.ForeignKeyConstraint` objects, explicit dependencies
    added by :meth:`_schema.Table.add_is_dependent_on`,
    as well as dependencies
    stated here using the :paramref:`~.sort_tables_and_constraints.skip_fn`
    and/or :paramref:`~.sort_tables_and_constraints.extra_dependencies`
    parameters.

    :param tables: a sequence of :class:`_schema.Table` objects.

    :param filter_fn: optional callable which will be passed a
     :class:`_schema.ForeignKeyConstraint` object,
     and returns a value based on
     whether this constraint should definitely be included or excluded as
     an inline constraint, or neither.  If it returns False, the constraint
     will definitely be included as a dependency that cannot be subject
     to ALTER; if True, it will **only** be included as an ALTER result at
     the end.   Returning None means the constraint is included in the
     table-based result unless it is detected as part of a dependency cycle.

    :param extra_dependencies: a sequence of 2-tuples of tables which will
     also be considered as dependent on each other.

    .. versionadded:: 1.0.0

    .. seealso::

        :func:`.sort_tables`

    """

    # "fixed" dependency edges can never be removed by cycle resolution;
    # "mutable" edges come from ordinary FK constraints and may be
    # dropped from the graph (deferred to ALTER) if a cycle is detected.
    fixed_dependencies = set()
    mutable_dependencies = set()

    if extra_dependencies is not None:
        fixed_dependencies.update(extra_dependencies)

    # Constraints collected here are emitted in the final (None, [...])
    # entry rather than inline with their table.
    remaining_fkcs = set()
    for table in tables:
        for fkc in table.foreign_key_constraints:
            # use_alter constraints are always deferred to ALTER.
            if fkc.use_alter is True:
                remaining_fkcs.add(fkc)
                continue

            if filter_fn:
                filtered = filter_fn(fkc)

                # True -> defer to ALTER; False/None fall through and
                # contribute a (removable) dependency edge.
                if filtered is True:
                    remaining_fkcs.add(fkc)
                    continue

            # self-referential FKs add no edge (a table cannot depend
            # on itself in the sort).
            dependent_on = fkc.referred_table
            if dependent_on is not table:
                mutable_dependencies.add((dependent_on, table))

        # dependencies registered via Table.add_is_dependent_on() are
        # always fixed.
        fixed_dependencies.update(
            (parent, table) for parent in table._extra_dependencies
        )

    try:
        candidate_sort = list(
            topological.sort(
                fixed_dependencies.union(mutable_dependencies),
                tables,
            )
        )
    except exc.CircularDependencyError as err:
        if _warn_for_cycles:
            util.warn(
                "Cannot correctly sort tables; there are unresolvable cycles "
                'between tables "%s", which is usually caused by mutually '
                "dependent foreign key constraints. Foreign key constraints "
                "involving these tables will not be considered; this warning "
                "may raise an error in a future release."
                % (", ".join(sorted(t.fullname for t in err.cycles)),)
            )
        # Break the cycle by dropping mutable (FK-derived) edges that
        # point into cyclic tables; the corresponding constraints are
        # deferred to the trailing ALTER collection instead.  Only
        # constraints the filter_fn did not pin with False are removable.
        for edge in err.edges:
            if edge in mutable_dependencies:
                table = edge[1]
                if table not in err.cycles:
                    continue
                can_remove = [
                    fkc
                    for fkc in table.foreign_key_constraints
                    if filter_fn is None or filter_fn(fkc) is not False
                ]
                remaining_fkcs.update(can_remove)
                for fkc in can_remove:
                    dependent_on = fkc.referred_table
                    if dependent_on is not table:
                        mutable_dependencies.discard((dependent_on, table))
        # retry the sort with the reduced edge set; fixed edges were
        # kept, so a cycle among those will still raise.
        candidate_sort = list(
            topological.sort(
                fixed_dependencies.union(mutable_dependencies),
                tables,
            )
        )

    return [
        (table, table.foreign_key_constraints.difference(remaining_fkcs))
        for table in candidate_sort
    ] + [(None, list(remaining_fkcs))]