Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/sqlalchemy/sql/ddl.py: 34%

338 statements  

« prev     ^ index     » next       coverage.py v7.2.7, created at 2023-06-07 06:35 +0000

1# sql/ddl.py 

2# Copyright (C) 2009-2023 the SQLAlchemy authors and contributors 

3# <see AUTHORS file> 

4# 

5# This module is part of SQLAlchemy and is released under 

6# the MIT License: https://www.opensource.org/licenses/mit-license.php 

7""" 

8Provides the hierarchy of DDL-defining schema items as well as routines 

9to invoke them for a create/drop call. 

10 

11""" 

12 

13from . import roles 

14from .base import _bind_or_error 

15from .base import _generative 

16from .base import Executable 

17from .base import SchemaVisitor 

18from .elements import ClauseElement 

19from .. import exc 

20from .. import util 

21from ..util import topological 

22 

23 

class _DDLCompiles(ClauseElement):
    """Mixin marking an element as compiled via the DDL compiler."""

    # DDL constructs never participate in the compiled-statement cache;
    # suppress cache warnings for every subclass in this hierarchy.
    _hierarchy_supports_caching = False

    def _compiler(self, dialect, **kw):
        """Return the DDL compiler appropriate for this element for the
        given Dialect."""
        return dialect.ddl_compiler(dialect, self, **kw)

    def _compile_w_cache(self, *arg, **kw):
        # caching path is explicitly unsupported for DDL
        raise NotImplementedError()

36 

37 

class DDLElement(roles.DDLRole, Executable, _DDLCompiles):
    """Base class for DDL expression constructs.

    This class is the base for the general purpose :class:`.DDL` class,
    as well as the various create/drop clause constructs such as
    :class:`.CreateTable`, :class:`.DropTable`, :class:`.AddConstraint`,
    etc.

    :class:`.DDLElement` integrates closely with SQLAlchemy events,
    introduced in :ref:`event_toplevel`. An instance of one is
    itself an event receiving callable::

        event.listen(
            users,
            'after_create',
            AddConstraint(constraint).execute_if(dialect='postgresql')
        )

    .. seealso::

        :class:`.DDL`

        :class:`.DDLEvents`

        :ref:`event_toplevel`

        :ref:`schema_ddl_sequences`

    """

    # DDL statements execute in an autocommitting context under the
    # legacy (pre-2.0) execution rules.
    _execution_options = Executable._execution_options.union(
        {"autocommit": True}
    )

    # class-level defaults for state assigned by against() / execute_if()
    target = None
    on = None
    dialect = None
    callable_ = None

    def _execute_on_connection(
        self, connection, multiparams, params, execution_options
    ):
        # dispatch to the Connection's DDL-specific execution path
        return connection._execute_ddl(
            self, multiparams, params, execution_options
        )

    @util.deprecated_20(
        ":meth:`.DDLElement.execute`",
        alternative="All statement execution in SQLAlchemy 2.0 is performed "
        "by the :meth:`_engine.Connection.execute` method of "
        ":class:`_engine.Connection`, "
        "or in the ORM by the :meth:`.Session.execute` method of "
        ":class:`.Session`.",
    )
    def execute(self, bind=None, target=None):
        """Execute this DDL immediately.

        Executes the DDL statement in isolation using the supplied
        :class:`.Connectable` or
        :class:`.Connectable` assigned to the ``.bind``
        property, if not supplied. If the DDL has a conditional ``on``
        criteria, it will be invoked with None as the event.

        :param bind:
          Optional, an ``Engine`` or ``Connection``. If not supplied, a valid
          :class:`.Connectable` must be present in the
          ``.bind`` property.

        :param target:
          Optional, defaults to None. The target :class:`_schema.SchemaItem`
          for the execute call. This is equivalent to passing the
          :class:`_schema.SchemaItem` to the :meth:`.DDLElement.against`
          method and then invoking :meth:`_schema.DDLElement.execute`
          upon the resulting :class:`_schema.DDLElement` object. See
          :meth:`.DDLElement.against` for further detail.

        """

        if bind is None:
            # raise if no ``.bind`` has been associated either
            bind = _bind_or_error(self)

        if self._should_execute(target, bind):
            return bind.execute(self.against(target))
        else:
            bind.engine.logger.info("DDL execution skipped, criteria not met.")

    @_generative
    def against(self, target):
        """Return a copy of this :class:`_schema.DDLElement` which will include
        the given target.

        This essentially applies the given item to the ``.target`` attribute
        of the returned :class:`_schema.DDLElement` object. This target
        is then usable by event handlers and compilation routines in order to
        provide services such as tokenization of a DDL string in terms of a
        particular :class:`_schema.Table`.

        When a :class:`_schema.DDLElement` object is established as an event
        handler for the :meth:`_events.DDLEvents.before_create` or
        :meth:`_events.DDLEvents.after_create` events, and the event
        then occurs for a given target such as a :class:`_schema.Constraint`
        or :class:`_schema.Table`, that target is established with a copy
        of the :class:`_schema.DDLElement` object using this method, which
        then proceeds to the :meth:`_schema.DDLElement.execute` method
        in order to invoke the actual DDL instruction.

        :param target: a :class:`_schema.SchemaItem` that will be the subject
         of a DDL operation.

        :return: a copy of this :class:`_schema.DDLElement` with the
         ``.target`` attribute assigned to the given
         :class:`_schema.SchemaItem`.

        .. seealso::

            :class:`_schema.DDL` - uses tokenization against the "target" when
            processing the DDL string.

        """

        # @_generative: mutates a copy produced by _generate(), not self
        self.target = target

    @_generative
    def execute_if(self, dialect=None, callable_=None, state=None):
        r"""Return a callable that will execute this
        :class:`_ddl.DDLElement` conditionally within an event handler.

        Used to provide a wrapper for event listening::

            event.listen(
                metadata,
                'before_create',
                DDL("my_ddl").execute_if(dialect='postgresql')
            )

        :param dialect: May be a string or tuple of strings.
          If a string, it will be compared to the name of the
          executing database dialect::

            DDL('something').execute_if(dialect='postgresql')

          If a tuple, specifies multiple dialect names::

            DDL('something').execute_if(dialect=('postgresql', 'mysql'))

        :param callable\_: A callable, which will be invoked with
          four positional arguments as well as optional keyword
          arguments:

            :ddl:
              This DDL element.

            :target:
              The :class:`_schema.Table` or :class:`_schema.MetaData`
              object which is the
              target of this event. May be None if the DDL is executed
              explicitly.

            :bind:
              The :class:`_engine.Connection` being used for DDL execution

            :tables:
              Optional keyword argument - a list of Table objects which are to
              be created/ dropped within a MetaData.create_all() or drop_all()
              method call.

            :state:
              Optional keyword argument - will be the ``state`` argument
              passed to this function.

            :checkfirst:
              Keyword argument, will be True if the 'checkfirst' flag was
              set during the call to ``create()``, ``create_all()``,
              ``drop()``, ``drop_all()``.

          If the callable returns a True value, the DDL statement will be
          executed.

        :param state: any value which will be passed to the callable\_
          as the ``state`` keyword argument.

        .. seealso::

            :class:`.DDLEvents`

            :ref:`event_toplevel`

        """
        # criteria consulted later by _should_execute()
        self.dialect = dialect
        self.callable_ = callable_
        self.state = state

    def _should_execute(self, target, bind, **kw):
        # evaluate the dialect criteria first: either an exact name match
        # or membership in a collection of names
        if isinstance(self.dialect, util.string_types):
            if self.dialect != bind.engine.name:
                return False
        elif isinstance(self.dialect, (tuple, list, set)):
            if bind.engine.name not in self.dialect:
                return False
        # then the user-supplied callable, if any, gets a veto
        if self.callable_ is not None and not self.callable_(
            self, target, bind, state=self.state, **kw
        ):
            return False

        return True

    def __call__(self, target, bind, **kw):
        """Execute the DDL as a ddl_listener."""

        if self._should_execute(target, bind, **kw):
            return bind.execute(self.against(target))

    def bind(self):
        # accessor half of the legacy read/write ``bind`` property below
        if self._bind:
            return self._bind

    def _set_bind(self, bind):
        # mutator half of the legacy ``bind`` property
        self._bind = bind

    # expose bind() / _set_bind() as a read/write property
    bind = property(bind, _set_bind)

    def _generate(self):
        # shallow copy used by @_generative methods such as against()
        s = self.__class__.__new__(self.__class__)
        s.__dict__ = self.__dict__.copy()
        return s

263 

264 

class DDL(DDLElement):
    """A literal DDL statement.

    Specifies literal SQL DDL to be executed by the database. DDL objects
    function as DDL event listeners, and can be subscribed to those events
    listed in :class:`.DDLEvents`, using either :class:`_schema.Table` or
    :class:`_schema.MetaData` objects as targets.
    Basic templating support allows
    a single DDL instance to handle repetitive tasks for multiple tables.

    Examples::

        from sqlalchemy import event, DDL

        tbl = Table('users', metadata, Column('uid', Integer))
        event.listen(tbl, 'before_create', DDL('DROP TRIGGER users_trigger'))

        spow = DDL('ALTER TABLE %(table)s SET secretpowers TRUE')
        event.listen(tbl, 'after_create', spow.execute_if(dialect='somedb'))

        drop_spow = DDL('ALTER TABLE users SET secretpowers FALSE')
        connection.execute(drop_spow)

    When operating on Table events, the following ``statement``
    string substitutions are available::

        %(table)s - the Table name, with any required quoting applied
        %(schema)s - the schema name, with any required quoting applied
        %(fullname)s - the Table name including schema, quoted if needed

    The DDL's "context", if any, will be combined with the standard
    substitutions noted above. Keys present in the context will override
    the standard substitutions.

    """

    __visit_name__ = "ddl"

    @util.deprecated_params(
        bind=(
            "2.0",
            "The :paramref:`_ddl.DDL.bind` argument is deprecated and "
            "will be removed in SQLAlchemy 2.0.",
        ),
    )
    def __init__(self, statement, context=None, bind=None):
        """Create a DDL statement.

        :param statement:
          A string or unicode string to be executed. Statements will be
          processed with Python's string formatting operator using
          a fixed set of string substitutions, as well as additional
          substitutions provided by the optional :paramref:`.DDL.context`
          parameter.

          A literal '%' in a statement must be escaped as '%%'.

          SQL bind parameters are not available in DDL statements.

        :param context:
          Optional dictionary, defaults to None. These values will be
          available for use in string substitutions on the DDL statement.

        :param bind:
          Optional. A :class:`.Connectable`, used by
          default when ``execute()`` is invoked without a bind argument.


        .. seealso::

            :class:`.DDLEvents`

            :ref:`event_toplevel`

        """

        # reject non-string statements up front; bind parameters are not
        # supported in DDL so only a literal string makes sense here
        if not isinstance(statement, util.string_types):
            raise exc.ArgumentError(
                "Expected a string or unicode SQL statement, got '%r'"
                % statement
            )

        self.statement = statement
        self.context = context or {}

        self._bind = bind

    def __repr__(self):
        # render the statement plus any truthy "on"/"context" state
        return "<%s@%s; %s>" % (
            type(self).__name__,
            id(self),
            ", ".join(
                [repr(self.statement)]
                + [
                    "%s=%r" % (key, getattr(self, key))
                    for key in ("on", "context")
                    if getattr(self, key)
                ]
            ),
        )

365 

366 

class _CreateDropBase(DDLElement):
    """Base class for DDL constructs that represent CREATE and DROP or
    equivalents.

    The common theme of _CreateDropBase is a single
    ``element`` attribute which refers to the element
    to be created or dropped.

    """

    @util.deprecated_params(
        bind=(
            "2.0",
            "The :paramref:`_ddl.DDLElement.bind` argument is "
            "deprecated and "
            "will be removed in SQLAlchemy 2.0.",
        ),
    )
    def __init__(
        self,
        element,
        bind=None,
        if_exists=False,
        if_not_exists=False,
        _legacy_bind=None,
    ):
        self.element = element
        # "bind" here is deprecated; subclasses that accept their own
        # deprecated "bind" parameter forward it via _legacy_bind so the
        # deprecation warning fires only once, at the subclass level
        if bind:
            self.bind = bind
        elif _legacy_bind:
            self.bind = _legacy_bind
        self.if_exists = if_exists
        self.if_not_exists = if_not_exists

    @property
    def stringify_dialect(self):
        # defer to the schema element to select the dialect used when
        # this construct is stringified without an engine
        return self.element.create_drop_stringify_dialect

    def _create_rule_disable(self, compiler):
        """Allow disable of _create_rule using a callable.

        Pass to _create_rule using
        util.portable_instancemethod(self._create_rule_disable)
        to retain serializability.

        """
        return False

415 

class CreateSchema(_CreateDropBase):
    """DDL element emitting a ``CREATE SCHEMA`` statement.

    The element managed here is simply the string name of the schema
    to be created.

    """

    __visit_name__ = "create_schema"

    stringify_dialect = "default"

    def __init__(self, name, quote=None, **kw):
        """Construct a :class:`.CreateSchema` for the given schema name."""
        super(CreateSchema, self).__init__(name, **kw)
        self.quote = quote

433 

class DropSchema(_CreateDropBase):
    """DDL element emitting a ``DROP SCHEMA`` statement.

    The element managed here is simply the string name of the schema
    to be dropped.

    """

    __visit_name__ = "drop_schema"

    stringify_dialect = "default"

    def __init__(self, name, quote=None, cascade=False, **kw):
        """Construct a :class:`.DropSchema` for the given schema name."""
        super(DropSchema, self).__init__(name, **kw)
        self.quote = quote
        self.cascade = cascade

451 

452 

class CreateTable(_CreateDropBase):
    """DDL element emitting a ``CREATE TABLE`` statement."""

    __visit_name__ = "create_table"

    @util.deprecated_params(
        bind=(
            "2.0",
            "The :paramref:`_ddl.CreateTable.bind` argument is deprecated and "
            "will be removed in SQLAlchemy 2.0.",
        ),
    )
    def __init__(
        self,
        element,
        bind=None,
        include_foreign_key_constraints=None,
        if_not_exists=False,
    ):
        """Construct a :class:`.CreateTable` statement element.

        :param element: the :class:`_schema.Table` to be created.
        :param on: see the description for 'on' in :class:`.DDL`.
        :param bind: see the description for 'bind' in :class:`.DDL`
         (deprecated).
        :param include_foreign_key_constraints: optional sequence of
         :class:`_schema.ForeignKeyConstraint` objects to render inline
         in the CREATE; when omitted, every foreign key constraint that
         does not specify use_alter=True is rendered.

         .. versionadded:: 1.0.0

        :param if_not_exists: when True, render ``IF NOT EXISTS`` on the
         statement.

         .. versionadded:: 1.4.0b2

        """
        super(CreateTable, self).__init__(
            element, if_not_exists=if_not_exists, _legacy_bind=bind
        )
        self.include_foreign_key_constraints = include_foreign_key_constraints
        # wrap each column in a CreateColumn so custom compilation
        # extensions can intercept per-column rendering
        wrapped = []
        for col in element.columns:
            wrapped.append(CreateColumn(col))
        self.columns = wrapped

496 

497 

class _DropView(_CreateDropBase):
    """Semi-public ``DROP VIEW`` construct.

    Employed by the test suite to drop views in a dialect-agnostic
    fashion; intended to eventually join a public "view" API.

    """

    __visit_name__ = "drop_view"

507 

508 

class CreateColumn(_DDLCompiles):
    """Represent a :class:`_schema.Column`
    as rendered in a CREATE TABLE statement,
    via the :class:`.CreateTable` construct.

    This is provided to support custom column DDL within the generation
    of CREATE TABLE statements, by using the
    compiler extension documented in :ref:`sqlalchemy.ext.compiler_toplevel`
    to extend :class:`.CreateColumn`.

    Typical integration is to examine the incoming :class:`_schema.Column`
    object, and to redirect compilation if a particular flag or condition
    is found::

        from sqlalchemy import schema
        from sqlalchemy.ext.compiler import compiles

        @compiles(schema.CreateColumn)
        def compile(element, compiler, **kw):
            column = element.element

            if "special" not in column.info:
                return compiler.visit_create_column(element, **kw)

            text = "%s SPECIAL DIRECTIVE %s" % (
                column.name,
                compiler.type_compiler.process(column.type)
            )
            default = compiler.get_column_default_string(column)
            if default is not None:
                text += " DEFAULT " + default

            if not column.nullable:
                text += " NOT NULL"

            if column.constraints:
                text += " ".join(
                    compiler.process(const)
                    for const in column.constraints)
            return text

    The above construct can be applied to a :class:`_schema.Table`
    as follows::

        from sqlalchemy import Table, MetaData, Column, Integer, String
        from sqlalchemy import schema

        metadata = MetaData()

        table = Table('mytable', MetaData(),
                Column('x', Integer, info={"special":True}, primary_key=True),
                Column('y', String(50)),
                Column('z', String(20), info={"special":True})
            )

        metadata.create_all(conn)

    Above, the directives we've added to the :attr:`_schema.Column.info`
    collection
    will be detected by our custom compilation scheme::

        CREATE TABLE mytable (
                x SPECIAL DIRECTIVE INTEGER NOT NULL,
                y VARCHAR(50),
                z SPECIAL DIRECTIVE VARCHAR(20),
            PRIMARY KEY (x)
        )

    The :class:`.CreateColumn` construct can also be used to skip certain
    columns when producing a ``CREATE TABLE``. This is accomplished by
    creating a compilation rule that conditionally returns ``None``.
    This is essentially how to produce the same effect as using the
    ``system=True`` argument on :class:`_schema.Column`, which marks a column
    as an implicitly-present "system" column.

    For example, suppose we wish to produce a :class:`_schema.Table`
    which skips
    rendering of the PostgreSQL ``xmin`` column against the PostgreSQL
    backend, but on other backends does render it, in anticipation of a
    triggered rule. A conditional compilation rule could skip this name only
    on PostgreSQL::

        from sqlalchemy.schema import CreateColumn

        @compiles(CreateColumn, "postgresql")
        def skip_xmin(element, compiler, **kw):
            if element.element.name == 'xmin':
                return None
            else:
                return compiler.visit_create_column(element, **kw)


        my_table = Table('mytable', metadata,
                    Column('id', Integer, primary_key=True),
                    Column('xmin', Integer)
                )

    Above, a :class:`.CreateTable` construct will generate a ``CREATE TABLE``
    which only includes the ``id`` column in the string; the ``xmin`` column
    will be omitted, but only against the PostgreSQL backend.

    """

    __visit_name__ = "create_column"

    def __init__(self, element):
        # element is the Column to be rendered within CREATE TABLE
        self.element = element

617 

class DropTable(_CreateDropBase):
    """DDL element emitting a ``DROP TABLE`` statement."""

    __visit_name__ = "drop_table"

    @util.deprecated_params(
        bind=(
            "2.0",
            "The :paramref:`_ddl.DropTable.bind` argument is "
            "deprecated and "
            "will be removed in SQLAlchemy 2.0.",
        ),
    )
    def __init__(self, element, bind=None, if_exists=False):
        """Construct a :class:`.DropTable` statement element.

        :param element: the :class:`_schema.Table` to be dropped.
        :param on: see the description for 'on' in :class:`.DDL`.
        :param bind: see the description for 'bind' in :class:`.DDL`
         (deprecated).
        :param if_exists: when True, render ``IF EXISTS`` on the
         statement.

         .. versionadded:: 1.4.0b2

        """
        super(DropTable, self).__init__(
            element, if_exists=if_exists, _legacy_bind=bind
        )

647 

648 

class CreateSequence(_CreateDropBase):
    """DDL element emitting a ``CREATE SEQUENCE`` statement."""

    __visit_name__ = "create_sequence"

653 

654 

class DropSequence(_CreateDropBase):
    """DDL element emitting a ``DROP SEQUENCE`` statement."""

    __visit_name__ = "drop_sequence"

659 

660 

class CreateIndex(_CreateDropBase):
    """Represent a CREATE INDEX statement."""

    __visit_name__ = "create_index"

    @util.deprecated_params(
        bind=(
            "2.0",
            "The :paramref:`_ddl.CreateIndex.bind` argument is "
            "deprecated and "
            "will be removed in SQLAlchemy 2.0.",
        ),
    )
    def __init__(self, element, bind=None, if_not_exists=False):
        """Create a :class:`.CreateIndex` construct.

        :param element: a :class:`_schema.Index` that's the subject
         of the CREATE.
        :param on: See the description for 'on' in :class:`.DDL`.
        :param bind: See the description for 'bind' in :class:`.DDL`.
        :param if_not_exists: if True, an IF NOT EXISTS operator will be
         applied to the construct.

         .. versionadded:: 1.4.0b2

        """
        # bind is forwarded as _legacy_bind so the deprecation warning
        # fires here rather than a second time in _CreateDropBase
        super(CreateIndex, self).__init__(
            element, _legacy_bind=bind, if_not_exists=if_not_exists
        )

690 

691 

class DropIndex(_CreateDropBase):
    """DDL element emitting a ``DROP INDEX`` statement."""

    __visit_name__ = "drop_index"

    @util.deprecated_params(
        bind=(
            "2.0",
            "The :paramref:`_ddl.DropIndex.bind` argument is "
            "deprecated and "
            "will be removed in SQLAlchemy 2.0.",
        ),
    )
    def __init__(self, element, bind=None, if_exists=False):
        """Construct a :class:`.DropIndex` statement element.

        :param element: the :class:`_schema.Index` to be dropped.
        :param on: see the description for 'on' in :class:`.DDL`.
        :param bind: see the description for 'bind' in :class:`.DDL`
         (deprecated).
        :param if_exists: when True, render ``IF EXISTS`` on the
         statement.

         .. versionadded:: 1.4.0b2

        """
        super(DropIndex, self).__init__(
            element, if_exists=if_exists, _legacy_bind=bind
        )

721 

722 

class AddConstraint(_CreateDropBase):
    """Represent an ALTER TABLE ADD CONSTRAINT statement."""

    __visit_name__ = "add_constraint"

    def __init__(self, element, *args, **kw):
        super(AddConstraint, self).__init__(element, *args, **kw)
        # Since an ALTER will now add the constraint, disable its inline
        # _create_rule so CREATE TABLE does not render it a second time;
        # portable_instancemethod keeps the reference serializable.
        element._create_rule = util.portable_instancemethod(
            self._create_rule_disable
        )

733 

734 

class DropConstraint(_CreateDropBase):
    """DDL element emitting an ``ALTER TABLE ... DROP CONSTRAINT``
    statement."""

    __visit_name__ = "drop_constraint"

    def __init__(self, element, cascade=False, **kw):
        super(DropConstraint, self).__init__(element, **kw)
        self.cascade = cascade
        # disable the constraint's inline _create_rule so it is not also
        # rendered by CREATE TABLE; keep it picklable via the portable
        # instancemethod wrapper
        element._create_rule = util.portable_instancemethod(
            self._create_rule_disable
        )

746 

747 

class SetTableComment(_CreateDropBase):
    """DDL element emitting a ``COMMENT ON TABLE IS`` statement."""

    __visit_name__ = "set_table_comment"

752 

753 

class DropTableComment(_CreateDropBase):
    """DDL element removing a table comment, e.g. ``COMMENT ON TABLE ''``.

    The exact syntax rendered varies considerably across database
    backends.

    """

    __visit_name__ = "drop_table_comment"

762 

763 

class SetColumnComment(_CreateDropBase):
    """DDL element emitting a ``COMMENT ON COLUMN IS`` statement."""

    __visit_name__ = "set_column_comment"

768 

769 

class DropColumnComment(_CreateDropBase):
    """DDL element emitting a ``COMMENT ON COLUMN IS NULL`` statement."""

    __visit_name__ = "drop_column_comment"

774 

775 

class DDLBase(SchemaVisitor):
    """Common base for schema visitors that emit DDL on a connection."""

    def __init__(self, connection):
        # the Connection upon which all DDL will be executed
        self.connection = connection

779 

780 

class SchemaGenerator(DDLBase):
    """Schema visitor that emits CREATE statements for a
    :class:`_schema.MetaData` / :class:`_schema.Table` hierarchy."""

    def __init__(
        self, dialect, connection, checkfirst=False, tables=None, **kwargs
    ):
        super(SchemaGenerator, self).__init__(connection, **kwargs)
        # when True, probe for existence before emitting each CREATE
        self.checkfirst = checkfirst
        # optional subset of tables to create; None means all in metadata
        self.tables = tables
        self.preparer = dialect.identifier_preparer
        self.dialect = dialect
        # memoization dict consulted by the SchemaVisitor traversal
        self.memo = {}

    def _can_create_table(self, table):
        """Return True if *table* should be created (valid identifiers,
        and either checkfirst is off or the table is absent)."""
        self.dialect.validate_identifier(table.name)
        effective_schema = self.connection.schema_for_object(table)
        if effective_schema:
            self.dialect.validate_identifier(effective_schema)
        return not self.checkfirst or not self.dialect.has_table(
            self.connection, table.name, schema=effective_schema
        )

    def _can_create_index(self, index):
        # same rules as tables: validate schema name, then consult
        # checkfirst / has_index
        effective_schema = self.connection.schema_for_object(index.table)
        if effective_schema:
            self.dialect.validate_identifier(effective_schema)
        return not self.checkfirst or not self.dialect.has_index(
            self.connection,
            index.table.name,
            index.name,
            schema=effective_schema,
        )

    def _can_create_sequence(self, sequence):
        # a sequence is created only when the dialect supports sequences,
        # the sequence is not an "optional" one on a dialect where
        # sequences are optional, and (under checkfirst) it is absent
        effective_schema = self.connection.schema_for_object(sequence)

        return self.dialect.supports_sequences and (
            (not self.dialect.sequences_optional or not sequence.optional)
            and (
                not self.checkfirst
                or not self.dialect.has_sequence(
                    self.connection, sequence.name, schema=effective_schema
                )
            )
        )

    def visit_metadata(self, metadata):
        """CREATE everything in *metadata*: standalone sequences first,
        then tables in dependency order, with before/after_create events
        dispatched around the whole operation."""
        if self.tables is not None:
            tables = self.tables
        else:
            tables = list(metadata.tables.values())

        # (table, fkcs) pairs in dependency order; a None table entry
        # carries constraints to be added via ALTER after the tables
        collection = sort_tables_and_constraints(
            [t for t in tables if self._can_create_table(t)]
        )

        # only sequences not owned by a column are created standalone
        seq_coll = [
            s
            for s in metadata._sequences.values()
            if s.column is None and self._can_create_sequence(s)
        ]

        event_collection = [t for (t, fks) in collection if t is not None]
        metadata.dispatch.before_create(
            metadata,
            self.connection,
            tables=event_collection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
        )

        for seq in seq_coll:
            self.traverse_single(seq, create_ok=True)

        for table, fkcs in collection:
            if table is not None:
                # create_ok=True: _can_create_table was already consulted
                self.traverse_single(
                    table,
                    create_ok=True,
                    include_foreign_key_constraints=fkcs,
                    _is_metadata_operation=True,
                )
            else:
                # deferred foreign key constraints, added via ALTER
                for fkc in fkcs:
                    self.traverse_single(fkc)

        metadata.dispatch.after_create(
            metadata,
            self.connection,
            tables=event_collection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
        )

    def visit_table(
        self,
        table,
        create_ok=False,
        include_foreign_key_constraints=None,
        _is_metadata_operation=False,
    ):
        """Emit CREATE TABLE plus its indexes and (non-inline) comments,
        wrapped in the table's before/after_create events."""
        if not create_ok and not self._can_create_table(table):
            return

        table.dispatch.before_create(
            table,
            self.connection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
            _is_metadata_operation=_is_metadata_operation,
        )

        # give column default objects (e.g. sequences) a chance to be
        # created first
        for column in table.columns:
            if column.default is not None:
                self.traverse_single(column.default)

        if not self.dialect.supports_alter:
            # e.g., don't omit any foreign key constraints
            include_foreign_key_constraints = None

        self.connection.execute(
            # fmt: off
            CreateTable(
                table,
                include_foreign_key_constraints=  # noqa
                    include_foreign_key_constraints,  # noqa
            )
            # fmt: on
        )

        if hasattr(table, "indexes"):
            for index in table.indexes:
                self.traverse_single(index, create_ok=True)

        # comments are emitted as separate statements when the dialect
        # does not render them inline in CREATE TABLE
        if self.dialect.supports_comments and not self.dialect.inline_comments:
            if table.comment is not None:
                self.connection.execute(SetTableComment(table))

            for column in table.columns:
                if column.comment is not None:
                    self.connection.execute(SetColumnComment(column))

        table.dispatch.after_create(
            table,
            self.connection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
            _is_metadata_operation=_is_metadata_operation,
        )

    def visit_foreign_key_constraint(self, constraint):
        # deferred FK constraints require ALTER support
        if not self.dialect.supports_alter:
            return
        self.connection.execute(AddConstraint(constraint))

    def visit_sequence(self, sequence, create_ok=False):
        if not create_ok and not self._can_create_sequence(sequence):
            return
        self.connection.execute(CreateSequence(sequence))

    def visit_index(self, index, create_ok=False):
        if not create_ok and not self._can_create_index(index):
            return
        self.connection.execute(CreateIndex(index))

943 

944 

class SchemaDropper(DDLBase):
    """Schema visitor that emits DROP statements for a
    :class:`_schema.MetaData` / :class:`_schema.Table` hierarchy."""

    def __init__(
        self, dialect, connection, checkfirst=False, tables=None, **kwargs
    ):
        super(SchemaDropper, self).__init__(connection, **kwargs)
        # when True, probe for existence before emitting each DROP
        self.checkfirst = checkfirst
        # optional subset of tables to drop; None means all in metadata
        self.tables = tables
        self.preparer = dialect.identifier_preparer
        self.dialect = dialect
        # memoization dict consulted by the SchemaVisitor traversal
        self.memo = {}

955 

    def visit_metadata(self, metadata):
        """DROP everything in *metadata* in reverse dependency order,
        dispatching before/after_drop events around the operation."""
        if self.tables is not None:
            tables = self.tables
        else:
            tables = list(metadata.tables.values())

        try:
            unsorted_tables = [t for t in tables if self._can_drop_table(t)]
            # reverse creation order; constraints are separated out for
            # DROP CONSTRAINT only when ALTER is supported and the
            # constraint has a name
            collection = list(
                reversed(
                    sort_tables_and_constraints(
                        unsorted_tables,
                        filter_fn=lambda constraint: False
                        if not self.dialect.supports_alter
                        or constraint.name is None
                        else None,
                    )
                )
            )
        except exc.CircularDependencyError as err2:
            if not self.dialect.supports_alter:
                # no way to break the cycle without ALTER; warn and fall
                # back to an unsorted drop
                util.warn(
                    "Can't sort tables for DROP; an "
                    "unresolvable foreign key "
                    "dependency exists between tables: %s; and backend does "
                    "not support ALTER. To restore at least a partial sort, "
                    "apply use_alter=True to ForeignKey and "
                    "ForeignKeyConstraint "
                    "objects involved in the cycle to mark these as known "
                    "cycles that will be ignored."
                    % (", ".join(sorted([t.fullname for t in err2.cycles])))
                )
                collection = [(t, ()) for t in unsorted_tables]
            else:
                # ALTER is available; re-raise with guidance to name the
                # constraints so the cycle can be broken via DROP CONSTRAINT
                util.raise_(
                    exc.CircularDependencyError(
                        err2.args[0],
                        err2.cycles,
                        err2.edges,
                        msg="Can't sort tables for DROP; an "
                        "unresolvable foreign key "
                        "dependency exists between tables: %s. Please ensure "
                        "that the ForeignKey and ForeignKeyConstraint objects "
                        "involved in the cycle have "
                        "names so that they can be dropped using "
                        "DROP CONSTRAINT."
                        % (
                            ", ".join(
                                sorted([t.fullname for t in err2.cycles])
                            )
                        ),
                    ),
                    from_=err2,
                )

        seq_coll = [
            s
            for s in metadata._sequences.values()
            if self._can_drop_sequence(s)
        ]

        event_collection = [t for (t, fks) in collection if t is not None]

        metadata.dispatch.before_drop(
            metadata,
            self.connection,
            tables=event_collection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
        )

        for table, fkcs in collection:
            if table is not None:
                # drop_ok=True: _can_drop_table was already consulted
                self.traverse_single(
                    table,
                    drop_ok=True,
                    _is_metadata_operation=True,
                    _ignore_sequences=seq_coll,
                )
            else:
                # constraints separated out for DROP CONSTRAINT
                for fkc in fkcs:
                    self.traverse_single(fkc)

        # standalone sequences are dropped after the tables
        for seq in seq_coll:
            self.traverse_single(seq, drop_ok=seq.column is None)

        metadata.dispatch.after_drop(
            metadata,
            self.connection,
            tables=event_collection,
            checkfirst=self.checkfirst,
            _ddl_runner=self,
        )

1049 

1050 def _can_drop_table(self, table): 

1051 self.dialect.validate_identifier(table.name) 

1052 effective_schema = self.connection.schema_for_object(table) 

1053 if effective_schema: 

1054 self.dialect.validate_identifier(effective_schema) 

1055 return not self.checkfirst or self.dialect.has_table( 

1056 self.connection, table.name, schema=effective_schema 

1057 ) 

1058 

1059 def _can_drop_index(self, index): 

1060 effective_schema = self.connection.schema_for_object(index.table) 

1061 if effective_schema: 

1062 self.dialect.validate_identifier(effective_schema) 

1063 return not self.checkfirst or self.dialect.has_index( 

1064 self.connection, 

1065 index.table.name, 

1066 index.name, 

1067 schema=effective_schema, 

1068 ) 

1069 

1070 def _can_drop_sequence(self, sequence): 

1071 effective_schema = self.connection.schema_for_object(sequence) 

1072 return self.dialect.supports_sequences and ( 

1073 (not self.dialect.sequences_optional or not sequence.optional) 

1074 and ( 

1075 not self.checkfirst 

1076 or self.dialect.has_sequence( 

1077 self.connection, sequence.name, schema=effective_schema 

1078 ) 

1079 ) 

1080 ) 

1081 

1082 def visit_index(self, index, drop_ok=False): 

1083 if not drop_ok and not self._can_drop_index(index): 

1084 return 

1085 

1086 self.connection.execute(DropIndex(index)) 

1087 

1088 def visit_table( 

1089 self, 

1090 table, 

1091 drop_ok=False, 

1092 _is_metadata_operation=False, 

1093 _ignore_sequences=(), 

1094 ): 

1095 if not drop_ok and not self._can_drop_table(table): 

1096 return 

1097 

1098 table.dispatch.before_drop( 

1099 table, 

1100 self.connection, 

1101 checkfirst=self.checkfirst, 

1102 _ddl_runner=self, 

1103 _is_metadata_operation=_is_metadata_operation, 

1104 ) 

1105 

1106 self.connection.execute(DropTable(table)) 

1107 

1108 # traverse client side defaults which may refer to server-side 

1109 # sequences. noting that some of these client side defaults may also be 

1110 # set up as server side defaults (see https://docs.sqlalchemy.org/en/ 

1111 # latest/core/defaults.html#associating-a-sequence-as-the-server-side- 

1112 # default), so have to be dropped after the table is dropped. 

1113 for column in table.columns: 

1114 if ( 

1115 column.default is not None 

1116 and column.default not in _ignore_sequences 

1117 ): 

1118 self.traverse_single(column.default) 

1119 

1120 table.dispatch.after_drop( 

1121 table, 

1122 self.connection, 

1123 checkfirst=self.checkfirst, 

1124 _ddl_runner=self, 

1125 _is_metadata_operation=_is_metadata_operation, 

1126 ) 

1127 

1128 def visit_foreign_key_constraint(self, constraint): 

1129 if not self.dialect.supports_alter: 

1130 return 

1131 self.connection.execute(DropConstraint(constraint)) 

1132 

1133 def visit_sequence(self, sequence, drop_ok=False): 

1134 

1135 if not drop_ok and not self._can_drop_sequence(sequence): 

1136 return 

1137 self.connection.execute(DropSequence(sequence)) 

1138 

1139 

def sort_tables(
    tables,
    skip_fn=None,
    extra_dependencies=None,
):
    """Sort a collection of :class:`_schema.Table` objects based on
    dependency.

    A dependency-ordered sort: each :class:`_schema.Table` is emitted
    after every :class:`_schema.Table` it depends on, where dependencies
    arise from :class:`_schema.ForeignKeyConstraint` objects as well as
    explicit dependencies added by
    :meth:`_schema.Table.add_is_dependent_on`.

    .. warning::

        The :func:`._schema.sort_tables` function cannot by itself
        accommodate automatic resolution of dependency cycles between
        tables, which are usually caused by mutually dependent foreign
        key constraints.  When these cycles are detected, the foreign
        keys of these tables are omitted from consideration in the sort
        and a warning is emitted (which will become an exception in a
        future release).  Tables which are not part of the cycle are
        still returned in dependency order.

        To resolve such cycles, apply the
        :paramref:`_schema.ForeignKeyConstraint.use_alter` parameter to
        the constraints creating the cycle, or use
        :func:`_schema.sort_tables_and_constraints`, which returns
        cycle-involved foreign key constraints in a separate collection.

    .. versionchanged:: 1.3.17 - a warning is emitted when
       :func:`_schema.sort_tables` cannot perform a proper sort due to
       cyclical dependencies.  This will be an exception in a future
       release.  Additionally, the sort will continue to return
       other tables not involved in the cycle in dependency order
       which was not the case previously.

    :param tables: a sequence of :class:`_schema.Table` objects.

    :param skip_fn: optional callable which will be passed a
     :class:`_schema.ForeignKey` object; if it returns True, this
     constraint will not be considered as a dependency.  Note this is
     **different** from the same parameter in
     :func:`.sort_tables_and_constraints`, which is
     instead passed the owning :class:`_schema.ForeignKeyConstraint`
     object.

    :param extra_dependencies: a sequence of 2-tuples of tables which
     will also be considered as dependent on each other.

    .. seealso::

        :func:`.sort_tables_and_constraints`

        :attr:`_schema.MetaData.sorted_tables` - uses this function to
        sort

    """

    if skip_fn is None:
        _skip_fn = None
    else:
        # Adapt the per-ForeignKey skip_fn to the per-constraint
        # filter_fn contract: True means "skip this constraint",
        # None means "no opinion".
        def _skip_fn(fkc):
            return True if any(skip_fn(fk) for fk in fkc.elements) else None

    sorted_pairs = sort_tables_and_constraints(
        tables,
        filter_fn=_skip_fn,
        extra_dependencies=extra_dependencies,
        _warn_for_cycles=True,
    )
    # Discard the trailing (None, [fkc, ...]) entry and the per-table
    # constraint collections; only the table ordering is wanted here.
    return [table for (table, _) in sorted_pairs if table is not None]

1225 

1226 

def sort_tables_and_constraints(
    tables, filter_fn=None, extra_dependencies=None, _warn_for_cycles=False
):
    """Sort a collection of :class:`_schema.Table` /
    :class:`_schema.ForeignKeyConstraint`
    objects.

    This is a dependency-ordered sort which will emit tuples of
    ``(Table, [ForeignKeyConstraint, ...])`` such that each
    :class:`_schema.Table` follows its dependent :class:`_schema.Table`
    objects.
    Remaining :class:`_schema.ForeignKeyConstraint`
    objects that are separate due to
    dependency rules not satisfied by the sort are emitted afterwards
    as ``(None, [ForeignKeyConstraint ...])``.

    Tables are dependent on another based on the presence of
    :class:`_schema.ForeignKeyConstraint` objects, explicit dependencies
    added by :meth:`_schema.Table.add_is_dependent_on`,
    as well as dependencies
    stated here using the :paramref:`~.sort_tables_and_constraints.skip_fn`
    and/or :paramref:`~.sort_tables_and_constraints.extra_dependencies`
    parameters.

    :param tables: a sequence of :class:`_schema.Table` objects.

    :param filter_fn: optional callable which will be passed a
     :class:`_schema.ForeignKeyConstraint` object,
     and returns a value based on
     whether this constraint should definitely be included or excluded as
     an inline constraint, or neither. If it returns False, the constraint
     will definitely be included as a dependency that cannot be subject
     to ALTER; if True, it will **only** be included as an ALTER result at
     the end.   Returning None means the constraint is included in the
     table-based result unless it is detected as part of a dependency cycle.

    :param extra_dependencies: a sequence of 2-tuples of tables which will
     also be considered as dependent on each other.

    .. versionadded:: 1.0.0

    .. seealso::

        :func:`.sort_tables`


    """

    # "fixed" dependencies can never be dropped from the sort;
    # "mutable" dependencies (those implied by plain FK constraints)
    # may be removed below to break a detected cycle.
    fixed_dependencies = set()
    mutable_dependencies = set()

    if extra_dependencies is not None:
        fixed_dependencies.update(extra_dependencies)

    # constraints that will be emitted in the trailing (None, [...])
    # entry rather than inline with their table
    remaining_fkcs = set()
    for table in tables:
        for fkc in table.foreign_key_constraints:
            # use_alter=True constraints are always deferred to the end
            if fkc.use_alter is True:
                remaining_fkcs.add(fkc)
                continue

            if filter_fn:
                filtered = filter_fn(fkc)

                # True -> emit only as a trailing ALTER-style constraint
                if filtered is True:
                    remaining_fkcs.add(fkc)
                    continue

            # self-referential FKs impose no inter-table ordering
            dependent_on = fkc.referred_table
            if dependent_on is not table:
                mutable_dependencies.add((dependent_on, table))

        # dependencies registered via Table.add_is_dependent_on
        fixed_dependencies.update(
            (parent, table) for parent in table._extra_dependencies
        )

    try:
        candidate_sort = list(
            topological.sort(
                fixed_dependencies.union(mutable_dependencies),
                tables,
            )
        )
    except exc.CircularDependencyError as err:
        if _warn_for_cycles:
            util.warn(
                "Cannot correctly sort tables; there are unresolvable cycles "
                'between tables "%s", which is usually caused by mutually '
                "dependent foreign key constraints.  Foreign key constraints "
                "involving these tables will not be considered; this warning "
                "may raise an error in a future release."
                % (", ".join(sorted(t.fullname for t in err.cycles)),)
            )
        # break the cycle: for each cycle-participating table, move its
        # removable FK constraints (those the filter_fn does not pin with
        # False) out to remaining_fkcs and drop the corresponding mutable
        # dependency edges.
        for edge in err.edges:
            if edge in mutable_dependencies:
                table = edge[1]
                if table not in err.cycles:
                    continue
                can_remove = [
                    fkc
                    for fkc in table.foreign_key_constraints
                    if filter_fn is None or filter_fn(fkc) is not False
                ]
                remaining_fkcs.update(can_remove)
                for fkc in can_remove:
                    dependent_on = fkc.referred_table
                    if dependent_on is not table:
                        mutable_dependencies.discard((dependent_on, table))
        # retry with the reduced edge set; if a cycle persists (e.g. held
        # together by fixed dependencies), the error propagates to the
        # caller.
        candidate_sort = list(
            topological.sort(
                fixed_dependencies.union(mutable_dependencies),
                tables,
            )
        )

    # pair each table with its inline constraints, then append the
    # deferred constraints keyed by None
    return [
        (table, table.foreign_key_constraints.difference(remaining_fkcs))
        for table in candidate_sort
    ] + [(None, list(remaining_fkcs))]