

1# sql/ddl.py 

2# Copyright (C) 2009-2025 the SQLAlchemy authors and contributors 

3# <see AUTHORS file> 

4# 

5# This module is part of SQLAlchemy and is released under 

6# the MIT License: https://www.opensource.org/licenses/mit-license.php 

7# mypy: allow-untyped-defs, allow-untyped-calls 

8 

9""" 

10Provides the hierarchy of DDL-defining schema items as well as routines 

11to invoke them for a create/drop call. 

12 

13""" 

14from __future__ import annotations 

15 

16import contextlib 

17import typing 

18from typing import Any 

19from typing import Callable 

20from typing import Generic 

21from typing import Iterable 

22from typing import List 

23from typing import Optional 

24from typing import Sequence as typing_Sequence 

25from typing import Tuple 

26from typing import TypeVar 

27from typing import Union 

28 

29from . import roles 

30from .base import _generative 

31from .base import Executable 

32from .base import SchemaVisitor 

33from .elements import ClauseElement 

34from .. import exc 

35from .. import util 

36from ..util import topological 

37from ..util.typing import Protocol 

38from ..util.typing import Self 

39 

40if typing.TYPE_CHECKING: 

41 from .compiler import Compiled 

42 from .compiler import DDLCompiler 

43 from .elements import BindParameter 

44 from .schema import Column 

45 from .schema import Constraint 

46 from .schema import ForeignKeyConstraint 

47 from .schema import Index 

48 from .schema import SchemaItem 

49 from .schema import Sequence as Sequence # noqa: F401 

50 from .schema import Table 

51 from .selectable import TableClause 

52 from ..engine.base import Connection 

53 from ..engine.interfaces import CacheStats 

54 from ..engine.interfaces import CompiledCacheType 

55 from ..engine.interfaces import Dialect 

56 from ..engine.interfaces import SchemaTranslateMapType 

57 

58_SI = TypeVar("_SI", bound=Union["SchemaItem", str]) 

59 

60 

61class BaseDDLElement(ClauseElement): 

62 """The root of DDL constructs, including those that are sub-elements 

63 within the "create table" and other processes. 

64 

65 .. versionadded:: 2.0 

66 

67 """ 

68 

69 _hierarchy_supports_caching = False 

70 """disable cache warnings for all _DDLCompiles subclasses. """ 

71 

72 def _compiler(self, dialect, **kw): 

73 """Return a compiler appropriate for this ClauseElement, given a 

74 Dialect.""" 

75 

76 return dialect.ddl_compiler(dialect, self, **kw) 

77 

78 def _compile_w_cache( 

79 self, 

80 dialect: Dialect, 

81 *, 

82 compiled_cache: Optional[CompiledCacheType], 

83 column_keys: List[str], 

84 for_executemany: bool = False, 

85 schema_translate_map: Optional[SchemaTranslateMapType] = None, 

86 **kw: Any, 

87 ) -> Tuple[ 

88 Compiled, Optional[typing_Sequence[BindParameter[Any]]], CacheStats 

89 ]: 

90 raise NotImplementedError() 

91 

92 
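# Editor's note: the sketch below is illustrative only and is not part of this
# module.  It shows how a DDL construct is rendered to a string through the
# dialect's ddl_compiler (see _compiler() above) by passing a dialect to
# ClauseElement.compile(); the table name "t" is arbitrary.
def _example_compile_with_dialect():  # pragma: no cover - illustrative sketch
    from sqlalchemy import Column, Integer, MetaData, Table
    from sqlalchemy.dialects import sqlite
    from sqlalchemy.schema import CreateTable

    tbl = Table("t", MetaData(), Column("id", Integer, primary_key=True))

    # compiles via the SQLite DDL compiler; str() on the result is the DDL text
    print(CreateTable(tbl).compile(dialect=sqlite.dialect()))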

93class DDLIfCallable(Protocol): 

94 def __call__( 

95 self, 

96 ddl: BaseDDLElement, 

97 target: Union[SchemaItem, str], 

98 bind: Optional[Connection], 

99 tables: Optional[List[Table]] = None, 

100 state: Optional[Any] = None, 

101 *, 

102 dialect: Dialect, 

103 compiler: Optional[DDLCompiler] = ..., 

104 checkfirst: bool, 

105 ) -> bool: ... 

106 

107 

108class DDLIf(typing.NamedTuple): 

109 dialect: Optional[str] 

110 callable_: Optional[DDLIfCallable] 

111 state: Optional[Any] 

112 

113 def _should_execute( 

114 self, 

115 ddl: BaseDDLElement, 

116 target: Union[SchemaItem, str], 

117 bind: Optional[Connection], 

118 compiler: Optional[DDLCompiler] = None, 

119 **kw: Any, 

120 ) -> bool: 

121 if bind is not None: 

122 dialect = bind.dialect 

123 elif compiler is not None: 

124 dialect = compiler.dialect 

125 else: 

126 assert False, "compiler or dialect is required" 

127 

128 if isinstance(self.dialect, str): 

129 if self.dialect != dialect.name: 

130 return False 

131 elif isinstance(self.dialect, (tuple, list, set)): 

132 if dialect.name not in self.dialect: 

133 return False 

134 if self.callable_ is not None and not self.callable_( 

135 ddl, 

136 target, 

137 bind, 

138 state=self.state, 

139 dialect=dialect, 

140 compiler=compiler, 

141 **kw, 

142 ): 

143 return False 

144 

145 return True 

146 

147 

148class ExecutableDDLElement(roles.DDLRole, Executable, BaseDDLElement): 

149 """Base class for standalone executable DDL expression constructs. 

150 

151 This class is the base for the general purpose :class:`.DDL` class, 

152 as well as the various create/drop clause constructs such as 

153 :class:`.CreateTable`, :class:`.DropTable`, :class:`.AddConstraint`, 

154 etc. 

155 

156 .. versionchanged:: 2.0 :class:`.ExecutableDDLElement` is renamed from 

157 :class:`.DDLElement`, which still exists for backwards compatibility. 

158 

159 :class:`.ExecutableDDLElement` integrates closely with SQLAlchemy events, 

160 introduced in :ref:`event_toplevel`. An instance of one is 

161 itself an event receiving callable:: 

162 

163 event.listen( 

164 users, 

165 "after_create", 

166 AddConstraint(constraint).execute_if(dialect="postgresql"), 

167 ) 

168 

169 .. seealso:: 

170 

171 :class:`.DDL` 

172 

173 :class:`.DDLEvents` 

174 

175 :ref:`event_toplevel` 

176 

177 :ref:`schema_ddl_sequences` 

178 

179 """ 

180 

181 _ddl_if: Optional[DDLIf] = None 

182 target: Union[SchemaItem, str, None] = None 

183 

184 def _execute_on_connection( 

185 self, connection, distilled_params, execution_options 

186 ): 

187 return connection._execute_ddl( 

188 self, distilled_params, execution_options 

189 ) 

190 

191 @_generative 

192 def against(self, target: SchemaItem) -> Self: 

193 """Return a copy of this :class:`_schema.ExecutableDDLElement` which 

194 will include the given target. 

195 

196 This essentially applies the given item to the ``.target`` attribute of 

197 the returned :class:`_schema.ExecutableDDLElement` object. This target 

198 is then usable by event handlers and compilation routines in order to 

199 provide services such as tokenization of a DDL string in terms of a 

200 particular :class:`_schema.Table`. 

201 

202 When a :class:`_schema.ExecutableDDLElement` object is established as 

203 an event handler for the :meth:`_events.DDLEvents.before_create` or 

204 :meth:`_events.DDLEvents.after_create` events, and the event then 

205 occurs for a given target such as a :class:`_schema.Constraint` or 

206 :class:`_schema.Table`, that target is established with a copy of the 

207 :class:`_schema.ExecutableDDLElement` object using this method, which 

208 then proceeds to the :meth:`_schema.ExecutableDDLElement.execute` 

209 method in order to invoke the actual DDL instruction. 

210 

211 :param target: a :class:`_schema.SchemaItem` that will be the subject 

212 of a DDL operation. 

213 

214 :return: a copy of this :class:`_schema.ExecutableDDLElement` with the 

215 ``.target`` attribute assigned to the given 

216 :class:`_schema.SchemaItem`. 

217 

218 .. seealso:: 

219 

220 :class:`_schema.DDL` - uses tokenization against the "target" when 

221 processing the DDL string. 

222 

223 """ 

224 self.target = target 

225 return self 

226 

227 @_generative 

228 def execute_if( 

229 self, 

230 dialect: Optional[str] = None, 

231 callable_: Optional[DDLIfCallable] = None, 

232 state: Optional[Any] = None, 

233 ) -> Self: 

234 r"""Return a callable that will execute this 

235 :class:`_ddl.ExecutableDDLElement` conditionally within an event 

236 handler. 

237 

238 Used to provide a wrapper for event listening:: 

239 

240 event.listen( 

241 metadata, 

242 "before_create", 

243 DDL("my_ddl").execute_if(dialect="postgresql"), 

244 ) 

245 

246 :param dialect: May be a string or tuple of strings. 

247 If a string, it will be compared to the name of the 

248 executing database dialect:: 

249 

250 DDL("something").execute_if(dialect="postgresql") 

251 

252 If a tuple, specifies multiple dialect names:: 

253 

254 DDL("something").execute_if(dialect=("postgresql", "mysql")) 

255 

256 :param callable\_: A callable, which will be invoked with 

257 three positional arguments as well as optional keyword 

258 arguments: 

259 

260 :ddl: 

261 This DDL element. 

262 

263 :target: 

264 The :class:`_schema.Table` or :class:`_schema.MetaData` 

265 object which is the 

266 target of this event. May be None if the DDL is executed 

267 explicitly. 

268 

269 :bind: 

270 The :class:`_engine.Connection` being used for DDL execution. 

271 May be None if this construct is being created inline within 

272 a table, in which case ``compiler`` will be present. 

273 

274 :tables: 

275 Optional keyword argument - a list of Table objects which are to 

276 be created/ dropped within a MetaData.create_all() or drop_all() 

277 method call. 

278 

279 :dialect: keyword argument, but always present - the 

280 :class:`.Dialect` involved in the operation. 

281 

282 :compiler: keyword argument. Will be ``None`` for an engine 

283 level DDL invocation, but will refer to a :class:`.DDLCompiler` 

284 if this DDL element is being created inline within a table. 

285 

286 :state: 

287 Optional keyword argument - will be the ``state`` argument 

288 passed to this function. 

289 

290 :checkfirst: 

291 Keyword argument, will be True if the 'checkfirst' flag was 

292 set during the call to ``create()``, ``create_all()``, 

293 ``drop()``, ``drop_all()``. 

294 

295 If the callable returns a True value, the DDL statement will be 

296 executed. 

297 

298 :param state: any value which will be passed to the callable\_ 

299 as the ``state`` keyword argument. 

300 

301 .. seealso:: 

302 

303 :meth:`.SchemaItem.ddl_if` 

304 

305 :class:`.DDLEvents` 

306 

307 :ref:`event_toplevel` 

308 

309 """ 

310 self._ddl_if = DDLIf(dialect, callable_, state) 

311 return self 

312 

313 def _should_execute(self, target, bind, **kw): 

314 if self._ddl_if is None: 

315 return True 

316 else: 

317 return self._ddl_if._should_execute(self, target, bind, **kw) 

318 

319 def _invoke_with(self, bind): 

320 if self._should_execute(self.target, bind): 

321 return bind.execute(self) 

322 

323 def __call__(self, target, bind, **kw): 

324 """Execute the DDL as a ddl_listener.""" 

325 

326 self.against(target)._invoke_with(bind) 

327 

328 def _generate(self): 

329 s = self.__class__.__new__(self.__class__) 

330 s.__dict__ = self.__dict__.copy() 

331 return s 

332 

333 

334DDLElement = ExecutableDDLElement 

335""":class:`.DDLElement` is renamed to :class:`.ExecutableDDLElement`.""" 

336 

337 

338class DDL(ExecutableDDLElement): 

339 """A literal DDL statement. 

340 

341 Specifies literal SQL DDL to be executed by the database. DDL objects 

342 function as DDL event listeners, and can be subscribed to those events 

343 listed in :class:`.DDLEvents`, using either :class:`_schema.Table` or 

344 :class:`_schema.MetaData` objects as targets. 

345 Basic templating support allows 

346 a single DDL instance to handle repetitive tasks for multiple tables. 

347 

348 Examples:: 

349 

350 from sqlalchemy import event, DDL 

351 

352 tbl = Table("users", metadata, Column("uid", Integer)) 

353 event.listen(tbl, "before_create", DDL("DROP TRIGGER users_trigger")) 

354 

355 spow = DDL("ALTER TABLE %(table)s SET secretpowers TRUE") 

356 event.listen(tbl, "after_create", spow.execute_if(dialect="somedb")) 

357 

358 drop_spow = DDL("ALTER TABLE users SET secretpowers FALSE") 

359 connection.execute(drop_spow) 

360 

361 When operating on Table events, the following ``statement`` 

362 string substitutions are available: 

363 

364 .. sourcecode:: text 

365 

366 %(table)s - the Table name, with any required quoting applied 

367 %(schema)s - the schema name, with any required quoting applied 

368 %(fullname)s - the Table name including schema, quoted if needed 

369 

370 The DDL's "context", if any, will be combined with the standard 

371 substitutions noted above. Keys present in the context will override 

372 the standard substitutions. 

373 

374 """ 

375 

376 __visit_name__ = "ddl" 

377 

378 def __init__(self, statement, context=None): 

379 """Create a DDL statement. 

380 

381 :param statement: 

382 A string to be executed. Statements will be 

383 processed with Python's string formatting operator using 

384 a fixed set of string substitutions, as well as additional 

385 substitutions provided by the optional :paramref:`.DDL.context` 

386 parameter. 

387 

388 A literal '%' in a statement must be escaped as '%%'. 

389 

390 SQL bind parameters are not available in DDL statements. 

391 

392 :param context: 

393 Optional dictionary, defaults to None. These values will be 

394 available for use in string substitutions on the DDL statement. 

395 

396 .. seealso:: 

397 

398 :class:`.DDLEvents` 

399 

400 :ref:`event_toplevel` 

401 

402 """ 

403 

404 if not isinstance(statement, str): 

405 raise exc.ArgumentError( 

406 "Expected a string or unicode SQL statement, got '%r'" 

407 % statement 

408 ) 

409 

410 self.statement = statement 

411 self.context = context or {} 

412 

413 def __repr__(self): 

414 parts = [repr(self.statement)] 

415 if self.context: 

416 parts.append(f"context={self.context}") 

417 

418 return "<%s@%s; %s>" % ( 

419 type(self).__name__, 

420 id(self), 

421 ", ".join(parts), 

422 ) 

423 

424 

425class _CreateDropBase(ExecutableDDLElement, Generic[_SI]): 

426 """Base class for DDL constructs that represent CREATE and DROP or 

427 equivalents. 

428 

429 The common theme of _CreateDropBase is a single 

430 ``element`` attribute which refers to the element 

431 to be created or dropped. 

432 

433 """ 

434 

435 element: _SI 

436 

437 def __init__(self, element: _SI) -> None: 

438 self.element = self.target = element 

439 self._ddl_if = getattr(element, "_ddl_if", None) 

440 

441 @property 

442 def stringify_dialect(self): # type: ignore[override] 

443 assert not isinstance(self.element, str) 

444 return self.element.create_drop_stringify_dialect 

445 

446 def _create_rule_disable(self, compiler): 

447 """Allow disable of _create_rule using a callable. 

448 

449 Pass to _create_rule using 

450 util.portable_instancemethod(self._create_rule_disable) 

451 to retain serializability. 

452 

453 """ 

454 return False 

455 

456 

457class _CreateBase(_CreateDropBase[_SI]): 

458 def __init__(self, element: _SI, if_not_exists: bool = False) -> None: 

459 super().__init__(element) 

460 self.if_not_exists = if_not_exists 

461 

462 

463class _DropBase(_CreateDropBase[_SI]): 

464 def __init__(self, element: _SI, if_exists: bool = False) -> None: 

465 super().__init__(element) 

466 self.if_exists = if_exists 

467 

468 

469class CreateSchema(_CreateBase[str]): 

470 """Represent a CREATE SCHEMA statement. 

471 

472 The argument here is the string name of the schema. 

473 

474 """ 

475 

476 __visit_name__ = "create_schema" 

477 

478 stringify_dialect = "default" 

479 

480 def __init__( 

481 self, 

482 name: str, 

483 if_not_exists: bool = False, 

484 ) -> None: 

485 """Create a new :class:`.CreateSchema` construct.""" 

486 

487 super().__init__(element=name, if_not_exists=if_not_exists) 

488 

489 

490class DropSchema(_DropBase[str]): 

491 """Represent a DROP SCHEMA statement. 

492 

493 The argument here is the string name of the schema. 

494 

495 """ 

496 

497 __visit_name__ = "drop_schema" 

498 

499 stringify_dialect = "default" 

500 

501 def __init__( 

502 self, 

503 name: str, 

504 cascade: bool = False, 

505 if_exists: bool = False, 

506 ) -> None: 

507 """Create a new :class:`.DropSchema` construct.""" 

508 

509 super().__init__(element=name, if_exists=if_exists) 

510 self.cascade = cascade 

511 

512 
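# Editor's note: illustrative sketch, not part of this module.  CreateSchema /
# DropSchema take the plain string name of the schema; "analytics" below is a
# hypothetical name, and the conditional flags render only on backends that
# support them.
def _example_schema_ddl():  # pragma: no cover - illustrative sketch
    print(CreateSchema("analytics", if_not_exists=True))
    print(DropSchema("analytics", cascade=True, if_exists=True))
    # against a live Connection "conn", the same constructs would be emitted
    # with conn.execute(CreateSchema("analytics")) etc.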

513class CreateTable(_CreateBase["Table"]): 

514 """Represent a CREATE TABLE statement.""" 

515 

516 __visit_name__ = "create_table" 

517 

518 def __init__( 

519 self, 

520 element: Table, 

521 include_foreign_key_constraints: Optional[ 

522 typing_Sequence[ForeignKeyConstraint] 

523 ] = None, 

524 if_not_exists: bool = False, 

525 ) -> None: 

526 """Create a :class:`.CreateTable` construct. 

527 

528 :param element: a :class:`_schema.Table` that's the subject 

529 of the CREATE 


531 :param include_foreign_key_constraints: optional sequence of 

532 :class:`_schema.ForeignKeyConstraint` objects that will be included 

533 inline within the CREATE construct; if omitted, all foreign key 

534 constraints that do not specify use_alter=True are included. 

535 

536 :param if_not_exists: if True, an IF NOT EXISTS operator will be 

537 applied to the construct. 

538 

539 .. versionadded:: 1.4.0b2 

540 

541 """ 

542 super().__init__(element, if_not_exists=if_not_exists) 

543 self.columns = [CreateColumn(column) for column in element.columns] 

544 self.include_foreign_key_constraints = include_foreign_key_constraints 

545 

546 
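# Editor's note: illustrative sketch, not part of this module.  It shows
# CreateTable stringification together with the include_foreign_key_constraints
# parameter; table and column names are hypothetical.
def _example_create_table():  # pragma: no cover - illustrative sketch
    from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table

    metadata = MetaData()
    Table("parent", metadata, Column("id", Integer, primary_key=True))
    child = Table(
        "child",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("parent_id", Integer, ForeignKey("parent.id")),
    )

    # by default all inline-capable foreign key constraints are rendered
    print(CreateTable(child))

    # an empty sequence omits them, so they could be emitted separately
    # afterwards, e.g. via AddConstraint / ALTER TABLE
    print(CreateTable(child, include_foreign_key_constraints=()))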

547class _DropView(_DropBase["Table"]): 

548 """Semi-public 'DROP VIEW' construct. 

549 

550 Used by the test suite for dialect-agnostic drops of views. 

551 This object will eventually be part of a public "view" API. 

552 

553 """ 

554 

555 __visit_name__ = "drop_view" 

556 

557 

558class CreateConstraint(BaseDDLElement): 

559 element: Constraint 

560 

561 def __init__(self, element: Constraint) -> None: 

562 self.element = element 

563 

564 

565class CreateColumn(BaseDDLElement): 

566 """Represent a :class:`_schema.Column` 

567 as rendered in a CREATE TABLE statement, 

568 via the :class:`.CreateTable` construct. 

569 

570 This is provided to support custom column DDL within the generation 

571 of CREATE TABLE statements, by using the 

572 compiler extension documented in :ref:`sqlalchemy.ext.compiler_toplevel` 

573 to extend :class:`.CreateColumn`. 

574 

575 Typical integration is to examine the incoming :class:`_schema.Column` 

576 object, and to redirect compilation if a particular flag or condition 

577 is found:: 

578 

579 from sqlalchemy import schema 

580 from sqlalchemy.ext.compiler import compiles 

581 

582 

583 @compiles(schema.CreateColumn) 

584 def compile(element, compiler, **kw): 

585 column = element.element 

586 

587 if "special" not in column.info: 

588 return compiler.visit_create_column(element, **kw) 

589 

590 text = "%s SPECIAL DIRECTIVE %s" % ( 

591 column.name, 

592 compiler.type_compiler.process(column.type), 

593 ) 

594 default = compiler.get_column_default_string(column) 

595 if default is not None: 

596 text += " DEFAULT " + default 

597 

598 if not column.nullable: 

599 text += " NOT NULL" 

600 

601 if column.constraints: 

602 text += " ".join( 

603 compiler.process(const) for const in column.constraints 

604 ) 

605 return text 

606 

607 The above construct can be applied to a :class:`_schema.Table` 

608 as follows:: 

609 

610 from sqlalchemy import Table, MetaData, Column, Integer, String 

611 from sqlalchemy import schema 

612 

613 metadata = MetaData() 

614 

615 table = Table( 

616 "mytable", 

617 metadata, 

618 Column("x", Integer, info={"special": True}, primary_key=True), 

619 Column("y", String(50)), 

620 Column("z", String(20), info={"special": True}), 

621 ) 

622 

623 metadata.create_all(conn) 

624 

625 Above, the directives we've added to the :attr:`_schema.Column.info` 

626 collection 

627 will be detected by our custom compilation scheme: 

628 

629 .. sourcecode:: sql 

630 

631 CREATE TABLE mytable ( 

632 x SPECIAL DIRECTIVE INTEGER NOT NULL, 

633 y VARCHAR(50), 

634 z SPECIAL DIRECTIVE VARCHAR(20), 

635 PRIMARY KEY (x) 

636 ) 

637 

638 The :class:`.CreateColumn` construct can also be used to skip certain 

639 columns when producing a ``CREATE TABLE``. This is accomplished by 

640 creating a compilation rule that conditionally returns ``None``. 

641 This is essentially how to produce the same effect as using the 

642 ``system=True`` argument on :class:`_schema.Column`, which marks a column 

643 as an implicitly-present "system" column. 

644 

645 For example, suppose we wish to produce a :class:`_schema.Table` 

646 which skips 

647 rendering of the PostgreSQL ``xmin`` column against the PostgreSQL 

648 backend, but on other backends does render it, in anticipation of a 

649 triggered rule. A conditional compilation rule could skip this name only 

650 on PostgreSQL:: 

651 

652 from sqlalchemy.schema import CreateColumn 

653 

654 

655 @compiles(CreateColumn, "postgresql") 

656 def skip_xmin(element, compiler, **kw): 

657 if element.element.name == "xmin": 

658 return None 

659 else: 

660 return compiler.visit_create_column(element, **kw) 

661 

662 

663 my_table = Table( 

664 "mytable", 

665 metadata, 

666 Column("id", Integer, primary_key=True), 

667 Column("xmin", Integer), 

668 ) 

669 

670 Above, a :class:`.CreateTable` construct will generate a ``CREATE TABLE`` 

671 which only includes the ``id`` column in the string; the ``xmin`` column 

672 will be omitted, but only against the PostgreSQL backend. 

673 

674 """ 

675 

676 __visit_name__ = "create_column" 

677 

678 element: Column[Any] 

679 

680 def __init__(self, element: Column[Any]) -> None: 

681 self.element = element 

682 

683 

684class DropTable(_DropBase["Table"]): 

685 """Represent a DROP TABLE statement.""" 

686 

687 __visit_name__ = "drop_table" 

688 

689 def __init__(self, element: Table, if_exists: bool = False) -> None: 

690 """Create a :class:`.DropTable` construct. 

691 

692 :param element: a :class:`_schema.Table` that's the subject 

693 of the DROP. 


695 :param if_exists: if True, an IF EXISTS operator will be applied to the 

696 construct. 

697 

698 .. versionadded:: 1.4.0b2 

699 

700 """ 

701 super().__init__(element, if_exists=if_exists) 

702 

703 

704class CreateSequence(_CreateBase["Sequence"]): 

705 """Represent a CREATE SEQUENCE statement.""" 

706 

707 __visit_name__ = "create_sequence" 

708 

709 

710class DropSequence(_DropBase["Sequence"]): 

711 """Represent a DROP SEQUENCE statement.""" 

712 

713 __visit_name__ = "drop_sequence" 

714 

715 

716class CreateIndex(_CreateBase["Index"]): 

717 """Represent a CREATE INDEX statement.""" 

718 

719 __visit_name__ = "create_index" 

720 

721 def __init__(self, element: Index, if_not_exists: bool = False) -> None: 

722 """Create a :class:`.Createindex` construct. 

723 

724 :param element: a :class:`_schema.Index` that's the subject 

725 of the CREATE. 

726 :param if_not_exists: if True, an IF NOT EXISTS operator will be 

727 applied to the construct. 

728 

729 .. versionadded:: 1.4.0b2 

730 

731 """ 

732 super().__init__(element, if_not_exists=if_not_exists) 

733 

734 

735class DropIndex(_DropBase["Index"]): 

736 """Represent a DROP INDEX statement.""" 

737 

738 __visit_name__ = "drop_index" 

739 

740 def __init__(self, element: Index, if_exists: bool = False) -> None: 

741 """Create a :class:`.DropIndex` construct. 

742 

743 :param element: a :class:`_schema.Index` that's the subject 

744 of the DROP. 

745 :param if_exists: if True, an IF EXISTS operator will be applied to the 

746 construct. 

747 

748 .. versionadded:: 1.4.0b2 

749 

750 """ 

751 super().__init__(element, if_exists=if_exists) 

752 

753 
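# Editor's note: illustrative sketch, not part of this module, showing the
# conditional IF NOT EXISTS / IF EXISTS flags on CreateIndex / DropIndex;
# names are hypothetical and the flags render only where supported.
def _example_index_ddl():  # pragma: no cover - illustrative sketch
    from sqlalchemy import Column, Index, Integer, MetaData, Table

    tbl = Table("measurements", MetaData(), Column("value", Integer))
    idx = Index("ix_measurements_value", tbl.c.value)

    print(CreateIndex(idx, if_not_exists=True))
    print(DropIndex(idx, if_exists=True))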

754class AddConstraint(_CreateBase["Constraint"]): 

755 """Represent an ALTER TABLE ADD CONSTRAINT statement.""" 

756 

757 __visit_name__ = "add_constraint" 

758 

759 def __init__( 

760 self, 

761 element: Constraint, 

762 *, 

763 isolate_from_table: bool = True, 

764 ) -> None: 

765 """Construct a new :class:`.AddConstraint` construct. 

766 

767 :param element: a :class:`.Constraint` object 

768 

769 :param isolate_from_table: optional boolean, defaults to True. When 

770 True, the given constraint is excluded from inline rendering within 

771 a subsequent CREATE TABLE statement for its associated 

772 :class:`.Table`. 

773 

774 .. versionadded:: 2.0.39 - added 

775 :paramref:`.AddConstraint.isolate_from_table`, defaulting 

776 to True. Previously, the behavior of this parameter was implicitly 

777 turned on in all cases. 

778 

779 """ 

780 super().__init__(element) 

781 

782 if isolate_from_table: 

783 element._create_rule = util.portable_instancemethod( 

784 self._create_rule_disable 

785 ) 

786 

787 

788class DropConstraint(_DropBase["Constraint"]): 

789 """Represent an ALTER TABLE DROP CONSTRAINT statement.""" 

790 

791 __visit_name__ = "drop_constraint" 

792 

793 def __init__( 

794 self, 

795 element: Constraint, 

796 *, 

797 cascade: bool = False, 

798 if_exists: bool = False, 

799 isolate_from_table: bool = True, 

800 **kw: Any, 

801 ) -> None: 

802 """Construct a new :class:`.DropConstraint` construct. 

803 

804 :param element: a :class:`.Constraint` object 

805 :param cascade: optional boolean, indicates backend-specific 

806 "CASCADE CONSTRAINT" directive should be rendered if available 

807 :param if_exists: optional boolean, indicates backend-specific 

808 "IF EXISTS" directive should be rendered if available 

809 :param isolate_from_table: optional boolean, defaults to True. When 

810 True, the given constraint is excluded from inline rendering within 

811 a subsequent CREATE TABLE statement for its associated 

812 :class:`.Table`. 

813 

814 .. versionadded:: 2.0.39 - added 

815 :paramref:`.DropConstraint.isolate_from_table`, defaulting 

816 to True. Previously, the behavior of this parameter was implicitly 

817 turned on in all cases. 

818 

819 """ 

820 self.cascade = cascade 

821 super().__init__(element, if_exists=if_exists, **kw) 

822 

823 if isolate_from_table: 

824 element._create_rule = util.portable_instancemethod( 

825 self._create_rule_disable 

826 ) 

827 

828 
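# Editor's note: illustrative sketch, not part of this module.  AddConstraint /
# DropConstraint emit a constraint via ALTER TABLE; with the default
# isolate_from_table=True the constraint is then skipped by any later inline
# CREATE TABLE rendering.  Names below are hypothetical.
def _example_constraint_ddl():  # pragma: no cover - illustrative sketch
    from sqlalchemy import Column, Integer, MetaData, Table, UniqueConstraint

    tbl = Table("accounts", MetaData(), Column("email_id", Integer))
    uq = UniqueConstraint("email_id", name="uq_accounts_email_id")
    tbl.append_constraint(uq)

    print(AddConstraint(uq))  # ALTER TABLE accounts ADD CONSTRAINT ...
    print(DropConstraint(uq, if_exists=True))  # ALTER TABLE ... DROP CONSTRAINT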

829class SetTableComment(_CreateDropBase["Table"]): 

830 """Represent a COMMENT ON TABLE IS statement.""" 

831 

832 __visit_name__ = "set_table_comment" 

833 

834 

835class DropTableComment(_CreateDropBase["Table"]): 

836 """Represent a COMMENT ON TABLE '' statement. 

837 

838 Note this varies a lot across database backends. 

839 

840 """ 

841 

842 __visit_name__ = "drop_table_comment" 

843 

844 

845class SetColumnComment(_CreateDropBase["Column[Any]"]): 

846 """Represent a COMMENT ON COLUMN IS statement.""" 

847 

848 __visit_name__ = "set_column_comment" 

849 

850 

851class DropColumnComment(_CreateDropBase["Column[Any]"]): 

852 """Represent a COMMENT ON COLUMN IS NULL statement.""" 

853 

854 __visit_name__ = "drop_column_comment" 

855 

856 

857class SetConstraintComment(_CreateDropBase["Constraint"]): 

858 """Represent a COMMENT ON CONSTRAINT IS statement.""" 

859 

860 __visit_name__ = "set_constraint_comment" 

861 

862 

863class DropConstraintComment(_CreateDropBase["Constraint"]): 

864 """Represent a COMMENT ON CONSTRAINT IS NULL statement.""" 

865 

866 __visit_name__ = "drop_constraint_comment" 

867 

868 

869class InvokeDDLBase(SchemaVisitor): 

870 def __init__(self, connection, **kw): 

871 self.connection = connection 

872 assert not kw, f"Unexpected keywords: {kw.keys()}" 

873 

874 @contextlib.contextmanager 

875 def with_ddl_events(self, target, **kw): 

876 """helper context manager that will apply appropriate DDL events 

877 to a CREATE or DROP operation.""" 

878 

879 raise NotImplementedError() 

880 

881 

882class InvokeCreateDDLBase(InvokeDDLBase): 

883 @contextlib.contextmanager 

884 def with_ddl_events(self, target, **kw): 

885 """helper context manager that will apply appropriate DDL events 

886 to a CREATE or DROP operation.""" 

887 

888 target.dispatch.before_create( 

889 target, self.connection, _ddl_runner=self, **kw 

890 ) 

891 yield 

892 target.dispatch.after_create( 

893 target, self.connection, _ddl_runner=self, **kw 

894 ) 

895 

896 

897class InvokeDropDDLBase(InvokeDDLBase): 

898 @contextlib.contextmanager 

899 def with_ddl_events(self, target, **kw): 

900 """helper context manager that will apply appropriate DDL events 

901 to a CREATE or DROP operation.""" 

902 

903 target.dispatch.before_drop( 

904 target, self.connection, _ddl_runner=self, **kw 

905 ) 

906 yield 

907 target.dispatch.after_drop( 

908 target, self.connection, _ddl_runner=self, **kw 

909 ) 

910 

911 

912class SchemaGenerator(InvokeCreateDDLBase): 

913 def __init__( 

914 self, dialect, connection, checkfirst=False, tables=None, **kwargs 

915 ): 

916 super().__init__(connection, **kwargs) 

917 self.checkfirst = checkfirst 

918 self.tables = tables 

919 self.preparer = dialect.identifier_preparer 

920 self.dialect = dialect 

921 self.memo = {} 

922 

923 def _can_create_table(self, table): 

924 self.dialect.validate_identifier(table.name) 

925 effective_schema = self.connection.schema_for_object(table) 

926 if effective_schema: 

927 self.dialect.validate_identifier(effective_schema) 

928 return not self.checkfirst or not self.dialect.has_table( 

929 self.connection, table.name, schema=effective_schema 

930 ) 

931 

932 def _can_create_index(self, index): 

933 effective_schema = self.connection.schema_for_object(index.table) 

934 if effective_schema: 

935 self.dialect.validate_identifier(effective_schema) 

936 return not self.checkfirst or not self.dialect.has_index( 

937 self.connection, 

938 index.table.name, 

939 index.name, 

940 schema=effective_schema, 

941 ) 

942 

943 def _can_create_sequence(self, sequence): 

944 effective_schema = self.connection.schema_for_object(sequence) 

945 

946 return self.dialect.supports_sequences and ( 

947 (not self.dialect.sequences_optional or not sequence.optional) 

948 and ( 

949 not self.checkfirst 

950 or not self.dialect.has_sequence( 

951 self.connection, sequence.name, schema=effective_schema 

952 ) 

953 ) 

954 ) 

955 

956 def visit_metadata(self, metadata): 

957 if self.tables is not None: 

958 tables = self.tables 

959 else: 

960 tables = list(metadata.tables.values()) 

961 

962 collection = sort_tables_and_constraints( 

963 [t for t in tables if self._can_create_table(t)] 

964 ) 

965 

966 seq_coll = [ 

967 s 

968 for s in metadata._sequences.values() 

969 if s.column is None and self._can_create_sequence(s) 

970 ] 

971 

972 event_collection = [t for (t, fks) in collection if t is not None] 

973 

974 with self.with_ddl_events( 

975 metadata, 

976 tables=event_collection, 

977 checkfirst=self.checkfirst, 

978 ): 

979 for seq in seq_coll: 

980 self.traverse_single(seq, create_ok=True) 

981 

982 for table, fkcs in collection: 

983 if table is not None: 

984 self.traverse_single( 

985 table, 

986 create_ok=True, 

987 include_foreign_key_constraints=fkcs, 

988 _is_metadata_operation=True, 

989 ) 

990 else: 

991 for fkc in fkcs: 

992 self.traverse_single(fkc) 

993 

994 def visit_table( 

995 self, 

996 table, 

997 create_ok=False, 

998 include_foreign_key_constraints=None, 

999 _is_metadata_operation=False, 

1000 ): 

1001 if not create_ok and not self._can_create_table(table): 

1002 return 

1003 

1004 with self.with_ddl_events( 

1005 table, 

1006 checkfirst=self.checkfirst, 

1007 _is_metadata_operation=_is_metadata_operation, 

1008 ): 

1009 for column in table.columns: 

1010 if column.default is not None: 

1011 self.traverse_single(column.default) 

1012 

1013 if not self.dialect.supports_alter: 

1014 # e.g., don't omit any foreign key constraints 

1015 include_foreign_key_constraints = None 

1016 

1017 CreateTable( 

1018 table, 

1019 include_foreign_key_constraints=( 

1020 include_foreign_key_constraints 

1021 ), 

1022 )._invoke_with(self.connection) 

1023 

1024 if hasattr(table, "indexes"): 

1025 for index in table.indexes: 

1026 self.traverse_single(index, create_ok=True) 

1027 

1028 if ( 

1029 self.dialect.supports_comments 

1030 and not self.dialect.inline_comments 

1031 ): 

1032 if table.comment is not None: 

1033 SetTableComment(table)._invoke_with(self.connection) 

1034 

1035 for column in table.columns: 

1036 if column.comment is not None: 

1037 SetColumnComment(column)._invoke_with(self.connection) 

1038 

1039 if self.dialect.supports_constraint_comments: 

1040 for constraint in table.constraints: 

1041 if constraint.comment is not None: 

1042 self.connection.execute( 

1043 SetConstraintComment(constraint) 

1044 ) 

1045 

1046 def visit_foreign_key_constraint(self, constraint): 

1047 if not self.dialect.supports_alter: 

1048 return 

1049 

1050 with self.with_ddl_events(constraint): 

1051 AddConstraint(constraint)._invoke_with(self.connection) 

1052 

1053 def visit_sequence(self, sequence, create_ok=False): 

1054 if not create_ok and not self._can_create_sequence(sequence): 

1055 return 

1056 with self.with_ddl_events(sequence): 

1057 CreateSequence(sequence)._invoke_with(self.connection) 

1058 

1059 def visit_index(self, index, create_ok=False): 

1060 if not create_ok and not self._can_create_index(index): 

1061 return 

1062 with self.with_ddl_events(index): 

1063 CreateIndex(index)._invoke_with(self.connection) 

1064 

1065 

1066class SchemaDropper(InvokeDropDDLBase): 

1067 def __init__( 

1068 self, dialect, connection, checkfirst=False, tables=None, **kwargs 

1069 ): 

1070 super().__init__(connection, **kwargs) 

1071 self.checkfirst = checkfirst 

1072 self.tables = tables 

1073 self.preparer = dialect.identifier_preparer 

1074 self.dialect = dialect 

1075 self.memo = {} 

1076 

1077 def visit_metadata(self, metadata): 

1078 if self.tables is not None: 

1079 tables = self.tables 

1080 else: 

1081 tables = list(metadata.tables.values()) 

1082 

1083 try: 

1084 unsorted_tables = [t for t in tables if self._can_drop_table(t)] 

1085 collection = list( 

1086 reversed( 

1087 sort_tables_and_constraints( 

1088 unsorted_tables, 

1089 filter_fn=lambda constraint: ( 

1090 False 

1091 if not self.dialect.supports_alter 

1092 or constraint.name is None 

1093 else None 

1094 ), 

1095 ) 

1096 ) 

1097 ) 

1098 except exc.CircularDependencyError as err2: 

1099 if not self.dialect.supports_alter: 

1100 util.warn( 

1101 "Can't sort tables for DROP; an " 

1102 "unresolvable foreign key " 

1103 "dependency exists between tables: %s; and backend does " 

1104 "not support ALTER. To restore at least a partial sort, " 

1105 "apply use_alter=True to ForeignKey and " 

1106 "ForeignKeyConstraint " 

1107 "objects involved in the cycle to mark these as known " 

1108 "cycles that will be ignored." 

1109 % (", ".join(sorted([t.fullname for t in err2.cycles]))) 

1110 ) 

1111 collection = [(t, ()) for t in unsorted_tables] 

1112 else: 

1113 raise exc.CircularDependencyError( 

1114 err2.args[0], 

1115 err2.cycles, 

1116 err2.edges, 

1117 msg="Can't sort tables for DROP; an " 

1118 "unresolvable foreign key " 

1119 "dependency exists between tables: %s. Please ensure " 

1120 "that the ForeignKey and ForeignKeyConstraint objects " 

1121 "involved in the cycle have " 

1122 "names so that they can be dropped using " 

1123 "DROP CONSTRAINT." 

1124 % (", ".join(sorted([t.fullname for t in err2.cycles]))), 

1125 ) from err2 

1126 

1127 seq_coll = [ 

1128 s 

1129 for s in metadata._sequences.values() 

1130 if self._can_drop_sequence(s) 

1131 ] 

1132 

1133 event_collection = [t for (t, fks) in collection if t is not None] 

1134 

1135 with self.with_ddl_events( 

1136 metadata, 

1137 tables=event_collection, 

1138 checkfirst=self.checkfirst, 

1139 ): 

1140 for table, fkcs in collection: 

1141 if table is not None: 

1142 self.traverse_single( 

1143 table, 

1144 drop_ok=True, 

1145 _is_metadata_operation=True, 

1146 _ignore_sequences=seq_coll, 

1147 ) 

1148 else: 

1149 for fkc in fkcs: 

1150 self.traverse_single(fkc) 

1151 

1152 for seq in seq_coll: 

1153 self.traverse_single(seq, drop_ok=seq.column is None) 

1154 

1155 def _can_drop_table(self, table): 

1156 self.dialect.validate_identifier(table.name) 

1157 effective_schema = self.connection.schema_for_object(table) 

1158 if effective_schema: 

1159 self.dialect.validate_identifier(effective_schema) 

1160 return not self.checkfirst or self.dialect.has_table( 

1161 self.connection, table.name, schema=effective_schema 

1162 ) 

1163 

1164 def _can_drop_index(self, index): 

1165 effective_schema = self.connection.schema_for_object(index.table) 

1166 if effective_schema: 

1167 self.dialect.validate_identifier(effective_schema) 

1168 return not self.checkfirst or self.dialect.has_index( 

1169 self.connection, 

1170 index.table.name, 

1171 index.name, 

1172 schema=effective_schema, 

1173 ) 

1174 

1175 def _can_drop_sequence(self, sequence): 

1176 effective_schema = self.connection.schema_for_object(sequence) 

1177 return self.dialect.supports_sequences and ( 

1178 (not self.dialect.sequences_optional or not sequence.optional) 

1179 and ( 

1180 not self.checkfirst 

1181 or self.dialect.has_sequence( 

1182 self.connection, sequence.name, schema=effective_schema 

1183 ) 

1184 ) 

1185 ) 

1186 

1187 def visit_index(self, index, drop_ok=False): 

1188 if not drop_ok and not self._can_drop_index(index): 

1189 return 

1190 

1191 with self.with_ddl_events(index): 

1192 DropIndex(index)(index, self.connection) 

1193 

1194 def visit_table( 

1195 self, 

1196 table, 

1197 drop_ok=False, 

1198 _is_metadata_operation=False, 

1199 _ignore_sequences=(), 

1200 ): 

1201 if not drop_ok and not self._can_drop_table(table): 

1202 return 

1203 

1204 with self.with_ddl_events( 

1205 table, 

1206 checkfirst=self.checkfirst, 

1207 _is_metadata_operation=_is_metadata_operation, 

1208 ): 

1209 DropTable(table)._invoke_with(self.connection) 

1210 

1211 # traverse client side defaults which may refer to server-side 

1212 # sequences. noting that some of these client side defaults may 

1213 # also be set up as server side defaults 

1214 # (see https://docs.sqlalchemy.org/en/ 

1215 # latest/core/defaults.html 

1216 # #associating-a-sequence-as-the-server-side- 

1217 # default), so have to be dropped after the table is dropped. 

1218 for column in table.columns: 

1219 if ( 

1220 column.default is not None 

1221 and column.default not in _ignore_sequences 

1222 ): 

1223 self.traverse_single(column.default) 

1224 

1225 def visit_foreign_key_constraint(self, constraint): 

1226 if not self.dialect.supports_alter: 

1227 return 

1228 with self.with_ddl_events(constraint): 

1229 DropConstraint(constraint)._invoke_with(self.connection) 

1230 

1231 def visit_sequence(self, sequence, drop_ok=False): 

1232 if not drop_ok and not self._can_drop_sequence(sequence): 

1233 return 

1234 with self.with_ddl_events(sequence): 

1235 DropSequence(sequence)._invoke_with(self.connection) 

1236 

1237 
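# Editor's note: illustrative sketch, not part of this module.  SchemaGenerator
# and SchemaDropper are normally driven indirectly through
# MetaData.create_all() / drop_all(); the in-memory SQLite engine below is just
# a convenient target.
def _example_create_all_drop_all():  # pragma: no cover - illustrative sketch
    from sqlalchemy import Column, Integer, MetaData, Table, create_engine

    engine = create_engine("sqlite+pysqlite:///:memory:")
    metadata = MetaData()
    Table("notes", metadata, Column("id", Integer, primary_key=True))

    # checkfirst=True routes through the _can_create_* / _can_drop_* checks
    # above before emitting each CREATE / DROP statement
    metadata.create_all(engine, checkfirst=True)
    metadata.drop_all(engine, checkfirst=True)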

1238def sort_tables( 

1239 tables: Iterable[TableClause], 

1240 skip_fn: Optional[Callable[[ForeignKeyConstraint], bool]] = None, 

1241 extra_dependencies: Optional[ 

1242 typing_Sequence[Tuple[TableClause, TableClause]] 

1243 ] = None, 

1244) -> List[Table]: 

1245 """Sort a collection of :class:`_schema.Table` objects based on 

1246 dependency. 

1247 

1248 This is a dependency-ordered sort which will emit :class:`_schema.Table` 

1249 objects such that each :class:`_schema.Table` follows the :class:`_schema.Table` 

1250 objects it depends on. 

1251 Tables are dependent on one another based on the presence of 

1252 :class:`_schema.ForeignKeyConstraint` 

1253 objects as well as explicit dependencies 

1254 added by :meth:`_schema.Table.add_is_dependent_on`. 

1255 

1256 .. warning:: 

1257 

1258 The :func:`._schema.sort_tables` function cannot by itself 

1259 accommodate automatic resolution of dependency cycles between 

1260 tables, which are usually caused by mutually dependent foreign key 

1261 constraints. When these cycles are detected, the foreign keys 

1262 of these tables are omitted from consideration in the sort. 

1263 A warning is emitted when this condition occurs; this will become 

1264 an exception in a future release. Tables which are not part 

1265 of the cycle will still be returned in dependency order. 

1266 

1267 To resolve these cycles, the 

1268 :paramref:`_schema.ForeignKeyConstraint.use_alter` parameter may be 

1269 applied to those constraints which create a cycle. Alternatively, 

1270 the :func:`_schema.sort_tables_and_constraints` function will 

1271 automatically return foreign key constraints in a separate 

1272 collection when cycles are detected so that they may be applied 

1273 to a schema separately. 

1274 

1275 .. versionchanged:: 1.3.17 - a warning is emitted when 

1276 :func:`_schema.sort_tables` cannot perform a proper sort due to 

1277 cyclical dependencies. This will be an exception in a future 

1278 release. Additionally, the sort will continue to return 

1279 other tables not involved in the cycle in dependency order 

1280 which was not the case previously. 

1281 

1282 :param tables: a sequence of :class:`_schema.Table` objects. 

1283 

1284 :param skip_fn: optional callable which will be passed a 

1285 :class:`_schema.ForeignKey` object; if it returns True, this 

1286 constraint will not be considered as a dependency. Note this is 

1287 **different** from the same parameter in 

1288 :func:`.sort_tables_and_constraints`, which is 

1289 instead passed the owning :class:`_schema.ForeignKeyConstraint` object. 

1290 

1291 :param extra_dependencies: a sequence of 2-tuples of tables which will 

1292 also be considered as dependent on each other. 

1293 

1294 .. seealso:: 

1295 

1296 :func:`.sort_tables_and_constraints` 

1297 

1298 :attr:`_schema.MetaData.sorted_tables` - uses this function to sort 

1299 

1300 

1301 """ 

1302 

1303 if skip_fn is not None: 

1304 fixed_skip_fn = skip_fn 

1305 

1306 def _skip_fn(fkc): 

1307 for fk in fkc.elements: 

1308 if fixed_skip_fn(fk): 

1309 return True 

1310 else: 

1311 return None 

1312 

1313 else: 

1314 _skip_fn = None # type: ignore 

1315 

1316 return [ 

1317 t 

1318 for (t, fkcs) in sort_tables_and_constraints( 

1319 tables, 

1320 filter_fn=_skip_fn, 

1321 extra_dependencies=extra_dependencies, 

1322 _warn_for_cycles=True, 

1323 ) 

1324 if t is not None 

1325 ] 

1326 

1327 
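# Editor's note: illustrative sketch, not part of this module, showing the
# dependency ordering produced by sort_tables(); table names are hypothetical.
def _example_sort_tables():  # pragma: no cover - illustrative sketch
    from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table

    metadata = MetaData()
    child = Table(
        "child",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("parent_id", Integer, ForeignKey("parent.id")),
    )
    parent = Table("parent", metadata, Column("id", Integer, primary_key=True))

    # "parent" is emitted before "child", which depends on it
    print([t.name for t in sort_tables([child, parent])])  # ['parent', 'child']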

1328def sort_tables_and_constraints( 

1329 tables, filter_fn=None, extra_dependencies=None, _warn_for_cycles=False 

1330): 

1331 """Sort a collection of :class:`_schema.Table` / 

1332 :class:`_schema.ForeignKeyConstraint` 

1333 objects. 

1334 

1335 This is a dependency-ordered sort which will emit tuples of 

1336 ``(Table, [ForeignKeyConstraint, ...])`` such that each 

1337 :class:`_schema.Table` follows its dependent :class:`_schema.Table` 

1338 objects. 

1339 Remaining :class:`_schema.ForeignKeyConstraint` 

1340 objects that are separate due to 

1341 dependency rules not satisfied by the sort are emitted afterwards 

1342 as ``(None, [ForeignKeyConstraint ...])``. 

1343 

1344 Tables are dependent on one another based on the presence of 

1345 :class:`_schema.ForeignKeyConstraint` objects, explicit dependencies 

1346 added by :meth:`_schema.Table.add_is_dependent_on`, 

1347 as well as dependencies 

1348 stated here using the :paramref:`~.sort_tables_and_constraints.filter_fn` 

1349 and/or :paramref:`~.sort_tables_and_constraints.extra_dependencies` 

1350 parameters. 

1351 

1352 :param tables: a sequence of :class:`_schema.Table` objects. 

1353 

1354 :param filter_fn: optional callable which will be passed a 

1355 :class:`_schema.ForeignKeyConstraint` object, 

1356 and returns a value based on 

1357 whether this constraint should definitely be included or excluded as 

1358 an inline constraint, or neither. If it returns False, the constraint 

1359 will definitely be included as a dependency that cannot be subject 

1360 to ALTER; if True, it will **only** be included as an ALTER result at 

1361 the end. Returning None means the constraint is included in the 

1362 table-based result unless it is detected as part of a dependency cycle. 

1363 

1364 :param extra_dependencies: a sequence of 2-tuples of tables which will 

1365 also be considered as dependent on each other. 

1366 

1367 .. seealso:: 

1368 

1369 :func:`.sort_tables` 

1370 

1371 

1372 """ 

1373 

1374 fixed_dependencies = set() 

1375 mutable_dependencies = set() 

1376 

1377 if extra_dependencies is not None: 

1378 fixed_dependencies.update(extra_dependencies) 

1379 

1380 remaining_fkcs = set() 

1381 for table in tables: 

1382 for fkc in table.foreign_key_constraints: 

1383 if fkc.use_alter is True: 

1384 remaining_fkcs.add(fkc) 

1385 continue 

1386 

1387 if filter_fn: 

1388 filtered = filter_fn(fkc) 

1389 

1390 if filtered is True: 

1391 remaining_fkcs.add(fkc) 

1392 continue 

1393 

1394 dependent_on = fkc.referred_table 

1395 if dependent_on is not table: 

1396 mutable_dependencies.add((dependent_on, table)) 

1397 

1398 fixed_dependencies.update( 

1399 (parent, table) for parent in table._extra_dependencies 

1400 ) 

1401 

1402 try: 

1403 candidate_sort = list( 

1404 topological.sort( 

1405 fixed_dependencies.union(mutable_dependencies), 

1406 tables, 

1407 ) 

1408 ) 

1409 except exc.CircularDependencyError as err: 

1410 if _warn_for_cycles: 

1411 util.warn( 

1412 "Cannot correctly sort tables; there are unresolvable cycles " 

1413 'between tables "%s", which is usually caused by mutually ' 

1414 "dependent foreign key constraints. Foreign key constraints " 

1415 "involving these tables will not be considered; this warning " 

1416 "may raise an error in a future release." 

1417 % (", ".join(sorted(t.fullname for t in err.cycles)),) 

1418 ) 

1419 for edge in err.edges: 

1420 if edge in mutable_dependencies: 

1421 table = edge[1] 

1422 if table not in err.cycles: 

1423 continue 

1424 can_remove = [ 

1425 fkc 

1426 for fkc in table.foreign_key_constraints 

1427 if filter_fn is None or filter_fn(fkc) is not False 

1428 ] 

1429 remaining_fkcs.update(can_remove) 

1430 for fkc in can_remove: 

1431 dependent_on = fkc.referred_table 

1432 if dependent_on is not table: 

1433 mutable_dependencies.discard((dependent_on, table)) 

1434 candidate_sort = list( 

1435 topological.sort( 

1436 fixed_dependencies.union(mutable_dependencies), 

1437 tables, 

1438 ) 

1439 ) 

1440 

1441 return [ 

1442 (table, table.foreign_key_constraints.difference(remaining_fkcs)) 

1443 for table in candidate_sort 

1444 ] + [(None, list(remaining_fkcs))]
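# Editor's note: illustrative sketch, not part of this module.  It shows the
# (Table, [ForeignKeyConstraint, ...]) pairs returned by
# sort_tables_and_constraints(), with a use_alter constraint deferred to the
# trailing (None, [...]) entry.  Table names are hypothetical.
def _example_sort_tables_and_constraints():  # pragma: no cover - sketch
    from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table

    metadata = MetaData()
    a = Table(
        "a",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("b_id", Integer, ForeignKey("b.id", use_alter=True)),
    )
    b = Table(
        "b",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("a_id", Integer, ForeignKey("a.id")),
    )

    for table, fkcs in sort_tables_and_constraints([a, b]):
        name = table.name if table is not None else None
        print(name, [fkc.referred_table.name for fkc in fkcs])
    # prints roughly: a [], then b ['a'], then None ['b']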