# engine/create.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php

from __future__ import annotations

import inspect
import typing
from typing import Any
from typing import Callable
from typing import cast
from typing import Dict
from typing import List
from typing import Optional
from typing import overload
from typing import Type
from typing import Union

from . import base
from . import url as _url
from .interfaces import DBAPIConnection
from .mock import create_mock_engine
from .. import event
from .. import exc
from .. import util
from ..pool import _AdhocProxiedConnection
from ..pool import ConnectionPoolEntry
from ..sql import compiler
from ..util import immutabledict

if typing.TYPE_CHECKING:
    from .base import Engine
    from .interfaces import _ExecuteOptions
    from .interfaces import _ParamStyle
    from .interfaces import IsolationLevel
    from .url import URL
    from ..log import _EchoFlagType
    from ..pool import _CreatorFnType
    from ..pool import _CreatorWRecFnType
    from ..pool import _ResetStyleArgType
    from ..pool import Pool
    from ..util.typing import Literal


@overload
def create_engine(
    url: Union[str, URL],
    *,
    connect_args: Dict[Any, Any] = ...,
    convert_unicode: bool = ...,
    creator: Union[_CreatorFnType, _CreatorWRecFnType] = ...,
    echo: _EchoFlagType = ...,
    echo_pool: _EchoFlagType = ...,
    enable_from_linting: bool = ...,
    execution_options: _ExecuteOptions = ...,
    future: Literal[True],
    hide_parameters: bool = ...,
    implicit_returning: Literal[True] = ...,
    insertmanyvalues_page_size: int = ...,
    isolation_level: IsolationLevel = ...,
    json_deserializer: Callable[..., Any] = ...,
    json_serializer: Callable[..., Any] = ...,
    label_length: Optional[int] = ...,
    logging_name: str = ...,
    max_identifier_length: Optional[int] = ...,
    max_overflow: int = ...,
    module: Optional[Any] = ...,
    paramstyle: Optional[_ParamStyle] = ...,
    pool: Optional[Pool] = ...,
    poolclass: Optional[Type[Pool]] = ...,
    pool_logging_name: str = ...,
    pool_pre_ping: bool = ...,
    pool_size: int = ...,
    pool_recycle: int = ...,
    pool_reset_on_return: Optional[_ResetStyleArgType] = ...,
    pool_timeout: float = ...,
    pool_use_lifo: bool = ...,
    plugins: List[str] = ...,
    query_cache_size: int = ...,
    use_insertmanyvalues: bool = ...,
    **kwargs: Any,
) -> Engine: ...


@overload
def create_engine(url: Union[str, URL], **kwargs: Any) -> Engine: ...


@util.deprecated_params(
    strategy=(
        "1.4",
        "The :paramref:`_sa.create_engine.strategy` keyword is deprecated, "
        "and the only argument accepted is 'mock'; please use "
        ":func:`.create_mock_engine` going forward. For general "
        "customization of create_engine which may have been accomplished "
        "using strategies, see :class:`.CreateEnginePlugin`.",
    ),
    empty_in_strategy=(
        "1.4",
        "The :paramref:`_sa.create_engine.empty_in_strategy` keyword is "
        "deprecated, and no longer has any effect. All IN expressions "
        "are now rendered using "
        'the "expanding parameter" strategy which renders a set of bound '
        'expressions, or an "empty set" SELECT, at statement execution '
        "time.",
    ),
    implicit_returning=(
        "2.0",
        "The :paramref:`_sa.create_engine.implicit_returning` parameter "
        "is deprecated and will be removed in a future release. ",
    ),
)
def create_engine(url: Union[str, _url.URL], **kwargs: Any) -> Engine:
    """Create a new :class:`_engine.Engine` instance.

    The standard calling form is to send the :ref:`URL <database_urls>` as the
    first positional argument, usually a string
    that indicates database dialect and connection arguments::

        engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")

    .. note::

        Please review :ref:`database_urls` for general guidelines in composing
        URL strings. In particular, special characters, such as those often
        part of passwords, must be URL encoded to be properly parsed.

    Additional keyword arguments may then follow it which
    establish various options on the resulting :class:`_engine.Engine`
    and its underlying :class:`.Dialect` and :class:`_pool.Pool`
    constructs::

        engine = create_engine("mysql+mysqldb://scott:tiger@hostname/dbname",
                               pool_recycle=3600, echo=True)

    The string form of the URL is
    ``dialect[+driver]://user:password@host/dbname[?key=value..]``, where
    ``dialect`` is a database name such as ``mysql``, ``oracle``,
    ``postgresql``, etc., and ``driver`` the name of a DBAPI, such as
    ``psycopg2``, ``pyodbc``, ``cx_oracle``, etc. Alternatively,
    the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`.

    ``**kwargs`` takes a wide variety of options which are routed
    towards their appropriate components. Arguments may be specific to
    the :class:`_engine.Engine`, the underlying :class:`.Dialect`,
    as well as the
    :class:`_pool.Pool`. Specific dialects also accept keyword arguments that
    are unique to that dialect. Here, we describe the parameters
    that are common to most :func:`_sa.create_engine()` usage.

    Once established, the newly resulting :class:`_engine.Engine` will
    request a connection from the underlying :class:`_pool.Pool` once
    :meth:`_engine.Engine.connect` is called, or a method which depends on it
    such as :meth:`_engine.Engine.execute` is invoked. The
    :class:`_pool.Pool` in turn
    will establish the first actual DBAPI connection when this request
    is received. The :func:`_sa.create_engine` call itself does **not**
    establish any actual DBAPI connections directly.

    .. seealso::

        :doc:`/core/engines`

        :doc:`/dialects/index`

        :ref:`connections_toplevel`

    :param connect_args: a dictionary of options which will be
      passed directly to the DBAPI's ``connect()`` method as
      additional keyword arguments. See the example
      at :ref:`custom_dbapi_args`.
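
      For example, additional keyword arguments may be forwarded to the
      DBAPI's ``connect()`` call; the ``connect_timeout`` key shown below is
      an assumed DBAPI-specific argument and the URL is illustrative::

          # "connect_timeout" is DBAPI-specific; consult the driver's
          # documentation for the keyword arguments it accepts
          engine = create_engine(
              "postgresql+psycopg2://scott:tiger@localhost/test",
              connect_args={"connect_timeout": 10},
          )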

    :param creator: a callable which returns a DBAPI connection.
      This creation function will be passed to the underlying
      connection pool and will be used to create all new database
      connections. Usage of this function causes connection
      parameters specified in the URL argument to be bypassed.

      This hook is not as flexible as the newer
      :meth:`_events.DialectEvents.do_connect` hook which allows complete
      control over how a connection is made to the database, given the full
      set of URL arguments and state beforehand.
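
      E.g., a minimal sketch using the standard library ``sqlite3`` module
      (the ``make_connection()`` helper and database path are hypothetical)::

          import sqlite3

          def make_connection():
              # return a raw DBAPI connection; connection parameters in the
              # URL are bypassed when a creator is supplied
              return sqlite3.connect("/path/to/mydb.sqlite")

          engine = create_engine("sqlite://", creator=make_connection)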

      .. seealso::

          :meth:`_events.DialectEvents.do_connect` - event hook that allows
          full control over DBAPI connection mechanics.

          :ref:`custom_dbapi_args`

    :param echo=False: if True, the Engine will log all statements
      as well as a ``repr()`` of their parameter lists to the default log
      handler, which defaults to ``sys.stdout`` for output. If set to the
      string ``"debug"``, result rows will be printed to the standard output
      as well. The ``echo`` attribute of ``Engine`` can be modified at any
      time to turn logging on and off; direct control of logging is also
      available using the standard Python ``logging`` module.

      .. seealso::

          :ref:`dbengine_logging` - further detail on how to configure
          logging.


    :param echo_pool=False: if True, the connection pool will log
      informational output such as when connections are invalidated
      as well as when connections are recycled to the default log handler,
      which defaults to ``sys.stdout`` for output. If set to the string
      ``"debug"``, the logging will include pool checkouts and checkins.
      Direct control of logging is also available using the standard Python
      ``logging`` module.

      .. seealso::

          :ref:`dbengine_logging` - further detail on how to configure
          logging.


    :param empty_in_strategy: No longer used; SQLAlchemy now uses
      "empty set" behavior for IN in all cases.

    :param enable_from_linting: defaults to True. Will emit a warning
      if a given SELECT statement is found to have un-linked FROM elements
      which would cause a cartesian product.

      .. versionadded:: 1.4

      .. seealso::

          :ref:`change_4737`

    :param execution_options: Dictionary execution options which will
      be applied to all connections. See
      :meth:`~sqlalchemy.engine.Connection.execution_options`

    :param future: Use the 2.0 style :class:`_engine.Engine` and
      :class:`_engine.Connection` API.

      As of SQLAlchemy 2.0, this parameter is present for backwards
      compatibility only and must remain at its default value of ``True``.

      The :paramref:`_sa.create_engine.future` parameter will be
      deprecated in a subsequent 2.x release and eventually removed.

      .. versionadded:: 1.4

      .. versionchanged:: 2.0 All :class:`_engine.Engine` objects are
         "future" style engines and there is no longer a ``future=False``
         mode of operation.

      .. seealso::

          :ref:`migration_20_toplevel`

    :param hide_parameters: Boolean, when set to True, SQL statement parameters
      will not be displayed in INFO logging nor will they be formatted into
      the string representation of :class:`.StatementError` objects.

      .. versionadded:: 1.3.8

      .. seealso::

          :ref:`dbengine_logging` - further detail on how to configure
          logging.

    :param implicit_returning=True: Legacy parameter that may only be set
      to True. In SQLAlchemy 2.0, this parameter does nothing. In order to
      disable "implicit returning" for statements invoked by the ORM,
      configure this on a per-table basis using the
      :paramref:`.Table.implicit_returning` parameter.


    :param insertmanyvalues_page_size: number of rows to format into an
      INSERT statement when the statement uses "insertmanyvalues" mode, which is
      a paged form of bulk insert that is used for many backends when using
      :term:`executemany` execution typically in conjunction with RETURNING.
      Defaults to 1000, but may also be subject to dialect-specific limiting
      factors which may override this value on a per-statement basis.

      .. versionadded:: 2.0

      .. seealso::

          :ref:`engine_insertmanyvalues`

          :ref:`engine_insertmanyvalues_page_size`

          :paramref:`_engine.Connection.execution_options.insertmanyvalues_page_size`

    :param isolation_level: optional string name of an isolation level
      which will be set on all new connections unconditionally.
      Isolation levels are typically some subset of the string names
      ``"SERIALIZABLE"``, ``"REPEATABLE READ"``,
      ``"READ COMMITTED"``, ``"READ UNCOMMITTED"`` and ``"AUTOCOMMIT"``
      based on backend.
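
      E.g., to fix the isolation level for all connections produced by the
      engine (URL and level are illustrative; use a level supported by the
      target backend)::

          engine = create_engine(
              "postgresql+psycopg2://scott:tiger@localhost/test",
              isolation_level="REPEATABLE READ",
          )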

      The :paramref:`_sa.create_engine.isolation_level` parameter is
      in contrast to the
      :paramref:`.Connection.execution_options.isolation_level`
      execution option, which may be set on an individual
      :class:`.Connection`, as well as the same parameter passed to
      :meth:`.Engine.execution_options`, where it may be used to create
      multiple engines with different isolation levels that share a common
      connection pool and dialect.

      .. versionchanged:: 2.0 The
         :paramref:`_sa.create_engine.isolation_level`
         parameter has been generalized to work on all dialects which support
         the concept of isolation level, and is provided as a more succinct,
         up front configuration switch in contrast to the execution option
         which is more of an ad-hoc programmatic option.

      .. seealso::

          :ref:`dbapi_autocommit`

    :param json_deserializer: for dialects that support the
      :class:`_types.JSON`
      datatype, this is a Python callable that will convert a JSON string
      to a Python object. By default, the Python ``json.loads`` function is
      used.

      .. versionchanged:: 1.3.7 The SQLite dialect renamed this from
         ``_json_deserializer``.

    :param json_serializer: for dialects that support the :class:`_types.JSON`
      datatype, this is a Python callable that will render a given object
      as JSON. By default, the Python ``json.dumps`` function is used.

      .. versionchanged:: 1.3.7 The SQLite dialect renamed this from
         ``_json_serializer``.


    :param label_length=None: optional integer value which limits
      the size of dynamically generated column labels to that many
      characters. If less than 6, labels are generated as
      "_(counter)". If ``None``, the value of
      ``dialect.max_identifier_length``, which may be affected via the
      :paramref:`_sa.create_engine.max_identifier_length` parameter,
      is used instead. The value of
      :paramref:`_sa.create_engine.label_length`
      may not be larger than that of
      :paramref:`_sa.create_engine.max_identifier_length`.

      .. seealso::

          :paramref:`_sa.create_engine.max_identifier_length`

    :param logging_name: String identifier which will be used within
      the "name" field of logging records generated within the
      "sqlalchemy.engine" logger. Defaults to a hexstring of the
      object's id.

      .. seealso::

          :ref:`dbengine_logging` - further detail on how to configure
          logging.

          :paramref:`_engine.Connection.execution_options.logging_token`

    :param max_identifier_length: integer; override the max_identifier_length
      determined by the dialect. If ``None`` or zero, has no effect. This
      is the database's configured maximum number of characters that may be
      used in a SQL identifier such as a table name, column name, or label
      name. All dialects determine this value automatically, however in the
      case of a new database version for which this value has changed but
      SQLAlchemy's dialect has not been adjusted, the value may be passed
      here.

      .. versionadded:: 1.3.9

      .. seealso::

          :paramref:`_sa.create_engine.label_length`

    :param max_overflow=10: the number of connections to allow in
      connection pool "overflow", that is connections that can be
      opened above and beyond the pool_size setting, which defaults
      to five. This is only used with :class:`~sqlalchemy.pool.QueuePool`.

    :param module=None: reference to a Python module object (the module
      itself, not its string name). Specifies an alternate DBAPI module to
      be used by the engine's dialect. Each sub-dialect references a
      specific DBAPI which will be imported before first connect. This
      parameter causes the import to be bypassed, and the given module to
      be used instead. Can be used for testing of DBAPIs as well as to
      inject "mock" DBAPI implementations into the :class:`_engine.Engine`.

    :param paramstyle=None: The `paramstyle <https://legacy.python.org/dev/peps/pep-0249/#paramstyle>`_
      to use when rendering bound parameters. This style defaults to the
      one recommended by the DBAPI itself, which is retrieved from the
      ``.paramstyle`` attribute of the DBAPI. However, most DBAPIs accept
      more than one paramstyle, and in particular it may be desirable
      to change a "named" paramstyle into a "positional" one, or vice versa.
      When this attribute is passed, it should be one of the values
      ``"qmark"``, ``"numeric"``, ``"named"``, ``"format"`` or
      ``"pyformat"``, and should correspond to a parameter style known
      to be supported by the DBAPI in use.

    :param pool=None: an already-constructed instance of
      :class:`~sqlalchemy.pool.Pool`, such as a
      :class:`~sqlalchemy.pool.QueuePool` instance. If non-None, this
      pool will be used directly as the underlying connection pool
      for the engine, bypassing whatever connection parameters are
      present in the URL argument. For information on constructing
      connection pools manually, see :ref:`pooling_toplevel`.

    :param poolclass=None: a :class:`~sqlalchemy.pool.Pool`
      subclass, which will be used to create a connection pool
      instance using the connection parameters given in the URL. Note
      this differs from ``pool`` in that you don't actually
      instantiate the pool in this case; you just indicate what type
      of pool to use.
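
      For example, pooling may be disabled entirely by selecting
      :class:`~sqlalchemy.pool.NullPool` (URL shown is illustrative)::

          from sqlalchemy.pool import NullPool

          engine = create_engine(
              "postgresql+psycopg2://scott:tiger@localhost/test",
              poolclass=NullPool,
          )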

    :param pool_logging_name: String identifier which will be used within
      the "name" field of logging records generated within the
      "sqlalchemy.pool" logger. Defaults to a hexstring of the object's
      id.

      .. seealso::

          :ref:`dbengine_logging` - further detail on how to configure
          logging.

    :param pool_pre_ping: boolean, if True will enable the connection pool
      "pre-ping" feature that tests connections for liveness upon
      each checkout.

      .. versionadded:: 1.2

      .. seealso::

          :ref:`pool_disconnects_pessimistic`

    :param pool_size=5: the number of connections to keep open
      inside the connection pool. This is used with
      :class:`~sqlalchemy.pool.QueuePool` as
      well as :class:`~sqlalchemy.pool.SingletonThreadPool`. With
      :class:`~sqlalchemy.pool.QueuePool`, a ``pool_size`` setting
      of 0 indicates no limit; to disable pooling, set ``poolclass`` to
      :class:`~sqlalchemy.pool.NullPool` instead.
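
      E.g., ``pool_size`` is commonly combined with
      :paramref:`_sa.create_engine.max_overflow` (URL and numbers shown are
      illustrative)::

          engine = create_engine(
              "postgresql+psycopg2://scott:tiger@localhost/test",
              pool_size=10,
              max_overflow=20,
          )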

    :param pool_recycle=-1: this setting causes the pool to recycle
      connections after the given number of seconds has passed. It
      defaults to -1, or no timeout. For example, setting to 3600
      means connections will be recycled after one hour. Note that
      MySQL in particular will disconnect automatically if no
      activity is detected on a connection for eight hours (although
      this is configurable with the MySQLDB connection itself and the
      server configuration as well).

      .. seealso::

          :ref:`pool_setting_recycle`

    :param pool_reset_on_return='rollback': set the
      :paramref:`_pool.Pool.reset_on_return` parameter of the underlying
      :class:`_pool.Pool` object, which can be set to the values
      ``"rollback"``, ``"commit"``, or ``None``.

      .. seealso::

          :ref:`pool_reset_on_return`

    :param pool_timeout=30: number of seconds to wait before giving
      up on getting a connection from the pool. This is only used
      with :class:`~sqlalchemy.pool.QueuePool`. This can be a float but is
      subject to the limitations of Python time functions which may not be
      reliable in the tens of milliseconds.

      .. note: don't use 30.0 above, it seems to break with the :param tag

    :param pool_use_lifo=False: use LIFO (last-in-first-out) when retrieving
      connections from :class:`.QueuePool` instead of FIFO
      (first-in-first-out). Using LIFO, a server-side timeout scheme can
      reduce the number of connections used during non-peak periods of
      use. When planning for server-side timeouts, ensure that a recycle or
      pre-ping strategy is in use to gracefully handle stale connections.

      .. versionadded:: 1.3

      .. seealso::

          :ref:`pool_use_lifo`

          :ref:`pool_disconnects`

    :param plugins: string list of plugin names to load. See
      :class:`.CreateEnginePlugin` for background.

      .. versionadded:: 1.2.3

    :param query_cache_size: size of the cache used to cache the SQL string
      form of queries. Set to zero to disable caching.

      The cache is pruned of its least recently used items when its size reaches
      N * 1.5. Defaults to 500, meaning the cache will always store at least
      500 SQL statements when filled, and will grow up to 750 items at which
      point it is pruned back down to 500 by removing the 250 least recently
      used items.

      Caching is accomplished on a per-statement basis by generating a
      cache key that represents the statement's structure, then generating
      string SQL for the current dialect only if that key is not present
      in the cache. All statements support caching; however, some features
      such as an INSERT with a large set of parameters will intentionally
      bypass the cache. SQL logging will indicate statistics for each
      statement whether or not it was pulled from the cache.

      .. note:: some ORM functions related to unit-of-work persistence as well
         as some attribute loading strategies will make use of individual
         per-mapper caches outside of the main cache.


      .. seealso::

          :ref:`sql_caching`

      .. versionadded:: 1.4

    :param use_insertmanyvalues: True by default, use the "insertmanyvalues"
      execution style for INSERT..RETURNING statements by default.

      .. versionadded:: 2.0

      .. seealso::

          :ref:`engine_insertmanyvalues`

    """  # noqa

    if "strategy" in kwargs:
        strat = kwargs.pop("strategy")
        if strat == "mock":
            # this case is deprecated
            return create_mock_engine(url, **kwargs)  # type: ignore
        else:
            raise exc.ArgumentError("unknown strategy: %r" % strat)

    kwargs.pop("empty_in_strategy", None)

    # create url.URL object
    u = _url.make_url(url)

    u, plugins, kwargs = u._instantiate_plugins(kwargs)

    entrypoint = u._get_entrypoint()
    _is_async = kwargs.pop("_is_async", False)
    if _is_async:
        dialect_cls = entrypoint.get_async_dialect_cls(u)
    else:
        dialect_cls = entrypoint.get_dialect_cls(u)

    if kwargs.pop("_coerce_config", False):

        def pop_kwarg(key: str, default: Optional[Any] = None) -> Any:
            value = kwargs.pop(key, default)
            if key in dialect_cls.engine_config_types:
                value = dialect_cls.engine_config_types[key](value)
            return value

    else:
        pop_kwarg = kwargs.pop  # type: ignore

    dialect_args = {}
    # consume dialect arguments from kwargs
    for k in util.get_cls_kwargs(dialect_cls):
        if k in kwargs:
            dialect_args[k] = pop_kwarg(k)

    dbapi = kwargs.pop("module", None)
    if dbapi is None:
        dbapi_args = {}

        if "import_dbapi" in dialect_cls.__dict__:
            dbapi_meth = dialect_cls.import_dbapi

        elif hasattr(dialect_cls, "dbapi") and inspect.ismethod(
            dialect_cls.dbapi
        ):
            util.warn_deprecated(
                "The dbapi() classmethod on dialect classes has been "
                "renamed to import_dbapi(). Implement an import_dbapi() "
                f"classmethod directly on class {dialect_cls} to remove this "
                "warning; the old .dbapi() classmethod may be maintained for "
                "backwards compatibility.",
                "2.0",
            )
            dbapi_meth = dialect_cls.dbapi
        else:
            dbapi_meth = dialect_cls.import_dbapi

        for k in util.get_func_kwargs(dbapi_meth):
            if k in kwargs:
                dbapi_args[k] = pop_kwarg(k)
        dbapi = dbapi_meth(**dbapi_args)

    dialect_args["dbapi"] = dbapi

    dialect_args.setdefault("compiler_linting", compiler.NO_LINTING)
    enable_from_linting = kwargs.pop("enable_from_linting", True)
    if enable_from_linting:
        dialect_args["compiler_linting"] |= compiler.COLLECT_CARTESIAN_PRODUCTS

    for plugin in plugins:
        plugin.handle_dialect_kwargs(dialect_cls, dialect_args)

    # create dialect
    dialect = dialect_cls(**dialect_args)

    # assemble connection arguments
    (cargs_tup, cparams) = dialect.create_connect_args(u)
    cparams.update(pop_kwarg("connect_args", {}))
    cargs = list(cargs_tup)  # allow mutability

    # look for existing pool or create
    pool = pop_kwarg("pool", None)
    if pool is None:

        def connect(
            connection_record: Optional[ConnectionPoolEntry] = None,
        ) -> DBAPIConnection:
            if dialect._has_events:
                for fn in dialect.dispatch.do_connect:
                    connection = cast(
                        DBAPIConnection,
                        fn(dialect, connection_record, cargs, cparams),
                    )
                    if connection is not None:
                        return connection

            return dialect.connect(*cargs, **cparams)

        creator = pop_kwarg("creator", connect)

        poolclass = pop_kwarg("poolclass", None)
        if poolclass is None:
            poolclass = dialect.get_dialect_pool_class(u)
        pool_args = {"dialect": dialect}

        # consume pool arguments from kwargs, translating a few of
        # the arguments
        for k in util.get_cls_kwargs(poolclass):
            tk = _pool_translate_kwargs.get(k, k)
            if tk in kwargs:
                pool_args[k] = pop_kwarg(tk)

        for plugin in plugins:
            plugin.handle_pool_kwargs(poolclass, pool_args)

        pool = poolclass(creator, **pool_args)
    else:
        pool._dialect = dialect

    if (
        hasattr(pool, "_is_asyncio")
        and pool._is_asyncio is not dialect.is_async
    ):
        raise exc.ArgumentError(
            f"Pool class {pool.__class__.__name__} cannot be "
            f"used with {'non-' if not dialect.is_async else ''}"
            "asyncio engine",
            code="pcls",
        )

    # create engine.
    if not pop_kwarg("future", True):
        raise exc.ArgumentError(
            "The 'future' parameter passed to "
            "create_engine() may only be set to True."
        )

    engineclass = base.Engine

    engine_args = {}
    for k in util.get_cls_kwargs(engineclass):
        if k in kwargs:
            engine_args[k] = pop_kwarg(k)

    # internal flags used by the test suite for instrumenting / proxying
    # engines with mocks etc.
    _initialize = kwargs.pop("_initialize", True)

    # all kwargs should be consumed
    if kwargs:
        raise TypeError(
            "Invalid argument(s) %s sent to create_engine(), "
            "using configuration %s/%s/%s. Please check that the "
            "keyword arguments are appropriate for this combination "
            "of components."
            % (
                ",".join("'%s'" % k for k in kwargs),
                dialect.__class__.__name__,
                pool.__class__.__name__,
                engineclass.__name__,
            )
        )

    engine = engineclass(pool, dialect, u, **engine_args)

    if _initialize:
        do_on_connect = dialect.on_connect_url(u)
        if do_on_connect:

            def on_connect(
                dbapi_connection: DBAPIConnection,
                connection_record: ConnectionPoolEntry,
            ) -> None:
                assert do_on_connect is not None
                do_on_connect(dbapi_connection)

            event.listen(pool, "connect", on_connect)

        builtin_on_connect = dialect._builtin_onconnect()
        if builtin_on_connect:
            event.listen(pool, "connect", builtin_on_connect)

        def first_connect(
            dbapi_connection: DBAPIConnection,
            connection_record: ConnectionPoolEntry,
        ) -> None:
            c = base.Connection(
                engine,
                connection=_AdhocProxiedConnection(
                    dbapi_connection, connection_record
                ),
                _has_events=False,
                # reconnecting will be a reentrant condition, so if the
                # connection goes away, Connection is then closed
                _allow_revalidate=False,
                # dont trigger the autobegin sequence
                # within the up front dialect checks
                _allow_autobegin=False,
            )
            c._execution_options = util.EMPTY_DICT

            try:
                dialect.initialize(c)
            finally:
                # note that "invalidated" and "closed" are mutually
                # exclusive in 1.4 Connection.
                if not c.invalidated and not c.closed:
                    # transaction is rolled back otherwise, tested by
                    # test/dialect/postgresql/test_dialect.py
                    # ::MiscBackendTest::test_initial_transaction_state
                    dialect.do_rollback(c.connection)

        # previously, the "first_connect" event was used here, which was then
        # scaled back if the "on_connect" handler were present. now,
        # since "on_connect" is virtually always present, just use
        # "connect" event with once_unless_exception in all cases so that
        # the connection event flow is consistent in all cases.
        event.listen(
            pool, "connect", first_connect, _once_unless_exception=True
        )

    dialect_cls.engine_created(engine)
    if entrypoint is not dialect_cls:
        entrypoint.engine_created(engine)

    for plugin in plugins:
        plugin.engine_created(engine)

    return engine


def engine_from_config(
    configuration: Dict[str, Any], prefix: str = "sqlalchemy.", **kwargs: Any
) -> Engine:
    """Create a new Engine instance using a configuration dictionary.

    The dictionary is typically produced from a config file.

    The keys of interest to ``engine_from_config()`` should be prefixed, e.g.
    ``sqlalchemy.url``, ``sqlalchemy.echo``, etc. The 'prefix' argument
    indicates the prefix to be searched for. Each matching key (after the
    prefix is stripped) is treated as though it were the corresponding keyword
    argument to a :func:`_sa.create_engine` call.

    The only required key is (assuming the default prefix) ``sqlalchemy.url``,
    which provides the :ref:`database URL <database_urls>`.

    A select set of keyword arguments will be "coerced" to their
    expected type based on string values. The set of arguments
    is extensible per-dialect using the ``engine_config_types`` accessor.

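    E.g., a minimal sketch of typical usage (configuration values shown are
    illustrative)::

        config = {
            # keys matching the prefix are stripped and passed on to
            # create_engine(); string values are coerced where supported
            "sqlalchemy.url": "postgresql://scott:tiger@localhost/test",
            "sqlalchemy.echo": "true",
            "sqlalchemy.pool_recycle": "3600",
        }
        engine = engine_from_config(config, prefix="sqlalchemy.")
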
    :param configuration: A dictionary (typically produced from a config file,
      but this is not a requirement). Items whose keys start with the value
      of 'prefix' will have that prefix stripped, and will then be passed to
      :func:`_sa.create_engine`.

    :param prefix: Prefix to match and then strip from keys
      in 'configuration'.

    :param kwargs: Each keyword argument to ``engine_from_config()`` itself
      overrides the corresponding item taken from the 'configuration'
      dictionary. Keyword arguments should *not* be prefixed.

    """

    options = {
        key[len(prefix) :]: configuration[key]
        for key in configuration
        if key.startswith(prefix)
    }
    options["_coerce_config"] = True
    options.update(kwargs)
    url = options.pop("url")
    return create_engine(url, **options)


@overload
def create_pool_from_url(
    url: Union[str, URL],
    *,
    poolclass: Optional[Type[Pool]] = ...,
    logging_name: str = ...,
    pre_ping: bool = ...,
    size: int = ...,
    recycle: int = ...,
    reset_on_return: Optional[_ResetStyleArgType] = ...,
    timeout: float = ...,
    use_lifo: bool = ...,
    **kwargs: Any,
) -> Pool: ...


@overload
def create_pool_from_url(url: Union[str, URL], **kwargs: Any) -> Pool: ...


def create_pool_from_url(url: Union[str, URL], **kwargs: Any) -> Pool:
    """Create a pool instance from the given url.

    If ``poolclass`` is not provided the pool class used
    is selected using the dialect specified in the URL.

    The arguments passed to :func:`_sa.create_pool_from_url` are
    identical to the pool arguments passed to the :func:`_sa.create_engine`
    function.

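    E.g., a minimal sketch (URL and pool settings shown are illustrative)::

        from sqlalchemy import create_pool_from_url

        # un-prefixed pool arguments correspond to the pool_* arguments
        # accepted by create_engine()
        pool = create_pool_from_url(
            "postgresql://scott:tiger@localhost/test",
            size=10,
            pre_ping=True,
        )
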
    .. versionadded:: 2.0.10
    """

    for key in _pool_translate_kwargs:
        if key in kwargs:
            kwargs[_pool_translate_kwargs[key]] = kwargs.pop(key)

    engine = create_engine(url, **kwargs, _initialize=False)
    return engine.pool


_pool_translate_kwargs = immutabledict(
    {
        "logging_name": "pool_logging_name",
        "echo": "echo_pool",
        "timeout": "pool_timeout",
        "recycle": "pool_recycle",
        "events": "pool_events",  # deprecated
        "reset_on_return": "pool_reset_on_return",
        "pre_ping": "pool_pre_ping",
        "use_lifo": "pool_use_lifo",
    }
)