@@ -1,5 +1,5 @@
 # ext/asyncio/engine.py
-# Copyright (C) 2020-2025 the SQLAlchemy authors and contributors
+# Copyright (C) 2020-2023 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -41,8 +41,6 @@ from ...engine.base import NestedTransaction
 from ...engine.base import Transaction
 from ...exc import ArgumentError
 from ...util.concurrency import greenlet_spawn
-from ...util.typing import Concatenate
-from ...util.typing import ParamSpec
 
 if TYPE_CHECKING:
     from ...engine.cursor import CursorResult
@@ -63,7 +61,6 @@ if TYPE_CHECKING:
     from ...sql.base import Executable
     from ...sql.selectable import TypedReturnsRows
 
-_P = ParamSpec("_P")
 _T = TypeVar("_T", bound=Any)
 
 
@@ -198,7 +195,6 @@ class AsyncConnection(
     method of :class:`_asyncio.AsyncEngine`::
 
         from sqlalchemy.ext.asyncio import create_async_engine
-
         engine = create_async_engine("postgresql+asyncpg://user:pass@host/dbname")
 
         async with engine.connect() as conn:
@@ -255,7 +251,7 @@ class AsyncConnection(
 
     @classmethod
     def _regenerate_proxy_for_target(
-        cls, target: Connection, **additional_kw: Any  # noqa: U100
+        cls, target: Connection
     ) -> AsyncConnection:
         return AsyncConnection(
             AsyncEngine._retrieve_proxy_for_target(target.engine), target
@@ -418,12 +414,13 @@ class AsyncConnection(
         yield_per: int = ...,
         insertmanyvalues_page_size: int = ...,
         schema_translate_map: Optional[SchemaTranslateMapType] = ...,
-        preserve_rowcount: bool = False,
         **opt: Any,
-    ) -> AsyncConnection: ...
+    ) -> AsyncConnection:
+        ...
 
     @overload
-    async def execution_options(self, **opt: Any) -> AsyncConnection: ...
+    async def execution_options(self, **opt: Any) -> AsyncConnection:
+        ...
 
     async def execution_options(self, **opt: Any) -> AsyncConnection:
         r"""Set non-SQL options for the connection which take effect
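As a usage note for the execution_options() overloads above: the method is awaited and returns the same AsyncConnection. A minimal sketch, not part of the diff, assuming the aiosqlite driver is installed and using the standard isolation_level option as an illustrative value::

    import asyncio

    from sqlalchemy import text
    from sqlalchemy.ext.asyncio import create_async_engine


    async def main() -> None:
        engine = create_async_engine("sqlite+aiosqlite:///:memory:")  # assumed driver
        async with engine.connect() as conn:
            # applies options to the underlying Connection and returns self
            conn = await conn.execution_options(isolation_level="AUTOCOMMIT")
            result = await conn.execute(text("select 1"))
            print(result.scalar_one())
        await engine.dispose()


    asyncio.run(main())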
@@ -521,7 +518,8 @@ class AsyncConnection(
         parameters: Optional[_CoreAnyExecuteParams] = None,
         *,
         execution_options: Optional[CoreExecuteOptionsParameter] = None,
-    ) -> GeneratorStartableContext[AsyncResult[_T]]: ...
+    ) -> GeneratorStartableContext[AsyncResult[_T]]:
+        ...
 
     @overload
     def stream(
@@ -530,7 +528,8 @@ class AsyncConnection(
         parameters: Optional[_CoreAnyExecuteParams] = None,
         *,
         execution_options: Optional[CoreExecuteOptionsParameter] = None,
-    ) -> GeneratorStartableContext[AsyncResult[Any]]: ...
+    ) -> GeneratorStartableContext[AsyncResult[Any]]:
+        ...
 
     @asyncstartablecontext
     async def stream(
@@ -545,7 +544,7 @@ class AsyncConnection(
 
         E.g.::
 
-            result = await conn.stream(stmt)
+            result = await conn.stream(stmt):
             async for row in result:
                 print(f"{row}")
 
@@ -574,11 +573,6 @@ class AsyncConnection(
             :meth:`.AsyncConnection.stream_scalars`
 
         """
-        if not self.dialect.supports_server_side_cursors:
-            raise exc.InvalidRequestError(
-                "Cant use `stream` or `stream_scalars` with the current "
-                "dialect since it does not support server side cursors."
-            )
 
         result = await greenlet_spawn(
             self._proxied.execute,
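The guard on the removed side above makes stream() and stream_scalars() fail fast when the dialect lacks server-side cursor support. A sketch of the intended usage with a dialect that does support them; the asyncpg DSN and table name are illustrative placeholders, not part of the diff::

    import asyncio

    from sqlalchemy import text
    from sqlalchemy.ext.asyncio import create_async_engine


    async def main() -> None:
        # placeholder DSN; asyncpg is one dialect with server-side cursor support
        engine = create_async_engine("postgresql+asyncpg://user:pass@host/dbname")
        async with engine.connect() as conn:
            # rows are fetched incrementally from a server-side cursor rather
            # than buffered up front as with execute()
            result = await conn.stream(text("SELECT * FROM some_large_table"))
            async for row in result:
                print(row)
        await engine.dispose()


    asyncio.run(main())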
@@ -606,7 +600,8 @@ class AsyncConnection(
         parameters: Optional[_CoreAnyExecuteParams] = None,
         *,
         execution_options: Optional[CoreExecuteOptionsParameter] = None,
-    ) -> CursorResult[_T]: ...
+    ) -> CursorResult[_T]:
+        ...
 
     @overload
     async def execute(
@@ -615,7 +610,8 @@ class AsyncConnection(
         parameters: Optional[_CoreAnyExecuteParams] = None,
         *,
         execution_options: Optional[CoreExecuteOptionsParameter] = None,
-    ) -> CursorResult[Any]: ...
+    ) -> CursorResult[Any]:
+        ...
 
     async def execute(
         self,
@@ -671,7 +667,8 @@ class AsyncConnection(
         parameters: Optional[_CoreSingleExecuteParams] = None,
         *,
         execution_options: Optional[CoreExecuteOptionsParameter] = None,
-    ) -> Optional[_T]: ...
+    ) -> Optional[_T]:
+        ...
 
     @overload
     async def scalar(
@@ -680,7 +677,8 @@ class AsyncConnection(
         parameters: Optional[_CoreSingleExecuteParams] = None,
         *,
         execution_options: Optional[CoreExecuteOptionsParameter] = None,
-    ) -> Any: ...
+    ) -> Any:
+        ...
 
     async def scalar(
         self,
@@ -711,7 +709,8 @@ class AsyncConnection(
         parameters: Optional[_CoreAnyExecuteParams] = None,
         *,
         execution_options: Optional[CoreExecuteOptionsParameter] = None,
-    ) -> ScalarResult[_T]: ...
+    ) -> ScalarResult[_T]:
+        ...
 
     @overload
     async def scalars(
@@ -720,7 +719,8 @@ class AsyncConnection(
         parameters: Optional[_CoreAnyExecuteParams] = None,
         *,
         execution_options: Optional[CoreExecuteOptionsParameter] = None,
-    ) -> ScalarResult[Any]: ...
+    ) -> ScalarResult[Any]:
+        ...
 
     async def scalars(
         self,
@@ -752,7 +752,8 @@ class AsyncConnection(
         parameters: Optional[_CoreSingleExecuteParams] = None,
         *,
         execution_options: Optional[CoreExecuteOptionsParameter] = None,
-    ) -> GeneratorStartableContext[AsyncScalarResult[_T]]: ...
+    ) -> GeneratorStartableContext[AsyncScalarResult[_T]]:
+        ...
 
     @overload
     def stream_scalars(
@@ -761,7 +762,8 @@ class AsyncConnection(
         parameters: Optional[_CoreSingleExecuteParams] = None,
         *,
         execution_options: Optional[CoreExecuteOptionsParameter] = None,
-    ) -> GeneratorStartableContext[AsyncScalarResult[Any]]: ...
+    ) -> GeneratorStartableContext[AsyncScalarResult[Any]]:
+        ...
 
     @asyncstartablecontext
     async def stream_scalars(
@@ -817,12 +819,9 @@ class AsyncConnection(
         yield result.scalars()
 
     async def run_sync(
-        self,
-        fn: Callable[Concatenate[Connection, _P], _T],
-        *arg: _P.args,
-        **kw: _P.kwargs,
+        self, fn: Callable[..., _T], *arg: Any, **kw: Any
     ) -> _T:
-        '''Invoke the given synchronous (i.e. not async) callable,
+        """Invoke the given synchronous (i.e. not async) callable,
         passing a synchronous-style :class:`_engine.Connection` as the first
         argument.
 
@@ -832,26 +831,26 @@ class AsyncConnection(
         E.g.::
 
             def do_something_with_core(conn: Connection, arg1: int, arg2: str) -> str:
-                """A synchronous function that does not require awaiting
+                '''A synchronous function that does not require awaiting
 
                 :param conn: a Core SQLAlchemy Connection, used synchronously
 
                 :return: an optional return value is supported
 
-                """
-                conn.execute(some_table.insert().values(int_col=arg1, str_col=arg2))
+                '''
+                conn.execute(
+                    some_table.insert().values(int_col=arg1, str_col=arg2)
+                )
                 return "success"
 
 
             async def do_something_async(async_engine: AsyncEngine) -> None:
-                """an async function that uses awaiting"""
+                '''an async function that uses awaiting'''
 
                 async with async_engine.begin() as async_conn:
                     # run do_something_with_core() with a sync-style
                     # Connection, proxied into an awaitable
-                    return_code = await async_conn.run_sync(
-                        do_something_with_core, 5, "strval"
-                    )
+                    return_code = await async_conn.run_sync(do_something_with_core, 5, "strval")
                     print(return_code)
 
         This method maintains the asyncio event loop all the way through
@@ -882,11 +881,9 @@ class AsyncConnection(
 
             :ref:`session_run_sync`
 
-        '''  # noqa: E501
+        """  # noqa: E501
 
-        return await greenlet_spawn(
-            fn, self._proxied, *arg, _require_await=False, **kw
-        )
+        return await greenlet_spawn(fn, self._proxied, *arg, **kw)
 
     def __await__(self) -> Generator[Any, None, AsyncConnection]:
         return self.start().__await__()
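The two run_sync() signatures above differ only in typing: the Concatenate[Connection, _P] form lets a type checker validate the extra positional arguments forwarded to the callable, while Callable[..., _T] checks only the return type; runtime behavior is the same. A minimal sketch, not part of the diff, assuming the aiosqlite driver; table_names is a hypothetical helper::

    import asyncio

    from sqlalchemy import Connection
    from sqlalchemy import inspect
    from sqlalchemy.ext.asyncio import create_async_engine


    def table_names(conn: Connection, schema: str) -> list[str]:
        # runs under greenlet_spawn(); blocking inspection calls are fine here
        return inspect(conn).get_table_names(schema=schema)


    async def main() -> None:
        engine = create_async_engine("sqlite+aiosqlite:///:memory:")  # assumed driver
        async with engine.connect() as conn:
            # with Concatenate[Connection, _P], a checker can verify that "main"
            # matches the schema parameter; with Callable[..., _T] it cannot
            names = await conn.run_sync(table_names, "main")
            print(names)
        await engine.dispose()


    asyncio.run(main())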
@@ -931,7 +928,7 @@ class AsyncConnection(
         return self._proxied.invalidated
 
     @property
-    def dialect(self) -> Dialect:
+    def dialect(self) -> Any:
         r"""Proxy for the :attr:`_engine.Connection.dialect` attribute
         on behalf of the :class:`_asyncio.AsyncConnection` class.
 
@@ -940,7 +937,7 @@ class AsyncConnection(
         return self._proxied.dialect
 
     @dialect.setter
-    def dialect(self, attr: Dialect) -> None:
+    def dialect(self, attr: Any) -> None:
         self._proxied.dialect = attr
 
     @property
@@ -1001,7 +998,6 @@ class AsyncEngine(ProxyComparable[Engine], AsyncConnectable):
     :func:`_asyncio.create_async_engine` function::
 
         from sqlalchemy.ext.asyncio import create_async_engine
-
         engine = create_async_engine("postgresql+asyncpg://user:pass@host/dbname")
 
     .. versionadded:: 1.4
@@ -1041,9 +1037,7 @@ class AsyncEngine(ProxyComparable[Engine], AsyncConnectable):
         return self.sync_engine
 
     @classmethod
-    def _regenerate_proxy_for_target(
-        cls, target: Engine, **additional_kw: Any  # noqa: U100
-    ) -> AsyncEngine:
+    def _regenerate_proxy_for_target(cls, target: Engine) -> AsyncEngine:
         return AsyncEngine(target)
 
     @contextlib.asynccontextmanager
@@ -1060,6 +1054,7 @@ class AsyncEngine(ProxyComparable[Engine], AsyncConnectable):
                 )
                 await conn.execute(text("my_special_procedure(5)"))
 
+
         """
         conn = self.connect()
 
@@ -1105,10 +1100,12 @@ class AsyncEngine(ProxyComparable[Engine], AsyncConnectable):
         insertmanyvalues_page_size: int = ...,
         schema_translate_map: Optional[SchemaTranslateMapType] = ...,
         **opt: Any,
-    ) -> AsyncEngine: ...
+    ) -> AsyncEngine:
+        ...
 
     @overload
-    def execution_options(self, **opt: Any) -> AsyncEngine: ...
+    def execution_options(self, **opt: Any) -> AsyncEngine:
+        ...
 
     def execution_options(self, **opt: Any) -> AsyncEngine:
         """Return a new :class:`_asyncio.AsyncEngine` that will provide
@@ -1163,7 +1160,7 @@ class AsyncEngine(ProxyComparable[Engine], AsyncConnectable):
         This applies **only** to the built-in cache that is established
         via the :paramref:`_engine.create_engine.query_cache_size` parameter.
         It will not impact any dictionary caches that were passed via the
-        :paramref:`.Connection.execution_options.compiled_cache` parameter.
+        :paramref:`.Connection.execution_options.query_cache` parameter.
 
         .. versionadded:: 1.4
 
@@ -1346,7 +1343,7 @@ class AsyncTransaction(
 
     @classmethod
     def _regenerate_proxy_for_target(
-        cls, target: Transaction, **additional_kw: Any  # noqa: U100
+        cls, target: Transaction
     ) -> AsyncTransaction:
         sync_connection = target.connection
         sync_transaction = target
@@ -1421,17 +1418,19 @@
 
 
 @overload
-def _get_sync_engine_or_connection(async_engine: AsyncEngine) -> Engine: ...
+def _get_sync_engine_or_connection(async_engine: AsyncEngine) -> Engine:
+    ...
 
 
 @overload
 def _get_sync_engine_or_connection(
     async_engine: AsyncConnection,
-) -> Connection: ...
+) -> Connection:
+    ...
 
 
 def _get_sync_engine_or_connection(
-    async_engine: Union[AsyncEngine, AsyncConnection],
+    async_engine: Union[AsyncEngine, AsyncConnection]
 ) -> Union[Engine, Connection]:
     if isinstance(async_engine, AsyncConnection):
         return async_engine._proxied