API refactor

2025-10-07 16:25:52 +09:00
parent 76d0d86211
commit 91c7e04474
1171 changed files with 81940 additions and 44117 deletions


@@ -1,5 +1,5 @@
# connectors/__init__.py
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under


@@ -1,5 +1,5 @@
# connectors/aioodbc.py
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -20,6 +20,7 @@ from .. import util
from ..util.concurrency import await_fallback
from ..util.concurrency import await_only
if TYPE_CHECKING:
from ..engine.interfaces import ConnectArgsType
from ..engine.url import URL
@@ -58,6 +59,15 @@ class AsyncAdapt_aioodbc_connection(AsyncAdapt_dbapi_connection):
self._connection._conn.autocommit = value
def ping(self, reconnect):
return self.await_(self._connection.ping(reconnect))
def add_output_converter(self, *arg, **kw):
self._connection.add_output_converter(*arg, **kw)
def character_set_name(self):
return self._connection.character_set_name()
def cursor(self, server_side=False):
# aioodbc sets connection=None when closed and just fails with
# AttributeError here. Here we use the same ProgrammingError +
@@ -170,18 +180,5 @@ class aiodbcConnector(PyODBCConnector):
else:
return pool.AsyncAdaptedQueuePool
def _do_isolation_level(self, connection, autocommit, isolation_level):
connection.set_autocommit(autocommit)
connection.set_isolation_level(isolation_level)
def _do_autocommit(self, connection, value):
connection.set_autocommit(value)
def set_readonly(self, connection, value):
connection.set_read_only(value)
def set_deferrable(self, connection, value):
connection.set_deferrable(value)
def get_driver_connection(self, connection):
return connection._connection
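For illustration, a minimal sketch of the delegation pattern the adapter methods above follow, written against a made-up async driver class (FakeAsyncConn) rather than the real aioodbc API: coroutine methods are driven to completion through an await_ helper, while plain synchronous driver methods are delegated directly.

# Sketch only -- FakeAsyncConn is a made-up stand-in, not the aioodbc API.
import asyncio


class FakeAsyncConn:
    async def ping(self, reconnect: bool) -> None:
        await asyncio.sleep(0)  # pretend a network round trip happens here

    def add_output_converter(self, sqltype: object, func: object) -> None:
        # synchronous on the driver side, so no awaiting is needed
        pass


class BlockingAdapter:
    """Expose a sync, pyodbc-like surface over an async connection."""

    def __init__(self, connection: FakeAsyncConn) -> None:
        self._connection = connection
        self.await_ = asyncio.run  # stand-in for SQLAlchemy's await_ helper

    def ping(self, reconnect: bool) -> None:
        # coroutine: drive it to completion synchronously
        self.await_(self._connection.ping(reconnect))

    def add_output_converter(self, *arg: object, **kw: object) -> None:
        # plain method: delegate directly
        self._connection.add_output_converter(*arg, **kw)


adapter = BlockingAdapter(FakeAsyncConn())
adapter.ping(reconnect=False)
adapter.add_output_converter(None, str)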


@@ -1,22 +1,124 @@
# connectors/asyncio.py
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors
"""generic asyncio-adapted versions of DBAPI connection and cursor"""
from __future__ import annotations
import asyncio
import collections
import itertools
import sys
from typing import Any
from typing import AsyncIterator
from typing import Deque
from typing import Iterator
from typing import NoReturn
from typing import Optional
from typing import Sequence
from typing import TYPE_CHECKING
from ..engine import AdaptedConnection
from ..util.concurrency import asyncio
from ..util.concurrency import await_fallback
from ..util.concurrency import await_only
from ..util.typing import Protocol
if TYPE_CHECKING:
from ..engine.interfaces import _DBAPICursorDescription
from ..engine.interfaces import _DBAPIMultiExecuteParams
from ..engine.interfaces import _DBAPISingleExecuteParams
from ..engine.interfaces import DBAPIModule
from ..util.typing import Self
class AsyncIODBAPIConnection(Protocol):
"""protocol representing an async adapted version of a
:pep:`249` database connection.
"""
# note that async DBAPIs don't agree if close() should be awaitable,
# so it is omitted here and picked up by the __getattr__ hook below
async def commit(self) -> None: ...
def cursor(self, *args: Any, **kwargs: Any) -> AsyncIODBAPICursor: ...
async def rollback(self) -> None: ...
def __getattr__(self, key: str) -> Any: ...
def __setattr__(self, key: str, value: Any) -> None: ...
class AsyncIODBAPICursor(Protocol):
"""protocol representing an async adapted version
of a :pep:`249` database cursor.
"""
def __aenter__(self) -> Any: ...
@property
def description(
self,
) -> _DBAPICursorDescription:
"""The description attribute of the Cursor."""
...
@property
def rowcount(self) -> int: ...
arraysize: int
lastrowid: int
async def close(self) -> None: ...
async def execute(
self,
operation: Any,
parameters: Optional[_DBAPISingleExecuteParams] = None,
) -> Any: ...
async def executemany(
self,
operation: Any,
parameters: _DBAPIMultiExecuteParams,
) -> Any: ...
async def fetchone(self) -> Optional[Any]: ...
async def fetchmany(self, size: Optional[int] = ...) -> Sequence[Any]: ...
async def fetchall(self) -> Sequence[Any]: ...
async def setinputsizes(self, sizes: Sequence[Any]) -> None: ...
def setoutputsize(self, size: Any, column: Any) -> None: ...
async def callproc(
self, procname: str, parameters: Sequence[Any] = ...
) -> Any: ...
async def nextset(self) -> Optional[bool]: ...
def __aiter__(self) -> AsyncIterator[Any]: ...
class AsyncAdapt_dbapi_module:
if TYPE_CHECKING:
Error = DBAPIModule.Error
OperationalError = DBAPIModule.OperationalError
InterfaceError = DBAPIModule.InterfaceError
IntegrityError = DBAPIModule.IntegrityError
def __getattr__(self, key: str) -> Any: ...
class AsyncAdapt_dbapi_cursor:
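The connection and cursor classes above are Protocol definitions, so an async driver satisfies them structurally, by providing matching methods, rather than by inheriting from them. A reduced, hypothetical illustration of the same idea, using a two-method protocol instead of the full PEP 249 surface:

# Reduced illustration -- MiniAsyncCursor and MemoryCursor are hypothetical names.
from typing import Any, Optional, Protocol


class MiniAsyncCursor(Protocol):
    async def execute(
        self, operation: Any, parameters: Optional[Any] = None
    ) -> Any: ...
    async def fetchone(self) -> Optional[Any]: ...


class MemoryCursor:
    """Satisfies MiniAsyncCursor structurally; it never subclasses it."""

    def __init__(self) -> None:
        self._rows = [("x",), ("y",)]

    async def execute(self, operation: Any, parameters: Optional[Any] = None) -> Any:
        return self

    async def fetchone(self) -> Optional[Any]:
        return self._rows.pop(0) if self._rows else None


cursor: MiniAsyncCursor = MemoryCursor()  # accepted by a static type checker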
@@ -29,99 +131,136 @@ class AsyncAdapt_dbapi_cursor:
"_rows",
)
def __init__(self, adapt_connection):
_cursor: AsyncIODBAPICursor
_adapt_connection: AsyncAdapt_dbapi_connection
_connection: AsyncIODBAPIConnection
_rows: Deque[Any]
def __init__(self, adapt_connection: AsyncAdapt_dbapi_connection):
self._adapt_connection = adapt_connection
self._connection = adapt_connection._connection
self.await_ = adapt_connection.await_
cursor = self._connection.cursor()
self._cursor = self.await_(cursor.__aenter__())
self._rows = collections.deque()
cursor = self._make_new_cursor(self._connection)
self._cursor = self._aenter_cursor(cursor)
if not self.server_side:
self._rows = collections.deque()
def _aenter_cursor(self, cursor: AsyncIODBAPICursor) -> AsyncIODBAPICursor:
return self.await_(cursor.__aenter__()) # type: ignore[no-any-return]
def _make_new_cursor(
self, connection: AsyncIODBAPIConnection
) -> AsyncIODBAPICursor:
return connection.cursor()
@property
def description(self):
def description(self) -> Optional[_DBAPICursorDescription]:
return self._cursor.description
@property
def rowcount(self):
def rowcount(self) -> int:
return self._cursor.rowcount
@property
def arraysize(self):
def arraysize(self) -> int:
return self._cursor.arraysize
@arraysize.setter
def arraysize(self, value):
def arraysize(self, value: int) -> None:
self._cursor.arraysize = value
@property
def lastrowid(self):
def lastrowid(self) -> int:
return self._cursor.lastrowid
def close(self):
def close(self) -> None:
# note we aren't actually closing the cursor here,
# we are just letting GC do it. see notes in aiomysql dialect
self._rows.clear()
def execute(self, operation, parameters=None):
return self.await_(self._execute_async(operation, parameters))
def execute(
self,
operation: Any,
parameters: Optional[_DBAPISingleExecuteParams] = None,
) -> Any:
try:
return self.await_(self._execute_async(operation, parameters))
except Exception as error:
self._adapt_connection._handle_exception(error)
def executemany(self, operation, seq_of_parameters):
return self.await_(
self._executemany_async(operation, seq_of_parameters)
)
def executemany(
self,
operation: Any,
seq_of_parameters: _DBAPIMultiExecuteParams,
) -> Any:
try:
return self.await_(
self._executemany_async(operation, seq_of_parameters)
)
except Exception as error:
self._adapt_connection._handle_exception(error)
async def _execute_async(self, operation, parameters):
async def _execute_async(
self, operation: Any, parameters: Optional[_DBAPISingleExecuteParams]
) -> Any:
async with self._adapt_connection._execute_mutex:
result = await self._cursor.execute(operation, parameters or ())
if parameters is None:
result = await self._cursor.execute(operation)
else:
result = await self._cursor.execute(operation, parameters)
if self._cursor.description and not self.server_side:
# aioodbc has a "fake" async result, so we have to pull it out
# of that here since our default result is not async.
# we could just as easily grab "_rows" here and be done with it
# but this is safer.
self._rows = collections.deque(await self._cursor.fetchall())
return result
async def _executemany_async(self, operation, seq_of_parameters):
async def _executemany_async(
self,
operation: Any,
seq_of_parameters: _DBAPIMultiExecuteParams,
) -> Any:
async with self._adapt_connection._execute_mutex:
return await self._cursor.executemany(operation, seq_of_parameters)
def nextset(self):
def nextset(self) -> None:
self.await_(self._cursor.nextset())
if self._cursor.description and not self.server_side:
self._rows = collections.deque(
self.await_(self._cursor.fetchall())
)
def setinputsizes(self, *inputsizes):
def setinputsizes(self, *inputsizes: Any) -> None:
# NOTE: this is overridden in aioodbc due to
# see https://github.com/aio-libs/aioodbc/issues/451
# right now
return self.await_(self._cursor.setinputsizes(*inputsizes))
def __iter__(self):
def __enter__(self) -> Self:
return self
def __exit__(self, type_: Any, value: Any, traceback: Any) -> None:
self.close()
def __iter__(self) -> Iterator[Any]:
while self._rows:
yield self._rows.popleft()
def fetchone(self):
def fetchone(self) -> Optional[Any]:
if self._rows:
return self._rows.popleft()
else:
return None
def fetchmany(self, size=None):
def fetchmany(self, size: Optional[int] = None) -> Sequence[Any]:
if size is None:
size = self.arraysize
rr = self._rows
return [rr.popleft() for _ in range(min(size, len(rr)))]
rr = iter(self._rows)
retval = list(itertools.islice(rr, 0, size))
self._rows = collections.deque(rr)
return retval
def fetchall(self):
def fetchall(self) -> Sequence[Any]:
retval = list(self._rows)
self._rows.clear()
return retval
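For illustration, the non-server-side cursor buffers every row into a deque right after execute, and fetchone/fetchmany/fetchall then serve rows from that buffer; the reworked fetchmany above drains it through itertools.islice. A standalone sketch of that behavior (the sample rows are made up):

# Standalone sketch of the buffered fetchmany; the sample rows are made up.
import collections
import itertools

rows = collections.deque([(1,), (2,), (3,), (4,), (5,)])


def fetchmany(size: int) -> list:
    global rows
    it = iter(rows)
    batch = list(itertools.islice(it, 0, size))  # take up to `size` buffered rows
    rows = collections.deque(it)                 # keep whatever was not consumed
    return batch


print(fetchmany(2))  # [(1,), (2,)]
print(fetchmany(2))  # [(3,), (4,)]
print(fetchmany(2))  # [(5,)]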
@@ -131,75 +270,78 @@ class AsyncAdapt_dbapi_ss_cursor(AsyncAdapt_dbapi_cursor):
__slots__ = ()
server_side = True
def __init__(self, adapt_connection):
self._adapt_connection = adapt_connection
self._connection = adapt_connection._connection
self.await_ = adapt_connection.await_
cursor = self._connection.cursor()
self._cursor = self.await_(cursor.__aenter__())
def close(self):
def close(self) -> None:
if self._cursor is not None:
self.await_(self._cursor.close())
self._cursor = None
self._cursor = None # type: ignore
def fetchone(self):
def fetchone(self) -> Optional[Any]:
return self.await_(self._cursor.fetchone())
def fetchmany(self, size=None):
def fetchmany(self, size: Optional[int] = None) -> Any:
return self.await_(self._cursor.fetchmany(size=size))
def fetchall(self):
def fetchall(self) -> Sequence[Any]:
return self.await_(self._cursor.fetchall())
def __iter__(self) -> Iterator[Any]:
iterator = self._cursor.__aiter__()
while True:
try:
yield self.await_(iterator.__anext__())
except StopAsyncIteration:
break
class AsyncAdapt_dbapi_connection(AdaptedConnection):
_cursor_cls = AsyncAdapt_dbapi_cursor
_ss_cursor_cls = AsyncAdapt_dbapi_ss_cursor
await_ = staticmethod(await_only)
__slots__ = ("dbapi", "_execute_mutex")
def __init__(self, dbapi, connection):
_connection: AsyncIODBAPIConnection
def __init__(self, dbapi: Any, connection: AsyncIODBAPIConnection):
self.dbapi = dbapi
self._connection = connection
self._execute_mutex = asyncio.Lock()
def ping(self, reconnect):
return self.await_(self._connection.ping(reconnect))
def add_output_converter(self, *arg, **kw):
self._connection.add_output_converter(*arg, **kw)
def character_set_name(self):
return self._connection.character_set_name()
@property
def autocommit(self):
return self._connection.autocommit
@autocommit.setter
def autocommit(self, value):
# https://github.com/aio-libs/aioodbc/issues/448
# self._connection.autocommit = value
self._connection._conn.autocommit = value
def cursor(self, server_side=False):
def cursor(self, server_side: bool = False) -> AsyncAdapt_dbapi_cursor:
if server_side:
return self._ss_cursor_cls(self)
else:
return self._cursor_cls(self)
def rollback(self):
self.await_(self._connection.rollback())
def execute(
self,
operation: Any,
parameters: Optional[_DBAPISingleExecuteParams] = None,
) -> Any:
"""lots of DBAPIs seem to provide this, so include it"""
cursor = self.cursor()
cursor.execute(operation, parameters)
return cursor
def commit(self):
self.await_(self._connection.commit())
def _handle_exception(self, error: Exception) -> NoReturn:
exc_info = sys.exc_info()
raise error.with_traceback(exc_info[2])
def rollback(self) -> None:
try:
self.await_(self._connection.rollback())
except Exception as error:
self._handle_exception(error)
def commit(self) -> None:
try:
self.await_(self._connection.commit())
except Exception as error:
self._handle_exception(error)
def close(self):
def close(self) -> None:
self.await_(self._connection.close())
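For illustration, rollback and commit now funnel failures through _handle_exception, which re-raises with the active traceback and which driver-specific subclasses can override to translate exceptions. A minimal sketch of that hook, with made-up DriverError and AdapterError exception types:

# Minimal sketch of the _handle_exception hook; the exception types are made up.
import sys
from typing import NoReturn


class DriverError(Exception):
    pass


class AdapterError(Exception):
    pass


class BaseAdapter:
    def _handle_exception(self, error: Exception) -> NoReturn:
        # default behavior: re-raise the original error with the active traceback
        exc_info = sys.exc_info()
        raise error.with_traceback(exc_info[2])

    def commit(self) -> None:
        try:
            self._do_commit()
        except Exception as error:
            self._handle_exception(error)

    def _do_commit(self) -> None:
        raise DriverError("lost connection")


class TranslatingAdapter(BaseAdapter):
    def _handle_exception(self, error: Exception) -> NoReturn:
        # a subclass can translate driver errors into its own exception type
        if isinstance(error, DriverError):
            raise AdapterError(str(error)) from error
        super()._handle_exception(error)


try:
    TranslatingAdapter().commit()
except AdapterError as err:
    print("translated:", err)  # translated: lost connection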


@@ -1,5 +1,5 @@
# connectors/pyodbc.py
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -8,7 +8,6 @@
from __future__ import annotations
import re
from types import ModuleType
import typing
from typing import Any
from typing import Dict
@@ -29,6 +28,7 @@ from ..engine import URL
from ..sql.type_api import TypeEngine
if typing.TYPE_CHECKING:
from ..engine.interfaces import DBAPIModule
from ..engine.interfaces import IsolationLevel
@@ -48,15 +48,13 @@ class PyODBCConnector(Connector):
# hold the desired driver name
pyodbc_driver_name: Optional[str] = None
dbapi: ModuleType
def __init__(self, use_setinputsizes: bool = False, **kw: Any):
super().__init__(**kw)
if use_setinputsizes:
self.bind_typing = interfaces.BindTyping.SETINPUTSIZES
@classmethod
def import_dbapi(cls) -> ModuleType:
def import_dbapi(cls) -> DBAPIModule:
return __import__("pyodbc")
def create_connect_args(self, url: URL) -> ConnectArgsType:
@@ -150,7 +148,7 @@ class PyODBCConnector(Connector):
],
cursor: Optional[interfaces.DBAPICursor],
) -> bool:
if isinstance(e, self.dbapi.ProgrammingError):
if isinstance(e, self.loaded_dbapi.ProgrammingError):
return "The cursor's connection has been closed." in str(
e
) or "Attempt to use a closed connection." in str(e)
@@ -217,19 +215,19 @@ class PyODBCConnector(Connector):
cursor.setinputsizes(
[
(dbtype, None, None)
if not isinstance(dbtype, tuple)
else dbtype
(
(dbtype, None, None)
if not isinstance(dbtype, tuple)
else dbtype
)
for key, dbtype, sqltype in list_of_tuples
]
)
def get_isolation_level_values(
self, dbapi_connection: interfaces.DBAPIConnection
self, dbapi_conn: interfaces.DBAPIConnection
) -> List[IsolationLevel]:
return super().get_isolation_level_values(dbapi_connection) + [
"AUTOCOMMIT"
]
return [*super().get_isolation_level_values(dbapi_conn), "AUTOCOMMIT"]
def set_isolation_level(
self,
@@ -245,3 +243,8 @@ class PyODBCConnector(Connector):
else:
dbapi_connection.autocommit = False
super().set_isolation_level(dbapi_connection, level)
def detect_autocommit_setting(
self, dbapi_conn: interfaces.DBAPIConnection
) -> bool:
return bool(dbapi_conn.autocommit)
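For illustration, the connector treats AUTOCOMMIT as one extra isolation-level value layered on top of the driver's autocommit flag, and the new detect_autocommit_setting simply reads that flag back. A reduced sketch of the pattern, with a made-up connection object and made-up base values:

# Reduced sketch; FakeConn and the base level list are made up.
from typing import Any, List


class FakeConn:
    autocommit = False


class BaseConnector:
    def get_isolation_level_values(self, dbapi_conn: Any) -> List[str]:
        return ["READ COMMITTED", "SERIALIZABLE"]

    def set_isolation_level(self, dbapi_connection: Any, level: str) -> None:
        dbapi_connection.isolation_level = level  # pretend driver call


class AutocommitConnector(BaseConnector):
    def get_isolation_level_values(self, dbapi_conn: Any) -> List[str]:
        # extend the parent's list instead of replacing it
        return [*super().get_isolation_level_values(dbapi_conn), "AUTOCOMMIT"]

    def set_isolation_level(self, dbapi_connection: Any, level: str) -> None:
        if level == "AUTOCOMMIT":
            dbapi_connection.autocommit = True
        else:
            dbapi_connection.autocommit = False
            super().set_isolation_level(dbapi_connection, level)

    def detect_autocommit_setting(self, dbapi_conn: Any) -> bool:
        return bool(dbapi_conn.autocommit)


conn = FakeConn()
connector = AutocommitConnector()
connector.set_isolation_level(conn, "AUTOCOMMIT")
print(connector.detect_autocommit_setting(conn))  # True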