main commit

2025-10-16 16:30:25 +09:00
parent 91c7e04474
commit 537e7b363f
1146 changed files with 45926 additions and 77196 deletions


@@ -1,5 +1,5 @@
# dialects/sqlite/pysqlite.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# sqlite/pysqlite.py
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -28,9 +28,7 @@ Connect Strings
---------------
The file specification for the SQLite database is taken as the "database"
portion of the URL. Note that the format of a SQLAlchemy url is:
.. sourcecode:: text
portion of the URL. Note that the format of a SQLAlchemy url is::
driver://user:pass@host/database
@@ -39,28 +37,25 @@ the **right** of the third slash. So connecting to a relative filepath
looks like::
# relative path
e = create_engine("sqlite:///path/to/database.db")
e = create_engine('sqlite:///path/to/database.db')
An absolute path, which is denoted by starting with a slash, means you
need **four** slashes::
# absolute path
e = create_engine("sqlite:////path/to/database.db")
e = create_engine('sqlite:////path/to/database.db')
To use a Windows path, regular drive specifications and backslashes can be
used. Double backslashes are probably needed::
# absolute path on Windows
e = create_engine("sqlite:///C:\\path\\to\\database.db")
e = create_engine('sqlite:///C:\\path\\to\\database.db')
To use sqlite ``:memory:`` database specify it as the filename using
``sqlite:///:memory:``. It's also the default if no filepath is
present, specifying only ``sqlite://`` and nothing else::
The sqlite ``:memory:`` identifier is the default if no filepath is
present. Specify ``sqlite://`` and nothing else::
# in-memory database (note three slashes)
e = create_engine("sqlite:///:memory:")
# also in-memory database
e2 = create_engine("sqlite://")
# in-memory database
e = create_engine('sqlite://')
.. _pysqlite_uri_connections:
@@ -100,9 +95,7 @@ Above, the pysqlite / sqlite3 DBAPI would be passed arguments as::
sqlite3.connect(
    "file:path/to/database?mode=ro&nolock=1",
    check_same_thread=True,
    timeout=10,
    uri=True,
    check_same_thread=True, timeout=10, uri=True
)
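
For orientation (the original ``create_engine()`` example sits above this
hunk and is not shown), a sketch of the general URL form this section
describes, with the same parameters split between the ``file:`` URI and
driver-level arguments; the exact parameter ordering here is an assumption::

    from sqlalchemy import create_engine

    # query parameters known to sqlite3.connect() (check_same_thread, timeout,
    # uri) become keyword arguments; the rest (mode, nolock) stay in the
    # "file:" URI that is passed as the database filename
    e = create_engine(
        "sqlite:///file:path/to/database"
        "?mode=ro&uri=true&nolock=1&check_same_thread=true&timeout=10"
    )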
Regarding future parameters added to either the Python or native drivers, new
@@ -148,11 +141,8 @@ as follows::
def regexp(a, b):
    return re.search(a, b) is not None

sqlite_connection.create_function(
    "regexp",
    2,
    regexp,
    "regexp", 2, regexp,
)
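
As a usage note, the ``regexp()`` function registered above is what backs the
:meth:`.ColumnOperators.regexp_match` operator on this backend; a small
self-contained sketch (the column name is purely illustrative)::

    from sqlalchemy import column, select

    # renders roughly as "x REGEXP ?" on SQLite, which in turn invokes the
    # Python-level regexp() function registered on the connection
    stmt = select(column("x")).where(column("x").regexp_match("^a.*z$"))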
There is currently no support for regular expression flags as a separate
@@ -193,12 +183,10 @@ Keeping in mind that pysqlite's parsing option is not recommended,
nor should be necessary, for use with SQLAlchemy, usage of PARSE_DECLTYPES
can be forced if one configures "native_datetime=True" on create_engine()::
engine = create_engine(
    "sqlite://",
    connect_args={
        "detect_types": sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES
    },
    native_datetime=True,
engine = create_engine('sqlite://',
    connect_args={'detect_types':
        sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES},
    native_datetime=True
)
With this flag enabled, the DATE and TIMESTAMP types (but note - not the
@@ -253,7 +241,6 @@ Pooling may be disabled for a file based database by specifying the
parameter::
from sqlalchemy import NullPool
engine = create_engine("sqlite:///myfile.db", poolclass=NullPool)
It's been observed that the :class:`.NullPool` implementation incurs an
@@ -273,12 +260,9 @@ globally, and the ``check_same_thread`` flag can be passed to Pysqlite
as ``False``::
from sqlalchemy.pool import StaticPool
engine = create_engine(
    "sqlite://",
    connect_args={"check_same_thread": False},
    poolclass=StaticPool,
)
engine = create_engine('sqlite://',
    connect_args={'check_same_thread':False},
    poolclass=StaticPool)
Note that using a ``:memory:`` database in multiple threads requires a recent
version of SQLite.
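
A short, self-contained sketch of the pattern this section describes (the
table name and thread count are arbitrary choices for illustration)::

    import threading

    from sqlalchemy import create_engine, text
    from sqlalchemy.pool import StaticPool

    engine = create_engine(
        "sqlite://",
        connect_args={"check_same_thread": False},
        poolclass=StaticPool,
    )

    with engine.begin() as conn:
        conn.exec_driver_sql("CREATE TABLE t (x INTEGER)")
        conn.exec_driver_sql("INSERT INTO t (x) VALUES (1)")

    def reader():
        # every thread checks out the same underlying :memory: connection
        with engine.connect() as conn:
            print(conn.scalar(text("SELECT x FROM t")))

    threads = [threading.Thread(target=reader) for _ in range(3)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()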
@@ -297,14 +281,14 @@ needed within multiple threads for this case::
# maintain the same connection per thread
from sqlalchemy.pool import SingletonThreadPool
engine = create_engine("sqlite:///mydb.db", poolclass=SingletonThreadPool)
engine = create_engine('sqlite:///mydb.db',
poolclass=SingletonThreadPool)
# maintain the same connection across all threads
from sqlalchemy.pool import StaticPool
engine = create_engine("sqlite:///mydb.db", poolclass=StaticPool)
engine = create_engine('sqlite:///mydb.db',
poolclass=StaticPool)
Note that :class:`.SingletonThreadPool` should be configured for the number
of threads that are to be used; beyond that number, connections will be
@@ -333,14 +317,13 @@ same column, use a custom type that will check each row individually::
from sqlalchemy import String
from sqlalchemy import TypeDecorator
class MixedBinary(TypeDecorator):
    impl = String
    cache_ok = True

    def process_result_value(self, value, dialect):
        if isinstance(value, str):
            value = bytes(value, "utf-8")
            value = bytes(value, 'utf-8')
        elif value is not None:
            value = bytes(value)
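
As a usage sketch, the ``MixedBinary`` type above would then be placed in
table metadata wherever ``LargeBinary`` would otherwise appear (the table and
column names below are illustrative only)::

    from sqlalchemy import Column, Integer, MetaData, Table

    metadata = MetaData()

    # hypothetical table; "data" stores the mixed string/bytes values
    files = Table(
        "files",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("data", MixedBinary()),
    )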
@@ -354,10 +337,74 @@ Then use the above ``MixedBinary`` datatype in the place where
Serializable isolation / Savepoints / Transactional DDL
-------------------------------------------------------
A newly revised version of this important section is now available
at the top level of the SQLAlchemy SQLite documentation, in the section
:ref:`sqlite_transactions`.
In the section :ref:`sqlite_concurrency`, we refer to the pysqlite
driver's assortment of issues that prevent several features of SQLite
from working correctly. The pysqlite DBAPI driver has several
long-standing bugs which impact the correctness of its transactional
behavior. In its default mode of operation, SQLite features such as
SERIALIZABLE isolation, transactional DDL, and SAVEPOINT support are
non-functional, and in order to use these features, workarounds must
be taken.
The issue is essentially that the driver attempts to second-guess the user's
intent, failing to start transactions and sometimes ending them prematurely, in
an effort to minimize the SQLite database's file locking behavior, even
though SQLite itself uses "shared" locks for read-only activities.
SQLAlchemy chooses not to alter this behavior by default, as it is the
long-expected behavior of the pysqlite driver; if and when the pysqlite
driver repairs these issues, that will be a stronger impetus for changing
SQLAlchemy's defaults as well.
The good news is that with a few events, we can implement transactional
support fully, by disabling pysqlite's feature entirely and emitting BEGIN
ourselves. This is achieved using two event listeners::
from sqlalchemy import create_engine, event

engine = create_engine("sqlite:///myfile.db")


@event.listens_for(engine, "connect")
def do_connect(dbapi_connection, connection_record):
    # disable pysqlite's emitting of the BEGIN statement entirely.
    # also stops it from emitting COMMIT before any DDL.
    dbapi_connection.isolation_level = None


@event.listens_for(engine, "begin")
def do_begin(conn):
    # emit our own BEGIN
    conn.exec_driver_sql("BEGIN")
.. warning:: When using the above recipe, it is advised to not use the
:paramref:`.Connection.execution_options.isolation_level` setting on
:class:`_engine.Connection` and :func:`_sa.create_engine`
with the SQLite driver,
as this option necessarily will also alter the DBAPI connection's
``.isolation_level`` setting.
Above, we intercept a new pysqlite connection and disable any transactional
integration. Then, at the point at which SQLAlchemy knows that transaction
scope is to begin, we emit ``"BEGIN"`` ourselves.
When we take control of ``"BEGIN"``, we can also control directly SQLite's
locking modes, introduced at
`BEGIN TRANSACTION <https://sqlite.org/lang_transaction.html>`_,
by adding the desired locking mode to our ``"BEGIN"``::
@event.listens_for(engine, "begin")
def do_begin(conn):
    conn.exec_driver_sql("BEGIN EXCLUSIVE")
.. seealso::
`BEGIN TRANSACTION <https://sqlite.org/lang_transaction.html>`_ -
on the SQLite site
`sqlite3 SELECT does not BEGIN a transaction <https://bugs.python.org/issue9924>`_ -
on the Python bug tracker
`sqlite3 module breaks transactions and potentially corrupts data <https://bugs.python.org/issue10740>`_ -
on the Python bug tracker
.. _pysqlite_udfs:
@@ -392,16 +439,12 @@ connection when it is created. That is accomplished with an event listener::
with engine.connect() as conn:
    print(conn.scalar(text("SELECT UDF()")))
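
For reference, a minimal sketch of the kind of listener the surrounding text
describes (the actual example is elided above this hunk; the function name
``udf`` and its return value are assumptions)::

    from sqlalchemy import create_engine, event, text

    engine = create_engine("sqlite://")

    def udf():
        # hypothetical user-defined function
        return "udf_ok"

    @event.listens_for(engine, "connect")
    def register_udf(dbapi_connection, connection_record):
        # register the Python function on each new raw sqlite3 connection
        dbapi_connection.create_function("udf", 0, udf)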
""" # noqa
from __future__ import annotations
import math
import os
import re
from typing import cast
from typing import Optional
from typing import TYPE_CHECKING
from typing import Union
from .base import DATE
from .base import DATETIME
@@ -411,13 +454,6 @@ from ... import pool
from ... import types as sqltypes
from ... import util
if TYPE_CHECKING:
from ...engine.interfaces import DBAPIConnection
from ...engine.interfaces import DBAPICursor
from ...engine.interfaces import DBAPIModule
from ...engine.url import URL
from ...pool.base import PoolProxiedConnection
class _SQLite_pysqliteTimeStamp(DATETIME):
def bind_processor(self, dialect):
@@ -471,7 +507,7 @@ class SQLiteDialect_pysqlite(SQLiteDialect):
return sqlite
@classmethod
def _is_url_file_db(cls, url: URL):
def _is_url_file_db(cls, url):
if (url.database and url.database != ":memory:") and (
url.query.get("mode", None) != "memory"
):
@@ -502,9 +538,6 @@ class SQLiteDialect_pysqlite(SQLiteDialect):
dbapi_connection.isolation_level = ""
return super().set_isolation_level(dbapi_connection, level)
def detect_autocommit_setting(self, dbapi_connection):
return dbapi_connection.isolation_level is None
def on_connect(self):
def regexp(a, b):
if b is None:
@@ -604,13 +637,7 @@ class SQLiteDialect_pysqlite(SQLiteDialect):
return ([filename], pysqlite_opts)
def is_disconnect(
self,
e: DBAPIModule.Error,
connection: Optional[Union[PoolProxiedConnection, DBAPIConnection]],
cursor: Optional[DBAPICursor],
) -> bool:
self.dbapi = cast("DBAPIModule", self.dbapi)
def is_disconnect(self, e, connection, cursor):
return isinstance(
e, self.dbapi.ProgrammingError
) and "Cannot operate on a closed database." in str(e)