This commit is contained in:
@@ -1,5 +1,5 @@
|
||||
# testing/__init__.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
@@ -83,7 +83,6 @@ from .util import provide_metadata
|
||||
from .util import resolve_lambda
|
||||
from .util import rowset
|
||||
from .util import run_as_contextmanager
|
||||
from .util import skip_if_timeout
|
||||
from .util import teardown_events
|
||||
from .warnings import assert_warnings
|
||||
from .warnings import warn_test_suite
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# testing/assertions.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
@@ -274,8 +274,8 @@ def int_within_variance(expected, received, variance):
|
||||
)
|
||||
|
||||
|
||||
def eq_regex(a, b, msg=None, flags=0):
|
||||
assert re.match(b, a, flags), msg or "%r !~ %r" % (a, b)
|
||||
def eq_regex(a, b, msg=None):
|
||||
assert re.match(b, a), msg or "%r !~ %r" % (a, b)
|
||||
|
||||
|
||||
def eq_(a, b, msg=None):
|
||||
@@ -513,7 +513,6 @@ class AssertsCompiledSQL:
|
||||
use_default_dialect=False,
|
||||
allow_dialect_select=False,
|
||||
supports_default_values=True,
|
||||
supports_native_boolean=False,
|
||||
supports_default_metavalue=True,
|
||||
literal_binds=False,
|
||||
render_postcompile=False,
|
||||
@@ -528,7 +527,6 @@ class AssertsCompiledSQL:
|
||||
dialect = default.DefaultDialect()
|
||||
dialect.supports_default_values = supports_default_values
|
||||
dialect.supports_default_metavalue = supports_default_metavalue
|
||||
dialect.supports_native_boolean = supports_native_boolean
|
||||
elif allow_dialect_select:
|
||||
dialect = None
|
||||
else:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# testing/assertsql.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
@@ -88,9 +88,9 @@ class CompiledSQL(SQLMatchRule):
|
||||
dialect.supports_default_metavalue = True
|
||||
|
||||
if self.enable_returning:
|
||||
dialect.insert_returning = dialect.update_returning = (
|
||||
dialect.delete_returning
|
||||
) = True
|
||||
dialect.insert_returning = (
|
||||
dialect.update_returning
|
||||
) = dialect.delete_returning = True
|
||||
dialect.use_insertmanyvalues = True
|
||||
dialect.supports_multivalues_insert = True
|
||||
dialect.update_returning_multifrom = True
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# testing/asyncio.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
@@ -24,21 +24,16 @@ from functools import wraps
|
||||
import inspect
|
||||
|
||||
from . import config
|
||||
from ..util.concurrency import _AsyncUtil
|
||||
from ..util.concurrency import _util_async_run
|
||||
from ..util.concurrency import _util_async_run_coroutine_function
|
||||
|
||||
# may be set to False if the
|
||||
# --disable-asyncio flag is passed to the test runner.
|
||||
ENABLE_ASYNCIO = True
|
||||
_async_util = _AsyncUtil() # it has lazy init so just always create one
|
||||
|
||||
|
||||
def _shutdown():
|
||||
"""called when the test finishes"""
|
||||
_async_util.close()
|
||||
|
||||
|
||||
def _run_coroutine_function(fn, *args, **kwargs):
|
||||
return _async_util.run(fn, *args, **kwargs)
|
||||
return _util_async_run_coroutine_function(fn, *args, **kwargs)
|
||||
|
||||
|
||||
def _assume_async(fn, *args, **kwargs):
|
||||
@@ -55,7 +50,7 @@ def _assume_async(fn, *args, **kwargs):
|
||||
if not ENABLE_ASYNCIO:
|
||||
return fn(*args, **kwargs)
|
||||
|
||||
return _async_util.run_in_greenlet(fn, *args, **kwargs)
|
||||
return _util_async_run(fn, *args, **kwargs)
|
||||
|
||||
|
||||
def _maybe_async_provisioning(fn, *args, **kwargs):
|
||||
@@ -74,7 +69,7 @@ def _maybe_async_provisioning(fn, *args, **kwargs):
|
||||
return fn(*args, **kwargs)
|
||||
|
||||
if config.any_async:
|
||||
return _async_util.run_in_greenlet(fn, *args, **kwargs)
|
||||
return _util_async_run(fn, *args, **kwargs)
|
||||
else:
|
||||
return fn(*args, **kwargs)
|
||||
|
||||
@@ -94,7 +89,7 @@ def _maybe_async(fn, *args, **kwargs):
|
||||
is_async = config._current.is_async
|
||||
|
||||
if is_async:
|
||||
return _async_util.run_in_greenlet(fn, *args, **kwargs)
|
||||
return _util_async_run(fn, *args, **kwargs)
|
||||
else:
|
||||
return fn(*args, **kwargs)
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# testing/config.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
@@ -122,9 +122,7 @@ def combinations(
|
||||
passed, each argument combination is turned into a pytest.param() object,
|
||||
mapping the elements of the argument tuple to produce an id based on a
|
||||
character value in the same position within the string template using the
|
||||
following scheme:
|
||||
|
||||
.. sourcecode:: text
|
||||
following scheme::
|
||||
|
||||
i - the given argument is a string that is part of the id only, don't
|
||||
pass it as an argument
|
||||
@@ -148,7 +146,7 @@ def combinations(
|
||||
(operator.ne, "ne"),
|
||||
(operator.gt, "gt"),
|
||||
(operator.lt, "lt"),
|
||||
id_="na",
|
||||
id_="na"
|
||||
)
|
||||
def test_operator(self, opfunc, name):
|
||||
pass
|
||||
@@ -179,7 +177,8 @@ class Variation:
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
|
||||
def __getattr__(self, key: str) -> bool: ...
|
||||
def __getattr__(self, key: str) -> bool:
|
||||
...
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
@@ -230,9 +229,14 @@ def variation(argname_or_fn, cases=None):
|
||||
|
||||
@testing.variation("querytyp", ["select", "subquery", "legacy_query"])
|
||||
@testing.variation("lazy", ["select", "raise", "raise_on_sql"])
|
||||
def test_thing(self, querytyp, lazy, decl_base):
|
||||
def test_thing(
|
||||
self,
|
||||
querytyp,
|
||||
lazy,
|
||||
decl_base
|
||||
):
|
||||
class Thing(decl_base):
|
||||
__tablename__ = "thing"
|
||||
__tablename__ = 'thing'
|
||||
|
||||
# use name directly
|
||||
rel = relationship("Rel", lazy=lazy.name)
|
||||
@@ -247,6 +251,7 @@ def variation(argname_or_fn, cases=None):
|
||||
else:
|
||||
querytyp.fail()
|
||||
|
||||
|
||||
The variable provided is a slots object of boolean variables, as well
|
||||
as the name of the case itself under the attribute ".name"
|
||||
|
||||
@@ -264,11 +269,9 @@ def variation(argname_or_fn, cases=None):
|
||||
else:
|
||||
argname = argname_or_fn
|
||||
cases_plus_limitations = [
|
||||
(
|
||||
entry
|
||||
if (isinstance(entry, tuple) and len(entry) == 2)
|
||||
else (entry, None)
|
||||
)
|
||||
entry
|
||||
if (isinstance(entry, tuple) and len(entry) == 2)
|
||||
else (entry, None)
|
||||
for entry in cases
|
||||
]
|
||||
|
||||
@@ -277,11 +280,9 @@ def variation(argname_or_fn, cases=None):
|
||||
)
|
||||
return combinations(
|
||||
*[
|
||||
(
|
||||
(variation._name, variation, limitation)
|
||||
if limitation is not None
|
||||
else (variation._name, variation)
|
||||
)
|
||||
(variation._name, variation, limitation)
|
||||
if limitation is not None
|
||||
else (variation._name, variation)
|
||||
for variation, (case, limitation) in zip(
|
||||
variations, cases_plus_limitations
|
||||
)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# testing/engines.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
@@ -289,7 +289,8 @@ def testing_engine(
|
||||
options: Optional[Dict[str, Any]] = None,
|
||||
asyncio: Literal[False] = False,
|
||||
transfer_staticpool: bool = False,
|
||||
) -> Engine: ...
|
||||
) -> Engine:
|
||||
...
|
||||
|
||||
|
||||
@typing.overload
|
||||
@@ -298,7 +299,8 @@ def testing_engine(
|
||||
options: Optional[Dict[str, Any]] = None,
|
||||
asyncio: Literal[True] = True,
|
||||
transfer_staticpool: bool = False,
|
||||
) -> AsyncEngine: ...
|
||||
) -> AsyncEngine:
|
||||
...
|
||||
|
||||
|
||||
def testing_engine(
|
||||
@@ -330,18 +332,16 @@ def testing_engine(
|
||||
url = url or config.db.url
|
||||
|
||||
url = make_url(url)
|
||||
if options is None:
|
||||
if config.db is None or url.drivername == config.db.url.drivername:
|
||||
options = config.db_opts
|
||||
else:
|
||||
options = {}
|
||||
elif config.db is not None and url.drivername == config.db.url.drivername:
|
||||
default_opt = config.db_opts.copy()
|
||||
default_opt.update(options)
|
||||
|
||||
if (
|
||||
config.db is None or url.drivername == config.db.url.drivername
|
||||
) and config.db_opts:
|
||||
use_options = config.db_opts.copy()
|
||||
else:
|
||||
use_options = {}
|
||||
|
||||
if options is not None:
|
||||
use_options.update(options)
|
||||
|
||||
engine = create_engine(url, **use_options)
|
||||
engine = create_engine(url, **options)
|
||||
|
||||
if sqlite_savepoint and engine.name == "sqlite":
|
||||
# apply SQLite savepoint workaround
|
||||
@@ -370,12 +370,7 @@ def testing_engine(
|
||||
True # enable event blocks, helps with profiling
|
||||
)
|
||||
|
||||
if (
|
||||
isinstance(engine.pool, pool.QueuePool)
|
||||
and "pool" not in use_options
|
||||
and "pool_timeout" not in use_options
|
||||
and "max_overflow" not in use_options
|
||||
):
|
||||
if isinstance(engine.pool, pool.QueuePool):
|
||||
engine.pool._timeout = 0
|
||||
engine.pool._max_overflow = 0
|
||||
if use_reaper:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# testing/entities.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# testing/exclusions.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
@@ -205,12 +205,12 @@ class Predicate:
|
||||
if negate:
|
||||
bool_ = not negate
|
||||
return self.description % {
|
||||
"driver": (
|
||||
config.db.url.get_driver_name() if config else "<no driver>"
|
||||
),
|
||||
"database": (
|
||||
config.db.url.get_backend_name() if config else "<no database>"
|
||||
),
|
||||
"driver": config.db.url.get_driver_name()
|
||||
if config
|
||||
else "<no driver>",
|
||||
"database": config.db.url.get_backend_name()
|
||||
if config
|
||||
else "<no database>",
|
||||
"doesnt_support": "doesn't support" if bool_ else "does support",
|
||||
"does_support": "does support" if bool_ else "doesn't support",
|
||||
}
|
||||
@@ -392,8 +392,8 @@ def open(): # noqa
|
||||
return skip_if(BooleanPredicate(False, "mark as execute"))
|
||||
|
||||
|
||||
def closed(reason="marked as skip"):
|
||||
return skip_if(BooleanPredicate(True, reason))
|
||||
def closed():
|
||||
return skip_if(BooleanPredicate(True, "marked as skip"))
|
||||
|
||||
|
||||
def fails(reason=None):
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# testing/fixtures/__init__.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# testing/fixtures/base.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
@@ -14,7 +14,6 @@ from .. import assertions
|
||||
from .. import config
|
||||
from ..assertions import eq_
|
||||
from ..util import drop_all_tables_from_metadata
|
||||
from ..util import picklers
|
||||
from ... import Column
|
||||
from ... import func
|
||||
from ... import Integer
|
||||
@@ -195,10 +194,6 @@ class TestBase:
|
||||
|
||||
return go
|
||||
|
||||
@config.fixture(params=picklers())
|
||||
def picklers(self, request):
|
||||
yield request.param
|
||||
|
||||
@config.fixture()
|
||||
def metadata(self, request):
|
||||
"""Provide bound MetaData for a single test, dropping afterwards."""
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# testing/fixtures/mypy.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
@@ -86,11 +86,9 @@ class MypyTest(TestBase):
|
||||
"--config-file",
|
||||
os.path.join(
|
||||
use_cachedir,
|
||||
(
|
||||
"sqla_mypy_config.cfg"
|
||||
if use_plugin
|
||||
else "plain_mypy_config.cfg"
|
||||
),
|
||||
"sqla_mypy_config.cfg"
|
||||
if use_plugin
|
||||
else "plain_mypy_config.cfg",
|
||||
),
|
||||
]
|
||||
|
||||
@@ -143,9 +141,7 @@ class MypyTest(TestBase):
|
||||
from sqlalchemy.ext.mypy.util import mypy_14
|
||||
|
||||
expected_messages = []
|
||||
expected_re = re.compile(
|
||||
r"\s*# EXPECTED(_MYPY)?(_RE)?(_ROW)?(_TYPE)?: (.+)"
|
||||
)
|
||||
expected_re = re.compile(r"\s*# EXPECTED(_MYPY)?(_RE)?(_TYPE)?: (.+)")
|
||||
py_ver_re = re.compile(r"^#\s*PYTHON_VERSION\s?>=\s?(\d+\.\d+)")
|
||||
with open(path) as file_:
|
||||
current_assert_messages = []
|
||||
@@ -163,24 +159,9 @@ class MypyTest(TestBase):
|
||||
if m:
|
||||
is_mypy = bool(m.group(1))
|
||||
is_re = bool(m.group(2))
|
||||
is_row = bool(m.group(3))
|
||||
is_type = bool(m.group(4))
|
||||
|
||||
expected_msg = re.sub(r"# noqa[:]? ?.*", "", m.group(5))
|
||||
if is_row:
|
||||
expected_msg = re.sub(
|
||||
r"Row\[([^\]]+)\]",
|
||||
lambda m: f"tuple[{m.group(1)}, fallback=s"
|
||||
f"qlalchemy.engine.row.{m.group(0)}]",
|
||||
expected_msg,
|
||||
)
|
||||
# For some reason it does not use or syntax (|)
|
||||
expected_msg = re.sub(
|
||||
r"Optional\[(.*)\]",
|
||||
lambda m: f"Union[{m.group(1)}, None]",
|
||||
expected_msg,
|
||||
)
|
||||
is_type = bool(m.group(3))
|
||||
|
||||
expected_msg = re.sub(r"# noqa[:]? ?.*", "", m.group(4))
|
||||
if is_type:
|
||||
if not is_re:
|
||||
# the goal here is that we can cut-and-paste
|
||||
@@ -227,11 +208,9 @@ class MypyTest(TestBase):
|
||||
# skip first character which could be capitalized
|
||||
# "List item x not found" type of message
|
||||
expected_msg = expected_msg[0] + re.sub(
|
||||
(
|
||||
r"\b(List|Tuple|Dict|Set)\b"
|
||||
if is_type
|
||||
else r"\b(List|Tuple|Dict|Set|Type)\b"
|
||||
),
|
||||
r"\b(List|Tuple|Dict|Set)\b"
|
||||
if is_type
|
||||
else r"\b(List|Tuple|Dict|Set|Type)\b",
|
||||
lambda m: m.group(1).lower(),
|
||||
expected_msg[1:],
|
||||
)
|
||||
@@ -260,9 +239,7 @@ class MypyTest(TestBase):
|
||||
|
||||
return expected_messages
|
||||
|
||||
def _check_output(
|
||||
self, path, expected_messages, stdout: str, stderr, exitcode
|
||||
):
|
||||
def _check_output(self, path, expected_messages, stdout, stderr, exitcode):
|
||||
not_located = []
|
||||
filename = os.path.basename(path)
|
||||
if expected_messages:
|
||||
@@ -282,8 +259,7 @@ class MypyTest(TestBase):
|
||||
):
|
||||
while raw_lines:
|
||||
ol = raw_lines.pop(0)
|
||||
if not re.match(r".+\.py:\d+: note: +def .*", ol):
|
||||
raw_lines.insert(0, ol)
|
||||
if not re.match(r".+\.py:\d+: note: +def \[.*", ol):
|
||||
break
|
||||
elif re.match(
|
||||
r".+\.py:\d+: note: .*(?:perhaps|suggestion)", e, re.I
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# testing/fixtures/orm.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# testing/fixtures/sql.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
@@ -459,10 +459,6 @@ def insertmanyvalues_fixture(
|
||||
# by not having the other methods we assert that those aren't being
|
||||
# used
|
||||
|
||||
@property
|
||||
def description(self):
|
||||
return self.cursor.description
|
||||
|
||||
def fetchall(self):
|
||||
rows = self.cursor.fetchall()
|
||||
rows = list(rows)
|
||||
@@ -470,29 +466,22 @@ def insertmanyvalues_fixture(
|
||||
return rows
|
||||
|
||||
def _deliver_insertmanyvalues_batches(
|
||||
connection,
|
||||
cursor,
|
||||
statement,
|
||||
parameters,
|
||||
generic_setinputsizes,
|
||||
context,
|
||||
cursor, statement, parameters, generic_setinputsizes, context
|
||||
):
|
||||
if randomize_rows:
|
||||
cursor = RandomCursor(cursor)
|
||||
for batch in orig_dialect(
|
||||
connection,
|
||||
cursor,
|
||||
statement,
|
||||
parameters,
|
||||
generic_setinputsizes,
|
||||
context,
|
||||
cursor, statement, parameters, generic_setinputsizes, context
|
||||
):
|
||||
if warn_on_downgraded and batch.is_downgraded:
|
||||
util.warn("Batches were downgraded for sorted INSERT")
|
||||
|
||||
yield batch
|
||||
|
||||
def _exec_insertmany_context(dialect, context):
|
||||
def _exec_insertmany_context(
|
||||
dialect,
|
||||
context,
|
||||
):
|
||||
with mock.patch.object(
|
||||
dialect,
|
||||
"_deliver_insertmanyvalues_batches",
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# testing/pickleable.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
# testing/plugin/__init__.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
@@ -1,9 +1,3 @@
|
||||
# testing/plugin/bootstrap.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
"""
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# testing/plugin/plugin_base.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# plugin/plugin_base.py
|
||||
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
@@ -90,7 +90,7 @@ def setup_options(make_option):
|
||||
action="append",
|
||||
type=str,
|
||||
dest="dburi",
|
||||
help="Database uri. Multiple OK, first one is run by default.",
|
||||
help="Database uri. Multiple OK, " "first one is run by default.",
|
||||
)
|
||||
make_option(
|
||||
"--dbdriver",
|
||||
|
||||
@@ -1,9 +1,3 @@
|
||||
# testing/plugin/pytestplugin.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
from __future__ import annotations
|
||||
@@ -182,12 +176,6 @@ def pytest_sessionfinish(session):
|
||||
collect_types.dump_stats(session.config.option.dump_pyannotate)
|
||||
|
||||
|
||||
def pytest_unconfigure(config):
|
||||
from sqlalchemy.testing import asyncio
|
||||
|
||||
asyncio._shutdown()
|
||||
|
||||
|
||||
def pytest_collection_finish(session):
|
||||
if session.config.option.dump_pyannotate:
|
||||
from pyannotate_runtime import collect_types
|
||||
@@ -270,6 +258,7 @@ def pytest_collection_modifyitems(session, config, items):
|
||||
for test_class in test_classes:
|
||||
# transfer legacy __backend__ and __sparse_backend__ symbols
|
||||
# to be markers
|
||||
add_markers = set()
|
||||
if getattr(test_class.cls, "__backend__", False) or getattr(
|
||||
test_class.cls, "__only_on__", False
|
||||
):
|
||||
@@ -674,9 +663,9 @@ class PytestFixtureFunctions(plugin_base.FixtureFunctions):
|
||||
"i": lambda obj: obj,
|
||||
"r": repr,
|
||||
"s": str,
|
||||
"n": lambda obj: (
|
||||
obj.__name__ if hasattr(obj, "__name__") else type(obj).__name__
|
||||
),
|
||||
"n": lambda obj: obj.__name__
|
||||
if hasattr(obj, "__name__")
|
||||
else type(obj).__name__,
|
||||
}
|
||||
|
||||
def combinations(self, *arg_sets, **kw):
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# testing/profiling.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
|
||||
@@ -1,9 +1,3 @@
|
||||
# testing/provision.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
from __future__ import annotations
|
||||
@@ -74,7 +68,6 @@ def setup_config(db_url, options, file_config, follower_ident):
|
||||
# hooks
|
||||
|
||||
dialect = sa_url.make_url(db_url).get_dialect()
|
||||
|
||||
dialect.load_provisioning()
|
||||
|
||||
if follower_ident:
|
||||
@@ -108,9 +101,7 @@ def generate_db_urls(db_urls, extra_drivers):
|
||||
"""Generate a set of URLs to test given configured URLs plus additional
|
||||
driver names.
|
||||
|
||||
Given:
|
||||
|
||||
.. sourcecode:: text
|
||||
Given::
|
||||
|
||||
--dburi postgresql://db1 \
|
||||
--dburi postgresql://db2 \
|
||||
@@ -118,9 +109,7 @@ def generate_db_urls(db_urls, extra_drivers):
|
||||
--dbdriver=psycopg2 --dbdriver=asyncpg?async_fallback=true
|
||||
|
||||
Noting that the default postgresql driver is psycopg2, the output
|
||||
would be:
|
||||
|
||||
.. sourcecode:: text
|
||||
would be::
|
||||
|
||||
postgresql+psycopg2://db1
|
||||
postgresql+asyncpg://db1
|
||||
@@ -137,8 +126,6 @@ def generate_db_urls(db_urls, extra_drivers):
|
||||
driver name. For example, to enable the async fallback option for
|
||||
asyncpg::
|
||||
|
||||
.. sourcecode:: text
|
||||
|
||||
--dburi postgresql://db1 \
|
||||
--dbdriver=asyncpg?async_fallback=true
|
||||
|
||||
@@ -153,10 +140,7 @@ def generate_db_urls(db_urls, extra_drivers):
|
||||
]
|
||||
|
||||
for url_obj, dialect in urls_plus_dialects:
|
||||
# use get_driver_name instead of dialect.driver to account for
|
||||
# "_async" virtual drivers like oracledb and psycopg
|
||||
driver_name = url_obj.get_driver_name()
|
||||
backend_to_driver_we_already_have[dialect.name].add(driver_name)
|
||||
backend_to_driver_we_already_have[dialect.name].add(dialect.driver)
|
||||
|
||||
backend_to_driver_we_need = {}
|
||||
|
||||
@@ -368,7 +352,7 @@ def update_db_opts(db_url, db_opts, options):
|
||||
def post_configure_engine(url, engine, follower_ident):
|
||||
"""Perform extra steps after configuring an engine for testing.
|
||||
|
||||
(For the internal dialects, currently only used by sqlite, oracle, mssql)
|
||||
(For the internal dialects, currently only used by sqlite, oracle)
|
||||
"""
|
||||
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# testing/requirements.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
@@ -19,7 +19,6 @@ to provide specific inclusion/exclusions.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import platform
|
||||
|
||||
from . import asyncio as _test_asyncio
|
||||
@@ -92,9 +91,7 @@ class SuiteRequirements(Requirements):
|
||||
|
||||
@property
|
||||
def table_value_constructor(self):
|
||||
"""Database / dialect supports a query like:
|
||||
|
||||
.. sourcecode:: sql
|
||||
"""Database / dialect supports a query like::
|
||||
|
||||
SELECT * FROM VALUES ( (c1, c2), (c1, c2), ...)
|
||||
AS some_table(col1, col2)
|
||||
@@ -315,13 +312,6 @@ class SuiteRequirements(Requirements):
|
||||
|
||||
return exclusions.closed()
|
||||
|
||||
@property
|
||||
def ctes_with_values(self):
|
||||
"""target database supports CTES that ride on top of a VALUES
|
||||
clause."""
|
||||
|
||||
return exclusions.closed()
|
||||
|
||||
@property
|
||||
def ctes_on_dml(self):
|
||||
"""target database supports CTES which consist of INSERT, UPDATE
|
||||
@@ -665,12 +655,6 @@ class SuiteRequirements(Requirements):
|
||||
|
||||
return exclusions.closed()
|
||||
|
||||
@property
|
||||
def temp_table_comment_reflection(self):
|
||||
"""indicates if database supports comments on temp tables and
|
||||
the dialect can reflect them"""
|
||||
return exclusions.closed()
|
||||
|
||||
@property
|
||||
def comment_reflection(self):
|
||||
"""Indicates if the database support table comment reflection"""
|
||||
@@ -812,11 +796,6 @@ class SuiteRequirements(Requirements):
|
||||
"""target dialect supports reflection of unique constraints"""
|
||||
return exclusions.open()
|
||||
|
||||
@property
|
||||
def inline_check_constraint_reflection(self):
|
||||
"""target dialect supports reflection of inline check constraints"""
|
||||
return exclusions.closed()
|
||||
|
||||
@property
|
||||
def check_constraint_reflection(self):
|
||||
"""target dialect supports reflection of check constraints"""
|
||||
@@ -836,11 +815,6 @@ class SuiteRequirements(Requirements):
|
||||
|
||||
return exclusions.open()
|
||||
|
||||
@property
|
||||
def nvarchar_types(self):
|
||||
"""target database supports NVARCHAR and NCHAR as an actual datatype"""
|
||||
return exclusions.closed()
|
||||
|
||||
@property
|
||||
def unicode_data_no_special_types(self):
|
||||
"""Target database/dialect can receive / deliver / compare data with
|
||||
@@ -1013,9 +987,7 @@ class SuiteRequirements(Requirements):
|
||||
@property
|
||||
def binary_literals(self):
|
||||
"""target backend supports simple binary literals, e.g. an
|
||||
expression like:
|
||||
|
||||
.. sourcecode:: sql
|
||||
expression like::
|
||||
|
||||
SELECT CAST('foo' AS BINARY)
|
||||
|
||||
@@ -1033,13 +1005,6 @@ class SuiteRequirements(Requirements):
|
||||
"""target dialect supports 'AUTOCOMMIT' as an isolation_level"""
|
||||
return exclusions.closed()
|
||||
|
||||
@property
|
||||
def skip_autocommit_rollback(self):
|
||||
"""target dialect supports the detect_autocommit_setting() method and
|
||||
uses the default implementation of do_rollback()"""
|
||||
|
||||
return exclusions.closed()
|
||||
|
||||
@property
|
||||
def isolation_level(self):
|
||||
"""target dialect supports general isolation level settings.
|
||||
@@ -1128,11 +1093,6 @@ class SuiteRequirements(Requirements):
|
||||
|
||||
return exclusions.only_if(go)
|
||||
|
||||
@property
|
||||
def array_type(self):
|
||||
"""Target platform implements a native ARRAY type"""
|
||||
return exclusions.closed()
|
||||
|
||||
@property
|
||||
def json_type(self):
|
||||
"""target platform implements a native JSON type."""
|
||||
@@ -1194,19 +1154,6 @@ class SuiteRequirements(Requirements):
|
||||
"""
|
||||
return self.precision_numerics_many_significant_digits
|
||||
|
||||
@property
|
||||
def server_defaults(self):
|
||||
"""Target backend supports server side defaults for columns"""
|
||||
|
||||
return exclusions.closed()
|
||||
|
||||
@property
|
||||
def expression_server_defaults(self):
|
||||
"""Target backend supports server side defaults with SQL expressions
|
||||
for columns"""
|
||||
|
||||
return exclusions.closed()
|
||||
|
||||
@property
|
||||
def implicit_decimal_binds(self):
|
||||
"""target backend will return a selected Decimal as a Decimal, not
|
||||
@@ -1216,7 +1163,9 @@ class SuiteRequirements(Requirements):
|
||||
|
||||
expr = decimal.Decimal("15.7563")
|
||||
|
||||
value = e.scalar(select(literal(expr)))
|
||||
value = e.scalar(
|
||||
select(literal(expr))
|
||||
)
|
||||
|
||||
assert value == expr
|
||||
|
||||
@@ -1384,9 +1333,7 @@ class SuiteRequirements(Requirements):
|
||||
present in a subquery in the WHERE clause.
|
||||
|
||||
This is an ANSI-standard syntax that apparently MySQL can't handle,
|
||||
such as:
|
||||
|
||||
.. sourcecode:: sql
|
||||
such as::
|
||||
|
||||
UPDATE documents SET flag=1 WHERE documents.title IN
|
||||
(SELECT max(documents.title) AS title
|
||||
@@ -1419,11 +1366,7 @@ class SuiteRequirements(Requirements):
|
||||
"""target database supports ordering by a column from a SELECT
|
||||
inside of a UNION
|
||||
|
||||
E.g.:
|
||||
|
||||
.. sourcecode:: sql
|
||||
|
||||
(SELECT id, ...) UNION (SELECT id, ...) ORDER BY id
|
||||
E.g. (SELECT id, ...) UNION (SELECT id, ...) ORDER BY id
|
||||
|
||||
"""
|
||||
return exclusions.open()
|
||||
@@ -1433,9 +1376,7 @@ class SuiteRequirements(Requirements):
|
||||
"""target backend supports ORDER BY a column label within an
|
||||
expression.
|
||||
|
||||
Basically this:
|
||||
|
||||
.. sourcecode:: sql
|
||||
Basically this::
|
||||
|
||||
select data as foo from test order by foo || 'bar'
|
||||
|
||||
@@ -1524,10 +1465,6 @@ class SuiteRequirements(Requirements):
|
||||
|
||||
return config.add_to_marker.timing_intensive
|
||||
|
||||
@property
|
||||
def posix(self):
|
||||
return exclusions.skip_if(lambda: os.name != "posix")
|
||||
|
||||
@property
|
||||
def memory_intensive(self):
|
||||
from . import config
|
||||
@@ -1569,27 +1506,6 @@ class SuiteRequirements(Requirements):
|
||||
|
||||
return exclusions.skip_if(check)
|
||||
|
||||
@property
|
||||
def up_to_date_typealias_type(self):
|
||||
# this checks a particular quirk found in typing_extensions <=4.12.0
|
||||
# using older python versions like 3.10 or 3.9, we use TypeAliasType
|
||||
# from typing_extensions which does not provide for sufficient
|
||||
# introspection prior to 4.13.0
|
||||
def check(config):
|
||||
import typing
|
||||
import typing_extensions
|
||||
|
||||
TypeAliasType = getattr(
|
||||
typing, "TypeAliasType", typing_extensions.TypeAliasType
|
||||
)
|
||||
TV = typing.TypeVar("TV")
|
||||
TA_generic = TypeAliasType( # type: ignore
|
||||
"TA_generic", typing.List[TV], type_params=(TV,)
|
||||
)
|
||||
return hasattr(TA_generic[int], "__value__")
|
||||
|
||||
return exclusions.only_if(check)
|
||||
|
||||
@property
|
||||
def python38(self):
|
||||
return exclusions.only_if(
|
||||
@@ -1614,32 +1530,6 @@ class SuiteRequirements(Requirements):
|
||||
lambda: util.py311, "Python 3.11 or above required"
|
||||
)
|
||||
|
||||
@property
|
||||
def python312(self):
|
||||
return exclusions.only_if(
|
||||
lambda: util.py312, "Python 3.12 or above required"
|
||||
)
|
||||
|
||||
@property
|
||||
def fail_python314b1(self):
|
||||
return exclusions.fails_if(
|
||||
lambda: util.compat.py314b1, "Fails as of python 3.14.0b1"
|
||||
)
|
||||
|
||||
@property
|
||||
def not_python314(self):
|
||||
"""This requirement is interim to assist with backporting of
|
||||
issue #12405.
|
||||
|
||||
SQLAlchemy 2.0 still includes the ``await_fallback()`` method that
|
||||
makes use of ``asyncio.get_event_loop_policy()``. This is removed
|
||||
in SQLAlchemy 2.1.
|
||||
|
||||
"""
|
||||
return exclusions.skip_if(
|
||||
lambda: util.py314, "Python 3.14 or above not supported"
|
||||
)
|
||||
|
||||
@property
|
||||
def cpython(self):
|
||||
return exclusions.only_if(
|
||||
@@ -1719,18 +1609,6 @@ class SuiteRequirements(Requirements):
|
||||
def asyncio(self):
|
||||
return self.greenlet
|
||||
|
||||
@property
|
||||
def no_greenlet(self):
|
||||
def go(config):
|
||||
try:
|
||||
import greenlet # noqa: F401
|
||||
except ImportError:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
return exclusions.only_if(go)
|
||||
|
||||
@property
|
||||
def greenlet(self):
|
||||
def go(config):
|
||||
@@ -1885,34 +1763,3 @@ class SuiteRequirements(Requirements):
|
||||
def materialized_views_reflect_pk(self):
|
||||
"""Target database reflect MATERIALIZED VIEWs pks."""
|
||||
return exclusions.closed()
|
||||
|
||||
@property
|
||||
def supports_bitwise_or(self):
|
||||
"""Target database supports bitwise or"""
|
||||
return exclusions.closed()
|
||||
|
||||
@property
|
||||
def supports_bitwise_and(self):
|
||||
"""Target database supports bitwise and"""
|
||||
return exclusions.closed()
|
||||
|
||||
@property
|
||||
def supports_bitwise_not(self):
|
||||
"""Target database supports bitwise not"""
|
||||
return exclusions.closed()
|
||||
|
||||
@property
|
||||
def supports_bitwise_xor(self):
|
||||
"""Target database supports bitwise xor"""
|
||||
return exclusions.closed()
|
||||
|
||||
@property
|
||||
def supports_bitwise_shift(self):
|
||||
"""Target database supports bitwise left or right shift"""
|
||||
return exclusions.closed()
|
||||
|
||||
@property
|
||||
def like_escapes(self):
|
||||
"""Target backend supports custom ESCAPE characters
|
||||
with LIKE comparisons"""
|
||||
return exclusions.open()
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# testing/schema.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
|
||||
@@ -1,9 +1,3 @@
|
||||
# testing/suite/__init__.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
from .test_cte import * # noqa
|
||||
from .test_ddl import * # noqa
|
||||
from .test_deprecations import * # noqa
|
||||
|
||||
@@ -1,22 +1,14 @@
|
||||
# testing/suite/test_cte.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
from .. import fixtures
|
||||
from ..assertions import eq_
|
||||
from ..schema import Column
|
||||
from ..schema import Table
|
||||
from ... import column
|
||||
from ... import ForeignKey
|
||||
from ... import Integer
|
||||
from ... import select
|
||||
from ... import String
|
||||
from ... import testing
|
||||
from ... import values
|
||||
|
||||
|
||||
class CTETest(fixtures.TablesTest):
|
||||
@@ -211,27 +203,3 @@ class CTETest(fixtures.TablesTest):
|
||||
).fetchall(),
|
||||
[(1, "d1", None), (5, "d5", 3)],
|
||||
)
|
||||
|
||||
@testing.variation("values_named", [True, False])
|
||||
@testing.variation("cte_named", [True, False])
|
||||
@testing.variation("literal_binds", [True, False])
|
||||
@testing.requires.ctes_with_values
|
||||
def test_values_named_via_cte(
|
||||
self, connection, values_named, cte_named, literal_binds
|
||||
):
|
||||
|
||||
cte1 = (
|
||||
values(
|
||||
column("col1", String),
|
||||
column("col2", Integer),
|
||||
literal_binds=bool(literal_binds),
|
||||
name="some name" if values_named else None,
|
||||
)
|
||||
.data([("a", 2), ("b", 3)])
|
||||
.cte("cte1" if cte_named else None)
|
||||
)
|
||||
|
||||
stmt = select(cte1)
|
||||
|
||||
rows = connection.execute(stmt).all()
|
||||
eq_(rows, [("a", 2), ("b", 3)])
|
||||
|
||||
@@ -1,9 +1,3 @@
|
||||
# testing/suite/test_ddl.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
import random
|
||||
|
||||
@@ -1,9 +1,3 @@
|
||||
# testing/suite/test_deprecations.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
from .. import fixtures
|
||||
|
||||
@@ -1,9 +1,3 @@
|
||||
# testing/suite/test_dialect.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
|
||||
@@ -17,7 +11,6 @@ from .. import eq_
|
||||
from .. import fixtures
|
||||
from .. import is_not_none
|
||||
from .. import is_true
|
||||
from .. import mock
|
||||
from .. import ne_
|
||||
from .. import provide_metadata
|
||||
from ..assertions import expect_raises
|
||||
@@ -294,11 +287,7 @@ class AutocommitIsolationTest(fixtures.TablesTest):
|
||||
test_needs_acid=True,
|
||||
)
|
||||
|
||||
def _test_conn_autocommits(self, conn, autocommit, ensure_table=False):
|
||||
if ensure_table:
|
||||
self.tables.some_table.create(conn, checkfirst=True)
|
||||
conn.commit()
|
||||
|
||||
def _test_conn_autocommits(self, conn, autocommit):
|
||||
trans = conn.begin()
|
||||
conn.execute(
|
||||
self.tables.some_table.insert(), {"id": 1, "data": "some data"}
|
||||
@@ -341,37 +330,6 @@ class AutocommitIsolationTest(fixtures.TablesTest):
|
||||
)
|
||||
self._test_conn_autocommits(conn, False)
|
||||
|
||||
@testing.requires.skip_autocommit_rollback
|
||||
@testing.variation("autocommit_setting", ["false", "engine", "option"])
|
||||
@testing.variation("block_rollback", [True, False])
|
||||
def test_autocommit_block(
|
||||
self, testing_engine, autocommit_setting, block_rollback
|
||||
):
|
||||
kw = {}
|
||||
if bool(block_rollback):
|
||||
kw["skip_autocommit_rollback"] = True
|
||||
if autocommit_setting.engine:
|
||||
kw["isolation_level"] = "AUTOCOMMIT"
|
||||
|
||||
engine = testing_engine(options=kw)
|
||||
|
||||
conn = engine.connect()
|
||||
if autocommit_setting.option:
|
||||
conn.execution_options(isolation_level="AUTOCOMMIT")
|
||||
self._test_conn_autocommits(
|
||||
conn,
|
||||
autocommit_setting.engine or autocommit_setting.option,
|
||||
ensure_table=True,
|
||||
)
|
||||
with mock.patch.object(
|
||||
conn.connection, "rollback", wraps=conn.connection.rollback
|
||||
) as check_rollback:
|
||||
conn.close()
|
||||
if autocommit_setting.false or not block_rollback:
|
||||
eq_(check_rollback.mock_calls, [mock.call()])
|
||||
else:
|
||||
eq_(check_rollback.mock_calls, [])
|
||||
|
||||
@testing.requires.independent_readonly_connections
|
||||
@testing.variation("use_dialect_setting", [True, False])
|
||||
def test_dialect_autocommit_is_restored(
|
||||
@@ -573,7 +531,7 @@ class DifficultParametersTest(fixtures.TestBase):
|
||||
t.c[name].in_(["some name", "some other_name"])
|
||||
)
|
||||
|
||||
connection.execute(stmt).first()
|
||||
row = connection.execute(stmt).first()
|
||||
|
||||
@testing.fixture
|
||||
def multirow_fixture(self, metadata, connection):
|
||||
@@ -657,7 +615,7 @@ class ReturningGuardsTest(fixtures.TablesTest):
|
||||
f"current server capabilities does not support "
|
||||
f".*RETURNING when executemany is used",
|
||||
):
|
||||
connection.execute(
|
||||
result = connection.execute(
|
||||
stmt,
|
||||
[
|
||||
{id_param_name: 1, "data": "d1"},
|
||||
|
||||
@@ -1,9 +1,3 @@
|
||||
# testing/suite/test_insert.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
from decimal import Decimal
|
||||
@@ -492,11 +486,9 @@ class ReturningTest(fixtures.TablesTest):
|
||||
t.c.value,
|
||||
sort_by_parameter_order=bool(sort_by_parameter_order),
|
||||
),
|
||||
(
|
||||
[{"value": value} for i in range(10)]
|
||||
if multiple_rows
|
||||
else {"value": value}
|
||||
),
|
||||
[{"value": value} for i in range(10)]
|
||||
if multiple_rows
|
||||
else {"value": value},
|
||||
)
|
||||
|
||||
if multiple_rows:
|
||||
@@ -553,12 +545,6 @@ class ReturningTest(fixtures.TablesTest):
|
||||
uuid.uuid4(),
|
||||
testing.requires.uuid_data_type,
|
||||
),
|
||||
(
|
||||
"generic_native_uuid_str",
|
||||
Uuid(as_uuid=False, native_uuid=True),
|
||||
str(uuid.uuid4()),
|
||||
testing.requires.uuid_data_type,
|
||||
),
|
||||
("UUID", UUID(), uuid.uuid4(), testing.requires.uuid_data_type),
|
||||
(
|
||||
"LargeBinary1",
|
||||
@@ -604,11 +590,9 @@ class ReturningTest(fixtures.TablesTest):
|
||||
t.c.value,
|
||||
sort_by_parameter_order=bool(sort_by_parameter_order),
|
||||
),
|
||||
(
|
||||
[{"value": value} for i in range(10)]
|
||||
if multiple_rows
|
||||
else {"value": value}
|
||||
),
|
||||
[{"value": value} for i in range(10)]
|
||||
if multiple_rows
|
||||
else {"value": value},
|
||||
)
|
||||
|
||||
if multiple_rows:
|
||||
|
||||
@@ -1,12 +1,5 @@
|
||||
# testing/suite/test_reflection.py
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
# mypy: ignore-errors
|
||||
|
||||
import contextlib
|
||||
import operator
|
||||
import re
|
||||
|
||||
@@ -14,7 +7,6 @@ import sqlalchemy as sa
|
||||
from .. import config
|
||||
from .. import engines
|
||||
from .. import eq_
|
||||
from .. import eq_regex
|
||||
from .. import expect_raises
|
||||
from .. import expect_raises_message
|
||||
from .. import expect_warnings
|
||||
@@ -24,8 +16,6 @@ from ..provision import get_temp_table_name
|
||||
from ..provision import temp_table_keyword_args
|
||||
from ..schema import Column
|
||||
from ..schema import Table
|
||||
from ... import Boolean
|
||||
from ... import DateTime
|
||||
from ... import event
|
||||
from ... import ForeignKey
|
||||
from ... import func
|
||||
@@ -223,7 +213,6 @@ class HasTableTest(OneConnectionTablesTest):
|
||||
|
||||
class HasIndexTest(fixtures.TablesTest):
|
||||
__backend__ = True
|
||||
__requires__ = ("index_reflection",)
|
||||
|
||||
@classmethod
|
||||
def define_tables(cls, metadata):
|
||||
@@ -298,36 +287,25 @@ class HasIndexTest(fixtures.TablesTest):
|
||||
)
|
||||
|
||||
|
||||
class BizarroCharacterTest(fixtures.TestBase):
|
||||
class BizarroCharacterFKResolutionTest(fixtures.TestBase):
|
||||
"""tests for #10275"""
|
||||
|
||||
__backend__ = True
|
||||
|
||||
def column_names():
|
||||
return testing.combinations(
|
||||
("plainname",),
|
||||
("(3)",),
|
||||
("col%p",),
|
||||
("[brack]",),
|
||||
argnames="columnname",
|
||||
)
|
||||
|
||||
def table_names():
|
||||
return testing.combinations(
|
||||
("plain",),
|
||||
("(2)",),
|
||||
("per % cent",),
|
||||
("[brackets]",),
|
||||
argnames="tablename",
|
||||
)
|
||||
|
||||
@testing.combinations(
|
||||
("id",), ("(3)",), ("col%p",), ("[brack]",), argnames="columnname"
|
||||
)
|
||||
@testing.variation("use_composite", [True, False])
|
||||
@column_names()
|
||||
@table_names()
|
||||
@testing.requires.foreign_key_constraint_reflection
|
||||
@testing.combinations(
|
||||
("plain",),
|
||||
("(2)",),
|
||||
("per % cent",),
|
||||
("[brackets]",),
|
||||
argnames="tablename",
|
||||
)
|
||||
def test_fk_ref(
|
||||
self, connection, metadata, use_composite, tablename, columnname
|
||||
):
|
||||
"""tests for #10275"""
|
||||
tt = Table(
|
||||
tablename,
|
||||
metadata,
|
||||
@@ -367,77 +345,6 @@ class BizarroCharacterTest(fixtures.TestBase):
|
||||
if use_composite:
|
||||
assert o2.c.ref2.references(t1.c[1])
|
||||
|
||||
@column_names()
|
||||
@table_names()
|
||||
@testing.requires.identity_columns
|
||||
def test_reflect_identity(
|
||||
self, tablename, columnname, connection, metadata
|
||||
):
|
||||
Table(
|
||||
tablename,
|
||||
metadata,
|
||||
Column(columnname, Integer, Identity(), primary_key=True),
|
||||
)
|
||||
metadata.create_all(connection)
|
||||
insp = inspect(connection)
|
||||
|
||||
eq_(insp.get_columns(tablename)[0]["identity"]["start"], 1)
|
||||
|
||||
@column_names()
|
||||
@table_names()
|
||||
@testing.requires.comment_reflection
|
||||
def test_reflect_comments(
|
||||
self, tablename, columnname, connection, metadata
|
||||
):
|
||||
Table(
|
||||
tablename,
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True),
|
||||
Column(columnname, Integer, comment="some comment"),
|
||||
)
|
||||
metadata.create_all(connection)
|
||||
insp = inspect(connection)
|
||||
|
||||
eq_(insp.get_columns(tablename)[1]["comment"], "some comment")
|
||||
|
||||
|
||||
class TempTableElementsTest(fixtures.TestBase):
|
||||
|
||||
__backend__ = True
|
||||
|
||||
__requires__ = ("temp_table_reflection",)
|
||||
|
||||
@testing.fixture
|
||||
def tablename(self):
|
||||
return get_temp_table_name(
|
||||
config, config.db, f"ident_tmp_{config.ident}"
|
||||
)
|
||||
|
||||
@testing.requires.identity_columns
|
||||
def test_reflect_identity(self, tablename, connection, metadata):
|
||||
Table(
|
||||
tablename,
|
||||
metadata,
|
||||
Column("id", Integer, Identity(), primary_key=True),
|
||||
)
|
||||
metadata.create_all(connection)
|
||||
insp = inspect(connection)
|
||||
|
||||
eq_(insp.get_columns(tablename)[0]["identity"]["start"], 1)
|
||||
|
||||
@testing.requires.temp_table_comment_reflection
|
||||
def test_reflect_comments(self, tablename, connection, metadata):
|
||||
Table(
|
||||
tablename,
|
||||
metadata,
|
||||
Column("id", Integer, primary_key=True),
|
||||
Column("foobar", Integer, comment="some comment"),
|
||||
)
|
||||
metadata.create_all(connection)
|
||||
insp = inspect(connection)
|
||||
|
||||
eq_(insp.get_columns(tablename)[1]["comment"], "some comment")
|
||||
|
||||
|
||||
class QuotedNameArgumentTest(fixtures.TablesTest):
|
||||
run_create_tables = "once"
|
||||
@@ -541,7 +448,7 @@ class QuotedNameArgumentTest(fixtures.TablesTest):
|
||||
is_true(isinstance(res, dict))
|
||||
else:
|
||||
with expect_raises(NotImplementedError):
|
||||
insp.get_table_options(name)
|
||||
res = insp.get_table_options(name)
|
||||
|
||||
@quote_fixtures
|
||||
@testing.requires.view_column_reflection
|
||||
@@ -560,13 +467,11 @@ class QuotedNameArgumentTest(fixtures.TablesTest):
|
||||
assert insp.get_pk_constraint(name)
|
||||
|
||||
@quote_fixtures
|
||||
@testing.requires.foreign_key_constraint_reflection
|
||||
def test_get_foreign_keys(self, name):
|
||||
insp = inspect(config.db)
|
||||
assert insp.get_foreign_keys(name)
|
||||
|
||||
@quote_fixtures
|
||||
@testing.requires.index_reflection
|
||||
def test_get_indexes(self, name):
|
||||
insp = inspect(config.db)
|
||||
assert insp.get_indexes(name)
|
||||
@@ -1179,9 +1084,9 @@ class ComponentReflectionTest(ComparesTables, OneConnectionTablesTest):
|
||||
"referred_columns": ref_col,
|
||||
"name": name,
|
||||
"options": mock.ANY,
|
||||
"referred_schema": (
|
||||
ref_schema if ref_schema is not None else tt()
|
||||
),
|
||||
"referred_schema": ref_schema
|
||||
if ref_schema is not None
|
||||
else tt(),
|
||||
"referred_table": ref_table,
|
||||
"comment": comment,
|
||||
}
|
||||
@@ -2035,8 +1940,6 @@ class ComponentReflectionTest(ComparesTables, OneConnectionTablesTest):
|
||||
if dupe:
|
||||
names_that_duplicate_index.add(dupe)
|
||||
eq_(refl.pop("comment", None), None)
|
||||
# ignore dialect_options
|
||||
refl.pop("dialect_options", None)
|
||||
eq_(orig, refl)
|
||||
|
||||
reflected_metadata = MetaData()
|
||||
@@ -2128,7 +2031,7 @@ class ComponentReflectionTest(ComparesTables, OneConnectionTablesTest):
|
||||
is_true(isinstance(res, dict))
|
||||
else:
|
||||
with expect_raises(NotImplementedError):
|
||||
insp.get_table_options("users", schema=schema)
|
||||
res = insp.get_table_options("users", schema=schema)
|
||||
|
||||
@testing.combinations((True, testing.requires.schemas), False)
|
||||
def test_multi_get_table_options(self, use_schema):
|
||||
@@ -2144,7 +2047,7 @@ class ComponentReflectionTest(ComparesTables, OneConnectionTablesTest):
|
||||
eq_(res, exp)
|
||||
else:
|
||||
with expect_raises(NotImplementedError):
|
||||
insp.get_multi_table_options()
|
||||
res = insp.get_multi_table_options()
|
||||
|
||||
@testing.fixture
|
||||
def get_multi_exp(self, connection):
|
||||
@@ -2545,158 +2448,62 @@ class TableNoColumnsTest(fixtures.TestBase):
|
||||
class ComponentReflectionTestExtra(ComparesIndexes, fixtures.TestBase):
|
||||
__backend__ = True
|
||||
|
||||
@testing.fixture(params=[True, False])
|
||||
def use_schema_fixture(self, request):
|
||||
if request.param:
|
||||
return config.test_schema
|
||||
@testing.combinations(
|
||||
(True, testing.requires.schemas), (False,), argnames="use_schema"
|
||||
)
|
||||
@testing.requires.check_constraint_reflection
|
||||
def test_get_check_constraints(self, metadata, connection, use_schema):
|
||||
if use_schema:
|
||||
schema = config.test_schema
|
||||
else:
|
||||
return None
|
||||
schema = None
|
||||
|
||||
@testing.fixture()
|
||||
def inspect_for_table(self, metadata, connection, use_schema_fixture):
|
||||
@contextlib.contextmanager
|
||||
def go(tablename):
|
||||
yield use_schema_fixture, inspect(connection)
|
||||
Table(
|
||||
"sa_cc",
|
||||
metadata,
|
||||
Column("a", Integer()),
|
||||
sa.CheckConstraint("a > 1 AND a < 5", name="cc1"),
|
||||
sa.CheckConstraint(
|
||||
"a = 1 OR (a > 2 AND a < 5)", name="UsesCasing"
|
||||
),
|
||||
schema=schema,
|
||||
)
|
||||
Table(
|
||||
"no_constraints",
|
||||
metadata,
|
||||
Column("data", sa.String(20)),
|
||||
schema=schema,
|
||||
)
|
||||
|
||||
metadata.create_all(connection)
|
||||
metadata.create_all(connection)
|
||||
|
||||
return go
|
||||
insp = inspect(connection)
|
||||
reflected = sorted(
|
||||
insp.get_check_constraints("sa_cc", schema=schema),
|
||||
key=operator.itemgetter("name"),
|
||||
)
|
||||
|
||||
def ck_eq(self, reflected, expected):
|
||||
# trying to minimize effect of quoting, parenthesis, etc.
|
||||
# may need to add more to this as new dialects get CHECK
|
||||
# constraint reflection support
|
||||
def normalize(sqltext):
|
||||
return " ".join(
|
||||
re.findall(r"and|\d|=|a|b|c|or|<|>", sqltext.lower(), re.I)
|
||||
re.findall(r"and|\d|=|a|or|<|>", sqltext.lower(), re.I)
|
||||
)
|
||||
|
||||
reflected = sorted(
|
||||
[
|
||||
{"name": item["name"], "sqltext": normalize(item["sqltext"])}
|
||||
for item in reflected
|
||||
],
|
||||
key=lambda item: (item["sqltext"]),
|
||||
)
|
||||
|
||||
expected = sorted(
|
||||
expected,
|
||||
key=lambda item: (item["sqltext"]),
|
||||
)
|
||||
eq_(reflected, expected)
|
||||
|
||||
@testing.requires.check_constraint_reflection
|
||||
def test_check_constraint_no_constraint(self, metadata, inspect_for_table):
|
||||
with inspect_for_table("no_constraints") as (schema, inspector):
|
||||
Table(
|
||||
"no_constraints",
|
||||
metadata,
|
||||
Column("data", sa.String(20)),
|
||||
schema=schema,
|
||||
)
|
||||
|
||||
self.ck_eq(
|
||||
inspector.get_check_constraints("no_constraints", schema=schema),
|
||||
[],
|
||||
)
|
||||
|
||||
@testing.requires.inline_check_constraint_reflection
|
||||
@testing.combinations(
|
||||
"my_inline", "MyInline", None, argnames="constraint_name"
|
||||
)
|
||||
def test_check_constraint_inline(
|
||||
self, metadata, inspect_for_table, constraint_name
|
||||
):
|
||||
|
||||
with inspect_for_table("sa_cc") as (schema, inspector):
|
||||
Table(
|
||||
"sa_cc",
|
||||
metadata,
|
||||
Column("id", Integer(), primary_key=True),
|
||||
Column(
|
||||
"a",
|
||||
Integer(),
|
||||
sa.CheckConstraint(
|
||||
"a > 1 AND a < 5", name=constraint_name
|
||||
),
|
||||
),
|
||||
Column("data", String(50)),
|
||||
schema=schema,
|
||||
)
|
||||
|
||||
reflected = inspector.get_check_constraints("sa_cc", schema=schema)
|
||||
|
||||
self.ck_eq(
|
||||
reflected = [
|
||||
{"name": item["name"], "sqltext": normalize(item["sqltext"])}
|
||||
for item in reflected
|
||||
]
|
||||
eq_(
|
||||
reflected,
|
||||
[
|
||||
{
|
||||
"name": constraint_name or mock.ANY,
|
||||
"sqltext": "a > 1 and a < 5",
|
||||
},
|
||||
],
|
||||
)
|
||||
|
||||
@testing.requires.check_constraint_reflection
|
||||
@testing.combinations(
|
||||
"my_ck_const", "MyCkConst", None, argnames="constraint_name"
|
||||
)
|
||||
def test_check_constraint_standalone(
|
||||
self, metadata, inspect_for_table, constraint_name
|
||||
):
|
||||
with inspect_for_table("sa_cc") as (schema, inspector):
|
||||
Table(
|
||||
"sa_cc",
|
||||
metadata,
|
||||
Column("a", Integer()),
|
||||
sa.CheckConstraint(
|
||||
"a = 1 OR (a > 2 AND a < 5)", name=constraint_name
|
||||
),
|
||||
schema=schema,
|
||||
)
|
||||
|
||||
reflected = inspector.get_check_constraints("sa_cc", schema=schema)
|
||||
|
||||
self.ck_eq(
|
||||
reflected,
|
||||
[
|
||||
{
|
||||
"name": constraint_name or mock.ANY,
|
||||
"sqltext": "a = 1 or a > 2 and a < 5",
|
||||
},
|
||||
],
|
||||
)
|
||||
|
||||
@testing.requires.inline_check_constraint_reflection
|
||||
def test_check_constraint_mixed(self, metadata, inspect_for_table):
|
||||
with inspect_for_table("sa_cc") as (schema, inspector):
|
||||
Table(
|
||||
"sa_cc",
|
||||
metadata,
|
||||
Column("id", Integer(), primary_key=True),
|
||||
Column("a", Integer(), sa.CheckConstraint("a > 1 AND a < 5")),
|
||||
Column(
|
||||
"b",
|
||||
Integer(),
|
||||
sa.CheckConstraint("b > 1 AND b < 5", name="my_inline"),
|
||||
),
|
||||
Column("c", Integer()),
|
||||
Column("data", String(50)),
|
||||
sa.UniqueConstraint("data", name="some_uq"),
|
||||
sa.CheckConstraint("c > 1 AND c < 5", name="cc1"),
|
||||
sa.UniqueConstraint("c", name="some_c_uq"),
|
||||
schema=schema,
|
||||
)
|
||||
|
||||
reflected = inspector.get_check_constraints("sa_cc", schema=schema)
|
||||
|
||||
self.ck_eq(
|
||||
reflected,
|
||||
[
|
||||
{"name": "cc1", "sqltext": "c > 1 and c < 5"},
|
||||
{"name": "my_inline", "sqltext": "b > 1 and b < 5"},
|
||||
{"name": mock.ANY, "sqltext": "a > 1 and a < 5"},
|
||||
{"name": "UsesCasing", "sqltext": "a = 1 or a > 2 and a < 5"},
|
||||
{"name": "cc1", "sqltext": "a > 1 and a < 5"},
|
||||
],
|
||||
)
|
||||
no_cst = "no_constraints"
|
||||
eq_(insp.get_check_constraints(no_cst, schema=schema), [])

    @testing.requires.indexes_with_expressions
    def test_reflect_expression_based_indexes(self, metadata, connection):
@@ -2852,25 +2659,12 @@ class ComponentReflectionTestExtra(ComparesIndexes, fixtures.TestBase):
            eq_(typ.scale, 5)

    @testing.requires.table_reflection
    @testing.combinations(
        sql_types.String,
        sql_types.VARCHAR,
        sql_types.CHAR,
        (sql_types.NVARCHAR, testing.requires.nvarchar_types),
        (sql_types.NCHAR, testing.requires.nvarchar_types),
        argnames="type_",
    )
    def test_string_length_reflection(self, connection, metadata, type_):
        typ = self._type_round_trip(connection, metadata, type_(52))[0]
        if issubclass(type_, sql_types.VARCHAR):
            assert isinstance(typ, sql_types.VARCHAR)
        elif issubclass(type_, sql_types.CHAR):
            assert isinstance(typ, sql_types.CHAR)
        else:
            assert isinstance(typ, sql_types.String)

    def test_varchar_reflection(self, connection, metadata):
        typ = self._type_round_trip(
            connection, metadata, sql_types.String(52)
        )[0]
        assert isinstance(typ, sql_types.String)
        eq_(typ.length, 52)
        assert isinstance(typ.length, int)

    @testing.requires.table_reflection
    def test_nullable_reflection(self, connection, metadata):
@@ -2982,47 +2776,6 @@ class ComponentReflectionTestExtra(ComparesIndexes, fixtures.TestBase):
        eq_(opts, expected)
        # eq_(dict((k, opts[k]) for k in opts if opts[k]), expected)

    @testing.combinations(
        (Integer, sa.text("10"), r"'?10'?"),
        (Integer, "10", r"'?10'?"),
        (Boolean, sa.true(), r"1|true"),
        (
            Integer,
            sa.text("3 + 5"),
            r"3\+5",
            testing.requires.expression_server_defaults,
        ),
        (
            Integer,
            sa.text("(3 * 5)"),
            r"3\*5",
            testing.requires.expression_server_defaults,
        ),
        (DateTime, func.now(), r"current_timestamp|now|getdate"),
        (
            Integer,
            sa.literal_column("3") + sa.literal_column("5"),
            r"3\+5",
            testing.requires.expression_server_defaults,
        ),
        argnames="datatype, default, expected_reg",
    )
    @testing.requires.server_defaults
    def test_server_defaults(
        self, metadata, connection, datatype, default, expected_reg
    ):
        t = Table(
            "t",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("thecol", datatype, server_default=default),
        )
        t.create(connection)

        reflected = inspect(connection).get_columns("t")[1]["default"]
        reflected_sanitized = re.sub(r"[\(\) \']", "", reflected)
        eq_regex(reflected_sanitized, expected_reg, flags=re.IGNORECASE)
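        # e.g. a default reflected as "('10')" is reduced to "10" by the
        # substitution above before the regex comparison (illustrative
        # example, not from the original test data)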


class NormalizedNameTest(fixtures.TablesTest):
    __requires__ = ("denormalized_names",)
@@ -3359,12 +3112,11 @@ __all__ = (
    "ComponentReflectionTestExtra",
    "TableNoColumnsTest",
    "QuotedNameArgumentTest",
    "BizarroCharacterTest",
    "BizarroCharacterFKResolutionTest",
    "HasTableTest",
    "HasIndexTest",
    "NormalizedNameTest",
    "ComputedReflectionTest",
    "IdentityReflectionTest",
    "CompositeKeyReflectionTest",
    "TempTableElementsTest",
)

@@ -1,13 +1,6 @@
# testing/suite/test_results.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors

import datetime
import re

from .. import engines
from .. import fixtures
@@ -268,16 +261,12 @@ class ServerSideCursorsTest(
            return isinstance(cursor, sscursor)
        elif self.engine.dialect.driver == "mariadbconnector":
            return not cursor.buffered
        elif self.engine.dialect.driver == "mysqlconnector":
            return "buffered" not in type(cursor).__name__.lower()
        elif self.engine.dialect.driver in ("asyncpg", "aiosqlite"):
            return cursor.server_side
        elif self.engine.dialect.driver == "pg8000":
            return getattr(cursor, "server_side", False)
        elif self.engine.dialect.driver == "psycopg":
            return bool(getattr(cursor, "name", False))
        elif self.engine.dialect.driver == "oracledb":
            return getattr(cursor, "server_side", False)
        else:
            return False
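        # in short: mariadbconnector / mysqlconnector report buffering,
        # asyncpg / aiosqlite / pg8000 / oracledb expose a server_side
        # attribute, and psycopg signals a server-side result via a named
        # cursor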

@@ -298,26 +287,11 @@ class ServerSideCursorsTest(
            )
        return self.engine

    def stringify(self, str_):
        return re.compile(r"SELECT (\d+)", re.I).sub(
            lambda m: str(select(int(m.group(1))).compile(testing.db)), str_
        )
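        # stringify("select 1") swaps the bare "SELECT 1" for whatever
        # select(1) compiles to on the current testing.db dialect (roughly
        # "SELECT 1 FROM DUAL" on Oracle, for instance), keeping the
        # driver-level strings below portable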

    @testing.combinations(
        ("global_string", True, lambda stringify: stringify("select 1"), True),
        (
            "global_text",
            True,
            lambda stringify: text(stringify("select 1")),
            True,
        ),
        ("global_string", True, "select 1", True),
        ("global_text", True, text("select 1"), True),
        ("global_expr", True, select(1), True),
        (
            "global_off_explicit",
            False,
            lambda stringify: text(stringify("select 1")),
            False,
        ),
        ("global_off_explicit", False, text("select 1"), False),
        (
            "stmt_option",
            False,
@@ -335,22 +309,15 @@ class ServerSideCursorsTest(
        (
            "for_update_string",
            True,
            lambda stringify: stringify("SELECT 1 FOR UPDATE"),
            "SELECT 1 FOR UPDATE",
            True,
            testing.skip_if(["sqlite", "mssql"]),
        ),
        (
            "text_no_ss",
            False,
            lambda stringify: text(stringify("select 42")),
            False,
        ),
        ("text_no_ss", False, text("select 42"), False),
        (
            "text_ss_option",
            False,
            lambda stringify: text(stringify("select 42")).execution_options(
                stream_results=True
            ),
            text("select 42").execution_options(stream_results=True),
            True,
        ),
        id_="iaaa",
@@ -361,11 +328,6 @@ class ServerSideCursorsTest(
    ):
        engine = self._fixture(engine_ss_arg)
        with engine.begin() as conn:
            if callable(statement):
                statement = testing.resolve_lambda(
                    statement, stringify=self.stringify
                )

            if isinstance(statement, str):
                result = conn.exec_driver_sql(statement)
            else:
@@ -380,7 +342,7 @@ class ServerSideCursorsTest(
            # should be enabled for this one
            result = conn.execution_options(
                stream_results=True
            ).exec_driver_sql(self.stringify("select 1"))
            ).exec_driver_sql("select 1")
            assert self._is_server_side(result.cursor)

            # the connection has autobegun, which means at the end of the
@@ -434,9 +396,7 @@ class ServerSideCursorsTest(
        test_table = Table(
            "test_table",
            md,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("id", Integer, primary_key=True),
            Column("data", String(50)),
        )

@@ -476,9 +436,7 @@ class ServerSideCursorsTest(
        test_table = Table(
            "test_table",
            md,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("id", Integer, primary_key=True),
            Column("data", String(50)),
        )


@@ -1,9 +1,3 @@
# testing/suite/test_rowcount.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors

from sqlalchemy import bindparam
@@ -204,7 +198,7 @@ class RowCountTest(fixtures.TablesTest):
    def test_text_rowcount(self, connection):
        # test issue #3622, make sure eager rowcount is called for text
        result = connection.execute(
            text("update employees set department='Z' where department='C'")
            text("update employees set department='Z' " "where department='C'")
        )
        eq_(result.rowcount, 3)


@@ -1,9 +1,3 @@
# testing/suite/test_select.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors

import collections.abc as collections_abc
@@ -1541,7 +1535,6 @@ class LikeFunctionsTest(fixtures.TablesTest):
        col = self.tables.some_table.c.data
        self._test(col.startswith("ab%c"), {1, 2, 3, 4, 5, 6, 7, 8, 9, 10})

    @testing.requires.like_escapes
    def test_startswith_autoescape(self):
        col = self.tables.some_table.c.data
        self._test(col.startswith("ab%c", autoescape=True), {3})
@@ -1553,12 +1546,10 @@ class LikeFunctionsTest(fixtures.TablesTest):
            {1, 2, 3, 4, 5, 6, 7, 8, 9, 10},
        )

    @testing.requires.like_escapes
    def test_startswith_escape(self):
        col = self.tables.some_table.c.data
        self._test(col.startswith("ab##c", escape="#"), {7})

    @testing.requires.like_escapes
    def test_startswith_autoescape_escape(self):
        col = self.tables.some_table.c.data
        self._test(col.startswith("ab%c", autoescape=True, escape="#"), {3})
@@ -1574,17 +1565,14 @@ class LikeFunctionsTest(fixtures.TablesTest):
            col.endswith(literal_column("'e%fg'")), {1, 2, 3, 4, 5, 6, 7, 8, 9}
        )

    @testing.requires.like_escapes
    def test_endswith_autoescape(self):
        col = self.tables.some_table.c.data
        self._test(col.endswith("e%fg", autoescape=True), {6})

    @testing.requires.like_escapes
    def test_endswith_escape(self):
        col = self.tables.some_table.c.data
        self._test(col.endswith("e##fg", escape="#"), {9})

    @testing.requires.like_escapes
    def test_endswith_autoescape_escape(self):
        col = self.tables.some_table.c.data
        self._test(col.endswith("e%fg", autoescape=True, escape="#"), {6})
@@ -1594,17 +1582,14 @@ class LikeFunctionsTest(fixtures.TablesTest):
        col = self.tables.some_table.c.data
        self._test(col.contains("b%cde"), {1, 2, 3, 4, 5, 6, 7, 8, 9})

    @testing.requires.like_escapes
    def test_contains_autoescape(self):
        col = self.tables.some_table.c.data
        self._test(col.contains("b%cde", autoescape=True), {3})

    @testing.requires.like_escapes
    def test_contains_escape(self):
        col = self.tables.some_table.c.data
        self._test(col.contains("b##cde", escape="#"), {7})

    @testing.requires.like_escapes
    def test_contains_autoescape_escape(self):
        col = self.tables.some_table.c.data
        self._test(col.contains("b%cd", autoescape=True, escape="#"), {3})
@@ -1780,7 +1765,7 @@ class IdentityAutoincrementTest(fixtures.TablesTest):
        )

    def test_autoincrement_with_identity(self, connection):
        connection.execute(self.tables.tbl.insert(), {"desc": "row"})
        res = connection.execute(self.tables.tbl.insert(), {"desc": "row"})
        res = connection.execute(self.tables.tbl.select()).first()
        eq_(res, (1, "row"))

@@ -1895,114 +1880,3 @@ class IsOrIsNotDistinctFromTest(fixtures.TablesTest):
            len(result),
            expected_row_count_for_is_not,
        )


class WindowFunctionTest(fixtures.TablesTest):
    __requires__ = ("window_functions",)

    __backend__ = True

    @classmethod
    def define_tables(cls, metadata):
        Table(
            "some_table",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("col1", Integer),
            Column("col2", Integer),
        )

    @classmethod
    def insert_data(cls, connection):
        connection.execute(
            cls.tables.some_table.insert(),
            [{"id": i, "col1": i, "col2": i * 5} for i in range(1, 50)],
        )

    def test_window(self, connection):
        some_table = self.tables.some_table
        rows = connection.execute(
            select(
                func.max(some_table.c.col2).over(
                    order_by=[some_table.c.col1.desc()]
                )
            ).where(some_table.c.col1 < 20)
        ).all()

        eq_(rows, [(95,) for i in range(19)])
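        # only rows with col1 < 20 qualify (19 rows), and with col1 ordered
        # descending the window always includes the largest col2 value,
        # 19 * 5 == 95, so every row reports (95,)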

    def test_window_rows_between(self, connection):
        some_table = self.tables.some_table

        # note the rows are part of the cache key right now, not handled
        # as binds. this is issue #11515
        rows = connection.execute(
            select(
                func.max(some_table.c.col2).over(
                    order_by=[some_table.c.col1],
                    rows=(-5, 0),
                )
            )
        ).all()

        eq_(rows, [(i,) for i in range(5, 250, 5)])
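        # the frame covers the current row and the five preceding rows in
        # col1 order; since col2 grows with col1, the frame max is always the
        # current row's col2: 5, 10, ..., 245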


class BitwiseTest(fixtures.TablesTest):
    __backend__ = True
    run_inserts = run_deletes = "once"

    inserted_data = [{"a": i, "b": i + 1} for i in range(10)]

    @classmethod
    def define_tables(cls, metadata):
        Table("bitwise", metadata, Column("a", Integer), Column("b", Integer))

    @classmethod
    def insert_data(cls, connection):
        connection.execute(cls.tables.bitwise.insert(), cls.inserted_data)

    @testing.combinations(
        (
            lambda a: a.bitwise_xor(5),
            [i for i in range(10) if i != 5],
            testing.requires.supports_bitwise_xor,
        ),
        (
            lambda a: a.bitwise_or(1),
            list(range(10)),
            testing.requires.supports_bitwise_or,
        ),
        (
            lambda a: a.bitwise_and(4),
            list(range(4, 8)),
            testing.requires.supports_bitwise_and,
        ),
        (
            lambda a: (a - 2).bitwise_not(),
            [0],
            testing.requires.supports_bitwise_not,
        ),
        (
            lambda a: a.bitwise_lshift(1),
            list(range(1, 10)),
            testing.requires.supports_bitwise_shift,
        ),
        (
            lambda a: a.bitwise_rshift(2),
            list(range(4, 10)),
            testing.requires.supports_bitwise_shift,
        ),
        argnames="case, expected",
    )
    def test_bitwise(self, case, expected, connection):
        tbl = self.tables.bitwise

        a = tbl.c.a

        op = testing.resolve_lambda(case, a=a)

        stmt = select(tbl).where(op > 0).order_by(a)

        res = connection.execute(stmt).mappings().all()
        eq_(res, [self.inserted_data[i] for i in expected])
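        # e.g. for the bitwise_and(4) case, "a & 4 > 0" holds only for
        # a in 4..7, matching list(range(4, 8)) above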

@@ -1,9 +1,3 @@
# testing/suite/test_sequence.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors

from .. import config

@@ -1,9 +1,3 @@
# testing/suite/test_types.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors


@@ -32,7 +26,6 @@ from ... import case
from ... import cast
from ... import Date
from ... import DateTime
from ... import Enum
from ... import Float
from ... import Integer
from ... import Interval
@@ -299,7 +292,6 @@ class ArrayTest(_LiteralRoundTripFixture, fixtures.TablesTest):

class BinaryTest(_LiteralRoundTripFixture, fixtures.TablesTest):
    __backend__ = True
    __requires__ = ("binary_literals",)

    @classmethod
    def define_tables(cls, metadata):
@@ -1484,7 +1476,6 @@ class JSONTest(_LiteralRoundTripFixture, fixtures.TablesTest):

        return datatype, compare_value, p_s

    @testing.requires.legacy_unconditional_json_extract
    @_index_fixtures(False)
    def test_index_typed_access(self, datatype, value):
        data_table = self.tables.data_table
@@ -1506,7 +1497,6 @@ class JSONTest(_LiteralRoundTripFixture, fixtures.TablesTest):
            eq_(roundtrip, compare_value)
            is_(type(roundtrip), type(compare_value))

    @testing.requires.legacy_unconditional_json_extract
    @_index_fixtures(True)
    def test_index_typed_comparison(self, datatype, value):
        data_table = self.tables.data_table
@@ -1531,7 +1521,6 @@ class JSONTest(_LiteralRoundTripFixture, fixtures.TablesTest):
            # make sure we get a row even if value is None
            eq_(row, (compare_value,))

    @testing.requires.legacy_unconditional_json_extract
    @_index_fixtures(True)
    def test_path_typed_comparison(self, datatype, value):
        data_table = self.tables.data_table
@@ -1923,74 +1912,6 @@ class JSONLegacyStringCastIndexTest(
        )


class EnumTest(_LiteralRoundTripFixture, fixtures.TablesTest):
    __backend__ = True

    enum_values = "a", "b", "a%", "b%percent", "réveillé"

    datatype = Enum(*enum_values, name="myenum")

    @classmethod
    def define_tables(cls, metadata):
        Table(
            "enum_table",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("enum_data", cls.datatype),
        )

    @testing.combinations(*enum_values, argnames="data")
    def test_round_trip(self, data, connection):
        connection.execute(
            self.tables.enum_table.insert(), {"id": 1, "enum_data": data}
        )

        eq_(
            connection.scalar(
                select(self.tables.enum_table.c.enum_data).where(
                    self.tables.enum_table.c.id == 1
                )
            ),
            data,
        )

    def test_round_trip_executemany(self, connection):
        connection.execute(
            self.tables.enum_table.insert(),
            [
                {"id": 1, "enum_data": "b%percent"},
                {"id": 2, "enum_data": "réveillé"},
                {"id": 3, "enum_data": "b"},
                {"id": 4, "enum_data": "a%"},
            ],
        )

        eq_(
            connection.scalars(
                select(self.tables.enum_table.c.enum_data).order_by(
                    self.tables.enum_table.c.id
                )
            ).all(),
            ["b%percent", "réveillé", "b", "a%"],
        )

    @testing.requires.insert_executemany_returning
    def test_round_trip_executemany_returning(self, connection):
        result = connection.execute(
            self.tables.enum_table.insert().returning(
                self.tables.enum_table.c.enum_data
            ),
            [
                {"id": 1, "enum_data": "b%percent"},
                {"id": 2, "enum_data": "réveillé"},
                {"id": 3, "enum_data": "b"},
                {"id": 4, "enum_data": "a%"},
            ],
        )

        eq_(result.scalars().all(), ["b%percent", "réveillé", "b", "a%"])


class UuidTest(_LiteralRoundTripFixture, fixtures.TablesTest):
    __backend__ = True

@@ -2139,7 +2060,6 @@ __all__ = (
    "DateHistoricTest",
    "StringTest",
    "BooleanTest",
    "EnumTest",
    "UuidTest",
    "NativeUUIDTest",
)

@@ -1,9 +1,3 @@
# testing/suite/test_unicode_ddl.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors



@@ -1,9 +1,3 @@
# testing/suite/test_update_delete.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors

from .. import fixtures
@@ -93,11 +87,9 @@ class SimpleUpdateDeleteTest(fixtures.TablesTest):

        eq_(
            connection.execute(t.select().order_by(t.c.id)).fetchall(),
            (
                [(1, "d1"), (2, "d2_new"), (3, "d3")]
                if criteria.rows
                else [(1, "d1"), (2, "d2"), (3, "d3")]
            ),
            [(1, "d1"), (2, "d2_new"), (3, "d3")]
            if criteria.rows
            else [(1, "d1"), (2, "d2"), (3, "d3")],
        )

    @testing.variation("criteria", ["rows", "norows", "emptyin"])
@@ -128,11 +120,9 @@ class SimpleUpdateDeleteTest(fixtures.TablesTest):

        eq_(
            connection.execute(t.select().order_by(t.c.id)).fetchall(),
            (
                [(1, "d1"), (3, "d3")]
                if criteria.rows
                else [(1, "d1"), (2, "d2"), (3, "d3")]
            ),
            [(1, "d1"), (3, "d3")]
            if criteria.rows
            else [(1, "d1"), (2, "d2"), (3, "d3")],
        )



@@ -1,5 +1,5 @@
# testing/util.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -10,18 +10,13 @@
from __future__ import annotations

from collections import deque
from collections import namedtuple
import contextlib
import decimal
import gc
from itertools import chain
import pickle
import random
import sys
from sys import getsizeof
import time
import types
from typing import Any

from . import config
from . import mock
@@ -57,10 +52,15 @@ else:


def picklers():
    nt = namedtuple("picklers", ["loads", "dumps"])
    picklers = set()
    import pickle

    for protocol in range(-2, pickle.HIGHEST_PROTOCOL + 1):
        yield nt(pickle.loads, lambda d: pickle.dumps(d, protocol))
    picklers.add(pickle)

    # yes, this thing needs this much testing
    for pickle_ in picklers:
        for protocol in range(-2, pickle.HIGHEST_PROTOCOL + 1):
            yield pickle_.loads, lambda d: pickle_.dumps(d, protocol)
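
# illustrative (hypothetical) usage of the generator above: round-trip an
# object under every available pickle protocol
#
#     for loads, dumps in picklers():
#         eq_(loads(dumps(obj)), obj)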


def random_choices(population, k=1):
@@ -251,19 +251,18 @@ def flag_combinations(*combinations):
            dict(lazy=False, passive=True),
            dict(lazy=False, passive=True, raiseload=True),
        )
        def test_fn(lazy, passive, raiseload): ...


    would result in::

        @testing.combinations(
            ("", False, False, False),
            ("lazy", True, False, False),
            ("lazy_passive", True, True, False),
            ("lazy_passive", True, True, True),
            id_="iaaa",
            argnames="lazy,passive,raiseload",
            ('', False, False, False),
            ('lazy', True, False, False),
            ('lazy_passive', True, True, False),
            ('lazy_passive', True, True, True),
            id_='iaaa',
            argnames='lazy,passive,raiseload'
        )
        def test_fn(lazy, passive, raiseload): ...

    """

@@ -518,18 +517,3 @@ def count_cache_key_tuples(tup):
            if elem:
                stack = list(elem) + [sentinel] + stack
    return num_elements


@contextlib.contextmanager
def skip_if_timeout(seconds: float, cleanup: Any = None):

    now = time.time()
    yield
    sec = time.time() - now
    if sec > seconds:
        try:
            cleanup()
        finally:
            config.skip_test(
                f"test took too long ({sec:.4f} seconds > {seconds})"
            )
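
# illustrative (hypothetical) usage: skip, rather than fail, a test whose
# guarded block exceeded its time budget, running the cleanup first:
#
#     with skip_if_timeout(5.0, cleanup=connection.close):
#         run_expensive_queries(connection)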

@@ -1,5 +1,5 @@
# testing/warnings.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under