API refactor
2025-10-07 16:25:52 +09:00
parent 76d0d86211
commit 91c7e04474
1171 changed files with 81940 additions and 44117 deletions


@@ -1,5 +1,5 @@
# event/__init__.py
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under


@@ -1,13 +1,11 @@
# event/api.py
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Public API functions for the event system.
"""
"""Public API functions for the event system."""
from __future__ import annotations
from typing import Any
@@ -51,15 +49,14 @@ def listen(
from sqlalchemy import event
from sqlalchemy.schema import UniqueConstraint
def unique_constraint_name(const, table):
const.name = "uq_%s_%s" % (
table.name,
list(const.columns)[0].name
)
const.name = "uq_%s_%s" % (table.name, list(const.columns)[0].name)
event.listen(
UniqueConstraint,
"after_parent_attach",
unique_constraint_name)
UniqueConstraint, "after_parent_attach", unique_constraint_name
)
:param bool insert: The default behavior for event handlers is to append
the decorated user defined function to an internal list of registered
@@ -132,19 +129,17 @@ def listens_for(
The :func:`.listens_for` decorator is part of the primary interface for the
SQLAlchemy event system, documented at :ref:`event_toplevel`.
This function generally shares the same kwargs as :func:`.listens`.
This function generally shares the same kwargs as :func:`.listen`.
e.g.::
from sqlalchemy import event
from sqlalchemy.schema import UniqueConstraint
@event.listens_for(UniqueConstraint, "after_parent_attach")
def unique_constraint_name(const, table):
const.name = "uq_%s_%s" % (
table.name,
list(const.columns)[0].name
)
const.name = "uq_%s_%s" % (table.name, list(const.columns)[0].name)
A given function can also be invoked for only the first invocation
of the event using the ``once`` argument::
@@ -153,7 +148,6 @@ def listens_for(
def on_config():
do_config()
.. warning:: The ``once`` argument does not imply automatic de-registration
of the listener function after it has been invoked a first time; a
listener entry will remain associated with the target object.
@@ -189,6 +183,7 @@ def remove(target: Any, identifier: str, fn: Callable[..., Any]) -> None:
def my_listener_function(*arg):
pass
# ... it's removed like this
event.remove(SomeMappedClass, "before_insert", my_listener_function)
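The event/api.py hunks above are docstring and call-site reformatting (one-line docstring, black-style argument wrapping) plus a cross-reference fix, :func:`.listens` corrected to :func:`.listen`. For orientation, here is a minimal, self-contained sketch of the public API those docstrings describe, using the same after_parent_attach naming example; the table and column names are illustrative only.

from sqlalchemy import Column, Integer, MetaData, String, Table, UniqueConstraint, event

def unique_constraint_name(const, table):
    # same convention as the docstring example: uq_<table>_<first column>
    const.name = "uq_%s_%s" % (table.name, list(const.columns)[0].name)

# plain registration; the decorator form
# @event.listens_for(UniqueConstraint, "after_parent_attach") is equivalent,
# and once=True fires the listener a single time without de-registering it
# (see the warning above)
event.listen(UniqueConstraint, "after_parent_attach", unique_constraint_name)

metadata = MetaData()
t = Table(
    "user_account",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("email", String(50)),
    UniqueConstraint("email"),
)
uq = next(c for c in t.constraints if isinstance(c, UniqueConstraint))
print(uq.name)  # uq_user_account_email

# listeners registered with listen()/listens_for() can later be removed
event.remove(UniqueConstraint, "after_parent_attach", unique_constraint_name)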


@@ -1,5 +1,5 @@
# event/attr.py
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -391,20 +391,23 @@ class _EmptyListener(_InstanceLevelDispatch[_ET]):
class _MutexProtocol(Protocol):
def __enter__(self) -> bool:
...
def __enter__(self) -> bool: ...
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
...
) -> Optional[bool]: ...
class _CompoundListener(_InstanceLevelDispatch[_ET]):
__slots__ = "_exec_once_mutex", "_exec_once", "_exec_w_sync_once"
__slots__ = (
"_exec_once_mutex",
"_exec_once",
"_exec_w_sync_once",
"_is_asyncio",
)
_exec_once_mutex: _MutexProtocol
parent_listeners: Collection[_ListenerFnType]
@@ -412,11 +415,18 @@ class _CompoundListener(_InstanceLevelDispatch[_ET]):
_exec_once: bool
_exec_w_sync_once: bool
def __init__(self, *arg: Any, **kw: Any):
super().__init__(*arg, **kw)
self._is_asyncio = False
def _set_asyncio(self) -> None:
self._exec_once_mutex = AsyncAdaptedLock()
self._is_asyncio = True
def _memoized_attr__exec_once_mutex(self) -> _MutexProtocol:
return threading.Lock()
if self._is_asyncio:
return AsyncAdaptedLock()
else:
return threading.Lock()
def _exec_once_impl(
self, retry_on_exception: bool, *args: Any, **kw: Any
@@ -525,6 +535,7 @@ class _ListenerCollection(_CompoundListener[_ET]):
propagate: Set[_ListenerFnType]
def __init__(self, parent: _ClsLevelDispatch[_ET], target_cls: Type[_ET]):
super().__init__()
if target_cls not in parent._clslevel:
parent.update_subclass(target_cls)
self._exec_once = False
@@ -564,6 +575,9 @@ class _ListenerCollection(_CompoundListener[_ET]):
existing_listeners.extend(other_listeners)
if other._is_asyncio:
self._set_asyncio()
to_associate = other.propagate.union(other_listeners)
registry._stored_in_collection_multi(self, other, to_associate)
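The event/attr.py hunks do more than formatting: _CompoundListener now remembers whether it has been switched to asyncio mode, the lazily memoized exec-once mutex honors that flag, and the flag is propagated when a collection absorbs listeners from another (the existing_listeners.extend(...) path shown above). The following is a standalone sketch of that pattern with illustrative names, not SQLAlchemy's real classes; a placeholder stands in for AsyncAdaptedLock.

import threading

class FakeAsyncLock:
    # placeholder for SQLAlchemy's AsyncAdaptedLock; only the
    # context-manager shape matters for this sketch
    def __enter__(self):
        return True
    def __exit__(self, exc_type, exc_val, exc_tb):
        return None

class CompoundListenerSketch:
    def __init__(self):
        self._is_asyncio = False
        self._mutex = None  # created lazily, like the memoized attribute

    def _set_asyncio(self):
        # flipping the flag also installs the asyncio-friendly lock up front
        self._is_asyncio = True
        self._mutex = FakeAsyncLock()

    @property
    def exec_once_mutex(self):
        # lazy creation: choose the lock type that matches how the
        # collection will be driven (asyncio vs. plain threads)
        if self._mutex is None:
            self._mutex = FakeAsyncLock() if self._is_asyncio else threading.Lock()
        return self._mutex

    def update(self, other):
        # merging another collection propagates its asyncio-ness, mirroring
        # the new "if other._is_asyncio: self._set_asyncio()" branch above
        if other._is_asyncio:
            self._set_asyncio()

a, b = CompoundListenerSketch(), CompoundListenerSketch()
b._set_asyncio()
a.update(b)
print(type(a.exec_once_mutex).__name__)  # FakeAsyncLock, not threading.Lock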


@@ -1,5 +1,5 @@
# event/base.py
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -42,9 +42,9 @@ from .registry import _EventKey
from .. import util
from ..util.typing import Literal
_registrars: MutableMapping[
str, List[Type[_HasEventsDispatch[Any]]]
] = util.defaultdict(list)
_registrars: MutableMapping[str, List[Type[_HasEventsDispatch[Any]]]] = (
util.defaultdict(list)
)
def _is_event_name(name: str) -> bool:
@@ -191,13 +191,8 @@ class _Dispatch(_DispatchCommon[_ET]):
:class:`._Dispatch` objects.
"""
if "_joined_dispatch_cls" not in self.__class__.__dict__:
cls = type(
"Joined%s" % self.__class__.__name__,
(_JoinedDispatcher,),
{"__slots__": self._event_names},
)
self.__class__._joined_dispatch_cls = cls
assert "_joined_dispatch_cls" in self.__class__.__dict__
return self._joined_dispatch_cls(self, other)
def __reduce__(self) -> Union[str, Tuple[Any, ...]]:
@@ -240,8 +235,7 @@ class _HasEventsDispatch(Generic[_ET]):
if typing.TYPE_CHECKING:
def __getattr__(self, name: str) -> _InstanceLevelDispatch[_ET]:
...
def __getattr__(self, name: str) -> _InstanceLevelDispatch[_ET]: ...
def __init_subclass__(cls) -> None:
"""Intercept new Event subclasses and create associated _Dispatch
@@ -329,6 +323,51 @@ class _HasEventsDispatch(Generic[_ET]):
else:
dispatch_target_cls.dispatch = dispatcher(cls)
klass = type(
"Joined%s" % dispatch_cls.__name__,
(_JoinedDispatcher,),
{"__slots__": event_names},
)
dispatch_cls._joined_dispatch_cls = klass
# establish pickle capability by adding it to this module
globals()[klass.__name__] = klass
class _JoinedDispatcher(_DispatchCommon[_ET]):
"""Represent a connection between two _Dispatch objects."""
__slots__ = "local", "parent", "_instance_cls"
local: _DispatchCommon[_ET]
parent: _DispatchCommon[_ET]
_instance_cls: Optional[Type[_ET]]
def __init__(
self, local: _DispatchCommon[_ET], parent: _DispatchCommon[_ET]
):
self.local = local
self.parent = parent
self._instance_cls = self.local._instance_cls
def __reduce__(self) -> Any:
return (self.__class__, (self.local, self.parent))
def __getattr__(self, name: str) -> _JoinedListener[_ET]:
# Assign _JoinedListeners as attributes on demand
# to reduce startup time for new dispatch objects.
ls = getattr(self.local, name)
jl = _JoinedListener(self.parent, ls.name, ls)
setattr(self, ls.name, jl)
return jl
def _listen(self, event_key: _EventKey[_ET], **kw: Any) -> None:
return self.parent._listen(event_key, **kw)
@property
def _events(self) -> Type[_HasEventsDispatch[_ET]]:
return self.parent._events
class Events(_HasEventsDispatch[_ET]):
"""Define event listening functions for a particular target type."""
@@ -341,9 +380,11 @@ class Events(_HasEventsDispatch[_ET]):
return all(isinstance(target.dispatch, t) for t in types)
def dispatch_parent_is(t: Type[Any]) -> bool:
return isinstance(
cast("_JoinedDispatcher[_ET]", target.dispatch).parent, t
)
parent = cast("_JoinedDispatcher[_ET]", target.dispatch).parent
while isinstance(parent, _JoinedDispatcher):
parent = cast("_JoinedDispatcher[_ET]", parent).parent
return isinstance(parent, t)
# Mapper, ClassManager, Session override this to
# also accept classes, scoped_sessions, sessionmakers, etc.
@@ -383,38 +424,6 @@ class Events(_HasEventsDispatch[_ET]):
cls.dispatch._clear()
class _JoinedDispatcher(_DispatchCommon[_ET]):
"""Represent a connection between two _Dispatch objects."""
__slots__ = "local", "parent", "_instance_cls"
local: _DispatchCommon[_ET]
parent: _DispatchCommon[_ET]
_instance_cls: Optional[Type[_ET]]
def __init__(
self, local: _DispatchCommon[_ET], parent: _DispatchCommon[_ET]
):
self.local = local
self.parent = parent
self._instance_cls = self.local._instance_cls
def __getattr__(self, name: str) -> _JoinedListener[_ET]:
# Assign _JoinedListeners as attributes on demand
# to reduce startup time for new dispatch objects.
ls = getattr(self.local, name)
jl = _JoinedListener(self.parent, ls.name, ls)
setattr(self, ls.name, jl)
return jl
def _listen(self, event_key: _EventKey[_ET], **kw: Any) -> None:
return self.parent._listen(event_key, **kw)
@property
def _events(self) -> Type[_HasEventsDispatch[_ET]]:
return self.parent._events
class dispatcher(Generic[_ET]):
"""Descriptor used by target classes to
deliver the _Dispatch class at the class level
@@ -430,12 +439,10 @@ class dispatcher(Generic[_ET]):
@overload
def __get__(
self, obj: Literal[None], cls: Type[Any]
) -> Type[_Dispatch[_ET]]:
...
) -> Type[_Dispatch[_ET]]: ...
@overload
def __get__(self, obj: Any, cls: Type[Any]) -> _DispatchCommon[_ET]:
...
def __get__(self, obj: Any, cls: Type[Any]) -> _DispatchCommon[_ET]: ...
def __get__(self, obj: Any, cls: Type[Any]) -> Any:
if obj is None:
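The main behavioral change in event/base.py is that the generated Joined* dispatch class is now built eagerly when the dispatch machinery for a class is set up, rather than lazily the first time two dispatchers are joined; the generated class is published in the module's globals and _JoinedDispatcher gains a __reduce__ hook, so joined dispatchers can be pickled. dispatch_parent_is also walks up chained _JoinedDispatcher parents instead of checking only one level. A minimal sketch of the pickling trick, with illustrative names:

import pickle

class JoinedBase:
    # stands in for _JoinedDispatcher: holds the two joined objects and
    # tells pickle how to rebuild the instance from them
    def __init__(self, local, parent):
        self.local = local
        self.parent = parent

    def __reduce__(self):
        return (self.__class__, (self.local, self.parent))

# build the subclass once, up front, instead of lazily inside the join ...
klass = type("JoinedExampleDispatch", (JoinedBase,), {})
# ... and publish it under its own name so pickle can locate it by
# module + class name when unpickling
globals()[klass.__name__] = klass

obj = klass("local-dispatch", "parent-dispatch")
clone = pickle.loads(pickle.dumps(obj))
print(clone.local, clone.parent)  # local-dispatch parent-dispatch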


@@ -1,5 +1,5 @@
# event/legacy.py
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -147,9 +147,9 @@ def _standard_listen_example(
)
text %= {
"current_since": " (arguments as of %s)" % current_since
if current_since
else "",
"current_since": (
" (arguments as of %s)" % current_since if current_since else ""
),
"event_name": fn.__name__,
"has_kw_arguments": ", **kw" if dispatch_collection.has_kw else "",
"named_event_arguments": ", ".join(dispatch_collection.arg_names),
@@ -177,9 +177,9 @@ def _legacy_listen_examples(
% {
"since": since,
"event_name": fn.__name__,
"has_kw_arguments": " **kw"
if dispatch_collection.has_kw
else "",
"has_kw_arguments": (
" **kw" if dispatch_collection.has_kw else ""
),
"named_event_arguments": ", ".join(args),
"sample_target": sample_target,
}
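The event/legacy.py hunks are purely mechanical: conditional expressions used as dict values in the %-interpolation mappings are wrapped in parentheses, per black's current style, with no change to the rendered docstring text. A tiny illustrative sketch of that formatting (the template and field values are made up):

current_since = "1.4"
text = "The %(event_name)s hook%(current_since)s"
text %= {
    "event_name": "before_insert",
    # the value is a conditional expression; the added parentheses group it
    # with its key without changing what it evaluates to
    "current_since": (
        " (arguments as of %s)" % current_since if current_since else ""
    ),
}
print(text)  # The before_insert hook (arguments as of 1.4)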


@@ -1,5 +1,5 @@
# event/registry.py
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -66,9 +66,9 @@ _RefCollectionToListenerType = Dict[
"weakref.ref[_ListenerFnType]",
]
_key_to_collection: Dict[
_EventKeyTupleType, _RefCollectionToListenerType
] = collections.defaultdict(dict)
_key_to_collection: Dict[_EventKeyTupleType, _RefCollectionToListenerType] = (
collections.defaultdict(dict)
)
"""
Given an original listen() argument, can locate all
listener collections and the listener fn contained
@@ -154,7 +154,11 @@ def _removed_from_collection(
if owner_ref in _collection_to_key:
listener_to_key = _collection_to_key[owner_ref]
listener_to_key.pop(listen_ref)
# see #12216 - this guards against a removal that already occurred
# here. however, I cannot come up with a test that shows any negative
# side effects occurring from this removal happening, even though an
# event key may still be referenced from a clsleveldispatch here
listener_to_key.pop(listen_ref, None)
def _stored_in_collection_multi(
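The event/registry.py change swaps a bare dict.pop() for pop() with a default, so a listener reference that was already dropped (the #12216 scenario described in the new comment) no longer raises KeyError during a second cleanup pass. A small illustrative sketch:

listener_to_key = {"listener-ref": "event-key"}  # illustrative keys/values

listener_to_key.pop("listener-ref", None)  # first removal: entry disappears
listener_to_key.pop("listener-ref", None)  # second removal: a no-op, where a
                                           # bare pop() would raise KeyError
print(listener_to_key)  # {}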