API refactor

2025-10-07 16:25:52 +09:00
parent 76d0d86211
commit 91c7e04474
1171 changed files with 81940 additions and 44117 deletions

ext/serializer.py

@@ -1,5 +1,5 @@
 # ext/serializer.py
-# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -28,13 +28,17 @@ when it is deserialized.
 Usage is nearly the same as that of the standard Python pickle module::
 
     from sqlalchemy.ext.serializer import loads, dumps
     metadata = MetaData(bind=some_engine)
     Session = scoped_session(sessionmaker())
 
     # ... define mappers
 
-    query = Session.query(MyClass).
-        filter(MyClass.somedata=='foo').order_by(MyClass.sortkey)
+    query = (
+        Session.query(MyClass)
+        .filter(MyClass.somedata == "foo")
+        .order_by(MyClass.sortkey)
+    )
 
     # pickle the query
     serialized = dumps(query)
@@ -42,7 +46,7 @@ Usage is nearly the same as that of the standard Python pickle module::
     # unpickle. Pass in metadata + scoped_session
     query2 = loads(serialized, metadata, Session)
 
-    print query2.all()
+    print(query2.all())
 
 Similar restrictions as when using raw pickle apply; mapped classes must
 themselves be pickleable, meaning they are importable from a module-level
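
The updated docstring example corresponds to a round trip like the following. This is a minimal, self-contained sketch against an in-memory SQLite database; the `User` model and the engine setup are illustrative, not part of this commit:

```python
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, scoped_session, sessionmaker
from sqlalchemy.ext.serializer import dumps, loads

Base = declarative_base()

class User(Base):
    __tablename__ = "user"
    id = Column(Integer, primary_key=True)
    name = Column(String(50))

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
Session = scoped_session(sessionmaker(bind=engine))

# Build and pickle a query, as in the docstring example.
query = (
    Session.query(User)
    .filter(User.name == "foo")
    .order_by(User.id)
)
serialized = dumps(query)

# Unpickle elsewhere: pass the MetaData and scoped_session so that table,
# column, and session tokens can be resolved on load.
query2 = loads(serialized, Base.metadata, Session)
print(query2.all())
```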
@@ -82,10 +86,9 @@ from ..util import b64encode
 __all__ = ["Serializer", "Deserializer", "dumps", "loads"]
 
 
-def Serializer(*args, **kw):
-    pickler = pickle.Pickler(*args, **kw)
-    def persistent_id(obj):
+class Serializer(pickle.Pickler):
+    def persistent_id(self, obj):
         # print "serializing:", repr(obj)
         if isinstance(obj, Mapper) and not obj.non_primary:
             id_ = "mapper:" + b64encode(pickle.dumps(obj.class_))
@@ -113,9 +116,6 @@ def Serializer(*args, **kw):
             return None
         return id_
-
-    pickler.persistent_id = persistent_id
-    return pickler
 
 
 our_ids = re.compile(
     r"(mapperprop|mapper|mapper_selectable|table|column|"
@@ -123,20 +123,23 @@ our_ids = re.compile(
 )
 
 
-def Deserializer(file, metadata=None, scoped_session=None, engine=None):
-    unpickler = pickle.Unpickler(file)
+class Deserializer(pickle.Unpickler):
+    def __init__(self, file, metadata=None, scoped_session=None, engine=None):
+        super().__init__(file)
+        self.metadata = metadata
+        self.scoped_session = scoped_session
+        self.engine = engine
 
-    def get_engine():
-        if engine:
-            return engine
-        elif scoped_session and scoped_session().bind:
-            return scoped_session().bind
-        elif metadata and metadata.bind:
-            return metadata.bind
+    def get_engine(self):
+        if self.engine:
+            return self.engine
+        elif self.scoped_session and self.scoped_session().bind:
+            return self.scoped_session().bind
         else:
             return None
 
-    def persistent_load(id_):
+    def persistent_load(self, id_):
         m = our_ids.match(str(id_))
         if not m:
             return None
@@ -157,20 +160,17 @@ def Deserializer(file, metadata=None, scoped_session=None, engine=None):
                 cls = pickle.loads(b64decode(mapper))
                 return class_mapper(cls).attrs[keyname]
             elif type_ == "table":
-                return metadata.tables[args]
+                return self.metadata.tables[args]
             elif type_ == "column":
                 table, colname = args.split(":")
-                return metadata.tables[table].c[colname]
+                return self.metadata.tables[table].c[colname]
             elif type_ == "session":
-                return scoped_session()
+                return self.scoped_session()
             elif type_ == "engine":
-                return get_engine()
+                return self.get_engine()
             else:
                 raise Exception("Unknown token: %s" % type_)
-
-    unpickler.persistent_load = persistent_load
-    return unpickler
 
 
 def dumps(obj, protocol=pickle.HIGHEST_PROTOCOL):
     buf = BytesIO()
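
The last hunk is truncated in this view, but with the new classes the module-level helpers presumably reduce to thin wrappers. A sketch consistent with the hunks above, not the verbatim file:

```python
def dumps(obj, protocol=pickle.HIGHEST_PROTOCOL):
    buf = BytesIO()
    # Serializer is a Pickler subclass, so it takes the same arguments.
    pickler = Serializer(buf, protocol)
    pickler.dump(obj)
    return buf.getvalue()

def loads(data, metadata=None, scoped_session=None, engine=None):
    buf = BytesIO(data)
    # The resolution targets now travel on the Unpickler subclass itself
    # instead of being captured in closures.
    unpickler = Deserializer(buf, metadata, scoped_session, engine)
    return unpickler.load()
```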