main commit

commit 537e7b363f (parent 91c7e04474)
Date: 2025-10-16 16:30:25 +09:00
1146 changed files with 45926 additions and 77196 deletions


@@ -104,7 +104,7 @@ class Context:
     def _get_custom_headers(self, *args, **kwargs):
         headers = {}
         headers.update(*args, **kwargs)
-        celery_keys = {*Context.__dict__.keys(), 'lang', 'task', 'argsrepr', 'kwargsrepr', 'compression'}
+        celery_keys = {*Context.__dict__.keys(), 'lang', 'task', 'argsrepr', 'kwargsrepr'}
         for key in celery_keys:
             headers.pop(key, None)
         if not headers:
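For reference, the helper touched by this hunk strips Celery's own protocol keys out of whatever was passed as headers, so only caller-supplied entries survive. A minimal standalone sketch of that idea (not Celery's actual code; the reserved-key set below is abridged and illustrative):

# Illustration only: drop protocol-level keys, keep custom headers.
RESERVED_KEYS = {'lang', 'task', 'id', 'root_id', 'parent_id',
                 'argsrepr', 'kwargsrepr', 'retries'}

def custom_headers(*args, **kwargs):
    headers = {}
    headers.update(*args, **kwargs)   # same update semantics as dict.update
    for key in RESERVED_KEYS:
        headers.pop(key, None)        # silently discard reserved keys
    return headers or None            # None when nothing custom remains

print(custom_headers({'task': 'proj.add', 'id': 'abc', 'x-tenant': 'acme'}))
# -> {'x-tenant': 'acme'}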
@@ -466,7 +466,7 @@ class Task:
             shadow (str): Override task name used in logs/monitoring.
                 Default is retrieved from :meth:`shadow_name`.
 
-            connection (kombu.Connection): Reuse existing broker connection
+            connection (kombu.Connection): Re-use existing broker connection
                 instead of acquiring one from the connection pool.
 
             retry (bool): If enabled sending of the task message will be
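The connection argument documented here lets a burst of sends share one broker connection instead of drawing from the pool each time. A minimal sketch, assuming a reachable broker and a hypothetical add task:

from celery import Celery

app = Celery('proj', broker='amqp://')   # assumed broker URL

@app.task
def add(x, y):
    return x + y

# Reuse a single broker connection for several apply_async calls.
with app.connection_for_write() as conn:
    for i in range(10):
        add.apply_async((i, i), connection=conn)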
@@ -535,8 +535,6 @@ class Task:
             publisher (kombu.Producer): Deprecated alias to ``producer``.
 
             headers (Dict): Message headers to be included in the message.
-                The headers can be used as an overlay for custom labeling
-                using the :ref:`canvas-stamping` feature.
 
         Returns:
             celery.result.AsyncResult: Promise of future evaluation.
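As the docstring above notes, per-call message headers can be attached and the call returns an AsyncResult. A short usage sketch, reusing the hypothetical add task from the previous example:

# Attach a custom header to a single send; the return value is an AsyncResult.
result = add.apply_async((2, 2), headers={'x-request-id': 'req-123'})
print(result.id)      # id of the pending task
print(result.state)   # e.g. 'PENDING' until a worker picks it up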
@@ -545,8 +543,6 @@ class Task:
             TypeError: If not enough arguments are passed, or too many
                 arguments are passed. Note that signature checks may
                 be disabled by specifying ``@task(typing=False)``.
-            ValueError: If soft_time_limit and time_limit both are set
-                but soft_time_limit is greater than time_limit
 
             kombu.exceptions.OperationalError: If a connection to the
                 transport cannot be made, or if the connection is lost.
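The TypeError documented above comes from Celery's call-time signature check, which the docstring says can be disabled per task with @task(typing=False). A small sketch, reusing the hypothetical app:

@app.task
def mul(x, y):
    return x * y

# With typing checks enabled (the default), a wrong arity fails locally,
# before any message is published:
#     mul.delay(1, 2, 3)   # raises TypeError

@app.task(typing=False)
def loose_mul(x, y):
    return x * y
# loose_mul.delay(1, 2, 3) is now accepted locally and only fails
# when the worker actually calls the function.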
@@ -554,9 +550,6 @@ class Task:
         Also supports all keyword arguments supported by
             :meth:`kombu.Producer.publish`.
         """
-        if self.soft_time_limit and self.time_limit and self.soft_time_limit > self.time_limit:
-            raise ValueError('soft_time_limit must be less than or equal to time_limit')
-
         if self.typing:
             try:
                 check_arguments = self.__header__
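The guard removed in this hunk validated, at publish time, that a task's soft_time_limit does not exceed its hard time_limit. A standalone sketch of the same check (mirroring the removed lines, not Celery's current code):

def check_time_limits(soft_time_limit, time_limit):
    # Soft limit must not exceed the hard limit when both are set.
    if soft_time_limit and time_limit and soft_time_limit > time_limit:
        raise ValueError('soft_time_limit must be less than or equal to time_limit')

check_time_limits(soft_time_limit=30, time_limit=60)     # ok
# check_time_limits(soft_time_limit=90, time_limit=60)   # would raise ValueError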
@@ -795,7 +788,6 @@ class Task:
 
         request = {
             'id': task_id,
-            'task': self.name,
             'retries': retries,
             'is_eager': True,
             'logfile': logfile,
@@ -832,7 +824,7 @@ class Task:
         if isinstance(retval, Retry) and retval.sig is not None:
             return retval.sig.apply(retries=retries + 1)
         state = states.SUCCESS if ret.info is None else ret.info.state
-        return EagerResult(task_id, retval, state, traceback=tb, name=self.name)
+        return EagerResult(task_id, retval, state, traceback=tb)
 
     def AsyncResult(self, task_id, **kwargs):
         """Get AsyncResult instance for the specified task.
@@ -962,20 +954,11 @@ class Task:
             root_id=self.request.root_id,
             replaced_task_nesting=replaced_task_nesting
         )
-
-        # If the replaced task is a chain, we want to set all of the chain tasks
-        # with the same replaced_task_nesting value to mark their replacement nesting level
-        if isinstance(sig, _chain):
-            for chain_task in maybe_list(sig.tasks) or []:
-                chain_task.set(replaced_task_nesting=replaced_task_nesting)
-
         # If the task being replaced is part of a chain, we need to re-create
         # it with the replacement signature - these subsequent tasks will
         # retain their original task IDs as well
         for t in reversed(self.request.chain or []):
-            chain_task = signature(t, app=self.app)
-            chain_task.set(replaced_task_nesting=replaced_task_nesting)
-            sig |= chain_task
+            sig |= signature(t, app=self.app)
         return self.on_replace(sig)
 
     def add_to_chord(self, sig, lazy=False):
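The hunk above changes how replace() re-links the rest of a chain onto the replacement signature; the user-facing pattern is unchanged. A minimal sketch with hypothetical tasks:

@app.task
def process(url):
    return f'processed {url}'

@app.task(bind=True)
def fetch_and_process(self, url):
    # Swap this task for another signature; tasks still queued after it in
    # the calling chain are appended to the replacement.
    raise self.replace(process.s(url))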
@@ -1116,7 +1099,7 @@ class Task:
         return result
 
     def push_request(self, *args, **kwargs):
-        self.request_stack.push(Context(*args, **{**self.request.__dict__, **kwargs}))
+        self.request_stack.push(Context(*args, **kwargs))
 
     def pop_request(self):
         self.request_stack.pop()
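push_request() and pop_request() shown here manage the per-task request stack; pushing a hand-built Context is a common way to exercise a bound task body directly, e.g. in tests. A hedged sketch, reusing the hypothetical app:

@app.task(bind=True)
def report(self):
    return {'id': self.request.id, 'retries': self.request.retries}

# Push a synthetic request, call the task body directly, then pop it.
report.push_request(id='manual-id', retries=2)
try:
    print(report.run())   # {'id': 'manual-id', 'retries': 2}
finally:
    report.pop_request()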