@@ -29,7 +29,7 @@ from structlog._config import (
    wrap_logger,
)
from structlog._generic import BoundLogger
from structlog._native import make_filtering_bound_logger
from structlog._log_levels import make_filtering_bound_logger
from structlog._output import (
    BytesLogger,
    BytesLoggerFactory,
@@ -61,25 +61,23 @@ __all__ = [
    "BoundLoggerBase",
    "BytesLogger",
    "BytesLoggerFactory",
    "DropEvent",
    "PrintLogger",
    "PrintLoggerFactory",
    "ReturnLogger",
    "ReturnLoggerFactory",
    "WriteLogger",
    "WriteLoggerFactory",
    "configure",
    "configure_once",
    "configure",
    "contextvars",
    "dev",
    "getLogger",
    "DropEvent",
    "get_config",
    "get_context",
    "get_logger",
    "getLogger",
    "is_configured",
    "make_filtering_bound_logger",
    "PrintLogger",
    "PrintLoggerFactory",
    "processors",
    "reset_defaults",
    "ReturnLogger",
    "ReturnLoggerFactory",
    "stdlib",
    "testing",
    "threadlocal",
@@ -88,6 +86,8 @@ __all__ = [
    "types",
    "typing",
    "wrap_logger",
    "WriteLogger",
    "WriteLoggerFactory",
]

@@ -9,8 +9,6 @@ Logger wrapper and helper class.

from __future__ import annotations

import sys

from typing import Any, Iterable, Mapping, Sequence

from structlog.exceptions import DropEvent
@@ -18,12 +16,6 @@ from structlog.exceptions import DropEvent
from .typing import BindableLogger, Context, Processor, WrappedLogger


if sys.version_info >= (3, 11):
    from typing import Self
else:
    from typing_extensions import Self


class BoundLoggerBase:
    """
    Immutable context carrier.
@@ -59,7 +51,9 @@ class BoundLoggerBase:
|
||||
self._context = context
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<{self.__class__.__name__}(context={self._context!r}, processors={self._processors!r})>"
|
||||
return "<{}(context={!r}, processors={!r})>".format(
|
||||
self.__class__.__name__, self._context, self._processors
|
||||
)
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
try:
|
||||
@@ -70,7 +64,7 @@ class BoundLoggerBase:
|
||||
def __ne__(self, other: object) -> bool:
|
||||
return not self.__eq__(other)
|
||||
|
||||
def bind(self, **new_values: Any) -> Self:
|
||||
def bind(self, **new_values: Any) -> BoundLoggerBase:
|
||||
"""
|
||||
Return a new logger with *new_values* added to the existing ones.
|
||||
"""
|
||||
@@ -80,11 +74,12 @@ class BoundLoggerBase:
|
||||
self._context.__class__(self._context, **new_values),
|
||||
)
|
||||
|
||||
def unbind(self, *keys: str) -> Self:
|
||||
def unbind(self, *keys: str) -> BoundLoggerBase:
|
||||
"""
|
||||
Return a new logger with *keys* removed from the context.
|
||||
|
||||
Raises:
|
||||
|
||||
KeyError: If the key is not part of the context.
|
||||
"""
|
||||
bl = self.bind()
|
||||
@@ -93,7 +88,7 @@ class BoundLoggerBase:
|
||||
|
||||
return bl
|
||||
|
||||
def try_unbind(self, *keys: str) -> Self:
|
||||
def try_unbind(self, *keys: str) -> BoundLoggerBase:
|
||||
"""
|
||||
Like :meth:`unbind`, but best effort: missing keys are ignored.
|
||||
|
||||
@@ -105,13 +100,13 @@ class BoundLoggerBase:
|
||||
|
||||
return bl
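
# Illustrative usage sketch (not part of the commit) for the immutable-context
# API documented above; every bind/unbind call returns a new logger object.
import structlog

log = structlog.get_logger().bind(user="alice", request_id="r-1")
log = log.unbind("request_id")       # raises KeyError if the key is missing
log = log.try_unbind("nonexistent")  # best effort: missing keys are ignored
log.info("logged in")                # remaining context is rendered with the event
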
|
||||
|
||||
def new(self, **new_values: Any) -> Self:
|
||||
def new(self, **new_values: Any) -> BoundLoggerBase:
|
||||
"""
|
||||
Clear context and binds *new_values* using `bind`.
|
||||
|
||||
Only necessary with dict implementations that keep global state like
|
||||
those wrapped by `structlog.threadlocal.wrap_dict` when threads
|
||||
are reused.
|
||||
are re-used.
|
||||
"""
|
||||
self._context.clear()
|
||||
|
||||
@@ -128,7 +123,8 @@ class BoundLoggerBase:
|
||||
Call it to combine your *event* and *context* into an event_dict and
|
||||
process using the processor chain.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
method_name:
|
||||
The name of the logger method. Is passed into the processors.
|
||||
|
||||
@@ -141,6 +137,7 @@ class BoundLoggerBase:
|
||||
*event_kw* ``{"bar": 42}``.
|
||||
|
||||
Raises:
|
||||
|
||||
structlog.DropEvent: if log entry should be dropped.
|
||||
|
||||
ValueError:
|
||||
@@ -151,6 +148,7 @@ class BoundLoggerBase:
|
||||
`tuple` of ``(*args, **kw)``
|
||||
|
||||
.. note::
|
||||
|
||||
Despite the leading underscore, this method is available to custom wrapper classes.
|
||||
|
||||
See also `custom-wrappers`.
|
||||
@@ -178,7 +176,7 @@ class BoundLoggerBase:
|
||||
if isinstance(event_dict, tuple):
|
||||
# In this case we assume that the last processor returned a tuple
|
||||
# of ``(args, kwargs)`` and pass it right through.
|
||||
return event_dict
|
||||
return event_dict # type: ignore[return-value]
|
||||
|
||||
if isinstance(event_dict, dict):
|
||||
return (), event_dict
|
||||
@@ -199,7 +197,8 @@ class BoundLoggerBase:
|
||||
handling :exc:`structlog.DropEvent`, and finally calls *method_name* on
|
||||
:attr:`_logger` with the result.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
method_name:
|
||||
The name of the method that's going to get called. Technically
|
||||
it should be identical to the method the user called because it
|
||||
@@ -214,6 +213,7 @@ class BoundLoggerBase:
|
||||
*event_kw* ``{"bar": 42}``.
|
||||
|
||||
.. note::
|
||||
|
||||
Despite the leading underscore, this method is available to custom wrapper classes.
|
||||
|
||||
See also `custom-wrappers`.
|
||||
@@ -232,10 +232,12 @@ def get_context(bound_logger: BindableLogger) -> Context:
|
||||
The type of *bound_logger* and the type returned depend on your
|
||||
configuration.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
bound_logger: The bound logger whose context you want.
|
||||
|
||||
Returns:
|
||||
|
||||
The *actual* context from *bound_logger*. It is *not* copied first.
|
||||
|
||||
.. versionadded:: 20.2.0
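
# Hedged usage sketch for get_context() as documented above: it returns the
# logger's *live* context mapping, not a copy.
import structlog

log = structlog.get_logger().bind(peer="10.0.0.1")
ctx = structlog.get_context(log)
print(ctx)  # e.g. {'peer': '10.0.0.1'}
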
|
||||
|
||||
@@ -15,7 +15,7 @@ import warnings
|
||||
|
||||
from typing import Any, Callable, Iterable, Sequence, Type, cast
|
||||
|
||||
from ._native import make_filtering_bound_logger
|
||||
from ._log_levels import make_filtering_bound_logger
|
||||
from ._output import PrintLoggerFactory
|
||||
from .contextvars import merge_contextvars
|
||||
from .dev import ConsoleRenderer, _has_colors, set_exc_info
|
||||
@@ -63,9 +63,9 @@ class _Configuration:
|
||||
default_processors: Iterable[Processor] = _BUILTIN_DEFAULT_PROCESSORS[:]
|
||||
default_context_class: type[Context] = _BUILTIN_DEFAULT_CONTEXT_CLASS
|
||||
default_wrapper_class: Any = _BUILTIN_DEFAULT_WRAPPER_CLASS
|
||||
logger_factory: Callable[..., WrappedLogger] = (
|
||||
_BUILTIN_DEFAULT_LOGGER_FACTORY
|
||||
)
|
||||
logger_factory: Callable[
|
||||
..., WrappedLogger
|
||||
] = _BUILTIN_DEFAULT_LOGGER_FACTORY
|
||||
cache_logger_on_first_use: bool = _BUILTIN_CACHE_LOGGER_ON_FIRST_USE
|
||||
|
||||
|
||||
@@ -114,7 +114,8 @@ def get_logger(*args: Any, **initial_values: Any) -> Any:
|
||||
>>> log.info("hello", x=42)
|
||||
y=23 x=42 event='hello'
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
args:
|
||||
*Optional* positional arguments that are passed unmodified to the
|
||||
logger factory. Therefore it depends on the factory what they
|
||||
@@ -123,6 +124,7 @@ def get_logger(*args: Any, **initial_values: Any) -> Any:
|
||||
initial_values: Values that are used to pre-populate your contexts.
|
||||
|
||||
Returns:
|
||||
|
||||
A proxy that creates a correctly configured bound logger when
|
||||
necessary. The type of that bound logger depends on your configuration
|
||||
and is `structlog.BoundLogger` by default.
|
||||
@@ -167,7 +169,8 @@ def wrap_logger(
|
||||
In other words: selective overwriting of the defaults while keeping some
|
||||
*is* possible.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
initial_values: Values that are used to pre-populate your contexts.
|
||||
|
||||
logger_factory_args:
|
||||
@@ -175,6 +178,7 @@ def wrap_logger(
|
||||
the logger factory if not `None`.
|
||||
|
||||
Returns:
|
||||
|
||||
A proxy that creates a correctly configured bound logger when
|
||||
necessary.
|
||||
|
||||
@@ -213,7 +217,8 @@ def configure(
|
||||
|
||||
Use `reset_defaults` to undo your changes.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
processors: The processor chain. See :doc:`processors` for details.
|
||||
|
||||
wrapper_class:
|
||||
@@ -264,6 +269,7 @@ def configure_once(
|
||||
`configure_once` before.
|
||||
|
||||
Raises:
|
||||
|
||||
RuntimeWarning: if repeated configuration is attempted.
|
||||
"""
|
||||
if not _CONFIG.is_configured:
|
||||
@@ -309,11 +315,6 @@ class BoundLoggerLazyProxy:
|
||||
.. versionchanged:: 0.4.0 Added support for *logger_factory_args*.
|
||||
"""
|
||||
|
||||
# fulfill BindableLogger protocol without carrying accidental state
|
||||
@property
|
||||
def _context(self) -> dict[str, str]:
|
||||
return self._initial_values
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
logger: WrappedLogger | None,
|
||||
@@ -363,9 +364,7 @@ class BoundLoggerLazyProxy:
|
||||
# Looks like Protocols ignore definitions of __init__ so we have to
|
||||
# silence Mypy here.
|
||||
logger = cls(
|
||||
_logger,
|
||||
processors=procs,
|
||||
context=ctx, # type: ignore[call-arg]
|
||||
_logger, processors=procs, context=ctx # type: ignore[call-arg]
|
||||
)
|
||||
|
||||
def finalized_bind(**new_values: Any) -> BindableLogger:
|
||||
|
||||
@@ -10,9 +10,7 @@ import traceback
|
||||
|
||||
from io import StringIO
|
||||
from types import FrameType
|
||||
from typing import Callable
|
||||
|
||||
from .contextvars import _ASYNC_CALLING_STACK
|
||||
from .typing import ExcInfo
|
||||
|
||||
|
||||
@@ -22,6 +20,9 @@ def _format_exception(exc_info: ExcInfo) -> str:
|
||||
|
||||
Shamelessly stolen from stdlib's logging module.
|
||||
"""
|
||||
if exc_info == (None, None, None): # type: ignore[comparison-overlap]
|
||||
return "MISSING"
|
||||
|
||||
sio = StringIO()
|
||||
|
||||
traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], None, sio)
|
||||
@@ -35,27 +36,23 @@ def _format_exception(exc_info: ExcInfo) -> str:
|
||||
|
||||
def _find_first_app_frame_and_name(
|
||||
additional_ignores: list[str] | None = None,
|
||||
*,
|
||||
_getframe: Callable[[], FrameType] = sys._getframe,
|
||||
) -> tuple[FrameType, str]:
|
||||
"""
|
||||
Remove all intra-structlog calls and return the relevant app frame.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
additional_ignores:
|
||||
Additional names with which the first frame must not start.
|
||||
|
||||
_getframe:
|
||||
Callable to find current frame. Only for testing to avoid
|
||||
monkeypatching of sys._getframe.
|
||||
|
||||
Returns:
|
||||
|
||||
tuple of (frame, name)
|
||||
"""
|
||||
ignores = tuple(["structlog"] + (additional_ignores or []))
|
||||
f = _ASYNC_CALLING_STACK.get(_getframe())
|
||||
ignores = ["structlog"] + (additional_ignores or [])
|
||||
f = sys._getframe()
|
||||
name = f.f_globals.get("__name__") or "?"
|
||||
while name.startswith(ignores):
|
||||
while any(tuple(name.startswith(i) for i in ignores)):
|
||||
if f.f_back is None:
|
||||
name = "?"
|
||||
break
|
||||
|
||||
@@ -9,9 +9,15 @@ Extracted log level data used by both stdlib and native log level filters.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
import asyncio
|
||||
import contextvars
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from .typing import EventDict
|
||||
from typing import Any, Callable
|
||||
|
||||
from ._base import BoundLoggerBase
|
||||
from .typing import EventDict, FilteringBoundLogger
|
||||
|
||||
|
||||
# Adapted from the stdlib
|
||||
@@ -24,7 +30,7 @@ INFO = 20
|
||||
DEBUG = 10
|
||||
NOTSET = 0
|
||||
|
||||
NAME_TO_LEVEL = {
|
||||
_NAME_TO_LEVEL = {
|
||||
"critical": CRITICAL,
|
||||
"exception": ERROR,
|
||||
"error": ERROR,
|
||||
@@ -35,31 +41,15 @@ NAME_TO_LEVEL = {
|
||||
"notset": NOTSET,
|
||||
}
|
||||
|
||||
LEVEL_TO_NAME = {
|
||||
_LEVEL_TO_NAME = {
|
||||
v: k
|
||||
for k, v in NAME_TO_LEVEL.items()
|
||||
for k, v in _NAME_TO_LEVEL.items()
|
||||
if k not in ("warn", "exception", "notset")
|
||||
}
|
||||
|
||||
# Keep around for backwards-compatibility in case someone imported them.
|
||||
_LEVEL_TO_NAME = LEVEL_TO_NAME
|
||||
_NAME_TO_LEVEL = NAME_TO_LEVEL
|
||||
|
||||
|
||||
def map_method_name(method_name: str) -> str:
|
||||
# warn is just a deprecated alias in the stdlib.
|
||||
if method_name == "warn":
|
||||
return "warning"
|
||||
|
||||
# Calling exception("") is the same as error("", exc_info=True)
|
||||
if method_name == "exception":
|
||||
return "error"
|
||||
|
||||
return method_name
|
||||
|
||||
|
||||
def add_log_level(
|
||||
logger: Any, method_name: str, event_dict: EventDict
|
||||
logger: logging.Logger, method_name: str, event_dict: EventDict
|
||||
) -> EventDict:
|
||||
"""
|
||||
Add the log level to the event dict under the ``level`` key.
|
||||
@@ -72,10 +62,189 @@ def add_log_level(
|
||||
.. versionchanged:: 20.2.0
|
||||
Importable from `structlog.processors` (additionally to
|
||||
`structlog.stdlib`).
|
||||
.. versionchanged:: 24.1.0
|
||||
Added mapping from "exception" to "error"
|
||||
"""
|
||||
if method_name == "warn":
|
||||
# The stdlib has an alias
|
||||
method_name = "warning"
|
||||
|
||||
event_dict["level"] = map_method_name(method_name)
|
||||
event_dict["level"] = method_name
|
||||
|
||||
return event_dict
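
# Sketch of add_log_level in a processor chain (public structlog API); the
# processor above stores the normalized method name under the "level" key.
import structlog

structlog.configure(
    processors=[
        structlog.processors.add_log_level,
        structlog.processors.JSONRenderer(),
    ]
)
structlog.get_logger().warning("disk almost full")
# emits something like: {"event": "disk almost full", "level": "warning"}
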
|
||||
|
||||
|
||||
def _nop(self: Any, event: str, *args: Any, **kw: Any) -> Any:
|
||||
return None
|
||||
|
||||
|
||||
async def _anop(self: Any, event: str, *args: Any, **kw: Any) -> Any:
|
||||
return None
|
||||
|
||||
|
||||
def exception(
|
||||
self: FilteringBoundLogger, event: str, *args: Any, **kw: Any
|
||||
) -> Any:
|
||||
kw.setdefault("exc_info", True)
|
||||
|
||||
return self.error(event, *args, **kw)
|
||||
|
||||
|
||||
async def aexception(
|
||||
self: FilteringBoundLogger, event: str, *args: Any, **kw: Any
|
||||
) -> Any:
|
||||
# Exception info has to be extracted this early, because it is no longer
|
||||
# available once control is passed to the executor.
|
||||
if kw.get("exc_info", True) is True:
|
||||
kw["exc_info"] = sys.exc_info()
|
||||
|
||||
ctx = contextvars.copy_context()
|
||||
return await asyncio.get_running_loop().run_in_executor(
|
||||
None,
|
||||
lambda: ctx.run(lambda: self.error(event, *args, **kw)),
|
||||
)
|
||||
|
||||
|
||||
def make_filtering_bound_logger(min_level: int) -> type[FilteringBoundLogger]:
|
||||
"""
|
||||
Create a new `FilteringBoundLogger` that only logs *min_level* or higher.
|
||||
|
||||
The logger is optimized such that log levels below *min_level* only consist
|
||||
of a ``return None``.
|
||||
|
||||
All familiar log methods are present, with async variants of each that are
|
||||
prefixed by an ``a``. Therefore, the async version of ``log.info("hello")``
|
||||
is ``await log.ainfo("hello")``.
|
||||
|
||||
Additionally it has a ``log(self, level: int, **kw: Any)`` method to mirror
|
||||
`logging.Logger.log` and `structlog.stdlib.BoundLogger.log`.
|
||||
|
||||
Compared to using *structlog*'s standard library integration and the
|
||||
`structlog.stdlib.filter_by_level` processor:
|
||||
|
||||
- It's faster because once the logger is built at program start; it's a
|
||||
static class.
|
||||
- For the same reason you can't change the log level once configured. Use
|
||||
the dynamic approach of `standard-library` instead, if you need this
|
||||
feature.
|
||||
- You *can* have (much) more fine-grained filtering by :ref:`writing a
|
||||
simple processor <finer-filtering>`.
|
||||
|
||||
Arguments:
|
||||
|
||||
min_level:
|
||||
The log level as an integer. You can use the constants from
|
||||
`logging` like ``logging.INFO`` or pass the values directly. See
|
||||
`this table from the logging docs
|
||||
<https://docs.python.org/3/library/logging.html#levels>`_ for
|
||||
possible values.
|
||||
|
||||
.. versionadded:: 20.2.0
|
||||
.. versionchanged:: 21.1.0 The returned loggers are now pickleable.
|
||||
.. versionadded:: 20.1.0 The ``log()`` method.
|
||||
.. versionadded:: 22.2.0
|
||||
Async variants ``alog()``, ``adebug()``, ``ainfo()``, and so forth.
|
||||
"""
|
||||
|
||||
return _LEVEL_TO_FILTERING_LOGGER[min_level]
|
||||
|
||||
|
||||
def _make_filtering_bound_logger(min_level: int) -> type[FilteringBoundLogger]:
|
||||
"""
|
||||
Create a new `FilteringBoundLogger` that only logs *min_level* or higher.
|
||||
|
||||
The logger is optimized such that log levels below *min_level* only consist
|
||||
of a ``return None``.
|
||||
"""
|
||||
|
||||
def make_method(
|
||||
level: int,
|
||||
) -> tuple[Callable[..., Any], Callable[..., Any]]:
|
||||
if level < min_level:
|
||||
return _nop, _anop
|
||||
|
||||
name = _LEVEL_TO_NAME[level]
|
||||
|
||||
def meth(self: Any, event: str, *args: Any, **kw: Any) -> Any:
|
||||
if not args:
|
||||
return self._proxy_to_logger(name, event, **kw)
|
||||
|
||||
return self._proxy_to_logger(name, event % args, **kw)
|
||||
|
||||
async def ameth(self: Any, event: str, *args: Any, **kw: Any) -> Any:
|
||||
if args:
|
||||
event = event % args
|
||||
|
||||
ctx = contextvars.copy_context()
|
||||
await asyncio.get_running_loop().run_in_executor(
|
||||
None,
|
||||
lambda: ctx.run(
|
||||
lambda: self._proxy_to_logger(name, event, **kw)
|
||||
),
|
||||
)
|
||||
|
||||
meth.__name__ = name
|
||||
ameth.__name__ = f"a{name}"
|
||||
|
||||
return meth, ameth
|
||||
|
||||
def log(self: Any, level: int, event: str, *args: Any, **kw: Any) -> Any:
|
||||
if level < min_level:
|
||||
return None
|
||||
name = _LEVEL_TO_NAME[level]
|
||||
|
||||
if not args:
|
||||
return self._proxy_to_logger(name, event, **kw)
|
||||
|
||||
return self._proxy_to_logger(name, event % args, **kw)
|
||||
|
||||
async def alog(
|
||||
self: Any, level: int, event: str, *args: Any, **kw: Any
|
||||
) -> Any:
|
||||
if level < min_level:
|
||||
return None
|
||||
name = _LEVEL_TO_NAME[level]
|
||||
if args:
|
||||
event = event % args
|
||||
|
||||
ctx = contextvars.copy_context()
|
||||
return await asyncio.get_running_loop().run_in_executor(
|
||||
None,
|
||||
lambda: ctx.run(lambda: self._proxy_to_logger(name, event, **kw)),
|
||||
)
|
||||
|
||||
meths: dict[str, Callable[..., Any]] = {"log": log, "alog": alog}
|
||||
for lvl, name in _LEVEL_TO_NAME.items():
|
||||
meths[name], meths[f"a{name}"] = make_method(lvl)
|
||||
|
||||
meths["exception"] = exception
|
||||
meths["aexception"] = aexception
|
||||
meths["fatal"] = meths["error"]
|
||||
meths["afatal"] = meths["aerror"]
|
||||
meths["warn"] = meths["warning"]
|
||||
meths["awarn"] = meths["awarning"]
|
||||
meths["msg"] = meths["info"]
|
||||
meths["amsg"] = meths["ainfo"]
|
||||
|
||||
return type(
|
||||
"BoundLoggerFilteringAt%s"
|
||||
% (_LEVEL_TO_NAME.get(min_level, "Notset").capitalize()),
|
||||
(BoundLoggerBase,),
|
||||
meths,
|
||||
)
|
||||
|
||||
|
||||
# Pre-create all possible filters to make them pickleable.
|
||||
BoundLoggerFilteringAtNotset = _make_filtering_bound_logger(NOTSET)
|
||||
BoundLoggerFilteringAtDebug = _make_filtering_bound_logger(DEBUG)
|
||||
BoundLoggerFilteringAtInfo = _make_filtering_bound_logger(INFO)
|
||||
BoundLoggerFilteringAtWarning = _make_filtering_bound_logger(WARNING)
|
||||
BoundLoggerFilteringAtError = _make_filtering_bound_logger(ERROR)
|
||||
BoundLoggerFilteringAtCritical = _make_filtering_bound_logger(CRITICAL)
|
||||
|
||||
_LEVEL_TO_FILTERING_LOGGER = {
|
||||
CRITICAL: BoundLoggerFilteringAtCritical,
|
||||
ERROR: BoundLoggerFilteringAtError,
|
||||
WARNING: BoundLoggerFilteringAtWarning,
|
||||
INFO: BoundLoggerFilteringAtInfo,
|
||||
DEBUG: BoundLoggerFilteringAtDebug,
|
||||
NOTSET: BoundLoggerFilteringAtNotset,
|
||||
}
|
||||
|
||||
@@ -1,248 +0,0 @@
|
||||
# SPDX-License-Identifier: MIT OR Apache-2.0
|
||||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the MIT License. See the LICENSE file in the root of this
|
||||
# repository for complete details.
|
||||
|
||||
"""
|
||||
structlog's native high-performance loggers.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import contextvars
|
||||
import sys
|
||||
|
||||
from typing import Any, Callable
|
||||
|
||||
from ._base import BoundLoggerBase
|
||||
from ._log_levels import (
|
||||
CRITICAL,
|
||||
DEBUG,
|
||||
ERROR,
|
||||
INFO,
|
||||
LEVEL_TO_NAME,
|
||||
NAME_TO_LEVEL,
|
||||
NOTSET,
|
||||
WARNING,
|
||||
)
|
||||
from .contextvars import _ASYNC_CALLING_STACK
|
||||
from .typing import FilteringBoundLogger
|
||||
|
||||
|
||||
def _nop(self: Any, event: str, *args: Any, **kw: Any) -> Any:
|
||||
return None
|
||||
|
||||
|
||||
async def _anop(self: Any, event: str, *args: Any, **kw: Any) -> Any:
|
||||
return None
|
||||
|
||||
|
||||
def exception(
|
||||
self: FilteringBoundLogger, event: str, *args: Any, **kw: Any
|
||||
) -> Any:
|
||||
kw.setdefault("exc_info", True)
|
||||
|
||||
return self.error(event, *args, **kw)
|
||||
|
||||
|
||||
async def aexception(
|
||||
self: FilteringBoundLogger, event: str, *args: Any, **kw: Any
|
||||
) -> Any:
|
||||
"""
|
||||
.. versionchanged:: 23.3.0
|
||||
Callsite parameters are now also collected under asyncio.
|
||||
"""
|
||||
# Exception info has to be extracted this early, because it is no longer
|
||||
# available once control is passed to the executor.
|
||||
if kw.get("exc_info", True) is True:
|
||||
kw["exc_info"] = sys.exc_info()
|
||||
|
||||
scs_token = _ASYNC_CALLING_STACK.set(sys._getframe().f_back) # type: ignore[arg-type]
|
||||
ctx = contextvars.copy_context()
|
||||
try:
|
||||
runner = await asyncio.get_running_loop().run_in_executor(
|
||||
None,
|
||||
lambda: ctx.run(lambda: self.error(event, *args, **kw)),
|
||||
)
|
||||
finally:
|
||||
_ASYNC_CALLING_STACK.reset(scs_token)
|
||||
|
||||
return runner
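
# Hedged sketch of how the exception helpers above surface in application code;
# log.exception(...) is equivalent to log.error(..., exc_info=True).
import structlog

log = structlog.get_logger()

def handler() -> None:
    try:
        1 / 0
    except ZeroDivisionError:
        log.exception("division failed")  # traceback rendered by the processors
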
|
||||
|
||||
|
||||
def make_filtering_bound_logger(
|
||||
min_level: int | str,
|
||||
) -> type[FilteringBoundLogger]:
|
||||
"""
|
||||
Create a new `FilteringBoundLogger` that only logs *min_level* or higher.
|
||||
|
||||
The logger is optimized such that log levels below *min_level* only consist
|
||||
of a ``return None``.
|
||||
|
||||
All familiar log methods are present, with async variants of each that are
|
||||
prefixed by an ``a``. Therefore, the async version of ``log.info("hello")``
|
||||
is ``await log.ainfo("hello")``.
|
||||
|
||||
Additionally it has a ``log(self, level: int, **kw: Any)`` method to mirror
|
||||
`logging.Logger.log` and `structlog.stdlib.BoundLogger.log`.
|
||||
|
||||
Compared to using *structlog*'s standard library integration and the
|
||||
`structlog.stdlib.filter_by_level` processor:
|
||||
|
||||
- It's faster because once the logger is built at program start; it's a
|
||||
static class.
|
||||
- For the same reason you can't change the log level once configured. Use
|
||||
the dynamic approach of `standard-library` instead, if you need this
|
||||
feature.
|
||||
- You *can* have (much) more fine-grained filtering by :ref:`writing a
|
||||
simple processor <finer-filtering>`.
|
||||
|
||||
Args:
|
||||
min_level:
|
||||
The log level as an integer. You can use the constants from
|
||||
`logging` like ``logging.INFO`` or pass the values directly. See
|
||||
`this table from the logging docs
|
||||
<https://docs.python.org/3/library/logging.html#levels>`_ for
|
||||
possible values.
|
||||
|
||||
If you pass a string, it must be one of: ``critical``, ``error``,
|
||||
``warning``, ``info``, ``debug``, ``notset`` (upper/lower case
|
||||
doesn't matter).
|
||||
|
||||
.. versionadded:: 20.2.0
|
||||
.. versionchanged:: 21.1.0 The returned loggers are now pickleable.
|
||||
.. versionadded:: 20.1.0 The ``log()`` method.
|
||||
.. versionadded:: 22.2.0
|
||||
Async variants ``alog()``, ``adebug()``, ``ainfo()``, and so forth.
|
||||
.. versionchanged:: 25.1.0 *min_level* can now be a string.
|
||||
"""
|
||||
if isinstance(min_level, str):
|
||||
min_level = NAME_TO_LEVEL[min_level.lower()]
|
||||
|
||||
return LEVEL_TO_FILTERING_LOGGER[min_level]
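
# Usage sketch for the factory above (public structlog API); per the docstring,
# a level name string such as "info" is also accepted since 25.1.0.
import logging
import structlog

structlog.configure(
    wrapper_class=structlog.make_filtering_bound_logger(logging.INFO),
)

log = structlog.get_logger()
log.debug("dropped")         # below INFO: the method body is just `return None`
log.info("kept", answer=42)  # async twin: `await log.ainfo("kept", answer=42)`
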
|
||||
|
||||
|
||||
def _make_filtering_bound_logger(min_level: int) -> type[FilteringBoundLogger]:
|
||||
"""
|
||||
Create a new `FilteringBoundLogger` that only logs *min_level* or higher.
|
||||
|
||||
The logger is optimized such that log levels below *min_level* only consist
|
||||
of a ``return None``.
|
||||
"""
|
||||
|
||||
def make_method(
|
||||
level: int,
|
||||
) -> tuple[Callable[..., Any], Callable[..., Any]]:
|
||||
if level < min_level:
|
||||
return _nop, _anop
|
||||
|
||||
name = LEVEL_TO_NAME[level]
|
||||
|
||||
def meth(self: Any, event: str, *args: Any, **kw: Any) -> Any:
|
||||
if not args:
|
||||
return self._proxy_to_logger(name, event, **kw)
|
||||
|
||||
return self._proxy_to_logger(name, event % args, **kw)
|
||||
|
||||
async def ameth(self: Any, event: str, *args: Any, **kw: Any) -> Any:
|
||||
"""
|
||||
.. versionchanged:: 23.3.0
|
||||
Callsite parameters are now also collected under asyncio.
|
||||
"""
|
||||
if args:
|
||||
event = event % args
|
||||
|
||||
scs_token = _ASYNC_CALLING_STACK.set(sys._getframe().f_back) # type: ignore[arg-type]
|
||||
ctx = contextvars.copy_context()
|
||||
try:
|
||||
await asyncio.get_running_loop().run_in_executor(
|
||||
None,
|
||||
lambda: ctx.run(
|
||||
lambda: self._proxy_to_logger(name, event, **kw)
|
||||
),
|
||||
)
|
||||
finally:
|
||||
_ASYNC_CALLING_STACK.reset(scs_token)
|
||||
|
||||
meth.__name__ = name
|
||||
ameth.__name__ = f"a{name}"
|
||||
|
||||
return meth, ameth
|
||||
|
||||
def log(self: Any, level: int, event: str, *args: Any, **kw: Any) -> Any:
|
||||
if level < min_level:
|
||||
return None
|
||||
name = LEVEL_TO_NAME[level]
|
||||
|
||||
if not args:
|
||||
return self._proxy_to_logger(name, event, **kw)
|
||||
|
||||
return self._proxy_to_logger(name, event % args, **kw)
|
||||
|
||||
async def alog(
|
||||
self: Any, level: int, event: str, *args: Any, **kw: Any
|
||||
) -> Any:
|
||||
"""
|
||||
.. versionchanged:: 23.3.0
|
||||
Callsite parameters are now also collected under asyncio.
|
||||
"""
|
||||
if level < min_level:
|
||||
return None
|
||||
name = LEVEL_TO_NAME[level]
|
||||
if args:
|
||||
event = event % args
|
||||
|
||||
scs_token = _ASYNC_CALLING_STACK.set(sys._getframe().f_back) # type: ignore[arg-type]
|
||||
ctx = contextvars.copy_context()
|
||||
try:
|
||||
runner = await asyncio.get_running_loop().run_in_executor(
|
||||
None,
|
||||
lambda: ctx.run(
|
||||
lambda: self._proxy_to_logger(name, event, **kw)
|
||||
),
|
||||
)
|
||||
finally:
|
||||
_ASYNC_CALLING_STACK.reset(scs_token)
|
||||
return runner
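
# Hedged sketch of the async variants wired up above (ainfo, alog, ...); they
# run the processor chain in the default executor. Assumes the default
# filtering bound logger is configured as the wrapper class.
import asyncio
import logging
import structlog

async def main() -> None:
    log = structlog.get_logger()
    await log.ainfo("async hello", worker=1)
    await log.alog(logging.WARNING, "slow request took %d ms", 1500)

asyncio.run(main())
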
|
||||
|
||||
meths: dict[str, Callable[..., Any]] = {"log": log, "alog": alog}
|
||||
for lvl, name in LEVEL_TO_NAME.items():
|
||||
meths[name], meths[f"a{name}"] = make_method(lvl)
|
||||
|
||||
meths["exception"] = exception
|
||||
meths["aexception"] = aexception
|
||||
meths["fatal"] = meths["critical"]
|
||||
meths["afatal"] = meths["acritical"]
|
||||
meths["warn"] = meths["warning"]
|
||||
meths["awarn"] = meths["awarning"]
|
||||
meths["msg"] = meths["info"]
|
||||
meths["amsg"] = meths["ainfo"]
|
||||
|
||||
# Introspection
|
||||
meths["is_enabled_for"] = lambda self, level: level >= min_level
|
||||
meths["get_effective_level"] = lambda self: min_level
|
||||
|
||||
return type(
|
||||
f"BoundLoggerFilteringAt{LEVEL_TO_NAME.get(min_level, 'Notset').capitalize()}",
|
||||
(BoundLoggerBase,),
|
||||
meths,
|
||||
)
|
||||
|
||||
|
||||
# Pre-create all possible filters to make them pickleable.
|
||||
BoundLoggerFilteringAtNotset = _make_filtering_bound_logger(NOTSET)
|
||||
BoundLoggerFilteringAtDebug = _make_filtering_bound_logger(DEBUG)
|
||||
BoundLoggerFilteringAtInfo = _make_filtering_bound_logger(INFO)
|
||||
BoundLoggerFilteringAtWarning = _make_filtering_bound_logger(WARNING)
|
||||
BoundLoggerFilteringAtError = _make_filtering_bound_logger(ERROR)
|
||||
BoundLoggerFilteringAtCritical = _make_filtering_bound_logger(CRITICAL)
|
||||
|
||||
LEVEL_TO_FILTERING_LOGGER = {
|
||||
CRITICAL: BoundLoggerFilteringAtCritical,
|
||||
ERROR: BoundLoggerFilteringAtError,
|
||||
WARNING: BoundLoggerFilteringAtWarning,
|
||||
INFO: BoundLoggerFilteringAtInfo,
|
||||
DEBUG: BoundLoggerFilteringAtDebug,
|
||||
NOTSET: BoundLoggerFilteringAtNotset,
|
||||
}
|
||||
@@ -17,6 +17,8 @@ from pickle import PicklingError
|
||||
from sys import stderr, stdout
|
||||
from typing import IO, Any, BinaryIO, TextIO
|
||||
|
||||
from structlog._utils import until_not_interrupted
|
||||
|
||||
|
||||
WRITE_LOCKS: dict[IO[Any], threading.Lock] = {}
|
||||
|
||||
@@ -34,7 +36,8 @@ class PrintLogger:
|
||||
"""
|
||||
Print events into a file.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
file: File to print to. (default: `sys.stdout`)
|
||||
|
||||
>>> from structlog import PrintLogger
|
||||
@@ -107,7 +110,7 @@ class PrintLogger:
|
||||
"""
|
||||
f = self._file if self._file is not stdout else None
|
||||
with self._lock:
|
||||
print(message, file=f, flush=True)
|
||||
until_not_interrupted(print, message, file=f, flush=True)
|
||||
|
||||
log = debug = info = warn = warning = msg
|
||||
fatal = failure = err = error = critical = exception = msg
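
# Minimal usage sketch: PrintLogger via its factory (public API).
import structlog

structlog.configure(logger_factory=structlog.PrintLoggerFactory())
structlog.get_logger().info("printed to stdout", request_id="abc123")
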
|
||||
@@ -119,7 +122,8 @@ class PrintLoggerFactory:
|
||||
|
||||
To be used with `structlog.configure`\ 's ``logger_factory``.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
file: File to print to. (default: `sys.stdout`)
|
||||
|
||||
Positional arguments are silently ignored.
|
||||
@@ -138,7 +142,8 @@ class WriteLogger:
|
||||
"""
|
||||
Write events into a file.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
file: File to print to. (default: `sys.stdout`)
|
||||
|
||||
>>> from structlog import WriteLogger
|
||||
@@ -214,8 +219,8 @@ class WriteLogger:
|
||||
Write and flush *message*.
|
||||
"""
|
||||
with self._lock:
|
||||
self._write(message + "\n")
|
||||
self._flush()
|
||||
until_not_interrupted(self._write, message + "\n")
|
||||
until_not_interrupted(self._flush)
|
||||
|
||||
log = debug = info = warn = warning = msg
|
||||
fatal = failure = err = error = critical = exception = msg
|
||||
@@ -227,7 +232,8 @@ class WriteLoggerFactory:
|
||||
|
||||
To be used with `structlog.configure`\ 's ``logger_factory``.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
file: File to print to. (default: `sys.stdout`)
|
||||
|
||||
Positional arguments are silently ignored.
|
||||
@@ -246,7 +252,7 @@ class BytesLogger:
|
||||
r"""
|
||||
Writes bytes into a file.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
file: File to print to. (default: `sys.stdout`\ ``.buffer``)
|
||||
|
||||
Useful if you follow `current logging best practices
|
||||
@@ -255,8 +261,7 @@ class BytesLogger:
|
||||
|
||||
.. versionadded:: 20.2.0
|
||||
"""
|
||||
|
||||
__slots__ = ("_file", "_flush", "_lock", "_write")
|
||||
__slots__ = ("_file", "_write", "_flush", "_lock")
|
||||
|
||||
def __init__(self, file: BinaryIO | None = None):
|
||||
self._file = file or sys.stdout.buffer
|
||||
@@ -318,8 +323,8 @@ class BytesLogger:
|
||||
Write *message*.
|
||||
"""
|
||||
with self._lock:
|
||||
self._write(message + b"\n")
|
||||
self._flush()
|
||||
until_not_interrupted(self._write, message + b"\n")
|
||||
until_not_interrupted(self._flush)
|
||||
|
||||
log = debug = info = warn = warning = msg
|
||||
fatal = failure = err = error = critical = exception = msg
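
# Sketch of the BytesLogger fast path: a bytes-producing JSON serializer
# written straight to stdout.buffer. orjson is an assumption here; any
# serializer that returns bytes works.
import orjson
import structlog

structlog.configure(
    processors=[structlog.processors.JSONRenderer(serializer=orjson.dumps)],
    logger_factory=structlog.BytesLoggerFactory(),
)
structlog.get_logger().info("hello", key="value")
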
|
||||
@@ -331,14 +336,14 @@ class BytesLoggerFactory:
|
||||
|
||||
To be used with `structlog.configure`\ 's ``logger_factory``.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
file: File to print to. (default: `sys.stdout`\ ``.buffer``)
|
||||
|
||||
Positional arguments are silently ignored.
|
||||
|
||||
.. versionadded:: 20.2.0
|
||||
"""
|
||||
|
||||
__slots__ = ("_file",)
|
||||
|
||||
def __init__(self, file: BinaryIO | None = None):
|
||||
|
||||
@@ -9,10 +9,32 @@ Generic utilities.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import errno
|
||||
import sys
|
||||
|
||||
from contextlib import suppress
|
||||
from typing import Any
|
||||
from typing import Any, Callable
|
||||
|
||||
|
||||
def until_not_interrupted(f: Callable[..., Any], *args: Any, **kw: Any) -> Any:
|
||||
"""
|
||||
Retry until *f* succeeds or an exception that isn't caused by EINTR occurs.
|
||||
|
||||
Arguments:
|
||||
|
||||
f: A callable like a function.
|
||||
|
||||
*args: Positional arguments for *f*.
|
||||
|
||||
**kw: Keyword arguments for *f*.
|
||||
"""
|
||||
while True:
|
||||
try:
|
||||
return f(*args, **kw)
|
||||
except OSError as e: # noqa: PERF203
|
||||
if e.args[0] == errno.EINTR:
|
||||
continue
|
||||
raise
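
# Illustrative sketch of the EINTR-retry helper defined above (structlog's
# private _utils module); the Print/Write/Bytes loggers wrap their I/O with it.
import sys

from structlog._utils import until_not_interrupted

def write_line(message: str) -> None:
    until_not_interrupted(sys.stdout.write, message + "\n")
    until_not_interrupted(sys.stdout.flush)
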
|
||||
|
||||
|
||||
def get_processname() -> str:
|
||||
|
||||
@@ -11,8 +11,6 @@ Python 3.7 as :mod:`contextvars`.
|
||||
.. versionchanged:: 21.1.0
|
||||
Reimplemented without using a single dict as context carrier for improved
|
||||
isolation. Every key-value pair is a separate `contextvars.ContextVar` now.
|
||||
.. versionchanged:: 23.3.0
|
||||
Callsite parameters are now also collected under asyncio.
|
||||
|
||||
See :doc:`contextvars`.
|
||||
"""
|
||||
@@ -22,7 +20,6 @@ from __future__ import annotations
|
||||
import contextlib
|
||||
import contextvars
|
||||
|
||||
from types import FrameType
|
||||
from typing import Any, Generator, Mapping
|
||||
|
||||
import structlog
|
||||
@@ -33,10 +30,6 @@ from .typing import BindableLogger, EventDict, WrappedLogger
|
||||
STRUCTLOG_KEY_PREFIX = "structlog_"
|
||||
STRUCTLOG_KEY_PREFIX_LEN = len(STRUCTLOG_KEY_PREFIX)
|
||||
|
||||
_ASYNC_CALLING_STACK: contextvars.ContextVar[FrameType] = (
|
||||
contextvars.ContextVar("_ASYNC_CALLING_STACK")
|
||||
)
|
||||
|
||||
# For proper isolation, we have to use a dict of ContextVars instead of a
|
||||
# single ContextVar with a dict.
|
||||
# See https://github.com/hynek/structlog/pull/302 for details.
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"""
|
||||
Helpers that make development with *structlog* more pleasant.
|
||||
|
||||
See also the narrative documentation in `console-output`.
|
||||
See also the narrative documentation in `development`.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
@@ -20,14 +20,13 @@ from io import StringIO
|
||||
from types import ModuleType
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Iterable,
|
||||
Literal,
|
||||
Protocol,
|
||||
Sequence,
|
||||
TextIO,
|
||||
Type,
|
||||
Union,
|
||||
cast,
|
||||
)
|
||||
|
||||
from ._frames import _format_exception
|
||||
@@ -53,12 +52,12 @@ try:
|
||||
except ImportError:
|
||||
rich = None # type: ignore[assignment]
|
||||
|
||||
|
||||
__all__ = [
|
||||
"ConsoleRenderer",
|
||||
"RichTracebackFormatter",
|
||||
"better_traceback",
|
||||
"plain_traceback",
|
||||
"rich_traceback",
|
||||
"better_traceback",
|
||||
]
|
||||
|
||||
_IS_WINDOWS = sys.platform == "win32"
|
||||
@@ -73,7 +72,7 @@ def _pad(s: str, length: int) -> str:
|
||||
"""
|
||||
missing = length - len(s)
|
||||
|
||||
return s + " " * (max(0, missing))
|
||||
return s + " " * (missing if missing > 0 else 0)
|
||||
|
||||
|
||||
if colorama is not None:
|
||||
@@ -165,167 +164,6 @@ class _PlainStyles:
|
||||
kv_value = ""
|
||||
|
||||
|
||||
class ColumnFormatter(Protocol):
|
||||
"""
|
||||
:class:`~typing.Protocol` for column formatters.
|
||||
|
||||
See `KeyValueColumnFormatter` and `LogLevelColumnFormatter` for examples.
|
||||
|
||||
.. versionadded:: 23.3.0
|
||||
"""
|
||||
|
||||
def __call__(self, key: str, value: object) -> str:
|
||||
"""
|
||||
Format *value* for *key*.
|
||||
|
||||
This method is responsible for formatting *key*, the ``=``, and the
|
||||
*value*. That means that it can use any string instead of the ``=`` and
|
||||
it can leave out both the *key* or the *value*.
|
||||
|
||||
If it returns an empty string, the column is omitted completely.
|
||||
"""
|
||||
|
||||
|
||||
@dataclass
|
||||
class Column:
|
||||
"""
|
||||
A column defines the way a key-value pair is formatted, and, by its
|
||||
position in the *columns* argument of `ConsoleRenderer`, the order in which
|
||||
it is rendered.
|
||||
|
||||
Args:
|
||||
key:
|
||||
The key for which this column is responsible. Leave empty to define
|
||||
it as the default formatter.
|
||||
|
||||
formatter: The formatter for columns with *key*.
|
||||
|
||||
.. versionadded:: 23.3.0
|
||||
"""
|
||||
|
||||
key: str
|
||||
formatter: ColumnFormatter
|
||||
|
||||
|
||||
@dataclass
|
||||
class KeyValueColumnFormatter:
|
||||
"""
|
||||
Format a key-value pair.
|
||||
|
||||
Args:
|
||||
key_style: The style to apply to the key. If None, the key is omitted.
|
||||
|
||||
value_style: The style to apply to the value.
|
||||
|
||||
reset_style: The style to apply whenever a style is no longer needed.
|
||||
|
||||
value_repr:
|
||||
A callable that returns the string representation of the value.
|
||||
|
||||
width: The width to pad the value to. If 0, no padding is done.
|
||||
|
||||
prefix:
|
||||
A string to prepend to the formatted key-value pair. May contain
|
||||
styles.
|
||||
|
||||
postfix:
|
||||
A string to append to the formatted key-value pair. May contain
|
||||
styles.
|
||||
|
||||
.. versionadded:: 23.3.0
|
||||
"""
|
||||
|
||||
key_style: str | None
|
||||
value_style: str
|
||||
reset_style: str
|
||||
value_repr: Callable[[object], str]
|
||||
width: int = 0
|
||||
prefix: str = ""
|
||||
postfix: str = ""
|
||||
|
||||
def __call__(self, key: str, value: object) -> str:
|
||||
sio = StringIO()
|
||||
|
||||
if self.prefix:
|
||||
sio.write(self.prefix)
|
||||
sio.write(self.reset_style)
|
||||
|
||||
if self.key_style is not None:
|
||||
sio.write(self.key_style)
|
||||
sio.write(key)
|
||||
sio.write(self.reset_style)
|
||||
sio.write("=")
|
||||
|
||||
sio.write(self.value_style)
|
||||
sio.write(_pad(self.value_repr(value), self.width))
|
||||
sio.write(self.reset_style)
|
||||
|
||||
if self.postfix:
|
||||
sio.write(self.postfix)
|
||||
sio.write(self.reset_style)
|
||||
|
||||
return sio.getvalue()
|
||||
|
||||
|
||||
class LogLevelColumnFormatter:
|
||||
"""
|
||||
Format a log level according to *level_styles*.
|
||||
|
||||
The width is padded to the longest level name (if *level_styles* is passed
|
||||
-- otherwise there's no way to know the lengths of all levels).
|
||||
|
||||
Args:
|
||||
level_styles:
|
||||
A dictionary of level names to styles that are applied to it. If
|
||||
None, the level is formatted as a plain ``[level]``.
|
||||
|
||||
reset_style:
|
||||
What to use to reset the style after the level name. Ignored if
|
||||
*level_styles* is None.
|
||||
|
||||
width:
|
||||
The width to pad the level to. If 0, no padding is done.
|
||||
|
||||
.. versionadded:: 23.3.0
|
||||
.. versionadded:: 24.2.0 *width*
|
||||
"""
|
||||
|
||||
level_styles: dict[str, str] | None
|
||||
reset_style: str
|
||||
width: int
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
level_styles: dict[str, str],
|
||||
reset_style: str,
|
||||
width: int | None = None,
|
||||
) -> None:
|
||||
self.level_styles = level_styles
|
||||
if level_styles:
|
||||
self.width = (
|
||||
0
|
||||
if width == 0
|
||||
else len(max(self.level_styles.keys(), key=lambda e: len(e)))
|
||||
)
|
||||
self.reset_style = reset_style
|
||||
else:
|
||||
self.width = 0
|
||||
self.reset_style = ""
|
||||
|
||||
def __call__(self, key: str, value: object) -> str:
|
||||
level = cast(str, value)
|
||||
style = (
|
||||
""
|
||||
if self.level_styles is None
|
||||
else self.level_styles.get(level, "")
|
||||
)
|
||||
|
||||
return f"[{style}{_pad(level, self.width)}{self.reset_style}]"
|
||||
|
||||
|
||||
_NOTHING = object()
|
||||
|
||||
|
||||
def plain_traceback(sio: TextIO, exc_info: ExcInfo) -> None:
|
||||
"""
|
||||
"Pretty"-print *exc_info* to *sio* using our own plain formatter.
|
||||
@@ -376,9 +214,7 @@ class RichTracebackFormatter:
|
||||
|
||||
sio.write("\n")
|
||||
|
||||
Console(
|
||||
file=sio, color_system=self.color_system, width=self.width
|
||||
).print(
|
||||
Console(file=sio, color_system=self.color_system).print(
|
||||
Traceback.from_exception(
|
||||
*exc_info,
|
||||
show_locals=self.show_locals,
|
||||
@@ -432,48 +268,36 @@ else:
|
||||
|
||||
|
||||
class ConsoleRenderer:
|
||||
r"""
|
||||
"""
|
||||
Render ``event_dict`` nicely aligned, possibly in colors, and ordered.
|
||||
|
||||
If ``event_dict`` contains a true-ish ``exc_info`` key, it will be rendered
|
||||
*after* the log line. If Rich_ or better-exceptions_ are present, in colors
|
||||
and with extra context.
|
||||
|
||||
Args:
|
||||
columns:
|
||||
A list of `Column` objects defining both the order and format of
|
||||
the key-value pairs in the output. If passed, most other arguments
|
||||
become meaningless.
|
||||
Arguments:
|
||||
|
||||
**Must** contain a column with ``key=''`` that defines the default
|
||||
formatter.
|
||||
|
||||
.. seealso:: `columns-config`
|
||||
|
||||
pad_event:
|
||||
Pad the event to this many characters. Ignored if *columns* are
|
||||
passed.
|
||||
pad_event: Pad the event to this many characters.
|
||||
|
||||
colors:
|
||||
Use colors for a nicer output. `True` by default. On Windows only
|
||||
if Colorama_ is installed. Ignored if *columns* are passed.
|
||||
if Colorama_ is installed.
|
||||
|
||||
force_colors:
|
||||
Force colors even for non-tty destinations. Use this option if your
|
||||
logs are stored in a file that is meant to be streamed to the
|
||||
console. Only meaningful on Windows. Ignored if *columns* are
|
||||
passed.
|
||||
console. Only meaningful on Windows.
|
||||
|
||||
repr_native_str:
|
||||
When `True`, `repr` is also applied to ``str``\ s. The ``event``
|
||||
key is *never* `repr` -ed. Ignored if *columns* are passed.
|
||||
When `True`, `repr` is also applied to native strings (i.e. unicode
|
||||
on Python 3 and bytes on Python 2). Setting this to `False` is
|
||||
useful if you want to have human-readable non-ASCII output on
|
||||
Python 2. The ``event`` key is *never* `repr` -ed.
|
||||
|
||||
level_styles:
|
||||
When present, use these styles for colors. This must be a dict from
|
||||
level names (strings) to terminal sequences (for example, Colorama)
|
||||
styles. The default can be obtained by calling
|
||||
`ConsoleRenderer.get_default_level_styles`. Ignored when *columns*
|
||||
are passed.
|
||||
level names (strings) to Colorama styles. The default can be
|
||||
obtained by calling `ConsoleRenderer.get_default_level_styles`
|
||||
|
||||
exception_formatter:
|
||||
A callable to render ``exc_infos``. If Rich_ or better-exceptions_
|
||||
@@ -483,29 +307,18 @@ class ConsoleRenderer:
|
||||
`RichTracebackFormatter` like `rich_traceback`, or implement your
|
||||
own.
|
||||
|
||||
sort_keys:
|
||||
Whether to sort keys when formatting. `True` by default. Ignored if
|
||||
*columns* are passed.
|
||||
sort_keys: Whether to sort keys when formatting. `True` by default.
|
||||
|
||||
event_key:
|
||||
The key to look for the main log message. Needed when you rename it
|
||||
e.g. using `structlog.processors.EventRenamer`. Ignored if
|
||||
*columns* are passed.
|
||||
e.g. using `structlog.processors.EventRenamer`.
|
||||
|
||||
timestamp_key:
|
||||
The key to look for timestamp of the log message. Needed when you
|
||||
rename it e.g. using `structlog.processors.EventRenamer`. Ignored
|
||||
if *columns* are passed.
|
||||
|
||||
pad_level:
|
||||
Whether to pad log level with blanks to the longest amongst all
|
||||
level label.
|
||||
rename it e.g. using `structlog.processors.EventRenamer`.
|
||||
|
||||
Requires the Colorama_ package if *colors* is `True` **on Windows**.
|
||||
|
||||
Raises:
|
||||
ValueError: If there's not exactly one default column formatter.
|
||||
|
||||
.. _Colorama: https://pypi.org/project/colorama/
|
||||
.. _better-exceptions: https://pypi.org/project/better-exceptions/
|
||||
.. _Rich: https://pypi.org/project/rich/
|
||||
@@ -539,73 +352,20 @@ class ConsoleRenderer:
|
||||
.. versionadded:: 21.3.0 *sort_keys*
|
||||
.. versionadded:: 22.1.0 *event_key*
|
||||
.. versionadded:: 23.2.0 *timestamp_key*
|
||||
.. versionadded:: 23.3.0 *columns*
|
||||
.. versionadded:: 24.2.0 *pad_level*
|
||||
"""
|
||||
|
||||
def __init__( # noqa: PLR0912, PLR0915
|
||||
def __init__(
|
||||
self,
|
||||
pad_event: int = _EVENT_WIDTH,
|
||||
colors: bool = _has_colors,
|
||||
force_colors: bool = False,
|
||||
repr_native_str: bool = False,
|
||||
level_styles: dict[str, str] | None = None,
|
||||
level_styles: Styles | None = None,
|
||||
exception_formatter: ExceptionRenderer = default_exception_formatter,
|
||||
sort_keys: bool = True,
|
||||
event_key: str = "event",
|
||||
timestamp_key: str = "timestamp",
|
||||
columns: list[Column] | None = None,
|
||||
pad_level: bool = True,
|
||||
):
|
||||
self._exception_formatter = exception_formatter
|
||||
self._sort_keys = sort_keys
|
||||
|
||||
if columns is not None:
|
||||
to_warn = []
|
||||
|
||||
def add_meaningless_arg(arg: str) -> None:
|
||||
to_warn.append(
|
||||
f"The `{arg}` argument is ignored when passing `columns`.",
|
||||
)
|
||||
|
||||
if pad_event != _EVENT_WIDTH:
|
||||
add_meaningless_arg("pad_event")
|
||||
|
||||
if colors != _has_colors:
|
||||
add_meaningless_arg("colors")
|
||||
|
||||
if force_colors is not False:
|
||||
add_meaningless_arg("force_colors")
|
||||
|
||||
if repr_native_str is not False:
|
||||
add_meaningless_arg("repr_native_str")
|
||||
|
||||
if level_styles is not None:
|
||||
add_meaningless_arg("level_styles")
|
||||
|
||||
if event_key != "event":
|
||||
add_meaningless_arg("event_key")
|
||||
|
||||
if timestamp_key != "timestamp":
|
||||
add_meaningless_arg("timestamp_key")
|
||||
|
||||
for w in to_warn:
|
||||
warnings.warn(w, stacklevel=2)
|
||||
|
||||
defaults = [col for col in columns if col.key == ""]
|
||||
if not defaults:
|
||||
raise ValueError(
|
||||
"Must pass a default column formatter (a column with `key=''`)."
|
||||
)
|
||||
if len(defaults) > 1:
|
||||
raise ValueError("Only one default column formatter allowed.")
|
||||
|
||||
self._default_column_formatter = defaults[0].formatter
|
||||
self._columns = [col for col in columns if col.key]
|
||||
|
||||
return
|
||||
|
||||
# Create default columns configuration.
|
||||
styles: Styles
|
||||
if colors:
|
||||
if _IS_WINDOWS: # pragma: no cover
|
||||
@@ -631,69 +391,24 @@ class ConsoleRenderer:
|
||||
styles = _PlainStyles
|
||||
|
||||
self._styles = styles
|
||||
self._pad_event = pad_event
|
||||
|
||||
level_to_color = (
|
||||
self.get_default_level_styles(colors)
|
||||
if level_styles is None
|
||||
else level_styles
|
||||
).copy()
|
||||
if level_styles is None:
|
||||
self._level_to_color = self.get_default_level_styles(colors)
|
||||
else:
|
||||
self._level_to_color = level_styles
|
||||
|
||||
for key in level_to_color:
|
||||
level_to_color[key] += styles.bright
|
||||
for key in self._level_to_color:
|
||||
self._level_to_color[key] += styles.bright
|
||||
self._longest_level = len(
|
||||
max(level_to_color.keys(), key=lambda e: len(e))
|
||||
max(self._level_to_color.keys(), key=lambda e: len(e))
|
||||
)
|
||||
|
||||
self._repr_native_str = repr_native_str
|
||||
|
||||
self._default_column_formatter = KeyValueColumnFormatter(
|
||||
styles.kv_key,
|
||||
styles.kv_value,
|
||||
styles.reset,
|
||||
value_repr=self._repr,
|
||||
width=0,
|
||||
)
|
||||
|
||||
logger_name_formatter = KeyValueColumnFormatter(
|
||||
key_style=None,
|
||||
value_style=styles.bright + styles.logger_name,
|
||||
reset_style=styles.reset,
|
||||
value_repr=str,
|
||||
prefix="[",
|
||||
postfix="]",
|
||||
)
|
||||
|
||||
level_width = 0 if not pad_level else None
|
||||
|
||||
self._columns = [
|
||||
Column(
|
||||
timestamp_key,
|
||||
KeyValueColumnFormatter(
|
||||
key_style=None,
|
||||
value_style=styles.timestamp,
|
||||
reset_style=styles.reset,
|
||||
value_repr=str,
|
||||
),
|
||||
),
|
||||
Column(
|
||||
"level",
|
||||
LogLevelColumnFormatter(
|
||||
level_to_color, reset_style=styles.reset, width=level_width
|
||||
),
|
||||
),
|
||||
Column(
|
||||
event_key,
|
||||
KeyValueColumnFormatter(
|
||||
key_style=None,
|
||||
value_style=styles.bright,
|
||||
reset_style=styles.reset,
|
||||
value_repr=str,
|
||||
width=pad_event,
|
||||
),
|
||||
),
|
||||
Column("logger", logger_name_formatter),
|
||||
Column("logger_name", logger_name_formatter),
|
||||
]
|
||||
self._exception_formatter = exception_formatter
|
||||
self._sort_keys = sort_keys
|
||||
self._event_key = event_key
|
||||
self._timestamp_key = timestamp_key
|
||||
|
||||
def _repr(self, val: Any) -> str:
|
||||
"""
|
||||
@@ -704,39 +419,90 @@ class ConsoleRenderer:
|
||||
return repr(val)
|
||||
|
||||
if isinstance(val, str):
|
||||
if set(val) & {" ", "\t", "=", "\r", "\n", '"', "'"}:
|
||||
return repr(val)
|
||||
return val
|
||||
|
||||
return repr(val)
|
||||
|
||||
def __call__(
|
||||
def __call__( # noqa: PLR0912
|
||||
self, logger: WrappedLogger, name: str, event_dict: EventDict
|
||||
) -> str:
|
||||
sio = StringIO()
|
||||
|
||||
ts = event_dict.pop(self._timestamp_key, None)
|
||||
if ts is not None:
|
||||
sio.write(
|
||||
# can be a number if timestamp is UNIXy
|
||||
self._styles.timestamp
|
||||
+ str(ts)
|
||||
+ self._styles.reset
|
||||
+ " "
|
||||
)
|
||||
level = event_dict.pop("level", None)
|
||||
if level is not None:
|
||||
sio.write(
|
||||
"["
|
||||
+ self._level_to_color.get(level, "")
|
||||
+ _pad(level, self._longest_level)
|
||||
+ self._styles.reset
|
||||
+ "] "
|
||||
)
|
||||
|
||||
# force event to str for compatibility with standard library
|
||||
event = event_dict.pop(self._event_key, None)
|
||||
if not isinstance(event, str):
|
||||
event = str(event)
|
||||
|
||||
if event_dict:
|
||||
event = _pad(event, self._pad_event) + self._styles.reset + " "
|
||||
else:
|
||||
event += self._styles.reset
|
||||
sio.write(self._styles.bright + event)
|
||||
|
||||
logger_name = event_dict.pop("logger", None)
|
||||
if logger_name is None:
|
||||
logger_name = event_dict.pop("logger_name", None)
|
||||
|
||||
if logger_name is not None:
|
||||
sio.write(
|
||||
"["
|
||||
+ self._styles.logger_name
|
||||
+ self._styles.bright
|
||||
+ logger_name
|
||||
+ self._styles.reset
|
||||
+ "] "
|
||||
)
|
||||
|
||||
stack = event_dict.pop("stack", None)
|
||||
exc = event_dict.pop("exception", None)
|
||||
exc_info = event_dict.pop("exc_info", None)
|
||||
|
||||
kvs = [
|
||||
col.formatter(col.key, val)
|
||||
for col in self._columns
|
||||
if (val := event_dict.pop(col.key, _NOTHING)) is not _NOTHING
|
||||
] + [
|
||||
self._default_column_formatter(key, event_dict[key])
|
||||
for key in (sorted(event_dict) if self._sort_keys else event_dict)
|
||||
]
|
||||
event_dict_keys: Iterable[str] = event_dict.keys()
|
||||
if self._sort_keys:
|
||||
event_dict_keys = sorted(event_dict_keys)
|
||||
|
||||
sio = StringIO()
|
||||
sio.write((" ".join(kv for kv in kvs if kv)).rstrip(" "))
|
||||
sio.write(
|
||||
" ".join(
|
||||
self._styles.kv_key
|
||||
+ key
|
||||
+ self._styles.reset
|
||||
+ "="
|
||||
+ self._styles.kv_value
|
||||
+ self._repr(event_dict[key])
|
||||
+ self._styles.reset
|
||||
for key in event_dict_keys
|
||||
)
|
||||
)
|
||||
|
||||
if stack is not None:
|
||||
sio.write("\n" + stack)
|
||||
if exc_info or exc is not None:
|
||||
sio.write("\n\n" + "=" * 79 + "\n")
|
||||
|
||||
exc_info = _figure_out_exc_info(exc_info)
|
||||
if exc_info:
|
||||
self._exception_formatter(sio, exc_info)
|
||||
exc_info = _figure_out_exc_info(exc_info)
|
||||
|
||||
if exc_info != (None, None, None):
|
||||
self._exception_formatter(sio, exc_info)
|
||||
elif exc is not None:
|
||||
if self._exception_formatter is not plain_traceback:
|
||||
warnings.warn(
|
||||
@@ -744,13 +510,12 @@ class ConsoleRenderer:
|
||||
"if you want pretty exceptions.",
|
||||
stacklevel=2,
|
||||
)
|
||||
|
||||
sio.write("\n" + exc)
|
||||
|
||||
return sio.getvalue()
|
||||
|
||||
@staticmethod
|
||||
def get_default_level_styles(colors: bool = True) -> dict[str, str]:
|
||||
def get_default_level_styles(colors: bool = True) -> Any:
|
||||
"""
|
||||
Get the default styles for log levels
|
||||
|
||||
@@ -759,10 +524,11 @@ class ConsoleRenderer:
|
||||
home-grown :func:`~structlog.stdlib.add_log_level` you could do::
|
||||
|
||||
my_styles = ConsoleRenderer.get_default_level_styles()
|
||||
my_styles["EVERYTHING_IS_ON_FIRE"] = my_styles["critical"]
|
||||
renderer = ConsoleRenderer(level_styles=my_styles)
|
||||
my_styles["EVERYTHING_IS_ON_FIRE"] = my_styles["critical"] renderer
|
||||
= ConsoleRenderer(level_styles=my_styles)
|
||||
|
||||
Arguments:
|
||||
|
||||
Args:
|
||||
colors:
|
||||
Whether to use colorful styles. This must match the *colors*
|
||||
parameter to `ConsoleRenderer`. Default: `True`.
|
||||
|
||||
@@ -11,6 +11,7 @@ from __future__ import annotations
|
||||
|
||||
import datetime
|
||||
import enum
|
||||
import inspect
|
||||
import json
|
||||
import logging
|
||||
import operator
|
||||
@@ -19,7 +20,6 @@ import sys
|
||||
import threading
|
||||
import time
|
||||
|
||||
from types import FrameType, TracebackType
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
@@ -28,7 +28,6 @@ from typing import (
|
||||
NamedTuple,
|
||||
Sequence,
|
||||
TextIO,
|
||||
cast,
|
||||
)
|
||||
|
||||
from ._frames import (
|
||||
@@ -36,33 +35,27 @@ from ._frames import (
|
||||
_format_exception,
|
||||
_format_stack,
|
||||
)
|
||||
from ._log_levels import NAME_TO_LEVEL, add_log_level
|
||||
from ._log_levels import _NAME_TO_LEVEL, add_log_level
|
||||
from ._utils import get_processname
|
||||
from .tracebacks import ExceptionDictTransformer
|
||||
from .typing import (
|
||||
EventDict,
|
||||
ExceptionTransformer,
|
||||
ExcInfo,
|
||||
WrappedLogger,
|
||||
)
|
||||
from .typing import EventDict, ExceptionTransformer, ExcInfo, WrappedLogger
|
||||
|
||||
|
||||
__all__ = [
|
||||
"NAME_TO_LEVEL", # some people rely on it being here
|
||||
"_NAME_TO_LEVEL", # some people rely on it being here
|
||||
"add_log_level",
|
||||
"CallsiteParameter",
|
||||
"CallsiteParameterAdder",
|
||||
"dict_tracebacks",
|
||||
"EventRenamer",
|
||||
"ExceptionPrettyPrinter",
|
||||
"format_exc_info",
|
||||
"JSONRenderer",
|
||||
"KeyValueRenderer",
|
||||
"LogfmtRenderer",
|
||||
"StackInfoRenderer",
|
||||
"TimeStamper",
|
||||
"UnicodeDecoder",
|
||||
"UnicodeEncoder",
|
||||
"add_log_level",
|
||||
"dict_tracebacks",
|
||||
"format_exc_info",
|
||||
]
|
||||
|
||||
|
||||
@@ -70,7 +63,8 @@ class KeyValueRenderer:
|
||||
"""
|
||||
Render ``event_dict`` as a list of ``Key=repr(Value)`` pairs.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
sort_keys: Whether to sort keys when formatting.
|
||||
|
||||
key_order:
|
||||
@@ -125,7 +119,8 @@ class LogfmtRenderer:
|
||||
|
||||
.. _logfmt: https://brandur.org/logfmt
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
sort_keys: Whether to sort keys when formatting.
|
||||
|
||||
key_order:
|
||||
@@ -143,7 +138,8 @@ class LogfmtRenderer:
|
||||
``flag=false``.
|
||||
|
||||
Raises:
|
||||
ValueError: If a key contains non-printable or whitespace characters.
|
||||
|
||||
ValueError: If a key contains non printable or space characters.
|
||||
|
||||
.. versionadded:: 21.5.0
|
||||
"""
|
||||
@@ -177,16 +173,9 @@ class LogfmtRenderer:
|
||||
continue
|
||||
value = "true" if value else "false"
|
||||
|
||||
value = str(value)
|
||||
backslashes_need_escaping = (
|
||||
" " in value or "=" in value or '"' in value
|
||||
)
|
||||
if backslashes_need_escaping and "\\" in value:
|
||||
value = value.replace("\\", "\\\\")
|
||||
value = f"{value}".replace('"', '\\"')
|
||||
|
||||
value = value.replace('"', '\\"').replace("\n", "\\n")
|
||||
|
||||
if backslashes_need_escaping:
|
||||
if " " in value or "=" in value:
|
||||
value = f'"{value}"'
|
||||
|
||||
elements.append(f"{key}={value}")
|
||||
@@ -248,7 +237,8 @@ class UnicodeEncoder:
|
||||
"""
|
||||
Encode unicode values in ``event_dict``.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
encoding: Encoding to encode to (default: ``"utf-8"``).
|
||||
|
||||
errors:
|
||||
@@ -282,7 +272,8 @@ class UnicodeDecoder:
|
||||
"""
|
||||
Decode byte string values in ``event_dict``.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
encoding: Encoding to decode from (default: ``"utf-8"``).
|
||||
|
||||
errors: How to cope with encoding errors (default: ``"replace"``).
|
||||
@@ -317,7 +308,8 @@ class JSONRenderer:
    """
    Render the ``event_dict`` using ``serializer(event_dict, **dumps_kw)``.

    Args:
    Arguments:

        dumps_kw:
            Are passed unmodified to *serializer*. If *default* is passed, it
            will disable support for ``__structlog__``-based serialization.
@@ -325,9 +317,9 @@ class JSONRenderer:
        serializer:
            A :func:`json.dumps`-compatible callable that will be used to
            format the string. This can be used to use alternative JSON
            encoders (default: :func:`json.dumps`).

            .. seealso:: :doc:`performance` for examples.
            encoders like `orjson <https://pypi.org/project/orjson/>`__ or
            `RapidJSON <https://pypi.org/project/python-rapidjson/>`_
            (default: :func:`json.dumps`).

    .. versionadded:: 0.2.0 Support for ``__structlog__`` serialization method.
    .. versionadded:: 15.4.0 *serializer* parameter.
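
A sketch of the alternative-serializer pattern the docstring refers to; orjson.dumps returns bytes, so it is typically paired with BytesLoggerFactory (orjson is an optional third-party package, not part of this diff):

import orjson
import structlog

structlog.configure(
    processors=[
        structlog.processors.add_log_level,
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.processors.JSONRenderer(serializer=orjson.dumps),
    ],
    logger_factory=structlog.BytesLoggerFactory(),  # the serializer emits bytes
)

structlog.get_logger().info("hello", user="alice")
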
@@ -393,7 +385,8 @@ class ExceptionRenderer:
    If there is no ``exc_info`` key, the *event_dict* is not touched. This
    behavior is analog to the one of the stdlib's logging.

    Args:
    Arguments:

        exception_formatter:
            A callable that is used to format the exception from the
            ``exc_info`` field into the ``exception`` field.
@@ -414,9 +407,11 @@ class ExceptionRenderer:
    def __call__(
        self, logger: WrappedLogger, name: str, event_dict: EventDict
    ) -> EventDict:
        exc_info = _figure_out_exc_info(event_dict.pop("exc_info", None))
        exc_info = event_dict.pop("exc_info", None)
        if exc_info:
            event_dict["exception"] = self.format_exception(exc_info)
            event_dict["exception"] = self.format_exception(
                _figure_out_exc_info(exc_info)
            )

        return event_dict

@@ -464,7 +459,8 @@ class TimeStamper:
    """
    Add a timestamp to ``event_dict``.

    Args:
    Arguments:

        fmt:
            strftime format string, or ``"iso"`` for `ISO 8601
            <https://en.wikipedia.org/wiki/ISO_8601>`_, or `None` for a `UNIX
@@ -477,7 +473,7 @@ class TimeStamper:
    .. versionchanged:: 19.2.0 Can be pickled now.
    """

    __slots__ = ("_stamper", "fmt", "key", "utc")
    __slots__ = ("_stamper", "fmt", "utc", "key")

    def __init__(
        self,
@@ -525,8 +521,7 @@ def _make_stamper(
    else:

        def now() -> datetime.datetime:
            # We don't need the TZ for our own formatting. We add it only for
            # user-defined formats later.
            # A naive local datetime is fine here, because we only format it.
            return datetime.datetime.now()  # noqa: DTZ005

    if fmt is None:
@@ -553,18 +548,12 @@ def _make_stamper(

        return stamper_iso_local

    def stamper_fmt_local(event_dict: EventDict) -> EventDict:
        event_dict[key] = now().astimezone().strftime(fmt)
        return event_dict

    def stamper_fmt_utc(event_dict: EventDict) -> EventDict:
    def stamper_fmt(event_dict: EventDict) -> EventDict:
        event_dict[key] = now().strftime(fmt)

        return event_dict

    if utc:
        return stamper_fmt_utc

    return stamper_fmt_local
    return stamper_fmt

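
A small sketch of the three *fmt* modes that _make_stamper distinguishes (the output comments are illustrative):

from structlog.processors import TimeStamper

iso_utc = TimeStamper(fmt="iso", utc=True)            # ISO 8601 in UTC, trailing "Z"
unix_ts = TimeStamper(fmt=None, key="ts")             # float UNIX timestamp under "ts"
local = TimeStamper(fmt="%Y-%m-%d %H:%M", utc=False)  # strftime in local time

for stamper in (iso_utc, unix_ts, local):
    print(stamper(None, "info", {"event": "tick"}))
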
class MaybeTimeStamper:
@@ -598,49 +587,36 @@ class MaybeTimeStamper:
        return event_dict


def _figure_out_exc_info(v: Any) -> ExcInfo | None:
def _figure_out_exc_info(v: Any) -> ExcInfo:
    """
    Try to convert *v* into an ``exc_info`` tuple.

    Return ``None`` if *v* does not represent an exception or if there is no
    current exception.
    Depending on the Python version will try to do the smartest thing possible
    to transform *v* into an ``exc_info`` tuple.
    """
    if isinstance(v, BaseException):
        return (v.__class__, v, v.__traceback__)

    if isinstance(v, tuple) and len(v) == 3:
        has_type = isinstance(v[0], type) and issubclass(v[0], BaseException)
        has_exc = isinstance(v[1], BaseException)
        has_tb = v[2] is None or isinstance(v[2], TracebackType)
        if has_type and has_exc and has_tb:
            return v
    if isinstance(v, tuple):
        return v  # type: ignore[return-value]

    if v:
        result = sys.exc_info()
        if result == (None, None, None):
            return None
        return cast(ExcInfo, result)
        return sys.exc_info()  # type: ignore[return-value]

    return None
    return v


class ExceptionPrettyPrinter:
|
||||
"""
|
||||
Pretty print exceptions rendered by *exception_formatter* and remove them
|
||||
from the ``event_dict``.
|
||||
Pretty print exceptions and remove them from the ``event_dict``.
|
||||
|
||||
Arguments:
|
||||
|
||||
Args:
|
||||
file: Target file for output (default: ``sys.stdout``).
|
||||
exception_formatter:
|
||||
A callable that is used to format the exception from the
|
||||
``exc_info`` field into the ``exception`` field.
|
||||
|
||||
This processor is mostly for development and testing so you can read
|
||||
exceptions properly formatted.
|
||||
|
||||
It behaves like `format_exc_info`, except that it removes the exception data
|
||||
from the event dictionary after printing it using the passed
|
||||
*exception_formatter*, which defaults to Python's built-in traceback formatting.
|
||||
It behaves like `format_exc_info` except it removes the exception data from
|
||||
the event dictionary after printing it.
|
||||
|
||||
It's tolerant to having `format_exc_info` in front of itself in the
|
||||
processor chain but doesn't require it. In other words, it handles both
|
||||
@@ -650,9 +626,6 @@ class ExceptionPrettyPrinter:
|
||||
|
||||
.. versionchanged:: 16.0.0
|
||||
Added support for passing exceptions as ``exc_info`` on Python 3.
|
||||
|
||||
.. versionchanged:: 25.4.0
|
||||
Fixed *exception_formatter* so that it overrides the default if set.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
@@ -660,7 +633,6 @@ class ExceptionPrettyPrinter:
|
||||
file: TextIO | None = None,
|
||||
exception_formatter: ExceptionTransformer = _format_exception,
|
||||
) -> None:
|
||||
self.format_exception = exception_formatter
|
||||
if file is not None:
|
||||
self._file = file
|
||||
else:
|
||||
@@ -673,7 +645,7 @@ class ExceptionPrettyPrinter:
|
||||
if exc is None:
|
||||
exc_info = _figure_out_exc_info(event_dict.pop("exc_info", None))
|
||||
if exc_info:
|
||||
exc = self.format_exception(exc_info)
|
||||
exc = _format_exception(exc_info)
|
||||
|
||||
if exc:
|
||||
print(exc, file=self._file)
|
||||
@@ -689,7 +661,8 @@ class StackInfoRenderer:
|
||||
involving an exception and works analogously to the *stack_info* argument
|
||||
of the Python standard library logging.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
additional_ignores:
|
||||
By default, stack frames coming from *structlog* are ignored. With
|
||||
this argument you can add additional names that are ignored, before
|
||||
@@ -754,42 +727,6 @@ class CallsiteParameter(enum.Enum):
|
||||
PROCESS_NAME = "process_name"
|
||||
|
||||
|
||||
def _get_callsite_pathname(module: str, frame: FrameType) -> Any:
|
||||
return frame.f_code.co_filename
|
||||
|
||||
|
||||
def _get_callsite_filename(module: str, frame: FrameType) -> Any:
|
||||
return os.path.basename(frame.f_code.co_filename)
|
||||
|
||||
|
||||
def _get_callsite_module(module: str, frame: FrameType) -> Any:
|
||||
return os.path.splitext(os.path.basename(frame.f_code.co_filename))[0]
|
||||
|
||||
|
||||
def _get_callsite_func_name(module: str, frame: FrameType) -> Any:
|
||||
return frame.f_code.co_name
|
||||
|
||||
|
||||
def _get_callsite_lineno(module: str, frame: FrameType) -> Any:
|
||||
return frame.f_lineno
|
||||
|
||||
|
||||
def _get_callsite_thread(module: str, frame: FrameType) -> Any:
|
||||
return threading.get_ident()
|
||||
|
||||
|
||||
def _get_callsite_thread_name(module: str, frame: FrameType) -> Any:
|
||||
return threading.current_thread().name
|
||||
|
||||
|
||||
def _get_callsite_process(module: str, frame: FrameType) -> Any:
|
||||
return os.getpid()
|
||||
|
||||
|
||||
def _get_callsite_process_name(module: str, frame: FrameType) -> Any:
|
||||
return get_processname()
|
||||
|
||||
|
||||
class CallsiteParameterAdder:
|
||||
"""
|
||||
Adds parameters of the callsite that an event dictionary originated from to
|
||||
@@ -797,6 +734,10 @@ class CallsiteParameterAdder:
|
||||
dictionaries with information such as the function name, line number and
|
||||
filename that an event dictionary originated from.
|
||||
|
||||
.. warning::
|
||||
This processor cannot detect the correct callsite for invocation of
|
||||
async functions.
|
||||
|
||||
If the event dictionary has an embedded `logging.LogRecord` object and did
|
||||
not originate from *structlog* then the callsite information will be
|
||||
determined from the `logging.LogRecord` object. For event dictionaries
|
||||
@@ -808,7 +749,8 @@ class CallsiteParameterAdder:
|
||||
The keys used for callsite parameters in the event dictionary are the
|
||||
string values of `CallsiteParameter` enum members.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
parameters:
|
||||
A collection of `CallsiteParameter` values that should be added to
|
||||
the event dictionary.
|
||||
@@ -831,17 +773,35 @@ class CallsiteParameterAdder:
|
||||
"""
|
||||
|
||||
_handlers: ClassVar[
|
||||
dict[CallsiteParameter, Callable[[str, FrameType], Any]]
|
||||
dict[CallsiteParameter, Callable[[str, inspect.Traceback], Any]]
|
||||
] = {
|
||||
CallsiteParameter.PATHNAME: _get_callsite_pathname,
|
||||
CallsiteParameter.FILENAME: _get_callsite_filename,
|
||||
CallsiteParameter.MODULE: _get_callsite_module,
|
||||
CallsiteParameter.FUNC_NAME: _get_callsite_func_name,
|
||||
CallsiteParameter.LINENO: _get_callsite_lineno,
|
||||
CallsiteParameter.THREAD: _get_callsite_thread,
|
||||
CallsiteParameter.THREAD_NAME: _get_callsite_thread_name,
|
||||
CallsiteParameter.PROCESS: _get_callsite_process,
|
||||
CallsiteParameter.PROCESS_NAME: _get_callsite_process_name,
|
||||
CallsiteParameter.PATHNAME: (
|
||||
lambda module, frame_info: frame_info.filename
|
||||
),
|
||||
CallsiteParameter.FILENAME: (
|
||||
lambda module, frame_info: os.path.basename(frame_info.filename)
|
||||
),
|
||||
CallsiteParameter.MODULE: (
|
||||
lambda module, frame_info: os.path.splitext(
|
||||
os.path.basename(frame_info.filename)
|
||||
)[0]
|
||||
),
|
||||
CallsiteParameter.FUNC_NAME: (
|
||||
lambda module, frame_info: frame_info.function
|
||||
),
|
||||
CallsiteParameter.LINENO: (
|
||||
lambda module, frame_info: frame_info.lineno
|
||||
),
|
||||
CallsiteParameter.THREAD: (
|
||||
lambda module, frame_info: threading.get_ident()
|
||||
),
|
||||
CallsiteParameter.THREAD_NAME: (
|
||||
lambda module, frame_info: threading.current_thread().name
|
||||
),
|
||||
CallsiteParameter.PROCESS: (lambda module, frame_info: os.getpid()),
|
||||
CallsiteParameter.PROCESS_NAME: (
|
||||
lambda module, frame_info: get_processname()
|
||||
),
|
||||
}
|
||||
_record_attribute_map: ClassVar[dict[CallsiteParameter, str]] = {
|
||||
CallsiteParameter.PATHNAME: "pathname",
|
||||
@@ -875,7 +835,7 @@ class CallsiteParameterAdder:
|
||||
# module should not be logging using structlog.
|
||||
self._additional_ignores = ["logging", *additional_ignores]
|
||||
self._active_handlers: list[
|
||||
tuple[CallsiteParameter, Callable[[str, FrameType], Any]]
|
||||
tuple[CallsiteParameter, Callable[[str, inspect.Traceback], Any]]
|
||||
] = []
|
||||
self._record_mappings: list[CallsiteParameterAdder._RecordMapping] = []
|
||||
for parameter in parameters:
|
||||
@@ -905,8 +865,9 @@ class CallsiteParameterAdder:
|
||||
frame, module = _find_first_app_frame_and_name(
|
||||
additional_ignores=self._additional_ignores
|
||||
)
|
||||
frame_info = inspect.getframeinfo(frame)
|
||||
for parameter, handler in self._active_handlers:
|
||||
event_dict[parameter.value] = handler(module, frame)
|
||||
event_dict[parameter.value] = handler(module, frame_info)
|
||||
return event_dict
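
A configuration sketch for the adder discussed above, limited to documented CallsiteParameter members:

import structlog
from structlog.processors import CallsiteParameter, CallsiteParameterAdder

structlog.configure(
    processors=[
        CallsiteParameterAdder(
            [
                CallsiteParameter.FILENAME,
                CallsiteParameter.FUNC_NAME,
                CallsiteParameter.LINENO,
            ]
        ),
        structlog.processors.KeyValueRenderer(),
    ]
)

structlog.get_logger().info("where was this logged?")  # adds filename=, func_name=, lineno=
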
|
||||
|
||||
|
||||
@@ -923,7 +884,8 @@ class EventRenamer:
|
||||
some processors may rely on the presence and meaning of the ``event``
|
||||
key.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
to: Rename ``event_dict["event"]`` to ``event_dict[to]``
|
||||
|
||||
replace_by:
|
||||
|
||||
@@ -16,48 +16,32 @@ import contextvars
|
||||
import functools
|
||||
import logging
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
from functools import partial
|
||||
from typing import Any, Callable, Collection, Dict, Iterable, Sequence, cast
|
||||
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
from typing import Self
|
||||
else:
|
||||
from typing_extensions import Self
|
||||
|
||||
from typing import Any, Callable, Collection, Iterable, Sequence
|
||||
|
||||
from . import _config
|
||||
from ._base import BoundLoggerBase
|
||||
from ._frames import _find_first_app_frame_and_name, _format_stack
|
||||
from ._log_levels import LEVEL_TO_NAME, NAME_TO_LEVEL, add_log_level
|
||||
from .contextvars import _ASYNC_CALLING_STACK, merge_contextvars
|
||||
from ._log_levels import _LEVEL_TO_NAME, _NAME_TO_LEVEL, add_log_level
|
||||
from .contextvars import merge_contextvars
|
||||
from .exceptions import DropEvent
|
||||
from .processors import StackInfoRenderer
|
||||
from .typing import (
|
||||
Context,
|
||||
EventDict,
|
||||
ExcInfo,
|
||||
Processor,
|
||||
ProcessorReturnValue,
|
||||
WrappedLogger,
|
||||
)
|
||||
from .typing import Context, EventDict, ExcInfo, Processor, WrappedLogger
|
||||
|
||||
|
||||
__all__ = [
|
||||
"BoundLogger",
|
||||
"add_log_level_number",
|
||||
"add_log_level",
|
||||
"add_logger_name",
|
||||
"ExtraAdder",
|
||||
"BoundLogger",
|
||||
"filter_by_level",
|
||||
"get_logger",
|
||||
"LoggerFactory",
|
||||
"PositionalArgumentsFormatter",
|
||||
"ProcessorFormatter",
|
||||
"add_log_level",
|
||||
"add_log_level_number",
|
||||
"add_logger_name",
|
||||
"filter_by_level",
|
||||
"get_logger",
|
||||
"recreate_defaults",
|
||||
"render_to_log_args_and_kwargs",
|
||||
"render_to_log_kwargs",
|
||||
]
|
||||
|
||||
@@ -71,7 +55,8 @@ def recreate_defaults(*, log_level: int | None = logging.NOTSET) -> None:
|
||||
As with vanilla defaults, the backwards-compatibility guarantees don't
|
||||
apply to the settings applied here.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
log_level:
|
||||
If `None`, don't configure standard library logging **at all**.
|
||||
|
||||
@@ -82,8 +67,6 @@ def recreate_defaults(*, log_level: int | None = logging.NOTSET) -> None:
|
||||
configure it yourself.
|
||||
|
||||
.. versionadded:: 22.1.0
|
||||
.. versionchanged:: 23.3.0 Added `add_logger_name`.
|
||||
.. versionchanged:: 25.1.0 Added `PositionalArgumentsFormatter`.
|
||||
"""
|
||||
if log_level is not None:
|
||||
kw = {"force": True}
|
||||
@@ -98,10 +81,8 @@ def recreate_defaults(*, log_level: int | None = logging.NOTSET) -> None:
|
||||
_config.reset_defaults()
|
||||
_config.configure(
|
||||
processors=[
|
||||
PositionalArgumentsFormatter(), # handled by native loggers
|
||||
merge_contextvars,
|
||||
add_log_level,
|
||||
add_logger_name,
|
||||
StackInfoRenderer(),
|
||||
_config._BUILTIN_DEFAULT_PROCESSORS[-2], # TimeStamper
|
||||
_config._BUILTIN_DEFAULT_PROCESSORS[-1], # ConsoleRenderer
|
||||
@@ -154,46 +135,43 @@ class BoundLogger(BoundLoggerBase):
|
||||
|
||||
.. versionadded:: 23.1.0
|
||||
Async variants `alog()`, `adebug()`, `ainfo()`, and so forth.
|
||||
|
||||
.. versionchanged:: 24.2.0
|
||||
Callsite parameters are now also collected by
|
||||
`structlog.processors.CallsiteParameterAdder` for async log methods.
|
||||
"""
|
||||
|
||||
_logger: logging.Logger
|
||||
|
||||
def bind(self, **new_values: Any) -> Self:
|
||||
def bind(self, **new_values: Any) -> BoundLogger:
|
||||
"""
|
||||
Return a new logger with *new_values* added to the existing ones.
|
||||
"""
|
||||
return super().bind(**new_values)
|
||||
return super().bind(**new_values) # type: ignore[return-value]
|
||||
|
||||
def unbind(self, *keys: str) -> Self:
|
||||
def unbind(self, *keys: str) -> BoundLogger:
|
||||
"""
|
||||
Return a new logger with *keys* removed from the context.
|
||||
|
||||
Raises:
|
||||
|
||||
KeyError: If the key is not part of the context.
|
||||
"""
|
||||
return super().unbind(*keys)
|
||||
return super().unbind(*keys) # type: ignore[return-value]
|
||||
|
||||
def try_unbind(self, *keys: str) -> Self:
|
||||
def try_unbind(self, *keys: str) -> BoundLogger:
|
||||
"""
|
||||
Like :meth:`unbind`, but best effort: missing keys are ignored.
|
||||
|
||||
.. versionadded:: 18.2.0
|
||||
"""
|
||||
return super().try_unbind(*keys)
|
||||
return super().try_unbind(*keys) # type: ignore[return-value]
|
||||
|
||||
def new(self, **new_values: Any) -> Self:
|
||||
def new(self, **new_values: Any) -> BoundLogger:
|
||||
"""
|
||||
Clear context and binds *initial_values* using `bind`.
|
||||
|
||||
Only necessary with dict implementations that keep global state like
|
||||
those wrapped by `structlog.threadlocal.wrap_dict` when threads
|
||||
are reused.
|
||||
are re-used.
|
||||
"""
|
||||
return super().new(**new_values)
|
||||
return super().new(**new_values) # type: ignore[return-value]
|
||||
|
||||
def debug(self, event: str | None = None, *args: Any, **kw: Any) -> Any:
|
||||
"""
|
||||
@@ -227,21 +205,16 @@ class BoundLogger(BoundLoggerBase):
|
||||
"""
|
||||
return self._proxy_to_logger("critical", event, *args, **kw)
|
||||
|
||||
def fatal(self, event: str | None = None, *args: Any, **kw: Any) -> Any:
|
||||
"""
|
||||
Process event and call `logging.Logger.critical` with the result.
|
||||
"""
|
||||
return self._proxy_to_logger("critical", event, *args, **kw)
|
||||
|
||||
def exception(
|
||||
self, event: str | None = None, *args: Any, **kw: Any
|
||||
) -> Any:
|
||||
"""
|
||||
Process event and call `logging.Logger.exception` with the result,
|
||||
after setting ``exc_info`` to `True` if it's not already set.
|
||||
Process event and call `logging.Logger.error` with the result,
|
||||
after setting ``exc_info`` to `True`.
|
||||
"""
|
||||
kw.setdefault("exc_info", True)
|
||||
return self._proxy_to_logger("exception", event, *args, **kw)
|
||||
|
||||
return self.error(event, *args, **kw)
|
||||
|
||||
def log(
|
||||
self, level: int, event: str | None = None, *args: Any, **kw: Any
|
||||
@@ -250,7 +223,9 @@ class BoundLogger(BoundLoggerBase):
|
||||
Process *event* and call the appropriate logging method depending on
|
||||
*level*.
|
||||
"""
|
||||
return self._proxy_to_logger(LEVEL_TO_NAME[level], event, *args, **kw)
|
||||
return self._proxy_to_logger(_LEVEL_TO_NAME[level], event, *args, **kw)
|
||||
|
||||
fatal = critical
|
||||
|
||||
def _proxy_to_logger(
|
||||
self,
|
||||
@@ -411,16 +386,12 @@ class BoundLogger(BoundLoggerBase):
|
||||
"""
|
||||
Merge contextvars and log using the sync logger in a thread pool.
|
||||
"""
|
||||
scs_token = _ASYNC_CALLING_STACK.set(sys._getframe().f_back.f_back) # type: ignore[union-attr, arg-type, unused-ignore]
|
||||
ctx = contextvars.copy_context()
|
||||
|
||||
try:
|
||||
await asyncio.get_running_loop().run_in_executor(
|
||||
None,
|
||||
lambda: ctx.run(lambda: meth(event, *args, **kw)),
|
||||
)
|
||||
finally:
|
||||
_ASYNC_CALLING_STACK.reset(scs_token)
|
||||
await asyncio.get_running_loop().run_in_executor(
|
||||
None,
|
||||
lambda: ctx.run(lambda: meth(event, *args, **kw)),
|
||||
)
|
||||
|
||||
async def adebug(self, event: str, *args: Any, **kw: Any) -> None:
|
||||
"""
|
||||
@@ -462,13 +433,7 @@ class BoundLogger(BoundLoggerBase):
|
||||
"""
|
||||
await self._dispatch_to_sync(self.critical, event, args, kw)
|
||||
|
||||
async def afatal(self, event: str, *args: Any, **kw: Any) -> None:
|
||||
"""
|
||||
Log using `critical()`, but asynchronously in a separate thread.
|
||||
|
||||
.. versionadded:: 23.1.0
|
||||
"""
|
||||
await self._dispatch_to_sync(self.critical, event, args, kw)
|
||||
afatal = acritical
|
||||
|
||||
async def aexception(self, event: str, *args: Any, **kw: Any) -> None:
|
||||
"""
|
||||
@@ -527,11 +492,9 @@ class AsyncBoundLogger:
|
||||
.. versionchanged:: 20.2.0 fix _dispatch_to_sync contextvars usage
|
||||
.. deprecated:: 23.1.0
|
||||
Use the regular `BoundLogger` with its a-prefixed methods instead.
|
||||
.. versionchanged:: 23.3.0
|
||||
Callsite parameters are now also collected for async log methods.
|
||||
"""
|
||||
|
||||
__slots__ = ("_loop", "sync_bl")
|
||||
__slots__ = ("sync_bl", "_loop")
|
||||
|
||||
#: The wrapped synchronous logger. It is useful to be able to log
|
||||
#: synchronously occasionally.
|
||||
@@ -625,16 +588,12 @@ class AsyncBoundLogger:
|
||||
"""
|
||||
Merge contextvars and log using the sync logger in a thread pool.
|
||||
"""
|
||||
scs_token = _ASYNC_CALLING_STACK.set(sys._getframe().f_back.f_back) # type: ignore[union-attr, arg-type, unused-ignore]
|
||||
ctx = contextvars.copy_context()
|
||||
|
||||
try:
|
||||
await asyncio.get_running_loop().run_in_executor(
|
||||
self._executor,
|
||||
lambda: ctx.run(lambda: meth(event, *args, **kw)),
|
||||
)
|
||||
finally:
|
||||
_ASYNC_CALLING_STACK.reset(scs_token)
|
||||
await asyncio.get_running_loop().run_in_executor(
|
||||
self._executor,
|
||||
lambda: ctx.run(lambda: meth(event, *args, **kw)),
|
||||
)
|
||||
|
||||
async def debug(self, event: str, *args: Any, **kw: Any) -> None:
|
||||
await self._dispatch_to_sync(self.sync_bl.debug, event, args, kw)
|
||||
@@ -645,8 +604,7 @@ class AsyncBoundLogger:
|
||||
async def warning(self, event: str, *args: Any, **kw: Any) -> None:
|
||||
await self._dispatch_to_sync(self.sync_bl.warning, event, args, kw)
|
||||
|
||||
async def warn(self, event: str, *args: Any, **kw: Any) -> None:
|
||||
await self._dispatch_to_sync(self.sync_bl.warning, event, args, kw)
|
||||
warn = warning
|
||||
|
||||
async def error(self, event: str, *args: Any, **kw: Any) -> None:
|
||||
await self._dispatch_to_sync(self.sync_bl.error, event, args, kw)
|
||||
@@ -654,8 +612,7 @@ class AsyncBoundLogger:
|
||||
async def critical(self, event: str, *args: Any, **kw: Any) -> None:
|
||||
await self._dispatch_to_sync(self.sync_bl.critical, event, args, kw)
|
||||
|
||||
async def fatal(self, event: str, *args: Any, **kw: Any) -> None:
|
||||
await self._dispatch_to_sync(self.sync_bl.critical, event, args, kw)
|
||||
fatal = critical
|
||||
|
||||
async def exception(self, event: str, *args: Any, **kw: Any) -> None:
|
||||
# To make `log.exception("foo") work, we have to check if the user
|
||||
@@ -685,7 +642,8 @@ class LoggerFactory:
|
||||
>>> from structlog.stdlib import LoggerFactory
|
||||
>>> configure(logger_factory=LoggerFactory())
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
ignore_frame_names:
|
||||
When guessing the name of a logger, skip frames whose names *start*
|
||||
with one of these. For example, in pyramid applications you'll
|
||||
@@ -787,12 +745,7 @@ def filter_by_level(
    ...
    DropEvent
    """
    if (
        # We can't use logger.isEnabledFor() because it's always disabled when
        # a log entry is in flight on Python 3.14 and later,
        not logger.disabled
        and NAME_TO_LEVEL[method_name] >= logger.getEffectiveLevel()
    ):
    if logger.isEnabledFor(_NAME_TO_LEVEL[method_name]):
        return event_dict

    raise DropEvent
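
A sketch of the stdlib setup in which filter_by_level applies; it needs a real logging.Logger as the wrapped logger, hence LoggerFactory (logger name and level are illustrative):

import logging
import structlog

logging.basicConfig(level=logging.INFO)

structlog.configure(
    processors=[
        structlog.stdlib.filter_by_level,  # raises DropEvent below the stdlib level
        structlog.processors.add_log_level,
        structlog.dev.ConsoleRenderer(),
    ],
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
)

log = structlog.get_logger("demo")
log.debug("dropped silently")  # below INFO
log.info("kept")
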
@@ -816,7 +769,7 @@ def add_log_level_number(
|
||||
|
||||
.. versionadded:: 18.2.0
|
||||
"""
|
||||
event_dict["level_number"] = NAME_TO_LEVEL[method_name]
|
||||
event_dict["level_number"] = _NAME_TO_LEVEL[method_name]
|
||||
|
||||
return event_dict
|
||||
|
||||
@@ -848,7 +801,8 @@ class ExtraAdder:
|
||||
This processor can be used for adding data passed in the ``extra``
|
||||
parameter of the `logging` module's log methods to the event dictionary.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
allow:
|
||||
An optional collection of attributes that, if present in
|
||||
`logging.LogRecord` objects, will be copied to event dictionaries.
|
||||
@@ -900,66 +854,28 @@ class ExtraAdder:
                event_dict[key] = record.__dict__[key]


LOG_KWARG_NAMES = ("exc_info", "stack_info", "stacklevel")


def render_to_log_args_and_kwargs(
    _: logging.Logger, __: str, event_dict: EventDict
) -> tuple[tuple[Any, ...], dict[str, Any]]:
    """
    Render ``event_dict`` into positional and keyword arguments for
    `logging.Logger` logging methods.
    See `logging.Logger.debug` method for keyword arguments reference.

    The ``event`` field is passed in the first positional argument, positional
    arguments from ``positional_args`` field are passed in subsequent positional
    arguments, keyword arguments are extracted from the *event_dict* and the
    rest of the *event_dict* is added as ``extra``.

    This allows you to defer formatting to `logging`.

    .. versionadded:: 25.1.0
    """
    args = (event_dict.pop("event"), *event_dict.pop("positional_args", ()))

    kwargs = {
        kwarg_name: event_dict.pop(kwarg_name)
        for kwarg_name in LOG_KWARG_NAMES
        if kwarg_name in event_dict
    }
    if event_dict:
        kwargs["extra"] = event_dict

    return args, kwargs


def render_to_log_kwargs(
    _: logging.Logger, __: str, event_dict: EventDict
) -> EventDict:
    """
    Render ``event_dict`` into keyword arguments for `logging.Logger` logging
    methods.
    See `logging.Logger.debug` method for keyword arguments reference.
    Render ``event_dict`` into keyword arguments for `logging.log`.

    The ``event`` field is translated into ``msg``, keyword arguments are
    extracted from the *event_dict* and the rest of the *event_dict* is added as
    ``extra``.
    The ``event`` field is translated into ``msg`` and the rest of the
    *event_dict* is added as ``extra``.

    This allows you to defer formatting to `logging`.

    .. versionadded:: 17.1.0
    .. versionchanged:: 22.1.0
        ``exc_info``, ``stack_info``, and ``stacklevel`` are passed as proper
        ``exc_info``, ``stack_info``, and ``stackLevel`` are passed as proper
        kwargs and not put into ``extra``.
    .. versionchanged:: 24.2.0
        ``stackLevel`` corrected to ``stacklevel``.
    """
    return {
        "msg": event_dict.pop("event"),
        "extra": event_dict,
        **{
            kw: event_dict.pop(kw)
            for kw in LOG_KWARG_NAMES
            for kw in ("exc_info", "stack_info", "stackLevel")
            if kw in event_dict
        },
    }
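
A sketch of deferring formatting to logging with the renderer above (documented structlog.stdlib pattern; the logger name is made up):

import logging
import structlog

logging.basicConfig(level=logging.INFO)

structlog.configure(
    processors=[
        structlog.stdlib.add_log_level,
        structlog.stdlib.render_to_log_kwargs,  # event -> msg, rest -> extra
    ],
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
)

structlog.get_logger("app").info("payment received", amount=42)
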
@@ -977,7 +893,8 @@ class ProcessorFormatter(logging.Formatter):
|
||||
|
||||
Please refer to :ref:`processor-formatter` for examples.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
foreign_pre_chain:
|
||||
If not `None`, it is used as a processor chain that is applied to
|
||||
**non**-*structlog* log entries before the event dictionary is
|
||||
@@ -1034,11 +951,8 @@ class ProcessorFormatter(logging.Formatter):
|
||||
This parameter exists for historic reasons. Please use *processors*
|
||||
instead.
|
||||
|
||||
use_get_message:
|
||||
If True, use ``record.getMessage`` to get a fully rendered log
|
||||
message, otherwise use ``str(record.msg)``. (default: True)
|
||||
|
||||
Raises:
|
||||
|
||||
TypeError: If both or neither *processor* and *processors* are passed.
|
||||
|
||||
.. versionadded:: 17.1.0
|
||||
@@ -1049,7 +963,6 @@ class ProcessorFormatter(logging.Formatter):
|
||||
.. deprecated:: 21.3.0
|
||||
*processor* (singular) in favor of *processors* (plural). Removal is not
|
||||
planned.
|
||||
.. versionadded:: 23.3.0 *use_get_message*
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
@@ -1061,7 +974,6 @@ class ProcessorFormatter(logging.Formatter):
|
||||
keep_stack_info: bool = False,
|
||||
logger: logging.Logger | None = None,
|
||||
pass_foreign_args: bool = False,
|
||||
use_get_message: bool = True,
|
||||
*args: Any,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
@@ -1069,10 +981,7 @@ class ProcessorFormatter(logging.Formatter):
|
||||
super().__init__(*args, fmt=fmt, **kwargs) # type: ignore[misc]
|
||||
|
||||
if processor and processors:
|
||||
msg = (
|
||||
"The `processor` and `processors` arguments are mutually"
|
||||
" exclusive."
|
||||
)
|
||||
msg = "The `processor` and `processors` arguments are mutually exclusive."
|
||||
raise TypeError(msg)
|
||||
|
||||
self.processors: Sequence[Processor]
|
||||
@@ -1089,7 +998,6 @@ class ProcessorFormatter(logging.Formatter):
|
||||
self.keep_stack_info = keep_stack_info
|
||||
self.logger = logger
|
||||
self.pass_foreign_args = pass_foreign_args
|
||||
self.use_get_message = use_get_message
|
||||
|
||||
def format(self, record: logging.LogRecord) -> str:
|
||||
"""
|
||||
@@ -1105,28 +1013,23 @@ class ProcessorFormatter(logging.Formatter):
|
||||
logger = getattr(record, "_logger", _SENTINEL)
|
||||
meth_name = getattr(record, "_name", "__structlog_sentinel__")
|
||||
|
||||
ed: ProcessorReturnValue
|
||||
if logger is not _SENTINEL and meth_name != "__structlog_sentinel__":
|
||||
# Both attached by wrap_for_formatter
|
||||
if self.logger is not None:
|
||||
logger = self.logger
|
||||
meth_name = cast(str, record._name) # type:ignore[attr-defined]
|
||||
meth_name = record._name # type: ignore[attr-defined]
|
||||
|
||||
# We need to copy because it's possible that the same record gets
|
||||
# processed by multiple logging formatters. LogRecord.getMessage
|
||||
# processed by multiple logging formatters. LogRecord.getMessage
|
||||
# would transform our dict into a str.
|
||||
ed = cast(Dict[str, Any], record.msg).copy()
|
||||
ed = record.msg.copy() # type: ignore[union-attr]
|
||||
ed["_record"] = record
|
||||
ed["_from_structlog"] = True
|
||||
else:
|
||||
logger = self.logger
|
||||
meth_name = record.levelname.lower()
|
||||
ed = {
|
||||
"event": (
|
||||
record.getMessage()
|
||||
if self.use_get_message
|
||||
else str(record.msg)
|
||||
),
|
||||
"event": record.getMessage(),
|
||||
"_record": record,
|
||||
"_from_structlog": False,
|
||||
}
|
||||
@@ -1136,38 +1039,27 @@ class ProcessorFormatter(logging.Formatter):
|
||||
|
||||
record.args = ()
|
||||
|
||||
# Add stack-related attributes to the event dict
|
||||
# Add stack-related attributes to event_dict and unset them
|
||||
# on the record copy so that the base implementation wouldn't
|
||||
# append stacktraces to the output.
|
||||
if record.exc_info:
|
||||
ed["exc_info"] = record.exc_info
|
||||
if record.stack_info:
|
||||
ed["stack_info"] = record.stack_info
|
||||
|
||||
if not self.keep_exc_info:
|
||||
record.exc_text = None
|
||||
record.exc_info = None
|
||||
if not self.keep_stack_info:
|
||||
record.stack_info = None
|
||||
|
||||
# Non-structlog allows to run through a chain to prepare it for the
|
||||
# final processor (e.g. adding timestamps and log levels).
|
||||
for proc in self.foreign_pre_chain or ():
|
||||
ed = cast(EventDict, proc(logger, meth_name, ed))
|
||||
|
||||
# If required, unset stack-related attributes on the record copy so
|
||||
# that the base implementation doesn't append stacktraces to the
|
||||
# output.
|
||||
if not self.keep_exc_info:
|
||||
record.exc_text = None
|
||||
record.exc_info = None
|
||||
if not self.keep_stack_info:
|
||||
record.stack_info = None
|
||||
ed = proc(logger, meth_name, ed)
|
||||
|
||||
for p in self.processors:
|
||||
ed = p(logger, meth_name, ed) # type: ignore[arg-type]
|
||||
|
||||
if not isinstance(ed, str):
|
||||
warnings.warn(
|
||||
"The last processor in ProcessorFormatter.processors must "
|
||||
f"return a string, but {self.processors[-1]} returned a "
|
||||
f"{type(ed)} instead.",
|
||||
category=RuntimeWarning,
|
||||
stacklevel=1,
|
||||
)
|
||||
ed = cast(str, ed)
|
||||
ed = p(logger, meth_name, ed)
|
||||
|
||||
record.msg = ed
|
||||
|
||||
@@ -1180,12 +1072,11 @@ class ProcessorFormatter(logging.Formatter):
|
||||
"""
|
||||
Wrap *logger*, *name*, and *event_dict*.
|
||||
|
||||
The result is later unpacked by `ProcessorFormatter` when formatting
|
||||
log entries.
|
||||
The result is later unpacked by `ProcessorFormatter` when
|
||||
formatting log entries.
|
||||
|
||||
Use this static method as the renderer (in other words, final
|
||||
processor) if you want to use `ProcessorFormatter` in your `logging`
|
||||
configuration.
|
||||
Use this static method as the renderer (i.e. final processor) if you
|
||||
want to use `ProcessorFormatter` in your `logging` configuration.
|
||||
"""
|
||||
return (event_dict,), {"extra": {"_logger": logger, "_name": name}}
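
A sketch of the wrap_for_formatter / ProcessorFormatter round trip (standard documented usage; the handler wiring is illustrative):

import logging
import structlog

structlog.configure(
    processors=[
        structlog.stdlib.add_log_level,
        structlog.stdlib.ProcessorFormatter.wrap_for_formatter,  # final processor
    ],
    logger_factory=structlog.stdlib.LoggerFactory(),
)

formatter = structlog.stdlib.ProcessorFormatter(
    processors=[
        structlog.stdlib.ProcessorFormatter.remove_processors_meta,
        structlog.dev.ConsoleRenderer(),
    ],
)

handler = logging.StreamHandler()
handler.setFormatter(formatter)
root = logging.getLogger()
root.addHandler(handler)
root.setLevel(logging.INFO)

structlog.get_logger().info("rendered by ProcessorFormatter")
logging.getLogger("third.party").info("also rendered by ProcessorFormatter")
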
|
||||
|
||||
|
||||
@@ -17,7 +17,6 @@ from contextlib import contextmanager
|
||||
from typing import Any, Generator, NamedTuple, NoReturn
|
||||
|
||||
from ._config import configure, get_config
|
||||
from ._log_levels import map_method_name
|
||||
from .exceptions import DropEvent
|
||||
from .typing import EventDict, WrappedLogger
|
||||
|
||||
@@ -42,10 +41,6 @@ class LogCapture:
|
||||
:ivar List[structlog.typing.EventDict] entries: The captured log entries.
|
||||
|
||||
.. versionadded:: 20.1.0
|
||||
|
||||
.. versionchanged:: 24.3.0
|
||||
Added mapping from "exception" to "error"
|
||||
Added mapping from "warn" to "warning"
|
||||
"""
|
||||
|
||||
entries: list[EventDict]
|
||||
@@ -56,7 +51,7 @@ class LogCapture:
|
||||
def __call__(
|
||||
self, _: WrappedLogger, method_name: str, event_dict: EventDict
|
||||
) -> NoReturn:
|
||||
event_dict["log_level"] = map_method_name(method_name)
|
||||
event_dict["log_level"] = method_name
|
||||
self.entries.append(event_dict)
|
||||
|
||||
raise DropEvent
|
||||
@@ -144,7 +139,8 @@ class CapturedCall(NamedTuple):
|
||||
|
||||
Can also be unpacked like a tuple.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
method_name: The method name that got called.
|
||||
|
||||
args: A tuple of the positional arguments.
|
||||
@@ -177,7 +173,7 @@ class CapturingLogger:
|
||||
self.calls = []
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<CapturingLogger with {len(self.calls)} call(s)>"
|
||||
return f"<CapturingLogger with { len(self.calls) } call(s)>"
|
||||
|
||||
def __getattr__(self, name: str) -> Any:
|
||||
"""
|
||||
@@ -194,7 +190,7 @@ class CapturingLoggerFactory:
|
||||
r"""
|
||||
Produce and cache `CapturingLogger`\ s.
|
||||
|
||||
Each factory produces and reuses only **one** logger.
|
||||
Each factory produces and re-uses only **one** logger.
|
||||
|
||||
You can access it via the ``logger`` attribute.
|
||||
|
||||
@@ -204,7 +200,6 @@ class CapturingLoggerFactory:
|
||||
|
||||
.. versionadded:: 20.2.0
|
||||
"""
|
||||
|
||||
logger: CapturingLogger
|
||||
|
||||
def __init__(self) -> None:
|
||||
|
||||
@@ -83,7 +83,8 @@ def wrap_dict(dict_class: type[Context]) -> type[Context]:
|
||||
|
||||
The wrapped class and used to keep global in the current thread.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
dict_class: Class used for keeping context.
|
||||
|
||||
.. deprecated:: 22.1.0
|
||||
@@ -105,18 +106,20 @@ def as_immutable(logger: TLLogger) -> TLLogger:
|
||||
"""
|
||||
Extract the context from a thread local logger into an immutable logger.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
logger (structlog.typing.BindableLogger):
|
||||
A logger with *possibly* thread local state.
|
||||
|
||||
Returns:
|
||||
|
||||
:class:`~structlog.BoundLogger` with an immutable context.
|
||||
|
||||
.. deprecated:: 22.1.0
|
||||
"""
|
||||
_deprecated()
|
||||
if isinstance(logger, BoundLoggerLazyProxy):
|
||||
logger = logger.bind()
|
||||
logger = logger.bind() # type: ignore[assignment]
|
||||
|
||||
try:
|
||||
ctx = logger._context._tl.dict_.__class__( # type: ignore[union-attr]
|
||||
@@ -147,12 +150,9 @@ def tmp_bind(
|
||||
.. deprecated:: 22.1.0
|
||||
"""
|
||||
_deprecated()
|
||||
if isinstance(logger, BoundLoggerLazyProxy):
|
||||
logger = logger.bind()
|
||||
|
||||
saved = as_immutable(logger)._context
|
||||
try:
|
||||
yield logger.bind(**tmp_values)
|
||||
yield logger.bind(**tmp_values) # type: ignore[misc]
|
||||
finally:
|
||||
logger._context.clear()
|
||||
logger._context.update(saved)
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"""
|
||||
Extract a structured traceback from an exception.
|
||||
|
||||
Based on work by Will McGugan
|
||||
`Contributed by Will McGugan
|
||||
<https://github.com/hynek/structlog/pull/407#issuecomment-1150926246>`_ from
|
||||
`rich.traceback
|
||||
<https://github.com/Textualize/rich/blob/972dedff/rich/traceback.py>`_.
|
||||
@@ -15,20 +15,11 @@ Based on work by Will McGugan
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
from dataclasses import asdict, dataclass, field
|
||||
from traceback import walk_tb
|
||||
from types import ModuleType, TracebackType
|
||||
from typing import Any, Iterable, Sequence, Tuple, Union
|
||||
|
||||
|
||||
try:
|
||||
import rich
|
||||
import rich.pretty
|
||||
except ImportError:
|
||||
rich = None # type: ignore[assignment]
|
||||
from types import TracebackType
|
||||
from typing import Any, Tuple, Union
|
||||
|
||||
from .typing import ExcInfo
|
||||
|
||||
@@ -46,7 +37,6 @@ __all__ = [
|
||||
|
||||
|
||||
SHOW_LOCALS = True
|
||||
LOCALS_MAX_LENGTH = 10
|
||||
LOCALS_MAX_STRING = 80
|
||||
MAX_FRAMES = 50
|
||||
|
||||
@@ -62,6 +52,7 @@ class Frame:
|
||||
filename: str
|
||||
lineno: int
|
||||
name: str
|
||||
line: str = ""
|
||||
locals: dict[str, str] | None = None
|
||||
|
||||
|
||||
@@ -82,22 +73,13 @@ class SyntaxError_: # noqa: N801
|
||||
class Stack:
|
||||
"""
|
||||
Represents an exception and a list of stack frames.
|
||||
|
||||
.. versionchanged:: 25.2.0
|
||||
Added the *exc_notes* field.
|
||||
|
||||
.. versionchanged:: 25.4.0
|
||||
Added the *is_group* and *exceptions* fields.
|
||||
"""
|
||||
|
||||
exc_type: str
|
||||
exc_value: str
|
||||
exc_notes: list[str] = field(default_factory=list)
|
||||
syntax_error: SyntaxError_ | None = None
|
||||
is_cause: bool = False
|
||||
frames: list[Frame] = field(default_factory=list)
|
||||
is_group: bool = False
|
||||
exceptions: list[Trace] = field(default_factory=list)
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -110,72 +92,27 @@ class Trace:
|
||||
|
||||
|
||||
def safe_str(_object: Any) -> str:
|
||||
"""Don't allow exceptions from __str__ to propagate."""
|
||||
"""Don't allow exceptions from __str__ to propegate."""
|
||||
try:
|
||||
return str(_object)
|
||||
except Exception as error: # noqa: BLE001
|
||||
return f"<str-error {str(error)!r}>"
|
||||
|
||||
|
||||
def to_repr(
|
||||
obj: Any,
|
||||
max_length: int | None = None,
|
||||
max_string: int | None = None,
|
||||
use_rich: bool = True,
|
||||
) -> str:
|
||||
"""
|
||||
Get repr string for an object, but catch errors.
|
||||
|
||||
:func:`repr()` is used for strings, too, so that secret wrappers that
|
||||
inherit from :func:`str` and overwrite ``__repr__()`` are handled correctly
|
||||
(i.e. secrets are not logged in plain text).
|
||||
|
||||
Args:
|
||||
obj: Object to get a string representation for.
|
||||
|
||||
max_length: Maximum length of containers before abbreviating, or
|
||||
``None`` for no abbreviation.
|
||||
|
||||
max_string: Maximum length of string before truncating, or ``None`` to
|
||||
disable truncating.
|
||||
|
||||
use_rich: If ``True`` (the default), use rich_ to compute the repr.
|
||||
If ``False`` or if rich_ is not installed, fall back to a simpler
|
||||
algorithm.
|
||||
|
||||
Returns:
|
||||
The string representation of *obj*.
|
||||
|
||||
.. versionchanged:: 24.3.0
|
||||
Added *max_length* argument. Use :program:`rich` to render locals if it
|
||||
is available. Call :func:`repr()` on strings in fallback
|
||||
implementation.
|
||||
"""
|
||||
if use_rich and rich is not None:
|
||||
# Let rich render the repr if it is available.
|
||||
# It produces much better results for containers and dataclasses/attrs.
|
||||
obj_repr = rich.pretty.traverse(
|
||||
obj, max_length=max_length, max_string=max_string
|
||||
).render()
|
||||
def to_repr(obj: Any, max_string: int | None = None) -> str:
|
||||
"""Get repr string for an object, but catch errors."""
|
||||
if isinstance(obj, str):
|
||||
obj_repr = obj
|
||||
else:
|
||||
# Generate a (truncated) repr if rich is not available.
|
||||
# Handle str/bytes differently to get better results for truncated
|
||||
# representations. Also catch all errors, similarly to "safe_str()".
|
||||
try:
|
||||
if isinstance(obj, (str, bytes)):
|
||||
if max_string is not None and len(obj) > max_string:
|
||||
truncated = len(obj) - max_string
|
||||
obj_repr = f"{obj[:max_string]!r}+{truncated}"
|
||||
else:
|
||||
obj_repr = repr(obj)
|
||||
else:
|
||||
obj_repr = repr(obj)
|
||||
if max_string is not None and len(obj_repr) > max_string:
|
||||
truncated = len(obj_repr) - max_string
|
||||
obj_repr = f"{obj_repr[:max_string]!r}+{truncated}"
|
||||
obj_repr = repr(obj)
|
||||
except Exception as error: # noqa: BLE001
|
||||
obj_repr = f"<repr-error {str(error)!r}>"
|
||||
|
||||
if max_string is not None and len(obj_repr) > max_string:
|
||||
truncated = len(obj_repr) - max_string
|
||||
obj_repr = f"{obj_repr[:max_string]!r}+{truncated}"
|
||||
|
||||
return obj_repr
|
||||
|
||||
|
||||
@@ -185,16 +122,13 @@ def extract(
|
||||
traceback: TracebackType | None,
|
||||
*,
|
||||
show_locals: bool = False,
|
||||
locals_max_length: int = LOCALS_MAX_LENGTH,
|
||||
locals_max_string: int = LOCALS_MAX_STRING,
|
||||
locals_hide_dunder: bool = True,
|
||||
locals_hide_sunder: bool = False,
|
||||
use_rich: bool = True,
|
||||
) -> Trace:
|
||||
"""
|
||||
Extract traceback information.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
exc_type: Exception type.
|
||||
|
||||
exc_value: Exception value.
|
||||
@@ -203,38 +137,15 @@ def extract(
|
||||
|
||||
show_locals: Enable display of local variables. Defaults to False.
|
||||
|
||||
locals_max_length:
|
||||
Maximum length of containers before abbreviating, or ``None`` for
|
||||
no abbreviation.
|
||||
|
||||
locals_max_string:
|
||||
Maximum length of string before truncating, or ``None`` to disable
|
||||
truncating.
|
||||
Maximum length of string before truncating, or ``None`` to disable.
|
||||
|
||||
locals_hide_dunder:
|
||||
Hide locals prefixed with double underscore.
|
||||
Defaults to True.
|
||||
|
||||
locals_hide_sunder:
|
||||
Hide locals prefixed with single underscore.
|
||||
This implies hiding *locals_hide_dunder*.
|
||||
Defaults to False.
|
||||
|
||||
use_rich: If ``True`` (the default), use rich_ to compute the repr.
|
||||
If ``False`` or if rich_ is not installed, fall back to a simpler
|
||||
algorithm.
|
||||
max_frames: Maximum number of frames in each stack
|
||||
|
||||
Returns:
|
||||
A Trace instance with structured information about all exceptions.
|
||||
|
||||
.. versionadded:: 22.1.0
|
||||
|
||||
.. versionchanged:: 24.3.0
|
||||
Added *locals_max_length*, *locals_hide_sunder*, *locals_hide_dunder*
|
||||
and *use_rich* arguments.
|
||||
|
||||
.. versionchanged:: 25.4.0
|
||||
Handle exception groups.
|
||||
"""
|
||||
|
||||
stacks: list[Stack] = []
|
||||
@@ -244,30 +155,9 @@ def extract(
|
||||
stack = Stack(
|
||||
exc_type=safe_str(exc_type.__name__),
|
||||
exc_value=safe_str(exc_value),
|
||||
exc_notes=[
|
||||
safe_str(note) for note in getattr(exc_value, "__notes__", ())
|
||||
],
|
||||
is_cause=is_cause,
|
||||
)
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
if isinstance(exc_value, (BaseExceptionGroup, ExceptionGroup)): # noqa: F821
|
||||
stack.is_group = True
|
||||
for exception in exc_value.exceptions:
|
||||
stack.exceptions.append(
|
||||
extract(
|
||||
type(exception),
|
||||
exception,
|
||||
exception.__traceback__,
|
||||
show_locals=show_locals,
|
||||
locals_max_length=locals_max_length,
|
||||
locals_max_string=locals_max_string,
|
||||
locals_hide_dunder=locals_hide_dunder,
|
||||
locals_hide_sunder=locals_hide_sunder,
|
||||
use_rich=use_rich,
|
||||
)
|
||||
)
|
||||
|
||||
if isinstance(exc_value, SyntaxError):
|
||||
stack.syntax_error = SyntaxError_(
|
||||
offset=exc_value.offset or 0,
|
||||
@@ -280,47 +170,20 @@ def extract(
|
||||
stacks.append(stack)
|
||||
append = stack.frames.append # pylint: disable=no-member
|
||||
|
||||
def get_locals(
|
||||
iter_locals: Iterable[tuple[str, object]],
|
||||
) -> Iterable[tuple[str, object]]:
|
||||
"""Extract locals from an iterator of key pairs."""
|
||||
if not (locals_hide_dunder or locals_hide_sunder):
|
||||
yield from iter_locals
|
||||
return
|
||||
for key, value in iter_locals:
|
||||
if locals_hide_dunder and key.startswith("__"):
|
||||
continue
|
||||
if locals_hide_sunder and key.startswith("_"):
|
||||
continue
|
||||
yield key, value
|
||||
|
||||
for frame_summary, line_no in walk_tb(traceback):
|
||||
filename = frame_summary.f_code.co_filename
|
||||
if filename and not filename.startswith("<"):
|
||||
filename = os.path.abspath(filename)
|
||||
# Rich has this, but we are not rich and like to keep all frames:
|
||||
# if frame_summary.f_locals.get("_rich_traceback_omit", False):
|
||||
# continue # noqa: ERA001
|
||||
|
||||
frame = Frame(
|
||||
filename=filename or "?",
|
||||
lineno=line_no,
|
||||
name=frame_summary.f_code.co_name,
|
||||
locals=(
|
||||
{
|
||||
key: to_repr(
|
||||
value,
|
||||
max_length=locals_max_length,
|
||||
max_string=locals_max_string,
|
||||
use_rich=use_rich,
|
||||
)
|
||||
for key, value in get_locals(
|
||||
frame_summary.f_locals.items()
|
||||
)
|
||||
}
|
||||
if show_locals
|
||||
else None
|
||||
),
|
||||
locals={
|
||||
key: to_repr(value, max_string=locals_max_string)
|
||||
for key, value in frame_summary.f_locals.items()
|
||||
}
|
||||
if show_locals
|
||||
else None,
|
||||
)
|
||||
append(frame)
|
||||
|
||||
@@ -357,31 +220,15 @@ class ExceptionDictTransformer:
|
||||
These dictionaries are based on :class:`Stack` instances generated by
|
||||
:func:`extract()` and can be dumped to JSON.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
show_locals:
|
||||
Whether or not to include the values of a stack frame's local
|
||||
variables.
|
||||
|
||||
locals_max_length:
|
||||
Maximum length of containers before abbreviating, or ``None`` for
|
||||
no abbreviation.
|
||||
|
||||
locals_max_string:
|
||||
Maximum length of string before truncating, or ``None`` to disable
|
||||
truncating.
|
||||
|
||||
locals_hide_dunder:
|
||||
Hide locals prefixed with double underscore.
|
||||
Defaults to True.
|
||||
|
||||
locals_hide_sunder:
|
||||
Hide locals prefixed with single underscore.
|
||||
This implies hiding *locals_hide_dunder*.
|
||||
Defaults to False.
|
||||
|
||||
suppress:
|
||||
Optional sequence of modules or paths for which to suppress the
|
||||
display of locals even if *show_locals* is ``True``.
|
||||
The maximum length after which long string representations are
|
||||
truncated.
|
||||
|
||||
max_frames:
|
||||
Maximum number of frames in each stack. Frames are removed from
|
||||
@@ -390,78 +237,32 @@ class ExceptionDictTransformer:
|
||||
the exception actually happened. With larger web frameworks, this
|
||||
does not always work, so you should stick with the default.
|
||||
|
||||
use_rich: If ``True`` (the default), use rich_ to compute the repr of
|
||||
locals. If ``False`` or if rich_ is not installed, fall back to
|
||||
a simpler algorithm.
|
||||
|
||||
.. seealso::
|
||||
:doc:`exceptions` for a broader explanation of *structlog*'s exception
|
||||
features.
|
||||
|
||||
.. versionchanged:: 24.3.0
|
||||
Added *locals_max_length*, *locals_hide_sunder*, *locals_hide_dunder*,
|
||||
*suppress* and *use_rich* arguments.
|
||||
|
||||
.. versionchanged:: 25.1.0
|
||||
*locals_max_length* and *locals_max_string* may be None to disable
|
||||
truncation.
|
||||
|
||||
.. versionchanged:: 25.4.0
|
||||
Handle exception groups.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
show_locals: bool = SHOW_LOCALS,
|
||||
locals_max_length: int = LOCALS_MAX_LENGTH,
|
||||
show_locals: bool = True,
|
||||
locals_max_string: int = LOCALS_MAX_STRING,
|
||||
locals_hide_dunder: bool = True,
|
||||
locals_hide_sunder: bool = False,
|
||||
suppress: Iterable[str | ModuleType] = (),
|
||||
max_frames: int = MAX_FRAMES,
|
||||
use_rich: bool = True,
|
||||
) -> None:
|
||||
if locals_max_length is not None and locals_max_length < 0:
|
||||
msg = f'"locals_max_length" must be >= 0: {locals_max_length}'
|
||||
raise ValueError(msg)
|
||||
if locals_max_string is not None and locals_max_string < 0:
|
||||
if locals_max_string < 0:
|
||||
msg = f'"locals_max_string" must be >= 0: {locals_max_string}'
|
||||
raise ValueError(msg)
|
||||
if max_frames < 2:
|
||||
msg = f'"max_frames" must be >= 2: {max_frames}'
|
||||
raise ValueError(msg)
|
||||
self.show_locals = show_locals
|
||||
self.locals_max_length = locals_max_length
|
||||
self.locals_max_string = locals_max_string
|
||||
self.locals_hide_dunder = locals_hide_dunder
|
||||
self.locals_hide_sunder = locals_hide_sunder
|
||||
self.suppress: Sequence[str] = []
|
||||
for suppress_entity in suppress:
|
||||
if not isinstance(suppress_entity, str):
|
||||
if suppress_entity.__file__ is None:
|
||||
msg = (
|
||||
f'"suppress" item {suppress_entity!r} must be a '
|
||||
f"module with '__file__' attribute"
|
||||
)
|
||||
raise ValueError(msg)
|
||||
path = os.path.dirname(suppress_entity.__file__)
|
||||
else:
|
||||
path = suppress_entity
|
||||
path = os.path.normpath(os.path.abspath(path))
|
||||
self.suppress.append(path)
|
||||
self.max_frames = max_frames
|
||||
self.use_rich = use_rich
|
||||
|
||||
def __call__(self, exc_info: ExcInfo) -> list[dict[str, Any]]:
|
||||
trace = extract(
|
||||
*exc_info,
|
||||
show_locals=self.show_locals,
|
||||
locals_max_length=self.locals_max_length,
|
||||
locals_max_string=self.locals_max_string,
|
||||
locals_hide_dunder=self.locals_hide_dunder,
|
||||
locals_hide_sunder=self.locals_hide_sunder,
|
||||
use_rich=self.use_rich,
|
||||
)
|
||||
|
||||
for stack in trace.stacks:
|
||||
@@ -482,21 +283,4 @@ class ExceptionDictTransformer:
|
||||
*stack.frames[-half:],
|
||||
]
|
||||
|
||||
return self._as_dict(trace)
|
||||
|
||||
def _as_dict(self, trace: Trace) -> list[dict[str, Any]]:
|
||||
stack_dicts = []
|
||||
for stack in trace.stacks:
|
||||
stack_dict = asdict(stack)
|
||||
for frame_dict in stack_dict["frames"]:
|
||||
if frame_dict["locals"] is None or any(
|
||||
frame_dict["filename"].startswith(path)
|
||||
for path in self.suppress
|
||||
):
|
||||
del frame_dict["locals"]
|
||||
if stack.is_group:
|
||||
stack_dict["exceptions"] = [
|
||||
self._as_dict(t) for t in stack.exceptions
|
||||
]
|
||||
stack_dicts.append(stack_dict)
|
||||
return stack_dicts
|
||||
return [asdict(stack) for stack in trace.stacks]
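
A sketch of how this transformer is usually reached from a configuration; dict_tracebacks is the documented shortcut for ExceptionRenderer(ExceptionDictTransformer()):

import structlog

structlog.configure(
    processors=[
        structlog.processors.dict_tracebacks,  # exc_info -> list of stack dicts
        structlog.processors.JSONRenderer(),
    ]
)

log = structlog.get_logger()
try:
    1 / 0
except ZeroDivisionError:
    log.error("boom", exc_info=True)  # "exception" key holds JSON-able traceback data
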
|
||||
|
||||
@@ -24,6 +24,7 @@ from zope.interface import implementer
|
||||
|
||||
from ._base import BoundLoggerBase
|
||||
from ._config import _BUILTIN_DEFAULT_PROCESSORS
|
||||
from ._utils import until_not_interrupted
|
||||
from .processors import JSONRenderer as GenericJSONRenderer
|
||||
from .typing import EventDict, WrappedLogger
|
||||
|
||||
@@ -203,7 +204,8 @@ class PlainFileLogObserver:
|
||||
Great to just print JSON to stdout where you catch it with something like
|
||||
runit.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
file: File to print to.
|
||||
|
||||
.. versionadded:: 0.2.0
|
||||
@@ -214,11 +216,12 @@ class PlainFileLogObserver:
|
||||
self._flush = file.flush
|
||||
|
||||
def __call__(self, eventDict: EventDict) -> None:
|
||||
self._write(
|
||||
until_not_interrupted(
|
||||
self._write,
|
||||
textFromEventDict(eventDict) # type: ignore[arg-type, operator]
|
||||
+ "\n",
|
||||
)
|
||||
self._flush()
|
||||
until_not_interrupted(self._flush)
|
||||
|
||||
|
||||
@implementer(ILogObserver)
|
||||
@@ -226,7 +229,8 @@ class JSONLogObserverWrapper:
|
||||
"""
|
||||
Wrap a log *observer* and render non-`JSONRenderer` entries to JSON.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
observer (ILogObserver):
|
||||
Twisted log observer to wrap. For example
|
||||
:class:`PlainFileObserver` or Twisted's stock `FileLogObserver
|
||||
@@ -289,7 +293,8 @@ class EventAdapter:
|
||||
<https://docs.twisted.org/en/stable/api/twisted.python.log.html#err>`_
|
||||
behave as expected.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
dictRenderer:
|
||||
Renderer that is used for the actual log message. Please note that
|
||||
structlog comes with a dedicated `JSONRenderer`.
|
||||
@@ -301,9 +306,8 @@ class EventAdapter:
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
dictRenderer: (
|
||||
Callable[[WrappedLogger, str, EventDict], str] | None
|
||||
) = None,
|
||||
dictRenderer: Callable[[WrappedLogger, str, EventDict], str]
|
||||
| None = None,
|
||||
) -> None:
|
||||
self._dictRenderer = dictRenderer or _BUILTIN_DEFAULT_PROCESSORS[-1]
|
||||
|
||||
|
||||
@@ -26,13 +26,13 @@ from .typing import (
|
||||
|
||||
|
||||
__all__ = (
|
||||
"BindableLogger",
|
||||
"WrappedLogger",
|
||||
"Context",
|
||||
"EventDict",
|
||||
"Processor",
|
||||
"ExcInfo",
|
||||
"ExceptionRenderer",
|
||||
"ExceptionTransformer",
|
||||
"BindableLogger",
|
||||
"FilteringBoundLogger",
|
||||
"Processor",
|
||||
"WrappedLogger",
|
||||
)
|
||||
|
||||
@@ -14,8 +14,6 @@ probably change to something more elegant.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
|
||||
from types import TracebackType
|
||||
from typing import (
|
||||
Any,
|
||||
@@ -33,12 +31,6 @@ from typing import (
|
||||
)
|
||||
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
from typing import Self
|
||||
else:
|
||||
from typing_extensions import Self
|
||||
|
||||
|
||||
WrappedLogger = Any
|
||||
"""
|
||||
A logger that is wrapped by a bound logger and is ultimately responsible for
|
||||
@@ -68,15 +60,11 @@ copy itself.
|
||||
.. versionadded:: 20.2.0
|
||||
"""
|
||||
|
||||
ProcessorReturnValue = Union[
|
||||
Mapping[str, Any], str, bytes, bytearray, Tuple[Any, ...]
|
||||
Processor = Callable[
|
||||
[WrappedLogger, str, EventDict],
|
||||
Union[Mapping[str, Any], str, bytes, bytearray, Tuple[Any, ...]],
|
||||
]
|
||||
"""
|
||||
A value returned by a processor.
|
||||
"""
|
||||
|
||||
Processor = Callable[[WrappedLogger, str, EventDict], ProcessorReturnValue]
|
||||
"""
|
||||
A callable that is part of the processor chain.
|
||||
|
||||
See :doc:`processors`.
|
||||
@@ -114,17 +102,20 @@ class ExceptionTransformer(Protocol):
|
||||
Used by `structlog.processors.format_exc_info()` and
|
||||
`structlog.processors.ExceptionPrettyPrinter`.
|
||||
|
||||
Args:
|
||||
Arguments:
|
||||
|
||||
exc_info: Is the exception tuple to format
|
||||
|
||||
Returns:
|
||||
Anything that can be rendered by the last processor in your chain, for
|
||||
example, a string or a JSON-serializable structure.
|
||||
|
||||
Anything that can be rendered by the last processor in your chain,
|
||||
for example, a string or a JSON-serializable structure.
|
||||
|
||||
.. versionadded:: 22.1.0
|
||||
"""
|
||||
|
||||
def __call__(self, exc_info: ExcInfo) -> Any: ...
|
||||
def __call__(self, exc_info: ExcInfo) -> Any:
|
||||
...
|
||||
|
||||
|
||||
@runtime_checkable
|
||||
@@ -138,13 +129,17 @@ class BindableLogger(Protocol):
|
||||
|
||||
_context: Context
|
||||
|
||||
def bind(self, **new_values: Any) -> Self: ...
|
||||
def bind(self, **new_values: Any) -> BindableLogger:
|
||||
...
|
||||
|
||||
def unbind(self, *keys: str) -> Self: ...
|
||||
def unbind(self, *keys: str) -> BindableLogger:
|
||||
...
|
||||
|
||||
def try_unbind(self, *keys: str) -> Self: ...
|
||||
def try_unbind(self, *keys: str) -> BindableLogger:
|
||||
...
|
||||
|
||||
def new(self, **new_values: Any) -> Self: ...
|
||||
def new(self, **new_values: Any) -> BindableLogger:
|
||||
...
|
||||
|
||||
|
||||
class FilteringBoundLogger(BindableLogger, Protocol):
|
||||
@@ -190,20 +185,6 @@ class FilteringBoundLogger(BindableLogger, Protocol):
|
||||
.. versionadded:: 22.1.0
|
||||
"""
|
||||
|
||||
def is_enabled_for(self, level: int) -> bool:
|
||||
"""
|
||||
Check whether the logger is enabled for *level*.
|
||||
|
||||
.. versionadded:: 25.1.0
|
||||
"""
|
||||
|
||||
def get_effective_level(self) -> int:
|
||||
"""
|
||||
Return the effective level of the logger.
|
||||
|
||||
.. versionadded:: 25.1.0
|
||||
"""
|
||||
|
||||
def debug(self, event: str, *args: Any, **kw: Any) -> Any:
|
||||
"""
|
||||
Log ``event % args`` with **kw** at **debug** level.
|
||||
|
||||