Major fixes and new features
All checks were successful
continuous-integration/drone/push Build is passing

This commit is contained in:
2025-09-25 15:51:48 +09:00
parent dd7349bb4c
commit ddce9f5125
5586 changed files with 1470941 additions and 0 deletions

View File

@@ -0,0 +1,128 @@
# SPDX-License-Identifier: MIT OR Apache-2.0
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the MIT License. See the LICENSE file in the root of this
# repository for complete details.
from __future__ import annotations
from structlog import (
contextvars,
dev,
processors,
stdlib,
testing,
threadlocal,
tracebacks,
types,
typing,
)
from structlog._base import BoundLoggerBase, get_context
from structlog._config import (
configure,
configure_once,
get_config,
get_logger,
getLogger,
is_configured,
reset_defaults,
wrap_logger,
)
from structlog._generic import BoundLogger
from structlog._log_levels import make_filtering_bound_logger
from structlog._output import (
BytesLogger,
BytesLoggerFactory,
PrintLogger,
PrintLoggerFactory,
WriteLogger,
WriteLoggerFactory,
)
from structlog.exceptions import DropEvent
from structlog.testing import ReturnLogger, ReturnLoggerFactory
# Twisted is an optional dependency -- expose the integration module only
# if it is importable; otherwise the attribute exists but is None.
try:
    from structlog import twisted
except ImportError:
    twisted = None  # type: ignore[assignment]


# Legacy packaging metadata kept as eager module attributes; the rest is
# resolved lazily by the module-level __getattr__ below.
__title__ = "structlog"

__author__ = "Hynek Schlawack"

__license__ = "MIT or Apache License, Version 2.0"
__copyright__ = "Copyright (c) 2013 " + __author__


# Names re-exported as the public package-level API.
__all__ = [
    "BoundLogger",
    "BoundLoggerBase",
    "BytesLogger",
    "BytesLoggerFactory",
    "configure_once",
    "configure",
    "contextvars",
    "dev",
    "DropEvent",
    "get_config",
    "get_context",
    "get_logger",
    "getLogger",
    "is_configured",
    "make_filtering_bound_logger",
    "PrintLogger",
    "PrintLoggerFactory",
    "processors",
    "reset_defaults",
    "ReturnLogger",
    "ReturnLoggerFactory",
    "stdlib",
    "testing",
    "threadlocal",
    "tracebacks",
    "twisted",
    "types",
    "typing",
    "wrap_logger",
    "WriteLogger",
    "WriteLoggerFactory",
]
def __getattr__(name: str) -> str:
    """
    Lazily resolve deprecated packaging-metadata dunders.

    ``__version__`` is answered from ``importlib.metadata`` without a
    warning; ``__description__``, ``__uri__``, and ``__email__`` emit a
    `DeprecationWarning` before being looked up in the package metadata.

    Raises:
        AttributeError: for any other attribute name.
    """
    import warnings
    from importlib.metadata import metadata, version

    deprecated_dunders = (
        "__description__",
        "__uri__",
        "__email__",
        "__version__",
    )

    if name not in deprecated_dunders:
        raise AttributeError(f"module {__name__} has no attribute {name}")

    if name == "__version__":
        # Version access is common enough that it stays warning-free.
        return version("structlog")

    warnings.warn(
        f"Accessing structlog.{name} is deprecated and will be "
        "removed in a future release. Use importlib.metadata directly "
        "to query for structlog's packaging metadata.",
        DeprecationWarning,
        stacklevel=2,
    )

    meta = metadata("structlog")

    if name == "__uri__":
        return meta["Project-URL"].split(" ", 1)[-1]

    if name == "__email__":
        return meta["Author-email"].split("<", 1)[1].rstrip(">")

    # Only __description__ remains at this point.
    return meta["summary"]

View File

@@ -0,0 +1,246 @@
# SPDX-License-Identifier: MIT OR Apache-2.0
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the MIT License. See the LICENSE file in the root of this
# repository for complete details.
"""
Logger wrapper and helper class.
"""
from __future__ import annotations
from typing import Any, Iterable, Mapping, Sequence
from structlog.exceptions import DropEvent
from .typing import BindableLogger, Context, Processor, WrappedLogger
class BoundLoggerBase:
    """
    Immutable context carrier.

    Doesn't do any actual logging; examples for useful subclasses are:

    - the generic `BoundLogger` that can wrap anything,
    - `structlog.stdlib.BoundLogger`.
    - `structlog.twisted.BoundLogger`,

    See also `custom-wrappers`.
    """

    _logger: WrappedLogger
    """
    Wrapped logger.

    .. note::
        Despite underscore available **read-only** to custom wrapper classes.

        See also `custom-wrappers`.
    """

    def __init__(
        self,
        logger: WrappedLogger,
        processors: Iterable[Processor],
        context: Context,
    ):
        self._logger = logger
        self._processors = processors
        self._context = context

    def __repr__(self) -> str:
        return (
            f"<{self.__class__.__name__}(context={self._context!r}, "
            f"processors={self._processors!r})>"
        )

    def __eq__(self, other: object) -> bool:
        # Equal iff the contexts are equal; anything without a _context
        # attribute compares unequal.
        missing = object()
        other_context = getattr(other, "_context", missing)
        if other_context is missing:
            return False

        return self._context == other_context

    def __ne__(self, other: object) -> bool:
        return not self.__eq__(other)

    def bind(self, **new_values: Any) -> BoundLoggerBase:
        """
        Return a new logger with *new_values* added to the existing ones.
        """
        # Build the merged context with whatever mapping type is in use.
        merged = self._context.__class__(self._context, **new_values)

        return self.__class__(self._logger, self._processors, merged)

    def unbind(self, *keys: str) -> BoundLoggerBase:
        """
        Return a new logger with *keys* removed from the context.

        Raises:
            KeyError: If the key is not part of the context.
        """
        new_logger = self.bind()
        for key in keys:
            del new_logger._context[key]

        return new_logger

    def try_unbind(self, *keys: str) -> BoundLoggerBase:
        """
        Like :meth:`unbind`, but best effort: missing keys are ignored.

        .. versionadded:: 18.2.0
        """
        new_logger = self.bind()
        for key in keys:
            new_logger._context.pop(key, None)

        return new_logger

    def new(self, **new_values: Any) -> BoundLoggerBase:
        """
        Clear context and binds *new_values* using `bind`.

        Only necessary with dict implementations that keep global state like
        those wrapped by `structlog.threadlocal.wrap_dict` when threads
        are re-used.
        """
        self._context.clear()

        return self.bind(**new_values)

    # Helper methods for sub-classing concrete BoundLoggers.

    def _process_event(
        self, method_name: str, event: str | None, event_kw: dict[str, Any]
    ) -> tuple[Sequence[Any], Mapping[str, Any]]:
        """
        Combine *event* and the bound context into an event dict and run the
        processor chain on it.

        Arguments:
            method_name:
                The name of the logger method. Is passed into the processors.

            event:
                The event -- usually the first positional argument to a
                logger.

            event_kw:
                Additional event keywords. For example if someone calls
                ``log.info("foo", bar=42)``, *event* would to be ``"foo"``
                and *event_kw* ``{"bar": 42}``.

        Raises:
            structlog.DropEvent: if log entry should be dropped.

            ValueError:
                if the final processor doesn't return a str, bytes,
                bytearray, tuple, or a dict.

        Returns:
            `tuple` of ``(*args, **kw)``

        .. note::
            Despite underscore available to custom wrapper classes.

            See also `custom-wrappers`.

        .. versionchanged:: 14.0.0
           Allow final processor to return a `dict`.
        .. versionchanged:: 20.2.0
           Allow final processor to return `bytes`.
        .. versionchanged:: 21.2.0
           Allow final processor to return a `bytearray`.
        """
        # Typed as Any because processors may return more than an EventDict.
        processed: Any = self._context.copy()
        processed.update(**event_kw)

        if event is not None:
            processed["event"] = event

        for processor in self._processors:
            processed = processor(self._logger, method_name, processed)

        if isinstance(processed, (str, bytes, bytearray)):
            return (processed,), {}

        if isinstance(processed, tuple):
            # The final processor returned ``(args, kwargs)`` -- pass it
            # right through.
            return processed  # type: ignore[return-value]

        if isinstance(processed, dict):
            return (), processed

        raise ValueError(
            "Last processor didn't return an appropriate value. "
            "Valid return values are a dict, a tuple of (args, kwargs), bytes, or a str."
        )

    def _proxy_to_logger(
        self, method_name: str, event: str | None = None, **event_kw: Any
    ) -> Any:
        """
        Run processor chain on event & call *method_name* on wrapped logger.

        DRY convenience method that runs :func:`_process_event`, takes care
        of handling :exc:`structlog.DropEvent`, and finally calls
        *method_name* on :attr:`_logger` with the result.

        Arguments:
            method_name:
                The name of the method that's going to get called.
                Technically it should be identical to the method the user
                called because it also get passed into processors.

            event:
                The event -- usually the first positional argument to a
                logger.

            event_kw:
                Additional event keywords. For example if someone calls
                ``log.info("foo", bar=42)``, *event* would to be ``"foo"``
                and *event_kw* ``{"bar": 42}``.

        .. note::
            Despite underscore available to custom wrapper classes.

            See also `custom-wrappers`.
        """
        try:
            args, kw = self._process_event(method_name, event, event_kw)

            return getattr(self._logger, method_name)(*args, **kw)
        except DropEvent:
            return None
def get_context(bound_logger: BindableLogger) -> Context:
    """
    Return *bound_logger*'s context.

    The type of *bound_logger* and the type returned depend on your
    configuration.

    Arguments:
        bound_logger: The bound logger whose context you want.

    Returns:
        The *actual* context from *bound_logger*. It is *not* copied first.

    .. versionadded:: 20.2.0
    """
    # This probably will get more complicated in the future.
    return bound_logger._context

View File

@@ -0,0 +1,432 @@
# SPDX-License-Identifier: MIT OR Apache-2.0
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the MIT License. See the LICENSE file in the root of this
# repository for complete details.
"""
Global state department. Don't reload this module or everything breaks.
"""
from __future__ import annotations
import os
import sys
import warnings
from typing import Any, Callable, Iterable, Sequence, Type, cast
from ._log_levels import make_filtering_bound_logger
from ._output import PrintLoggerFactory
from .contextvars import merge_contextvars
from .dev import ConsoleRenderer, _has_colors, set_exc_info
from .processors import StackInfoRenderer, TimeStamper, add_log_level
from .typing import BindableLogger, Context, Processor, WrappedLogger
"""
Any changes to these defaults must be reflected in:
- `getting-started`.
- structlog.stdlib.recreate_defaults()'s docstring.
"""
_BUILTIN_DEFAULT_PROCESSORS: Sequence[Processor] = [
merge_contextvars,
add_log_level,
StackInfoRenderer(),
set_exc_info,
TimeStamper(fmt="%Y-%m-%d %H:%M:%S", utc=False),
ConsoleRenderer(
colors=os.environ.get("NO_COLOR", "") == ""
and (
os.environ.get("FORCE_COLOR", "") != ""
or (
_has_colors
and sys.stdout is not None
and hasattr(sys.stdout, "isatty")
and sys.stdout.isatty()
)
)
),
]
_BUILTIN_DEFAULT_CONTEXT_CLASS = cast(Type[Context], dict)
_BUILTIN_DEFAULT_WRAPPER_CLASS = make_filtering_bound_logger(0)
_BUILTIN_DEFAULT_LOGGER_FACTORY = PrintLoggerFactory()
_BUILTIN_CACHE_LOGGER_ON_FIRST_USE = False
class _Configuration:
    """
    Global defaults.
    """

    # True once `configure`/`configure_once` has been called; reset by
    # `reset_defaults`.
    is_configured: bool = False
    # Copy of the builtin chain so later mutations don't leak into it.
    default_processors: Iterable[Processor] = _BUILTIN_DEFAULT_PROCESSORS[:]
    default_context_class: type[Context] = _BUILTIN_DEFAULT_CONTEXT_CLASS
    default_wrapper_class: Any = _BUILTIN_DEFAULT_WRAPPER_CLASS
    logger_factory: Callable[
        ..., WrappedLogger
    ] = _BUILTIN_DEFAULT_LOGGER_FACTORY
    cache_logger_on_first_use: bool = _BUILTIN_CACHE_LOGGER_ON_FIRST_USE


_CONFIG = _Configuration()
"""
Global defaults used when arguments to `wrap_logger` are omitted.
"""
def is_configured() -> bool:
    """
    Report whether *structlog* has been configured.

    If `False`, *structlog* is running with builtin defaults.

    .. versionadded: 18.1.0
    """
    return _CONFIG.is_configured
def get_config() -> dict[str, Any]:
    """
    Return a snapshot of the current configuration as a dictionary.

    .. note::
        Changes to the returned dictionary do *not* affect *structlog*.

    .. versionadded: 18.1.0
    """
    return dict(
        processors=_CONFIG.default_processors,
        context_class=_CONFIG.default_context_class,
        wrapper_class=_CONFIG.default_wrapper_class,
        logger_factory=_CONFIG.logger_factory,
        cache_logger_on_first_use=_CONFIG.cache_logger_on_first_use,
    )
def get_logger(*args: Any, **initial_values: Any) -> Any:
    """
    Convenience function that returns a logger according to configuration.

    >>> from structlog import get_logger
    >>> log = get_logger(y=23)
    >>> log.info("hello", x=42)
    y=23 x=42 event='hello'

    Arguments:
        args:
            *Optional* positional arguments that are passed unmodified to
            the logger factory. Therefore it depends on the factory what
            they mean.

        initial_values: Values that are used to pre-populate your contexts.

    Returns:
        A proxy that creates a correctly configured bound logger when
        necessary. The type of that bound logger depends on your
        configuration and is `structlog.BoundLogger` by default.

    See `configuration` for details.

    If you prefer CamelCase, there's an alias for your reading pleasure:
    `structlog.getLogger`.

    .. versionadded:: 0.4.0 *args*
    """
    return wrap_logger(None, logger_factory_args=args, **initial_values)


getLogger = get_logger  # noqa: N816
"""
CamelCase alias for `structlog.get_logger`.

This function is supposed to be in every source file -- we don't want it to
stick out like a sore thumb in frameworks like Twisted or Zope.
"""
def wrap_logger(
    logger: WrappedLogger | None,
    processors: Iterable[Processor] | None = None,
    wrapper_class: type[BindableLogger] | None = None,
    context_class: type[Context] | None = None,
    cache_logger_on_first_use: bool | None = None,
    logger_factory_args: Iterable[Any] | None = None,
    **initial_values: Any,
) -> Any:
    """
    Create a new bound logger for an arbitrary *logger*.

    Default values for *processors*, *wrapper_class*, and *context_class*
    can be set using `configure`.

    If you set an attribute here, `configure` calls have *no* effect for the
    *respective* attribute.

    In other words: selective overwriting of the defaults while keeping some
    *is* possible.

    Arguments:
        initial_values: Values that are used to pre-populate your contexts.

        logger_factory_args:
            Values that are passed unmodified as ``*logger_factory_args``
            to the logger factory if not `None`.

    Returns:
        A proxy that creates a correctly configured bound logger when
        necessary.

    See `configure` for the meaning of the rest of the arguments.

    .. versionadded:: 0.4.0 *logger_factory_args*
    """
    proxy_kw = dict(
        wrapper_class=wrapper_class,
        processors=processors,
        context_class=context_class,
        cache_logger_on_first_use=cache_logger_on_first_use,
        initial_values=initial_values,
        logger_factory_args=logger_factory_args,
    )

    return BoundLoggerLazyProxy(logger, **proxy_kw)
def configure(
    processors: Iterable[Processor] | None = None,
    wrapper_class: type[BindableLogger] | None = None,
    context_class: type[Context] | None = None,
    logger_factory: Callable[..., WrappedLogger] | None = None,
    cache_logger_on_first_use: bool | None = None,
) -> None:
    """
    Configures the **global** defaults.

    They are used if `wrap_logger` or `get_logger` are called without
    arguments.

    Can be called several times, keeping an argument at `None` leaves it
    unchanged from the current setting.

    After calling for the first time, `is_configured` starts returning
    `True`.

    Use `reset_defaults` to undo your changes.

    Arguments:
        processors: The processor chain. See :doc:`processors` for details.

        wrapper_class:
            Class to use for wrapping loggers instead of
            `structlog.BoundLogger`. See `standard-library`,
            :doc:`twisted`, and `custom-wrappers`.

        context_class:
            Class to be used for internal context keeping. The default is a
            `dict` and since dictionaries are ordered as of Python 3.6,
            there's few reasons to change this option.

        logger_factory:
            Factory to be called to create a new logger that shall be
            wrapped.

        cache_logger_on_first_use:
            `wrap_logger` doesn't return an actual wrapped logger but a
            proxy that assembles one when it's first used. If this option
            is set to `True`, this assembled logger is cached. See
            `performance`.

    .. versionadded:: 0.3.0 *cache_logger_on_first_use*
    """
    _CONFIG.is_configured = True

    # Only overwrite the defaults that were actually provided.
    for attribute, value in (
        ("default_processors", processors),
        ("default_wrapper_class", wrapper_class),
        ("default_context_class", context_class),
        ("logger_factory", logger_factory),
        ("cache_logger_on_first_use", cache_logger_on_first_use),
    ):
        if value is not None:
            setattr(_CONFIG, attribute, value)
def configure_once(
    processors: Iterable[Processor] | None = None,
    wrapper_class: type[BindableLogger] | None = None,
    context_class: type[Context] | None = None,
    logger_factory: Callable[..., WrappedLogger] | None = None,
    cache_logger_on_first_use: bool | None = None,
) -> None:
    """
    Configures if structlog isn't configured yet.

    It does *not* matter whether it was configured using `configure` or
    `configure_once` before.

    Raises:
        RuntimeWarning: if repeated configuration is attempted.
    """
    # Guard clause: a second configuration attempt only warns.
    if _CONFIG.is_configured:
        warnings.warn(
            "Repeated configuration attempted.", RuntimeWarning, stacklevel=2
        )
        return

    configure(
        processors=processors,
        wrapper_class=wrapper_class,
        context_class=context_class,
        logger_factory=logger_factory,
        cache_logger_on_first_use=cache_logger_on_first_use,
    )
def reset_defaults() -> None:
    """
    Reset the global default values back to the builtin ones.

    `is_configured` starts returning `False` afterwards.
    """
    _CONFIG.is_configured = False
    _CONFIG.default_wrapper_class = _BUILTIN_DEFAULT_WRAPPER_CLASS
    _CONFIG.default_context_class = _BUILTIN_DEFAULT_CONTEXT_CLASS
    # Copy the builtin chain so later in-place mutations can't corrupt it.
    _CONFIG.default_processors = _BUILTIN_DEFAULT_PROCESSORS[:]
    _CONFIG.logger_factory = _BUILTIN_DEFAULT_LOGGER_FACTORY
    _CONFIG.cache_logger_on_first_use = _BUILTIN_CACHE_LOGGER_ON_FIRST_USE
class BoundLoggerLazyProxy:
    """
    Instantiates a bound logger on first usage.

    Takes both configuration and instantiation parameters into account.

    The only points where a bound logger changes state are ``bind()``,
    ``unbind()``, and ``new()`` and that return the actual ``BoundLogger``.

    If and only if configuration says so, that actual bound logger is cached
    on first usage.

    .. versionchanged:: 0.4.0 Added support for *logger_factory_args*.
    """

    def __init__(
        self,
        logger: WrappedLogger | None,
        wrapper_class: type[BindableLogger] | None = None,
        processors: Iterable[Processor] | None = None,
        context_class: type[Context] | None = None,
        cache_logger_on_first_use: bool | None = None,
        initial_values: dict[str, Any] | None = None,
        logger_factory_args: Any = None,
    ) -> None:
        # Every `None` here means "defer to the global configuration at
        # bind time" -- nothing is resolved until the logger is used.
        self._logger = logger
        self._wrapper_class = wrapper_class
        self._processors = processors
        self._context_class = context_class
        self._cache_logger_on_first_use = cache_logger_on_first_use
        self._initial_values = initial_values or {}
        self._logger_factory_args = logger_factory_args or ()

    def __repr__(self) -> str:
        return (
            f"<BoundLoggerLazyProxy(logger={self._logger!r}, wrapper_class="
            f"{self._wrapper_class!r}, processors={self._processors!r}, "
            f"context_class={self._context_class!r}, "
            f"initial_values={self._initial_values!r}, "
            f"logger_factory_args={self._logger_factory_args!r})>"
        )

    def bind(self, **new_values: Any) -> BindableLogger:
        """
        Assemble a new BoundLogger from arguments and configuration.
        """
        # Per-proxy settings win over global configuration; `None`/falsy
        # means "use the configured default".
        if self._context_class:
            ctx = self._context_class(self._initial_values)
        else:
            ctx = _CONFIG.default_context_class(self._initial_values)

        _logger = self._logger
        if not _logger:
            # No concrete logger was given -- create one via the
            # configured factory, forwarding the stored factory args.
            _logger = _CONFIG.logger_factory(*self._logger_factory_args)

        if self._processors is None:
            procs = _CONFIG.default_processors
        else:
            procs = self._processors

        cls = self._wrapper_class or _CONFIG.default_wrapper_class

        # Looks like Protocols ignore definitions of __init__ so we have to
        # silence Mypy here.
        logger = cls(
            _logger, processors=procs, context=ctx  # type: ignore[call-arg]
        )

        def finalized_bind(**new_values: Any) -> BindableLogger:
            """
            Use cached assembled logger to bind potentially new values.
            """
            if new_values:
                return logger.bind(**new_values)

            return logger

        if self._cache_logger_on_first_use is True or (
            self._cache_logger_on_first_use is None
            and _CONFIG.cache_logger_on_first_use is True
        ):
            # Replace bind() on *this instance* so every later call reuses
            # the assembled logger instead of rebuilding it.
            self.bind = finalized_bind  # type: ignore[method-assign]

        return finalized_bind(**new_values)

    def unbind(self, *keys: str) -> BindableLogger:
        """
        Same as bind, except unbind *keys* first.

        In our case that could be only initial values.
        """
        return self.bind().unbind(*keys)

    def try_unbind(self, *keys: str) -> BindableLogger:
        # Best-effort variant of unbind(); missing keys are ignored.
        return self.bind().try_unbind(*keys)

    def new(self, **new_values: Any) -> BindableLogger:
        """
        Clear context, then bind.
        """
        # Only meaningful for context classes with shared global state
        # (e.g. threadlocal-wrapped dicts); a plain dict instance created
        # here is simply discarded after clear().
        if self._context_class:
            self._context_class().clear()
        else:
            _CONFIG.default_context_class().clear()

        return self.bind(**new_values)

    def __getattr__(self, name: str) -> Any:
        """
        If a logging method if called on a lazy proxy, we have to create an
        ephemeral BoundLogger first.
        """
        # inspect/abc probe for this; answering via bind() would be wrong.
        if name == "__isabstractmethod__":
            raise AttributeError

        bl = self.bind()

        return getattr(bl, name)

    def __getstate__(self) -> dict[str, Any]:
        """
        Our __getattr__ magic makes this necessary.
        """
        return self.__dict__

    def __setstate__(self, state: dict[str, Any]) -> None:
        """
        Our __getattr__ magic makes this necessary.
        """
        for k, v in state.items():
            setattr(self, k, v)

View File

@@ -0,0 +1,77 @@
# SPDX-License-Identifier: MIT OR Apache-2.0
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the MIT License. See the LICENSE file in the root of this
# repository for complete details.
from __future__ import annotations
import sys
import traceback
from io import StringIO
from types import FrameType
from .typing import ExcInfo
def _format_exception(exc_info: ExcInfo) -> str:
    """
    Prettyprint an `exc_info` tuple.

    Shamelessly stolen from stdlib's logging module.
    """
    if exc_info == (None, None, None):  # type: ignore[comparison-overlap]
        return "MISSING"

    buf = StringIO()
    traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], None, buf)
    formatted = buf.getvalue()
    buf.close()

    # Drop exactly one trailing newline, if present.
    if formatted.endswith("\n"):
        formatted = formatted[:-1]

    return formatted
def _find_first_app_frame_and_name(
    additional_ignores: list[str] | None = None,
) -> tuple[FrameType, str]:
    """
    Remove all intra-structlog calls and return the relevant app frame.

    Arguments:
        additional_ignores:
            Additional names with which the first frame must not start.

    Returns:
        tuple of (frame, name)
    """
    ignores = ["structlog", *(additional_ignores or [])]

    frame = sys._getframe()
    name = frame.f_globals.get("__name__") or "?"

    # Walk outwards until we leave all ignored module-name prefixes.
    while any(name.startswith(prefix) for prefix in ignores):
        if frame.f_back is None:
            # Ran out of frames -- give up with a placeholder name.
            name = "?"
            break
        frame = frame.f_back
        name = frame.f_globals.get("__name__") or "?"

    return frame, name
def _format_stack(frame: FrameType) -> str:
    """
    Pretty-print the stack of *frame* like logging would.
    """
    buf = StringIO()
    buf.write("Stack (most recent call last):\n")
    traceback.print_stack(frame, file=buf)
    stack = buf.getvalue()
    buf.close()

    # Drop the single trailing newline print_stack leaves behind.
    if stack.endswith("\n"):
        stack = stack[:-1]

    return stack

View File

@@ -0,0 +1,54 @@
# SPDX-License-Identifier: MIT OR Apache-2.0
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the MIT License. See the LICENSE file in the root of this
# repository for complete details.
"""
Generic bound logger that can wrap anything.
"""
from __future__ import annotations
from functools import partial
from typing import Any
from structlog._base import BoundLoggerBase
class BoundLogger(BoundLoggerBase):
    """
    A generic BoundLogger that can wrap anything.

    Every unknown method will be passed to the wrapped *logger*. If that's
    too much magic for you, try `structlog.stdlib.BoundLogger` or
    `structlog.twisted.BoundLogger` which also take advantage of knowing
    the wrapped class which generally results in better performance.

    Not intended to be instantiated by yourself. See
    :func:`~structlog.wrap_logger` and :func:`~structlog.get_logger`.
    """

    def __getattr__(self, method_name: str) -> Any:
        """
        Wrap the requested logger method on first access and cache it.
        """
        # copy.deepcopy probes for __deepcopy__; returning a proxy for it
        # would break copying, so report it as unavailable.
        if method_name == "__deepcopy__":
            return None

        wrapped_method = partial(self._proxy_to_logger, method_name)
        # Cache on the instance so the next lookup skips __getattr__.
        setattr(self, method_name, wrapped_method)

        return wrapped_method

    def __getstate__(self) -> dict[str, Any]:
        """
        Our __getattr__ magic makes this necessary.
        """
        return self.__dict__

    def __setstate__(self, state: dict[str, Any]) -> None:
        """
        Our __getattr__ magic makes this necessary.
        """
        for key, value in state.items():
            setattr(self, key, value)

View File

@@ -0,0 +1,44 @@
# SPDX-License-Identifier: MIT OR Apache-2.0
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the MIT License. See the LICENSE file in the root of this
# repository for complete details.
"""
greenlet-specific code that pretends to be a `threading.local`.
Fails to import if not running under greenlet.
"""
from __future__ import annotations
from typing import Any
from weakref import WeakKeyDictionary
from greenlet import getcurrent
class GreenThreadLocal:
    """
    threading.local() replacement for greenlets.

    Each greenlet gets its own attribute namespace; a greenlet's entries
    disappear when it is garbage collected, because the backing store is a
    `WeakKeyDictionary` keyed on the greenlet object.
    """

    def __init__(self) -> None:
        # Write through __dict__ to bypass our own __setattr__, which
        # would try to use the mapping we're creating right here.
        self.__dict__["_weakdict"] = WeakKeyDictionary()

    def __getattr__(self, name: str) -> Any:
        try:
            return self._weakdict[getcurrent()][name]
        except KeyError:
            raise AttributeError(name) from None

    def __setattr__(self, name: str, val: Any) -> None:
        self._weakdict.setdefault(getcurrent(), {})[name] = val

    def __delattr__(self, name: str) -> None:
        try:
            del self._weakdict[getcurrent()][name]
        except KeyError:
            raise AttributeError(name) from None

View File

@@ -0,0 +1,250 @@
# SPDX-License-Identifier: MIT OR Apache-2.0
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the MIT License. See the LICENSE file in the root of this
# repository for complete details.
"""
Extracted log level data used by both stdlib and native log level filters.
"""
from __future__ import annotations
import asyncio
import contextvars
import logging
import sys
from typing import Any, Callable
from ._base import BoundLoggerBase
from .typing import EventDict, FilteringBoundLogger
# Adapted from the stdlib
CRITICAL = 50
FATAL = CRITICAL  # stdlib-compatible alias
ERROR = 40
WARNING = 30
WARN = WARNING  # stdlib-compatible alias
INFO = 20
DEBUG = 10
NOTSET = 0

# Log-method name -> numeric level. Includes the aliases "warn" and
# "exception" so user-facing method names resolve too.
_NAME_TO_LEVEL = {
    "critical": CRITICAL,
    "exception": ERROR,
    "error": ERROR,
    "warn": WARNING,
    "warning": WARNING,
    "info": INFO,
    "debug": DEBUG,
    "notset": NOTSET,
}

# Numeric level -> canonical method name; aliases and "notset" excluded so
# each level maps to exactly one name.
_LEVEL_TO_NAME = {
    v: k
    for k, v in _NAME_TO_LEVEL.items()
    if k not in ("warn", "exception", "notset")
}
def add_log_level(
    logger: logging.Logger, method_name: str, event_dict: EventDict
) -> EventDict:
    """
    Add the log level to the event dict under the ``level`` key.

    Since that's just the log method name, this processor works with
    non-stdlib logging as well. Therefore it's importable both from
    `structlog.processors` as well as from `structlog.stdlib`.

    .. versionadded:: 15.0.0
    .. versionchanged:: 20.2.0
       Importable from `structlog.processors` (additionally to
       `structlog.stdlib`).
    """
    # "warn" is a stdlib alias for "warning" -- normalize it.
    event_dict["level"] = "warning" if method_name == "warn" else method_name

    return event_dict
def _nop(self: Any, event: str, *args: Any, **kw: Any) -> Any:
    # Stand-in for log methods below the configured minimum level.
    return None


async def _anop(self: Any, event: str, *args: Any, **kw: Any) -> Any:
    # Async counterpart of _nop.
    return None
def exception(
    self: FilteringBoundLogger, event: str, *args: Any, **kw: Any
) -> Any:
    """
    Log *event* at error level, attaching exception info unless the caller
    passed an explicit ``exc_info``.
    """
    if "exc_info" not in kw:
        kw["exc_info"] = True

    return self.error(event, *args, **kw)
async def aexception(
    self: FilteringBoundLogger, event: str, *args: Any, **kw: Any
) -> Any:
    """
    Async version of ``exception()``: run the error call in the default
    executor with the current context propagated.
    """
    # Exception info has to be extracted this early, because it is no longer
    # available once control is passed to the executor.
    if kw.get("exc_info", True) is True:
        kw["exc_info"] = sys.exc_info()

    ctx = contextvars.copy_context()
    loop = asyncio.get_running_loop()

    return await loop.run_in_executor(
        None, lambda: ctx.run(lambda: self.error(event, *args, **kw))
    )
def make_filtering_bound_logger(min_level: int) -> type[FilteringBoundLogger]:
    """
    Create a new `FilteringBoundLogger` that only logs *min_level* or higher.

    The logger is optimized such that log levels below *min_level* only
    consist of a ``return None``.

    All familiar log methods are present, with async variants of each that
    are prefixed by an ``a``. Therefore, the async version of
    ``log.info("hello")`` is ``await log.ainfo("hello")``.

    Additionally it has a ``log(self, level: int, **kw: Any)`` method to
    mirror `logging.Logger.log` and `structlog.stdlib.BoundLogger.log`.

    Compared to using *structlog*'s standard library integration and the
    `structlog.stdlib.filter_by_level` processor:

    - It's faster because once the logger is built at program start; it's a
      static class.
    - For the same reason you can't change the log level once configured.
      Use the dynamic approach of `standard-library` instead, if you need
      this feature.
    - You *can* have (much) more fine-grained filtering by :ref:`writing a
      simple processor <finer-filtering>`.

    Arguments:
        min_level:
            The log level as an integer. You can use the constants from
            `logging` like ``logging.INFO`` or pass the values directly.
            See `this table from the logging docs
            <https://docs.python.org/3/library/logging.html#levels>`_ for
            possible values.

    .. versionadded:: 20.2.0
    .. versionchanged:: 21.1.0 The returned loggers are now pickleable.
    .. versionadded:: 20.1.0 The ``log()`` method.
    .. versionadded:: 22.2.0
       Async variants ``alog()``, ``adebug()``, ``ainfo()``, and so forth.
    """
    # All levels are pre-built at import time; simply hand out the match.
    return _LEVEL_TO_FILTERING_LOGGER[min_level]
def _make_filtering_bound_logger(min_level: int) -> type[FilteringBoundLogger]:
    """
    Create a new `FilteringBoundLogger` that only logs *min_level* or higher.

    The logger is optimized such that log levels below *min_level* only
    consist of a ``return None``.
    """

    def make_method(
        level: int,
    ) -> tuple[Callable[..., Any], Callable[..., Any]]:
        # Build the (sync, async) pair of log methods for *level*; levels
        # below the threshold get shared no-op implementations.
        if level < min_level:
            return _nop, _anop

        name = _LEVEL_TO_NAME[level]

        def meth(self: Any, event: str, *args: Any, **kw: Any) -> Any:
            # Positional args are %-interpolated into the event string.
            if not args:
                return self._proxy_to_logger(name, event, **kw)

            return self._proxy_to_logger(name, event % args, **kw)

        async def ameth(self: Any, event: str, *args: Any, **kw: Any) -> Any:
            if args:
                event = event % args

            # Run the blocking log call in the default executor with the
            # current contextvars propagated.
            ctx = contextvars.copy_context()
            await asyncio.get_running_loop().run_in_executor(
                None,
                lambda: ctx.run(
                    lambda: self._proxy_to_logger(name, event, **kw)
                ),
            )

        meth.__name__ = name
        ameth.__name__ = f"a{name}"

        return meth, ameth

    def log(self: Any, level: int, event: str, *args: Any, **kw: Any) -> Any:
        # Mirror of logging.Logger.log with the same filtering semantics.
        if level < min_level:
            return None
        name = _LEVEL_TO_NAME[level]

        if not args:
            return self._proxy_to_logger(name, event, **kw)

        return self._proxy_to_logger(name, event % args, **kw)

    async def alog(
        self: Any, level: int, event: str, *args: Any, **kw: Any
    ) -> Any:
        # Async mirror of log().
        if level < min_level:
            return None
        name = _LEVEL_TO_NAME[level]
        if args:
            event = event % args
        ctx = contextvars.copy_context()
        return await asyncio.get_running_loop().run_in_executor(
            None,
            lambda: ctx.run(lambda: self._proxy_to_logger(name, event, **kw)),
        )

    meths: dict[str, Callable[..., Any]] = {"log": log, "alog": alog}
    for lvl, name in _LEVEL_TO_NAME.items():
        meths[name], meths[f"a{name}"] = make_method(lvl)

    # Aliases matching stdlib and historic structlog method names.
    meths["exception"] = exception
    meths["aexception"] = aexception
    meths["fatal"] = meths["error"]
    meths["afatal"] = meths["aerror"]
    meths["warn"] = meths["warning"]
    meths["awarn"] = meths["awarning"]
    meths["msg"] = meths["info"]
    meths["amsg"] = meths["ainfo"]

    # Assemble the concrete class dynamically from the method dict.
    return type(
        "BoundLoggerFilteringAt%s"
        % (_LEVEL_TO_NAME.get(min_level, "Notset").capitalize()),
        (BoundLoggerBase,),
        meths,
    )
# Pre-create all possible filters to make them pickleable.
BoundLoggerFilteringAtNotset = _make_filtering_bound_logger(NOTSET)
BoundLoggerFilteringAtDebug = _make_filtering_bound_logger(DEBUG)
BoundLoggerFilteringAtInfo = _make_filtering_bound_logger(INFO)
BoundLoggerFilteringAtWarning = _make_filtering_bound_logger(WARNING)
BoundLoggerFilteringAtError = _make_filtering_bound_logger(ERROR)
BoundLoggerFilteringAtCritical = _make_filtering_bound_logger(CRITICAL)

# Lookup table used by make_filtering_bound_logger() to hand out the
# matching pre-built class.
_LEVEL_TO_FILTERING_LOGGER = {
    CRITICAL: BoundLoggerFilteringAtCritical,
    ERROR: BoundLoggerFilteringAtError,
    WARNING: BoundLoggerFilteringAtWarning,
    INFO: BoundLoggerFilteringAtInfo,
    DEBUG: BoundLoggerFilteringAtDebug,
    NOTSET: BoundLoggerFilteringAtNotset,
}

View File

@@ -0,0 +1,353 @@
# SPDX-License-Identifier: MIT OR Apache-2.0
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the MIT License. See the LICENSE file in the root of this
# repository for complete details.
"""
Logger classes responsible for output.
"""
from __future__ import annotations
import copy
import sys
import threading
from pickle import PicklingError
from sys import stderr, stdout
from typing import IO, Any, BinaryIO, TextIO
from structlog._utils import until_not_interrupted
# One lock per file object so concurrent loggers writing to the same stream
# don't interleave their output.
WRITE_LOCKS: dict[IO[Any], threading.Lock] = {}


def _get_lock_for_file(file: IO[Any]) -> threading.Lock:
    """
    Return the lock guarding *file*, creating and registering one if needed.
    """
    try:
        return WRITE_LOCKS[file]
    except KeyError:
        lock = threading.Lock()
        WRITE_LOCKS[file] = lock

        return lock
class PrintLogger:
    """
    Print events into a file.

    Arguments:

        file: File to print to. (default: `sys.stdout`)

    Useful if you follow `current logging best practices
    <logging-best-practices>`, and handy for tests and examples because
    `logging` is finicky in doctests.

    .. versionchanged:: 22.1.0
       The implementation has been switched to use `print` for better
       monkeypatchability.
    """

    def __init__(self, file: TextIO | None = None):
        self._file = file or stdout
        self._lock = _get_lock_for_file(self._file)

    def __getstate__(self) -> str:
        """
        Our __getattr__ magic makes this necessary.
        """
        # Only the two standard streams can be re-attached on unpickling.
        for stream, token in ((stdout, "stdout"), (stderr, "stderr")):
            if self._file is stream:
                return token

        raise PicklingError(
            "Only PrintLoggers to sys.stdout and sys.stderr can be pickled."
        )

    def __setstate__(self, state: Any) -> None:
        """
        Our __getattr__ magic makes this necessary.
        """
        self._file = stdout if state == "stdout" else stderr
        self._lock = _get_lock_for_file(self._file)

    def __deepcopy__(self, memodict: dict[str, object]) -> PrintLogger:
        """
        Create a new PrintLogger with the same attributes. Similar to pickling.
        """
        if self._file not in (stdout, stderr):
            raise copy.error(
                "Only PrintLoggers to sys.stdout and sys.stderr "
                "can be deepcopied."
            )

        clone = self.__class__(self._file)
        clone._lock = _get_lock_for_file(clone._file)

        return clone

    def __repr__(self) -> str:
        return "<PrintLogger(file=%r)>" % (self._file,)

    def msg(self, message: str) -> None:
        """
        Print *message*.
        """
        # print(file=None) means "current sys.stdout", which keeps
        # monkeypatched stdout working.
        target = None if self._file is stdout else self._file
        with self._lock:
            until_not_interrupted(print, message, file=target, flush=True)

    log = debug = info = warn = warning = msg
    fatal = failure = err = error = critical = exception = msg
class PrintLoggerFactory:
    r"""
    Produce `PrintLogger`\ s.

    To be used with `structlog.configure`\ 's ``logger_factory``.

    Arguments:

        file: File to print to. (default: `sys.stdout`)

    Positional arguments are silently ignored.

    .. versionadded:: 0.4.0
    """

    def __init__(self, file: TextIO | None = None):
        # Stored as-is; each produced logger resolves the default lazily.
        self._file = file

    def __call__(self, *args: Any) -> PrintLogger:
        return PrintLogger(self._file)
class WriteLogger:
    """
    Write events into a file.

    Arguments:

        file: File to print to. (default: `sys.stdout`)

    >>> from structlog import WriteLogger
    >>> WriteLogger().info("hello")
    hello

    Useful if you follow
    `current logging best practices <logging-best-practices>`.

    Also very useful for testing and examples since `logging` is finicky in
    doctests.

    A little faster and a little less versatile than `structlog.PrintLogger`.

    .. versionadded:: 22.1.0
    """

    def __init__(self, file: TextIO | None = None):
        self._file = file or sys.stdout
        # Cache the bound methods -- msg() calls them on every log entry.
        self._write = self._file.write
        self._flush = self._file.flush
        self._lock = _get_lock_for_file(self._file)

    def __getstate__(self) -> str:
        """
        Our __getattr__ magic makes this necessary.
        """
        if self._file is stdout:
            return "stdout"

        if self._file is stderr:
            return "stderr"

        raise PicklingError(
            "Only WriteLoggers to sys.stdout and sys.stderr can be pickled."
        )

    def __setstate__(self, state: Any) -> None:
        """
        Our __getattr__ magic makes this necessary.
        """
        if state == "stdout":
            self._file = stdout
        else:
            self._file = stderr

        # Bug fix: restore the cached write/flush bound methods as well.
        # __init__ is not run on unpickling, so without these two lines an
        # unpickled WriteLogger crashed with AttributeError in msg()
        # (BytesLogger.__setstate__ already did this correctly).
        self._write = self._file.write
        self._flush = self._file.flush
        self._lock = _get_lock_for_file(self._file)

    def __deepcopy__(self, memodict: dict[str, object]) -> WriteLogger:
        """
        Create a new WriteLogger with the same attributes. Similar to pickling.
        """
        if self._file not in (sys.stdout, sys.stderr):
            raise copy.error(
                "Only WriteLoggers to sys.stdout and sys.stderr "
                "can be deepcopied."
            )

        newself = self.__class__(self._file)

        newself._write = newself._file.write
        newself._flush = newself._file.flush
        newself._lock = _get_lock_for_file(newself._file)

        return newself

    def __repr__(self) -> str:
        return f"<WriteLogger(file={self._file!r})>"

    def msg(self, message: str) -> None:
        """
        Write and flush *message*.
        """
        with self._lock:
            until_not_interrupted(self._write, message + "\n")
            until_not_interrupted(self._flush)

    log = debug = info = warn = warning = msg
    fatal = failure = err = error = critical = exception = msg
class WriteLoggerFactory:
    r"""
    Produce `WriteLogger`\ s.

    To be used with `structlog.configure`\ 's ``logger_factory``.

    Arguments:

        file: File to print to. (default: `sys.stdout`)

    Positional arguments are silently ignored.

    .. versionadded:: 22.1.0
    """

    def __init__(self, file: TextIO | None = None):
        # Stored as-is; each produced logger resolves the default lazily.
        self._file = file

    def __call__(self, *args: Any) -> WriteLogger:
        return WriteLogger(self._file)
class BytesLogger:
    r"""
    Writes bytes into a file.

    Arguments:

        file: File to print to. (default: `sys.stdout`\ ``.buffer``)

    Useful if you follow `current logging best practices
    <logging-best-practices>` together with a formatter that returns bytes
    (e.g. `orjson <https://github.com/ijl/orjson>`_).

    .. versionadded:: 20.2.0
    """

    __slots__ = ("_file", "_write", "_flush", "_lock")

    def __init__(self, file: BinaryIO | None = None):
        self._file = file or sys.stdout.buffer
        # Cache the bound methods -- msg() calls them on every log entry.
        self._write = self._file.write
        self._flush = self._file.flush
        self._lock = _get_lock_for_file(self._file)

    def __getstate__(self) -> str:
        """
        Our __getattr__ magic makes this necessary.
        """
        # Only the two standard byte streams can be re-attached on
        # unpickling.
        if self._file is sys.stdout.buffer:
            return "stdout"

        if self._file is sys.stderr.buffer:
            return "stderr"

        raise PicklingError(
            "Only BytesLoggers to sys.stdout and sys.stderr can be pickled."
        )

    def __setstate__(self, state: Any) -> None:
        """
        Our __getattr__ magic makes this necessary.
        """
        if state == "stdout":
            self._file = sys.stdout.buffer
        else:
            self._file = sys.stderr.buffer

        # __init__ is skipped on unpickling -- rebuild all cached state.
        self._write = self._file.write
        self._flush = self._file.flush
        self._lock = _get_lock_for_file(self._file)

    def __deepcopy__(self, memodict: dict[str, object]) -> BytesLogger:
        """
        Create a new BytesLogger with the same attributes. Similar to pickling.
        """
        if self._file not in (sys.stdout.buffer, sys.stderr.buffer):
            raise copy.error(
                "Only BytesLoggers to sys.stdout and sys.stderr "
                "can be deepcopied."
            )

        clone = self.__class__(self._file)
        clone._write = clone._file.write
        clone._flush = clone._file.flush
        clone._lock = _get_lock_for_file(clone._file)

        return clone

    def __repr__(self) -> str:
        return "<BytesLogger(file=%r)>" % (self._file,)

    def msg(self, message: bytes) -> None:
        """
        Write *message*.
        """
        with self._lock:
            until_not_interrupted(self._write, message + b"\n")
            until_not_interrupted(self._flush)

    log = debug = info = warn = warning = msg
    fatal = failure = err = error = critical = exception = msg
class BytesLoggerFactory:
    r"""
    Produce `BytesLogger`\ s.

    To be used with `structlog.configure`\ 's ``logger_factory``.

    Arguments:

        file: File to print to. (default: `sys.stdout`\ ``.buffer``)

    Positional arguments are silently ignored.

    .. versionadded:: 20.2.0
    """

    __slots__ = ("_file",)

    def __init__(self, file: BinaryIO | None = None):
        # Stored as-is; each produced logger resolves the default lazily.
        self._file = file

    def __call__(self, *args: Any) -> BytesLogger:
        return BytesLogger(self._file)

View File

@@ -0,0 +1,52 @@
# SPDX-License-Identifier: MIT OR Apache-2.0
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the MIT License. See the LICENSE file in the root of this
# repository for complete details.
"""
Generic utilities.
"""
from __future__ import annotations
import errno
import sys
from contextlib import suppress
from typing import Any, Callable
def until_not_interrupted(f: Callable[..., Any], *args: Any, **kw: Any) -> Any:
    """
    Retry until *f* succeeds or an exception that isn't caused by EINTR occurs.

    Arguments:

        f: A callable like a function.

        *args: Positional arguments for *f*.

        **kw: Keyword arguments for *f*.
    """
    while True:
        try:
            return f(*args, **kw)
        except OSError as exc:  # noqa: PERF203
            # A signal interrupted the call -- just retry it.
            if exc.args[0] != errno.EINTR:
                raise
def get_processname() -> str:
    """
    Return the current process's name, or ``"n/a"`` if it can't be determined.

    Based on code from
    https://github.com/python/cpython/blob/313f92a57bc3887026ec16adb536bb2b7580ce47/Lib/logging/__init__.py#L342-L352
    """
    # Only consult multiprocessing if it is already loaded -- importing it
    # here would be wasteful and can misfire under custom import hooks.
    mp: Any = sys.modules.get("multiprocessing")
    if mp is None:
        return "n/a"

    try:
        return mp.current_process().name
    except Exception:
        # Errors may occur if multiprocessing has not finished loading
        # yet - e.g. if a custom import hook causes third-party code
        # to run when multiprocessing calls import.
        return "n/a"

View File

@@ -0,0 +1,180 @@
# SPDX-License-Identifier: MIT OR Apache-2.0
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the MIT License. See the LICENSE file in the root of this
# repository for complete details.
"""
Primitives to deal with a concurrency supporting context, as introduced in
Python 3.7 as :mod:`contextvars`.
.. versionadded:: 20.1.0
.. versionchanged:: 21.1.0
Reimplemented without using a single dict as context carrier for improved
isolation. Every key-value pair is a separate `contextvars.ContextVar` now.
See :doc:`contextvars`.
"""
from __future__ import annotations
import contextlib
import contextvars
from typing import Any, Generator, Mapping
import structlog
from .typing import BindableLogger, EventDict, WrappedLogger
STRUCTLOG_KEY_PREFIX = "structlog_"
STRUCTLOG_KEY_PREFIX_LEN = len(STRUCTLOG_KEY_PREFIX)

# For proper isolation, we have to use a dict of ContextVars instead of a
# single ContextVar with a dict.
# See https://github.com/hynek/structlog/pull/302 for details.
_CONTEXT_VARS: dict[str, contextvars.ContextVar[Any]] = {}


def get_contextvars() -> dict[str, Any]:
    """
    Return a copy of the *structlog*-specific context-local context.

    .. versionadded:: 21.2.0
    """
    # Ellipsis is the "unbound" marker -- such keys are treated as absent.
    return {
        var.name[STRUCTLOG_KEY_PREFIX_LEN:]: value
        for var, value in contextvars.copy_context().items()
        if var.name.startswith(STRUCTLOG_KEY_PREFIX) and value is not Ellipsis
    }
def get_merged_contextvars(bound_logger: BindableLogger) -> dict[str, Any]:
    """
    Return a copy of the current context-local context merged with the context
    from *bound_logger*.

    The bound logger's own context wins on key collisions.

    .. versionadded:: 21.2.0
    """
    return {**get_contextvars(), **structlog.get_context(bound_logger)}
def merge_contextvars(
    logger: WrappedLogger, method_name: str, event_dict: EventDict
) -> EventDict:
    """
    A processor that merges in a global (context-local) context.

    Use this as your first processor in :func:`structlog.configure` to ensure
    context-local context is included in all log calls.

    .. versionadded:: 20.1.0
    .. versionchanged:: 21.1.0 See toplevel note.
    """
    for var, value in contextvars.copy_context().items():
        # Ellipsis marks an unbound key; setdefault keeps per-call values
        # bound on the logger ahead of the global context.
        if var.name.startswith(STRUCTLOG_KEY_PREFIX) and value is not Ellipsis:
            event_dict.setdefault(var.name[STRUCTLOG_KEY_PREFIX_LEN:], value)

    return event_dict
def clear_contextvars() -> None:
    """
    Clear the context-local context.

    The typical use-case for this function is to invoke it early in request-
    handling code.

    .. versionadded:: 20.1.0
    .. versionchanged:: 21.1.0 See toplevel note.
    """
    # Keys cannot be deleted from a Context, so "cleared" is expressed by
    # setting the Ellipsis sentinel.
    for var in contextvars.copy_context():
        if var.name.startswith(STRUCTLOG_KEY_PREFIX):
            var.set(Ellipsis)
def bind_contextvars(**kw: Any) -> Mapping[str, contextvars.Token[Any]]:
    r"""
    Put keys and values into the context-local context.

    Use this instead of :func:`~structlog.BoundLogger.bind` when you want some
    context to be global (context-local).

    Return the mapping of `contextvars.Token`\s resulting
    from setting the backing :class:`~contextvars.ContextVar`\s.
    Suitable for passing to :func:`reset_contextvars`.

    .. versionadded:: 20.1.0
    .. versionchanged:: 21.1.0 Return the `contextvars.Token` mapping
       rather than None. See also the toplevel note.
    """
    tokens = {}
    for key, value in kw.items():
        prefixed = f"{STRUCTLOG_KEY_PREFIX}{key}"

        # Reuse the process-wide ContextVar for this key, creating it on
        # first use (ContextVars must never be re-created per call).
        var = _CONTEXT_VARS.get(prefixed)
        if var is None:
            var = contextvars.ContextVar(prefixed, default=Ellipsis)
            _CONTEXT_VARS[prefixed] = var

        tokens[key] = var.set(value)

    return tokens
def reset_contextvars(**kw: contextvars.Token[Any]) -> None:
    r"""
    Reset contextvars corresponding to the given Tokens.

    .. versionadded:: 21.1.0
    """
    for key, token in kw.items():
        _CONTEXT_VARS[f"{STRUCTLOG_KEY_PREFIX}{key}"].reset(token)
def unbind_contextvars(*keys: str) -> None:
    """
    Remove *keys* from the context-local context if they are present.

    Use this instead of :func:`~structlog.BoundLogger.unbind` when you want to
    remove keys from a global (context-local) context.

    .. versionadded:: 20.1.0
    .. versionchanged:: 21.1.0 See toplevel note.
    """
    for key in keys:
        var = _CONTEXT_VARS.get(f"{STRUCTLOG_KEY_PREFIX}{key}")
        if var is not None:
            # Setting the Ellipsis sentinel marks the key as unbound.
            var.set(Ellipsis)
@contextlib.contextmanager
def bound_contextvars(**kw: Any) -> Generator[None, None, None]:
    """
    Bind *kw* to the current context-local context. Unbind or restore *kw*
    afterwards. Do **not** affect other keys.

    Can be used as a context manager or decorator.

    .. versionadded:: 21.4.0
    """
    # Remember the previous values of exactly the keys we are about to
    # shadow, so only those are restored afterwards.
    current = get_contextvars()
    shadowed = {key: current[key] for key in kw if key in current}

    bind_contextvars(**kw)
    try:
        yield
    finally:
        unbind_contextvars(*kw)
        bind_contextvars(**shadowed)

View File

@@ -0,0 +1,569 @@
# SPDX-License-Identifier: MIT OR Apache-2.0
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the MIT License. See the LICENSE file in the root of this
# repository for complete details.
"""
Helpers that make development with *structlog* more pleasant.
See also the narrative documentation in `development`.
"""
from __future__ import annotations
import shutil
import sys
import warnings
from dataclasses import dataclass
from io import StringIO
from types import ModuleType
from typing import (
Any,
Iterable,
Literal,
Protocol,
Sequence,
TextIO,
Type,
Union,
)
from ._frames import _format_exception
from .processors import _figure_out_exc_info
from .typing import EventDict, ExceptionRenderer, ExcInfo, WrappedLogger
try:
import colorama
except ImportError:
colorama = None
try:
import better_exceptions
except ImportError:
better_exceptions = None
try:
import rich
from rich.console import Console
from rich.traceback import Traceback
except ImportError:
rich = None # type: ignore[assignment]
__all__ = [
    "ConsoleRenderer",
    "plain_traceback",
    "rich_traceback",
    "better_traceback",
]

_IS_WINDOWS = sys.platform == "win32"

# Template for the error raised when an optional color/traceback
# dependency is missing.
_MISSING = "{who} requires the {package} package installed. "
_EVENT_WIDTH = 30  # pad the event name to so many characters
def _pad(s: str, length: int) -> str:
"""
Pads *s* to length *length*.
"""
missing = length - len(s)
return s + " " * (missing if missing > 0 else 0)
# ANSI style/color escape sequences used by the themes below.
if colorama is not None:
    RESET_ALL = colorama.Style.RESET_ALL
    BRIGHT = colorama.Style.BRIGHT
    DIM = colorama.Style.DIM
    RED = colorama.Fore.RED
    BLUE = colorama.Fore.BLUE
    CYAN = colorama.Fore.CYAN
    MAGENTA = colorama.Fore.MAGENTA
    YELLOW = colorama.Fore.YELLOW
    GREEN = colorama.Fore.GREEN
    RED_BACK = colorama.Back.RED
else:
    # These are the same values as the Colorama color codes. Redefining them
    # here allows users to specify that they want color without having to
    # install Colorama, which is only supposed to be necessary in Windows.
    RESET_ALL = "\033[0m"
    BRIGHT = "\033[1m"
    DIM = "\033[2m"
    RED = "\033[31m"
    BLUE = "\033[34m"
    CYAN = "\033[36m"
    MAGENTA = "\033[35m"
    YELLOW = "\033[33m"
    GREEN = "\033[32m"
    RED_BACK = "\033[41m"

# On Windows, colors are only available if Colorama is installed.
_has_colors = not _IS_WINDOWS or colorama is not None

# Prevent breakage of packages that used the old name of the variable.
_use_colors = _has_colors
class _Styles(Protocol):
    """
    Structural type for a console color theme: one escape-sequence string
    per rendered element.
    """

    reset: str
    bright: str
    level_critical: str
    level_exception: str
    level_error: str
    level_warn: str
    level_info: str
    level_debug: str
    level_notset: str
    timestamp: str
    logger_name: str
    kv_key: str
    kv_value: str


# A theme may be passed as an instance or as the class itself -- the built-in
# themes below are used as plain classes.
Styles = Union[_Styles, Type[_Styles]]
class _ColorfulStyles:
    """
    Default color theme, used when colorful output is enabled.
    """

    reset = RESET_ALL
    bright = BRIGHT
    level_critical = RED
    level_exception = RED
    level_error = RED
    level_warn = YELLOW
    level_info = GREEN
    level_debug = GREEN
    level_notset = RED_BACK
    timestamp = DIM
    logger_name = BLUE
    kv_key = CYAN
    kv_value = MAGENTA
class _PlainStyles:
    """
    No-op theme: every style is the empty string, used when colors are off.
    """

    reset = ""
    bright = ""
    level_critical = ""
    level_exception = ""
    level_error = ""
    level_warn = ""
    level_info = ""
    level_debug = ""
    level_notset = ""
    timestamp = ""
    logger_name = ""
    kv_key = ""
    kv_value = ""
def plain_traceback(sio: TextIO, exc_info: ExcInfo) -> None:
    """
    "Pretty"-print *exc_info* to *sio* using our own plain formatter.

    To be passed into `ConsoleRenderer`'s ``exception_formatter`` argument.

    Used by default if neither Rich nor *better-exceptions* are present.

    .. versionadded:: 21.2.0
    """
    formatted = _format_exception(exc_info)
    sio.write("\n" + formatted)
@dataclass
class RichTracebackFormatter:
    """
    A Rich traceback renderer with the given options.

    Pass an instance as `ConsoleRenderer`'s ``exception_formatter`` argument.

    See :class:`rich.traceback.Traceback` for details on the arguments.

    If a *width* of -1 is passed, the terminal width is used. If the width
    can't be determined, fall back to 80.

    .. versionadded:: 23.2.0
    """

    # Options forwarded to rich.console.Console / rich.traceback.Traceback.
    color_system: Literal[
        "auto", "standard", "256", "truecolor", "windows"
    ] = "truecolor"
    show_locals: bool = True
    max_frames: int = 100
    theme: str | None = None
    word_wrap: bool = False
    extra_lines: int = 3
    width: int = 100
    indent_guides: bool = True
    locals_max_length: int = 10
    locals_max_string: int = 80
    locals_hide_dunder: bool = True
    locals_hide_sunder: bool = False
    suppress: Sequence[str | ModuleType] = ()

    def __call__(self, sio: TextIO, exc_info: ExcInfo) -> None:
        """
        Render *exc_info* into *sio* using Rich, preceded by a newline.
        """
        if self.width == -1:
            # NOTE(review): this overwrites self.width in place, so the
            # terminal size is sampled only once per formatter instance --
            # later calls reuse the first measurement. Confirm that is
            # intended before relying on it.
            self.width, _ = shutil.get_terminal_size((80, 0))

        sio.write("\n")

        Console(file=sio, color_system=self.color_system).print(
            Traceback.from_exception(
                *exc_info,
                show_locals=self.show_locals,
                max_frames=self.max_frames,
                theme=self.theme,
                word_wrap=self.word_wrap,
                extra_lines=self.extra_lines,
                width=self.width,
                indent_guides=self.indent_guides,
                locals_max_length=self.locals_max_length,
                locals_max_string=self.locals_max_string,
                locals_hide_dunder=self.locals_hide_dunder,
                locals_hide_sunder=self.locals_hide_sunder,
                suppress=self.suppress,
            )
        )
rich_traceback = RichTracebackFormatter()
"""
Pretty-print *exc_info* to *sio* using the Rich package.
To be passed into `ConsoleRenderer`'s ``exception_formatter`` argument.
This is a `RichTracebackFormatter` with default arguments and used by default
if Rich is installed.
.. versionadded:: 21.2.0
"""
def better_traceback(sio: TextIO, exc_info: ExcInfo) -> None:
    """
    Pretty-print *exc_info* to *sio* using the *better-exceptions* package.

    To be passed into `ConsoleRenderer`'s ``exception_formatter`` argument.

    Used by default if *better-exceptions* is installed and Rich is absent.

    .. versionadded:: 21.2.0
    """
    lines = better_exceptions.format_exception(*exc_info)
    sio.write("\n" + "".join(lines))
# Pick the nicest available exception formatter: Rich first, then
# better-exceptions, then our plain built-in fallback.
if rich is not None:
    default_exception_formatter = rich_traceback
elif better_exceptions is not None:
    default_exception_formatter = better_traceback
else:
    default_exception_formatter = plain_traceback
class ConsoleRenderer:
    """
    Render ``event_dict`` nicely aligned, possibly in colors, and ordered.

    If ``event_dict`` contains a true-ish ``exc_info`` key, it will be rendered
    *after* the log line. If Rich_ or better-exceptions_ are present, in colors
    and with extra context.

    Arguments:

        pad_event: Pad the event to this many characters.

        colors:
            Use colors for a nicer output. `True` by default. On Windows only
            if Colorama_ is installed.

        force_colors:
            Force colors even for non-tty destinations. Use this option if your
            logs are stored in a file that is meant to be streamed to the
            console. Only meaningful on Windows.

        repr_native_str:
            When `True`, `repr` is also applied to native strings (i.e. unicode
            on Python 3 and bytes on Python 2). Setting this to `False` is
            useful if you want to have human-readable non-ASCII output on
            Python 2. The ``event`` key is *never* `repr` -ed.

        level_styles:
            When present, use these styles for colors. This must be a dict from
            level names (strings) to Colorama styles. The default can be
            obtained by calling `ConsoleRenderer.get_default_level_styles`

        exception_formatter:
            A callable to render ``exc_infos``. If Rich_ or better-exceptions_
            are installed, they are used for pretty-printing by default (rich_
            taking precedence). You can also manually set it to
            `plain_traceback`, `better_traceback`, an instance of
            `RichTracebackFormatter` like `rich_traceback`, or implement your
            own.

        sort_keys: Whether to sort keys when formatting. `True` by default.

        event_key:
            The key to look for the main log message. Needed when you rename it
            e.g. using `structlog.processors.EventRenamer`.

        timestamp_key:
            The key to look for timestamp of the log message. Needed when you
            rename it e.g. using `structlog.processors.EventRenamer`.

    Requires the Colorama_ package if *colors* is `True` **on Windows**.

    .. _Colorama: https://pypi.org/project/colorama/
    .. _better-exceptions: https://pypi.org/project/better-exceptions/
    .. _Rich: https://pypi.org/project/rich/

    .. versionadded:: 16.0.0
    .. versionadded:: 16.1.0 *colors*
    .. versionadded:: 17.1.0 *repr_native_str*
    .. versionadded:: 18.1.0 *force_colors*
    .. versionadded:: 18.1.0 *level_styles*
    .. versionchanged:: 19.2.0
       Colorama now initializes lazily to avoid unwanted initializations as
       ``ConsoleRenderer`` is used by default.
    .. versionchanged:: 19.2.0 Can be pickled now.
    .. versionchanged:: 20.1.0
       Colorama does not initialize lazily on Windows anymore because it breaks
       rendering.
    .. versionchanged:: 21.1.0
       It is additionally possible to set the logger name using the
       ``logger_name`` key in the ``event_dict``.
    .. versionadded:: 21.2.0 *exception_formatter*
    .. versionchanged:: 21.2.0
       `ConsoleRenderer` now handles the ``exc_info`` event dict key itself. Do
       **not** use the `structlog.processors.format_exc_info` processor
       together with `ConsoleRenderer` anymore! It will keep working, but you
       can't customize exception formatting and a warning will be raised
       if you ask for it.
    .. versionchanged:: 21.2.0
       The colors keyword now defaults to True on non-Windows systems, and
       either True or False in Windows depending on whether Colorama is
       installed.
    .. versionadded:: 21.3.0 *sort_keys*
    .. versionadded:: 22.1.0 *event_key*
    .. versionadded:: 23.2.0 *timestamp_key*
    """

    def __init__(
        self,
        pad_event: int = _EVENT_WIDTH,
        colors: bool = _has_colors,
        force_colors: bool = False,
        repr_native_str: bool = False,
        level_styles: Styles | None = None,
        exception_formatter: ExceptionRenderer = default_exception_formatter,
        sort_keys: bool = True,
        event_key: str = "event",
        timestamp_key: str = "timestamp",
    ):
        styles: Styles
        if colors:
            if _IS_WINDOWS:  # pragma: no cover
                # On Windows, we can't do colorful output without colorama.
                if colorama is None:
                    classname = self.__class__.__name__
                    raise SystemError(
                        _MISSING.format(
                            who=classname + " with `colors=True`",
                            package="colorama",
                        )
                    )
                # Colorama must be init'd on Windows, but must NOT be
                # init'd on other OSes, because it can break colors.
                if force_colors:
                    colorama.deinit()
                    colorama.init(strip=False)
                else:
                    colorama.init()

            styles = _ColorfulStyles
        else:
            styles = _PlainStyles

        self._styles = styles
        self._pad_event = pad_event

        if level_styles is None:
            self._level_to_color = self.get_default_level_styles(colors)
        else:
            self._level_to_color = level_styles

        # NOTE: this appends the "bright" code to every level color --
        # it mutates a caller-supplied *level_styles* dict in place.
        for key in self._level_to_color:
            self._level_to_color[key] += styles.bright
        # Width used to align the "[level]" column across log lines.
        self._longest_level = len(
            max(self._level_to_color.keys(), key=lambda e: len(e))
        )

        self._repr_native_str = repr_native_str
        self._exception_formatter = exception_formatter
        self._sort_keys = sort_keys
        self._event_key = event_key
        self._timestamp_key = timestamp_key

    def _repr(self, val: Any) -> str:
        """
        Determine representation of *val* depending on its type &
        self._repr_native_str.
        """
        if self._repr_native_str is True:
            return repr(val)

        # Native strings pass through unquoted; everything else is repr-ed.
        if isinstance(val, str):
            return val

        return repr(val)

    def __call__(  # noqa: PLR0912
        self, logger: WrappedLogger, name: str, event_dict: EventDict
    ) -> str:
        """
        Render *event_dict* into one aligned, optionally colored log line.

        Consumes (pops) the well-known keys; all remaining keys are rendered
        as ``key=value`` pairs at the end of the line.
        """
        sio = StringIO()

        ts = event_dict.pop(self._timestamp_key, None)
        if ts is not None:
            sio.write(
                # can be a number if timestamp is UNIXy
                self._styles.timestamp
                + str(ts)
                + self._styles.reset
                + " "
            )
        level = event_dict.pop("level", None)
        if level is not None:
            sio.write(
                "["
                + self._level_to_color.get(level, "")
                + _pad(level, self._longest_level)
                + self._styles.reset
                + "] "
            )

        # force event to str for compatibility with standard library
        event = event_dict.pop(self._event_key, None)
        if not isinstance(event, str):
            event = str(event)

        # Only pad the event when key/value pairs follow it.
        if event_dict:
            event = _pad(event, self._pad_event) + self._styles.reset + " "
        else:
            event += self._styles.reset
        sio.write(self._styles.bright + event)

        # "logger" wins over the legacy "logger_name" key.
        logger_name = event_dict.pop("logger", None)
        if logger_name is None:
            logger_name = event_dict.pop("logger_name", None)

        if logger_name is not None:
            sio.write(
                "["
                + self._styles.logger_name
                + self._styles.bright
                + logger_name
                + self._styles.reset
                + "] "
            )

        stack = event_dict.pop("stack", None)
        exc = event_dict.pop("exception", None)
        exc_info = event_dict.pop("exc_info", None)

        event_dict_keys: Iterable[str] = event_dict.keys()
        if self._sort_keys:
            event_dict_keys = sorted(event_dict_keys)

        sio.write(
            " ".join(
                self._styles.kv_key
                + key
                + self._styles.reset
                + "="
                + self._styles.kv_value
                + self._repr(event_dict[key])
                + self._styles.reset
                for key in event_dict_keys
            )
        )

        if stack is not None:
            sio.write("\n" + stack)
            # Separator between the stack and a following exception.
            if exc_info or exc is not None:
                sio.write("\n\n" + "=" * 79 + "\n")

        if exc_info:
            exc_info = _figure_out_exc_info(exc_info)

            if exc_info != (None, None, None):
                self._exception_formatter(sio, exc_info)
        elif exc is not None:
            # A pre-formatted exception string means format_exc_info already
            # ran -- custom formatters can't do anything with it, so warn.
            if self._exception_formatter is not plain_traceback:
                warnings.warn(
                    "Remove `format_exc_info` from your processor chain "
                    "if you want pretty exceptions.",
                    stacklevel=2,
                )
            sio.write("\n" + exc)

        return sio.getvalue()

    @staticmethod
    def get_default_level_styles(colors: bool = True) -> Any:
        """
        Get the default styles for log levels

        This is intended to be used with `ConsoleRenderer`'s ``level_styles``
        parameter. For example, if you are adding custom levels in your
        home-grown :func:`~structlog.stdlib.add_log_level` you could do::

            my_styles = ConsoleRenderer.get_default_level_styles()
            my_styles["EVERYTHING_IS_ON_FIRE"] = my_styles["critical"]
            renderer = ConsoleRenderer(level_styles=my_styles)

        Arguments:

            colors:
                Whether to use colorful styles. This must match the *colors*
                parameter to `ConsoleRenderer`. Default: `True`.
        """
        styles: Styles
        styles = _ColorfulStyles if colors else _PlainStyles
        return {
            "critical": styles.level_critical,
            "exception": styles.level_exception,
            "error": styles.level_error,
            "warn": styles.level_warn,
            "warning": styles.level_warn,
            "info": styles.level_info,
            "debug": styles.level_debug,
            "notset": styles.level_notset,
        }
# Unique marker to distinguish "key absent" from "key set to a falsy value".
_SENTINEL = object()


def set_exc_info(
    logger: WrappedLogger, method_name: str, event_dict: EventDict
) -> EventDict:
    """
    Set ``event_dict["exc_info"] = True`` if *method_name* is ``"exception"``.

    Do nothing if the name is different or ``exc_info`` is already set.
    """
    is_exception_call = method_name == "exception"
    already_set = event_dict.get("exc_info", _SENTINEL) is not _SENTINEL

    if is_exception_call and not already_set:
        event_dict["exc_info"] = True

    return event_dict

View File

@@ -0,0 +1,18 @@
# SPDX-License-Identifier: MIT OR Apache-2.0
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the MIT License. See the LICENSE file in the root of this
# repository for complete details.
"""
Exceptions factored out to avoid import loops.
"""
from __future__ import annotations
class DropEvent(BaseException):
    """
    If raised by a processor, the event gets silently dropped.

    Derives from BaseException because it's technically not an error.
    """

View File

@@ -0,0 +1,915 @@
# SPDX-License-Identifier: MIT OR Apache-2.0
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the MIT License. See the LICENSE file in the root of this
# repository for complete details.
"""
Processors useful regardless of the logging framework.
"""
from __future__ import annotations
import datetime
import enum
import inspect
import json
import logging
import operator
import os
import sys
import threading
import time
from typing import (
Any,
Callable,
ClassVar,
Collection,
NamedTuple,
Sequence,
TextIO,
)
from ._frames import (
_find_first_app_frame_and_name,
_format_exception,
_format_stack,
)
from ._log_levels import _NAME_TO_LEVEL, add_log_level
from ._utils import get_processname
from .tracebacks import ExceptionDictTransformer
from .typing import EventDict, ExceptionTransformer, ExcInfo, WrappedLogger
# Public API of this module.
__all__ = [
    "_NAME_TO_LEVEL",  # some people rely on it being here
    "add_log_level",
    "CallsiteParameter",
    "CallsiteParameterAdder",
    "dict_tracebacks",
    "EventRenamer",
    "ExceptionPrettyPrinter",
    "format_exc_info",
    "JSONRenderer",
    "KeyValueRenderer",
    "StackInfoRenderer",
    "TimeStamper",
    "UnicodeDecoder",
    "UnicodeEncoder",
]
class KeyValueRenderer:
    """
    Render ``event_dict`` as a list of ``Key=repr(Value)`` pairs.

    Arguments:

        sort_keys: Whether to sort keys when formatting.

        key_order:
            List of keys that should be rendered in this exact order. Missing
            keys will be rendered as ``None``, extra keys depending on
            *sort_keys* and the dict class.

        drop_missing:
            When ``True``, extra keys in *key_order* will be dropped rather
            than rendered as ``None``.

        repr_native_str:
            When ``True``, :func:`repr()` is also applied to native strings.

    .. versionadded:: 0.2.0 *key_order*
    .. versionadded:: 16.1.0 *drop_missing*
    .. versionadded:: 17.1.0 *repr_native_str*
    """

    def __init__(
        self,
        sort_keys: bool = False,
        key_order: Sequence[str] | None = None,
        drop_missing: bool = False,
        repr_native_str: bool = True,
    ):
        self._ordered_items = _items_sorter(sort_keys, key_order, drop_missing)

        if repr_native_str is True:
            self._repr = repr
        else:
            # Leave native strings untouched; repr() everything else.
            def _plain_str_repr(value: Any) -> str:
                return value if isinstance(value, str) else repr(value)

            self._repr = _plain_str_repr

    def __call__(
        self, _: WrappedLogger, __: str, event_dict: EventDict
    ) -> str:
        return " ".join(
            f"{key}={self._repr(value)}"
            for key, value in self._ordered_items(event_dict)
        )
class LogfmtRenderer:
    """
    Render ``event_dict`` using the logfmt_ format.

    .. _logfmt: https://brandur.org/logfmt

    Arguments:

        sort_keys: Whether to sort keys when formatting.

        key_order:
            List of keys that should be rendered in this exact order. Missing
            keys are rendered with empty values, extra keys depending on
            *sort_keys* and the dict class.

        drop_missing:
            When ``True``, extra keys in *key_order* will be dropped rather
            than rendered with empty values.

        bool_as_flag:
            When ``True``, render ``{"flag": True}`` as ``flag``, instead of
            ``flag=true``. ``{"flag": False}`` is always rendered as
            ``flag=false``.

    Raises:
        ValueError: If a key contains non printable or space characters.

    .. versionadded:: 21.5.0
    """

    def __init__(
        self,
        sort_keys: bool = False,
        key_order: Sequence[str] | None = None,
        drop_missing: bool = False,
        bool_as_flag: bool = True,
    ):
        self._ordered_items = _items_sorter(sort_keys, key_order, drop_missing)
        self.bool_as_flag = bool_as_flag

    def __call__(
        self, _: WrappedLogger, __: str, event_dict: EventDict
    ) -> str:
        elements: list[str] = []
        for key, value in self._ordered_items(event_dict):
            # Keys cannot be quoted in logfmt, so spaces and control
            # characters in them are unrepresentable.
            if any(c <= " " for c in key):
                msg = f'Invalid key: "{key}"'
                raise ValueError(msg)

            if value is None:
                # None renders as an empty value.
                elements.append(f"{key}=")
                continue

            if isinstance(value, bool):
                if self.bool_as_flag and value:
                    elements.append(f"{key}")
                    continue
                value = "true" if value else "false"

            value = f"{value}".replace('"', '\\"')

            # Bug fix: a value containing a double quote must be quoted as
            # well -- previously (e.g. for 'a"b') the escaped quote ended up
            # outside any quoted string, which is not valid logfmt.
            if " " in value or "=" in value or '"' in value:
                value = f'"{value}"'

            elements.append(f"{key}={value}")

        return " ".join(elements)


def _items_sorter(
    sort_keys: bool,
    key_order: Sequence[str] | None,
    drop_missing: bool,
) -> Callable[[EventDict], list[tuple[str, object]]]:
    """
    Return a function to sort items from an ``event_dict``.

    See `KeyValueRenderer` for an explanation of the parameters.
    """
    # Use an optimized version for each case.
    if key_order and sort_keys:

        def ordered_items(event_dict: EventDict) -> list[tuple[str, Any]]:
            # Explicitly ordered keys first, remaining keys sorted.
            items = []
            for key in key_order:
                value = event_dict.pop(key, None)
                if value is not None or not drop_missing:
                    items.append((key, value))

            items += sorted(event_dict.items())

            return items

    elif key_order:

        def ordered_items(event_dict: EventDict) -> list[tuple[str, Any]]:
            # Explicitly ordered keys first, remaining keys in dict order.
            items = []
            for key in key_order:
                value = event_dict.pop(key, None)
                if value is not None or not drop_missing:
                    items.append((key, value))

            items += event_dict.items()

            return items

    elif sort_keys:

        def ordered_items(event_dict: EventDict) -> list[tuple[str, Any]]:
            return sorted(event_dict.items())

    else:
        ordered_items = operator.methodcaller(  # type: ignore[assignment]
            "items"
        )

    return ordered_items
class UnicodeEncoder:
    """
    Encode unicode values in ``event_dict``.

    Arguments:
        encoding: Encoding to encode to (default: ``"utf-8"``).

        errors:
            How to cope with encoding errors (default ``"backslashreplace"``).

    Just put it in the processor chain before the renderer.

    .. note:: Not very useful in a Python 3-only world.
    """

    _encoding: str
    _errors: str

    def __init__(
        self, encoding: str = "utf-8", errors: str = "backslashreplace"
    ) -> None:
        self._encoding = encoding
        self._errors = errors

    def __call__(
        self, logger: WrappedLogger, name: str, event_dict: EventDict
    ) -> EventDict:
        # Replace every str value with its encoded bytes form, in place.
        enc, errs = self._encoding, self._errors
        for key, value in event_dict.items():
            if isinstance(value, str):
                event_dict[key] = value.encode(enc, errs)

        return event_dict
class UnicodeDecoder:
    """
    Decode byte string values in ``event_dict``.

    Arguments:
        encoding: Encoding to decode from (default: ``"utf-8"``).

        errors: How to cope with encoding errors (default: ``"replace"``).

    Useful to prevent ``b"abc"`` being rendered as ``'b"abc"'``.

    Just put it in the processor chain before the renderer.

    .. versionadded:: 15.4.0
    """

    _encoding: str
    _errors: str

    def __init__(
        self, encoding: str = "utf-8", errors: str = "replace"
    ) -> None:
        self._encoding = encoding
        self._errors = errors

    def __call__(
        self, logger: WrappedLogger, name: str, event_dict: EventDict
    ) -> EventDict:
        # Replace every bytes value with its decoded str form, in place.
        enc, errs = self._encoding, self._errors
        for key, value in event_dict.items():
            if isinstance(value, bytes):
                event_dict[key] = value.decode(enc, errs)

        return event_dict
class JSONRenderer:
    """
    Render the ``event_dict`` using ``serializer(event_dict, **dumps_kw)``.

    Arguments:
        dumps_kw:
            Are passed unmodified to *serializer*. If *default* is passed, it
            will disable support for ``__structlog__``-based serialization.

        serializer:
            A :func:`json.dumps`-compatible callable that will be used to
            format the string. This can be used to use alternative JSON
            encoders like `orjson <https://pypi.org/project/orjson/>`__ or
            `RapidJSON <https://pypi.org/project/python-rapidjson/>`_
            (default: :func:`json.dumps`).

    .. versionadded:: 0.2.0 Support for ``__structlog__`` serialization method.
    .. versionadded:: 15.4.0 *serializer* parameter.
    .. versionadded:: 18.2.0
       Serializer's *default* parameter can be overwritten now.
    """

    def __init__(
        self,
        serializer: Callable[..., str | bytes] = json.dumps,
        **dumps_kw: Any,
    ) -> None:
        # Unless the caller supplied their own *default*, fall back to
        # ``__structlog__``-aware serialization.
        if "default" not in dumps_kw:
            dumps_kw["default"] = _json_fallback_handler
        self._dumps = serializer
        self._dumps_kw = dumps_kw

    def __call__(
        self, logger: WrappedLogger, name: str, event_dict: EventDict
    ) -> str | bytes:
        """
        Serialize *event_dict*; the return type mirrors *serializer*'s.
        """
        return self._dumps(event_dict, **self._dumps_kw)
def _json_fallback_handler(obj: Any) -> Any:
    """
    Serialize custom datatypes and pass the rest to __structlog__ & repr().

    Installed as the ``default`` hook by `JSONRenderer` unless overridden.
    """
    # circular imports :(
    from structlog.threadlocal import _ThreadLocalDictWrapper

    if isinstance(obj, _ThreadLocalDictWrapper):
        # Unwrap to the plain dict the wrapper proxies to.
        return obj._dict

    try:
        # Objects may opt into JSON serialization via a __structlog__ method.
        return obj.__structlog__()
    except AttributeError:
        # Last resort: repr() always yields a serializable string.
        return repr(obj)
class ExceptionRenderer:
    """
    Replace an ``exc_info`` field with an ``exception`` field which is rendered
    by *exception_formatter*.

    The contents of the ``exception`` field depends on the return value of the
    *exception_formatter* that is passed:

    - The default produces a formatted string via Python's built-in traceback
      formatting (this is :obj:`.format_exc_info`).
    - If you pass a :class:`~structlog.tracebacks.ExceptionDictTransformer`, it
      becomes a list of stack dicts that can be serialized to JSON.

    If *event_dict* contains the key ``exc_info``, there are three possible
    behaviors:

    1. If the value is a tuple, render it into the key ``exception``.
    2. If the value is an Exception, render it into the key ``exception``.
    3. If the value is true but not a tuple, obtain exc_info ourselves and
       render that.

    If there is no ``exc_info`` key, the *event_dict* is not touched. This
    behavior is analog to the one of the stdlib's logging.

    Arguments:
        exception_formatter:
            A callable that is used to format the exception from the
            ``exc_info`` field into the ``exception`` field.

    .. seealso::
        :doc:`exceptions` for a broader explanation of *structlog*'s exception
        features.

    .. versionadded:: 22.1.0
    """

    def __init__(
        self,
        exception_formatter: ExceptionTransformer = _format_exception,
    ) -> None:
        self.format_exception = exception_formatter

    def __call__(
        self, logger: WrappedLogger, name: str, event_dict: EventDict
    ) -> EventDict:
        # pop() removes exc_info so the raw tuple/exception never reaches
        # the final renderer.
        exc_info = event_dict.pop("exc_info", None)

        if exc_info:
            # Normalize tuple / exception instance / truthy flag into a real
            # (type, value, traceback) triple before formatting.
            event_dict["exception"] = self.format_exception(
                _figure_out_exc_info(exc_info)
            )

        return event_dict
format_exc_info = ExceptionRenderer()
"""
Replace an ``exc_info`` field with an ``exception`` string field using Python's
built-in traceback formatting.
If *event_dict* contains the key ``exc_info``, there are three possible
behaviors:
1. If the value is a tuple, render it into the key ``exception``.
2. If the value is an Exception render it into the key ``exception``.
3. If the value is true but no tuple, obtain exc_info ourselves and render
that.
If there is no ``exc_info`` key, the *event_dict* is not touched. This behavior
is analog to the one of the stdlib's logging.
.. seealso::
:doc:`exceptions` for a broader explanation of *structlog*'s exception
features.
"""
dict_tracebacks = ExceptionRenderer(ExceptionDictTransformer())
"""
Replace an ``exc_info`` field with an ``exception`` field containing structured
tracebacks suitable for, e.g., JSON output.
It is a shortcut for :class:`ExceptionRenderer` with a
:class:`~structlog.tracebacks.ExceptionDictTransformer`.
The treatment of the ``exc_info`` key is identical to `format_exc_info`.
.. versionadded:: 22.1.0
.. seealso::
:doc:`exceptions` for a broader explanation of *structlog*'s exception
features.
"""
class TimeStamper:
    """
    Add a timestamp to ``event_dict``.

    Arguments:
        fmt:
            strftime format string, or ``"iso"`` for `ISO 8601
            <https://en.wikipedia.org/wiki/ISO_8601>`_, or `None` for a `UNIX
            timestamp <https://en.wikipedia.org/wiki/Unix_time>`_.

        utc: Whether timestamp should be in UTC or local time.

        key: Target key in *event_dict* for added timestamps.

    .. versionchanged:: 19.2.0 Can be pickled now.
    """

    __slots__ = ("_stamper", "fmt", "utc", "key")

    def __init__(
        self,
        fmt: str | None = None,
        utc: bool = True,
        key: str = "timestamp",
    ) -> None:
        self.fmt, self.utc, self.key = fmt, utc, key
        # The actual work happens in a specialized closure that is picked
        # once here, so __call__ carries no per-event branching.
        self._stamper = _make_stamper(fmt, utc, key)

    def __call__(
        self, logger: WrappedLogger, name: str, event_dict: EventDict
    ) -> EventDict:
        return self._stamper(event_dict)

    # Closures don't pickle, so only the constructor arguments are serialized
    # and the stamper closure is re-created on unpickling.
    def __getstate__(self) -> dict[str, Any]:
        return {"fmt": self.fmt, "utc": self.utc, "key": self.key}

    def __setstate__(self, state: dict[str, Any]) -> None:
        self.fmt = state["fmt"]
        self.utc = state["utc"]
        self.key = state["key"]

        self._stamper = _make_stamper(**state)
def _make_stamper(
fmt: str | None, utc: bool, key: str
) -> Callable[[EventDict], EventDict]:
"""
Create a stamper function.
"""
if fmt is None and not utc:
msg = "UNIX timestamps are always UTC."
raise ValueError(msg)
now: Callable[[], datetime.datetime]
if utc:
def now() -> datetime.datetime:
return datetime.datetime.now(tz=datetime.timezone.utc)
else:
def now() -> datetime.datetime:
# A naive local datetime is fine here, because we only format it.
return datetime.datetime.now() # noqa: DTZ005
if fmt is None:
def stamper_unix(event_dict: EventDict) -> EventDict:
event_dict[key] = time.time()
return event_dict
return stamper_unix
if fmt.upper() == "ISO":
def stamper_iso_local(event_dict: EventDict) -> EventDict:
event_dict[key] = now().isoformat()
return event_dict
def stamper_iso_utc(event_dict: EventDict) -> EventDict:
event_dict[key] = now().isoformat().replace("+00:00", "Z")
return event_dict
if utc:
return stamper_iso_utc
return stamper_iso_local
def stamper_fmt(event_dict: EventDict) -> EventDict:
event_dict[key] = now().strftime(fmt)
return event_dict
return stamper_fmt
class MaybeTimeStamper:
    """
    A timestamper that only adds a timestamp if there is none.

    This allows you to overwrite the timestamp key in the event dict, for
    example when the event is coming from another system.

    It takes the same arguments as `TimeStamper`.

    .. versionadded:: 23.2.0
    """

    __slots__ = ("stamper",)

    def __init__(
        self,
        fmt: str | None = None,
        utc: bool = True,
        key: str = "timestamp",
    ):
        self.stamper = TimeStamper(fmt=fmt, utc=utc, key=key)

    def __call__(
        self, logger: WrappedLogger, name: str, event_dict: EventDict
    ) -> EventDict:
        """
        Stamp *event_dict* unless it already carries the configured key.
        """
        # Bug fix: check the configured key instead of a hard-coded
        # "timestamp". Previously, with e.g. key="ts", a pre-existing "ts"
        # value was overwritten and an unrelated "timestamp" key wrongly
        # suppressed stamping.
        if self.stamper.key not in event_dict:
            return self.stamper(logger, name, event_dict)

        return event_dict
def _figure_out_exc_info(v: Any) -> ExcInfo:
"""
Depending on the Python version will try to do the smartest thing possible
to transform *v* into an ``exc_info`` tuple.
"""
if isinstance(v, BaseException):
return (v.__class__, v, v.__traceback__)
if isinstance(v, tuple):
return v # type: ignore[return-value]
if v:
return sys.exc_info() # type: ignore[return-value]
return v
class ExceptionPrettyPrinter:
    """
    Pretty print exceptions and remove them from the ``event_dict``.

    Arguments:
        file: Target file for output (default: ``sys.stdout``).

        exception_formatter:
            Callable used to render an ``exc_info`` triple into text.

    This processor is mostly for development and testing so you can read
    exceptions properly formatted.

    It behaves like `format_exc_info` except it removes the exception data
    from the event dictionary after printing it.

    It's tolerant to having `format_exc_info` in front of itself in the
    processor chain but doesn't require it. In other words, it handles both
    ``exception`` as well as ``exc_info`` keys.

    .. versionadded:: 0.4.0
    .. versionchanged:: 16.0.0
        Added support for passing exceptions as ``exc_info`` on Python 3.
    """

    def __init__(
        self,
        file: TextIO | None = None,
        exception_formatter: ExceptionTransformer = _format_exception,
    ) -> None:
        self._file = file if file is not None else sys.stdout
        # Bug fix: *exception_formatter* was accepted but never stored, so a
        # custom formatter was silently ignored and __call__ always used
        # _format_exception. Store it and honor it below.
        self._exception_formatter = exception_formatter

    def __call__(
        self, logger: WrappedLogger, name: str, event_dict: EventDict
    ) -> EventDict:
        """
        Print a formatted exception and strip exception data from the event.
        """
        # Prefer an already-rendered "exception" (e.g. from format_exc_info
        # earlier in the chain); otherwise render "exc_info" ourselves.
        exc = event_dict.pop("exception", None)
        if exc is None:
            exc_info = _figure_out_exc_info(event_dict.pop("exc_info", None))
            if exc_info:
                exc = self._exception_formatter(exc_info)

        if exc:
            print(exc, file=self._file)

        return event_dict
class StackInfoRenderer:
    """
    Add stack information with key ``stack`` if ``stack_info`` is `True`.

    Useful when you want to attach a stack dump to a log entry without
    involving an exception; analogous to the *stack_info* argument of the
    Python standard library logging.

    Arguments:
        additional_ignores:
            By default, stack frames coming from *structlog* are ignored. With
            this argument you can add additional names that are ignored,
            before the stack starts being rendered. They are matched using
            ``startswith()``, so they don't have to match exactly. The names
            are used to find the first relevant name, therefore once a frame
            is found that doesn't start with *structlog* or one of
            *additional_ignores*, **no filtering** is applied to subsequent
            frames.

    .. versionadded:: 0.4.0
    .. versionadded:: 22.1.0 *additional_ignores*
    """

    __slots__ = ("_additional_ignores",)

    def __init__(self, additional_ignores: list[str] | None = None) -> None:
        self._additional_ignores = additional_ignores

    def __call__(
        self, logger: WrappedLogger, name: str, event_dict: EventDict
    ) -> EventDict:
        # "stack_info" is consumed either way; a falsy value means "off".
        wants_stack = event_dict.pop("stack_info", None)
        if wants_stack:
            frame, _ = _find_first_app_frame_and_name(self._additional_ignores)
            event_dict["stack"] = _format_stack(frame)

        return event_dict
class CallsiteParameter(enum.Enum):
    """
    Callsite parameters that can be added to an event dictionary with the
    `structlog.processors.CallsiteParameterAdder` processor class.

    The string values of the members of this enum will be used as the keys for
    the callsite parameters in the event dictionary.

    .. versionadded:: 21.5.0
    """

    #: The full path to the python source file of the callsite.
    PATHNAME = "pathname"
    #: The basename part of the full path to the python source file of the
    #: callsite.
    FILENAME = "filename"
    #: The python module the callsite was in. This mimics the module attribute
    #: of `logging.LogRecord` objects and will be the basename, without
    #: extension, of the full path to the python source file of the callsite.
    MODULE = "module"
    #: The name of the function that the callsite was in.
    FUNC_NAME = "func_name"
    #: The line number of the callsite.
    LINENO = "lineno"
    #: The ID of the thread the callsite was executed in.
    THREAD = "thread"
    #: The name of the thread the callsite was executed in.
    THREAD_NAME = "thread_name"
    #: The ID of the process the callsite was executed in.
    PROCESS = "process"
    #: The name of the process the callsite was executed in.
    PROCESS_NAME = "process_name"
class CallsiteParameterAdder:
    """
    Adds parameters of the callsite that an event dictionary originated from to
    the event dictionary. This processor can be used to enrich events
    dictionaries with information such as the function name, line number and
    filename that an event dictionary originated from.

    .. warning::
        This processor cannot detect the correct callsite for invocation of
        async functions.

    If the event dictionary has an embedded `logging.LogRecord` object and did
    not originate from *structlog* then the callsite information will be
    determined from the `logging.LogRecord` object. For event dictionaries
    without an embedded `logging.LogRecord` object the callsite will be
    determined from the stack trace, ignoring all intra-structlog calls, calls
    from the `logging` module, and stack frames from modules with names that
    start with values in ``additional_ignores``, if it is specified.

    The keys used for callsite parameters in the event dictionary are the
    string values of `CallsiteParameter` enum members.

    Arguments:
        parameters:
            A collection of `CallsiteParameter` values that should be added to
            the event dictionary.

        additional_ignores:
            Additional names with which a stack frame's module name must not
            start for it to be considered when determining the callsite.

    .. note::
        When used with `structlog.stdlib.ProcessorFormatter` the most efficient
        configuration is to either use this processor in ``foreign_pre_chain``
        of `structlog.stdlib.ProcessorFormatter` and in ``processors`` of
        `structlog.configure`, or to use it in ``processors`` of
        `structlog.stdlib.ProcessorFormatter` without using it in
        ``processors`` of `structlog.configure` and ``foreign_pre_chain`` of
        `structlog.stdlib.ProcessorFormatter`.

    .. versionadded:: 21.5.0
    """

    # Maps each parameter to a callable that extracts its value from the
    # callsite's (module name, frame info) pair.
    _handlers: ClassVar[
        dict[CallsiteParameter, Callable[[str, inspect.Traceback], Any]]
    ] = {
        CallsiteParameter.PATHNAME: (
            lambda module, frame_info: frame_info.filename
        ),
        CallsiteParameter.FILENAME: (
            lambda module, frame_info: os.path.basename(frame_info.filename)
        ),
        CallsiteParameter.MODULE: (
            lambda module, frame_info: os.path.splitext(
                os.path.basename(frame_info.filename)
            )[0]
        ),
        CallsiteParameter.FUNC_NAME: (
            lambda module, frame_info: frame_info.function
        ),
        CallsiteParameter.LINENO: (
            lambda module, frame_info: frame_info.lineno
        ),
        CallsiteParameter.THREAD: (
            lambda module, frame_info: threading.get_ident()
        ),
        CallsiteParameter.THREAD_NAME: (
            lambda module, frame_info: threading.current_thread().name
        ),
        CallsiteParameter.PROCESS: (lambda module, frame_info: os.getpid()),
        CallsiteParameter.PROCESS_NAME: (
            lambda module, frame_info: get_processname()
        ),
    }
    # Maps each parameter to the corresponding logging.LogRecord attribute,
    # used when the event carries a foreign (non-structlog) record.
    _record_attribute_map: ClassVar[dict[CallsiteParameter, str]] = {
        CallsiteParameter.PATHNAME: "pathname",
        CallsiteParameter.FILENAME: "filename",
        CallsiteParameter.MODULE: "module",
        CallsiteParameter.FUNC_NAME: "funcName",
        CallsiteParameter.LINENO: "lineno",
        CallsiteParameter.THREAD: "thread",
        CallsiteParameter.THREAD_NAME: "threadName",
        CallsiteParameter.PROCESS: "process",
        CallsiteParameter.PROCESS_NAME: "processName",
    }

    # Default for *parameters*: every known callsite parameter.
    _all_parameters: ClassVar[set[CallsiteParameter]] = set(CallsiteParameter)

    class _RecordMapping(NamedTuple):
        # Pairs the event-dict key with the LogRecord attribute it mirrors.
        event_dict_key: str
        record_attribute: str

    __slots__ = ("_active_handlers", "_additional_ignores", "_record_mappings")

    def __init__(
        self,
        parameters: Collection[CallsiteParameter] = _all_parameters,
        additional_ignores: list[str] | None = None,
    ) -> None:
        if additional_ignores is None:
            additional_ignores = []

        # Ignore stack frames from the logging module. They will occur if this
        # processor is used in ProcessorFormatter, and additionally the logging
        # module should not be logging using structlog.
        self._additional_ignores = ["logging", *additional_ignores]

        # Pre-compute both lookup paths (stack-based handlers and LogRecord
        # attribute mappings) so __call__ only iterates, never filters.
        self._active_handlers: list[
            tuple[CallsiteParameter, Callable[[str, inspect.Traceback], Any]]
        ] = []
        self._record_mappings: list[CallsiteParameterAdder._RecordMapping] = []
        for parameter in parameters:
            self._active_handlers.append(
                (parameter, self._handlers[parameter])
            )
            self._record_mappings.append(
                self._RecordMapping(
                    parameter.value,
                    self._record_attribute_map[parameter],
                )
            )

    def __call__(
        self, logger: logging.Logger, name: str, event_dict: EventDict
    ) -> EventDict:
        """
        Add the configured callsite parameters to *event_dict*.
        """
        record: logging.LogRecord | None = event_dict.get("_record")
        from_structlog: bool | None = event_dict.get("_from_structlog")
        # If the event dictionary has a record, but it comes from structlog,
        # then the callsite parameters of the record will not be correct.
        if record is not None and not from_structlog:
            for mapping in self._record_mappings:
                event_dict[mapping.event_dict_key] = record.__dict__[
                    mapping.record_attribute
                ]
        else:
            frame, module = _find_first_app_frame_and_name(
                additional_ignores=self._additional_ignores
            )
            frame_info = inspect.getframeinfo(frame)
            for parameter, handler in self._active_handlers:
                event_dict[parameter.value] = handler(module, frame_info)

        return event_dict
class EventRenamer:
    r"""
    Rename the ``event`` key in event dicts.

    This is useful if you want to use consistent log message keys across
    platforms and/or use the ``event`` key for something custom.

    .. warning::
        It's recommended to put this processor right before the renderer,
        since some processors may rely on the presence and meaning of the
        ``event`` key.

    Arguments:
        to: Rename ``event_dict["event"]`` to ``event_dict[to]``

        replace_by:
            Rename ``event_dict[replace_by]`` to ``event_dict["event"]``.
            *replace_by* missing from ``event_dict`` is handled gracefully.

    .. versionadded:: 22.1.0

    See also the :ref:`rename-event` recipe.
    """

    def __init__(self, to: str, replace_by: str | None = None):
        self.to = to
        self.replace_by = replace_by

    def __call__(
        self, logger: logging.Logger, name: str, event_dict: EventDict
    ) -> EventDict:
        """
        Move ``event`` to its new key, optionally refilling ``event``.
        """
        # The "event" key is mandatory; a missing one raises KeyError.
        event_dict[self.to] = event_dict.pop("event")

        if self.replace_by is not None:
            replacement = event_dict.pop(self.replace_by, None)
            if replacement is not None:
                event_dict["event"] = replacement

        return event_dict

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,209 @@
# SPDX-License-Identifier: MIT OR Apache-2.0
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the MIT License. See the LICENSE file in the root of this
# repository for complete details.
"""
Helpers to test your application's logging behavior.
.. versionadded:: 20.1.0
See :doc:`testing`.
"""
from __future__ import annotations
from contextlib import contextmanager
from typing import Any, Generator, NamedTuple, NoReturn
from ._config import configure, get_config
from .exceptions import DropEvent
from .typing import EventDict, WrappedLogger
__all__ = [
"CapturedCall",
"CapturingLogger",
"CapturingLoggerFactory",
"LogCapture",
"ReturnLogger",
"ReturnLoggerFactory",
"capture_logs",
]
class LogCapture:
    """
    Class for capturing log messages in its entries list.

    Generally you should use `structlog.testing.capture_logs`,
    but you can use this class if you want to capture logs with other patterns.

    :ivar List[structlog.typing.EventDict] entries: The captured log entries.

    .. versionadded:: 20.1.0
    """

    entries: list[EventDict]

    def __init__(self) -> None:
        self.entries = []

    def __call__(
        self, _: WrappedLogger, method_name: str, event_dict: EventDict
    ) -> NoReturn:
        """
        Record *event_dict* (annotated with its log level) and drop the
        event so it never reaches the wrapped logger.
        """
        event_dict["log_level"] = method_name
        self.entries.append(event_dict)

        raise DropEvent
@contextmanager
def capture_logs() -> Generator[list[EventDict], None, None]:
    """
    Context manager that appends all logging statements to its yielded list
    while it is active. Disables all configured processors for the duration
    of the context manager.

    Attention: this is **not** thread-safe!

    .. versionadded:: 20.1.0
    """
    cap = LogCapture()
    # Modify `_Configuration.default_processors` set via `configure` but always
    # keep the list instance intact to not break references held by bound
    # loggers.
    processors = get_config()["processors"]
    # Copy (not alias) so the original chain survives the in-place clear.
    old_processors = processors.copy()
    try:
        # clear processors list and use LogCapture for testing
        processors.clear()
        processors.append(cap)
        configure(processors=processors)
        yield cap.entries
    finally:
        # remove LogCapture and restore original processors
        processors.clear()
        processors.extend(old_processors)
        configure(processors=processors)
class ReturnLogger:
    """
    Return the arguments that it's called with.

    >>> from structlog import ReturnLogger
    >>> ReturnLogger().info("hello")
    'hello'
    >>> ReturnLogger().info("hello", when="again")
    (('hello',), {'when': 'again'})

    .. versionchanged:: 0.3.0
        Allow for arbitrary arguments and keyword arguments to be passed in.
    """

    def msg(self, *args: Any, **kw: Any) -> Any:
        """
        Return ``args[0]`` for a lone positional argument, else ``(args, kw)``.
        """
        # Slightly convoluted for backwards compatibility.
        if kw or len(args) != 1:
            return args, kw

        return args[0]

    # Every level/alias shares the same echoing implementation.
    log = debug = info = warn = warning = msg
    fatal = failure = err = error = critical = exception = msg
class ReturnLoggerFactory:
    r"""
    Produce and cache `ReturnLogger`\ s.

    To be used with `structlog.configure`\ 's *logger_factory*.

    Positional arguments are silently ignored.

    .. versionadded:: 0.4.0
    """

    def __init__(self) -> None:
        # One shared instance suffices -- ReturnLogger carries no state.
        self._logger = ReturnLogger()

    def __call__(self, *args: Any) -> ReturnLogger:
        return self._logger
class CapturedCall(NamedTuple):
    """
    A call as captured by `CapturingLogger`.

    Can also be unpacked like a tuple.

    Arguments:
        method_name: The method name that got called.

        args: A tuple of the positional arguments.

        kwargs: A dict of the keyword arguments.

    .. versionadded:: 20.2.0
    """

    method_name: str
    args: tuple[Any, ...]
    kwargs: dict[str, Any]
class CapturingLogger:
    """
    Store the method calls that it's been called with.

    This is nicer than `ReturnLogger` for unit tests because the bound logger
    doesn't have to cooperate.

    **Any** method name is supported.

    .. versionadded:: 20.2.0
    """

    calls: list[CapturedCall]

    def __init__(self) -> None:
        self.calls = []

    def __repr__(self) -> str:
        return f"<CapturingLogger with {len(self.calls)} call(s)>"

    def __getattr__(self, name: str) -> Any:
        """
        Return a recorder that appends the call to ``self.calls``.
        """

        def record(*args: Any, **kw: Any) -> None:
            self.calls.append(CapturedCall(name, args, kw))

        return record
class CapturingLoggerFactory:
    r"""
    Produce and cache `CapturingLogger`\ s.

    Each factory produces and re-uses only **one** logger.

    You can access it via the ``logger`` attribute.

    To be used with `structlog.configure`\ 's *logger_factory*.

    Positional arguments are silently ignored.

    .. versionadded:: 20.2.0
    """

    # The single logger returned by every __call__ of this factory.
    logger: CapturingLogger

    def __init__(self) -> None:
        self.logger = CapturingLogger()

    def __call__(self, *args: Any) -> CapturingLogger:
        return self.logger

View File

@@ -0,0 +1,354 @@
# SPDX-License-Identifier: MIT OR Apache-2.0
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the MIT License. See the LICENSE file in the root of this
# repository for complete details.
"""
**Deprecated** primitives to keep context global but thread (and greenlet)
local.
See `thread-local`, but please use :doc:`contextvars` instead.
.. deprecated:: 22.1.0
"""
from __future__ import annotations
import contextlib
import sys
import threading
import uuid
import warnings
from typing import Any, Generator, Iterator, TypeVar
import structlog
from ._config import BoundLoggerLazyProxy
from .typing import BindableLogger, Context, EventDict, WrappedLogger
def _determine_threadlocal() -> type[Any]:
"""
Return a dict-like threadlocal storage depending on whether we run with
greenlets or not.
"""
try:
from ._greenlets import GreenThreadLocal
except ImportError:
from threading import local
return local
return GreenThreadLocal # pragma: no cover
ThreadLocal = _determine_threadlocal()
def _deprecated() -> None:
"""
Raise a warning with best-effort stacklevel adjustment.
"""
callsite = ""
with contextlib.suppress(Exception):
f = sys._getframe()
callsite = f.f_back.f_back.f_globals[ # type: ignore[union-attr]
"__name__"
]
# Avoid double warnings if TL functions call themselves.
if callsite == "structlog.threadlocal":
return
stacklevel = 3
# If a function is used as a decorator, we need to add two stack levels.
# This logic will probably break eventually, but it's not worth any more
# complexity.
if callsite == "contextlib":
stacklevel += 2
warnings.warn(
"`structlog.threadlocal` is deprecated, please use "
"`structlog.contextvars` instead.",
DeprecationWarning,
stacklevel=stacklevel,
)
def wrap_dict(dict_class: type[Context]) -> type[Context]:
    """
    Wrap a dict-like class and return the resulting class.

    The wrapped class is used to keep context global in the current thread.

    Arguments:
        dict_class: Class used for keeping context.

    .. deprecated:: 22.1.0
    """
    _deprecated()
    # Each call creates a distinct subclass (unique name via uuid4) so that
    # separately wrapped dicts get separate thread-local storage.
    Wrapped = type(
        "WrappedDict-" + str(uuid.uuid4()), (_ThreadLocalDictWrapper,), {}
    )
    Wrapped._tl = ThreadLocal()  # type: ignore[attr-defined]
    Wrapped._dict_class = dict_class  # type: ignore[attr-defined]

    return Wrapped
# Bound type variable so as_immutable() returns the same logger type it got.
TLLogger = TypeVar("TLLogger", bound=BindableLogger)


def as_immutable(logger: TLLogger) -> TLLogger:
    """
    Extract the context from a thread local logger into an immutable logger.

    Arguments:
        logger (structlog.typing.BindableLogger):
            A logger with *possibly* thread local state.

    Returns:
        :class:`~structlog.BoundLogger` with an immutable context.

    .. deprecated:: 22.1.0
    """
    _deprecated()
    if isinstance(logger, BoundLoggerLazyProxy):
        logger = logger.bind()  # type: ignore[assignment]

    try:
        # Snapshot the thread-local dict into a regular instance of the same
        # dict class, then attach it to a fresh bound logger.
        ctx = logger._context._tl.dict_.__class__(  # type: ignore[union-attr]
            logger._context._dict  # type: ignore[union-attr]
        )
        bl = logger.__class__(
            logger._logger,  # type: ignore[attr-defined, call-arg]
            processors=logger._processors,  # type: ignore[attr-defined]
            context={},
        )
        bl._context = ctx

        return bl
    except AttributeError:
        # Not a thread-local logger -- it is already immutable.
        return logger
@contextlib.contextmanager
def tmp_bind(
    logger: TLLogger, **tmp_values: Any
) -> Generator[TLLogger, None, None]:
    """
    Bind *tmp_values* to *logger* & memorize current state. Rewind afterwards.

    Only works with `structlog.threadlocal.wrap_dict`-based contexts.
    Use :func:`~structlog.threadlocal.bound_threadlocal` for new code.

    .. deprecated:: 22.1.0
    """
    _deprecated()
    # Snapshot the current context so it can be restored on exit.
    saved = as_immutable(logger)._context

    try:
        yield logger.bind(**tmp_values)  # type: ignore[misc]
    finally:
        # The context is shared thread-local state: clear and refill in place
        # instead of rebinding.
        logger._context.clear()
        logger._context.update(saved)
class _ThreadLocalDictWrapper:
    """
    Wrap a dict-like class and keep the state *global* but *thread-local*.

    Attempts to re-initialize only updates the wrapped dictionary.

    Useful for short-lived threaded applications like requests in web app.

    Use :func:`wrap` to instantiate and use
    :func:`structlog.BoundLogger.new` to clear the context.
    """

    # Thread-local storage holding the per-thread ``dict_`` attribute.
    # Both class attributes are injected by wrap_dict() on the subclass.
    _tl: Any
    # The dict-like class used to create each thread's fresh context.
    _dict_class: type[dict[str, Any]]

    def __init__(self, *args: Any, **kw: Any) -> None:
        """
        We cheat. A context dict gets never recreated.
        """
        if args and isinstance(args[0], self.__class__):
            # our state is global, no need to look at args[0] if it's of our
            # class
            self._dict.update(**kw)
        else:
            self._dict.update(*args, **kw)

    @property
    def _dict(self) -> Context:
        """
        Return or create and return the current context.
        """
        try:
            return self.__class__._tl.dict_
        except AttributeError:
            # First access in this thread: lazily create the context dict.
            self.__class__._tl.dict_ = self.__class__._dict_class()

            return self.__class__._tl.dict_

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}({self._dict!r})>"

    def __eq__(self, other: object) -> bool:
        # Same class == same dictionary
        return self.__class__ == other.__class__

    def __ne__(self, other: object) -> bool:
        return not self.__eq__(other)

    # Proxy methods necessary for structlog.
    # Dunder methods don't trigger __getattr__ so we need to proxy by hand.

    def __iter__(self) -> Iterator[str]:
        return self._dict.__iter__()

    def __setitem__(self, key: str, value: Any) -> None:
        self._dict[key] = value

    def __delitem__(self, key: str) -> None:
        self._dict.__delitem__(key)

    def __len__(self) -> int:
        return self._dict.__len__()

    def __getattr__(self, name: str) -> Any:
        return getattr(self._dict, name)
# Module-global storage backing the deprecated thread-local context helpers.
_CONTEXT = threading.local()
def get_threadlocal() -> Context:
    """
    Return a copy of the current thread-local context.

    .. versionadded:: 21.2.0
    .. deprecated:: 22.1.0
    """
    _deprecated()
    # Copy so callers can't mutate the live thread-local state.
    return _get_context().copy()
def get_merged_threadlocal(bound_logger: BindableLogger) -> Context:
    """
    Return a copy of the current thread-local context merged with the context
    from *bound_logger*.

    .. versionadded:: 21.2.0
    .. deprecated:: 22.1.0
    """
    _deprecated()
    # The bound logger's own context wins on key collisions.
    ctx = _get_context().copy()
    ctx.update(structlog.get_context(bound_logger))
    return ctx
def merge_threadlocal(
    logger: WrappedLogger, method_name: str, event_dict: EventDict
) -> EventDict:
    """
    A processor that merges in a global (thread-local) context.

    Use this as your first processor in :func:`structlog.configure` to ensure
    thread-local context is included in all log calls.

    .. versionadded:: 19.2.0

    .. versionchanged:: 20.1.0
       This function used to be called ``merge_threadlocal_context`` and that
       name is still kept around for backward compatibility.

    .. deprecated:: 22.1.0
    """
    _deprecated()
    # Event-dict values win over thread-local values on key collisions.
    context = _get_context().copy()
    context.update(event_dict)
    return context


# Alias that shouldn't be used anymore.
merge_threadlocal_context = merge_threadlocal
def clear_threadlocal() -> None:
    """
    Clear the thread-local context.

    The typical use-case for this function is to invoke it early in
    request-handling code.

    .. versionadded:: 19.2.0
    .. deprecated:: 22.1.0
    """
    _deprecated()
    # Rebinding to a fresh dict is enough -- _get_context() reads the
    # attribute, not a cached reference.
    _CONTEXT.context = {}
def bind_threadlocal(**kw: Any) -> None:
    """
    Put keys and values into the thread-local context.

    Use this instead of :func:`~structlog.BoundLogger.bind` when you want some
    context to be global (thread-local).

    .. versionadded:: 19.2.0
    .. deprecated:: 22.1.0
    """
    _deprecated()
    _get_context().update(kw)
def unbind_threadlocal(*keys: str) -> None:
    """
    Tries to remove bound *keys* from threadlocal logging context if present.

    .. versionadded:: 20.1.0
    .. deprecated:: 22.1.0
    """
    _deprecated()
    context = _get_context()
    for key in keys:
        # Missing keys are ignored on purpose (best-effort removal).
        context.pop(key, None)
@contextlib.contextmanager
def bound_threadlocal(**kw: Any) -> Generator[None, None, None]:
    """
    Bind *kw* to the current thread-local context. Unbind or restore *kw*
    afterwards. Do **not** affect other keys.

    Can be used as a context manager or decorator.

    .. versionadded:: 21.4.0
    .. deprecated:: 22.1.0
    """
    _deprecated()
    context = get_threadlocal()
    # Remember only the values of keys we are about to overwrite.
    saved = {k: context[k] for k in context.keys() & kw.keys()}

    bind_threadlocal(**kw)
    try:
        yield
    finally:
        # Remove everything we added, then restore any shadowed values.
        unbind_threadlocal(*kw.keys())
        bind_threadlocal(**saved)
def _get_context() -> Context:
    """
    Return this thread's context dict, creating it on first access.
    """
    if not hasattr(_CONTEXT, "context"):
        _CONTEXT.context = {}

    return _CONTEXT.context

View File

@@ -0,0 +1,286 @@
# SPDX-License-Identifier: MIT OR Apache-2.0
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the MIT License. See the LICENSE file in the root of this
# repository for complete details.
"""
Extract a structured traceback from an exception.
`Contributed by Will McGugan
<https://github.com/hynek/structlog/pull/407#issuecomment-1150926246>`_ from
`rich.traceback
<https://github.com/Textualize/rich/blob/972dedff/rich/traceback.py>`_.
"""
from __future__ import annotations
import os
from dataclasses import asdict, dataclass, field
from traceback import walk_tb
from types import TracebackType
from typing import Any, Tuple, Union
from .typing import ExcInfo
__all__ = [
"ExceptionDictTransformer",
"Frame",
"Stack",
"SyntaxError_",
"Trace",
"extract",
"safe_str",
"to_repr",
]
# Defaults for traceback extraction -- presumably consumed by extract() and
# ExceptionDictTransformer; confirm against the rest of this module.
SHOW_LOCALS = True
LOCALS_MAX_STRING = 80
MAX_FRAMES = 50

# ``exc_info`` shape as returned by ``sys.exc_info()``: a real exception
# triple, or the all-None triple when no exception is active.
OptExcInfo = Union[ExcInfo, Tuple[None, None, None]]
@dataclass
class Frame:
    """
    Represents a single stack frame.
    """
    # Path of the source file ("?" when unavailable).
    filename: str
    # 1-based line number within *filename*.
    lineno: int
    # Name of the function/method executing in this frame.
    name: str
    # Source line text; empty string when not captured.
    line: str = ""
    # Mapping of local variable name to its repr, or None when locals are
    # not captured (see the *show_locals* flag of extract()).
    locals: dict[str, str] | None = None
@dataclass
class SyntaxError_:  # noqa: N801
    """
    Contains detailed information about :exc:`SyntaxError` exceptions.
    """
    # Column offset of the error within *line* (0 when unknown).
    offset: int
    # File in which the syntax error occurred ("?" when unknown).
    filename: str
    # The offending source line's text.
    line: str
    # Line number of the error (0 when unknown).
    lineno: int
    # The error message carried by the SyntaxError.
    msg: str
@dataclass
class Stack:
    """
    Represents an exception and a list of stack frames.
    """
    # Name of the exception class (already stringified via safe_str).
    exc_type: str
    # str() of the exception instance.
    exc_value: str
    # Extra details when the exception is a SyntaxError, else None.
    syntax_error: SyntaxError_ | None = None
    # True when this exception was reached via ``__cause__`` of the
    # previously extracted stack (explicit ``raise ... from ...``).
    is_cause: bool = False
    # Frames of this exception's traceback, outermost first.
    frames: list[Frame] = field(default_factory=list)
@dataclass
class Trace:
    """
    Container for a list of stack traces.
    """
    # One Stack per exception in the chain, starting with the outermost
    # (most recently raised) exception.
    stacks: list[Stack]
def safe_str(_object: Any) -> str:
    """
    Return ``str(_object)`` without letting a raising ``__str__`` propagate.

    If stringification fails, a placeholder containing the repr of the
    raised error is returned instead.
    """
    try:
        result = str(_object)
    except Exception as error:  # noqa: BLE001
        result = f"<str-error {str(error)!r}>"
    return result
def to_repr(obj: Any, max_string: int | None = None) -> str:
    """
    Return a repr string for *obj*, guarding against a raising ``__repr__``.

    Strings are passed through verbatim (no added quotes). When *max_string*
    is given and the result is longer, it is truncated to *max_string*
    characters, shown via ``repr``, and suffixed with ``+N`` where ``N`` is
    the number of characters dropped.
    """
    if isinstance(obj, str):
        rendered = obj
    else:
        try:
            rendered = repr(obj)
        except Exception as error:  # noqa: BLE001
            rendered = f"<repr-error {str(error)!r}>"
    if max_string is not None and len(rendered) > max_string:
        overflow = len(rendered) - max_string
        rendered = f"{rendered[:max_string]!r}+{overflow}"
    return rendered
def extract(
    exc_type: type[BaseException],
    exc_value: BaseException,
    traceback: TracebackType | None,
    *,
    show_locals: bool = False,
    locals_max_string: int = LOCALS_MAX_STRING,
) -> Trace:
    """
    Extract traceback information.
    Arguments:
        exc_type: Exception type.
        exc_value: Exception value.
        traceback: Python Traceback object.
        show_locals: Enable display of local variables. Defaults to False.
        locals_max_string:
            Maximum length of string before truncating, or ``None`` to disable.
    Returns:
        A Trace instance with structured information about all exceptions.
    .. versionadded:: 22.1.0
    """
    stacks: list[Stack] = []
    is_cause = False
    # Walk the exception chain: each iteration converts one exception into a
    # Stack, then follows either ``__cause__`` (explicit ``raise ... from``)
    # or ``__context__`` (implicit chaining) to the next exception.
    while True:
        stack = Stack(
            exc_type=safe_str(exc_type.__name__),
            exc_value=safe_str(exc_value),
            is_cause=is_cause,
        )
        if isinstance(exc_value, SyntaxError):
            # SyntaxErrors carry extra position information worth surfacing;
            # missing attributes are normalized to 0 / "?" / "".
            stack.syntax_error = SyntaxError_(
                offset=exc_value.offset or 0,
                filename=exc_value.filename or "?",
                lineno=exc_value.lineno or 0,
                line=exc_value.text or "",
                msg=exc_value.msg,
            )
        stacks.append(stack)
        append = stack.frames.append  # pylint: disable=no-member
        for frame_summary, line_no in walk_tb(traceback):
            filename = frame_summary.f_code.co_filename
            # Synthetic filenames like "<stdin>" are kept as-is; real paths
            # are made absolute.
            if filename and not filename.startswith("<"):
                filename = os.path.abspath(filename)
            frame = Frame(
                filename=filename or "?",
                lineno=line_no,
                name=frame_summary.f_code.co_name,
                locals={
                    key: to_repr(value, max_string=locals_max_string)
                    for key, value in frame_summary.f_locals.items()
                }
                if show_locals
                else None,
            )
            append(frame)
        # Prefer the explicit cause over the implicit context.
        cause = getattr(exc_value, "__cause__", None)
        if cause and cause.__traceback__:
            exc_type = cause.__class__
            exc_value = cause
            traceback = cause.__traceback__
            is_cause = True
            continue
        # ``raise ... from None`` sets __suppress_context__; honor it.
        cause = exc_value.__context__
        if (
            cause
            and cause.__traceback__
            and not getattr(exc_value, "__suppress_context__", False)
        ):
            exc_type = cause.__class__
            exc_value = cause
            traceback = cause.__traceback__
            is_cause = False
            continue
        # No cover, code is reached but coverage doesn't recognize it.
        break  # pragma: no cover
    return Trace(stacks=stacks)
class ExceptionDictTransformer:
    """
    Return a list of exception stack dictionaries for an exception.

    These dictionaries are based on :class:`Stack` instances generated by
    :func:`extract()` and can be dumped to JSON.

    Arguments:
        show_locals:
            Whether or not to include the values of a stack frame's local
            variables.
        locals_max_string:
            The maximum length after which long string representations are
            truncated.
        max_frames:
            Maximum number of frames in each stack. Frames are removed from
            the inside out. The idea is, that the first frames represent your
            code responsible for the exception and last frames the code where
            the exception actually happened. With larger web frameworks, this
            does not always work, so you should stick with the default.

    Raises:
        ValueError: If *locals_max_string* is negative or *max_frames* < 2.

    .. seealso::
        :doc:`exceptions` for a broader explanation of *structlog*'s exception
        features.
    """

    def __init__(
        self,
        show_locals: bool = True,
        locals_max_string: int = LOCALS_MAX_STRING,
        max_frames: int = MAX_FRAMES,
    ) -> None:
        # Validate eagerly so misconfiguration fails at construction time.
        if locals_max_string < 0:
            msg = f'"locals_max_string" must be >= 0: {locals_max_string}'
            raise ValueError(msg)
        if max_frames < 2:
            msg = f'"max_frames" must be >= 2: {max_frames}'
            raise ValueError(msg)
        self.show_locals = show_locals
        self.locals_max_string = locals_max_string
        self.max_frames = max_frames

    def __call__(self, exc_info: ExcInfo) -> list[dict[str, Any]]:
        trace = extract(
            *exc_info,
            show_locals=self.show_locals,
            locals_max_string=self.locals_max_string,
        )
        for stack in trace.stacks:
            frame_count = len(stack.frames)
            if frame_count <= self.max_frames:
                continue
            # Keep the first and last *half* frames; integer division
            # handles odd limits correctly.
            half = self.max_frames // 2
            skipped = frame_count - 2 * half
            placeholder = Frame(
                filename="",
                lineno=-1,
                name=f"Skipped frames: {skipped}",
            )
            stack.frames[:] = [
                *stack.frames[:half],
                placeholder,
                *stack.frames[-half:],
            ]
        return [asdict(stack) for stack in trace.stacks]

View File

@@ -0,0 +1,333 @@
# SPDX-License-Identifier: MIT OR Apache-2.0
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the MIT License. See the LICENSE file in the root of this
# repository for complete details.
"""
Processors and tools specific to the `Twisted <https://twisted.org/>`_
networking engine.
See also :doc:`structlog's Twisted support <twisted>`.
"""
from __future__ import annotations
import json
import sys
from typing import Any, Callable, Sequence, TextIO
from twisted.python import log
from twisted.python.failure import Failure
from twisted.python.log import ILogObserver, textFromEventDict
from zope.interface import implementer
from ._base import BoundLoggerBase
from ._config import _BUILTIN_DEFAULT_PROCESSORS
from ._utils import until_not_interrupted
from .processors import JSONRenderer as GenericJSONRenderer
from .typing import EventDict, WrappedLogger
class BoundLogger(BoundLoggerBase):
    """
    Twisted-specific version of `structlog.BoundLogger`.
    Works exactly like the generic one except that it takes advantage of
    knowing the logging methods in advance.
    Use it like::
        configure(
            wrapper_class=structlog.twisted.BoundLogger,
        )
    """
    def msg(self, event: str | None = None, **kw: Any) -> Any:
        """
        Process event and call ``log.msg()`` with the result.
        """
        # Run the processor chain and forward the result to the wrapped
        # logger's ``msg`` method.
        return self._proxy_to_logger("msg", event, **kw)
    def err(self, event: str | None = None, **kw: Any) -> Any:
        """
        Process event and call ``log.err()`` with the result.
        """
        return self._proxy_to_logger("err", event, **kw)
class LoggerFactory:
    """
    Build a Twisted logger when an *instance* is called.
    >>> from structlog import configure
    >>> from structlog.twisted import LoggerFactory
    >>> configure(logger_factory=LoggerFactory())
    """
    def __call__(self, *args: Any) -> WrappedLogger:
        """
        Positional arguments are silently ignored.
        :rvalue: A new Twisted logger.
        .. versionchanged:: 0.4.0
            Added support for optional positional arguments.
        """
        # Twisted's legacy logging is module-global, so every "logger"
        # returned here is simply the twisted.python.log module itself.
        return log
# Types that count as a "failure" argument to log.err(): real exceptions and
# Twisted Failure objects.
_FAIL_TYPES = (BaseException, Failure)
def _extractStuffAndWhy(eventDict: EventDict) -> tuple[Any, Any, EventDict]:
    """
    Removes all possible *_why*s and *_stuff*s, analyzes exc_info and returns
    a tuple of ``(_stuff, _why, eventDict)``.
    **Modifies** *eventDict*!
    """
    _stuff = eventDict.pop("_stuff", None)
    _why = eventDict.pop("_why", None)
    event = eventDict.pop("event", None)
    # A failure passed both positionally (as *event*) and as *_stuff* is
    # ambiguous -- refuse to guess.
    if isinstance(_stuff, _FAIL_TYPES) and isinstance(event, _FAIL_TYPES):
        raise ValueError("Both _stuff and event contain an Exception/Failure.")
    # `log.err('event', _why='alsoEvent')` is ambiguous.
    if _why and isinstance(event, str):
        raise ValueError("Both `_why` and `event` supplied.")
    # Two failures are ambiguous too.
    if not isinstance(_stuff, _FAIL_TYPES) and isinstance(event, _FAIL_TYPES):
        _why = _why or "error"
        _stuff = event
    if isinstance(event, str):
        _why = event
    # If no failure was supplied but an exception is currently being
    # handled, capture it -- mirroring what log.err() does.
    if not _stuff and sys.exc_info() != (None, None, None):
        _stuff = Failure()  # type: ignore[no-untyped-call]
    # Either we used the error ourselves or the user supplied one for
    # formatting. Avoid log.err() to dump another traceback into the log.
    if isinstance(_stuff, BaseException) and not isinstance(_stuff, Failure):
        _stuff = Failure(_stuff)  # type: ignore[no-untyped-call]
    return _stuff, _why, eventDict
class ReprWrapper:
    """
    Wrap a string so that ``repr()`` returns it verbatim.

    This is needed for ``twisted.python.log.err`` that calls `repr` on
    ``_stuff``:

    >>> repr("foo")
    "'foo'"
    >>> repr(ReprWrapper("foo"))
    'foo'

    Note the extra quotes in the unwrapped example.
    """

    def __init__(self, string: str) -> None:
        self.string = string

    def __eq__(self, other: object) -> bool:
        """
        Check for equality, just for tests.
        """
        if not isinstance(other, self.__class__):
            return False
        return self.string == other.string

    def __repr__(self) -> str:
        # Return the wrapped string as-is -- no quoting.
        return self.string
class JSONRenderer(GenericJSONRenderer):
    """
    Behaves like `structlog.processors.JSONRenderer` except that it formats
    tracebacks and failures itself if called with ``err()``.

    .. note::
        This ultimately means that the messages get logged out using ``msg()``,
        and *not* ``err()`` which renders failures in separate lines.
        Therefore it will break your tests that contain assertions using
        `flushLoggedErrors
        <https://docs.twisted.org/en/stable/api/
        twisted.trial.unittest.SynchronousTestCase.html#flushLoggedErrors>`_.

    *Not* an adapter like `EventAdapter` but a real formatter. Also does *not*
    require to be adapted using it.

    Use together with a `JSONLogObserverWrapper`-wrapped Twisted logger like
    `plainJSONStdOutLogger` for pure-JSON logs.
    """

    def __call__(  # type: ignore[override]
        self,
        logger: WrappedLogger,
        name: str,
        eventDict: EventDict,
    ) -> tuple[Sequence[Any], dict[str, Any]]:
        _stuff, _why, eventDict = _extractStuffAndWhy(eventDict)
        # Error and non-error entries alike are logged under the extracted
        # "why"; errors additionally attach the rendered traceback.
        eventDict["event"] = _why
        if name == "err" and isinstance(_stuff, Failure):
            eventDict["exception"] = _stuff.getTraceback(detail="verbose")
            _stuff.cleanFailure()  # type: ignore[no-untyped-call]
        rendered = GenericJSONRenderer.__call__(  # type: ignore[arg-type]
            self, logger, name, eventDict
        )
        # ReprWrapper makes twisted's repr() call emit the raw JSON, and the
        # "_structlog" flag tells JSONLogObserverWrapper to pass it through.
        return ((ReprWrapper(rendered),), {"_structlog": True})
@implementer(ILogObserver)
class PlainFileLogObserver:
    """
    Write only the plain message without timestamps or anything else.

    Great to just print JSON to stdout where you catch it with something like
    runit.

    Arguments:
        file: File to print to.

    .. versionadded:: 0.2.0
    """

    def __init__(self, file: TextIO) -> None:
        # Bind the bound methods once; __call__ needs nothing but
        # write/flush.
        self._write = file.write
        self._flush = file.flush

    def __call__(self, eventDict: EventDict) -> None:
        text = textFromEventDict(eventDict)  # type: ignore[arg-type]
        # until_not_interrupted retries the call if it gets interrupted
        # (see structlog._utils).
        until_not_interrupted(self._write, text + "\n")  # type: ignore[operator]
        until_not_interrupted(self._flush)
@implementer(ILogObserver)
class JSONLogObserverWrapper:
    """
    Wrap a log *observer* and render non-`JSONRenderer` entries to JSON.
    Arguments:
        observer (ILogObserver):
            Twisted log observer to wrap. For example
            :class:`PlainFileObserver` or Twisted's stock `FileLogObserver
            <https://docs.twisted.org/en/stable/api/
            twisted.python.log.FileLogObserver.html>`_
    .. versionadded:: 0.2.0
    """
    def __init__(self, observer: Any) -> None:
        self._observer = observer
    # NOTE(review): the ``-> str`` annotation mirrors whatever the wrapped
    # observer returns; confirm that it is accurate for the observers in use.
    def __call__(self, eventDict: EventDict) -> str:
        # Entries rendered by our JSONRenderer are marked with "_structlog"
        # and passed through untouched; everything else is re-rendered as a
        # JSON object here.
        if "_structlog" not in eventDict:
            # The trailing comma below makes "message" a one-element tuple --
            # presumably matching twisted's message-tuple convention; confirm
            # before "simplifying" it away.
            eventDict["message"] = (
                json.dumps(
                    {
                        "event": textFromEventDict(
                            eventDict  # type: ignore[arg-type]
                        ),
                        "system": eventDict.get("system"),
                    }
                ),
            )
            eventDict["_structlog"] = True
        return self._observer(eventDict)
def plainJSONStdOutLogger() -> JSONLogObserverWrapper:
    """
    Return a logger that writes only the message to stdout.

    Transforms non-`JSONRenderer` messages to JSON.

    Ideal for JSONifying log entries from Twisted plugins and libraries that
    are outside of your control::

        $ twistd -n --logger structlog.twisted.plainJSONStdOutLogger web
        {"event": "Log opened.", "system": "-"}
        {"event": "twistd 13.1.0 (python 2.7.3) starting up.", "system": "-"}
        {"event": "reactor class: twisted...EPollReactor.", "system": "-"}
        {"event": "Site starting on 8080", "system": "-"}
        {"event": "Starting factory <twisted.web.server.Site ...>", ...}
        ...

    Composes `PlainFileLogObserver` and `JSONLogObserverWrapper` to a usable
    logger.

    .. versionadded:: 0.2.0
    """
    # Plain writer to stdout, wrapped so that foreign entries come out as
    # JSON too.
    stdout_observer = PlainFileLogObserver(sys.stdout)
    return JSONLogObserverWrapper(stdout_observer)
class EventAdapter:
    """
    Adapt an ``event_dict`` to the Twisted logging system.

    Particularly, make a wrapped `twisted.python.log.err
    <https://docs.twisted.org/en/stable/api/twisted.python.log.html#err>`_
    behave as expected.

    Arguments:
        dictRenderer:
            Renderer that is used for the actual log message. Please note that
            structlog comes with a dedicated `JSONRenderer`.

    **Must** be the last processor in the chain and requires a *dictRenderer*
    for the actual formatting as a constructor argument in order to be able to
    fully support the original behaviors of ``log.msg()`` and ``log.err()``.
    """

    def __init__(
        self,
        dictRenderer: Callable[[WrappedLogger, str, EventDict], str]
        | None = None,
    ) -> None:
        # Fall back to the last processor of structlog's built-in default
        # chain, i.e. its default renderer.
        self._dictRenderer = dictRenderer or _BUILTIN_DEFAULT_PROCESSORS[-1]

    def __call__(
        self, logger: WrappedLogger, name: str, eventDict: EventDict
    ) -> Any:
        if name != "err":
            return self._dictRenderer(logger, name, eventDict)
        # Normalize the various ways log.err() can be invoked:
        #   1. log.err(failure, _why='event', **kw)
        #   2. log.err('event', **kw)
        #   3. log.err(_stuff=failure, _why='event', **kw)
        _stuff, _why, eventDict = _extractStuffAndWhy(eventDict)
        eventDict["event"] = _why
        return (
            (),
            {
                "_stuff": _stuff,
                "_why": self._dictRenderer(logger, name, eventDict),
            },
        )

View File

@@ -0,0 +1,38 @@
# SPDX-License-Identifier: MIT OR Apache-2.0
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the MIT License. See the LICENSE file in the root of this
# repository for complete details.
"""
Deprecated name for :mod:`structlog.typing`.
.. versionadded:: 20.2.0
.. deprecated:: 22.2.0
"""
from __future__ import annotations
from .typing import (
BindableLogger,
Context,
EventDict,
ExceptionRenderer,
ExceptionTransformer,
ExcInfo,
FilteringBoundLogger,
Processor,
WrappedLogger,
)
# Names re-exported from structlog.typing; this module exists only for
# backward compatibility (see the module docstring's deprecation notice).
__all__ = (
    "WrappedLogger",
    "Context",
    "EventDict",
    "Processor",
    "ExcInfo",
    "ExceptionRenderer",
    "ExceptionTransformer",
    "BindableLogger",
    "FilteringBoundLogger",
)

View File

@@ -0,0 +1,309 @@
# SPDX-License-Identifier: MIT OR Apache-2.0
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the MIT License. See the LICENSE file in the root of this
# repository for complete details.
"""
Type information used throughout *structlog*.
For now, they are considered provisional. Especially `BindableLogger` will
probably change to something more elegant.
.. versionadded:: 22.2.0
"""
from __future__ import annotations
from types import TracebackType
from typing import (
Any,
Callable,
Dict,
Mapping,
MutableMapping,
Optional,
Protocol,
TextIO,
Tuple,
Type,
Union,
runtime_checkable,
)
WrappedLogger = Any
"""
A logger that is wrapped by a bound logger and is ultimately responsible for
the output of the log entries.
*structlog* makes *no* assumptions about it.
.. versionadded:: 20.2.0
"""
# NOTE(review): the Union below appears to work around Dict's invariance in
# type checkers, so that both ``dict[str, Any]`` and plain ``dict`` contexts
# are accepted -- confirm before tightening to a single form.
Context = Union[Dict[str, Any], Dict[Any, Any]]
"""
A dict-like context carrier.
.. versionadded:: 20.2.0
"""
EventDict = MutableMapping[str, Any]
"""
An event dictionary as it is passed into processors.
It's created by copying the configured `Context` but doesn't need to support
copy itself.
.. versionadded:: 20.2.0
"""
# A processor is called with (wrapped logger, method name, event dict) and
# returns either a mapping (handed to the next processor) or a final
# renderable value (str/bytes/bytearray/tuple).
Processor = Callable[
    [WrappedLogger, str, EventDict],
    Union[Mapping[str, Any], str, bytes, bytearray, Tuple[Any, ...]],
]
"""
A callable that is part of the processor chain.
See :doc:`processors`.
.. versionadded:: 20.2.0
"""
ExcInfo = Tuple[Type[BaseException], BaseException, Optional[TracebackType]]
"""
An exception info tuple as returned by `sys.exc_info`.
.. versionadded:: 20.2.0
"""
ExceptionRenderer = Callable[[TextIO, ExcInfo], None]
"""
A callable that pretty-prints an `ExcInfo` into a file-like object.
Used by `structlog.dev.ConsoleRenderer`.
.. versionadded:: 21.2.0
"""
@runtime_checkable
class ExceptionTransformer(Protocol):
    """
    **Protocol:** A callable that transforms an `ExcInfo` into another
    datastructure.
    The result should be something that your renderer can work with, e.g., a
    ``str`` or a JSON-serializable ``dict``.
    Used by `structlog.processors.format_exc_info()` and
    `structlog.processors.ExceptionPrettyPrinter`.
    Arguments:
        exc_info: Is the exception tuple to format
    Returns:
        Anything that can be rendered by the last processor in your chain,
        for example, a string or a JSON-serializable structure.
    .. versionadded:: 22.1.0
    """
    # @runtime_checkable permits isinstance() checks against this protocol;
    # note such checks only verify that __call__ exists, not its signature.
    def __call__(self, exc_info: ExcInfo) -> Any:
        ...
@runtime_checkable
class BindableLogger(Protocol):
    """
    **Protocol**: Methods shared among all bound loggers and that are relied on
    by *structlog*.
    .. versionadded:: 20.2.0
    """
    # The currently bound key-value context of this logger.
    _context: Context
    # Return a new logger with *new_values* added to the existing context.
    def bind(self, **new_values: Any) -> BindableLogger:
        ...
    # Return a new logger with *keys* removed from the context.
    def unbind(self, *keys: str) -> BindableLogger:
        ...
    # Like unbind, but best effort: missing keys are ignored.
    def try_unbind(self, *keys: str) -> BindableLogger:
        ...
    # Clear the context, then bind *new_values*.
    def new(self, **new_values: Any) -> BindableLogger:
        ...
class FilteringBoundLogger(BindableLogger, Protocol):
"""
**Protocol**: A `BindableLogger` that filters by a level.
The only way to instantiate one is using `make_filtering_bound_logger`.
.. versionadded:: 20.2.0
.. versionadded:: 22.2.0 String interpolation using positional arguments.
.. versionadded:: 22.2.0
Async variants ``alog()``, ``adebug()``, ``ainfo()``, and so forth.
.. versionchanged:: 22.3.0
String interpolation is only attempted if positional arguments are
passed.
"""
def bind(self, **new_values: Any) -> FilteringBoundLogger:
"""
Return a new logger with *new_values* added to the existing ones.
.. versionadded:: 22.1.0
"""
def unbind(self, *keys: str) -> FilteringBoundLogger:
"""
Return a new logger with *keys* removed from the context.
.. versionadded:: 22.1.0
"""
def try_unbind(self, *keys: str) -> FilteringBoundLogger:
"""
Like :meth:`unbind`, but best effort: missing keys are ignored.
.. versionadded:: 22.1.0
"""
def new(self, **new_values: Any) -> FilteringBoundLogger:
"""
Clear context and binds *initial_values* using `bind`.
.. versionadded:: 22.1.0
"""
def debug(self, event: str, *args: Any, **kw: Any) -> Any:
"""
Log ``event % args`` with **kw** at **debug** level.
"""
async def adebug(self, event: str, *args: Any, **kw: Any) -> Any:
"""
Log ``event % args`` with **kw** at **debug** level.
..versionadded:: 22.2.0
"""
def info(self, event: str, *args: Any, **kw: Any) -> Any:
"""
Log ``event % args`` with **kw** at **info** level.
"""
async def ainfo(self, event: str, *args: Any, **kw: Any) -> Any:
"""
Log ``event % args`` with **kw** at **info** level.
..versionadded:: 22.2.0
"""
def warning(self, event: str, *args: Any, **kw: Any) -> Any:
"""
Log ``event % args`` with **kw** at **warn** level.
"""
async def awarning(self, event: str, *args: Any, **kw: Any) -> Any:
"""
Log ``event % args`` with **kw** at **warn** level.
..versionadded:: 22.2.0
"""
def warn(self, event: str, *args: Any, **kw: Any) -> Any:
"""
Log ``event % args`` with **kw** at **warn** level.
"""
async def awarn(self, event: str, *args: Any, **kw: Any) -> Any:
"""
Log ``event % args`` with **kw** at **warn** level.
..versionadded:: 22.2.0
"""
def error(self, event: str, *args: Any, **kw: Any) -> Any:
"""
Log ``event % args`` with **kw** at **error** level.
"""
async def aerror(self, event: str, *args: Any, **kw: Any) -> Any:
"""
Log ``event % args`` with **kw** at **error** level.
..versionadded:: 22.2.0
"""
def err(self, event: str, *args: Any, **kw: Any) -> Any:
"""
Log ``event % args`` with **kw** at **error** level.
"""
def fatal(self, event: str, *args: Any, **kw: Any) -> Any:
"""
Log ``event % args`` with **kw** at **critical** level.
"""
async def afatal(self, event: str, *args: Any, **kw: Any) -> Any:
"""
Log ``event % args`` with **kw** at **critical** level.
..versionadded:: 22.2.0
"""
def exception(self, event: str, *args: Any, **kw: Any) -> Any:
"""
Log ``event % args`` with **kw** at **error** level and ensure that
``exc_info`` is set in the event dictionary.
"""
async def aexception(self, event: str, *args: Any, **kw: Any) -> Any:
"""
Log ``event % args`` with **kw** at **error** level and ensure that
``exc_info`` is set in the event dictionary.
..versionadded:: 22.2.0
"""
def critical(self, event: str, *args: Any, **kw: Any) -> Any:
"""
Log ``event % args`` with **kw** at **critical** level.
"""
async def acritical(self, event: str, *args: Any, **kw: Any) -> Any:
"""
Log ``event % args`` with **kw** at **critical** level.
..versionadded:: 22.2.0
"""
def msg(self, event: str, *args: Any, **kw: Any) -> Any:
"""
Log ``event % args`` with **kw** at **info** level.
"""
async def amsg(self, event: str, *args: Any, **kw: Any) -> Any:
"""
Log ``event % args`` with **kw** at **info** level.
"""
def log(self, level: int, event: str, *args: Any, **kw: Any) -> Any:
"""
Log ``event % args`` with **kw** at *level*.
"""
async def alog(self, level: int, event: str, *args: Any, **kw: Any) -> Any:
"""
Log ``event % args`` with **kw** at *level*.
"""