# Radicale/radicale/log.py
# This file is part of Radicale - CalDAV and CardDAV server
# Copyright © 2011-2017 Guillaume Ayoub
# Copyright © 2017-2023 Unrud <unrud@outlook.com>
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
"""
2020-01-12 23:32:28 +01:00
Functions to set up Python's logging facility for Radicale's WSGI application.
2011-04-10 18:17:06 +02:00
2020-01-12 23:32:28 +01:00
Log messages are sent to the first available target of:
2020-02-19 09:50:45 +01:00
- Error stream specified by the WSGI server in "wsgi.errors"
- ``sys.stderr``
2020-01-12 23:32:28 +01:00
2011-04-10 18:17:06 +02:00
"""
import contextlib
2023-03-22 10:23:54 +01:00
import io
2011-04-10 18:17:06 +02:00
import logging
2018-08-16 07:59:57 +02:00
import os
import socket
import struct
2016-07-04 14:32:33 +02:00
import sys
import threading
import time
2023-03-22 10:23:54 +01:00
from typing import (Any, Callable, ClassVar, Dict, Iterator, Mapping, Optional,
Tuple, Union, cast)
2011-04-10 18:17:06 +02:00
2021-07-26 20:56:46 +02:00
from radicale import types
2021-07-26 20:56:46 +02:00
LOGGER_NAME: str = "radicale"
2023-03-21 16:48:39 +01:00
LOGGER_FORMATS: Mapping[str, str] = {
"verbose": "[%(asctime)s] [%(ident)s] [%(levelname)s] %(message)s",
"journal": "[%(ident)s] [%(levelname)s] %(message)s",
}
2021-07-26 20:56:46 +02:00
DATE_FORMAT: str = "%Y-%m-%d %H:%M:%S %z"
logger: logging.Logger = logging.getLogger(LOGGER_NAME)
class RemoveTracebackFilter(logging.Filter):
2021-07-26 20:56:46 +02:00
def filter(self, record: logging.LogRecord) -> bool:
record.exc_info = None
return True
2021-07-26 20:56:46 +02:00
REMOVE_TRACEBACK_FILTER: logging.Filter = RemoveTracebackFilter()
class IdentLogRecordFactory:
"""LogRecordFactory that adds ``ident`` attribute."""
2021-07-26 20:56:46 +02:00
def __init__(self, upstream_factory: Callable[..., logging.LogRecord]
) -> None:
self._upstream_factory = upstream_factory
2021-07-26 20:56:46 +02:00
def __call__(self, *args: Any, **kwargs: Any) -> logging.LogRecord:
record = self._upstream_factory(*args, **kwargs)
2023-03-21 16:48:39 +01:00
ident = ("%d" % record.process if record.process is not None
else record.processName or "unknown")
tid = None
if record.thread is not None:
if record.thread != threading.main_thread().ident:
ident += "/%s" % (record.threadName or "unknown")
if (sys.version_info >= (3, 8) and
record.thread == threading.get_ident()):
tid = threading.get_native_id()
2021-07-26 20:56:46 +02:00
record.ident = ident # type:ignore[attr-defined]
2023-03-21 16:48:39 +01:00
record.tid = tid # type:ignore[attr-defined]
return record
2020-02-19 09:50:45 +01:00
class ThreadedStreamHandler(logging.Handler):
"""Sends logging output to the stream registered for the current thread or
``sys.stderr`` when no stream was registered."""
2018-08-16 07:59:56 +02:00
2021-07-26 20:56:46 +02:00
terminator: ClassVar[str] = "\n"
_streams: Dict[int, types.ErrorStream]
_journal_stream_id: Optional[Tuple[int, int]]
_journal_socket: Optional[socket.socket]
2023-03-22 10:23:54 +01:00
_journal_socket_failed: bool
2023-03-21 16:48:39 +01:00
_formatters: Mapping[str, logging.Formatter]
_formatter: Optional[logging.Formatter]
2018-08-16 07:59:56 +02:00
2023-03-21 16:48:39 +01:00
def __init__(self, format_name: Optional[str] = None) -> None:
2018-08-16 07:59:56 +02:00
super().__init__()
self._streams = {}
self._journal_stream_id = None
with contextlib.suppress(TypeError, ValueError):
dev, inode = os.environ.get("JOURNAL_STREAM", "").split(":", 1)
2023-03-22 10:23:54 +01:00
self._journal_stream_id = (int(dev), int(inode))
self._journal_socket = None
2023-03-22 10:23:54 +01:00
self._journal_socket_failed = False
2023-03-21 16:48:39 +01:00
self._formatters = {name: logging.Formatter(fmt, DATE_FORMAT)
for name, fmt in LOGGER_FORMATS.items()}
self._formatter = (self._formatters[format_name]
if format_name is not None else None)
def _get_formatter(self, default_format_name: str) -> logging.Formatter:
return self._formatter or self._formatters[default_format_name]
2023-03-22 10:23:54 +01:00
def _detect_journal(self, stream: types.ErrorStream) -> bool:
if not self._journal_stream_id or not isinstance(stream, io.IOBase):
return False
try:
stat = os.fstat(stream.fileno())
2023-03-22 10:23:54 +01:00
except OSError:
return False
return self._journal_stream_id == (stat.st_dev, stat.st_ino)
@staticmethod
2023-03-22 10:23:54 +01:00
def _encode_journal(data: Mapping[str, Optional[Union[str, int]]]
) -> bytes:
msg = b""
for key, value in data.items():
if value is None:
continue
2023-03-22 10:23:54 +01:00
keyb = key.encode()
valueb = str(value).encode()
if b"\n" in valueb:
msg += (keyb + b"\n" +
struct.pack("<Q", len(valueb)) + valueb + b"\n")
else:
2023-03-22 10:23:54 +01:00
msg += keyb + b"=" + valueb + b"\n"
return msg
2023-03-22 10:23:54 +01:00
def _try_emit_journal(self, record: logging.LogRecord) -> bool:
if not self._journal_socket:
# Try to connect to systemd journal socket
if self._journal_socket_failed or not hasattr(socket, "AF_UNIX"):
return False
journal_socket = None
try:
journal_socket = socket.socket(
socket.AF_UNIX, socket.SOCK_DGRAM)
journal_socket.connect("/run/systemd/journal/socket")
2023-03-22 10:23:57 +01:00
except OSError as e:
2023-03-22 10:23:54 +01:00
self._journal_socket_failed = True
if journal_socket:
journal_socket.close()
2023-03-22 10:23:57 +01:00
# Log after setting `_journal_socket_failed` to prevent loop!
logger.error("Failed to connect to systemd journal: %s",
e, exc_info=True)
2023-03-22 10:23:54 +01:00
return False
self._journal_socket = journal_socket
priority = {"DEBUG": 7,
"INFO": 6,
"WARNING": 4,
"ERROR": 3,
"CRITICAL": 2}.get(record.levelname, 4)
timestamp = time.strftime("%Y-%m-%dT%H:%M:%S.%%03dZ",
time.gmtime(record.created)) % record.msecs
data = {"PRIORITY": priority,
2023-03-22 10:23:54 +01:00
"TID": cast(Optional[int], getattr(record, "tid", None)),
"SYSLOG_IDENTIFIER": record.name,
"SYSLOG_FACILITY": 1,
"SYSLOG_PID": record.process,
"SYSLOG_TIMESTAMP": timestamp,
"CODE_FILE": record.pathname,
"CODE_LINE": record.lineno,
"CODE_FUNC": record.funcName,
2023-03-21 16:48:39 +01:00
"MESSAGE": self._get_formatter("journal").format(record)}
self._journal_socket.sendall(self._encode_journal(data))
2023-03-22 10:23:54 +01:00
return True
2018-08-16 07:59:56 +02:00
2021-07-26 20:56:46 +02:00
def emit(self, record: logging.LogRecord) -> None:
2018-08-16 07:59:56 +02:00
try:
2020-02-19 09:50:45 +01:00
stream = self._streams.get(threading.get_ident(), sys.stderr)
2023-03-22 10:23:54 +01:00
if self._detect_journal(stream) and self._try_emit_journal(record):
return
2023-03-21 16:48:39 +01:00
msg = self._get_formatter("verbose").format(record)
2023-03-22 10:23:55 +01:00
stream.write(msg + self.terminator)
2023-03-22 10:23:56 +01:00
stream.flush()
2018-08-16 07:59:56 +02:00
except Exception:
self.handleError(record)
2021-07-26 20:56:46 +02:00
@types.contextmanager
def register_stream(self, stream: types.ErrorStream) -> Iterator[None]:
2020-02-19 09:50:45 +01:00
"""Register stream for logging output of the current thread."""
2018-08-16 07:59:56 +02:00
key = threading.get_ident()
self._streams[key] = stream
try:
yield
finally:
del self._streams[key]
2021-07-26 20:56:46 +02:00
@types.contextmanager
def register_stream(stream: types.ErrorStream) -> Iterator[None]:
2020-02-19 09:50:45 +01:00
"""Register stream for logging output of the current thread."""
2018-08-16 07:59:56 +02:00
yield
2021-07-26 20:56:46 +02:00
def setup() -> None:
"""Set global logging up."""
2018-09-06 09:12:52 +02:00
global register_stream
2023-03-21 16:48:39 +01:00
format_name = os.environ.get("RADICALE_LOG_FORMAT") or None
sane_format_name = format_name if format_name in LOGGER_FORMATS else None
handler = ThreadedStreamHandler(sane_format_name)
logging.basicConfig(handlers=[handler])
2018-08-16 07:59:56 +02:00
register_stream = handler.register_stream
log_record_factory = IdentLogRecordFactory(logging.getLogRecordFactory())
logging.setLogRecordFactory(log_record_factory)
2024-06-09 13:43:23 +02:00
set_level(logging.INFO, True)
2023-03-21 16:48:39 +01:00
if format_name != sane_format_name:
logger.error("Invalid RADICALE_LOG_FORMAT: %r", format_name)
logger_display_backtrace_disabled: bool = False
logger_display_backtrace_enabled: bool = False
def set_level(level: Union[int, str], backtrace_on_debug: bool) -> None:
"""Set logging level for global logger."""
global logger_display_backtrace_disabled
global logger_display_backtrace_enabled
if isinstance(level, str):
level = getattr(logging, level.upper())
2021-07-26 20:56:46 +02:00
assert isinstance(level, int)
logger.setLevel(level)
2021-07-26 20:56:46 +02:00
if level > logging.DEBUG:
if logger_display_backtrace_disabled is False:
logger.info("Logging of backtrace is disabled in this loglevel")
logger_display_backtrace_disabled = True
2020-01-17 12:45:01 +01:00
logger.addFilter(REMOVE_TRACEBACK_FILTER)
else:
if not backtrace_on_debug:
if logger_display_backtrace_disabled is False:
logger.debug("Logging of backtrace is disabled by option in this loglevel")
logger_display_backtrace_disabled = True
logger.addFilter(REMOVE_TRACEBACK_FILTER)
else:
if logger_display_backtrace_enabled is False:
logger.debug("Logging of backtrace is enabled by option in this loglevel")
logger_display_backtrace_enabled = True
logger.removeFilter(REMOVE_TRACEBACK_FILTER)