# This file is part of Radicale - CalDAV and CardDAV server
# Copyright © 2011-2017 Guillaume Ayoub
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
"""
Functions to set up Python's logging facility for Radicale's WSGI application.

Log messages are sent to the first available target of:

  - Error stream specified by the WSGI server in "wsgi.errors"
  - ``sys.stderr``

"""
import contextlib
import logging
import os
import socket
import struct
import sys
import threading
import time
from typing import (Any, Callable, ClassVar, Dict, Iterator, Optional, Tuple,
                    Union)

from radicale import types

LOGGER_NAME: str = "radicale"
LOGGER_FORMAT: str = "[%(asctime)s] [%(ident)s] [%(levelname)s] %(message)s"
DATE_FORMAT: str = "%Y-%m-%d %H:%M:%S %z"

logger: logging.Logger = logging.getLogger(LOGGER_NAME)


class RemoveTracebackFilter(logging.Filter):

    def filter(self, record: logging.LogRecord) -> bool:
        record.exc_info = None
        return True


REMOVE_TRACEBACK_FILTER: logging.Filter = RemoveTracebackFilter()


class IdentLogRecordFactory:
    """LogRecordFactory that adds ``ident`` attribute."""

    def __init__(self, upstream_factory: Callable[..., logging.LogRecord]
                 ) -> None:
        self._upstream_factory = upstream_factory

    def __call__(self, *args: Any, **kwargs: Any) -> logging.LogRecord:
        record = self._upstream_factory(*args, **kwargs)
        ident = "%d" % os.getpid()
        main_thread = threading.main_thread()
        current_thread = threading.current_thread()
        if current_thread.name and main_thread != current_thread:
            ident += "/%s" % current_thread.name
        record.ident = ident  # type:ignore[attr-defined]
        record.tid = None  # type:ignore[attr-defined]
        if sys.version_info >= (3, 8):
            record.tid = current_thread.native_id
        return record
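
# With LOGGER_FORMAT and DATE_FORMAT above, a record produced by this factory
# renders roughly as follows (illustrative values; the "/<thread>" suffix only
# appears outside the main thread):
#
#     [2023-01-01 12:00:00 +0000] [1234/Thread-2] [INFO] client connected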


class ThreadedStreamHandler(logging.Handler):
    """Sends logging output to the stream registered for the current thread or
    ``sys.stderr`` when no stream was registered."""

    terminator: ClassVar[str] = "\n"
    _streams: Dict[int, types.ErrorStream]
    _journal_stream_id: Optional[Tuple[int, int]]
    _journal_socket: Optional[socket.socket]

    def __init__(self) -> None:
        super().__init__()
        self._streams = {}
        self._journal_stream_id = None
        with contextlib.suppress(TypeError, ValueError):
            # systemd sets JOURNAL_STREAM to "<device>:<inode>" of the
            # standard output stream when it is connected to the journal.
            dev, inode = os.environ.get("JOURNAL_STREAM", "").split(":", 1)
            self._journal_stream_id = int(dev), int(inode)
        self._journal_socket = None
        if self._journal_stream_id and hasattr(socket, "AF_UNIX"):
            journal_socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
            try:
                journal_socket.connect("/run/systemd/journal/socket")
            except OSError:
                journal_socket.close()
            else:
                self._journal_socket = journal_socket

    def _detect_journal(self, stream):
        # True if the stream's device and inode match $JOURNAL_STREAM,
        # i.e. the stream is the one attached to the systemd journal.
        if not self._journal_stream_id:
            return False
        try:
            stat = os.fstat(stream.fileno())
        except Exception:
            return False
        return self._journal_stream_id == (stat.st_dev, stat.st_ino)

    @staticmethod
    def _encode_journal(data):
        # Encode fields for the journal's native protocol: simple values as
        # "KEY=value\n", values containing newlines as KEY, newline, 64-bit
        # little-endian length and the raw value.  Fields whose value is
        # None (e.g. TID on old Python versions) are skipped.
        msg = b""
        for key, value in data.items():
            if value is None:
                continue
            key = key.encode()
            value = str(value).encode()
            if b"\n" in value:
                msg += (key + b"\n" +
                        struct.pack("<Q", len(value)) + value + b"\n")
            else:
                msg += key + b"=" + value + b"\n"
        return msg
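
    # Illustration of the wire format above (a sketch, not executed here):
    # ``_encode_journal({"MESSAGE": "a\nb", "PRIORITY": 6})`` yields
    # ``b"MESSAGE\n" + struct.pack("<Q", 3) + b"a\nb\n" + b"PRIORITY=6\n"``,
    # matching systemd's native journal protocol.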

    def _emit_journal(self, record):
        # Map Python log levels to syslog priorities; unknown levels are
        # treated as warnings.
        priority = {"DEBUG": 7,
                    "INFO": 6,
                    "WARNING": 4,
                    "ERROR": 3,
                    "CRITICAL": 2}.get(record.levelname, 4)
        timestamp = time.strftime("%Y-%m-%dT%H:%M:%S.%%03dZ",
                                  time.gmtime(record.created)) % record.msecs
        data = {"PRIORITY": priority,
                "TID": record.tid,
                "SYSLOG_IDENTIFIER": record.name,
                "SYSLOG_FACILITY": 1,
                "SYSLOG_PID": record.process,
                "SYSLOG_TIMESTAMP": timestamp,
                "CODE_FILE": record.pathname,
                "CODE_LINE": record.lineno,
                "CODE_FUNC": record.funcName,
                "MESSAGE": self.format(record)}
        self._journal_socket.sendall(self._encode_journal(data))

    def emit(self, record: logging.LogRecord) -> None:
        try:
            stream = self._streams.get(threading.get_ident(), sys.stderr)
            if self._journal_socket and self._detect_journal(stream):
                # The target is the systemd journal: use the native
                # protocol instead of writing to the stream.
                self._emit_journal(record)
                return
            msg = self.format(record)
            stream.write(msg)
            stream.write(self.terminator)
            if hasattr(stream, "flush"):
                stream.flush()
        except Exception:
            self.handleError(record)

    @types.contextmanager
    def register_stream(self, stream: types.ErrorStream) -> Iterator[None]:
        """Register stream for logging output of the current thread."""
        key = threading.get_ident()
        self._streams[key] = stream
        try:
            yield
        finally:
            del self._streams[key]


@types.contextmanager
def register_stream(stream: types.ErrorStream) -> Iterator[None]:
    """Register stream for logging output of the current thread.

    This module-level placeholder does nothing; ``setup()`` replaces it
    with the handler's implementation.
    """
    yield


def setup() -> None:
    """Set global logging up."""
    global register_stream
    handler = ThreadedStreamHandler()
    logging.basicConfig(format=LOGGER_FORMAT, datefmt=DATE_FORMAT,
                        handlers=[handler])
    register_stream = handler.register_stream
    log_record_factory = IdentLogRecordFactory(logging.getLogRecordFactory())
    logging.setLogRecordFactory(log_record_factory)
    set_level(logging.WARNING)


def set_level(level: Union[int, str]) -> None:
    """Set logging level for global logger."""
    if isinstance(level, str):
        level = getattr(logging, level.upper())
    assert isinstance(level, int)
    logger.setLevel(level)
    logger.removeFilter(REMOVE_TRACEBACK_FILTER)
    if level > logging.DEBUG:
        # Hide tracebacks from log output unless debugging is enabled.
        logger.addFilter(REMOVE_TRACEBACK_FILTER)
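
# For example (sketch), ``set_level("debug")`` and ``set_level(logging.DEBUG)``
# are equivalent; exception tracebacks are only kept at DEBUG level and below,
# because REMOVE_TRACEBACK_FILTER is attached for every higher level.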