# This file is part of Radicale - CalDAV and CardDAV server
# Copyright © 2011-2017 Guillaume Ayoub
# Copyright © 2017-2023 Unrud <unrud@outlook.com>
# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.

"""
Functions to set up Python's logging facility for Radicale's WSGI application.

Log messages are sent to the first available target of:

  - Error stream specified by the WSGI server in "wsgi.errors"
  - ``sys.stderr``

"""

import contextlib
import io
import logging
import os
import socket
import struct
import sys
import threading
import time
from typing import (Any, Callable, ClassVar, Dict, Iterator, Mapping, Optional,
                    Tuple, Union, cast)

from radicale import types

LOGGER_NAME: str = "radicale"
LOGGER_FORMATS: Mapping[str, str] = {
    "verbose": "[%(asctime)s] [%(ident)s] [%(levelname)s] %(message)s",
    "journal": "[%(ident)s] [%(levelname)s] %(message)s",
}
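# "verbose" is used when writing to a plain stream (see
# ThreadedStreamHandler.emit); "journal" drops the timestamp, which
# systemd-journald records on its own.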
DATE_FORMAT: str = "%Y-%m-%d %H:%M:%S %z"

logger: logging.Logger = logging.getLogger(LOGGER_NAME)


class RemoveTracebackFilter(logging.Filter):
    """Remove exception info (traceback) from log records."""

    def filter(self, record: logging.LogRecord) -> bool:
        record.exc_info = None
        return True


class RemoveTRACEFilter(logging.Filter):
    """Drop all messages starting with "TRACE"."""

    def filter(self, record: logging.LogRecord) -> bool:
        if record.msg.startswith("TRACE"):
            return False
        else:
            return True


class PassTRACETOKENFilter(logging.Filter):
    """Pass "TRACE" messages only if they start with "TRACE/<trace_filter>";
       all non-TRACE messages pass through."""

    def __init__(self, trace_filter: str):
        super().__init__()
        self.trace_filter = trace_filter
        self.prefix = "TRACE/" + self.trace_filter

    def filter(self, record: logging.LogRecord) -> bool:
        if record.msg.startswith("TRACE"):
            if record.msg.startswith(self.prefix):
                return True
            else:
                return False
        else:
            return True


REMOVE_TRACEBACK_FILTER: logging.Filter = RemoveTracebackFilter()
REMOVE_TRACE_FILTER: logging.Filter = RemoveTRACEFilter()


class IdentLogRecordFactory:
    """LogRecordFactory that adds an ``ident`` attribute."""

    def __init__(self, upstream_factory: Callable[..., logging.LogRecord]
                 ) -> None:
        self._upstream_factory = upstream_factory

    def __call__(self, *args: Any, **kwargs: Any) -> logging.LogRecord:
        record = self._upstream_factory(*args, **kwargs)
        ident = ("%d" % record.process if record.process is not None
                 else record.processName or "unknown")
        tid = None
        if record.thread is not None:
            if record.thread != threading.main_thread().ident:
                ident += "/%s" % (record.threadName or "unknown")
            if (sys.version_info >= (3, 8) and
                    record.thread == threading.get_ident()):
                try:
                    tid = threading.get_native_id()
                except AttributeError:
                    # Not available on all platforms, e.g. SunOS; see
                    # https://docs.python.org/3/library/threading.html#threading.get_native_id
                    tid = None
        record.ident = ident  # type:ignore[attr-defined]
        record.tid = tid  # type:ignore[attr-defined]
        return record


class ThreadedStreamHandler(logging.Handler):
    """Sends logging output to the stream registered for the current thread or
       ``sys.stderr`` when no stream was registered."""

    terminator: ClassVar[str] = "\n"

    _streams: Dict[int, types.ErrorStream]
    _journal_stream_id: Optional[Tuple[int, int]]
    _journal_socket: Optional[socket.socket]
    _journal_socket_failed: bool
    _formatters: Mapping[str, logging.Formatter]
    _formatter: Optional[logging.Formatter]

    def __init__(self, format_name: Optional[str] = None) -> None:
        super().__init__()
        self._streams = {}
        self._journal_stream_id = None
        with contextlib.suppress(TypeError, ValueError):
            dev, inode = os.environ.get("JOURNAL_STREAM", "").split(":", 1)
            self._journal_stream_id = (int(dev), int(inode))
        self._journal_socket = None
        self._journal_socket_failed = False
        self._formatters = {name: logging.Formatter(fmt, DATE_FORMAT)
                            for name, fmt in LOGGER_FORMATS.items()}
        self._formatter = (self._formatters[format_name]
                           if format_name is not None else None)

    def _get_formatter(self, default_format_name: str) -> logging.Formatter:
        return self._formatter or self._formatters[default_format_name]

    def _detect_journal(self, stream: types.ErrorStream) -> bool:
        if not self._journal_stream_id or not isinstance(stream, io.IOBase):
            return False
        try:
            stat = os.fstat(stream.fileno())
        except OSError:
            return False
        return self._journal_stream_id == (stat.st_dev, stat.st_ino)
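
    # systemd sets $JOURNAL_STREAM to "<device>:<inode>" of the stream
    # connected to a service's stdout/stderr; when the registered stream
    # matches, records are sent to the journal's native socket instead of
    # being written as plain text.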

    @staticmethod
    def _encode_journal(data: Mapping[str, Optional[Union[str, int]]]
                        ) -> bytes:
        msg = b""
        for key, value in data.items():
            if value is None:
                continue
            keyb = key.encode()
            valueb = str(value).encode()
            if b"\n" in valueb:
                msg += (keyb + b"\n" +
                        struct.pack("<Q", len(valueb)) + valueb + b"\n")
            else:
                msg += keyb + b"=" + valueb + b"\n"
        return msg
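
    # Wire format sketch: simple values become b"KEY=value\n"; values that
    # contain newlines become b"KEY\n" + 64-bit little-endian length + value
    # + b"\n". For example, {"PRIORITY": 6} encodes to b"PRIORITY=6\n".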

    def _try_emit_journal(self, record: logging.LogRecord) -> bool:
        if not self._journal_socket:
            # Try to connect to systemd journal socket
            if self._journal_socket_failed or not hasattr(socket, "AF_UNIX"):
                return False
            journal_socket = None
            try:
                journal_socket = socket.socket(
                    socket.AF_UNIX, socket.SOCK_DGRAM)
                journal_socket.connect("/run/systemd/journal/socket")
            except OSError as e:
                self._journal_socket_failed = True
                if journal_socket:
                    journal_socket.close()
                # Log after setting `_journal_socket_failed` to prevent loop!
                logger.error("Failed to connect to systemd journal: %s",
                             e, exc_info=True)
                return False
            self._journal_socket = journal_socket

        priority = {"DEBUG": 7,
                    "INFO": 6,
                    "WARNING": 4,
                    "ERROR": 3,
                    "CRITICAL": 2}.get(record.levelname, 4)
        timestamp = time.strftime("%Y-%m-%dT%H:%M:%S.%%03dZ",
                                  time.gmtime(record.created)) % record.msecs
        data = {"PRIORITY": priority,
                "TID": cast(Optional[int], getattr(record, "tid", None)),
                "SYSLOG_IDENTIFIER": record.name,
                "SYSLOG_FACILITY": 1,
                "SYSLOG_PID": record.process,
                "SYSLOG_TIMESTAMP": timestamp,
                "CODE_FILE": record.pathname,
                "CODE_LINE": record.lineno,
                "CODE_FUNC": record.funcName,
                "MESSAGE": self._get_formatter("journal").format(record)}
        self._journal_socket.sendall(self._encode_journal(data))
        return True

    def emit(self, record: logging.LogRecord) -> None:
        try:
            stream = self._streams.get(threading.get_ident(), sys.stderr)
            if self._detect_journal(stream) and self._try_emit_journal(record):
                return
            msg = self._get_formatter("verbose").format(record)
            stream.write(msg + self.terminator)
            stream.flush()
        except Exception:
            self.handleError(record)

    @types.contextmanager
    def register_stream(self, stream: types.ErrorStream) -> Iterator[None]:
        """Register stream for logging output of the current thread."""
        key = threading.get_ident()
        self._streams[key] = stream
        try:
            yield
        finally:
            del self._streams[key]


@types.contextmanager
def register_stream(stream: types.ErrorStream) -> Iterator[None]:
    """Register stream for logging output of the current thread."""
    yield
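

# ``setup`` rebinds the module-level ``register_stream`` to the handler's bound
# method; the no-op above is only in effect before ``setup()`` has been called.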


def setup() -> None:
    """Set global logging up."""
    global register_stream
    format_name = os.environ.get("RADICALE_LOG_FORMAT") or None
    sane_format_name = format_name if format_name in LOGGER_FORMATS else None
    handler = ThreadedStreamHandler(sane_format_name)
    logging.basicConfig(handlers=[handler])
    register_stream = handler.register_stream
    log_record_factory = IdentLogRecordFactory(logging.getLogRecordFactory())
    logging.setLogRecordFactory(log_record_factory)
    set_level(logging.INFO, True)
    if format_name != sane_format_name:
        logger.error("Invalid RADICALE_LOG_FORMAT: %r", format_name)


logger_display_backtrace_disabled: bool = False
logger_display_backtrace_enabled: bool = False
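# One-shot flags so the backtrace enabled/disabled notice is logged only once.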


def set_level(level: Union[int, str], backtrace_on_debug: bool,
              trace_on_debug: bool = False, trace_filter: str = "") -> None:
    """Set logging level for global logger."""
    global logger_display_backtrace_disabled
    global logger_display_backtrace_enabled
    if isinstance(level, str):
        level = getattr(logging, level.upper())
        assert isinstance(level, int)
    logger.setLevel(level)
    if level > logging.DEBUG:
        if logger_display_backtrace_disabled is False:
            logger.info("Logging of backtrace is disabled in this loglevel")
            logger_display_backtrace_disabled = True
        logger.addFilter(REMOVE_TRACEBACK_FILTER)
    else:
        if not backtrace_on_debug:
            if logger_display_backtrace_disabled is False:
                logger.debug("Logging of backtrace is disabled by option in this loglevel")
                logger_display_backtrace_disabled = True
            logger.addFilter(REMOVE_TRACEBACK_FILTER)
        else:
            if logger_display_backtrace_enabled is False:
                logger.debug("Logging of backtrace is enabled by option in this loglevel")
                logger_display_backtrace_enabled = True
            logger.removeFilter(REMOVE_TRACEBACK_FILTER)
        if trace_on_debug:
            if trace_filter != "":
                logger.debug("Logging messages starting with 'TRACE/%s' enabled", trace_filter)
                logger.addFilter(PassTRACETOKENFilter(trace_filter))
                logger.removeFilter(REMOVE_TRACE_FILTER)
            else:
                logger.debug("Logging messages starting with 'TRACE' enabled")
                logger.removeFilter(REMOVE_TRACE_FILTER)
        else:
            logger.debug("Logging messages starting with 'TRACE' disabled")
            logger.addFilter(REMOVE_TRACE_FILTER)
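

# TRACE convention sketch (hypothetical call site; real TRACE messages are
# emitted elsewhere in Radicale and may use other tokens):
#
#     logger.debug("TRACE/roundtrip: request body: %r", body)
#
# With set_level("debug", True, trace_on_debug=True, trace_filter="roundtrip")
# only TRACE messages prefixed with "TRACE/roundtrip" pass the filter, while a
# plain set_level("debug", True) suppresses all TRACE messages via
# REMOVE_TRACE_FILTER.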