log.py 7.5 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212
  1. # This file is part of Radicale - CalDAV and CardDAV server
  2. # Copyright © 2011-2017 Guillaume Ayoub
  3. # Copyright © 2017-2019 Unrud <unrud@outlook.com>
  4. #
  5. # This library is free software: you can redistribute it and/or modify
  6. # it under the terms of the GNU General Public License as published by
  7. # the Free Software Foundation, either version 3 of the License, or
  8. # (at your option) any later version.
  9. #
  10. # This library is distributed in the hope that it will be useful,
  11. # but WITHOUT ANY WARRANTY; without even the implied warranty of
  12. # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  13. # GNU General Public License for more details.
  14. #
  15. # You should have received a copy of the GNU General Public License
  16. # along with Radicale. If not, see <http://www.gnu.org/licenses/>.
  17. """
  18. Functions to set up Python's logging facility for Radicale's WSGI application.
  19. Log messages are sent to the first available target of:
  20. - Error stream specified by the WSGI server in "wsgi.errors"
  21. - ``sys.stderr``
  22. """
  23. import contextlib
  24. import io
  25. import logging
  26. import os
  27. import socket
  28. import struct
  29. import sys
  30. import threading
  31. import time
  32. from typing import (Any, Callable, ClassVar, Dict, Iterator, Mapping, Optional,
  33. Tuple, Union, cast)
  34. from radicale import types
# Name of the logger shared by the whole Radicale package.
LOGGER_NAME: str = "radicale"
# Line format for log output; the ``ident`` field is attached to every
# record by ``IdentLogRecordFactory``.
LOGGER_FORMAT: str = "[%(asctime)s] [%(ident)s] [%(levelname)s] %(message)s"
# Timestamp format used for ``asctime`` in LOGGER_FORMAT.
DATE_FORMAT: str = "%Y-%m-%d %H:%M:%S %z"

# Global logger instance used throughout Radicale.
logger: logging.Logger = logging.getLogger(LOGGER_NAME)
  39. class RemoveTracebackFilter(logging.Filter):
  40. def filter(self, record: logging.LogRecord) -> bool:
  41. record.exc_info = None
  42. return True
  43. REMOVE_TRACEBACK_FILTER: logging.Filter = RemoveTracebackFilter()
  44. class IdentLogRecordFactory:
  45. """LogRecordFactory that adds ``ident`` attribute."""
  46. def __init__(self, upstream_factory: Callable[..., logging.LogRecord]
  47. ) -> None:
  48. self._upstream_factory = upstream_factory
  49. def __call__(self, *args: Any, **kwargs: Any) -> logging.LogRecord:
  50. record = self._upstream_factory(*args, **kwargs)
  51. ident = "%d" % os.getpid()
  52. main_thread = threading.main_thread()
  53. current_thread = threading.current_thread()
  54. if current_thread.name and main_thread != current_thread:
  55. ident += "/%s" % current_thread.name
  56. record.ident = ident # type:ignore[attr-defined]
  57. record.tid = None # type:ignore[attr-defined]
  58. if sys.version_info >= (3, 8):
  59. record.tid = current_thread.native_id
  60. return record
class ThreadedStreamHandler(logging.Handler):
    """Sends logging output to the stream registered for the current thread or
    ``sys.stderr`` when no stream was registered."""

    terminator: ClassVar[str] = "\n"

    # Maps thread idents to the error stream registered for that thread.
    _streams: Dict[int, types.ErrorStream]
    # (st_dev, st_ino) pair parsed from $JOURNAL_STREAM, identifying the
    # file systemd connected to the journal; None when not running under
    # systemd (or the variable is absent/malformed).
    _journal_stream_id: Optional[Tuple[int, int]]
    # Lazily connected datagram socket to the systemd journal.
    _journal_socket: Optional[socket.socket]
    # Set after a failed connect so we don't retry on every record.
    _journal_socket_failed: bool

    def __init__(self) -> None:
        super().__init__()
        self._streams = {}
        self._journal_stream_id = None
        # JOURNAL_STREAM has the form "<device>:<inode>" when stdout/stderr
        # is connected to the journal; ValueError/TypeError from a missing
        # or malformed value leaves _journal_stream_id as None.
        with contextlib.suppress(TypeError, ValueError):
            dev, inode = os.environ.get("JOURNAL_STREAM", "").split(":", 1)
            self._journal_stream_id = (int(dev), int(inode))
        self._journal_socket = None
        self._journal_socket_failed = False

    def _detect_journal(self, stream: types.ErrorStream) -> bool:
        """Return True when ``stream`` is the file systemd attached to the
        journal (same device and inode as advertised in $JOURNAL_STREAM)."""
        if not self._journal_stream_id or not isinstance(stream, io.IOBase):
            return False
        try:
            stat = os.fstat(stream.fileno())
        except OSError:
            return False
        return self._journal_stream_id == (stat.st_dev, stat.st_ino)

    @staticmethod
    def _encode_journal(data: Mapping[str, Optional[Union[str, int]]]
                        ) -> bytes:
        """Serialize ``data`` for the systemd native journal protocol.

        ``None`` values are skipped.  Values containing a newline use the
        length-prefixed binary form (KEY\\n + little-endian uint64 length +
        value + \\n); all others use the simple KEY=value\\n form.
        """
        msg = b""
        for key, value in data.items():
            if value is None:
                continue
            keyb = key.encode()
            valueb = str(value).encode()
            if b"\n" in valueb:
                msg += (keyb + b"\n" +
                        struct.pack("<Q", len(valueb)) + valueb + b"\n")
            else:
                msg += keyb + b"=" + valueb + b"\n"
        return msg

    def _try_emit_journal(self, record: logging.LogRecord) -> bool:
        """Send ``record`` to the systemd journal.

        Returns False (and remembers the failure) when the journal socket
        is unavailable, so ``emit`` can fall back to the plain stream.
        """
        if not self._journal_socket:
            # Try to connect to systemd journal socket
            if self._journal_socket_failed or not hasattr(socket, "AF_UNIX"):
                return False
            journal_socket = None
            try:
                journal_socket = socket.socket(
                    socket.AF_UNIX, socket.SOCK_DGRAM)
                journal_socket.connect("/run/systemd/journal/socket")
            except OSError:
                # Fail once and never retry; close the half-created socket.
                self._journal_socket_failed = True
                if journal_socket:
                    journal_socket.close()
                return False
            self._journal_socket = journal_socket
        # Map Python log levels to syslog priorities; unknown levels are
        # treated as WARNING (4).
        priority = {"DEBUG": 7,
                    "INFO": 6,
                    "WARNING": 4,
                    "ERROR": 3,
                    "CRITICAL": 2}.get(record.levelname, 4)
        # ISO-8601 UTC timestamp with millisecond precision; the inner
        # "%%03d" survives strftime and is filled in with record.msecs.
        timestamp = time.strftime("%Y-%m-%dT%H:%M:%S.%%03dZ",
                                  time.gmtime(record.created)) % record.msecs
        data = {"PRIORITY": priority,
                "TID": cast(Optional[int], getattr(record, "tid", None)),
                "SYSLOG_IDENTIFIER": record.name,
                "SYSLOG_FACILITY": 1,
                "SYSLOG_PID": record.process,
                "SYSLOG_TIMESTAMP": timestamp,
                "CODE_FILE": record.pathname,
                "CODE_LINE": record.lineno,
                "CODE_FUNC": record.funcName,
                "MESSAGE": self.format(record)}
        self._journal_socket.sendall(self._encode_journal(data))
        return True

    def emit(self, record: logging.LogRecord) -> None:
        """Write ``record`` to the journal if the current thread's stream is
        the journal, otherwise to the registered stream (or stderr)."""
        try:
            stream = self._streams.get(threading.get_ident(), sys.stderr)
            if self._detect_journal(stream) and self._try_emit_journal(record):
                return
            msg = self.format(record)
            stream.write(msg + self.terminator)
            # Some registered streams (e.g. WSGI error streams) may lack
            # flush(); only call it when present.
            if hasattr(stream, "flush"):
                stream.flush()
        except Exception:
            self.handleError(record)

    @types.contextmanager
    def register_stream(self, stream: types.ErrorStream) -> Iterator[None]:
        """Register stream for logging output of the current thread."""
        key = threading.get_ident()
        self._streams[key] = stream
        try:
            yield
        finally:
            # Always unregister, even if the wrapped code raised.
            del self._streams[key]
@types.contextmanager
def register_stream(stream: types.ErrorStream) -> Iterator[None]:
    """Register stream for logging output of the current thread.

    No-op placeholder: ``setup()`` rebinds this module-level name to the
    real implementation, ``ThreadedStreamHandler.register_stream``.
    """
    yield
  160. def setup() -> None:
  161. """Set global logging up."""
  162. global register_stream
  163. handler = ThreadedStreamHandler()
  164. logging.basicConfig(format=LOGGER_FORMAT, datefmt=DATE_FORMAT,
  165. handlers=[handler])
  166. register_stream = handler.register_stream
  167. log_record_factory = IdentLogRecordFactory(logging.getLogRecordFactory())
  168. logging.setLogRecordFactory(log_record_factory)
  169. set_level(logging.WARNING)
  170. def set_level(level: Union[int, str]) -> None:
  171. """Set logging level for global logger."""
  172. if isinstance(level, str):
  173. level = getattr(logging, level.upper())
  174. assert isinstance(level, int)
  175. logger.setLevel(level)
  176. logger.removeFilter(REMOVE_TRACEBACK_FILTER)
  177. if level > logging.DEBUG:
  178. logger.addFilter(REMOVE_TRACEBACK_FILTER)