# cloudns/src/cloudns/logger/logger.py
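"""Logging setup for cloudns.

Provides a JSON formatter, a stdout/stderr split for console output, an
optional rotating JSON log file and, on Python 3.12+, a non-blocking
QueueHandler configured through logging.config.dictConfig.
"""
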
import sys
import json
import logging
import logging.config
import logging.handlers
import atexit
from datetime import datetime, timezone
from typing_extensions import override
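
# Attributes present on every logging.LogRecord; anything a caller passes via
# extra= falls outside this set and is copied into the JSON payload as-is.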
LOG_RECORD_BUILTIN_ATTRS = {
"args",
"asctime",
"created",
"exc_info",
"exc_text",
"filename",
"funcName",
"levelname",
"levelno",
"lineno",
"module",
"msecs",
"message",
"msg",
"name",
"pathname",
"process",
"processName",
"relativeCreated",
"stack_info",
"thread",
"threadName",
"taskName",
}


class JSONFormatter(logging.Formatter):
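    """Render each log record as a single-line JSON document.

    fmt_keys maps output field names to LogRecord attribute names, so the
    shape of the JSON payload is driven entirely by the logging config.
    """
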
def __init__(self, *, fmt_keys: dict[str, str] | None = None):
super().__init__()
self.fmt_keys = fmt_keys if fmt_keys is not None else {}
@override
def format(self, record: logging.LogRecord) -> str:
message = self._prepare_log_dict(record)
return json.dumps(message, default=str)
def _prepare_log_dict(self, record: logging.LogRecord) -> dict:
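        """Assemble the dict that json.dumps turns into the log line.

        Fields named in fmt_keys are filled first, then the always-present
        fields (message, timestamp, optional exc_info/stack_info), and
        finally any user-supplied extras not in LOG_RECORD_BUILTIN_ATTRS.
        """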
always_fields = {
'message': record.getMessage(),
'timestamp': datetime.fromtimestamp(
record.created, tz=timezone.utc
).isoformat()
}
if record.exc_info is not None:
always_fields['exc_info'] = self.formatException(record.exc_info)
if record.stack_info is not None:
always_fields['stack_info'] = self.formatStack(record.stack_info)
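
        # Remap record attributes to the configured output names; values already
        # computed in always_fields are popped here so the update() below does
        # not duplicate them.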
message = {
key: msg_value
if (msg_value := always_fields.pop(value, None)) is not None
else getattr(record, value)
for key, value in self.fmt_keys.items()
}
message.update(always_fields)
for key, value in record.__dict__.items():
if key not in LOG_RECORD_BUILTIN_ATTRS:
message[key] = value
return message


class NonErrorFilter(logging.Filter):
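    """Pass only records at INFO and below, so stdout does not repeat the
    warnings and errors that the stderr handler already prints."""
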
@override
def filter(self, record: logging.LogRecord) -> bool | logging.LogRecord:
return record.levelno <= logging.INFO


def generate_log_config(log_path: str | None = None, backup_count: int = 3, max_bytes: int = 1024 * 1024 * 10) -> dict:
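    """Build a logging.config.dictConfig dictionary.

    DEBUG/INFO records go to stdout and WARNING+ to stderr; when log_path is
    given, every record is also written as a JSON line to a rotating file.
    On Python 3.12+ all handlers are wrapped in a non-blocking QueueHandler.
    """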
logger_config: dict = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'no_errors': {
"()": NonErrorFilter
}
},
'formatters': {
'simple': {
'format': '[%(asctime)s][%(levelname)s] %(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S'
},
'detailed': {
'format': '[%(asctime)s][%(levelname)s] %(message)s',
'datefmt': '%Y-%m-%dT%H:%M:%S%z' # ISO-8601 Timestamp
},
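            # fmt_keys: JSON field name -> LogRecord attribute, consumed by JSONFormatter.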
'json': {
'()': JSONFormatter,
'fmt_keys': {
'timestamp': 'timestamp',
'level': 'levelname',
'message': 'message',
'logger': 'name',
'module': 'module',
'function': 'funcName',
'line': 'lineno',
'thread_name': 'threadName'
},
}
},
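        # Console output is split: DEBUG/INFO go to stdout (via the 'no_errors'
        # filter) and WARNING+ to stderr, so no record is printed twice.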
'handlers': {
'stdout': {
                'class': 'logging.StreamHandler',
'level': 'DEBUG',
'filters': ['no_errors'],
'formatter': 'simple',
'stream': 'ext://sys.stdout'
},
'stderr': {
                'class': 'logging.StreamHandler',
'level': 'WARNING',
'formatter': 'simple',
'stream': 'ext://sys.stderr'
}
} | ({'file': {
            'class': 'logging.handlers.RotatingFileHandler',
'level': 'DEBUG',
'formatter': 'json',
'filename': log_path,
'maxBytes': max_bytes,
'backupCount': backup_count
}} if log_path is not None else {}),
'loggers': {
'root': {
'level': 'DEBUG',
'handlers': [
'stdout',
'stderr'
] + (['file'] if log_path is not None else []),
}
}
}
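
    # On Python 3.12+ dictConfig can wire a QueueHandler to the real handlers via
    # the 'handlers' key and builds the matching QueueListener for us, so logging
    # calls never block on console or file I/O; route the root logger through it.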
if sys.version_info >= (3, 12): # Python 3.12+
logger_config['handlers']['queue_handler'] = {
            'class': 'logging.handlers.QueueHandler',
'respect_handler_level': True,
'handlers': [
'stdout',
'stderr'
] + (['file'] if log_path is not None else []),
}
logger_config['loggers']['root']['handlers'] = ['queue_handler']
return logger_config


def setup_logging(log_path: str | None = None, backup_count: int = 3, max_bytes: int = 1024 * 1024 * 10) -> None:
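    """Apply the generated logging configuration.

    Passing '-' (or None) as log_path disables the rotating file handler. On
    Python 3.12+ the queue listener created by dictConfig is started here and
    stopped again at interpreter exit.
    """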
log_config = generate_log_config(
log_path if log_path != '-' else None, backup_count, max_bytes)
logging.config.dictConfig(log_config)
if sys.version_info >= (3, 12): # Python 3.12+
queue_handler = logging.getHandlerByName('queue_handler')
if queue_handler is not None:
queue_handler.listener.start() # type: ignore
atexit.register(queue_handler.listener.stop) # type: ignore
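

# Minimal usage sketch (illustrative only, not part of the original module):
# 'cloudns.log' and the 'zone' extra field are arbitrary example values.
if __name__ == '__main__':
    setup_logging(log_path='cloudns.log')
    logger = logging.getLogger(__name__)
    logger.debug('goes to stdout and, as JSON, to the rotating log file')
    logger.warning('goes to stderr and the log file')
    # Keys outside LOG_RECORD_BUILTIN_ATTRS become extra JSON fields.
    logger.info('zone updated', extra={'zone': 'example.com'})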