Last active
March 6, 2025 07:06
-
-
Save alhoo/189d93ed5646e693a9823627b37ae867 to your computer and use it in GitHub Desktop.
Json logger
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import base64
import json
import logging
import pickle
import sys
import traceback
from datetime import date, datetime, time, timedelta, timezone
class JSONEncoder(json.JSONEncoder):
    """JSON encoder that never fails on a log record.

    - datetime/date/time values are emitted as ISO 8601 strings.
    - timedelta values are emitted as ISO 8601 durations (e.g. "P1DT2H3M4S"),
      preserving sub-second precision when present.
    - Anything else json cannot serialize is pickled and embedded as a
      base64 data URI, so arbitrary objects in `extra`/locals still log.
    """

    def default(self, obj):
        if isinstance(obj, (datetime, date, time)):
            return obj.isoformat()
        if isinstance(obj, timedelta):
            # ISO 8601 duration built from the normalized timedelta fields
            # (days may be negative; seconds/microseconds are always >= 0).
            # Using obj.microseconds keeps sub-second precision instead of
            # truncating it via int(total_seconds()).
            hours, rem = divmod(obj.seconds, 3600)
            minutes, secs = divmod(rem, 60)
            if obj.microseconds:
                sec_str = f"{secs + obj.microseconds / 1_000_000:.6f}".rstrip("0").rstrip(".")
            else:
                sec_str = str(secs)
            return f"P{obj.days}DT{hours}H{minutes}M{sec_str}S"
        try:
            return super().default(obj)
        except TypeError:
            # Last-resort fallback: embed a pickled copy of the object.
            # NOTE(security): unpickling this payload later executes arbitrary
            # code — only decode it from trusted logs.
            payload = base64.b64encode(pickle.dumps(obj)).decode()
            return f"data:application/pickle;base64,{payload}"
class JSONLogger(logging.Logger):
    """Logger that emits structured JSON and accepts free-form keyword fields.

    Keyword arguments on ``log()`` calls that logging does not recognize are
    folded into the record's ``extra`` dict, so JSONFormatter serializes them
    as top-level JSON keys.
    """

    def __init__(self, name):
        super().__init__(name)
        handler = logging.StreamHandler()
        handler.setFormatter(JSONFormatter())
        self.addHandler(handler)
        self.setLevel(logging.DEBUG)

    def _capture_extra(self, kwargs):
        """Split the standard logging kwargs from free-form fields.

        Returns ``(exc_info, stack_info, stacklevel, extra)`` where ``extra``
        is a NEW dict merging the caller's ``extra`` with all remaining
        keyword arguments. The caller's dict is never mutated.
        """
        exc_info = kwargs.pop('exc_info', None)
        stack_info = kwargs.pop('stack_info', False)
        stacklevel = kwargs.pop('stacklevel', 1)
        # Copy so the caller's extra dict is not modified in place.
        extra = dict(kwargs.pop('extra', {}))
        extra.update(kwargs)
        return exc_info, stack_info, stacklevel, extra

    def log(self, level, msg, *args, **kwargs):
        """Like ``Logger.log``, but folds unknown kwargs into ``extra``."""
        exc_info, stack_info, stacklevel, extra = self._capture_extra(kwargs)
        super().log(level, msg, *args, exc_info=exc_info, stack_info=stack_info,
                    stacklevel=stacklevel, extra=extra)

    def exception(self, msg, *args, **kwargs):
        """Log at ERROR level, attaching the traceback and raising-frame locals.

        Accepts %-style ``args`` like ``Logger.exception``. Locals come from
        the innermost traceback frame, except when that frame is the
        ``__main__`` module frame (its namespace is the whole script — too
        noisy to attach).
        """
        exc_type, exc_value, tb = sys.exc_info()
        tb_str = "".join(traceback.format_exception(exc_type, exc_value, tb))
        local_vars = {}
        if tb:
            # Walk to the innermost frame, where the exception was raised.
            frame = tb
            while frame.tb_next:
                frame = frame.tb_next
            frame = frame.tb_frame
            local_vars = frame.f_locals
            if local_vars.get("__name__") == "__main__":
                local_vars = {}
        kwargs.update({
            "exception": str(exc_value),
            "traceback": tb_str,
            "locals": local_vars,
        })
        self.log(logging.ERROR, msg, *args, **kwargs)
class JSONFormatter(logging.Formatter):
    """Format each log record as a single-line JSON object.

    The standard record is reduced to ``level``/``time``/``message``; any
    non-standard attributes (injected via ``extra``) become additional
    top-level JSON keys.
    """

    # Attributes every LogRecord carries; anything else came from `extra`.
    # Hoisted to a class constant so the set is not rebuilt per record.
    _DEFAULT_ATTRS = frozenset({
        'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename',
        'funcName', 'levelname', 'levelno', 'lineno', 'module', 'msecs',
        'message', 'msg', 'name', 'pathname', 'process', 'processName',
        'relativeCreated', 'stack_info', 'thread', 'threadName', 'taskName',
    })

    def format(self, record):
        log_record = {
            "level": record.levelname,
            # Timezone-aware UTC timestamp. datetime.utcfromtimestamp() is
            # deprecated since Python 3.12 and returns a naive datetime.
            "time": datetime.fromtimestamp(record.created, tz=timezone.utc).isoformat(),
            "message": record.getMessage(),
        }
        for key, value in record.__dict__.items():
            if key not in self._DEFAULT_ATTRS:
                log_record[key] = value
        # Preserve exception details that arrive via exc_info (e.g. plain
        # Logger.exception calls) without clobbering a traceback already
        # attached by JSONLogger.exception().
        if record.exc_info and "traceback" not in log_record:
            log_record["traceback"] = self.formatException(record.exc_info)
        return json.dumps(log_record, cls=JSONEncoder)
# Make every logger created from here on a JSONLogger, then create the
# module's shared logger (handlers and level are configured in __init__).
logging.setLoggerClass(JSONLogger)
base_logger = logging.getLogger("json_logger")
class LoggerKeywordWrapper:
    """Proxy that lets logging calls carry arbitrary keyword fields.

    ``wrapper.info("msg", user="bob")`` becomes
    ``logger.info("msg", extra={"user": "bob"})``. Only the standard logging
    methods are intercepted, so other logger attributes (``setLevel``,
    ``addHandler``, ...) pass through untouched instead of receiving an
    unexpected ``extra`` keyword.
    """

    # Methods that accept `extra`; everything else is returned as-is.
    _LOG_METHODS = frozenset({
        "debug", "info", "warning", "warn", "error",
        "exception", "critical", "fatal", "log",
    })
    # Keywords logging itself understands. They must NOT be merged into
    # `extra`: Logger.makeRecord raises KeyError when an extra key clashes
    # with a built-in LogRecord attribute such as exc_info/stack_info.
    _RESERVED = ("exc_info", "stack_info", "stacklevel")

    def __init__(self, logger):
        self.logger = logger

    def __getattr__(self, attr):
        orig_attr = getattr(self.logger, attr)
        if attr in self._LOG_METHODS and callable(orig_attr):
            def wrapped(*args, **kwargs):
                # Keep logging's own kwargs as real kwargs.
                passthrough = {k: kwargs.pop(k) for k in self._RESERVED if k in kwargs}
                # Copy so the caller's extra dict is not mutated in place.
                extra = dict(kwargs.pop("extra", {}))
                extra.update(kwargs)
                return orig_attr(*args, extra=extra, **passthrough)
            return wrapped
        return orig_attr
# Public entry point: wrap the base logger so call sites can pass arbitrary
# keyword fields, e.g. logger.info("started", user_id=42).
logger = LoggerKeywordWrapper(base_logger)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment