Created
May 5, 2025 14:02
-
-
Save Wasserwecken/639c5781e5a4f15167c59bb1b3fca96e to your computer and use it in GitHub Desktop.
Simple LangChain File Logger
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from langchain_openai import ChatOpenAI | |
from langchain_core.messages import HumanMessage, SystemMessage, BaseMessage | |
from langchain_core.outputs import LLMResult | |
from langchain_core.callbacks import BaseCallbackHandler | |
from typing import Any, Dict | |
from pathlib import Path | |
from datetime import datetime | |
class LoggingHandler(BaseCallbackHandler):
    """LangChain callback handler that logs every callback event.

    Each event is timestamped and printed to stdout; when a filename is
    given, the same record is mirrored to that file and flushed
    immediately so the log survives a crash mid-run.

    FIX vs. original: removed the stray literal '")' sequences that
    several f-strings carried (leftovers from converting print() calls)
    and which leaked into the log output.
    """

    def __init__(self, filename: str = None, mode: str = "a") -> None:
        """Open the log file (append mode by default).

        Args:
            filename: Path of the log file; None logs to stdout only.
            mode: Open mode, passed straight through to ``Path.open``.
        """
        # Attribute name "File" kept for backward compatibility with any
        # external code that inspects it.
        self.File = Path(filename).open(mode, encoding="utf-8") if filename else None

    def __del__(self) -> None:
        """Close the log file when the handler is garbage-collected."""
        # getattr guards against a partially-constructed instance
        # (Path.open raising inside __init__ leaves self.File unset).
        handle = getattr(self, "File", None)
        if handle:
            handle.close()

    def _getName(self, serialized: dict[str, Any] = None, kwargs: dict[str, Any] = None) -> str:
        """Best-effort component name for a callback event.

        Prefers an explicit ``name`` in the event's kwargs, then the
        ``serialized`` payload (``name`` key, else the last element of
        the ``id`` path), else ``"<unknown>"``.

        FIX: the original declared ``**kwargs`` while every call site
        passed ``kwargs=kwargs``; the caller's dict ended up nested under
        the key ``"kwargs"``, so ``"name" in kwargs`` was always False.
        Taking ``kwargs`` as an explicit parameter restores the intended
        lookup without changing any call site.
        """
        if kwargs and "name" in kwargs:
            return kwargs["name"]
        if serialized:
            return serialized.get("name", serialized.get("id", ["<unknown>"])[-1])
        return "<unknown>"

    def _logLine(self, message: str, end: str = "\n") -> None:
        """Timestamp *message*, print it, and mirror it to the log file."""
        message = f'[{datetime.now().strftime("%Y/%m/%d, %H:%M:%S")}] {message}'
        print(message, end=end)
        if self.File:
            # flush=True keeps the file current even if the process dies,
            # so the per-hook flush calls of the original were redundant
            # and have been dropped.
            print(message, end=end, file=self.File, flush=True)

    # --- callback hooks -------------------------------------------------
    # Every hook formats one multi-line record via _logLine. Headers are
    # now uniformly "[name] EVENT (run_id, parent_run_id)" (the original
    # had the ids before the event name in the two agent hooks).

    def on_agent_action(self, action, *, run_id, parent_run_id=None, **kwargs):
        self._logLine(
            f"[{self._getName(kwargs=kwargs)}] ON_AGENT_ACTION ({run_id}, {parent_run_id})\n"
            f"\t(action): {action}\n"
            f"\t(kwargs): {kwargs}"
        )

    def on_agent_finish(self, finish, *, run_id, parent_run_id=None, **kwargs):
        self._logLine(
            f"[{self._getName(kwargs=kwargs)}] ON_AGENT_FINISH ({run_id}, {parent_run_id})\n"
            f"\t(finish): {finish}"
        )

    def on_chat_model_start(self, serialized, messages, *, run_id, parent_run_id=None, tags=None, metadata=None, **kwargs):
        self._logLine(
            f"[{self._getName(serialized=serialized, kwargs=kwargs)}] ON_CHAT_MODEL_START ({run_id}, {parent_run_id})\n"
            f"\t(tags): {tags}\n"
            f"\t(metadata): {metadata}\n"
            f"\t(messages): {messages}\n"
            f"\t(serialized): {serialized}\n"
            f"\t(kwargs): {kwargs}"
        )

    def on_retry(self, retry_state, *, run_id, parent_run_id=None, **kwargs):
        self._logLine(
            f"[{self._getName(kwargs=kwargs)}] ON_RETRY ({run_id}, {parent_run_id})\n"
            f"\t(retry_state): {retry_state}\n"
            f"\t(kwargs): {kwargs}"
        )

    def on_text(self, text, *, run_id, parent_run_id=None, **kwargs):
        self._logLine(
            f"[{self._getName(kwargs=kwargs)}] ON_TEXT ({run_id}, {parent_run_id})\n"
            f"\t(text): {text}\n"
            f"\t(kwargs): {kwargs}"
        )

    def on_tool_start(self, serialized, input_str, *, run_id, parent_run_id=None, tags=None, metadata=None, inputs=None, **kwargs):
        self._logLine(
            f"[{self._getName(serialized=serialized, kwargs=kwargs)}] ON_TOOL_START ({run_id}, {parent_run_id})\n"
            f"\t(input_str): {input_str}\n"
            f"\t(tags): {tags}\n"
            f"\t(metadata): {metadata}\n"
            f"\t(inputs): {inputs}\n"
            f"\t(serialized): {serialized}\n"
            f"\t(kwargs): {kwargs}"
        )

    def on_tool_error(self, error, *, run_id, parent_run_id=None, **kwargs):
        self._logLine(
            f"[{self._getName(kwargs=kwargs)}] ON_TOOL_ERROR ({run_id}, {parent_run_id})\n"
            f"\t(error): {error}\n"
            f"\t(kwargs): {kwargs}"
        )

    def on_tool_end(self, output, *, run_id, parent_run_id=None, **kwargs):
        self._logLine(
            f"[{self._getName(kwargs=kwargs)}] ON_TOOL_END ({run_id}, {parent_run_id})\n"
            f"\t(output): {output}\n"
            f"\t(kwargs): {kwargs}"
        )

    def on_retriever_start(self, serialized, query, *, run_id, parent_run_id=None, tags=None, metadata=None, **kwargs):
        self._logLine(
            f"[{self._getName(serialized=serialized, kwargs=kwargs)}] ON_RETRIEVER_START ({run_id}, {parent_run_id})\n"
            f"\t(query): {query}\n"
            f"\t(tags): {tags}\n"
            f"\t(metadata): {metadata}\n"
            f"\t(serialized): {serialized}\n"
            f"\t(kwargs): {kwargs}"
        )

    def on_retriever_error(self, error, *, run_id, parent_run_id=None, **kwargs):
        self._logLine(
            f"[{self._getName(kwargs=kwargs)}] ON_RETRIEVER_ERROR ({run_id}, {parent_run_id})\n"
            f"\t(error): {error}\n"
            f"\t(kwargs): {kwargs}"
        )

    def on_retriever_end(self, documents, *, run_id, parent_run_id=None, **kwargs):
        self._logLine(
            f"[{self._getName(kwargs=kwargs)}] ON_RETRIEVER_END ({run_id}, {parent_run_id})\n"
            f"\t(documents): {documents}\n"
            f"\t(kwargs): {kwargs}"
        )

    def on_llm_start(self, serialized, prompts, *, run_id, parent_run_id=None, tags=None, metadata=None, **kwargs):
        self._logLine(
            f"[{self._getName(serialized=serialized, kwargs=kwargs)}] ON_LLM_START ({run_id}, {parent_run_id})\n"
            f"\t(prompts): {prompts}\n"
            f"\t(tags): {tags}\n"
            f"\t(metadata): {metadata}\n"
            f"\t(serialized): {serialized}\n"
            f"\t(kwargs): {kwargs}"
        )

    def on_llm_error(self, error, *, run_id, parent_run_id=None, **kwargs):
        self._logLine(
            f"[{self._getName(kwargs=kwargs)}] ON_LLM_ERROR ({run_id}, {parent_run_id})\n"
            f"\t(error): {error}\n"
            f"\t(kwargs): {kwargs}"
        )

    def on_llm_new_token(self, token, *, chunk=None, run_id, parent_run_id=None, **kwargs):
        self._logLine(
            f"[{self._getName(kwargs=kwargs)}] ON_LLM_NEW_TOKEN ({run_id}, {parent_run_id})\n"
            f"\t(token): {token}\n"
            f"\t(chunk): {chunk}\n"
            f"\t(kwargs): {kwargs}"
        )

    def on_llm_end(self, response: LLMResult, **kwargs) -> None:
        self._logLine(
            f"[{self._getName(kwargs=kwargs)}] ON_LLM_END\n"
            f"\t(response): {response}\n"
            f"\t(kwargs): {kwargs}"
        )

    def on_chain_start(self, serialized: Dict[str, Any], inputs: Dict[str, Any], **kwargs) -> None:
        self._logLine(
            f"[{self._getName(serialized=serialized, kwargs=kwargs)}] ON_CHAIN_START\n"
            f"\t(inputs): {inputs}\n"
            f"\t(serialized): {serialized}\n"
            f"\t(kwargs): {kwargs}"
        )

    def on_chain_error(self, error, *, run_id, parent_run_id=None, **kwargs):
        self._logLine(
            f"[{self._getName(kwargs=kwargs)}] ON_CHAIN_ERROR ({run_id}, {parent_run_id})\n"
            f"\t(error): {error}\n"
            f"\t(kwargs): {kwargs}"
        )

    def on_chain_end(self, outputs: Dict[str, Any], **kwargs) -> None:
        self._logLine(
            f"[{self._getName(kwargs=kwargs)}] ON_CHAIN_END\n"
            f"\t(outputs): {outputs}\n"
            f"\t(kwargs): {kwargs}"
        )

    def on_custom_event(self, name, data, *, run_id, tags=None, metadata=None, **kwargs):
        self._logLine(
            f"[{self._getName(kwargs=kwargs)}] ON_CUSTOM_EVENT ({run_id})\n"
            f"\t(name): {name}\n"
            f"\t(data): {data}\n"
            f"\t(tags): {tags}\n"
            f"\t(metadata): {metadata}\n"
            f"\t(kwargs): {kwargs}"
        )
# TEST RUN — exercise the handler against a local OpenAI-compatible server.
callbacks = {"callbacks": [LoggingHandler('log.log')]}

llm = ChatOpenAI(
    openai_api_base="http://localhost:1234/v1",  # local server endpoint (e.g. LM Studio)
    openai_api_key="...",                        # placeholder; local servers ignore the key
    model_name="qwen3-0.6b",
)

# FIX: removed the redundant mid-script re-import of HumanMessage and
# SystemMessage — both are already imported at the top of the file.
messages = [
    SystemMessage("Translate the following from English into Italian"),
    HumanMessage("hi!"),
]

# The callbacks dict is a valid RunnableConfig, so it is accepted as the
# second (config) argument of invoke(), wiring the handler into the run.
llm.invoke(messages, callbacks)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Result: