Skip to content

Instantly share code, notes, and snippets.

@ibolmo
Created October 8, 2025 16:09
Show Gist options
  • Save ibolmo/a66085f8faea671856269ec6090630d8 to your computer and use it in GitHub Desktop.
# Standard-library import first, then third-party imports (PEP 8 grouping).
# NOTE: the original did `from braintrust import os`, which only works because
# the braintrust package happens to import os internally — import it directly.
import os

from braintrust import init_logger
from braintrust_langchain import BraintrustCallbackHandler, set_global_handler
from langchain.chat_models import init_chat_model
from langchain_core.messages import HumanMessage
from langchain_core.tools import tool
from langchain_openai import ChatOpenAI
from pydantic import BaseModel, Field, SecretStr

# Initialize Braintrust logging for this project. The API key comes from the
# environment so no secret is hard-coded in the script.
init_logger(
    project="langchain-ai-proxy-py",
    api_key=os.environ.get("BRAINTRUST_API_KEY"),
)

# Install a global callback handler so every LangChain run below is traced
# to Braintrust without passing callbacks per-call.
handler = BraintrustCallbackHandler()
set_global_handler(handler)
# 1. Define the tool's input schema using Pydantic.
# This schema declares the arguments the tool accepts.
class WeatherInput(BaseModel):
    """Input schema for the get_weather tool."""

    # Free-form location string; the model fills this in from the user's query.
    location: str = Field(description="The city and state, e.g., San Francisco, CA")
# 2. Define the tool with the @tool decorator, which wraps this plain function
# into a LangChain Tool. The docstring below doubles as the tool description
# the LLM sees, so it must describe the tool accurately.
@tool(args_schema=WeatherInput)
def get_weather(location: str) -> str:
    """Get the current weather for a specific location."""
    # A real implementation would hit a weather API; this demo returns
    # canned responses so the example runs without network access.
    print(f"\nTool Executed: get_weather for {location}")
    normalized = location.lower()
    if "san francisco" in normalized:
        return "The weather in San Francisco is 15°C and sunny with a cool breeze."
    return "The weather is 22°C and cloudy."
# 3. Initialize the model and bind the tools.
# .bind_tools() makes the model aware of the tools it can use.
#
# Equivalent direct construction, kept for reference:
# model = ChatOpenAI(
#     model="publishers/anthropic/models/claude-sonnet-4-5",
#     base_url=f"{os.getenv('BRAINTRUST_API_URL')}/v1/proxy",
#     api_key=SecretStr(os.getenv("BRAINTRUST_API_KEY") or ""),
# )
#
# Route requests through the Braintrust AI proxy (OpenAI-compatible endpoint)
# to an Anthropic model.
model = init_chat_model(
    model="publishers/anthropic/models/claude-sonnet-4-5",
    model_provider="openai",
    base_url=f"{os.getenv('BRAINTRUST_API_URL')}/v1/proxy",
    api_key=SecretStr(os.getenv("BRAINTRUST_API_KEY") or ""),
)

# Bind tools here, not via init_chat_model: init_chat_model forwards unknown
# kwargs (like tools=) to the ChatOpenAI constructor, which does not accept
# them — and the tools were being re-bound on this line anyway.
model_with_tools = model.bind_tools([get_weather])
# 4. Build the message list and invoke the model.
# The conversation starts with the user's question; the model is expected to
# respond with a tool call for get_weather rather than a direct answer.
question = HumanMessage(content="What's the weather like in San Francisco today?")
messages = [question]

print("--- Initial Call to Model ---")
ai_msg = model_with_tools.invoke(messages)

print("\n--- Model's Response (a Tool Call) ---")
print(ai_msg)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment