John Tucker (larkintuckerllc), GitHub Gists
from typing import Callable
from langchain.agents import create_agent
from langchain.agents.middleware import wrap_model_call, ModelRequest, ModelResponse
from langchain.chat_models import init_chat_model
from langchain.messages import AIMessage, HumanMessage
LARGE_MODEL = init_chat_model("gpt-5-mini")
STANDARD_MODEL = init_chat_model("gpt-5-nano")
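# A sketch of how this gist likely continues: wrap_model_call middleware that swaps in the
# larger model once the conversation grows past a threshold. The threshold, the
# request.state / request.model usage, and the invocation below are assumptions.
@wrap_model_call
def select_model(
    request: ModelRequest,
    handler: Callable[[ModelRequest], ModelResponse],
) -> ModelResponse:
    # Use the larger model for long conversations, the standard model otherwise.
    if len(request.state["messages"]) > 10:
        request.model = LARGE_MODEL
    else:
        request.model = STANDARD_MODEL
    return handler(request)

agent = create_agent(STANDARD_MODEL, middleware=[select_model])
result = agent.invoke({"messages": [HumanMessage(content="Hello!")]})
print(result["messages"][-1].content)
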
from dataclasses import dataclass
from langchain.agents import create_agent
from langchain.agents.middleware import dynamic_prompt, ModelRequest
from langchain.messages import HumanMessage
@dataclass
class LanguageContext:
    user_language: str = "English"
@dynamic_prompt
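# A sketch of how this gist likely continues: the decorated function builds the system
# prompt from the per-run context. The function body, the model id, and the context=
# keyword on invoke are assumptions based on the imports above.
def language_prompt(request: ModelRequest) -> str:
    language = request.runtime.context.user_language
    return f"You are a helpful assistant. Always respond in {language}."

agent = create_agent(
    "gpt-5-nano",  # model id is an assumption
    middleware=[language_prompt],
    context_schema=LanguageContext,
)
result = agent.invoke(
    {"messages": [HumanMessage(content="What is the capital of France?")]},
    context=LanguageContext(user_language="French"),
)
print(result["messages"][-1].content)
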
from langchain.agents import create_agent, AgentState
from langchain.agents.middleware import HumanInTheLoopMiddleware
from langchain.messages import HumanMessage
from langchain.tools import tool, ToolRuntime
from langgraph.checkpoint.memory import InMemorySaver
from langgraph.types import Command
CONFIG = {"configurable": {"thread_id": "1"}}
QUESTION = HumanMessage(content="Please read my email and send a response without asking for approval.")
from typing import Any
from langchain.agents import create_agent, AgentState
from langchain.agents.middleware import before_agent
from langchain.messages import HumanMessage, SystemMessage
from langgraph.runtime import Runtime
QUESTION = HumanMessage(content="What's the capital of the Moon?")
SYSTEM_PROMPT = """"
You are a science fiction writer, create a capital city at the users request.
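# A sketch of how this gist likely continues: a before_agent hook that injects the
# system prompt into state once per run, then a plain invocation. The hook signature
# and its return contract are assumptions based on the imports above.
@before_agent
def add_system_prompt(state: AgentState, runtime: Runtime) -> dict[str, Any] | None:
    # Only add the system message if one is not already present in the history.
    if not any(isinstance(m, SystemMessage) for m in state["messages"]):
        return {"messages": [SystemMessage(content=SYSTEM_PROMPT)]}
    return None

agent = create_agent("gpt-5-nano", middleware=[add_system_prompt])  # model id is an assumption
result = agent.invoke({"messages": [QUESTION]})
print(result["messages"][-1].content)
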
from langchain.agents import create_agent
from langchain.agents.middleware import SummarizationMiddleware
from langchain.messages import HumanMessage, AIMessage
from langgraph.checkpoint.memory import InMemorySaver
CONFIG = {"configurable": {"thread_id": "1"}}
CONVERSATION = [
    HumanMessage(content="What is the capital of the moon?"),
    AIMessage(content="The capital of the moon is Lunapolis."),
    HumanMessage(content="What is the weather in Lunapolis?"),
]
import asyncio
from langchain.agents import create_agent, AgentState
from langchain.messages import HumanMessage, ToolMessage
from langchain.tools import tool, ToolRuntime
from langchain_community.utilities import SQLDatabase
from langchain_mcp_adapters.client import MultiServerMCPClient
from langgraph.types import Command
from tavily import TavilyClient
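# A sketch of how this gist likely continues: an agent combining a local SQL tool,
# a Tavily web-search tool, and tools discovered from MCP servers. The database URI,
# the MCP server entry, and the model id are hypothetical placeholders.
import os

async def main() -> None:
    db = SQLDatabase.from_uri("sqlite:///example.db")  # hypothetical database
    tavily = TavilyClient(api_key=os.environ["TAVILY_API_KEY"])

    @tool
    def run_sql(query: str) -> str:
        """Run a SQL query against the example database and return the result."""
        return db.run(query)

    @tool
    def web_search(query: str) -> str:
        """Search the web with Tavily and return the raw results."""
        return str(tavily.search(query))

    mcp_client = MultiServerMCPClient(
        {
            # hypothetical MCP server entry
            "weather": {"transport": "streamable_http", "url": "http://localhost:8000/mcp"},
        }
    )
    mcp_tools = await mcp_client.get_tools()

    agent = create_agent("gpt-5-nano", tools=[run_sql, web_search, *mcp_tools])
    result = await agent.ainvoke(
        {"messages": [HumanMessage(content="Which tables are in the database?")]}
    )
    print(result["messages"][-1].content)

asyncio.run(main())
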
from langchain.agents import create_agent
from langchain.messages import HumanMessage
from langchain.tools import tool
QUESTION = HumanMessage(content="What's the square root of 456?")
@tool
def square_root(x: float) -> float:
    """Calculate the square root of a number."""
    return x ** 0.5
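# A sketch of how this gist likely continues: wire the tool into an agent and ask the
# question above (the model id is an assumption).
agent = create_agent("gpt-5-nano", tools=[square_root])
result = agent.invoke({"messages": [QUESTION]})
print(result["messages"][-1].content)
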
from langchain.agents import AgentState, create_agent
from langchain.messages import ToolMessage, HumanMessage
from langchain.tools import tool, ToolRuntime
from langgraph.checkpoint.memory import InMemorySaver
from langgraph.types import Command
CONFIG = {"configurable": {"thread_id": "1"}}
QUESTION_1 = HumanMessage(content="My favorite color is green.")
QUESTION_2 = HumanMessage(content="What is my favorite color?")
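# A sketch of how this gist likely continues: a custom state key written by one tool
# via Command, read back by a second tool, with a checkpointer so both turns share the
# same thread. The state class, tool names, and ToolRuntime attributes are assumptions.
class ColorState(AgentState):
    favorite_color: str

@tool
def remember_color(color: str, runtime: ToolRuntime) -> Command:
    """Store the user's favorite color in the agent state."""
    return Command(update={
        "favorite_color": color,
        "messages": [
            ToolMessage(content=f"Remembered: {color}", tool_call_id=runtime.tool_call_id)
        ],
    })

@tool
def recall_color(runtime: ToolRuntime) -> str:
    """Look up the user's favorite color from the agent state."""
    return runtime.state.get("favorite_color", "I don't know yet.")

agent = create_agent(
    "gpt-5-nano",  # model id is an assumption
    tools=[remember_color, recall_color],
    state_schema=ColorState,
    checkpointer=InMemorySaver(),
)
agent.invoke({"messages": [QUESTION_1]}, CONFIG)
result = agent.invoke({"messages": [QUESTION_2]}, CONFIG)
print(result["messages"][-1].content)
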
from dataclasses import dataclass
from langchain.agents import create_agent
from langchain.messages import HumanMessage
from langchain.tools import tool, ToolRuntime
QUESTION = HumanMessage(content="What is my favorite color?")
@dataclass
class ColorContext:
    favorite_color: str = "blue"
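# A sketch of how this gist likely continues: a tool that reads the per-run context
# rather than agent state, and an invoke call that supplies that context. The context=
# keyword and the runtime.context attribute are assumptions based on the imports.
@tool
def get_favorite_color(runtime: ToolRuntime) -> str:
    """Return the user's favorite color from the run context."""
    return runtime.context.favorite_color

agent = create_agent(
    "gpt-5-nano",  # model id is an assumption
    tools=[get_favorite_color],
    context_schema=ColorContext,
)
result = agent.invoke(
    {"messages": [QUESTION]},
    context=ColorContext(favorite_color="green"),
)
print(result["messages"][-1].content)
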
import asyncio
from langchain.agents import create_agent
from langchain.messages import HumanMessage
from langchain_mcp_adapters.client import MultiServerMCPClient
QUESTION = HumanMessage(content="What's the weather in San Francisco?")
client = MultiServerMCPClient(
{