A simple MCP Agent with qwen3
# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "anyio",
#     "pydantic-ai-slim[mcp,openai]",
#     "python-dotenv",
#     "mcp",
#     "typer",
# ]
# ///
"""
Loads up a bunch of MCP servers and runs Qwen3:30B with them configured
as an agent.
I highly recomment creating this alias:
alias qw="uv run --script ~/bin/qwen.py"
"""
import os
import sys

import anyio
import dotenv
import typer
from pydantic_ai import Agent
from pydantic_ai.mcp import MCPServerStdio
from pydantic_ai.models.openai import OpenAIModel
from pydantic_ai.providers.openai import OpenAIProvider

# ──────────────────────────────
# 1. LLM backend (Ollama)
# ──────────────────────────────
llm = OpenAIModel(
    model_name="qwen3:30b",  # the tag you pulled
    provider=OpenAIProvider(
        base_url="http://localhost:11434/v1",  # raw Ollama or your proxy
    ),
)
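# Note: depending on your pydantic-ai version, OpenAIProvider may insist on an
# API key even for a local endpoint. Ollama ignores the value, so passing any
# placeholder works if you hit a missing-key error, e.g.
# OpenAIProvider(base_url="http://localhost:11434/v1", api_key="ollama").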
PWD = os.getcwd()
# ──────────────────────────────
# 2. Assemble the agent
# ──────────────────────────────
# Load BRAVE_API_KEY from ~/.keys
dotenv.load_dotenv(os.path.expanduser("~/.keys"))
brave_api_key = os.getenv("BRAVE_API_KEY", None)
if not brave_api_key:
    brave_api_key = ""  # Or raise an exception if key is required
    print(
        "Warning: BRAVE_API_KEY not found in ~/.keys or environment. "
        "Brave Search functionality may be limited.",
        file=sys.stderr,
    )
agent = Agent(
    model=llm,
    mcp_servers=[
        # Current time and timezone conversions
        MCPServerStdio(
            command="uvx",
            args=[
                "--with", "tzdata",
                "mcp-server-time",
                "--local-timezone=America/New_York",
            ],
        ),
        # Fetch web pages
        MCPServerStdio(
            command="uvx",
            args=["mcp-server-fetch"],
        ),
        # Filesystem access, scoped to the current directory
        MCPServerStdio(
            command="npx",
            args=["-y", "@modelcontextprotocol/server-filesystem", "."],
        ),
        # Web search via Brave, using the key loaded from ~/.keys above
        MCPServerStdio(
            command="npx",
            args=["-y", "@modelcontextprotocol/server-brave-search"],
            env=os.environ | {"BRAVE_API_KEY": brave_api_key},
        ),
    ],
    system_prompt=(
        "You are a helpful assistant. "
        "Whenever an MCP tool can provide a more accurate or up-to-date answer, "
        "call it instead of relying on static knowledge.\n"
        "\n"
        f"Allowed directories: [ '{PWD}' ]\n"
        "\n"
        f"Current working directory is '{PWD}'; any of the user's "
        "references to a folder or directory are in relation to this.\n"
        "\n"
        "You cannot under any circumstance ask the user questions or seek "
        "clarification. The user cannot respond to you. You must take your "
        "best guess based on the information provided.\n"
        "\n"
        "Keep going until you have an answer. Do not under any circumstance "
        "give an answer without having exhausted all possible tools that might "
        "be useful."
    ),
)
# ──────────────────────────────
# 3. Typer CLI
# ──────────────────────────────
def ask(query: str):
    """Single question; prints the final answer."""
    # Anything piped on stdin is appended to the query, so you can do
    # e.g. `cat notes.md | qw "summarize this"`.
    if not sys.stdin.isatty():
        piped = sys.stdin.read()
        query += "\n\n\n" + piped

    async def loop():
        # run_mcp_servers() starts the stdio MCP servers for the duration
        # of the block, then shuts them down.
        async with agent.run_mcp_servers():
            result = await agent.run(query)
            print(result.output)

    anyio.run(loop)


if __name__ == "__main__":
    typer.run(ask)
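
# Example invocations (a sketch, assuming the `qw` alias from the module
# docstring and that the MCP servers above start cleanly on your machine):
#
#     qw "what time is it in Tokyo right now?"
#     cat notes.md | qw "summarize this file"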