Skip to content

Instantly share code, notes, and snippets.

@codefromthecrypt
Created May 28, 2025 11:43
Show Gist options
  • Save codefromthecrypt/ce4ebb2e2a33a3cd50988a41edaa96b0 to your computer and use it in GitHub Desktop.
Save codefromthecrypt/ce4ebb2e2a33a3cd50988a41edaa96b0 to your computer and use it in GitHub Desktop.
openai-agents using OpenAI Responses API with MCP traced with OpenTelemetry
# Add OpenAI and OpenTelemetry ENV variables to .env and run like this:
# uv run -q --env-file .env main.py
#
# Note: Use a larger model like qwen3:14b, if you are hosting your own models.
#
# /// script
# requires-python = ">=3.13"
# dependencies = [
# "openai-agents",
# "elastic-opentelemetry",
# "elastic-opentelemetry-instrumentation-openai",
# "opentelemetry-instrumentation-httpx"
# ]
# ///
import asyncio
import os
from agents import Agent, HostedMCPTool, ModelSettings, OpenAIProvider, Runner
from agents.tracing import GLOBAL_TRACE_PROVIDER
from opentelemetry.instrumentation import auto_instrumentation
# Shut down the global tracer as it sends to the OpenAI "/traces/ingest"
# endpoint, which we aren't using and doesn't exist on alternative backends
# like Ollama.
GLOBAL_TRACE_PROVIDER.shutdown()
# Programmatically activate OpenTelemetry auto-instrumentation (the in-code
# equivalent of launching under `opentelemetry-instrument`), so the OpenAI and
# httpx instrumentations declared in the script dependencies start recording.
auto_instrumentation.initialize()
async def main():
    """Ask an MCP-equipped agent which language the Elasticsearch repo uses.

    The chat model is configurable through the CHAT_MODEL environment
    variable and defaults to gpt-4o-mini. The answer is printed to stdout.
    """
    # Resolve the model through the provider so alternative OpenAI-compatible
    # backends (e.g. Ollama) picked up from the environment also work.
    chat_model = OpenAIProvider().get_model(os.getenv("CHAT_MODEL", "gpt-4o-mini"))

    # Hosted MCP tool served by gitmcp for the elastic/elasticsearch repo;
    # approval is disabled so tool calls run without interactive prompts.
    gitmcp_tool = HostedMCPTool(
        tool_config={
            "type": "mcp",
            "server_label": "gitmcp",
            "server_url": "https://gitmcp.io/elastic/elasticsearch",
            "require_approval": "never",
        }
    )

    repo_agent = Agent(
        name="elasticsearch_agent",
        tools=[gitmcp_tool],
        model=chat_model,
        # temperature=0 keeps the response as deterministic as the backend allows.
        model_settings=ModelSettings(temperature=0),
    )

    result = await Runner.run(repo_agent, "Which language is this repo written in?")
    print(result.final_output)
# Script entry point: drive the async workflow on a fresh event loop.
if __name__ == "__main__":
    asyncio.run(main())
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment