Last active
October 28, 2025 02:15
-
-
Save sboesen/3cc414ef0fc04ed1c646c3fb6c6b7e38 to your computer and use it in GitHub Desktop.
How tool calls work under the hood
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env -S uv run
# /// script
# dependencies = ["openai"]
# ///
# Repurposed from https://github.com/willccbb/agent-engineering/blob/main/lectures-1-through-4/lec1-agent-patterns/agent_patterns.ipynb
import json

from openai import OpenAI

client = OpenAI()

# The system message is what teaches the model how to "call" a tool here:
# it spells out the exact JSON shape to emit when it wants the weather tool.
system_prompt = """
You can call a weather tool.
Return JSON like this:
{"tool": "weather", "args": {"city": "Tokyo", "country": "Japan", "scale": "celsius"}}
"""
# A tiny fake tool to demonstrate what happens after the model calls it.
def weather(city, country, scale):
    """Pretend to look up the weather; every location is always 20 degrees."""
    location = f"{city}, {country}"
    return f"The weather in {location} is 20° {scale}."
def _extract_json(text):
    """Strip optional markdown code fences the model may wrap around its JSON reply."""
    text = text.strip()
    if text.startswith("```"):
        # Drop the opening fence line (which may carry a "json" language tag)
        # and the trailing closing fence, keeping only the payload between them.
        text = text.split("\n", 1)[1] if "\n" in text else text.lstrip("`")
        text = text.rsplit("```", 1)[0]
    return text.strip()

# Ask the model to decide what tool to call.
response = client.chat.completions.create(
    model="gpt-4.1-mini",
    messages=[
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": "What's the weather in Tokyo?"},
    ],
)

# The model's text reply is expected to be JSON, but chat models often wrap
# JSON in markdown code fences — strip them before parsing so the demo
# doesn't crash with a JSONDecodeError on an otherwise-valid reply.
tool_call = json.loads(_extract_json(response.choices[0].message.content))

# Guard the dispatch: only run the one tool the prompt actually advertised.
if tool_call.get("tool") != "weather":
    raise ValueError(f"Unexpected tool requested: {tool_call.get('tool')!r}")

# Run the chosen tool with its arguments.
result = weather(
    city=tool_call["args"]["city"],
    country=tool_call["args"]["country"],
    scale=tool_call["args"]["scale"],
)
print(result)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment