@brayevalerien
Created August 8, 2025 16:02
Example of a single-turn tool call with a Groq client. In this example, the LLM can run Python expressions. Requires "GROQ_API_KEY" to be set in the environment or in a .env file.
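To run it, install the groq and python-dotenv packages (for example with pip install groq python-dotenv, assuming those PyPI names), and either export GROQ_API_KEY in your shell or put a GROQ_API_KEY=<your key> line in a .env file next to the script.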
import json

from dotenv import load_dotenv
from groq import Groq

load_dotenv()  # picks up GROQ_API_KEY from a .env file if present
client = Groq()

MODEL = "llama-3.3-70b-versatile"


def calculate(expression):
    """Evaluate a mathematical expression and return the result as a JSON string."""
    try:
        result = eval(expression)  # fine for a demo; eval on model-supplied input is unsafe in production
        return json.dumps({"result": result})
    except Exception:
        return json.dumps({"error": "Invalid expression"})
tools = [
    {
        "type": "function",
        "function": {
            "name": "calculate",
            "description": "Evaluate a Python expression; math functions (e.g. the math module) are not available",
            "parameters": {
                "type": "object",
                "properties": {
                    "expression": {
                        "type": "string",
                        "description": "The mathematical expression to evaluate",
                    }
                },
                "required": ["expression"],
            },
        },
    }
]
function_map = {"calculate": calculate}

user_prompt = "Compute the square root of 33 and return the greatest integer smaller than the result."
messages = [
    {
        "role": "system",
        "content": "You are a helpful assistant with access to some tools. Answer math questions by calling a tool first and then presenting the result inside a sentence. If you need to make multiple function calls, do them all at once. Once you have the function results, present them to the user.",
    },
    {
        "role": "user",
        "content": user_prompt,
    },
]
print(f"[user] {user_prompt}")
chat_completion = client.chat.completions.create(
messages=messages,
tools=tools,
model=MODEL,
)
response = chat_completion.choices[0].message
tool_calls = response.tool_calls
print(f"[{MODEL}] {'(called tools)' if tool_calls else response.content}")
if tool_calls:
    # The assistant message that requested the tool calls must precede the tool results in the conversation.
    messages.append(response)
    for tool_call in tool_calls:
        function_name = tool_call.function.name
        arguments = tool_call.function.arguments
        function = function_map[function_name]
        kwargs = json.loads(arguments)
        result = function(**kwargs)
        messages.append({"tool_call_id": tool_call.id, "role": "tool", "name": function_name, "content": result})
        print(f"[system] Tool call {tool_call.id}: {function_name}({arguments}) -> {result}")
    # Second request: give the tool results back to the model so it can phrase the final answer.
    final_response = client.chat.completions.create(messages=messages, model=MODEL).choices[0].message
    print(f"[{MODEL}] {final_response.content}")
# Output example:
# [user] Compute the square root of 33 and return the greatest integer smaller than the result.
# [llama-3.3-70b-versatile] (called tools)
# [system] Tool call 41csntv49: calculate({"expression":"int(33 ** (1/2))"}) -> {"result": 5}
# [llama-3.3-70b-versatile] The greatest integer smaller than the square root of 33 is $\boxed{5}$.