Integrating Function Calling with the GPT-4 Omni (GPT-4o) Model
import json

from openai import OpenAI

# Set the OPENAI_API_KEY environment variable to your API key before running
client = OpenAI()

# The get_flight_status implementation is available at
# https://gist.github.com/janakiramm/2143b909626f5f01d64739e3fe90c9c8
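# Hypothetical stand-in for get_flight_status, in case you want to run this
# snippet without the linked gist. The real function looks up live flight data;
# this stub simply echoes the flight number with a canned status as a JSON string.
def get_flight_status(flight):
    return json.dumps({"flight": flight, "status": "unknown (stub)"})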
tools = [
    {
        "type": "function",
        "function": {
            "name": "get_flight_status",
            "description": "Get status of a flight",
            "parameters": {
                "type": "object",
                "properties": {
                    "flight": {
                        "type": "string",
                        "description": "Flight number"
                    }
                },
                "required": ["flight"]
            }
        }
    }
]
def chatbot(prompt):
    # Step 1: send the conversation and the available tools to the model
    messages = [{"role": "user", "content": prompt}]
    response = client.chat.completions.create(
        model="gpt-4o",
        messages=messages,
        tools=tools,
        tool_choice="auto"
    )
    response_message = response.choices[0].message
    tool_calls = response_message.tool_calls

    # Step 2: check whether the model wanted to call a function
    if tool_calls:
        available_functions = {
            "get_flight_status": get_flight_status,
        }
        messages.append(response_message)

        # Step 3: call each requested function and append its result to the conversation
        for tool_call in tool_calls:
            function_name = tool_call.function.name
            function_to_call = available_functions[function_name]
            function_args = json.loads(tool_call.function.arguments)
            function_response = function_to_call(flight=function_args.get("flight"))
            messages.append(
                {
                    "tool_call_id": tool_call.id,
                    "role": "tool",
                    "name": function_name,
                    "content": function_response,
                }
            )

        # Step 4: send the function results back to the model for a final answer
        final_response = client.chat.completions.create(
            model="gpt-4o",
            messages=messages,
        )
        return final_response

    # If the model answered directly without calling a tool, return the first response
    return response

res = chatbot("What's the status of EK226?")
print(res.choices[0].message.content)