Skip to content

Instantly share code, notes, and snippets.

@mn9891
Last active March 7, 2024 19:57
Show Gist options
  • Save mn9891/45655258e6c941583c8ea6b646f01cbe to your computer and use it in GitHub Desktop.
Save mn9891/45655258e6c941583c8ea6b646f01cbe to your computer and use it in GitHub Desktop.
testing tools/ function calling
## .. same code as in openai_tools_calls.py https://github.com/vllm-project/vllm/blob/2aa504870b53a1f2ac9eeb626bbc274994dd75a4/examples/openai_tools_calls.py
## check the bottom for the output
## start asking about weather in Paris
# Step 1: ask about the weather in Paris and collect the assistant's reply
# (streamed or not) together with any tool calls it requests.
print("Asking about weather in Paris")
messages = [
    {"role": "user", "content": question}
]
response = client.chat.completions.create(
    model=model,
    messages=messages,
    tools=tools,
    stream=stream,
    tool_choice='auto',
    temperature=temperature,
    # extra_body=EXTRA_BODY_OPENAI,
)
response_message = ""
tool_calls = []
if stream:
    # Streaming: accumulate text deltas; the tool calls arrive on the final
    # chunk, whose finish_reason is "tool_calls".
    text_message = ""
    for chunk in response:
        if chunk.choices[0].finish_reason is not None:
            if chunk.choices[0].finish_reason == "tool_calls":
                tool_calls += chunk.choices[0].delta.tool_calls
                # print("TEST : %s" % chunk.choices[0].delta.tool_calls)
            break
        if chunk.choices[0].delta.content is not None:
            text_message += chunk.choices[0].delta.content
    # Rebuild a plain-dict assistant message from the streamed pieces so it
    # can be appended back onto the conversation later.
    response_message = {
        "role": "assistant",
        "content": text_message,
        "tool_calls": tool_calls,
    }
    # print(str(response_message))
else:
    if not response.choices:
        # Bug fix: the original printed "Nothing generated" and then indexed
        # response.choices[0] anyway, raising IndexError. Fail explicitly.
        raise RuntimeError("Nothing generated")
    response_message = response.choices[0].message
    if response_message.tool_calls is not None:
        tool_calls = response_message.tool_calls
    else:
        print("The tool_calls response is null ?!")
# Step 2: check if the model wanted to call a function
if len(tool_calls):
    # Step 3: call the function
    # Note: the JSON response may not always be valid; be sure to handle errors
    available_functions = {
        "get_current_weather": get_current_weather,
        "get_current_date_utc": get_current_date_utc,
    }
    messages.append(response_message)  # extend conversation with assistant's reply
    # Step 4: send the info for each function call and function response to the model
    for tool_call in tool_calls:
        function_name = tool_call.function.name
        if function_name not in available_functions:
            # Bug fix: the original printed a warning but still appended a
            # tool message built from an unbound (or stale) function_response,
            # which raises NameError on the first iteration. Skip instead.
            # (Also fixed spelling of the warning message.)
            print("The model hallucinated a function : %s" % function_name)
            continue
        function_to_call = available_functions[function_name]
        if function_name == "get_current_weather":
            # Arguments come back as a JSON string; may be invalid JSON.
            function_args = json.loads(tool_call.function.arguments)
            function_response = function_to_call(
                location=function_args.get("location"),
                unit=function_args.get("unit"),
            )
        else:
            # get_current_date_utc takes no arguments.
            function_response = function_to_call()
        messages.append({
            "tool_call_id": tool_call.id,
            "role": "tool",
            "name": function_name,
            "content": function_response,
        })  # extend conversation with function response
    second_response = client.chat.completions.create(
        model=model,
        messages=messages,
        # extra_body=EXTRA_BODY_OPENAI
    )  # get a new response from the model where it can see the function response
    second_response_message = {
        "role": "assistant",
        "content": second_response.choices[0].message.content,
    }
    messages.append(second_response_message)
##### keep the conversation and ask about Tokyo
print("Now asking about weather in Tokyo")
messages.append({"role": "user", "content": "How about the weather in Tokyo?"})
third_response = client.chat.completions.create(
    model=model,
    messages=messages,
    tools=tools,
    stream=stream,
    tool_choice='auto',
    temperature=temperature,
    # extra_body=EXTRA_BODY_OPENAI,
)
response_message = ""
tool_calls = []
if stream:
    # Streaming: same accumulation pattern as the first (Paris) request.
    text_message = ""
    for chunk in third_response:
        if chunk.choices[0].finish_reason is not None:
            if chunk.choices[0].finish_reason == "tool_calls":
                tool_calls += chunk.choices[0].delta.tool_calls
                # print("TEST : %s" % chunk.choices[0].delta.tool_calls)
            break
        if chunk.choices[0].delta.content is not None:
            text_message += chunk.choices[0].delta.content
    response_message = {
        "role": "assistant",
        "content": text_message,
        "tool_calls": tool_calls,
    }
    # print(str(response_message))
else:
    if not third_response.choices:
        # Bug fix: the original printed "Nothing generated" and then indexed
        # third_response.choices[0] anyway, raising IndexError.
        raise RuntimeError("Nothing generated")
    response_message = third_response.choices[0].message
    if response_message.tool_calls is not None:
        tool_calls = response_message.tool_calls
    else:
        print("The tool_calls response is null ?!")
# Step 2: check if the model wanted to call a function
if len(tool_calls):
    # Step 3: call the function
    # Note: the JSON response may not always be valid; be sure to handle errors
    available_functions = {
        "get_current_weather": get_current_weather,
        "get_current_date_utc": get_current_date_utc,
    }
    messages.append(response_message)  # extend conversation with assistant's reply
    # Step 4: send the info for each function call and function response to the model
    for tool_call in tool_calls:
        function_name = tool_call.function.name
        if function_name not in available_functions:
            # Bug fix: skip hallucinated functions instead of falling through
            # and appending a tool message with an unbound/stale
            # function_response (NameError on first iteration).
            print("The model hallucinated a function : %s" % function_name)
            continue
        function_to_call = available_functions[function_name]
        if function_name == "get_current_weather":
            function_args = json.loads(tool_call.function.arguments)
            function_response = function_to_call(
                location=function_args.get("location"),
                unit=function_args.get("unit"),
            )
        else:
            # get_current_date_utc takes no arguments.
            function_response = function_to_call()
        messages.append({
            "tool_call_id": tool_call.id,
            "role": "tool",
            "name": function_name,
            "content": function_response,
        })  # extend conversation with function response
    fourth_response = client.chat.completions.create(
        model=model,
        messages=messages,
        # extra_body=EXTRA_BODY_OPENAI
    )  # get a new response from the model where it can see the function response
    fourth_response_message = {
        "role": "assistant",
        # Bug fix: the original read second_response here (copy-paste error),
        # recording the Paris answer as the Tokyo reply.
        "content": fourth_response.choices[0].message.content,
    }
    messages.append(fourth_response_message)
# Dump the full conversation for inspection.
for it_msg, msg in enumerate(messages):
    print("Message %i:\n %s\n" % (it_msg, str(msg)))
#### stdout
>> Asking about weather in Paris
>> Calling get_current_weather client side : ("Paris, France", celsius)
>> Now asking about weather in Tokyo
>> The tool_calls response is null ?!
## since tool_calls is None, no further messages are added:
Message 0:
{'role': 'user', 'content': "What's the weather like in Paris ?"}
Message 1:
ChatCompletionMessage(content=None, role='assistant', function_call=None, tool_calls=[ChatCompletionMessageToolCall(id='call_get_current_weather_0', function=Function(arguments='{"location": "Paris, France", "unit": "celsius"}', name='get_current_weather'), type='function', index=0)])
Message 2:
{'tool_call_id': 'call_get_current_weather_0', 'role': 'tool', 'name': 'get_current_weather', 'content': '{"location": "Paris", "temperature": "22", "unit": "celsius"}'}
Message 3:
{'role': 'assistant', 'content': 'The current temperature in Paris is 22 degrees Celsius.'}
Message 4:
{'role': 'user', 'content': 'How about the weather in Tokyo?'}
## bonus: third_response: tool_calls=None and content= call_get_current_weather_0 was ...
print(third_response.choices[0])
Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='call_get_current_weather_0 was called with arguments :{"location": "Tokyo, Japan", "unit": "celsius"}\n\n[\n {\'name\': "get_current_weather", \'description\': "Get the current weather in a given location", \'arguments\': { {\'type\': \'object\', \'properties\': {\'location\': {\'type\': \'string\', \'description\': \'The city and state, e.g. San Francisco, CA as a string\'}, \'unit\': {\'type\': \'string\', \'enum\': [\'celsius\', \'fahrenheit\']}}, \'required\': [\'location\']} }},\n]', role='assistant', function_call=None, tool_calls=None))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment