Autogen Agent that can auto execute a function_call
# %%
# NOTE: this gist targets the legacy pyautogen 0.1.x API (oai.ChatCompletion,
# extract_text_or_function_call); newer autogen releases replaced these entry points.
import os
import openai
from autogen import Agent, ConversableAgent, oai, UserProxyAgent
from dotenv import load_dotenv, find_dotenv
from typing import Any, Callable, Dict, List, Optional, Tuple, Union

_ = load_dotenv(find_dotenv())  # read local .env file
openai.api_key = os.environ["OPENAI_API_KEY"]
# openai.log='debug'
# %%
config_list = [
    {
        "model": "gpt-3.5-turbo",
    }
]

# llm_config advertising a single callable, order_search, described with the
# OpenAI function-calling schema (JSON Schema "parameters").
llm_config_order_search = {
    "use_cache": False,
    "model": "gpt-3.5-turbo",
    "temperature": 0,
    "config_list": config_list,
    "functions": [
        {
            "name": "order_search",
            "description": "search for order status",
            "parameters": {
                "type": "object",
                "properties": {
                    "order_number": {
                        "type": "integer",
                        "description": "Order number",
                    },
                    "customer_number": {
                        "type": "string",
                        "description": "Customer number",
                    },
                },
                "required": ["order_number", "customer_number"],
            },
        },
    ],
}
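
# For reference, under the schema above the model's reply carries the call in a
# "function_call" field with JSON-encoded arguments. The shape below is illustrative
# (placeholder values, not output from a real run):
# {
#     "role": "assistant",
#     "content": None,
#     "function_call": {
#         "name": "order_search",
#         "arguments": '{"order_number": 12345, "customer_number": "C-001"}',
#     },
# }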
# %%
class SelfExecutionConversableAgent(ConversableAgent):
    DEFAULT_SYSTEM_MESSAGE = """You are a helpful AI assistant.
Solve tasks using your coding and language skills.
In the following cases, suggest python code (in a python coding block) or shell script (in a sh coding block) for the user to execute.
1. When you need to collect info, use the code to output the info you need, for example, browse or search the web, download/read a file, print the content of a webpage or a file, get the current date/time, check the operating system. After sufficient info is printed and the task is ready to be solved based on your language skill, you can solve the task by yourself.
2. When you need to perform some task with code, use the code to perform the task and output the result. Finish the task smartly.
Solve the task step by step if you need to. If a plan is not provided, explain your plan first. Be clear which step uses code, and which step uses your language skill.
When using code, you must indicate the script type in the code block. The user cannot provide any other feedback or perform any other action beyond executing the code you suggest. The user can't modify your code. So do not suggest incomplete code which requires users to modify. Don't use a code block if it's not intended to be executed by the user.
If you want the user to save the code in a file before executing it, put # filename: <filename> inside the code block as the first line. Don't include multiple code blocks in one response. Do not ask users to copy and paste the result. Instead, use 'print' function for the output when relevant. Check the execution result returned by the user.
If the result indicates there is an error, fix the error and output the code again. Suggest the full code instead of partial code or code changes. If the error can't be fixed or if the task is not solved even after the code is executed successfully, analyze the problem, revisit your assumption, collect additional info you need, and think of a different approach to try.
When you find an answer, verify the answer carefully. Include verifiable evidence in your response if possible.
Reply "TERMINATE" in the end when everything is done.
"""

    def __init__(
        self,
        name: str,
        system_message: Optional[str] = DEFAULT_SYSTEM_MESSAGE,
        llm_config: Optional[Union[Dict, bool]] = None,
        is_termination_msg: Optional[Callable[[Dict], bool]] = None,
        max_consecutive_auto_reply: Optional[int] = None,
        human_input_mode: Optional[str] = "NEVER",
        code_execution_config: Optional[Union[Dict, bool]] = False,
        **kwargs,
    ):
        super().__init__(
            name,
            system_message,
            is_termination_msg,
            max_consecutive_auto_reply,
            human_input_mode,
            code_execution_config=code_execution_config,
            llm_config=llm_config,
            **kwargs,
        )
        # register_reply inserts at position 0 by default, so handlers run in
        # reverse registration order: the termination/human-reply check first,
        # the plain generate_oai_reply last.
        self._reply_func_list = []
        self.register_reply([Agent, None], ConversableAgent.generate_oai_reply)
        self.register_reply([Agent, None], SelfExecutionConversableAgent.self_execute_function_call_reply)
        self.register_reply([Agent, None], SelfExecutionConversableAgent.custom_generate_oai_reply)
        self.register_reply([Agent, None], ConversableAgent.generate_code_execution_reply)
        self.register_reply([Agent, None], ConversableAgent.generate_function_call_reply)
        self.register_reply([Agent, None], ConversableAgent.check_termination_and_human_reply)

    def custom_generate_oai_reply(
        self,
        messages: Optional[List[Dict]] = None,
        sender: Optional[Agent] = None,
        config: Optional[Any] = None,
    ) -> Tuple[bool, Union[str, Dict, None]]:
        """Generate a reply using autogen.oai, then record it as a received message
        so the next handler can act on any function_call it contains."""
        llm_config = self.llm_config if config is None else config
        if llm_config is False:
            return False, None
        if messages is None:
            messages = self._oai_messages[sender]
        # TODO: #1143 handle token limit exceeded error
        response = oai.ChatCompletion.create(
            context=messages[-1].pop("context", None),
            messages=self._oai_system_message + messages,
            **llm_config,
        )
        reply_message = oai.ChatCompletion.extract_text_or_function_call(response)[0]
        self._process_received_message(reply_message, sender, True)
        # Returning final=False lets self_execute_function_call_reply run next.
        return False, reply_message

    def self_execute_function_call_reply(
        self,
        messages: Optional[List[Dict]] = None,
        sender: Optional[Agent] = None,
        config: Optional[Any] = None,
    ):
        """Execute a pending function_call locally instead of bouncing it back to the user."""
        print("self_execute_function_call_reply")
        if config is None:
            config = self
        if messages is None:
            messages = self._oai_messages[sender]
        message = messages[-1]
        if "function_call" in message:
            _, func_return = self.execute_function(message["function_call"])
            self._process_received_message(func_return, sender, True)
            return False, func_return
        return True, message.get("content")
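
# %%
# Minimal smoke-test sketch (hypothetical values): the self-execution path can be
# exercised without an LLM round trip by handing execute_function a pre-built
# function_call dict; it parses the JSON arguments and dispatches via function_map.
# demo = SelfExecutionConversableAgent(
#     name="demo",
#     llm_config=False,
#     function_map={"order_search": lambda order_number, customer_number: "Order status: delivered"},
# )
# ok, result = demo.execute_function(
#     {"name": "order_search", "arguments": '{"order_number": 1, "customer_number": "C-1"}'}
# )
# print(ok, result)  # expect: True and a role="function" result message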
# %%
def order_search(order_number, customer_number):
    """Stub lookup: a real implementation would query an order system."""
    return "Order status: delivered"
order_assistant = SelfExecutionConversableAgent(
    name="order_assistant",
    llm_config=llm_config_order_search,
    code_execution_config=False,
    system_message="""Order assistant, you help the user find the status of their order.
Only use the tools provided to do the search. Only execute the search after you have all the information needed.
Ask the user for the information you need to perform the search, always add the word "BRKT" at the end of your question.
When you respond with the status add the word TERMINATE""",
    function_map={
        "order_search": order_search
    },
)
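# Note: the "order_search" key in function_map must match the "name" declared in the
# functions schema above; execute_function resolves the callable by that name.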
user_proxy = UserProxyAgent(
    name="user_proxy",
    human_input_mode="ALWAYS",
    is_termination_msg=lambda x: x.get("content", "") and x.get("content", "").rstrip().endswith("TERMINATE"),
    max_consecutive_auto_reply=3,
    code_execution_config=False,
)

user_proxy.initiate_chat(
    order_assistant,
    clear_history=True,
    message="""
Was my order delivered?
""",
)
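
# Expected flow, per the system messages above: the assistant asks for the order and
# customer numbers (ending its question with "BRKT"), runs order_search itself once it
# has both, and replies with the status plus TERMINATE, which the user_proxy's
# is_termination_msg lambda uses to end the chat.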