@rajivmehtaflex
Last active July 13, 2024 12:23
HuggingFace Agentic Flow
abacusai
datasets
huggingface_hub
sentence-transformers
faiss-cpu
serpapi
google-search-results
openai
langchain
langchain-community
langchain-core
langchain-experimental
langchain-openai
langchain-text-splitters
langchainhub
duckdb
loguru
pandas
e2b_code_interpreter
git+https://github.com/huggingface/transformers.git#egg=transformers[agents]
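A minimal setup sketch, assuming the dependency list above is saved as requirements.txt (the gist does not name the file):

pip install -r requirements.txt

The final entry installs transformers directly from GitHub with the agents extra, which provides the ReactCodeAgent and llm_engine helpers used in the scripts below.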
import os
from openai import OpenAI
from transformers.agents.llm_engine import MessageRole, get_clean_message_list
from transformers import ReactCodeAgent
from abacusai import ApiClient
MODEL_NAME = "CLAUDE_V3_SONNET"
OPENAI_API_KEY='<KEY>'
system_message = "Respond with a professional Mode"
max_tokens = 2000
temperature = 0.35
openai_role_conversions = {
MessageRole.TOOL_RESPONSE: "user",
}
class AbacusAIEngine:
    def __init__(self, model_name=MODEL_NAME):
        self.model_name = model_name
        self.client = ApiClient(api_key=OPENAI_API_KEY)

    def __call__(self, messages, stop_sequences=[]):
        # Normalize the agent's message history into the role format expected by the API
        messages = get_clean_message_list(
            messages, role_conversions=openai_role_conversions
        )
        response = self.client.evaluate_prompt(
            prompt=messages,
            system_message=system_message,
            llm_name=self.model_name,
            max_tokens=max_tokens,
            temperature=temperature,
        )
        # Abacus AI returns the text directly on response.content
        # (an OpenAI-style client would use response.choices[0].message.content instead)
        return response.content
openai_engine = AbacusAIEngine(model_name=MODEL_NAME)
agent = ReactCodeAgent(llm_engine=openai_engine, tools=[])

# Intentionally buggy snippet for the agent to debug (calls a list and overruns its length)
code = """
list=[0, 1, 2]
for i in range(4):
    print(list(i))
"""

final_answer = agent.run(
    "I have some code that creates a bug: please debug it and return the final code",
    code=code,
)
print(f'Response->{final_answer}')
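The engine can also be smoke-tested outside the agent loop by calling it directly; a minimal sketch with a made-up prompt, assuming the Abacus AI key above is valid, since __call__ accepts the same {"role": ..., "content": ...} dictionaries the agent passes in:

# Optional sanity check of the engine on its own (hypothetical prompt)
sample_messages = [{"role": "user", "content": "Reply with a one-sentence greeting."}]
print(openai_engine(sample_messages))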
import os
from langchain_core.tools import tool
from langchain_community.agent_toolkits.load_tools import load_tools
from openai import OpenAI
from transformers import Tool, ReactCodeAgent
from transformers.agents.llm_engine import MessageRole, get_clean_message_list
from loguru import logger

logger.add("./server.log", format="{time} {level} {message}", level="INFO")

MODEL_NAME = "gemini-1.5-pro"
OPENAI_API_KEY = '<KEY>'
BASE_URL = 'https://llm.mdb.ai/'  # OpenAI-compatible API endpoint

# Map agent tool responses back to the "user" role before sending them to the LLM
openai_role_conversions = {
    MessageRole.TOOL_RESPONSE: "user",
}
class OpenAIEngine:
    def __init__(self, model_name=MODEL_NAME):
        self.model_name = model_name
        self.client = OpenAI(
            api_key=OPENAI_API_KEY,
            base_url=BASE_URL,
        )

    def __call__(self, messages, stop_sequences=[]):
        # Normalize the agent's message history into OpenAI chat format
        messages = get_clean_message_list(
            messages, role_conversions=openai_role_conversions
        )
        # Get the LLM completion
        response = self.client.chat.completions.create(
            model=self.model_name,
            messages=messages,
            stop=stop_sequences,
        )
        return response.choices[0].message.content
@tool
def multiply(a: float, b: float) -> float:
    """Multiply two numbers and return their product (a times b)."""
    logger.info(f"multiply called with a={a}, b={b}")
    return a * b
@tool
def store_data_to_file(data: str, filename: str) -> bool:
    """Store the given data in the file system under the given filename."""
    logger.info(f"store_data_to_file called with data={data}, filename={filename}")
    with open(filename, "w") as f:
        f.write(data)
    return True
if __name__ == "__main__":
    # Wrap the LangChain tools so the transformers agent can call them
    multiply_tool = Tool.from_langchain(multiply)
    store_data_to_file_tool = Tool.from_langchain(store_data_to_file)

    openai_engine = OpenAIEngine(model_name=MODEL_NAME)
    agent = ReactCodeAgent(llm_engine=openai_engine, tools=[multiply_tool, store_data_to_file_tool])

    # Simple REPL: type a task, or "exit" to quit
    while True:
        user_input = input(">>> ")
        if user_input == "exit":
            break
        response = agent.run(user_input)
        print(response)
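The load_tools import above is left unused in the script; a possible extension (a sketch, assuming a SERPAPI_API_KEY is set in the environment, which the serpapi / google-search-results requirements already support) is to wrap a LangChain-provided search tool the same way the custom tools are wrapped:

# Hypothetical extension, reusing openai_engine and the tools defined above:
# expose a SerpAPI web-search tool to the agent alongside the custom tools
search_tool = Tool.from_langchain(load_tools(["serpapi"])[0])
agent = ReactCodeAgent(
    llm_engine=openai_engine,
    tools=[multiply_tool, store_data_to_file_tool, search_tool],
)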