Sample agent built with the Hugging Face smolagents framework
"""
This script builds an AI agent using SmolAgents, OpenAI GPT, and Serper.dev.
The agent performs a web search using the Serper.dev API and summarizes the results using GPT-4.
"""
# Import required libraries
from smolagents import tool, CodeAgent, LiteLLMModel, HfApiModel, TransformersModel
from smolagents.prompts import CODE_SYSTEM_PROMPT
import http.client
import json
import os
from dotenv import load_dotenv
# Load environment variables from .env file
load_dotenv()
# Define the Web Search Tool using @tool decorator
@tool
def web_search(query: str) -> list:
    """
    Performs a web search using Serper.dev and retrieves the complete results.

    Args:
        query: The search query to perform.

    Returns:
        list: A list of all results retrieved from the web search.
    """
    # Fetch the API key from environment variables
    api_key = os.getenv("SERPER_API_KEY")
    if not api_key:
        raise ValueError("SERPER_API_KEY is not set in the environment variables.")

    # Create the HTTP connection and send the POST request
    conn = http.client.HTTPSConnection("google.serper.dev")
    payload = json.dumps({"q": query})
    headers = {
        "X-API-KEY": api_key,
        "Content-Type": "application/json"
    }
    conn.request("POST", "/search", payload, headers)
    res = conn.getresponse()
    data = res.read()
    conn.close()

    # Parse and return the organic search results
    response_json = json.loads(data.decode("utf-8"))
    results = response_json.get("organic", [])
    print("Full Results:", results)  # Debugging log
    return results
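# Optional sanity check (a sketch, left commented out so the module has no import-time
# side effects): the @tool decorator wraps web_search into a callable smolagents Tool,
# so it can be exercised on its own before wiring it into the agent.
#
# if __name__ == "__main__":
#     print(web_search(query="smolagents framework"))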
# Define the LLM Model
model = LiteLLMModel(
    model_id="gpt-4",                      # Specify the model to use
    api_base="https://api.openai.com/v1",  # OpenAI API endpoint
    api_key=os.getenv("OPENAI_API_KEY")    # OpenAI API key from environment variables
)
# Alternative models:
# model = HfApiModel(token="YOUR_HUGGINGFACEHUB_API_TOKEN")  # Default model - Qwen/Qwen2.5-Coder-32B-Instruct
# model = TransformersModel(model_id="HuggingFaceTB/SmolLM-135M-Instruct")
# model_id = "meta-llama/Llama-3.3-70B-Instruct"
# model = HfApiModel(model_id=model_id, token="YOUR_HUGGINGFACEHUB_API_TOKEN")  # Pro model
modified_system_prompt = CODE_SYSTEM_PROMPT + "\nChanges"  # Append custom instructions to the default system prompt here
# Create the CodeAgent with tools and model
agent = CodeAgent(
    tools=[web_search],  # Register the web_search tool
    model=model,
    # system_prompt=modified_system_prompt,
    verbosity_level=2    # Controls how much logging the agent prints
)
def main():
    """
    Main function to run the AI agent workflow.
    Accepts a user query, performs a web search, and summarizes the results.
    """
    import argparse

    # Set up argument parsing for user input
    parser = argparse.ArgumentParser(description="Run the AI Agent Workflow.")
    parser.add_argument("--query", type=str, help="Query for web search", required=True)
    args = parser.parse_args()

    # Fetch the query from command-line arguments
    query = args.query
    print(f"Running Agent with query: '{query}'")

    # Execute the agent to handle web search and summarization
    response = agent.run(f"Search the web for '{query}', use the complete results, and give a detailed summary.")
    print("Final Output:")
    print(response)


if __name__ == "__main__":
    main()
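# Example invocation (a sketch; the filename "agent.py" is an assumption):
#
#   python agent.py --query "latest developments in AI agents"
#
# With SERPER_API_KEY and OPENAI_API_KEY set in the environment, the agent searches
# the web via Serper.dev and asks GPT-4 for a detailed summary of the organic results.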