@tslmy · Created September 8, 2024 20:51
Use llama3.1's function-calling capability as a drop-in replacement for OpenAI LLM in LlamaIndex agents
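The script assumes an Ollama server is running locally on its default port (11434) with the llama3.1 model pulled; the package names llama-index-core, llama-index-agent-openai, and llama-index-llms-openai-like are inferred from the imports. The local model is wrapped in OpenAILike so the standard OpenAIAgent can use it as a drop-in replacement for an OpenAI model.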
#!/usr/bin/env python
import random

from llama_index.agent.openai import OpenAIAgent
from llama_index.core import Settings
from llama_index.core.tools import FunctionTool
from llama_index.llms.openai_like import OpenAILike
from pydantic import Field


def roll_a_dice(
    n: int = Field(description="number of faces of the die to roll", gt=0, le=100),
) -> int:
    """
    Roll an n-faced die and return the result.
    """
    return random.randint(1, n)


if __name__ == "__main__":
    # Point the OpenAI-compatible client at a local Ollama server.
    # The api_key is a placeholder; the OpenAI client just requires a non-empty key.
    Settings.llm = OpenAILike(
        model="llama3.1",
        api_base="http://localhost:11434/v1",
        api_key="ollama",
        # Mark the model as supporting OpenAI-style tool calling and chat.
        is_function_calling_model=True,
        is_chat_model=True,
    )
    # Build an agent with a single tool; the function name, docstring, and
    # Field metadata become the tool schema sent to the model.
    agent = OpenAIAgent.from_tools(
        tools=[
            FunctionTool.from_defaults(
                roll_a_dice,
            )
        ],
        verbose=True,
    )
    result = agent.chat("Roll a 7-faced dice just for fun. What's the outcome?")
    print(result)
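With verbose=True, the agent should log the tool call it issues (roll_a_dice with n=7) before printing the final answer. The is_function_calling_model=True and is_chat_model=True flags are what allow OpenAIAgent, which expects a function-calling chat model, to accept the OpenAILike wrapper around the local llama3.1 model.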