Created
September 8, 2024 20:51
-
-
Save tslmy/3e71685d632f4ed5ba493af97e75c07d to your computer and use it in GitHub Desktop.
Use llama3.1's function-calling capability as a drop-in replacement for OpenAI LLM in LlamaIndex agents
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python | |
import random | |
from llama_index.agent.openai import OpenAIAgent | |
from llama_index.core import Settings | |
from llama_index.core.tools import FunctionTool | |
from llama_index.llms.openai_like import OpenAILike | |
from pydantic import Field | |
def roll_a_dice(
    n: int = Field(description="number of faces of the dice to roll", gt=0, le=100),
) -> int:
    """Simulate one roll of a fair n-sided dice.

    The ``Field`` default supplies the parameter schema LlamaIndex uses
    when exposing this function as a tool; at call time the agent passes
    a concrete ``int``. Returns a uniformly random integer in [1, n].
    """
    # randrange over a half-open interval — equivalent to randint(1, n).
    return random.randrange(1, n + 1)
if __name__ == "__main__":
    # Point LlamaIndex at a local Ollama server that speaks the OpenAI
    # wire protocol, and flag llama3.1 as a chat model with native
    # function-calling support so the OpenAI agent can use tools.
    Settings.llm = OpenAILike(
        model="llama3.1",
        api_base="http://localhost:11434/v1",
        api_key="ollama",
        is_function_calling_model=True,
        is_chat_model=True,
    )

    # Expose the dice roller as a tool the agent may decide to invoke.
    dice_tool = FunctionTool.from_defaults(roll_a_dice)
    agent = OpenAIAgent.from_tools(tools=[dice_tool], verbose=True)

    # One-shot interaction: the model should call the tool, then report
    # the outcome in its reply.
    print(agent.chat("Roll a 7-faced dice just for fun. What's the outcome?"))
Sign up for free to join this conversation on GitHub.
Already have an account? Sign in to comment.