import litellm
from litellm.assistants.main import create_thread, add_message, run_thread
from litellm.types.llms.openai import Attachment
from openai.types.beta.threads.message_create_params import AttachmentToolFileSearch
API_KEY = ""
BASE_URL = "https://dev-shared-llmgateway.dev.billdot.io"
# litellm.api_key = API_KEY
litellm.api_base = BASE_URL


class AssistantManager:
    """
    A manager class to handle the creation of, and interaction with, an assistant
    using litellm's OpenAI Assistants API support.
    """

    def __init__(self, api_key: str, base_url: str):
        """
        Initialize the AssistantManager.

        :param api_key: OpenAI API key.
        :param base_url: Base URL of the LLM gateway to route requests through.
        """
        # self.client = LiteLLM(provider="openai", api_key=api_key, base_url=base_url)
        self.api_key = api_key
        self.base_url = base_url

    def create_assistant(self):
        """
        Create a new assistant configured for PDF line-item extraction.

        :return: The created assistant object.
        """
        assistant = litellm.create_assistants(
            custom_llm_provider="openai",
            model="gpt-4o",
            name="Line Items Analyzer",
            description="Line Items Analyzer",
            # api_base="https://dev-shared-llmgateway.dev.billdot.io",
            api_key=API_KEY,
            instructions="You are a pdf analyzer. Your task is to extract line items from a pdf",
            tools=[{"type": "file_search"}],
        )
        print(assistant)
        return assistant
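
    # Note (illustrative, not from the original gist): create_assistant registers a
    # brand-new assistant on every call. If the returned id were persisted, litellm
    # also exposes get_assistants(custom_llm_provider="openai") to list existing
    # assistants and avoid re-creating one per run (assuming this litellm version
    # provides it).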
    def create_thread(self, prompt: str, file_id: str):
        """
        Create a new thread and add the user prompt with the uploaded file attached.

        :param prompt: The user prompt for the assistant.
        :param file_id: The ID of the uploaded file to attach.
        :return: The created thread object.
        """
        # For reference, the equivalent call with the raw OpenAI SDK:
        #
        # client.beta.threads.messages.create(
        #     thread_id=thread.id,
        #     role="user",
        #     attachments=[
        #         Attachment(
        #             file_id=file.id, tools=[AttachmentToolFileSearch(type="file_search")]
        #         )
        #     ],
        #     content=prompt,
        # )
        new_thread = create_thread(
            custom_llm_provider="openai",
        )
        add_message(
            thread_id=new_thread.id,
            custom_llm_provider="openai",
            role="user",
            attachments=[{"file_id": file_id, "tools": [{"type": "file_search"}]}],
            content=prompt,
        )
        return new_thread

    def upload_file(self, file_path: str):
        """
        Upload a file for use with the assistant.

        :param file_path: Path to the file to upload.
        :return: The created file object.
        """
        with open(file_path, "rb") as file_handle:
            return litellm.create_file(
                file=file_handle,
                purpose="assistants",
                custom_llm_provider="openai",
            )

    def call_assistant(self, assistant_id: str, file_name: str, prompt: str):
        """
        Interact with the assistant using a prompt and an uploaded file.

        :param assistant_id: The ID of the assistant.
        :param file_name: Path to the file to upload and attach.
        :param prompt: The user prompt for the assistant.
        :return: The run created for the thread.
        """
        # Upload the file and create a thread for the interaction
        file = self.upload_file(file_name)
        thread = self.create_thread(prompt, file.id)
        print(thread)
        response = run_thread(
            custom_llm_provider="openai",
            thread_id=thread.id,
            assistant_id=assistant_id,
        )
        # print(response)
        return response


class AssistantService:
    """
    A service class to manage the lifecycle of, and interactions with, an assistant.
    """

    def __init__(self, api_key: str):
        """
        Initialize the AssistantService and create its assistant.

        :param api_key: OpenAI API key.
        """
        self.manager = AssistantManager(api_key, BASE_URL)
        self.assistant = self.manager.create_assistant()

    def handle_request(self, file_path: str, prompt: str):
        """
        Process a request with the assistant.

        :param file_path: Path to the file to upload.
        :param prompt: The user prompt for the assistant.
        :return: The run created for the request.
        """
        run = self.manager.call_assistant(self.assistant.id, file_path, prompt)
        print(run)
        return run
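

# --- Illustrative addition (not part of the original gist) ---
# A minimal sketch of how the assistant's reply could be read back once the run
# has finished. It assumes this litellm version exposes get_messages in
# litellm.assistants.main and that run_thread only returns after the run has
# completed; otherwise the run status would need to be polled first.
def fetch_latest_reply(thread_id: str) -> str:
    """Return the text of the most recent message on the thread (a sketch)."""
    from litellm.assistants.main import get_messages  # assumed available

    messages = get_messages(
        custom_llm_provider="openai",
        thread_id=thread_id,
    )
    # The Assistants API lists messages newest-first, so the first entry should
    # be the assistant's latest reply.
    latest = messages.data[0]
    return latest.content[0].text.value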


# Example Usage
if __name__ == "__main__":
    # Replace with your OpenAI API key
    # OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")

    # Initialize the service
    assistant_service = AssistantService(api_key=API_KEY)

    # Define the file path and prompt
    FILE_PATH = "XinCubeInvoice.pdf"
    prompt = """
    Your task is to extract line items given as tabular data in the PDF.
    A line item is defined as an item in the invoice table that has a Description, Quantity, and Unit Price associated with it.
    Think before you output your response and make sure it is accurate.
    Please return the answer in JSON format. No need to write any code or any explanation.
    """

    # Interact with the assistant
    try:
        responses = assistant_service.handle_request(FILE_PATH, prompt)
        # print("Assistant Responses:")
        # for message in responses:
        #     print(message["content"])
    except Exception as e:
        print(f"Error: {e}")