Last active
September 5, 2023 05:47
-
-
Save Cdaprod/29f89e030936456d0b2deec06e42767a to your computer and use it in GitHub Desktop.
This is a demo script for an Airtable to Weaviate ChatVectorDBChain QA Agent with Chat History
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/local/bin/python
"""Demo: question-answering chat agent over Airtable data indexed in Weaviate.

Loads Airtable rows, exposes them through a Weaviate vector store and a
LlamaIndex tool, then runs an interactive chat loop while logging the
accumulated chat history to a local file.
"""
import logging

import langchain  # noqa: F401  (kept: original imported it at module level)
import weaviate
from langchain.chains import ChatVectorDBChain
from langchain.document_loaders import AirtableLoader
from langchain.llms import OpenAI
from langchain.vectorstores.weaviate import Weaviate
from llama_index.langchain_helpers.agents import (
    IndexToolConfig,
    LlamaIndexTool,
    create_llama_chat_agent,
)
from llama_index.langchain_helpers.memory import GPTIndexMemory

# Set up logging to a local file so the chat history can be audited later.
logging.basicConfig(filename='logfile.log', level=logging.DEBUG)
logger = logging.getLogger(__name__)

# Weaviate setup.
WEAVIATE_URL = "http://0.0.0.0:8082"
# FIX: this constant was commented out in the original but still referenced
# below, which raised NameError at startup. Replace with a real key.
WEAVIATE_API_KEY = "your_weaviate_api_key"
client = weaviate.Client(
    url=WEAVIATE_URL,
    auth_client_secret=weaviate.AuthApiKey(WEAVIATE_API_KEY),
)

# Airtable loader (replace the placeholders with real credentials).
airtable_loader = AirtableLoader("base_id", "table_name", "api_key")

# LangChain setup: vector store over class "YourClass", text field "content".
vectorstore = Weaviate(client, "YourClass", "content")
MyOpenAI = OpenAI(temperature=0.2, openai_api_key="your_openai_api_key")
# NOTE(review): `qa` is constructed but never used below — kept for parity
# with the original demo; either wire it into the agent or delete it.
qa = ChatVectorDBChain.from_llm(MyOpenAI, vectorstore)

# LlamaIndex setup.
# NOTE(review): IndexToolConfig expects a query engine, but AirtableLoader is
# a document loader — this likely needs a real index/query engine. Confirm.
tool_config = IndexToolConfig(
    query_engine=airtable_loader,
    name="Airtable Index",
    description="Index for Airtable data",
    tool_kwargs={"return_direct": True},
)
tool = LlamaIndexTool.from_tool_config(tool_config)
memory = GPTIndexMemory(
    index=airtable_loader,
    human_prefix="Human",
    ai_prefix="AI",
    memory_key="history",
    input_key="question",
    output_key="answer",
)

# Create agent.
# NOTE(review): create_llama_chat_agent conventionally takes a LlamaToolkit
# as its first argument rather than a single tool — confirm against the
# installed llama_index version.
agent_chain = create_llama_chat_agent(tool, MyOpenAI, memory=memory, verbose=True)

# Run the interactive chat loop.
chat_history = []
print("Welcome to the Weaviate ChatVectorDBChain Demo!")
print("Please enter a question or dialogue to get started!")
while True:
    try:
        query = input("")
    except (EOFError, KeyboardInterrupt):
        # FIX: exit cleanly on Ctrl-D / Ctrl-C instead of an unhandled
        # exception traceback.
        break
    # FIX: AgentExecutor.run() returns a plain string, not a dict; the
    # original indexed result["answer"], which raised TypeError on the
    # first query.
    answer = agent_chain.run(input=query)
    print(answer)
    chat_history.append((query, answer))
    # Log memory to a local file.
    logger.debug(f'Chat history: {chat_history}')
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment