-
-
Save lgersman/9502c85f1b6207257cacf0ff39b569d0 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from operator import itemgetter | |
from langchain.chat_models import ChatOpenAI | |
from langchain.embeddings import OpenAIEmbeddings | |
from langchain.schema import StrOutputParser | |
from langchain.schema.runnable import RunnablePassthrough, RunnableMap | |
from langchain.vectorstores import Chroma | |
from langchain.prompts import PromptTemplate | |
from langchain.document_loaders import JSONLoader | |
from dotenv import load_dotenv | |
load_dotenv() | |
def data_func(record: dict, metadata: dict) -> dict:
    """Copy a JSON record's ``source`` field into its document metadata.

    Used as the ``metadata_func`` for ``JSONLoader``: mutates *metadata*
    in place and returns it, defaulting ``source`` to "" when the record
    has no such key.
    """
    source = record.get("source", "")
    metadata["source"] = source
    return metadata
# Load video records from a local JSON file: one document per top-level
# array element (jq schema '.[]'), using the "descriptions" field as the
# document text and copying "source" into metadata via data_func.
loader = JSONLoader(
    file_path='videos.json',
    jq_schema='.[]',
    content_key="descriptions",  # NOTE(review): confirm the JSON field is "descriptions" and not "description"
    metadata_func=data_func
)
documents = loader.load()
# Embed every document with OpenAI embeddings and index them in an
# in-memory Chroma vector store (requires OPENAI_API_KEY from .env).
vectorstore = Chroma.from_documents(documents=documents, embedding=OpenAIEmbeddings())
# Default retriever over the store; retrieval parameters are library defaults.
retriever = vectorstore.as_retriever()
template = """ | |
Use the following pieces of context to answer the question at the end. | |
If you don't know the answer, just say that you don't know, don't try to make up an answer. | |
Try to keep the answer concise when possible. | |
{context} | |
Question: {question} | |
Helpful Answer:""" | |
rag_prompt_custom = PromptTemplate.from_template(template) | |
llm = ChatOpenAI(model_name="gpt-4-1106-preview", temperature=0) | |
def format_docs(docs):
    """Concatenate the ``page_content`` of each document in *docs*,
    separated by blank lines, and return the resulting string."""
    contents = [doc.page_content for doc in docs]
    return "\n\n".join(contents)
# Inner chain: expects an input dict with "documents" and "question" keys.
# Formats the already-retrieved documents into the prompt's {context} slot,
# forwards {question} unchanged, then runs the LLM and parses its reply to str.
rag_chain_from_docs = (
    {
        "context": lambda input: format_docs(input["documents"]),
        "question": itemgetter("question"),
    }
    | rag_prompt_custom
    | llm
    | StrOutputParser()
)
# Outer chain: fans the raw question out to the retriever ("documents") and
# passes the question through as-is, then produces a dict with the retrieved
# documents' metadata ("documents") and the generated answer ("answer").
rag_chain_with_source = RunnableMap(
    {"documents": retriever, "question": RunnablePassthrough()}
) | {
    "documents": lambda input: [doc.metadata for doc in input["documents"]],
    "answer": rag_chain_from_docs,
}
def chat():
    """Interactive loop: answer questions against the video index until the
    user types 'exit' (case-insensitive)."""
    while True:
        question = input("Question (type 'exit' to quit): ").strip()
        if question.lower() == "exit":
            break
        result = rag_chain_with_source.invoke(question)
        print(f"Answer: {result['answer']}")
        print(f"Documents: {result['documents']}")


if __name__ == "__main__":
    chat()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment