# imports
import getpass
import os

from langchain import LLMChain
from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import UnstructuredMarkdownLoader
from langchain.prompts import (
    ChatPromptTemplate,
    SystemMessagePromptTemplate,
    AIMessagePromptTemplate,
    HumanMessagePromptTemplate,
)
from langchain.schema import (
    AIMessage,
    HumanMessage,
    SystemMessage,
)
# OpenAI chat model
os.environ['OPENAI_API_KEY'] = getpass.getpass('OpenAI API Key:')
openai = ChatOpenAI(model_name='gpt-3.5-turbo', temperature=0.2)
# input and output directories
docs_dir = './books/'
output_dir = './output/books/'
os.makedirs(output_dir, exist_ok=True)  # make sure the output directory exists
# prompt templates: system instructions plus one worked human/AI example (few-shot)
template_sys = "You are a helpful assistant that extracts character names and descriptions from book summaries. Specifically, you must extract all the characters unique to this book along with a basic description and return them as a JSON object."
example_human = "The book summary is:\n\nThis is a book about a dog. The dog's name is Spot. Spot is a good dog."
# braces in the example JSON are doubled so the template engine does not treat them as input variables
example_ai = '[\n {{\n "name":"Spot",\n "description":"Spot is a good dog."\n }}\n]'
template_base = "The book summary is: {input}"
# get all the .md files in the docs dir
docs = [f for f in os.listdir(docs_dir) if f.endswith(".md")]
for doc in docs:
    # load a single markdown document from the folder
    loader = UnstructuredMarkdownLoader(docs_dir + doc)
    data = loader.load()
    summary_text = data[0].page_content  # pass the raw text, not the Document objects, to the chain

    # prepare the chat prompt: system message, one example exchange, then the real input
    chat_prompt = ChatPromptTemplate.from_messages([
        SystemMessagePromptTemplate.from_template(template_sys),
        HumanMessagePromptTemplate.from_template(example_human),
        AIMessagePromptTemplate.from_template(example_ai, additional_kwargs={"name": "example_assistant"}),
        HumanMessagePromptTemplate.from_template(template_base),
    ])
    # (optional) inspect the fully formatted messages; LLMChain formats the prompt itself at run time
    chat_prompt.format_prompt(input=summary_text).to_messages()
    # build the chain and run the chat model on the summary text
    llm_chain = LLMChain(
        prompt=chat_prompt,
        llm=openai,
        verbose=True,
    )
    response = llm_chain.run(summary_text)

    # save the response to the output dir, swapping the .md extension for .json
    with open(output_dir + doc.replace('.md', '.json'), "w") as f:
        f.write(response)
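
Because gpt-3.5-turbo is only asked, not forced, to return JSON, the reply can occasionally come back malformed or wrapped in extra prose. An optional hardening step is to parse the response before writing it out; the sketch below illustrates the idea, and the save_characters helper is mine, not part of the original gist.

import json

def save_characters(response_text, out_path):
    """Parse the model reply as JSON before writing it out; raises ValueError if the reply is not valid JSON."""
    characters = json.loads(response_text)
    with open(out_path, "w") as f:
        json.dump(characters, f, indent=2)

# usage inside the loop, in place of the bare f.write(response):
# save_characters(response, output_dir + doc.replace('.md', '.json'))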