Skip to content

Instantly share code, notes, and snippets.

View heathermiller's full-sized avatar

Heather Miller heathermiller

View GitHub Profile
@heathermiller
heathermiller / instructor.py
Created January 11, 2024 18:00
Instructor
import instructor
from openai import OpenAI
from pydantic import BaseModel
# instructor.patch wraps the OpenAI client so that
# client.chat.completions.create accepts a `response_model` keyword
# (a Pydantic model describing the desired structured output).
client = instructor.patch(OpenAI())
class UserDetail(BaseModel):
    # Schema for the structured output: instructor parses the LLM
    # response into an instance of this model.
    name: str
@heathermiller
heathermiller / langchain.py
Created January 11, 2024 18:01
LangChain
llm = OpenAI(temperature =0)
class Action(BaseModel):
    # Field descriptions are surfaced to the LLM via the output parser's
    # format instructions, so they double as prompt text.
    action: str = Field(description="action to take")
    action_input: str = Field(description="input to the action")
parser = PydanticOutputParser(pydantic_object=Action)
# The parser derives natural-language formatting instructions from the
# Action schema for inclusion in the LLM prompt, and later parses the
# model's text response back into an Action instance.
# The overall process is centered around prompts.
prompt = PromptTemplate(template="Answer the user query.
# NOTE(review): @ai_fn appears to be marvin's decorator, which uses the
# signature and docstring as the LLM prompt — the empty body is
# intentional; confirm against the surrounding gist.
@ai_fn
def generate_recipe(ingredients: list[str]) -> list[str]:
    """From a list of `ingredients`, generates a
    complete instruction set to cook a recipe.
    """
# Call the AI-backed function directly with a concrete ingredient list
# (fix: "coucous" was a misspelling of "couscous").
generate_recipe(["lemon", "chicken", "olives", "couscous"])
# Alternatively, drive it from a free-form natural-language request.
generate_recipe.prompt(
    "I need a recipe using Lemon Chicken Olives and Couscous"
)
class BasicQA(dspy.Signature):
    """Answer questions with short factoid answers."""

    # Free-text question in; the output desc nudges the LM toward brevity.
    # (The docstring above is the task instruction DSPy sends to the LM,
    # so it is kept verbatim.)
    question = dspy.InputField()
    answer = dspy.OutputField(desc="often between 1 and 5 words")
# Define the predictor.
generate_answer = dspy.Predict(BasicQA)
# Call the predictor on a particular input.
pred = generate_answer(question=dev_example.question)
# Define the predictor. Notice we're just changing the class. The signature BasicQA is unchanged.
generate_answer_with_chain_of_thought = dspy.ChainOfThought(BasicQA)
# Call the predictor on the same input.
pred = generate_answer_with_chain_of_thought(question=dev_example.question)
# Print the input, the chain of thought, and the prediction.
print(f"Question: {dev_example.question}")
# NOTE(review): assumes pred.rationale contains at least one '.' —
# split('.', 1)[1] raises IndexError otherwise; the leading sentence
# (typically "Reasoning:") is being stripped off here.
print(f"Thought: {pred.rationale.split('.', 1)[1].strip()}")
print(f"Predicted Answer: {pred.answer}")
@heathermiller
heathermiller / dspy-compile.py
Created January 11, 2024 18:56
Compiling in DSPy
from dspy.teleprompt import BootstrapFewShot
class GenerateAnswer(dspy.Signature):
    """Answer questions with short factoid answers."""

    # Retrieved passages accompany the question, RAG-style; the output
    # desc nudges the LM toward brevity. (The docstring above is the task
    # instruction DSPy sends to the LM, so it is kept verbatim.)
    context = dspy.InputField(desc="may contain relevant facts")
    question = dspy.InputField()
    answer = dspy.OutputField(desc="often between 1 and 5 words")
class RAG(dspy.Module):
# The prompt is pulled from the LangSmith Hub, which hosts many shared prompts.
prompt = hub.pull("hwchase17/self-ask-with-search")
# Temperature 0 for deterministic output in the agent loop.
llm = OpenAI(temperature=0)
# Provide the LM with a search tool it can call for intermediate answers.
search = SerpAPIWrapper()
# Fix: the original had `tools = Tool(...)]` — an unbalanced bracket.
# The agent API expects a *list* of tools, so wrap Tool(...) in [].
tools = [
    Tool(
        name="Intermediate Answer",
        func=search.run,
        description="useful for when you need to ask with search",
    )
]
# Stop generation when the model starts writing the tool's answer itself.
llm_with_stop = llm.bind(stop=["\nIntermediate answer:"])
agent = (