Skip to content

Instantly share code, notes, and snippets.

@larkintuckerllc
Created October 27, 2025 22:58
Show Gist options
  • Save larkintuckerllc/9f88658396a07634e6b4ea39491fb1b6 to your computer and use it in GitHub Desktop.
from langchain.agents.middleware import dynamic_prompt
from langchain.agents import create_agent
from langchain_chroma import Chroma
from langchain_openai import ChatOpenAI, OpenAIEmbeddings
def main():
    """Run a minimal RAG agent: retrieve context from a local Chroma store and
    answer the user's question with a locally served Qwen chat model.

    Assumes two OpenAI-compatible servers are running locally:
    embeddings on :8000 and chat completions on :8001 — e.g. vLLM.
    """
    # Embedding model served via an OpenAI-compatible endpoint; the key is
    # blank because the local server does no authentication.
    embeddings = OpenAIEmbeddings(
        model="Qwen/Qwen3-Embedding-0.6B",
        openai_api_base="http://localhost:8000/v1",
        openai_api_key="",
    )
    # Persistent on-disk Chroma collection used for similarity search.
    vector_store = Chroma(
        collection_name="example_collection",
        embedding_function=embeddings,
        persist_directory="./chroma_langchain_db",
    )

    @dynamic_prompt
    def prompt_with_context(request):
        """Build the system prompt per turn, injecting retrieved documents."""
        # Retrieve against the text of the most recent message in the state.
        last_query = request.state["messages"][-1].text
        retrieved_docs = vector_store.similarity_search(last_query)
        docs_content = "\n\n".join(doc.page_content for doc in retrieved_docs)
        return (
            "You are a helpful assistant. Use the following context in your response:"
            f"\n\n{docs_content}"
        )

    # Chat model served by a second local OpenAI-compatible endpoint.
    llm = ChatOpenAI(
        model="Qwen/Qwen3-1.7B",
        openai_api_base="http://localhost:8001/v1",
        openai_api_key="",
    )
    agent = create_agent(
        model=llm,
        middleware=[prompt_with_context],
    )
    result = agent.invoke(
        {"messages": [{"role": "user", "content": "How does water temperature affect clam growth?"}]}
    )
    # The last message in the returned state is the agent's final answer.
    final_message = result["messages"][-1]
    print(final_message.content)


if __name__ == "__main__":
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment