{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Private RAG Application Implementation Using Llama 3.2, Ollama & PostgreSQL inspired by https://www.youtube.com/watch?v=-ikCYKcPoqU"
]
},
{
"cell_type": "code",
"execution_count": 36,
"metadata": {},
"outputs": [],
"source": [
"import sys\n",
"import psycopg2\n",
"import os\n",
"import frontmatter"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Dataset Preparation"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def parse_markdown_files(directory):\n",
" markdown_data = []\n",
"\n",
" # Use os.walk to traverse the directory tree\n",
" for root, _, files in os.walk(directory):\n",
" for filename in files:\n",
" if filename.endswith('.md'):\n",
" # Construct the full file path\n",
" filepath = os.path.join(root, filename)\n",
" \n",
" # Open and read the markdown file\n",
" with open(filepath, 'r', encoding='utf-8') as file:\n",
" # Parse front matter and content using frontmatter library\n",
" post = frontmatter.load(file)\n",
" \n",
" # Extract title from front matter\n",
" title = post.get('title', 'No Title')\n",
" \n",
" # Extract content (the markdown content itself)\n",
" content = post.content\n",
" \n",
" # Append to markdown_data list as a dictionary\n",
" markdown_data.append({\n",
" \"title\": title,\n",
" \"content\": content\n",
" })\n",
"\n",
" return markdown_data\n",
"\n",
"directory_path = '/Users/I/workspace/raphaelbauer.com/content'\n",
"\n",
"markdown_data = parse_markdown_files(directory_path)"
]
},
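{
"cell_type": "markdown",
"metadata": {},
"source": [
"A quick, optional check (not part of the original walkthrough) that the files were parsed as expected; the counts and titles depend on your own content directory."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Optional sanity check: how many documents were parsed, and what does the first one look like?\n",
"print(f\"Parsed {len(markdown_data)} markdown files\")\n",
"if markdown_data:\n",
"    print(f\"First title: {markdown_data[0]['title']}\")\n",
"    print(f\"First content preview: {markdown_data[0]['content'][:100]!r}\")"
]
},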
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Connect to the database"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def connect_db():\n",
" return psycopg2.connect( # use the credentials of your postgresql database \n",
" host = 'localhost',\n",
" database = 'postgres',\n",
" user = 'postgres',\n",
" password = 'password',\n",
" port = '5432'\n",
" )"
]
},
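{
"cell_type": "markdown",
"metadata": {},
"source": [
"The `VECTOR` column below comes from the pgvector extension, and `ollama_embed`/`ollama_generate` come from the pgai extension. If you run the pgai Docker image from the video they are typically available already; otherwise, a sketch like the following (assuming both extensions are installed on the PostgreSQL server) enables them."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Enable the required extensions (a no-op if they are already enabled).\n",
"# Assumes pgvector and pgai are installed on the PostgreSQL server.\n",
"conn = connect_db()\n",
"cur = conn.cursor()\n",
"cur.execute(\"CREATE EXTENSION IF NOT EXISTS vector;\")\n",
"cur.execute(\"CREATE EXTENSION IF NOT EXISTS ai CASCADE;\")\n",
"conn.commit()\n",
"cur.close()\n",
"conn.close()"
]
},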
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Create the table"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"conn = connect_db()\n",
"cur = conn.cursor()\n",
"cur.execute(\"\"\"\n",
" CREATE TABLE IF NOT EXISTS documents (\n",
" id SERIAL PRIMARY KEY,\n",
" title TEXT,\n",
" content TEXT,\n",
" embedding VECTOR(768)\n",
" );\n",
" \"\"\")\n",
"conn.commit()\n",
"cur.close()\n",
"conn.close()"
]
},
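{
"cell_type": "markdown",
"metadata": {},
"source": [
"Optional: for larger document sets, an approximate nearest-neighbor index speeds up the cosine-distance search used later. This is a sketch assuming pgvector 0.5+ (HNSW support); the notebook works without it, and the index name is arbitrary."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Optional: HNSW index on the embedding column for faster cosine-distance queries.\n",
"# Assumes pgvector 0.5+; the index name is arbitrary.\n",
"conn = connect_db()\n",
"cur = conn.cursor()\n",
"cur.execute(\"\"\"\n",
"    CREATE INDEX IF NOT EXISTS documents_embedding_idx\n",
"    ON documents USING hnsw (embedding vector_cosine_ops);\n",
"\"\"\")\n",
"conn.commit()\n",
"cur.close()\n",
"conn.close()"
]
},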
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Insert the data and embeddings into the database"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"conn = connect_db()\n",
"cur = conn.cursor()\n",
"\n",
"# use the port at which your ollama service is running.\n",
"for doc in markdown_data:\n",
" cur.execute(\"\"\"\n",
" INSERT INTO documents (title, content, embedding)\n",
" VALUES (\n",
" %(title)s,\n",
" %(content)s,\n",
" ollama_embed('nomic-embed-text', concat(%(title)s, ' - ', %(content)s), _host=>'http://ollama:11434')\n",
" )\n",
" \"\"\", doc)\n",
"\n",
"conn.commit()\n",
"cur.close()\n",
"conn.close()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Safechecking the insert information in the database."
]
},
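{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Minimal sanity check (assumes pgvector's vector_dims() function):\n",
"# count the stored rows and confirm the embeddings are 768-dimensional.\n",
"conn = connect_db()\n",
"cur = conn.cursor()\n",
"cur.execute(\"SELECT count(*), min(vector_dims(embedding)) FROM documents;\")\n",
"print(cur.fetchone())\n",
"cur.close()\n",
"conn.close()"
]
},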
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Retrieval and Generation"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Define the query and its embedding"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"query = \"Can you tell me how modern QA should look like\""
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"conn = connect_db()\n",
"cur = conn.cursor()\n",
" \n",
"# Embed the query using the ollama_embed function\n",
"cur.execute(\"\"\"\n",
" SELECT ollama_embed('nomic-embed-text', %s, _host=>'http://ollama:11434');\n",
"\"\"\", (query,))\n",
"query_embedding = cur.fetchone()[0]\n",
"\n",
"# Retrieve relevant documents based on cosine distance\n",
"cur.execute(\"\"\"\n",
" SELECT title, content, 1 - (embedding <=> %s) AS similarity\n",
" FROM documents\n",
" ORDER BY similarity DESC\n",
" LIMIT 3;\n",
"\"\"\", (query_embedding,))\n",
"\n",
"rows = cur.fetchall()\n",
" \n",
"# Prepare the context for generating the response\n",
"context = \"\\n\\n\".join([f\"Title: {row[0]}\\nContent: {row[1]}\" for row in rows])\n",
"print(context)\n",
"\n",
"cur.close()\n",
"conn.close()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"conn = connect_db()\n",
"cur = conn.cursor()\n",
"\n",
"# Generate the response using the ollama_generate function\n",
"cur.execute(\"\"\"\n",
" SELECT ollama_generate('llama3.2', %s, _host=>'http://ollama:11434');\n",
"\"\"\", (f\"Query: {query}\\nContext: {context}\",))\n",
" \n",
"model_response = cur.fetchone()[0]\n",
"print(model_response['response'])\n",
" \n",
"cur.close()\n",
"conn.close()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.13.0"
}
},
"nbformat": 4,
"nbformat_minor": 4
}