ghd-colab-ollama.ipynb
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"provenance": [],
"authorship_tag": "ABX9TyPGZHAA40dYGhdH9vDq92/A",
"include_colab_link": true
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python"
}
},
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "view-in-github",
"colab_type": "text"
},
"source": [
"<a href=\"https://colab.research.google.com/gist/Gholamrezadar/6914bba2e246bbf0e82f3e932f6729e2/ghd-colab-ollama.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
]
},
{
"cell_type": "markdown",
"source": [
"This notebook shows how to install and run Ollama on Google Colab and expose it publicly by tunneling with ngrok.\n",
"\n",
"Set `NGROK_TOKEN` in the Colab Secrets panel before running the ngrok cells.\n",
"\n",
"Gholamreza Dar 2024"
],
"metadata": {
"id": "x_PlD2xgGkyx"
}
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "gd4T8NagB0Rd"
},
"outputs": [],
"source": [
"# Installing Ollama\n",
"!curl -fsSL https://ollama.com/install.sh | sh"
]
},
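{
"cell_type": "code",
"source": [
"# Optional sanity check: confirm the `ollama` CLI is on PATH.\n",
"# Assumes the install script in the previous cell finished without errors.\n",
"!ollama --version"
],
"metadata": {},
"execution_count": null,
"outputs": []
},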
{
"cell_type": "code",
"source": [
"# Make the server listen on all interfaces (needed so the ngrok tunnel can reach it)\n",
"%env OLLAMA_HOST=0.0.0.0"
],
"metadata": {
"id": "ApJQZESPOkUF"
},
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"source": [
"# Run the Ollama server in the background (it listens on port 11434)\n",
"!nohup ollama serve &"
],
"metadata": {
"id": "hIqrvqU0CG7a"
},
"execution_count": null,
"outputs": []
},
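{
"cell_type": "code",
"source": [
"# Optional: wait until the Ollama server is reachable before pulling a model.\n",
"# A minimal sketch using only the standard library; it assumes the server was\n",
"# started in the previous cell and listens on the default port 11434.\n",
"import time\n",
"import urllib.request\n",
"\n",
"for _ in range(30):\n",
"    try:\n",
"        urllib.request.urlopen(\"http://localhost:11434\", timeout=2)\n",
"        print(\"Ollama server is up\")\n",
"        break\n",
"    except Exception:\n",
"        time.sleep(1)\n",
"else:\n",
"    print(\"Ollama server did not respond in time\")"
],
"metadata": {},
"execution_count": null,
"outputs": []
},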
{
"cell_type": "code",
"source": [
"# Downloading a model from https://ollama.com/search\n",
"!ollama pull llama3.2:1b"
],
"metadata": {
"collapsed": true,
"id": "kRp5g4rfDuy_"
},
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"source": [
"# Check the available models\n",
"!ollama list"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "WdIkYdIRFn_G",
"outputId": "fb8c4864-6171-4af0-fe8f-939918a3f58b"
},
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"NAME ID SIZE MODIFIED \n",
"llama3.2:1b baf6a787fdff 1.3 GB 16 minutes ago \n"
]
}
]
},
{
"cell_type": "markdown",
"source": [
"## Curl"
],
"metadata": {
"id": "3hTq9RR0ElgY"
}
},
{
"cell_type": "code",
"source": [
"# Test the API locally using curl\n",
"!curl http://localhost:11434/api/generate -d '{\"model\": \"llama3.2:1b\",\"prompt\":\"Question: Who was the first president of the United States? \\n Only answer using a few words. maybe just a name Answer: \"}'"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "1nU-vzydCLzB",
"outputId": "00f9854f-6822-462a-fe27-68caa6f1230e"
},
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"{\"model\":\"llama3.2:1b\",\"created_at\":\"2024-12-10T06:23:06.818957955Z\",\"response\":\"George\",\"done\":false}\n",
"{\"model\":\"llama3.2:1b\",\"created_at\":\"2024-12-10T06:23:07.012560323Z\",\"response\":\" Washington\",\"done\":false}\n",
"{\"model\":\"llama3.2:1b\",\"created_at\":\"2024-12-10T06:23:07.207398347Z\",\"response\":\"\",\"done\":true,\"done_reason\":\"stop\",\"context\":[128006,9125,128007,271,38766,1303,33025,2696,25,6790,220,2366,18,271,128009,128006,882,128007,271,14924,25,10699,574,279,1176,4872,315,279,3723,4273,30,720,8442,4320,1701,264,2478,4339,13,7344,1120,264,836,22559,25,220,128009,128006,78191,128007,271,40052,6652],\"total_duration\":7380048972,\"load_duration\":3349295750,\"prompt_eval_count\":52,\"prompt_eval_duration\":3634000000,\"eval_count\":3,\"eval_duration\":394000000}\n"
]
}
]
},
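{
"cell_type": "code",
"source": [
"# The same request from Python, with streaming disabled so a single JSON object\n",
"# comes back instead of one line per token. A minimal sketch using `requests`\n",
"# (preinstalled on Colab) and Ollama's documented `stream: false` option.\n",
"import requests\n",
"\n",
"resp = requests.post(\n",
"    \"http://localhost:11434/api/generate\",\n",
"    json={\n",
"        \"model\": \"llama3.2:1b\",\n",
"        \"prompt\": \"Question: Who was the first president of the United States? Answer in a few words.\",\n",
"        \"stream\": False,\n",
"    },\n",
")\n",
"print(resp.json()[\"response\"])"
],
"metadata": {},
"execution_count": null,
"outputs": []
},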
{
"cell_type": "markdown",
"source": [
"## LangChain\n"
],
"metadata": {
"id": "8EhAsV-IEqC7"
}
},
{
"cell_type": "code",
"source": [
"!pip install -qU langchain-ollama"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "5QAeutfEE7jS",
"outputId": "580d9747-a01f-4e76-bde0-e668b53e4a5e"
},
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"\u001b[?25l \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m0.0/76.4 kB\u001b[0m \u001b[31m?\u001b[0m eta \u001b[36m-:--:--\u001b[0m\r\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m76.4/76.4 kB\u001b[0m \u001b[31m3.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25h"
]
}
]
},
{
"cell_type": "code",
"source": [
"from langchain_ollama.llms import OllamaLLM\n",
"\n",
"model = OllamaLLM(model=\"llama3.2:1b\")\n",
"model.invoke(\"Who is the best soccer player? (Only answer using a few words. no explanations. do not put a period at the end.)\")"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 35
},
"id": "bjZVd05gHEM1",
"outputId": "2a1e33eb-7f17-470b-be37-05ec0c2adf01"
},
"execution_count": null,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"'Lionel Messi'"
],
"application/vnd.google.colaboratory.intrinsic+json": {
"type": "string"
}
},
"metadata": {},
"execution_count": 38
}
]
},
{
"cell_type": "code",
"source": [
"from langchain_core.prompts import ChatPromptTemplate\n",
"from langchain_ollama.llms import OllamaLLM\n",
"\n",
"template = \"\"\"Question: {question}\n",
"Only answer using a few words. maybe just a name\n",
"Answer: \"\"\"\n",
"prompt = ChatPromptTemplate.from_template(template)\n",
"model = OllamaLLM(model=\"llama3.2:1b\")\n",
"chain = prompt | model\n",
"chain.invoke({\"question\": \"Who was the first president of the United States?\"})"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 35
},
"id": "wcUyPKVGErYx",
"outputId": "7436d301-c1f6-4ffc-8833-920f9239095d"
},
"execution_count": null,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"'George Washington.'"
],
"application/vnd.google.colaboratory.intrinsic+json": {
"type": "string"
}
},
"metadata": {},
"execution_count": 58
}
]
},
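{
"cell_type": "code",
"source": [
"# Chat-style usage: langchain-ollama also provides a ChatOllama class that works\n",
"# with message lists and returns message objects instead of plain strings.\n",
"# A small sketch, assuming the same local model pulled above.\n",
"from langchain_ollama import ChatOllama\n",
"\n",
"chat = ChatOllama(model=\"llama3.2:1b\")\n",
"reply = chat.invoke([\n",
"    (\"system\", \"Answer in a few words.\"),\n",
"    (\"human\", \"What is the capital of France?\"),\n",
"])\n",
"print(reply.content)"
],
"metadata": {},
"execution_count": null,
"outputs": []
},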
{
"cell_type": "markdown",
"source": [
"## Expose the API publicly using ngrok"
],
"metadata": {
"id": "QVNlbt4THf3F"
}
},
{
"cell_type": "code",
"source": [
"!pip install -qU pyngrok"
],
"metadata": {
"id": "6RSy-7MZHwoE"
},
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"source": [
"from google.colab import userdata\n",
"from pyngrok import ngrok, conf\n",
"\n",
"# get NGROK_TOKEN from colab secrets\n",
"ngrok_token = userdata.get('NGROK_TOKEN')\n",
"if not ngrok_token:\n",
"    raise ValueError(\"NGROK_TOKEN secret not found. Please add it to Colab secrets.\")\n",
"\n",
"# Set the ngrok auth token using Python\n",
"conf.get_default().auth_token = ngrok_token\n",
"ngrok.set_auth_token(ngrok_token)\n",
"\n",
"# Expose Ollama server via ngrok on port 11434\n",
"public_url = ngrok.connect(\"http://localhost:11434\")\n",
"print(f\"Ollama server public URL: {public_url.public_url}\")\n"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "j9aXTx7oIFYj",
"outputId": "8fc38f35-184a-43b2-edcb-2d7f69ab3a1e"
},
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Ollama server public URL: https://fb69-35-230-17-154.ngrok-free.app\n"
]
}
]
},
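{
"cell_type": "code",
"source": [
"# Point any Ollama client at the tunnel's public URL instead of localhost.\n",
"# A minimal sketch, assuming the tunnel from the previous cell is still open;\n",
"# on a remote machine, paste the printed URL in place of `public_url.public_url`.\n",
"from langchain_ollama.llms import OllamaLLM\n",
"\n",
"remote_model = OllamaLLM(model=\"llama3.2:1b\", base_url=public_url.public_url)\n",
"print(remote_model.invoke(\"Say hello in five words or fewer.\"))"
],
"metadata": {},
"execution_count": null,
"outputs": []
},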
{
"cell_type": "code",
"source": [
"# Alternative: run the ngrok CLI directly (blocks the cell while the tunnel is open)\n",
"# !ngrok http http://localhost:11434"
],
"metadata": {
"id": "M7yjhMKeNNyD"
},
"execution_count": null,
"outputs": []
}
]
} |