Ollama with Python
import sys  # needed for sys.stderr in the error handlers below

import ollama


def chat_with_ollama(model_name, prompt):
    """Uses the ollama Python library to chat with a model."""
    try:
        print(f"--- Chatting with {model_name} ---")
        response = ollama.chat(model=model_name, messages=[
            {'role': 'user', 'content': prompt},
        ])
        print("Response:")
        print(response['message']['content'])
        print("-" * 20)
    except ollama.ResponseError as e:
        print(f"Error interacting with Ollama API: {e}", file=sys.stderr)
        if e.status_code == 404:
            print(f"Model '{model_name}' not found. Please pull the model using 'ollama pull {model_name}'", file=sys.stderr)
    except Exception as e:
        print(f"An unexpected error occurred: {e}", file=sys.stderr)


def stream_chat_with_ollama(model_name, prompt):
    """Uses the ollama Python library to stream a response from a model."""
    try:
        print(f"--- Streaming response from {model_name} ---")
        stream = ollama.chat(model=model_name, messages=[
            {'role': 'user', 'content': prompt},
        ], stream=True)
        print("Response (streaming):")
        for chunk in stream:
            # Each chunk carries a partial message; print it without a newline
            print(chunk['message']['content'], end='', flush=True)
        print("\n" + "-" * 20)
    except ollama.ResponseError as e:
        print(f"Error interacting with Ollama API: {e}", file=sys.stderr)
        if e.status_code == 404:
            print(f"Model '{model_name}' not found. Please pull the model using 'ollama pull {model_name}'", file=sys.stderr)
    except Exception as e:
        print(f"An unexpected error occurred: {e}", file=sys.stderr)


# Ensure you have a model pulled, e.g., 'ollama pull llama3.2:latest'
model_to_use = "llama3.2:latest"  # Replace with a model you have pulled
user_prompt = "What is the capital of France?"

chat_with_ollama(model_to_use, user_prompt)
stream_chat_with_ollama(model_to_use, "Tell me a fun fact about the ocean.")
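The error handlers above can only tell you to run 'ollama pull' by hand when a model is missing. As a minimal sketch (not part of the gist; ensure_model is a hypothetical helper name), that check can be automated with the library's show() and pull() calls, assuming a local Ollama server:

import sys

import ollama


def ensure_model(model_name):
    """Pull model_name if it is not already available locally (hypothetical helper)."""
    try:
        # show() raises ollama.ResponseError when the model is not installed
        ollama.show(model_name)
    except ollama.ResponseError:
        print(f"Model '{model_name}' not found locally; pulling it...", file=sys.stderr)
        ollama.pull(model_name)  # blocks until the download finishes


ensure_model("llama3.2:latest")

Calling ensure_model(model_to_use) before chat_with_ollama would make the 404 branches above a rare edge case rather than the expected first-run failure.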