Skip to content

Instantly share code, notes, and snippets.

@pamelafox
Last active September 2, 2025 19:09
Show Gist options
  • Save pamelafox/c509f99080c0ffd667f2ce5e4a368fe5 to your computer and use it in GitHub Desktop.
Ollama gpt-oss examples
import json
import os
import azure.identity
import openai
from dotenv import load_dotenv
from rich import print
# Configure an OpenAI-compatible client for one of four hosts, chosen by the
# API_HOST environment variable (defaults to "github"):
#   azure  -> Azure OpenAI via Entra ID (no API key)
#   ollama -> local Ollama server's OpenAI-compatible endpoint
#   github -> GitHub Models inference endpoint
#   other  -> OpenAI.com with an API key
load_dotenv(override=True)

API_HOST = os.getenv("API_HOST", "github")

if API_HOST == "azure":
    # Keyless auth: exchange a DefaultAzureCredential for bearer tokens.
    token_provider = azure.identity.get_bearer_token_provider(
        azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
    )
    client = openai.AzureOpenAI(
        api_version=os.environ["AZURE_OPENAI_VERSION"],
        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
        azure_ad_token_provider=token_provider,
    )
    MODEL_NAME = os.environ["AZURE_OPENAI_DEPLOYMENT"]
elif API_HOST == "ollama":
    # Ollama does not check API keys, but the client requires a non-empty one.
    client = openai.OpenAI(base_url=os.environ["OLLAMA_ENDPOINT"], api_key="nokeyneeded")
    MODEL_NAME = os.environ["OLLAMA_MODEL"]
elif API_HOST == "github":
    client = openai.OpenAI(base_url="https://models.github.ai/inference", api_key=os.environ["GITHUB_TOKEN"])
    MODEL_NAME = os.getenv("GITHUB_MODEL", "openai/gpt-4o")
else:
    client = openai.OpenAI(api_key=os.environ["OPENAI_KEY"])
    MODEL_NAME = os.environ["OPENAI_MODEL"]
def lookup_weather(city_name=None, zip_code=None):
    """Look up the weather for a given city name or zip code.

    This is a stub tool implementation: it echoes the inputs back and
    always reports sunny, 75 degrees — no weather service is contacted.
    """
    report = {"city_name": city_name, "zip_code": zip_code}
    report.update(weather="sunny", temperature=75)
    return report
# Tool schema advertised to the model for function calling.
# Fix: "strict" belongs on the function object, not inside "parameters"
# (where it was silently ignored). Strict mode also requires every property
# to appear in "required" and additionalProperties to be false, so the
# optional arguments are expressed as nullable ("string" or null) types.
tools = [
    {
        "type": "function",
        "function": {
            "name": "lookup_weather",
            "description": "Lookup the weather for a given city name or zip code.",
            "strict": True,
            "parameters": {
                "type": "object",
                "properties": {
                    "city_name": {
                        "type": ["string", "null"],
                        "description": "The city name",
                    },
                    "zip_code": {
                        "type": ["string", "null"],
                        "description": "The zip code",
                    },
                },
                "required": ["city_name", "zip_code"],
                "additionalProperties": False,
            },
        },
    }
]
# Ask the model a weather question, let it call the lookup_weather tool,
# then send the tool result back for a final natural-language answer.
messages = [
    {"role": "system", "content": "You are a weather chatbot."},
    {"role": "user", "content": "is it sunny in berkeley CA?"},
]

response = client.chat.completions.create(
    model=MODEL_NAME,
    messages=messages,
    tools=tools,
    tool_choice="auto",
    reasoning_effort="medium",
)

print(f"Response from {MODEL_NAME} on {API_HOST}: \n")

# The "reasoning" field is only surfaced by some hosts (e.g. Ollama's
# gpt-oss models); use getattr so other hosts don't raise AttributeError.
print("Thinking...")
print(getattr(response.choices[0].message, "reasoning", None))
print("Done thinking.")

# If the model requested a tool call, execute it and send the result back.
if response.choices[0].message.tool_calls:
    tool_call = response.choices[0].message.tool_calls[0]
    function_name = tool_call.function.name
    arguments = json.loads(tool_call.function.arguments)
    if function_name == "lookup_weather":
        # The assistant's tool-call message must precede the tool result.
        messages.append(response.choices[0].message)
        result = lookup_weather(**arguments)
        # Serialize as JSON — str(result) would emit a Python-repr dict
        # (single quotes), which is not valid JSON for the model to parse.
        messages.append({"role": "tool", "tool_call_id": tool_call.id, "content": json.dumps(result)})
        print(messages)
        response = client.chat.completions.create(
            model=MODEL_NAME, messages=messages, tools=tools, reasoning_effort="medium"
        )
        # NOTE(review): the follow-up response may carry no reasoning field.
        print(response.choices[0].message.content)
import os
import azure.identity
import openai
from dotenv import load_dotenv
from rich import print
# Build the chat client for whichever backend API_HOST selects
# ("azure", "ollama", "github", or anything else for OpenAI.com).
load_dotenv(override=True)

API_HOST = os.getenv("API_HOST", "github")

if API_HOST == "azure":
    # Azure OpenAI with Entra ID token auth instead of an API key.
    token_provider = azure.identity.get_bearer_token_provider(
        azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
    )
    client = openai.AzureOpenAI(
        api_version=os.environ["AZURE_OPENAI_VERSION"],
        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
        azure_ad_token_provider=token_provider,
    )
    MODEL_NAME = os.environ["AZURE_OPENAI_DEPLOYMENT"]
elif API_HOST == "ollama":
    # A placeholder key satisfies the client; Ollama ignores it.
    client = openai.OpenAI(base_url=os.environ["OLLAMA_ENDPOINT"], api_key="nokeyneeded")
    MODEL_NAME = os.environ["OLLAMA_MODEL"]
elif API_HOST == "github":
    client = openai.OpenAI(base_url="https://models.github.ai/inference", api_key=os.environ["GITHUB_TOKEN"])
    MODEL_NAME = os.getenv("GITHUB_MODEL", "openai/gpt-5")
else:
    client = openai.OpenAI(api_key=os.environ["OPENAI_KEY"])
    MODEL_NAME = os.environ["OPENAI_MODEL"]
# Ask a simple question with medium reasoning effort and show the model's
# reasoning trace, final answer, and token usage.
# Fix: use MODEL_NAME from the host-selection logic above — the hard-coded
# "gpt-oss:20b" ignored that configuration entirely and failed on every
# non-Ollama host.
response = client.chat.completions.create(
    model=MODEL_NAME,
    messages=[
        {
            "role": "user",
            "content": "How many r's are in strawberry?",
        },
    ],
    reasoning_effort="medium",
)

print("🤔 Thinking...")
# "reasoning" is only present on hosts that surface it (e.g. Ollama gpt-oss);
# getattr avoids an AttributeError elsewhere.
print(getattr(response.choices[0].message, "reasoning", None))
print("🛑 Done thinking.")
print(response.choices[0].message.content)

print("Usage:")
print(response.usage)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment