@pamelafox
Last active August 14, 2025 06:21
GPT-5 code
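
A basic chat completion against a GPT-5 deployment on Azure OpenAI, using keyless (Microsoft Entra ID) authentication. The request sets the reasoning_effort and verbosity parameters to keep both the model's reasoning and its answer short.
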
import os

import openai
from azure.identity import DefaultAzureCredential, get_bearer_token_provider

# Authenticate to Azure OpenAI with keyless (Microsoft Entra ID) credentials
client = openai.AzureOpenAI(
    api_version=os.environ["AZURE_OPENAI_VERSION"],
    azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
    azure_ad_token_provider=get_bearer_token_provider(
        DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
    ),
)

# reasoning_effort and verbosity control how much the model reasons and how long it answers
response = client.chat.completions.create(
    model=os.environ["AZURE_OPENAI_DEPLOYMENT"],
    messages=[
        {"role": "user", "content": "Explain beta-reduction in lambda calculus."},
    ],
    reasoning_effort="minimal",
    verbosity="low",
)
print(response.choices[0].message.content)
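
Structured outputs with the same Azure OpenAI client: a Pydantic model describes the expected JSON shape, and client.beta.chat.completions.parse() returns either a parsed MathExplanation object or a refusal.
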
import os

import openai
from azure.identity import DefaultAzureCredential, get_bearer_token_provider
from pydantic import BaseModel

# Authenticate to Azure OpenAI with keyless (Microsoft Entra ID) credentials
client = openai.AzureOpenAI(
    api_version=os.environ["AZURE_OPENAI_VERSION"],
    azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
    azure_ad_token_provider=get_bearer_token_provider(
        DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
    ),
)


# Pydantic model describing the JSON shape the model must return
class MathExplanation(BaseModel):
    steps: list[str]
    answer: int


completion = client.beta.chat.completions.parse(
    model=os.environ["AZURE_OPENAI_DEPLOYMENT"],
    messages=[
        {"role": "system", "content": "You answer math problems."},
        {"role": "user", "content": "What is 23 * 7? Show your steps."},
    ],
    response_format=MathExplanation,
)

# The message carries either a refusal or the parsed Pydantic object
message = completion.choices[0].message
if message.refusal:
    print(message.refusal)
else:
    print(message.parsed)
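
A portable variant that picks the backend at runtime: the API_HOST environment variable selects Azure OpenAI, Ollama, GitHub Models, or OpenAI.com, and the same two examples then run against whichever client was configured.
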
import os

import azure.identity
import openai
from dotenv import load_dotenv
from pydantic import BaseModel

# Set up the OpenAI client to use the Azure OpenAI, Ollama, GitHub Models, or OpenAI.com API
load_dotenv(override=True)
API_HOST = os.getenv("API_HOST", "github")

if API_HOST == "azure":
    token_provider = azure.identity.get_bearer_token_provider(
        azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
    )
    client = openai.AzureOpenAI(
        api_version=os.environ["AZURE_OPENAI_VERSION"],
        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
        azure_ad_token_provider=token_provider,
    )
    MODEL_NAME = os.environ["AZURE_OPENAI_DEPLOYMENT"]
elif API_HOST == "ollama":
    client = openai.OpenAI(base_url=os.environ["OLLAMA_ENDPOINT"], api_key="nokeyneeded")
    MODEL_NAME = os.environ["OLLAMA_MODEL"]
elif API_HOST == "github":
    client = openai.OpenAI(base_url="https://models.github.ai/inference", api_key=os.environ["GITHUB_TOKEN"])
    MODEL_NAME = os.getenv("GITHUB_MODEL", "openai/gpt-5")
else:
    client = openai.OpenAI(api_key=os.environ["OPENAI_KEY"])
    MODEL_NAME = os.environ["OPENAI_MODEL"]

# Basic chat completion using the GPT-5 reasoning_effort and verbosity parameters
response = client.chat.completions.create(
    model=MODEL_NAME,
    messages=[
        {"role": "user", "content": "Explain beta-reduction in lambda calculus."},
    ],
    reasoning_effort="minimal",
    verbosity="low",
)
print(response.choices[0].message.content)


# Structured outputs: a Pydantic model describes the JSON shape the model must return
class MathExplanation(BaseModel):
    steps: list[str]
    answer: int


completion = client.beta.chat.completions.parse(
    model=MODEL_NAME,
    messages=[
        {"role": "system", "content": "You answer math problems."},
        {"role": "user", "content": "What is 23 * 7? Show your steps."},
    ],
    response_format=MathExplanation,
)

# The message carries either a refusal or the parsed Pydantic object
message = completion.choices[0].message
if message.refusal:
    print(message.refusal)
else:
    print(message.parsed)
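
Depending on API_HOST, the script reads AZURE_OPENAI_VERSION, AZURE_OPENAI_ENDPOINT, and AZURE_OPENAI_DEPLOYMENT; OLLAMA_ENDPOINT and OLLAMA_MODEL; GITHUB_TOKEN (plus an optional GITHUB_MODEL, defaulting to openai/gpt-5); or OPENAI_KEY and OPENAI_MODEL, typically supplied via a .env file loaded by load_dotenv().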