Skip to content

Instantly share code, notes, and snippets.

@pandr
Last active February 13, 2026 14:24
Show Gist options
  • Select an option

  • Save pandr/c832e64a9af8f34724d8ae6d1abbc8c9 to your computer and use it in GitHub Desktop.

Select an option

Save pandr/c832e64a9af8f34724d8ae6d1abbc8c9 to your computer and use it in GitHub Desktop.
Chat experiment
#!/usr/bin/env python3
import os
import sys
from openai import OpenAI
import json
import argparse
import subprocess
from urllib.parse import urlparse, parse_qs, urlencode, urlunparse
# Model configuration: the fast default, and the deeper (slower) model
# selected with the -d/--deep flag.
DEFAULT_MODEL = "gpt-4.1"
DEEP_MODEL = "gpt-5-mini"

# Developer prompt template: static behavioral instructions prepended to every
# request; build_developer_prompt() appends live system info after this text.
DEVELOPER_PROMPT = ("You are a concise, technical assistant. Answer tersely. "
"Do not use markdown or non ascii symbols (e.g. emojis) - user is on a terminal. "
"Avoid fluff. Your answer should not contain a summary; then it was too long to begin with. "
"Prefer OS-specific paths, commands, and behaviors as indicated by the System info below. "
"If unsure, state briefly what additional information would be needed. ")
def build_developer_prompt():
    """Constructs the full developer prompt with system information.

    Probes the host via `uname -a`, `sw_vers` (macOS version), `uname -s -r`
    (kernel), and $SHELL; each probe independently falls back to "unknown"
    on failure.

    Returns:
        str: DEVELOPER_PROMPT followed by a "System info" section.
    """
    try:
        uname_output = subprocess.check_output(['uname', '-a'], text=True).strip()
    # FileNotFoundError added for platforms without uname (e.g. Windows),
    # matching the handling the sw_vers probe below already had.
    except (subprocess.CalledProcessError, FileNotFoundError):
        uname_output = "unknown"
    try:
        sw_vers_output = subprocess.check_output(['sw_vers'], text=True).strip()
        # Parse sw_vers "Key: Value" lines into a dict
        os_parts = {}
        for line in sw_vers_output.split('\n'):
            if ':' in line:
                key, value = line.split(':', 1)
                os_parts[key.strip()] = value.strip()
        os_info = f"{os_parts.get('ProductName', 'unknown')} {os_parts.get('ProductVersion', '')} (build {os_parts.get('BuildVersion', '')})"
    except (subprocess.CalledProcessError, FileNotFoundError):
        os_info = "unknown"
    try:
        kernel_output = subprocess.check_output(['uname', '-s', '-r'], text=True).strip()
        # NOTE(review): the "(XNU)" tag is macOS-specific and will be inaccurate
        # on other kernels — confirm intent if this script runs elsewhere.
        kernel_info = f"{kernel_output} (XNU)"
    except (subprocess.CalledProcessError, FileNotFoundError):
        kernel_info = "unknown"
    shell = os.environ.get('SHELL', 'unknown')
    return (f"{DEVELOPER_PROMPT}\n"
            "System info: \n"
            f" - OS: {os_info}\n"
            f" - Kernel: {kernel_info}\n"
            f" - Shell: {shell}\n"
            f" - Raw uname -a: {uname_output}\n")
def clean_url(url):
    """Strip all utm_* tracking parameters from a URL's query string.

    Args:
        url: The URL to clean.

    Returns:
        str: The URL with every utm_* query parameter removed. A URL with
        no query string is returned unchanged.
    """
    parts = urlparse(url)
    if not parts.query:
        return url
    # keep_blank_values preserves parameters like "?q=" that have no value
    params = parse_qs(parts.query, keep_blank_values=True)
    kept = {name: values for name, values in params.items()
            if not name.startswith('utm_')}
    # doseq expands list values back into repeated key=value pairs;
    # an empty dict yields an empty query string.
    new_query = urlencode(kept, doseq=True) if kept else ''
    return urlunparse((parts.scheme, parts.netloc, parts.path,
                       parts.params, new_query, parts.fragment))
def main():
    """Entry point: parse CLI args, query the OpenAI Responses API (with web
    search enabled), and print the answer with numbered, de-tracked source
    citations. Piped stdin is forwarded as extra context."""
    # Build help epilog with configuration info
    epilog = f"""
Configuration:
  Default model: {DEFAULT_MODEL} (fast)
  Deep model (-d): {DEEP_MODEL}

Developer prompt sent to API:
{build_developer_prompt()}
Examples:
  chat.py "what is the capital of France"
  echo "some context" | chat.py "analyze this"
  chat.py -d "complex question requiring deeper thinking"
"""
    parser = argparse.ArgumentParser(
        description="Chat with OpenAI API - A concise terminal-based AI assistant",
        epilog=epilog,
        formatter_class=argparse.RawDescriptionHelpFormatter
    )
    parser.add_argument("prompt", nargs="+", help="The prompt/question to send to the AI")
    parser.add_argument("-d", "--deep", action="store_true", help=f"Use deeper thinking model ({DEEP_MODEL})")
    parser.add_argument("--dump-headers", action="store_true", help="Dump response headers from the REST API")
    args = parser.parse_args()
    prompt = " ".join(args.prompt).strip()
    # Select model based on flag
    model = DEEP_MODEL if args.deep else DEFAULT_MODEL
    stdin = ""
    if not sys.stdin.isatty():
        stdin = sys.stdin.read().strip()
    if not prompt:
        print("usage: chat <prompt> (optionally pipe input)", file=sys.stderr)
        sys.exit(1)
    api_key = os.environ.get("OPENAI_API_KEY")
    if not api_key:
        print("error: OPENAI_API_KEY environment variable is not set", file=sys.stderr)
        sys.exit(1)
    # Store headers and request info in closure variables.
    # This is for debugging purposes only (--dump-headers).
    captured_request_info = {}
    captured_request_headers = {}
    captured_response_headers = {}

    def request_hook(request):
        captured_request_info['method'] = request.method
        captured_request_info['url'] = str(request.url)
        captured_request_headers.update(dict(request.headers))

    def response_hook(response):
        captured_response_headers.update(dict(response.headers))

    # Local import: httpx is only needed to install the debug event hooks
    import httpx
    http_client = httpx.Client(event_hooks={'request': [request_hook], 'response': [response_hook]})
    client = OpenAI(api_key=api_key, http_client=http_client)
    # Build up the input structure (renamed from `input` to avoid shadowing
    # the builtin): developer prompt, user prompt, optional piped context.
    input_items = [
        {
            "role": "developer", "content": [
                {
                    "type": "input_text",
                    "text": build_developer_prompt()
                }
            ],
        },
        {
            "role": "user", "content": [{"type": "input_text", "text": prompt}]
        }
    ]
    if stdin:
        input_items.append({
            "role": "user", "content": [{"type": "input_text", "text": f"Context:\n{stdin}"}]
        })
    response = client.responses.create(
        model=model,
        input=input_items,
        tools=[{"type": "web_search"}],
        tool_choice="auto",
        include=["web_search_call.action.sources"],
    )
    # Dump headers if requested
    if args.dump_headers:
        print("\n=== Request ===", file=sys.stderr)
        if captured_request_info:
            print(f"{captured_request_info.get('method', 'UNKNOWN')} {captured_request_info.get('url', 'UNKNOWN')}", file=sys.stderr)
            print("", file=sys.stderr)
        if captured_request_headers:
            for key, value in captured_request_headers.items():
                print(f"{key}: {value}", file=sys.stderr)
        else:
            print("Warning: No request headers captured", file=sys.stderr)
        print("\n=== Response Headers ===", file=sys.stderr)
        if captured_response_headers:
            for key, value in captured_response_headers.items():
                print(f"{key}: {value}", file=sys.stderr)
        else:
            print("Warning: No response headers captured", file=sys.stderr)
        print("========================\n", file=sys.stderr)
    # Extract url_citation annotations (title, url, character span) if any
    sources = []
    for item in response.output:
        if item.type != "message":
            continue
        for block in getattr(item, "content", []):
            if block.type != "output_text":
                continue
            for ann in getattr(block, "annotations", []):
                if ann.type == "url_citation":
                    sources.append({
                        "title": ann.title,
                        "url": ann.url,
                        "start": ann.start_index,
                        "end": ann.end_index,
                    })
    # BUG FIX: annotation start/end indices refer to the RAW response.output_text,
    # so the text must not be stripped before the replacements below —
    # stripping first shifts every index when the text has leading whitespace.
    # Strip only at print time instead.
    output_text = response.output_text
    if sources:
        # Deduplicate sources by cleaned URL while preserving first appearance order
        seen_urls = {}
        unique_sources = []
        sources_sorted = sorted(sources, key=lambda x: x["start"])
        for src in sources_sorted:
            cleaned_url = clean_url(src["url"])
            if cleaned_url not in seen_urls:
                seen_urls[cleaned_url] = len(unique_sources) + 1
                src["cleaned_url"] = cleaned_url
                unique_sources.append(src)
            src["num"] = seen_urls[cleaned_url]
        # Replace from end to beginning to avoid index shifting
        for src in reversed(sources_sorted):
            output_text = output_text[:src["start"]] + f"[{src['num']}]" + output_text[src["end"]:]
    # Print main text output with numbered citations
    print(output_text.strip())
    if sources:
        print("\n----------\nSources:")
        for src in unique_sources:
            print(f"[{src['num']}] {src['cleaned_url']}")


if __name__ == "__main__":
    main()
@pandr
Copy link
Author

pandr commented Feb 13, 2026

Add something like this to your .bashrc (or your shell's equivalent startup file):

export OPENAI_API_KEY='sk-proj-XXXX'

chat () {
~/bin/chat.py "$@"
}

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment