Skip to content

Instantly share code, notes, and snippets.

@copyleftdev
Created January 20, 2026 04:39
Show Gist options
  • Select an option

  • Save copyleftdev/1562e37027b864bcd284102f59b6eb1c to your computer and use it in GitHub Desktop.

Select an option

Save copyleftdev/1562e37027b864bcd284102f59b6eb1c to your computer and use it in GitHub Desktop.
Fetch GitHub traffic stats (views/clones) for all your repositories using gh CLI
#!/usr/bin/env python3
"""Fetch GitHub traffic stats for all repositories using gh CLI."""
import json
import subprocess
import sys
from concurrent.futures import ThreadPoolExecutor, as_completed
def run_gh(args: list[str]) -> dict | list | None:
"""Run gh api command and return parsed JSON."""
try:
result = subprocess.run(
["gh", "api"] + args,
capture_output=True,
text=True,
check=True
)
return json.loads(result.stdout) if result.stdout.strip() else None
except subprocess.CalledProcessError:
return None
def get_all_repos() -> list[str]:
    """Return the full names of every repo owned by the authenticated user.

    Pages through ``/user/repos`` (100 per page) until a short or empty
    page signals the end of the list. Prints a dot per page as progress.
    """
    per_page = 100
    collected: list[str] = []
    page = 1
    print("Fetching repository list...", end="", flush=True)
    while True:
        batch = run_gh([f"/user/repos?per_page={per_page}&page={page}&affiliation=owner"])
        # A None/error response, a non-list payload, or an empty page all
        # mean there is nothing more to fetch.
        if not isinstance(batch, list) or not batch:
            break
        collected.extend(repo["full_name"] for repo in batch)
        print(".", end="", flush=True)
        page += 1
        # A short page is the last page — no need for one extra request.
        if len(batch) < per_page:
            break
    print(f" found {len(collected)} repos")
    return collected
def get_traffic(repo: str) -> dict:
    """Fetch view and clone traffic counts for one repository.

    Args:
        repo: Full repository name (``owner/name``).

    Returns:
        A dict with ``repo``, ``views``, ``unique_views``, ``clones``
        and ``unique_clones`` keys; counts default to 0 when the
        traffic API yields no data.
    """
    views_payload = run_gh([f"/repos/{repo}/traffic/views"]) or {}
    clones_payload = run_gh([f"/repos/{repo}/traffic/clones"]) or {}
    return {
        "repo": repo,
        "views": views_payload.get("count", 0),
        "unique_views": views_payload.get("uniques", 0),
        "clones": clones_payload.get("count", 0),
        "unique_clones": clones_payload.get("uniques", 0),
    }
def main():
    """Collect traffic for all owned repos and print a ranked summary."""
    repos = get_all_repos()
    if not repos:
        print("No repositories found.")
        sys.exit(1)

    print(f"Fetching traffic for {len(repos)} repositories...")

    results = []
    done = 0
    # Threads overlap the blocking gh subprocess calls; 10 workers is a
    # modest concurrency that stays within GitHub API rate limits.
    with ThreadPoolExecutor(max_workers=10) as executor:
        pending = {executor.submit(get_traffic, name): name for name in repos}
        for fut in as_completed(pending):
            done += 1
            if done % 50 == 0 or done == len(repos):
                print(f" Progress: {done}/{len(repos)}")
            try:
                results.append(fut.result())
            except Exception as exc:
                print(f" Error fetching {pending[fut]}: {exc}")

    # Grand totals across every repo.
    total_views = sum(entry["views"] for entry in results)
    total_unique_views = sum(entry["unique_views"] for entry in results)
    total_clones = sum(entry["clones"] for entry in results)
    total_unique_clones = sum(entry["unique_clones"] for entry in results)

    # Busiest repos first (combined views + clones).
    results.sort(key=lambda entry: entry["views"] + entry["clones"], reverse=True)

    banner = "=" * 60
    print("\n" + banner)
    print("GITHUB TRAFFIC SUMMARY (Last 14 Days)")
    print(banner)
    print(f"\n📊 TOTAL VIEWS: {total_views:,} ({total_unique_views:,} unique)")
    print(f"📦 TOTAL CLONES: {total_clones:,} ({total_unique_clones:,} unique)")

    # Show only repos with any traffic, capped at the top 20.
    top_repos = [entry for entry in results if entry["views"] + entry["clones"] > 0][:20]
    if top_repos:
        print("\n🏆 TOP REPOS BY TRAFFIC:")
        print("-" * 60)
        print(f"{'Repository':<40} {'Views':>8} {'Clones':>8}")
        print("-" * 60)
        for entry in top_repos:
            # Trim to the bare repo name and cap width for the table column.
            short_name = entry["repo"].split("/")[-1][:38]
            print(f"{short_name:<40} {entry['views']:>8} {entry['clones']:>8}")
    else:
        print("\nNo traffic recorded in the last 14 days.")
    print(banner)


if __name__ == "__main__":
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment