Skip to content

Instantly share code, notes, and snippets.

@DannyAziz
Created July 14, 2025 19:48
Show Gist options
  • Save DannyAziz/b76d8b9c0135575478a5c7f5cee3ce84 to your computer and use it in GitHub Desktop.
Save DannyAziz/b76d8b9c0135575478a5c7f5cee3ce84 to your computer and use it in GitHub Desktop.
Get all Twitter following
from urllib.parse import unquote

import click
from rich.console import Console
from rich.table import Table
from twitter.scraper import Scraper
# Shared Rich console used for all styled terminal output in this script.
console = Console()
def extract_user_data(account):
    """Pull the user object out of a Twitter timeline entry.

    Navigates account['content']['itemContent']['user_results']['result'];
    returns that user dict, or None if any level of the nesting is missing.
    """
    content = account.get('content', {})
    if 'itemContent' not in content:
        return None
    item_content = content['itemContent']
    user_results = item_content.get('user_results')
    if user_results is None or 'result' not in user_results:
        return None
    return user_results['result']
# Authentication cookies for the X (Twitter) API.
# Get these from your browser: copy the cookies your browser sends to the X API.
# At minimum the "twid" cookie must be present — this script reads the numeric
# user id out of it in get_following_accounts().
COOKIES = {}
def get_following_accounts(scraper, limit=None):
    """Fetch the timeline entries for every account the authenticated user follows.

    Args:
        scraper: An authenticated twitter.scraper.Scraper instance.
        limit: Optional maximum number of entries to return (used by test mode).

    Returns:
        A list of 'TimelineUser' entry dicts, or [] when the twid cookie is
        missing or any error occurs while fetching.
    """
    console.print("[blue]Fetching accounts you follow...[/blue]")
    # Fail fast with a clear message instead of a KeyError swallowed below.
    if "twid" not in COOKIES:
        console.print("[red]Error fetching following accounts: COOKIES has no 'twid' entry[/red]")
        return []
    try:
        # The twid cookie holds the user id, usually URL-encoded as "u%3D<id>".
        # unquote() first, so both the encoded ("u%3D123") and already-decoded
        # ("u=123") forms yield the bare numeric id. (The old
        # .replace("u%3D", "") left "u=" in place for decoded cookies.)
        user_id = unquote(COOKIES["twid"]).replace("u=", "")
        # Get following list - this returns a complex nested structure
        following_responses = list(scraper.following([user_id]))
        # Walk each response's timeline instructions, keeping only the
        # 'TimelineUser' entries (other entry types are cursors/modules).
        user_entries = []
        for response in following_responses:
            if 'data' in response and 'user' in response['data']:
                timeline = response['data']['user']['result']['timeline']['timeline']
                for instruction in timeline.get('instructions', []):
                    if instruction.get('type') == 'TimelineAddEntries':
                        for entry in instruction.get('entries', []):
                            if entry.get('content', {}).get('itemContent', {}).get('itemType') == 'TimelineUser':
                                user_entries.append(entry)
        if limit is not None:
            user_entries = user_entries[:limit]
            console.print(f"[green]Found {len(user_entries)} accounts (limited to {limit} for testing)[/green]")
        else:
            console.print(f"[green]Found {len(user_entries)} accounts you follow[/green]")
        return user_entries
    except Exception as e:
        # Best-effort CLI behavior: report the error and return an empty list
        # rather than crashing the whole script.
        console.print(f"[red]Error fetching following accounts: {e}[/red]")
        return []
def display_results(inactive_accounts, dry_run=True):
    """Render the inactive accounts in a Rich table and print a summary line."""
    if not inactive_accounts:
        console.print("[green]No inactive accounts found![/green]")
        return
    table = Table(title="Inactive Accounts")
    table.add_column("Username", style="cyan")
    table.add_column("Name", style="white")
    table.add_column("Followers", justify="right", style="blue")
    # Skip entries whose user payload is missing or has no 'legacy' section.
    for user_data in (extract_user_data(acct) for acct in inactive_accounts):
        if not user_data or 'legacy' not in user_data:
            continue
        legacy = user_data['legacy']
        table.add_row(
            legacy.get('screen_name', 'unknown'),
            legacy.get('name', 'N/A'),
            str(legacy.get('followers_count', 'N/A')),
        )
    console.print(table)
    count = len(inactive_accounts)
    if dry_run:
        console.print(f"\n[yellow]DRY RUN: Would unfollow {count} accounts[/yellow]")
    else:
        console.print(f"\n[red]Found {count} accounts to unfollow[/red]")
def extract_screen_names_from_json(json_data):
    """
    Extract all screen_name values from legacy objects in the JSON structure.
    Equivalent to: jq -r '.. | select(type=="object") | .legacy.screen_name? // empty' | sort | uniq | grep -v '^$'
    """
    found = set()
    # Iterative depth-first walk over the nested dicts/lists.
    pending = [json_data]
    while pending:
        node = pending.pop()
        if isinstance(node, dict):
            # Record legacy.screen_name when it is a non-blank string.
            legacy = node.get('legacy')
            if isinstance(legacy, dict):
                name = legacy.get('screen_name')
                if isinstance(name, str) and name.strip():
                    found.add(name)
            pending.extend(node.values())
        elif isinstance(node, list):
            pending.extend(node)
    # Sorted unique names (equivalent to sort | uniq).
    return sorted(found)
def get_screen_names_from_following(following_responses):
    """Collect the sorted, de-duplicated screen names across all API responses."""
    collected = set()
    for response in following_responses:
        collected.update(extract_screen_names_from_json(response))
    return sorted(collected)
@click.command()
@click.option('--test', is_flag=True, help='Test mode - only check the last 100 accounts you follow')
def main(test):
    """Get all Twitter following"""
    # Test mode caps the fetch at 100 accounts; normal mode fetches everything.
    if test:
        console.print("[cyan]Running in TEST mode - checking only the last 100 accounts[/cyan]")
        limit = 100
    else:
        limit = None
    scraper = Scraper(cookies=COOKIES)
    following = get_following_accounts(scraper, limit)
    if not following:
        console.print("[red]No following accounts found or error occurred[/red]")
        return
    # One screen name per line on stdout, so the output pipes cleanly.
    print("\n".join(get_screen_names_from_following(following)))


if __name__ == '__main__':
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment