Skip to content

Instantly share code, notes, and snippets.

@jonaslejon
Last active December 25, 2024 10:07
Show Gist options
  • Save jonaslejon/eae873b7edeb48b0b4073329db7e01bb to your computer and use it in GitHub Desktop.
Save jonaslejon/eae873b7edeb48b0b4073329db7e01bb to your computer and use it in GitHub Desktop.
Monitors an X (Twitter) account for new tweets. Run from crontab every 15 minutes: `*/15 * * * *`
#!/usr/bin/python3
"""
Twitter Account Monitor Script
===================================
This script monitors a specified Twitter account for new tweets and sends notifications
via Pushover and Mailgun (EU). It is intended to be run as a cron job for periodic execution.
Usage:
------
Run from crontab with a 15-minute interval like this:
*/15 * * * * /usr/bin/python3 /path/to/twitter_monitor.py twitter_account_to_monitor --verbose >> /tmp/twitter_monitor.log 2>&1
Setup Requirements:
-------------------
1. **Twitter API**:
- Obtain a `TWITTER_BEARER_TOKEN` from the Twitter Developer Portal (X Developer Platform).
2. **Dependencies**:
- Install required Python packages using `pip` or `pip3`:
```
pip install tweepy requests python-dotenv retry colorama
```
3. **Notification Services**:
- Configure credentials for Pushover and Mailgun:
- Add them to a `.env` file in the same directory as this script:
```
TWITTER_BEARER_TOKEN=your_twitter_bearer_token
MAILGUN_API_KEY=your_mailgun_api_key
MAILGUN_DOMAIN=your_mailgun_domain
[email protected]
[email protected]
PUSHOVER_USER_KEY=your_pushover_user_key
```
Features:
---------
- Monitors a Twitter account for new tweets.
- Sends notifications via:
- **Pushover** for push notifications.
- **Mailgun** for email alerts (EU region supported).
- Includes verbose logging for debugging and detailed execution logs.
Author:
-------
Jonas Lejon
License:
--------
MIT License
"""
import tweepy
import requests
import os
import time
from datetime import datetime
from dotenv import load_dotenv
from colorama import Fore, Style, init
# Initialize Colorama (autoreset restores default terminal colors after each print)
init(autoreset=True)
# Load environment variables from a .env file in the working directory
load_dotenv()
# Twitter API credentials (from .env; see setup notes in the module docstring)
BEARER_TOKEN = os.getenv("TWITTER_BEARER_TOKEN")
# Mailgun credentials (read here but not used in this file — presumably for
# the email-alert feature described in the docstring; verify senders exist)
MAILGUN_API_KEY = os.getenv("MAILGUN_API_KEY")
MAILGUN_DOMAIN = os.getenv("MAILGUN_DOMAIN")
SENDER_EMAIL = os.getenv("SENDER_EMAIL")
ALERT_EMAIL = os.getenv("ALERT_EMAIL")
# Pushover credentials (user key from .env; app token hard-coded below)
PUSHOVER_USER_KEY = os.getenv("PUSHOVER_USER_KEY")
PUSHOVER_API_TOKEN = "ZZZZZZZZZZ" # Insert the Pushover.net API token here. Unique for this script
# Path to store the last tweet ID (persists state between cron runs)
STATE_FILE = "/tmp/last_tweet_id.txt"
def get_timestamp():
    """Return the current local time formatted as 'YYYY-MM-DD HH:MM:SS'."""
    return f"{datetime.now():%Y-%m-%d %H:%M:%S}"
def verbose_log(message, verbose):
    """Print *message* with a timestamp and a cyan [VERBOSE] tag when verbose mode is on."""
    if not verbose:
        return
    print(f"{get_timestamp()} {Fore.CYAN}[VERBOSE]{Style.RESET_ALL} {message}")
def get_last_tweet_id(verbose=False):
    """Return the last processed tweet ID from STATE_FILE, or None on first run."""
    if not os.path.exists(STATE_FILE):
        verbose_log("No state file found; assuming first run.", verbose)
        return None
    verbose_log("Reading last tweet ID from state file.", verbose)
    with open(STATE_FILE, "r") as handle:
        stored_id = handle.read().strip()
    verbose_log(f"Last tweet ID: {stored_id}", verbose)
    return stored_id
def save_last_tweet_id(tweet_id, verbose=False):
    """Persist *tweet_id* to STATE_FILE so the next cron run starts after it."""
    verbose_log(f"Saving last tweet ID: {tweet_id}", verbose)
    with open(STATE_FILE, "w") as handle:
        handle.write(str(tweet_id))
def fetch_recent_tweets(client, user_id, since_id=None, verbose=False):
    """Fetch recent tweets for *user_id* via the Twitter API v2, retrying on rate limits.

    Parameters
    ----------
    client : tweepy.Client
        Authenticated API v2 client.
    user_id : int or str
        Numeric Twitter user ID to fetch the timeline for.
    since_id : str or int, optional
        If given, only tweets newer than this ID are requested.
    verbose : bool
        Enable verbose logging.

    Returns
    -------
    tweepy.Response from ``get_users_tweets``.

    Raises
    ------
    SystemExit
        If a rate-limit reset is more than 15 minutes away (cron will retry).
    Exception
        Any non-rate-limit API error is logged and re-raised.
    """
    # Retry in a loop rather than by recursion: repeated rate-limit hits must
    # not grow the call stack without bound.
    while True:
        try:
            params = {"id": user_id, "max_results": 5, "tweet_fields": ["created_at", "text"]}
            if since_id:
                params["since_id"] = since_id
            verbose_log(f"Fetching tweets with params: {params}", verbose)
            tweets = client.get_users_tweets(**params)
            # Add a small delay to prevent rapid consecutive calls
            time.sleep(2)
            return tweets
        except tweepy.TooManyRequests as e:
            # Extract and log rate limit headers
            headers = e.response.headers
            print(f"{get_timestamp()} {Fore.YELLOW}Rate limit hit during tweet fetch. Response headers: {headers}{Style.RESET_ALL}")
            # Compute how long until the limit window resets
            reset_time = headers.get("x-rate-limit-reset")
            if reset_time:
                wait_time = max(int(reset_time) - int(time.time()), 1)
            else:
                wait_time = 900  # Default to 15 minutes
            print(f"{get_timestamp()} {Fore.YELLOW}Rate limit exceeded during tweet fetch. Waiting for {wait_time} seconds.{Style.RESET_ALL}")
            # Give up when the wait exceeds one cron interval; the next run retries.
            if wait_time > 900:  # Exceeding 15 minutes
                print(f"{get_timestamp()} {Fore.RED}Rate limit wait exceeds 15 minutes. Exiting.{Style.RESET_ALL}")
                raise SystemExit(0)
            time.sleep(wait_time)
            # Loop back and retry the fetch.
        except Exception as e:
            print(f"{get_timestamp()} {Fore.RED}Unexpected error during tweet fetch: {e}{Style.RESET_ALL}")
            raise
def fetch_user_with_retry(client, username, verbose=False):
    """Look up a Twitter user by *username*, waiting out rate limits.

    Parameters
    ----------
    client : tweepy.Client
        Authenticated API v2 client.
    username : str
        Screen name (without '@') to resolve.
    verbose : bool
        Enable verbose logging.

    Returns
    -------
    tweepy.Response from ``get_user`` (the user ID is in ``.data.id``).
    """
    # Retry in a loop rather than by recursion: repeated rate-limit hits must
    # not grow the call stack without bound.
    while True:
        try:
            verbose_log(f"Fetching user details for username: {username}", verbose)
            user = client.get_user(username=username)
            # Add a delay to avoid hitting the limit too soon
            time.sleep(5)  # Wait 5 seconds between calls
            return user
        except tweepy.TooManyRequests as e:
            headers = e.response.headers
            print(f"{get_timestamp()} {Fore.YELLOW}Rate limit hit during user lookup. Response headers: {headers}{Style.RESET_ALL}")
            reset_time = headers.get("x-rate-limit-reset")
            if reset_time:
                wait_time = max(int(reset_time) - int(time.time()), 1)
            else:
                wait_time = 900
            print(f"{get_timestamp()} {Fore.YELLOW}Rate limit exceeded during user lookup. Waiting for {wait_time} seconds.{Style.RESET_ALL}")
            time.sleep(wait_time)
            # Loop back and retry the lookup.
def monitor_twitter(username, verbose=False):
    """Check *username*'s timeline for tweets newer than the saved state and print them.

    Uses the Twitter API v2 via tweepy; errors are reported to stdout and the
    function returns without raising.
    """
    verbose_log("Starting twitter monitor", verbose)
    client = tweepy.Client(bearer_token=BEARER_TOKEN)
    # Resolve the username to a numeric user ID (rate-limit aware).
    try:
        user = fetch_user_with_retry(client, username, verbose=verbose)
        user_id = user.data.id
    except Exception as e:
        print(f"{get_timestamp()} {Fore.RED}Error during user lookup: {e}{Style.RESET_ALL}")
        return
    last_tweet_id = get_last_tweet_id(verbose)
    try:
        tweets = fetch_recent_tweets(client, user_id, since_id=last_tweet_id, verbose=verbose)
        if not tweets.data:
            verbose_log("No tweets retrieved from the API.", verbose)
        else:
            fresh = [tweet for tweet in tweets.data if str(tweet.id) != last_tweet_id]
            if not fresh:
                verbose_log("No new tweets detected.", verbose)
            else:
                for tweet in fresh:
                    verbose_log(f"New tweet found: {tweet.text}", verbose)
                    # NOTE(review): subject/body are assembled but never handed to any
                    # notification sender — the Pushover/Mailgun delivery described in
                    # the module docstring is not implemented in this file. Confirm
                    # whether the send step was lost.
                    subject = f"New Tweet from @{username}"
                    body = f"{username} tweeted:\n\n{tweet.text}\n\nLink: https://twitter.com/{username}/status/{tweet.id}"
                    print(f"{get_timestamp()} {Fore.GREEN}New tweet: {tweet.text}{Style.RESET_ALL}")
                # Persist the first (most recent) tweet ID for the next run.
                save_last_tweet_id(fresh[0].id, verbose)
    except Exception as e:
        print(f"{get_timestamp()} {Fore.RED}Error fetching tweets: {e}{Style.RESET_ALL}")
    verbose_log("Ending twitter monitor successfully", verbose)
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Monitor a Twitter account for new tweets.")
parser.add_argument("username", help="The Twitter username to monitor.")
parser.add_argument("--verbose", action="store_true", help="Enable verbose logging.")
args = parser.parse_args()
monitor_twitter(args.username, verbose=args.verbose)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment