Skip to content

Instantly share code, notes, and snippets.

@ftk
Created August 18, 2025 08:18
Show Gist options
  • Save ftk/7e694c3f4df5fd6ff4e6cdde35a0026d to your computer and use it in GitHub Desktop.
Save ftk/7e694c3f4df5fd6ff4e6cdde35a0026d to your computer and use it in GitHub Desktop.
Script that checks for torrent updates on rutracker.org, based on the qBittorrent torrent list
#!/usr/bin/env -S uv run
# /// script
# dependencies = [
# "beautifulsoup4>=4.12.3",
# "bencodepy>=0.9.5",
# "requests>=2.32.3",
# ]
# ///
import sqlite3
import re
import requests
from bs4 import BeautifulSoup
import bencodepy
import os
import platform
def get_local_appdata_path(app_name):
    """Return the per-user application-data directory for *app_name*.

    Platform conventions:
      Windows -> %LOCALAPPDATA% (falls back to ~/AppData/Local)
      macOS   -> ~/Library/Application Support
      other   -> ~/.local/share (Linux/Unix)
    """
    # Lazy dispatch table: each base path is only resolved for the
    # platform we are actually running on (e.g. LOCALAPPDATA is only
    # read on Windows).
    platform_bases = {
        "Windows": lambda: os.getenv("LOCALAPPDATA", os.path.expanduser("~/AppData/Local")),
        "Darwin": lambda: os.path.expanduser("~/Library/Application Support"),
    }
    fallback = lambda: os.path.expanduser("~/.local/share")
    base_dir = platform_bases.get(platform.system(), fallback)()
    return os.path.join(base_dir, app_name)
def main():
    """Check qBittorrent's stored torrents for updates on rutracker.org.

    For every row in qBittorrent's SQLite torrent store whose .torrent
    "comment" field contains a rutracker.org topic URL, fetch that page,
    extract the magnet link's infohash, and compare it with the infohash
    qBittorrent has on record.  A mismatch means the tracker re-issued
    the torrent and it should be re-downloaded; a ready-made qbittorrent
    command line is printed for that case.
    """
    # qBittorrent stores resume data in torrents.db under its app-data dir.
    db_path = os.path.join(get_local_appdata_path("qBittorrent"), "torrents.db")

    # Hoist the patterns out of the per-torrent loop.
    url_re = re.compile(r"(https?://(?:www\.)?rutracker\.org[^\s]+)")
    btih_re = re.compile(r"xt=urn:btih:([^&]+)", re.IGNORECASE)

    # Read-only URI connection so a live qBittorrent instance is never
    # disturbed; try/finally guarantees the handle is released even if a
    # query fails (the original leaked it on any exception).
    conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)
    try:
        cursor = conn.cursor()
        # torrent_id holds the infohash (hex string); metadata holds the
        # raw bencoded .torrent file.
        cursor.execute("SELECT torrent_id, metadata, target_save_path FROM torrents")
        rows = cursor.fetchall()
    finally:
        conn.close()

    for torrent_id, torrent_data, target_save_path in rows:
        # Decode the bencoded .torrent to reach the "comment" field.
        try:
            decoded_torrent = bencodepy.decode(torrent_data)
        except Exception as e:
            print(f"Error decoding torrent for ID {torrent_id}: {e}")
            continue

        # A malformed entry may lack info/name; fall back to the hash
        # instead of crashing the whole scan (the original raised
        # KeyError here and aborted every remaining torrent).
        info = decoded_torrent.get(b'info')
        name_bytes = info.get(b'name') if isinstance(info, dict) else None
        if name_bytes:
            name = name_bytes.decode('utf-8', errors='replace')
        else:
            name = torrent_id

        # The comment is optional; skip torrents without one.
        comment_bytes = decoded_torrent.get(b'comment', b'')
        if not comment_bytes:
            print(f"[{name}] No comment found. {torrent_id}")
            continue
        comment_str = comment_bytes.decode('utf-8', errors='replace')

        # Expect something like https://rutracker.org/forum/viewtopic.php?t=XXXX
        rutracker_urls = url_re.findall(comment_str)
        if not rutracker_urls:
            print(f"[{name}] Check manually: {comment_str}")
            continue
        # For simplicity, only the first URL found is checked.
        page_url = rutracker_urls[0]

        # Fetch the topic page to extract the current magnet link.
        try:
            response = requests.get(page_url, timeout=10)
            response.raise_for_status()
        except Exception as e:
            print(f"[{name}] Error requesting {page_url}: {e}")
            continue

        # Find the magnet anchor: <a href="magnet:?xt=urn:btih:...">
        soup = BeautifulSoup(response.text, "html.parser")
        magnet_link_tag = soup.select_one('a[href^="magnet:?xt=urn:btih:"]')
        if not magnet_link_tag:
            print(f"[{name}] No magnet link found on page. {comment_str}")
            continue
        magnet_link = magnet_link_tag.get('href')

        match = btih_re.search(magnet_link)
        if not match:
            print(f"[{name}] Unable to parse infohash from magnet link.")
            continue

        # Normalize both hex hashes to lowercase before comparing.
        magnet_infohash = match.group(1).lower()
        if magnet_infohash == torrent_id.lower():
            print(f"[{name}] OK.")
        else:
            # Tracker re-issued the torrent: print a ready-to-run command
            # (viewtopic -> dl turns the topic URL into the .torrent URL).
            print(f"[{name}] PLEASE UPDATE {magnet_link} {comment_str}")
            print("")
            print(f"qbittorrent {comment_str.replace('viewtopic', 'dl')} --save-path=\"{target_save_path}\"")
            print("")


if __name__ == "__main__":
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment