Last active
October 12, 2024 01:06
-
-
Save bengalih/bd80a91c8aa789c2c3def5a5b600c32c to your computer and use it in GitHub Desktop.
sonarr-blacklist.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# sonarr-blacklist.py
# bengalih
# v.0.2
# This script will perform in two ways:
# 1) Pass a torrent ID/HASH as the first argument. In this method, the script will search the sonarr queue for that torrent and perform the OPERATIONS.
# 2) Run with no argument. In this method, the script will search all items in the sonarr queue for the STATUS_MESSAGES_TO_CHECK and if a match is found perform the OPERATIONS
# When passed a torrent ID/HASH the script will enumerate the sonarr queue every QUEUE_CHECK_WAIT seconds for QUEUE_CHECK_MAX_ATTEMPTS.
# The default is 5/24, which is 2 minutes.
# This repeated delay is to ensure sonarr has the item in its queue which sometimes has a delay after adding to the download client.
##############################################################################
# Configuration
SONARR_HOST = 'http://localhost'
SONARR_PORT = '8989'  # Default Sonarr port
API_KEY = 'XXXXXXXXXXXXXXXX'  # Sonarr API key (Settings -> General)
LOG_FILE = 'D:\\sonarr-blacklist.txt'  # log is appended to; a run id distinguishes runs
# OPERATIONS - you probably don't want to change these
REMOVE_FROM_CLIENT = True  # also remove the download from the download client
BLOCKLIST = True           # add the release to Sonarr's blocklist so it is not re-grabbed
# Substrings to look for in queue item status messages.
# NOTE: the original list contained the same message twice; duplicates add no
# matches (substring check), so the entry is listed once. Add further
# distinct messages here as needed.
STATUS_MESSAGES_TO_CHECK = [
    'No files found are eligible for import',
]
# QUEUE CHECKS
QUEUE_CHECK_MAX_ATTEMPTS = 24  # how many times to re-poll the queue for a given hash
QUEUE_CHECK_WAIT = 5           # seconds to wait between polls
##############################################################################
import requests
import sys
import logging
import uuid
import time
from datetime import datetime

# Every execution gets its own UUID so lines from consecutive (or
# overlapping) runs can be told apart in the shared log file.
run_id = str(uuid.uuid4())

# Root logger, writing to both LOG_FILE (stamped with the run id) and the
# console (bare message only).
logger = logging.getLogger()
logger.setLevel(logging.INFO)

file_handler = logging.FileHandler(LOG_FILE)
file_handler.setFormatter(
    logging.Formatter(
        f'%(asctime)s - {run_id} - %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S',
    )
)

console_handler = logging.StreamHandler()
console_handler.setFormatter(logging.Formatter('%(message)s'))

logger.addHandler(file_handler)
logger.addHandler(console_handler)

# Optional first CLI argument: the torrent ID/HASH to search for.
# Lower-cased because downloadId comparison is done case-insensitively.
TARGET_DOWNLOAD_ID = sys.argv[1].lower() if len(sys.argv) > 1 else None

# Every Sonarr API request authenticates via this header.
headers = {'X-Api-Key': API_KEY}
# Define a function to get queue details
def get_queue_details():
    """Fetch the Sonarr queue (with episode objects included).

    Returns:
        The decoded JSON list of queue items, or an empty list (after
        logging an error) on any non-200 response.
    """
    # was '?&includeEpisode=true' — stray '&' removed from the query string
    queue_url = f'{SONARR_HOST}:{SONARR_PORT}/api/v3/queue/details?includeEpisode=true'
    # timeout keeps the script from hanging forever if Sonarr is unresponsive
    response = requests.get(queue_url, headers=headers, timeout=30)
    if response.status_code == 200:
        return response.json()
    logging.error(f"Error: Unable to fetch data (status code {response.status_code})")
    return []
# --- helpers shared by both run modes ---------------------------------------

def log_queue_item(item):
    """Log the identifying details of a single Sonarr queue item."""
    logging.info(f"Download ID: {item['downloadId']}")
    logging.info(f"Queue item ID: {item.get('id', [])}")
    logging.info(f"Torrent Name: {item.get('title', [])}")
    logging.info(f"Series ID: {item.get('seriesId')}")
    logging.info(f"Episode ID: {item.get('episodeId')}")
    logging.info(f"Episode Title: {item['episode'].get('title')}")
    logging.info(f"Air Date: {item['episode'].get('airDate')}")
    logging.info(f"Status: {item.get('status')}")
    logging.info(f"Tracked Download Status: {item.get('trackedDownloadStatus')}")
    logging.info(f"Tracked Download State: {item.get('trackedDownloadState')}")
    logging.info(f"Download Client: {item.get('downloadClient')}")
    logging.info(f"Output Path: {item.get('outputPath')}")
    logging.info(f"Indexer: {item.get('indexer', [])}")
    logging.info(f"Status Messages: {item.get('statusMessages', [])}")

def delete_queue_item(item):
    """DELETE the queue item, performing the configured OPERATIONS.

    Honors REMOVE_FROM_CLIENT/BLOCKLIST in both run modes (previously the
    no-argument mode hard-coded removeFromClient=true&blocklist=true).
    """
    delete_url = (
        f"{SONARR_HOST}:{SONARR_PORT}/api/v3/queue/{item['id']}"
        f"?removeFromClient={str(REMOVE_FROM_CLIENT).lower()}"
        f"&blocklist={str(BLOCKLIST).lower()}"
    )
    delete_response = requests.delete(delete_url, headers=headers, timeout=30)
    if delete_response.status_code == 200:
        logging.info(f"Successfully deleted episode ID {item['episodeId']} from the queue.")
    else:
        logging.error(f"Error deleting episode ID {item['episodeId']} from the queue (status code {delete_response.status_code})")

# 1. Check for matching items based on TARGET_DOWNLOAD_ID
if TARGET_DOWNLOAD_ID:
    logging.info(f"Starting Run ID: {run_id}")
    # Initialized before the loop so the summary below cannot hit an unbound
    # name if QUEUE_CHECK_MAX_ATTEMPTS is configured to 0.
    found_matching_item = False
    attempts = 0
    while attempts < QUEUE_CHECK_MAX_ATTEMPTS:
        for item in get_queue_details():
            if item.get('downloadId', '').lower() == TARGET_DOWNLOAD_ID:
                found_matching_item = True
                logging.info(f"Found match for TARGET_DOWNLOAD_ID: {TARGET_DOWNLOAD_ID}")
                log_queue_item(item)
                delete_queue_item(item)
                break  # only one queue item can match a given hash
        if found_matching_item:
            break  # stop polling once the item has been handled
        logging.info(f"No match found for downloadId: {TARGET_DOWNLOAD_ID}. Retrying...")
        time.sleep(QUEUE_CHECK_WAIT)  # wait QUEUE_CHECK_WAIT seconds before the next attempt
        attempts += 1
    if not found_matching_item:
        # Report the actual configured window instead of a hard-coded "2 minutes"
        total_wait = QUEUE_CHECK_MAX_ATTEMPTS * QUEUE_CHECK_WAIT
        logging.info(f"No matching item found for TARGET_DOWNLOAD_ID {TARGET_DOWNLOAD_ID} after {total_wait} seconds.")
else:
    # 2. No hash supplied: sweep the whole queue for the configured messages.
    logging.info(f"Starting Run ID: {run_id}")
    found_matching_item = False
    for item in get_queue_details():
        item_deleted = False
        for msg in item.get('statusMessages', []):
            # Each status message entry carries a list of message strings
            for message_content in msg.get('messages', []):
                logging.info(f"Checking message: '{message_content}'")
                if any(status_message in message_content for status_message in STATUS_MESSAGES_TO_CHECK):
                    found_matching_item = True
                    logging.info(f"Found item with one of the specified messages: {item['downloadId']}")
                    log_queue_item(item)
                    delete_queue_item(item)
                    # Stop scanning this item's remaining messages: deleting
                    # the same queue id twice would just produce an API error.
                    item_deleted = True
                    break
            if item_deleted:
                break
    if not found_matching_item:
        logging.info("No items found with the specified messages.")
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment