Last active
April 14, 2026 10:55
-
-
Save kpirnie/9b3934a6254c33e91f9da0b1d3bb3d07 to your computer and use it in GitHub Desktop.
Emby LiveTV Backup/Restore/Verify
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| #!/usr/bin/env python3 | |
| import json | |
| import requests | |
| import argparse | |
| import shutil | |
| import os | |
| from pathlib import Path | |
| import base64 | |
# --- User configuration -----------------------------------------------------
# Emby API key (Dashboard -> Advanced -> API Keys).
API_KEY = "YOUR_API_ID"
# Emby user whose favorites are backed up/restored (GUID from the user's URL).
USER_ID = "YOUR_USER_ID"
# Base URL of the Emby server, no trailing slash (e.g. http://host:8096).
BASE_URL = "http://YOUR_URL"
# Where the JSON channel snapshot is written/read.
BACKUP_FILE = "/YOUR_BACKUP/LOCATION/emby_channel_backup.json"
# Directory that receives copies of per-channel logo/artwork folders.
LOGOS_DIR = "/YOUR_BACKUP/LOCATION/logos"
# Emby's own livetv metadata directory (source of the logo folders).
EMBY_METADATA = "/YOUR_EMBYs_DATA/LOCATION/metadata/livetv"
# Auth header sent with every API request.
headers = {"X-Emby-Token": API_KEY}
# Map a lowercased artwork filename (as found in a channel's metadata folder)
# to the Emby image type it should be uploaded as. Each of the four base names
# is accepted with any of the three supported extensions.
_IMAGE_BASE_TYPES = {
    "poster": "Primary",
    "logolight": "LogoLight",
    "logolightcolor": "LogoLightColor",
    "backdrop": "Backdrop",
}
image_type_map = {
    f"{base}.{ext}": img_type
    for base, img_type in _IMAGE_BASE_TYPES.items()
    for ext in ("png", "jpg", "webp")
}
def fetch_live_channels():
    """Fetch every Live TV channel from Emby, keyed by Emby item Id.

    Returns:
        dict: {emby_item_id: channel_dict} for all channels (up to 5000).

    Raises:
        requests.HTTPError: on a non-2xx response, so callers never operate
            on a silently-empty channel set (e.g. after an auth failure).
        requests.Timeout: if the server does not answer within 30 seconds
            (the original call had no timeout and could hang forever).
    """
    r = requests.get(
        f"{BASE_URL}/LiveTv/Channels",
        headers=headers,
        params={
            "limit": 5000,
            "Fields": "ExternalId,ChannelMappingInfo,ImageTags,PresentationUniqueKey,ChannelNumber,SortName"
        },
        timeout=30,
    )
    r.raise_for_status()
    return {ch["Id"]: ch for ch in r.json().get("Items", [])}
def backup(dry_run=False):
    """Snapshot all Live TV channels to BACKUP_FILE and copy their logo folders.

    For each channel this records name, number, sort name, guide-mapping ids,
    image tags, and the per-user favorite flag, then copies the channel's
    metadata folder (by PresentationUniqueKey) from EMBY_METADATA to LOGOS_DIR.

    Args:
        dry_run: when True, nothing is written or copied; actions are printed.

    Fixes vs. original: removed the redundant os.makedirs(LOGOS_DIR) before the
    JSON write (the directory is already recreated at the top of the non-dry-run
    path), and added request timeouts so a stalled server cannot hang the run.
    """
    print("Fetching all channels...")
    live = fetch_live_channels()
    items = list(live.values())
    data = []
    logos_copied = 0
    logos_skipped = 0
    # clear out the existing backed up logos first, emby rehashes the location for them anyways...
    if not dry_run:
        if Path(LOGOS_DIR).exists():
            shutil.rmtree(LOGOS_DIR)
        os.makedirs(LOGOS_DIR, exist_ok=True)
    for i, ch in enumerate(items):
        emby_id = ch.get("Id")
        puk = ch.get("PresentationUniqueKey", "")
        # The favorite flag is per-user, so it must come from the user-scoped
        # item endpoint rather than the channel listing.
        is_favorite = False
        ur = requests.get(
            f"{BASE_URL}/Users/{USER_ID}/Items/{emby_id}",
            headers=headers,
            params={"Fields": "UserData"},
            timeout=30,
        )
        if ur.status_code == 200:
            is_favorite = ur.json().get("UserData", {}).get("IsFavorite", False)
        entry = {
            "EmbyId": emby_id,
            "Name": ch.get("Name"),
            "ManagementId": ch.get("ManagementId", ""),
            "ListingsProviderId": ch.get("ListingsProviderId", ""),
            "ListingsChannelId": ch.get("ListingsChannelId", ""),
            "PresentationUniqueKey": puk,
            "ImageTags": ch.get("ImageTags", {}),
            "ChannelNumber": ch.get("Number", ""),
            "SortName": ch.get("SortName", ""),
            "IsFavorite": is_favorite,
        }
        data.append(entry)
        if puk:
            src = Path(EMBY_METADATA) / puk
            dst = Path(LOGOS_DIR) / puk
            if src.exists():
                if not dry_run:
                    if dst.exists():
                        shutil.rmtree(dst)
                    shutil.copytree(src, dst)
                else:
                    print(f" WOULD COPY LOGOS: {ch.get('Name')} ({puk})")
                logos_copied += 1
            else:
                logos_skipped += 1
        # lightweight progress indicator for large channel lineups
        if (i + 1) % 50 == 0:
            print(f" Processed {i + 1}/{len(items)}...")
    data.sort(key=lambda x: x["Name"] or "")
    mapped = sum(1 for e in data if e["ListingsChannelId"])
    favorited = sum(1 for e in data if e["IsFavorite"])
    print(f"Found {len(data)} channels, {mapped} with guide mappings, {favorited} favorited")
    print(f"Logos: {logos_copied} copied, {logos_skipped} skipped")
    if dry_run:
        print("[DRY RUN] No files written.")
    else:
        with open(BACKUP_FILE, "w") as f:
            json.dump(data, f, indent=2)
        print(f"Backed up to {BACKUP_FILE}")
def _restore_mapping(ch, name, dry_run):
    """Re-apply one channel's guide (listings) mapping; returns (ok, fail) deltas."""
    # All three ids are required to post a mapping; skip silently otherwise.
    if not (ch.get("ListingsChannelId") and ch.get("ManagementId") and ch.get("ListingsProviderId")):
        return 0, 0
    if dry_run:
        print(f" WOULD MAP: {name} -> {ch['ListingsChannelId']}")
        return 1, 0
    r = requests.post(
        f"{BASE_URL}/LiveTv/ChannelMappings",
        headers=headers,
        params={"ProviderId": ch["ListingsProviderId"]},
        json={
            "TunerChannelId": ch["ManagementId"],
            "ProviderChannelId": ch["ListingsChannelId"]
        },
        timeout=30,
    )
    if r.status_code == 200:
        return 1, 0
    print(f" MAP FAIL [{r.status_code}]: {name}")
    return 0, 1


def _restore_metadata(ch, name, emby_id, dry_run):
    """Restore one channel's name/number/sort name; returns (ok, fail) deltas."""
    if dry_run:
        print(f" WOULD UPDATE META: {name} (Number: {ch.get('ChannelNumber', '')})")
        return 1, 0
    # Emby's item-update endpoint expects the full item back, so GET it first
    # and patch only the fields we own.
    gr = requests.get(
        f"{BASE_URL}/Users/{USER_ID}/Items/{emby_id}",
        headers=headers,
        timeout=30,
    )
    if gr.status_code != 200:
        print(f" META GET FAIL [{gr.status_code}]: {name}")
        return 0, 1
    item = gr.json()
    item["Name"] = ch.get("Name")
    item["SortName"] = ch.get("SortName", "")
    item["ForcedSortName"] = ch.get("SortName", "")
    if ch.get("ChannelNumber"):
        item["Number"] = ch.get("ChannelNumber")
    r = requests.post(
        f"{BASE_URL}/Items/{emby_id}",
        headers=headers,
        json=item,
        timeout=30,
    )
    if r.status_code in (200, 204):
        return 1, 0
    print(f" META FAIL [{r.status_code}]: {name}")
    return 0, 1


def _restore_favorite(ch, name, emby_id, dry_run):
    """Set or clear one channel's favorite flag; returns (ok, fail) deltas."""
    if dry_run:
        print(f" WOULD SET FAVORITE: {name} -> {ch.get('IsFavorite', False)}")
        return 1, 0
    # POST marks a favorite, DELETE clears it.
    if ch.get("IsFavorite"):
        r = requests.post(f"{BASE_URL}/Users/{USER_ID}/FavoriteItems/{emby_id}", headers=headers, timeout=30)
    else:
        r = requests.delete(f"{BASE_URL}/Users/{USER_ID}/FavoriteItems/{emby_id}", headers=headers, timeout=30)
    if r.status_code in (200, 204):
        return 1, 0
    print(f" FAV FAIL [{r.status_code}]: {name}")
    return 0, 1


def _restore_images(name, emby_id, puk, dry_run):
    """Upload backed-up artwork for one channel; returns (ok, fail) deltas."""
    ok = fail = 0
    if not puk:
        return ok, fail
    logo_src = Path(LOGOS_DIR) / puk / "metadata"
    if not logo_src.exists():
        return ok, fail
    for img_file in logo_src.iterdir():
        img_type = image_type_map.get(img_file.name.lower())
        if not img_type:
            # not a recognized artwork filename; ignore
            continue
        if dry_run:
            print(f" WOULD UPLOAD IMAGE: {name} {img_type} ({img_file.name})")
            ok += 1
            continue
        # Emby's image endpoint takes base64-encoded bytes with the image MIME type.
        with open(img_file, "rb") as fh:
            img_data = base64.b64encode(fh.read()).decode("utf-8")
        suffix = img_file.suffix.lower()
        mime = "image/png" if suffix == ".png" else "image/jpeg" if suffix == ".jpg" else "image/webp"
        r = requests.post(
            f"{BASE_URL}/Items/{emby_id}/Images/{img_type}",
            headers={**headers, "Content-Type": mime},
            data=img_data,
            timeout=60,
        )
        if r.status_code in (200, 204):
            ok += 1
        else:
            fail += 1
            print(f" IMG FAIL [{r.status_code}]: {name} {img_type}")
    return ok, fail


def restore(dry_run=False):
    """Re-apply a saved channel snapshot to Emby.

    For every channel in BACKUP_FILE this restores, in order: the guide
    mapping, the name/number/sort metadata, the per-user favorite flag, and
    any backed-up artwork. Prints a per-category ok/failed summary at the end.

    Args:
        dry_run: when True, every action is printed but nothing is sent.

    Changes vs. original: the 100-line body is decomposed into one private
    helper per concern (identical messages and behavior), and all HTTP calls
    now carry a timeout so a stalled server cannot hang the run.
    """
    print(f"Loading backup from {BACKUP_FILE}...")
    with open(BACKUP_FILE) as f:
        data = json.load(f)
    map_ok = map_fail = meta_ok = meta_fail = img_ok = img_fail = fav_ok = fav_fail = 0
    for ch in data:
        emby_id = ch.get("EmbyId")
        name = ch.get("Name")
        puk = ch.get("PresentationUniqueKey", "")
        ok, fail = _restore_mapping(ch, name, dry_run)
        map_ok += ok
        map_fail += fail
        ok, fail = _restore_metadata(ch, name, emby_id, dry_run)
        meta_ok += ok
        meta_fail += fail
        ok, fail = _restore_favorite(ch, name, emby_id, dry_run)
        fav_ok += ok
        fav_fail += fail
        ok, fail = _restore_images(name, emby_id, puk, dry_run)
        img_ok += ok
        img_fail += fail
    print(f"\nGuide mappings: {map_ok} ok, {map_fail} failed")
    print(f"Metadata: {meta_ok} ok, {meta_fail} failed")
    print(f"Favorites: {fav_ok} ok, {fav_fail} failed")
    print(f"Images: {img_ok} ok, {img_fail} failed")
def verify():
    """Diff the saved snapshot in BACKUP_FILE against the live Emby state.

    Reports, per channel: missing channels, name/number/guide-mapping
    mismatches, artwork files present in the backup but absent from Emby's
    metadata directory, and favorite-flag mismatches. Also lists live channels
    that are absent from the backup. Read-only: makes no changes.

    Fix vs. original: the per-channel favorite lookup now has a timeout so a
    stalled server cannot hang the verification pass.
    """
    print(f"Loading backup from {BACKUP_FILE}...")
    with open(BACKUP_FILE) as f:
        backup_data = {ch["EmbyId"]: ch for ch in json.load(f)}
    print("Fetching live channel state...")
    live = fetch_live_channels()
    diffs = []
    for emby_id, bch in backup_data.items():
        name = bch.get("Name")
        lch = live.get(emby_id)
        if not lch:
            diffs.append(f" MISSING FROM EMBY: {name}")
            continue
        # check name
        if lch.get("Name") != bch.get("Name"):
            diffs.append(f" NAME MISMATCH: {name} -> live='{lch.get('Name')}'")
        # check channel number (compare as strings; the backup may hold either)
        if str(lch.get("Number", "")) != str(bch.get("ChannelNumber", "")):
            diffs.append(f" NUMBER MISMATCH: {name} -> backup='{bch.get('ChannelNumber')}' live='{lch.get('Number')}'")
        # check guide mapping
        if lch.get("ListingsChannelId", "") != bch.get("ListingsChannelId", ""):
            diffs.append(f" MAPPING MISMATCH: {name} -> backup='{bch.get('ListingsChannelId')}' live='{lch.get('ListingsChannelId')}'")
        # check images: every artwork file in the backup should exist (by stem)
        # in Emby's live metadata folder for the same channel
        puk = bch.get("PresentationUniqueKey", "")
        if puk:
            backup_logo_dir = Path(LOGOS_DIR) / puk / "metadata"
            live_logo_dir = Path(EMBY_METADATA) / puk / "metadata"
            if backup_logo_dir.exists():
                live_files = {f.stem: f for f in live_logo_dir.iterdir()} if live_logo_dir.exists() else {}
                for img_file in backup_logo_dir.iterdir():
                    if img_file.stem not in live_files:
                        diffs.append(f" IMAGE MISSING: {name} {img_file.name}")
        # check favorite (per-user flag; requires a user-scoped lookup)
        ur = requests.get(
            f"{BASE_URL}/Users/{USER_ID}/Items/{emby_id}",
            headers=headers,
            params={"Fields": "UserData"},
            timeout=30,
        )
        if ur.status_code == 200:
            live_fav = ur.json().get("UserData", {}).get("IsFavorite", False)
            if live_fav != bch.get("IsFavorite", False):
                diffs.append(f" FAVORITE MISMATCH: {name} -> backup={bch.get('IsFavorite')} live={live_fav}")
    # check for channels in Emby not in backup
    for emby_id, lch in live.items():
        if emby_id not in backup_data:
            diffs.append(f" NEW CHANNEL (not in backup): {lch.get('Name')}")
    if diffs:
        print(f"\nFound {len(diffs)} differences:")
        for d in diffs:
            print(d)
    else:
        print("\nAll good — live state matches backup.")
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Backup, restore, or verify Emby Live TV channel state."
    )
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument("--backup", action="store_true", help="snapshot channel state to the backup file")
    group.add_argument("--restore", action="store_true", help="re-apply the saved snapshot to Emby")
    group.add_argument("--verify", action="store_true", help="diff live state against the snapshot (read-only)")
    parser.add_argument("--dry-run", action="store_true", help="Only valid with --backup or --restore")
    args = parser.parse_args()
    # The help text says --dry-run is invalid with --verify, but the original
    # silently ignored it; fail loudly instead so the user isn't misled.
    if args.verify and args.dry_run:
        parser.error("--dry-run cannot be combined with --verify")
    if args.backup:
        backup(dry_run=args.dry_run)
    elif args.restore:
        restore(dry_run=args.dry_run)
    elif args.verify:
        verify()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment