@sanghviharshit
Last active July 30, 2024 13:22
'''
The Netgear Meural mobile app and web interface do not provide an easy way to delete all user-uploaded items that are not part of any playlist.
When you delete a playlist, its items are not deleted, and they keep consuming the user's limited upload space.
Use this script to identify all uploaded items currently not part of any playlist and delete them to recover upload space.

Usage: python3 meural_clean_uploads.py
'''
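# Requires Python 3 and the third-party 'requests' package (pip install requests).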
import requests
import json
import time
import logging
logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)
api_url = "https://api.meural.com/v1"
username = "[email protected]"
password = "very_secret"
user_items_list_file = "user_items_list.json"
user_gallery_list_file = "user_gallery_list.json"
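# Fill in your Meural account credentials above before running. The two *_list_file
# names are optional local caches used by the commented-out json.dump calls and the
# *_from_file helpers below.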
def get_auth_token(username, password):
    url = api_url + "/authenticate"
    payload = 'username={}&password={}'.format(username, password)
    headers = {
        'Content-Type': 'application/x-www-form-urlencoded'
    }
    logging.info("GET auth token")
    response = requests.request("POST", url, headers=headers, data=payload)
    data = response.json()
    return data["token"]
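# The token returned above is sent as "Token <value>" in the Authorization header
# of every subsequent API request (see the `authorization` variable near the bottom).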
# Get all user items (uploads)
def get_user_items():
    url = api_url + "/user/items?count=1000&page="
    payload = {}
    headers = {
        'Authorization': authorization
    }
    page = 1
    is_last_page = False
    item_ids = []
    try:
        while not is_last_page:
            logging.info("GET all items from page {}".format(page))
            response = requests.request("GET", url + str(page), headers=headers, data=payload)
            data = response.json()
            if data["isLast"]:
                is_last_page = True
            page += 1
            item_ids += [item["id"] for item in data["data"]]
    except Exception as e:
        logging.error("Exception: {}".format(e))
        raise
    logging.debug("Item Ids: {}".format(item_ids))
    logging.info("# Items: {}".format(len(item_ids)))
    # Dump in a file - to avoid duplicate calls if we run into an error later
    # with open(user_items_list_file, 'w') as outfile:
    #     json.dump(item_ids, outfile)
    return item_ids
def get_user_items_from_file(file_name):
    item_ids = []
    with open(file_name) as json_all_items:
        data = json.load(json_all_items)
        item_ids = [item["id"] for item in data["data"]]
    logging.debug("Item Ids: {}".format(item_ids))
    logging.info("# Items: {}".format(len(item_ids)))
    return item_ids
# Get all user galleries (includes item ids for each gallery)
def get_user_gallery_items():
    url = api_url + "/user/galleries?count=10&page="
    payload = {}
    headers = {
        'Authorization': authorization
    }
    page = 1
    is_last_page = False
    items_by_gallery = []
    try:
        while not is_last_page:
            logging.info("GET all galleries from page {}".format(page))
            response = requests.request("GET", url + str(page), headers=headers, data=payload)
            data = response.json()
            items_by_gallery += [gallery["itemIds"] for gallery in data["data"]]
            if data["isLast"]:
                is_last_page = True
            page += 1
    except Exception as e:
        logging.error("Exception: {}".format(e))
        raise
    gallery_items = sum(items_by_gallery, [])
    logging.info("# Galleries: {}".format(len(items_by_gallery)))
    logging.info("# Gallery Items: {}".format(len(gallery_items)))
    # Dump in a file - to avoid duplicate calls if we run into an error later
    # with open(user_gallery_list_file, 'w') as outfile:
    #     json.dump(gallery_items, outfile)
    return gallery_items
def get_user_gallery_items_from_file(file_name):
    with open(file_name) as json_all_galleries:
        data = json.load(json_all_galleries)
        items_by_gallery = [gallery["itemIds"] for gallery in data["data"]]
    gallery_items = sum(items_by_gallery, [])
    logging.info("# Galleries: {}".format(len(items_by_gallery)))
    logging.info("# Gallery Items: {}".format(len(gallery_items)))
    return gallery_items
# Delete all extra items
def delete_extra_items(extra_items):
    url = api_url + "/items/"
    payload = {}
    files = {}
    headers = {
        'authorization': authorization,
        'content-type': 'application/json',
        'user-agent': 'PostmanRuntime/7.28.4',
        'accept': '*/*',
        'accept-encoding': 'gzip, deflate, br',
        'connection': 'keep-alive',
    }
    count = 1
    for item_id in extra_items:
        try:
            response = requests.request("DELETE", url + str(item_id), headers=headers, data=payload, files=files)
            logging.info("({}/{}) Delete {}: {}".format(count, len(extra_items), item_id, response.status_code))
            count += 1
            time.sleep(3)
        except Exception as e:
            logging.error("Exception: {}".format(e))
            time.sleep(10)
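# Main flow: authenticate, list all uploads and all gallery (playlist) items,
# then delete every upload that is not referenced by any gallery.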
authorization = "Token {}".format(get_auth_token(username, password))
item_ids = get_user_items()
# item_ids = get_user_items_from_file("all_items.json")
gallery_items = get_user_gallery_items()
# gallery_items = get_user_gallery_items_from_file("user_galleries.json")
# Subtract gallery item ids from all item ids list
extra_items = list(set(item_ids) - set(gallery_items))
logging.info("# Extra Items: {}".format(len(extra_items)))
delete_extra_items(extra_items)
'''
Sample console log:
> python3 meural_clean_uploads.py
INFO: GET auth token
INFO: # Items: 1457
INFO: GET all galleries from page 1
INFO: # Galleries: 8
INFO: # Gallery Items: 1301
INFO: # Extra Items: 156
INFO: (1/156) Delete 10589211: 204
INFO: (2/156) Delete 10589212: 204
INFO: (3/156) Delete 10589214: 204
INFO: (4/156) Delete 10589228: 204
INFO: (5/156) Delete 10589229: 204
INFO: (6/156) Delete 10589230: 204
INFO: (7/156) Delete 10589231: 204
INFO: (8/156) Delete 10589232: 204
INFO: (9/156) Delete 10589234: 204
INFO: (10/156) Delete 10589235: 204
INFO: (11/156) Delete 10589237: 204
INFO: (12/156) Delete 10589240: 204
INFO: (13/156) Delete 10589241: 204
INFO: (14/156) Delete 10589242: 204
INFO: (15/156) Delete 10589243: 204
'''

kumarsm commented Jan 23, 2023

Thanks for sharing the script. It works flawlessly and freed up some space for me!

@Bob565656

The python script was a godsend, very much appreciated!
