Last active
February 7, 2021 17:47
-
-
Save MaxMatti/cdbf7a94eeef7cc57d6d3eb6042672de to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python3 | |
# -*- coding: utf-8 -*- | |
# This file has been created by Max Staff <[email protected]>. It is licensed under CC-BY-4.0: https://creativecommons.org/licenses/by/4.0/ | |
import requests | |
import sys | |
from time import sleep | |
class Pr0grammCrawler():
    """Logs in to pr0gramm.com and iterates over a user's favorites."""

    # Requests without a timeout can hang forever on a stalled
    # connection; cap every API call at this many seconds.
    REQUEST_TIMEOUT = 30

    def __init__(self, username, password):
        """Log in and keep the session cookies for later API calls.

        Raises TypeError (kept from the original interface so existing
        ``except TypeError`` callers still work) when the user is banned,
        the credentials are wrong, or the API response has an unexpected
        shape.
        """
        req = requests.post(
            "https://pr0gramm.com/api/user/login",
            data={"name": username, "password": password},
            timeout=self.REQUEST_TIMEOUT,
        )
        response = req.json()
        try:
            if not response["success"]:
                if response["ban"] is not None:
                    raise TypeError("User banned: " + str(response["ban"]))
                raise TypeError("Invalid credentials!")
        except KeyError:
            # Response did not have the expected keys; report whatever
            # error information the API did include.
            error = response.get("error", "")
            msg = response.get("msg", "")
            raise TypeError("Error: " + str(error) + " - \"" + str(msg) + "\"")
        self.username = username
        self.cookiejar = req.cookies

    def crawlFavorites(self, callback):
        """Walk all favorites of the logged-in user, newest first.

        Calls ``callback(url)`` once per item with the full-size image
        URL when the API provides one, otherwise the regular image URL.

        Returns False when the end of the favorites list was reached,
        True when the API replied with a non-200 status (this mirrors
        the original return convention).
        """
        urlPrefix = "https://img.pr0gramm.com/"
        apiUrl = "https://pr0gramm.com/api/items/get"
        # flags=9 and likes=<user>&self=true mirror the original query
        # string (content-type bitmask and "my own favorites" filter).
        params = {"flags": 9, "likes": self.username, "self": "true"}
        req = requests.get(apiUrl, params=params, cookies=self.cookiejar,
                           timeout=self.REQUEST_TIMEOUT)
        while req.status_code == 200:
            # Decode only after the status check: a non-200 error page
            # may not be JSON, and the loop is meant to bail out then.
            content = req.json()
            for item in content["items"]:
                # Prefer the full-resolution file when one exists.
                path = item["fullsize"] or item["image"]
                callback(urlPrefix + path)
            if content["atEnd"]:
                return False
            sleep(1)  # do not flood the API with requests
            # Page backwards through the list via the oldest id seen.
            params["older"] = content["items"][-1]["id"]
            req = requests.get(apiUrl, params=params, cookies=self.cookiejar,
                               timeout=self.REQUEST_TIMEOUT)
        return True
def main():
    """Entry point: log in with argv credentials and print favorite URLs.

    Expects ``sys.argv[1]`` = username and ``sys.argv[2]`` = password.
    """
    # Guard against missing arguments so the user gets a usage hint
    # instead of a bare IndexError traceback.
    if len(sys.argv) < 3:
        print("Usage: " + sys.argv[0] + " <username> <password>")
        sys.exit(1)
    try:
        crawler = Pr0grammCrawler(username=sys.argv[1], password=sys.argv[2])
        crawler.crawlFavorites(print)
    except TypeError as e:
        # Login/API failures are raised as TypeError by the crawler;
        # print the message rather than crashing with a traceback.
        print(e)
# Run only when executed as a script, not when imported as a module.
if __name__ == '__main__':
    main()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
(english version below)
Nutzung:
Wenn du Windows hast, musst du halt erstmal rausfinden, wie du Python-Skripte ausführst. Wenn du Linux hast, dürfte das kein Problem sein:
Dann haste eine Liste mit allen Favoriten als Download-Link. Die kannste dann entweder in das Download-Programm deiner Wahl stecken (wget, jDownloader, etc) oder halt direkt mit diesem Befehl:
Wenn das mal jemand mit ein paar mehr Favoriten testen möchte, wäre ich ihm sehr verbunden. Ich konnte es bisher nur mit meinen <100 Favoriten testen.
Usage:
On Windows, first find out how to run Python scripts. On Linux, it's easy:
The script prints out all your favorites with their download link. You can then paste the list into a program of your choice (wget, jDownloader, etc.) or directly download them with this command:
Please test this with an account that has a lot of favorites and then report back. So far I could only test it with <100 favorites in my account.