Reddit Images to Telegram
A set of OwnLang scripts that fetch top image posts from selected subreddits and forward them to a Telegram chat as a media group, remembering what has already been sent in a local SQLite database.

.gitignore keeps the installed modules, the local main_*.own entry scripts and the SQLite database out of version control:
own-modules
main_*.own
redditimages.db
database.own prepares the SQLite statements used to remember which post URLs have already been sent; it expects conn to be created by the including script:
// Create the schema once: one row per already-sent post
st = conn.createStatement()
st.executeUpdate(
    "CREATE TABLE IF NOT EXISTS posts (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      post_id STRING NOT NULL,
      subreddit STRING NOT NULL,
      url STRING NOT NULL,
      created_at INTEGER NOT NULL,
      sent_at INTEGER NOT NULL
    )")
st.executeUpdate(
    "CREATE UNIQUE INDEX IF NOT EXISTS url_idx ON posts (url)")
st.close()

stIsPostExists = conn.prepareStatement(
    "SELECT COUNT(*) FROM posts
     WHERE url = ?")
stAddPost = conn.prepareStatement(
    "INSERT INTO posts(post_id, subreddit, url, created_at, sent_at)
     VALUES(?, ?, ?, ?, ?)")

// A post counts as new while its URL has never been inserted
def isPostUnique(post) {
  stIsPostExists.setString(1, post.url)
  rs = stIsPostExists.executeQuery()
  return rs.getInt(1) == 0
}

def addPost(post) {
  stAddPost.setString(1, post.id)
  stAddPost.setString(2, post.sub)
  stAddPost.setString(3, post.url)
  stAddPost.setLong(4, post.time)
  stAddPost.setLong(5, time() / 1000) // time() is in milliseconds, sent_at is stored in seconds
  stAddPost.executeUpdate()
}
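For reference, a minimal sketch of how these helpers are wired up. The connection setup mirrors redditimages.own below; the post map is hypothetical but shaped like the ones Reddit.own produces:

use std, jdbc
conn = getConnection("jdbc:sqlite:redditimages.db")
include "database.own"

// Hypothetical post, same fields as Reddit.fetchSubreddit() returns
post = {
  "id": "abc123",
  "sub": "aww",
  "url": "https://i.redd.it/example.jpg",
  "time": 1700000000
}
if (isPostUnique(post)) {
  addPost(post) // recorded, so the same URL is skipped on the next run
}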
The entry script defines the config map and then hands control to redditimages.own:
config = {
  "token": "1234567890:AABBCCDDEE", // Telegram bot token
  "cookie": "Reddit cookie string",
  "peer": 1234, // chat_id
  "items-in-top": 3, // how many top posts to take from each subreddit
  "subreddits": [
    "wholesomememes", "aww"
  ]
}
include "redditimages.own"
modules.json lists the Gist-hosted OwnLang modules the bot depends on, here only the telegram-bot module:
[
  {
    "id": "18f1894447dfa72000a83011511a817c",
    "name": "telegram-bot"
  }
]
A small module manager: when run with the arguments "owm install", it clones or updates each module listed in modules.json into own-modules/:
use std

if (ARGS.length >= 2 && ARGS[0] == "owm") {
  use ["files", "json", "java"]
  File = newClass("java.io.File")
  runtime = newClass("java.lang.Runtime").getRuntime()

  def loadModulesJson(path = "modules.json") {
    f = fopen(path, "r")
    modules = jsondecode(readText(f))
    fclose(f)
    return modules
  }

  def exec(cmd, dir = ".") = runtime.exec(cmd, null, new File(dir))

  match (ARGS[1]) {
    case "install": {
      modulesDir = "own-modules"
      if (!exists(modulesDir)) {
        mkdir(modulesDir)
      }
      for module : loadModulesJson() {
        print module.name
        moduleDir = modulesDir + "/" + module.name
        if (!exists(moduleDir)) {
          // First install: clone the Gist into own-modules/<name>
          mkdir(moduleDir)
          cmd = "git clone https://gist.github.com/" + module.id + ".git " + module.name
          exec(cmd, modulesDir)
          println " installed"
        } else {
          // Already present: pull the latest revision
          exec("git pull origin master", moduleDir)
          println " updated"
        }
      }
    }
  }
}
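The script is driven purely by its arguments, so "owm install" has to be passed as script arguments; the exact interpreter command depends on how you launch OwnLang. After a successful install the working directory should contain roughly the following layout, which is what the include in redditimages.own relies on (the inner file name is taken from that include):

own-modules/
    telegram-bot/          (cloned from the Gist listed in modules.json)
        TelegramBot.own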
Reddit.own wraps the public subreddit JSON listing and returns only the fields the bot needs:
use okhttp, types

class Reddit {

  def Reddit(cookie) {
    this.cookie = cookie
  }

  def fetchSubreddit(subreddit, maxItems = 5) {
    // Public JSON listing of the subreddit front page
    url = "https://www.reddit.com/r/" + subreddit + ".json"
    response = okhttp.request()
        .headers({
          "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/116.0",
          "Cookie": this.cookie
        })
        .url(url)
        .get()
        .newCall(okhttp.client)
        .execute()
        .body()
        .string()
    if (!length(response)) return []
    jsonData = jsondecode(response)
    if (typeof(jsonData) != MAP || !arrayKeyExists("data", jsonData)) return []
    data = jsonData.data ?? []
    if (!length(data)) return []
    // Keep only the fields the rest of the bot uses
    return stream(data.children)
        .map(def(child) = child.data)
        .limit(min(maxItems, data.dist ?? 0))
        .map(def(post) = {
          "id": post.id,
          "sub": subreddit,
          "url": post.url,
          "time": post.created_utc,
          "title": post.title,
          "permalink": post.permalink,
          "flair_text": arrayKeyExists("link_flair_text", post) ? post.link_flair_text : ""
        })
        .toArray()
  }

  def fetchSubreddits(subreddits, maxItems = 5) =
      stream(subreddits)
          .flatMap(def(r) = try(def() = this.fetchSubreddit(r, maxItems), def(clazz, cause) = []))
          .toArray()
}
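A quick sketch of how the class could be used on its own, with placeholder cookie and subreddit values (the module list mirrors what the main script loads):

use std, math, json, functional
include "Reddit.own"

reddit = new Reddit("session cookie copied from the browser")
posts = reddit.fetchSubreddits(["aww", "wholesomememes"], 3)
// Each element is a map with id, sub, url, time, title, permalink and flair_text
for post : posts {
  println post.sub + ": " + post.title + " -> " + post.url
}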
redditimages.own ties everything together: it filters direct image links, skips URLs that were already sent, stores the new ones and posts them to Telegram as a media group:
use std, math, http, json, functional, files, jdbc

conn = getConnection("jdbc:sqlite:redditimages.db")

include "own-modules/telegram-bot/TelegramBot.own"
include "Reddit.own"
include "database.own"

bot = new TelegramBot(config.token)
reddit = new Reddit(config.cookie)

subreddits = reddit.fetchSubreddits(config.subreddits, config["items-in-top"])
// Keep direct image links only, drop everything sent before,
// and build the media group (Telegram allows at most 10 items per group)
media = stream(subreddits)
    .filter(def(p) = reduce([".jpg", ".jpeg", ".png"], false, def(acc, ext) = acc || indexOf(p.url, ext) > 0))
    .filter(::isPostUnique)
    //.peek(def(p) = bot.sendPhoto(config.peer, p.url))
    .limit(10)
    .peek(::addPost)
    .map(def(p) = {
      "type": "photo",
      "media": p.url,
      "caption": getCaption(p),
      "parse_mode": "html"
    })
    .toArray()
debug(media)
// sendMediaGroup can fail with flood control ("retry after N") or with a
// bad item ("failed to send message #K"); wait and retry, or drop the item
while (length(media) > 0) {
  r = bot.sendMediaGroupSync(config.peer, media)
  if (r.ok) break
  println r
  retryAfter = try(def() = parseInt(r.description.replaceAll(".*retry after (\d+).*", "$1")), 0)
  if (retryAfter > 0) {
    sleep(retryAfter * 1000 + 2800)
    continue
  }
  mediaIndex = try(def() = parseInt(r.description.replaceAll(".*failed.*?#(\d+).*", "$1")), -1)
  if (mediaIndex == -1) break
  // Telegram reports a 1-based media index
  media = arraySplice(media, mediaIndex - 1, 1)
  sleep(3000)
}

// Safety net: make sure the process exits even if a background thread is still running
thread(def() {
  sleep(15000)
  exit(0)
})

stIsPostExists.close()
stAddPost.close()
conn.close()
// Helpers
def debug(r) {
  echo(jsonencode(r))
}

def strToHashtag(str) =
    str.toLowerCase()
        .replaceAll("[^a-z_0-9\s]", "")
        .replaceAll("\s+", "_")

// Escape the characters that are special in Telegram HTML captions
def safe(str) = str.replace("&", "&amp;")
    .replace("<", "&lt;").replace(">", "&gt;")

def getCaption(post) {
  tag = ""
  if (length(post.flair_text ?? "") > 0) {
    tag = " #" + strToHashtag(post.sub + "_" + post.flair_text)
  } else if (length(post.link_flair_text ?? "") > 0) {
    tag = " #" + strToHashtag(post.sub + "_" + post.link_flair_text)
  }
  return sprintf(
      "<a href=\"%s\">%s</a>\n" +
      "<a href=\"https://reddit.com%s\">🗨 Comments</a>%s\n" +
      "🔎 <a href=\"https://saucenao.com/search.php?url=%s\">SauceNAO</a>, " +
      "<a href=\"https://www.alamy.com/search.html?imageurl=%s\">Alamy</a>",
      safe(post.url), safe(post.title),
      safe(post.permalink), tag,
      urlencode(post.url),
      urlencode(post.url)
  )
}
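For a hypothetical r/aww post titled "Cute puppy" with flair "Dogs", an image URL of https://i.redd.it/example.jpg and a permalink of /r/aww/comments/abc123/cute_puppy/, getCaption() would produce HTML along these lines:

<a href="https://i.redd.it/example.jpg">Cute puppy</a>
<a href="https://reddit.com/r/aww/comments/abc123/cute_puppy/">🗨 Comments</a> #aww_dogs
🔎 <a href="https://saucenao.com/search.php?url=https%3A%2F%2Fi.redd.it%2Fexample.jpg">SauceNAO</a>, <a href="https://www.alamy.com/search.html?imageurl=https%3A%2F%2Fi.redd.it%2Fexample.jpg">Alamy</a>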