Archive upscaled Midjourney images from Discord into Google Drive
"""
Discord bot that archives all the images in a channel.
Used to download all the upscaled Midjourney images
## Have it go through on startup and go back and get all upscaled images it never saw before
## then download all upscaled ones it sees moving forward
# # to get token/creds
https://discordpy.readthedocs.io/en/latest/index.html
https://discordpy.readthedocs.io/en/latest/api.html
"""
# pip install discord.py colorama requests google-auth google-auth-oauthlib google-auth-httplib2 google-api-python-client
"""
Requirements:
discord.py
colorama
requests
google-auth
google-auth-oauthlib
google-auth-httplib2
google-api-python-client
"""
# Bot token from https://discord.com/developers/applications/XXXXXXX/bot
SECRET_TOKEN = ""
GOOGLE_DRIVE_FOLDER = ""  # Google Drive folder ID for the "Midjourney Images" folder
LOCAL_DOWNLOAD_DIR = '/Users/me/'
# ELECTROPUNK_DISCORD_SERVER = 12345
ELECTROPUNK_MIDJOURNEY_CHANNEL = 123457

import os

import colorama
from colorama import Fore, Back, Style
import discord  # pip install discord.py
import requests  # pip install requests

colorama.init()
print(colorama.ansi.clear_screen())
"""
Fore: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET.
Back: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET.
Style: DIM, NORMAL, BRIGHT, RESET_ALL
print(
"""


def download_file_locally(local_filepath, image_url):
    """Download the file at image_url and save it to local_filepath."""
    r = requests.get(image_url)
    with open(local_filepath, 'wb') as f:
        f.write(r.content)

# Google Drive API client modules
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow


class DanoGoogleDriveHelper:
    # https://developers.google.com/identity/protocols/oauth2/scopes
    SCOPES = [
        # 'https://www.googleapis.com/auth/drive.metadata.readonly',
        'https://www.googleapis.com/auth/drive',  # read+write
    ]

    def __init__(self):
        # Set up the credentials for the API client
        # creds = Credentials.from_authorized_user_info(info=None)
        creds = self.verify_creds()
        # Build the API client
        self.service = build("drive", "v3", credentials=creds)

    def verify_creds(self):
        creds = None
        # The file token.json stores the user's access and refresh tokens, and is
        # created automatically when the authorization flow completes for the first time.
        if os.path.exists('token.json'):
            creds = Credentials.from_authorized_user_file('token.json', self.SCOPES)
        # If there are no (valid) credentials available, let the user log in.
        if not creds or not creds.valid:
            if creds and creds.expired and creds.refresh_token:
                creds.refresh(Request())
            else:
                flow = InstalledAppFlow.from_client_secrets_file(
                    'credentials.json', self.SCOPES)
                creds = flow.run_local_server(port=0)
            # Save the credentials for the next run
            with open('token.json', 'w') as token:
                token.write(creds.to_json())
        return creds

    def upload_file(self, local_filepath, file_metadata):
        """
        Upload a local file to Google Drive.

        Example file_metadata:
        {
            "name": "test_image.png",
            "mimeType": "image/png",
            "parents": [GOOGLE_DRIVE_FOLDER],  # optional: folder ID
        }
        """
        # Set up the file content
        media = MediaFileUpload(local_filepath, mimetype="image/png")
        # Make the API request to upload the file
        file = self.service.files().create(body=file_metadata, media_body=media, fields="id").execute()
        # print(f'File ID: "{file.get("id")}".')
        return file


class DanoDiscordClient(discord.Client):
    def __init__(self, intents, **options):
        super().__init__(intents=intents, **options)
        self.gdrive = DanoGoogleDriveHelper()

    async def on_ready(self):
        print(f'{Fore.GREEN}Logged on as {self.user}!{Fore.RESET}')
        self.channel = self.get_channel(ELECTROPUNK_MIDJOURNEY_CHANNEL)
        await self.channel.send("I'm online!")
        # TODO: Check all old messages going back X days and download any upscales
        #       that may have been missed (see the archive_missed_upscales() sketch below).
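
    # A minimal sketch of the catch-up TODO above; it is not called anywhere yet.
    # channel.history() is discord.py's async iterator over past messages; the
    # limit of 500 and the "skip files that already exist locally" check are
    # assumptions, not part of the original.
    async def archive_missed_upscales(self, limit=500):
        async for old_message in self.channel.history(limit=limit):
            if "Upscaled by" in old_message.content and old_message.attachments:
                local_filepath = os.path.join(LOCAL_DOWNLOAD_DIR, old_message.attachments[0].filename)
                if not os.path.exists(local_filepath):  # skip anything already archived locally
                    download_file_locally(local_filepath, old_message.attachments[0].url)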

    async def on_message(self, message):
        print(f'Message from {message.author}: {message.content}')
        # Check whether the message is from the target channel (sketch below).
        # If it carries an image: was it upscaled? Was it reacted to with an emoji?
        # TODO: What about remastered upscales?
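        # A possible channel filter for the check mentioned above, left commented
        # out so the handler behaves exactly like the original:
        # if message.channel.id != ELECTROPUNK_MIDJOURNEY_CHANNEL:
        #     return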
if "Upscaled by" in message.content or "Upscaled (Beta) by" in message.content:
print(f"{Fore.GREEN}Yes, I found an upscaled message!{Fore.RESET}")
print(message.attachments)
print(message.attachments[0].url)
print(message.attachments[0].filename)
print("Downloading the upscale.")
local_filepath = os.path.join(LOCAL_DOWNLOAD_DIR, message.attachments[0].filename)
download_file_locally(local_filepath , message.attachments[0].url)
print(f"{Fore.GREEN}Done downloading file:{Fore.RESET} {local_filepath}")
print("Uploading to Google Drive")
file_metadata = {
"name": message.attachments[0].filename,
"mimeType": "image/png",
"parents": [GOOGLE_DRIVE_FOLDER], # optional: folder ID
}
uploaded_gdrive_file = self.gdrive.upload_file(local_filepath, file_metadata)
print(f'Done uploading file: https://drive.google.com/file/d/{uploaded_gdrive_file.get("id")}/view')

    async def on_raw_reaction_add(self, raw_payload):
        # https://discordpy.readthedocs.io/en/latest/api.html#discord.RawReactionActionEvent
        channel = self.get_channel(raw_payload.channel_id)
        message = await channel.fetch_message(raw_payload.message_id)
        print(message.jump_url)
        # user = self.get_user(raw_payload.user_id)
        print(f"A message was reacted to: {raw_payload.message_id} {raw_payload}")
        print(f"  was reacted to: {raw_payload.emoji, raw_payload.channel_id}")
        print(f"  was reacted to: {raw_payload.member}, {raw_payload.user_id}")
        # TODO: If the emoji is ___ then download that image (sketch below).
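        # A commented-out sketch of the emoji TODO above; the 💾 emoji is only a
        # placeholder, since the original never picked one:
        # if str(raw_payload.emoji) == "💾" and message.attachments:
        #     local_filepath = os.path.join(LOCAL_DOWNLOAD_DIR, message.attachments[0].filename)
        #     download_file_locally(local_filepath, message.attachments[0].url)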


if __name__ == '__main__':
    # Run the bot
    intents = discord.Intents.default()  # permissions integer
    intents.message_content = True
    client = DanoDiscordClient(intents=intents)
    client.run(SECRET_TOKEN)