#!/bin/bash
# DeckLoader: scrape No-Intro ROM-set listings from archive.org, let the
# user pick files by editing per-platform link lists, then download and
# extract them into the Steam Deck's Emulation/roms folders.
#
# Usage: ./DeckLoader.sh [scrape] [download] [skip-update]
#
# NOTE: the shebang must be the very first line of the file, and options
# belong in `set` (a `-e` in the shebang is lost when run as `bash script.sh`).
set -e
set -o pipefail

# Feature toggles; "scrape"/"download" CLI args can switch these on below.
ENABLE_SCRAPE_UPDATE=FALSE
ENABLE_DOWNLOAD=TRUE
ROM_DIR="/home/deck/Emulation/roms"
ENABLE_OPEN_TXT=TRUE

GIST_URL="https://gist.githubusercontent.com/Mearman/52076c8457a55670d46bb1305dc985b4/raw/DeckLoader.sh"

# Self-update: unless the caller passed "skip-update", fetch the latest
# copy of this script from the gist and re-run it, appending "skip-update"
# so the fresh copy does not update again.
if [[ "$*" != *skip-update* ]]; then
	echo "updating DeckLoader.sh..."
	# -f: fail on HTTP errors so a 404/500 page cannot clobber the script
	curl -fsS "$GIST_URL" -o DeckLoader.sh
	# make DeckLoader.sh executable
	chmod +x DeckLoader.sh

	echo "DeckLoader.sh updated"
	echo "restarting DeckLoader.sh..."
	# exec replaces this process with the updated script, forwarding all args
	exec ./DeckLoader.sh "$@" skip-update
fi
|
|
|
# Alternative ROM_DIR kept for reference (local testing):
# ROM_DIR="./Emulation/roms"
# mkdir -p "$ROM_DIR"

# Ensure the 7z extractor is available; install it if missing.
# NOTE(review): this uses apt, but SteamOS on the Steam Deck ships pacman —
# confirm the target environment actually provides apt.
if ! command -v 7z &>/dev/null; then
	echo "7z could not be found"
	echo "installing 7z..."
	sudo apt update && sudo apt install --assume-yes p7zip-full
	# pick up any PATH changes from the install (BUG FIX: was "~/.bashr")
	source ~/.bashrc
fi
|
|
|
# Enable optional behaviours when "scrape" and/or "download" appear anywhere
# among the positional arguments. ("$*" joins all args into one string for
# the substring match; "$@" inside [[ ]] is not meant for this.)
if [[ "$*" == *scrape* ]]; then
	ENABLE_SCRAPE_UPDATE=TRUE
fi
if [[ "$*" == *download* ]]; then
	ENABLE_DOWNLOAD=TRUE
fi
|
|
|
# Platform identifiers to process, in order; each is a key into the
# URL_LIST and DESTINATIONS maps defined below.
declare -a PLATFORMS=(
	GB
	GBC
	GBA
	GBA_MULTIBOOT
	N64
	N64DD
	NES
	SNES
	POKEMINI
)

# echo "PLATFORMS = ${PLATFORMS[@]}"
|
|
|
# Map: platform identifier -> archive.org No-Intro collection URL.
declare -A URL_LIST=(
	[GB]="https://archive.org/download/nointro.gb"
	[GBC]="https://archive.org/download/nointro.gbc"
	[GBA]="https://archive.org/download/nointro.gba"
	[GBA_MULTIBOOT]="https://archive.org/download/nointro.gba-multiboot"
	[N64]="https://archive.org/download/nointro.n64"
	[N64DD]="https://archive.org/download/nointro.n64dd"
	[NES]="https://archive.org/download/nointro.nes"
	[SNES]="https://archive.org/download/nointro.snes"
	[POKEMINI]="https://archive.org/download/nointro.poke-mini"
)
|
|
|
# Map: platform identifier -> directory the extracted ROMs end up in.
# Note GBA_MULTIBOOT shares the gba folder and N64DD shares n64.
declare -A DESTINATIONS=(
	[GB]="$ROM_DIR/gb"
	[GBC]="$ROM_DIR/gbc"
	[GBA]="$ROM_DIR/gba"
	[GBA_MULTIBOOT]="$ROM_DIR/gba"
	[N64]="$ROM_DIR/n64"
	[N64DD]="$ROM_DIR/n64"
	[NES]="$ROM_DIR/nes"
	[SNES]="$ROM_DIR/snes"
	[POKEMINI]="$ROM_DIR/pokemini"
)
|
|
|
# Print the platform -> URL table so the user can see what will be used.
echo
for p in "${PLATFORMS[@]}"; do
	printf '%s = %s\n' "$p" "${URL_LIST[$p]}"
done
|
|
|
# Scrape a directory-listing page for links to files of a given type and
# write them — each commented out with "# " — to a text file, one per line.
# The user then uncomments the lines they want downloaded.
#   $1 - URL of the page to scrape
#   $2 - file extension to keep (e.g. "7z")
#   $3 - output text file path
function getLinks() {
	local url=$1
	local fileType=$2
	local textFile=$3
	local html links prependedLinks link

	# download the html (quoted: the URL must not be word-split or globbed)
	html=$(wget -qO- "$url")

	# extract all href targets from anchor tags, keep only *.$fileType links.
	# BUG FIX: the html is passed quoted via printf; the old `echo $html`
	# word-split the page AND glob-expanded any '*' in it.
	links=$(printf '%s\n' "$html" |
		grep -o '<a .*href=.*>' |
		sed -e 's/<a /\n<a /g' |
		sed -e 's/<a .*href=['"'"'"]//' -e 's/["'"'"'].*$//' -e '/^$/ d' |
		grep -E "\.$fileType$")

	# make relative links absolute and prepend "# " (commented out by default)
	prependedLinks=$(while IFS= read -r link; do
		[ -n "$link" ] || continue
		if [[ $link == *"http"* ]]; then
			echo "# $link"
		else
			echo "# $url/$link"
		fi
	done <<<"$links")

	# write the commented links to the text file and echo them back
	echo "$prependedLinks" >"$textFile"
	cat "$textFile"
}
|
|
|
# Extract a 7z archive into a destination directory, deleting the archive
# afterwards only if extraction succeeded.
#   $1 - path to the .7z archive
#   $2 - destination directory (created by 7z if missing)
function extract7z() {
	# Run the command as the 'if' condition: under `set -e` a bare failing
	# command would kill the whole script before the old `[ $? -eq 0 ]`
	# check ever ran; this way a failed extraction just skips the delete.
	if 7z x -y "$1" -o"$2"; then
		rm -- "$1"
	fi
}
|
|
|
# Decode a URL-encoded string, e.g. "hello%20world" -> "hello world".
#   $1 - the encoded string
# Prints the decoded string to stdout with no trailing newline.
function decode_url_encoded_characters() {
	local encoded=$1 decoded

	# '+' encodes a space in URL query strings
	encoded=${encoded//+/ }

	# turn every "%XX" escape into "\xXX" and let printf %b expand it
	decoded=$(printf '%b' "${encoded//%/\\x}")

	printf '%s' "$decoded"
}
|
|
|
# Download every non-commented URL listed in a links file.
#   $1 - text file of URLs (lines starting with '#' are skipped)
#   $2 - destination directory for the downloads
function downloadLinks() {
	local line CLEAN_URL FILENAME
	# IFS= and -r preserve leading whitespace and literal backslashes
	while IFS= read -r line; do
		# skip commented-out entries
		if [[ ! $line =~ ^# ]]; then
			CLEAN_URL=${line//[[:blank:]]/}
			FILENAME=$(decode_url_encoded_characters "$(basename "$CLEAN_URL")")
			echo "Downloading $FILENAME from $CLEAN_URL"
			# quiet wget, but keep the progress bar
			wget -q --show-progress --progress=bar:force:noscroll -O "$2/${FILENAME}" "$CLEAN_URL"
			echo
		fi
	done <"$1"
	# BUG FIX: was `exit 0`, which terminated the entire script after the
	# first platform; `return` lets the caller's loop continue.
	return 0
}
|
|
|
# Download every non-commented URL from a links file into a temp directory,
# then extract each downloaded archive into the destination directory.
#   $1 - text file of URLs (lines starting with '#' are skipped)
#   $2 - temp directory that receives the downloaded archives
#   $3 - destination directory for the extracted files
function downloadAndExtract() {
	local line CLEAN_URL FILENAME
	# IFS= and -r preserve leading whitespace and literal backslashes
	while IFS= read -r line; do
		# skip commented-out entries
		if [[ ! $line =~ ^# ]]; then
			CLEAN_URL=${line//[[:blank:]]/}
			FILENAME=$(decode_url_encoded_characters "$(basename "$CLEAN_URL")")
			echo "Downloading $FILENAME from $CLEAN_URL"
			# quiet wget, but keep the progress bar
			wget -q --show-progress --progress=bar:force:noscroll -O "$2/${FILENAME}" "$CLEAN_URL"
			echo
			echo "Extracting $FILENAME"
			extract7z "$2/${FILENAME}" "$3"
		fi
	done <"$1"
}
|
|
|
# Working directories: the per-platform link lists and downloaded archives
# live alongside the script.
WORKING_DIR="./"
mkdir -p "$WORKING_DIR"

LINKS_DIR="${WORKING_DIR}/links"
mkdir -p "$LINKS_DIR"

ARCHIVE_DIR="${WORKING_DIR}/archives"
|
|
|
# Main loop: for each platform, (re)scrape the link list when forced or
# missing, optionally open it for editing, then download and extract the
# uncommented links.
for platform in "${PLATFORMS[@]}"; do
	echo "=================================="
	echo
	echo "PLATFORM = $platform"
	echo "URL = ${URL_LIST[$platform]}"
	echo

	# scrape when the "scrape" arg was given, or when no link file exists yet
	if [ "$ENABLE_SCRAPE_UPDATE" = TRUE ] || [ ! -f "$LINKS_DIR/$platform.txt" ]; then
		getLinks "${URL_LIST[$platform]}" "7z" "$LINKS_DIR/$platform.txt"
	else
		echo "Skipping scrape_links"
	fi

	# show the absolute path of the link file
	TXT_PATH=$(realpath "$LINKS_DIR/$platform.txt")
	echo "$TXT_PATH"

	# optionally open the link file in the default editor, detached so it
	# survives this script exiting
	if [ "$ENABLE_OPEN_TXT" = TRUE ]; then
		xdg-open "$TXT_PATH" & disown
	fi

	echo

	# archives are downloaded into a per-platform temp dir, then extracted
	# into the platform's ROM destination
	TEMP_DIR="${ARCHIVE_DIR}/$platform"
	mkdir -p "$TEMP_DIR"
	OUTPUT_DIR="${DESTINATIONS[$platform]}"
	if [ "$ENABLE_DOWNLOAD" = TRUE ]; then
		downloadAndExtract "$LINKS_DIR/$platform.txt" "$TEMP_DIR" "$OUTPUT_DIR"
	fi
done