anonfiles/bayfiles/fichier bash thing
ba-download.sh
#!/bin/bash
# My name is ba-download.sh
# Usage ./ba-download.sh "bayfiles/anonfiles_URL"
# OR without argument and paste to the prompt ...
# Dependencies: System Tor OR Tor Browser running, curl
# Set minimal download_rate limit here ( kB )
min_rate="150"
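# Note: min_rate is a kB/s threshold; the speed tests below compare curl's
# %{speed_download} value (bytes per second) against "${min_rate}000", i.e. min_rate * 1000.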
tcurl() {
curl -x "socks5h://${cuser}:${cpass}@127.0.0.1:9050" \
-A "Mozilla/5.0 (Windows NT 10.0; rv:78.0) Gecko/20100101 Firefox/78.0" \
-H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8" \
-H "Accept-Language: en-US,en;q=0.5" \
-H "Accept-Encoding: gzip, deflate" \
--compressed \
--connect-to cdn-121.anonfiles.com::45.154.253.50 \
--connect-to cdn-122.anonfiles.com::45.154.253.51 \
--connect-to cdn-123.anonfiles.com::45.154.253.52 \
--connect-to cdn-124.anonfiles.com::45.154.253.53 \
--connect-to cdn-125.anonfiles.com::45.154.253.54 \
--connect-to cdn-126.anonfiles.com::45.154.253.55 \
--connect-to cdn-127.anonfiles.com::45.154.253.56 \
--connect-to cdn-128.anonfiles.com::45.154.253.57 \
--connect-to cdn-129.anonfiles.com::45.154.253.58 \
--connect-to cdn-130.anonfiles.com::45.154.253.59 \
--connect-to cdn-121.bayfiles.com::45.154.253.50 \
--connect-to cdn-122.bayfiles.com::45.154.253.51 \
--connect-to cdn-123.bayfiles.com::45.154.253.52 \
--connect-to cdn-124.bayfiles.com::45.154.253.53 \
--connect-to cdn-125.bayfiles.com::45.154.253.54 \
--connect-to cdn-126.bayfiles.com::45.154.253.55 \
--connect-to cdn-127.bayfiles.com::45.154.253.56 \
--connect-to cdn-128.bayfiles.com::45.154.253.57 \
--connect-to cdn-129.bayfiles.com::45.154.253.58 \
--connect-to cdn-130.bayfiles.com::45.154.253.59 \
"$@"
}
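# Why the throwaway credentials: with Tor's default IsolateSOCKSAuth setting, streams
# authenticated with different SOCKS user/pass pairs are kept on separate circuits, so
# regenerating cuser/cpass below is a cheap way to ask for a new exit without touching
# the control port. The --connect-to lines pin the cdn-1xx hostnames to fixed IPs,
# presumably to sidestep DNS resolution for those CDN hosts.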
# Detect socks5 port ...
#for test in 9150 9050 ''; do
# { >/dev/tcp/127.0.0.1/$test; } 2>/dev/null && { tsport="$test"; break; }
# [ -z "$test" ] && echo >&2 -e "\nNo open Tor port ... EXITING\n" && exit 1
#done
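# Note: the detection loop above is left commented out and tcurl() hard-codes port 9050
# (system Tor). If you run Tor Browser instead (SOCKS on 9150), either change 9050 there
# or re-enable the loop and use the detected port, e.g.:
#   -x "socks5h://${cuser}:${cpass}@127.0.0.1:${tsport}"
# (this mirrors what the 1fichier script below does).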
[ "$1" ] && ba_url="${1}" || { | |
echo | |
read -p "Paste the bayfiles / anonfiles URL> " ba_url | |
} | |
echo -e "\e[1mLAUCHING TESTS ...\e[0m" | |
count="3" | |
timeout="4" | |
iter="0" | |
while :; do | |
cuser="cu${RANDOM}" | |
cpass="cp${RANDOM}" | |
# Limiting tries to ${count} ... | |
[[ "$iter" -ne "${count}" ]] || { | |
echo -e "\n\e[1;31mNot able to get a good circuit for ...\n-> ${ba_url}\n... after ${count} tries ...\e[0m\nUsing the last one .." | |
break | |
} | |
# Testing server ... | |
file=$(tcurl -sS --max-time "${timeout}" -e "$ba_url" "$ba_url" | grep -oP 'href="https://cdn-\K[^"]*') | |
retval=$? | |
[ $retval -eq 0 ] && { | |
echo -e "\n\e[1;32mGood circuit acquired ... Continuing!\e[0m" | |
break | |
} | |
((iter++)) | |
done | |
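# Note: $? after the command substitution above is the exit status of the last command in
# the pipeline (the grep), so "Good circuit acquired" also means the download link was
# actually found on the page, not merely that curl reached the site.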
# If URL retrieval failed during initial test loop, we fetch it again ...
[ -z "$file" ] && file=$(tcurl -sS -e "$ba_url" "$ba_url" | grep -oP 'href="https://cdn-\K[^"]*')
[[ "$file" ]] && {
adrss="https://cdn-${file// /%20}"
echo -e "\nFile URL retrieved for $ba_url ... !\n-> ${adrss}"
} || {
echo -e "\n\e[1;31mFailed to retrieve file ${ba_url} URL! 404?\e[0m"
exit 1
}
# Testing for CDNs availability ...
echo -e "TESTING INITIAL ADDRESS -> ${adrss}"
test_cdn=$(tcurl -qfsS --connect-timeout 5 -I "${adrss}" -w '%{http_code}' -o /dev/null)
[[ "$test_cdn" = "200" ]] && echo -e "\e[1;32mHEADER RESPONSE : ${test_cdn} ... OK!\e[0m" || {
echo -e "\e[1;31mHEADER RESPONSE : ${test_cdn} ... KO!\e[0m"
for inc in {101..121}; do
adrss=$(sed "s/cdn-[0-9]\{1,3\}./cdn-$inc./" <<<"${adrss}")
echo -e "TESTING -> ${adrss}"
test_cdn=$(tcurl -qfsS --connect-timeout 5 -I "${adrss}" -w '%{http_code}' -o /dev/null)
[[ "$test_cdn" = "200" ]] && {
echo -e "\e[1;32mHEADER RESPONSE : ${test_cdn} ... OK!\e[0m"
break
} || echo -e "\e[1;31mHEADER RESPONSE : ${test_cdn} ... KO!\e[0m"
done
}
[[ "$test_cdn" = "200" ]] || {
echo -e "\n\e[1;31mFailed getting a working CDN! ...\e[0m"
exit 1
}
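# Note: tcurl() only pins cdn-121..cdn-130 to fixed IPs via --connect-to; for the
# cdn-101..cdn-120 fallbacks tried above, the hostname is resolved through Tor
# (socks5h) as usual.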
# Testing actual circuit speed to the available CDN ... offering choice to use it or get a better circuit ...
# if the download_rate doesn't match the set min_rate ... OR directly downloading if equal or greater ...
echo -e "\e[1mTESTING DOWNLOAD SPEED ...\e[0m"
avg_speed=$(tcurl -qfsS --connect-timeout 5 --max-time 10 -w '%{speed_download}' -o /dev/null -e "$ba_url" "${adrss}")
avg_speed="${avg_speed%,*}" | |
echo -e "\e[1;32mActual circuit Download_Rate\e[0m = \e[1m${avg_speed} ...\e[0m\n" | |
[[ ! "$avg_speed" -ge "${min_rate}000" ]] && { | |
echo -e "\e[1mTRY to get a fast circuit matching the ${min_rate}Ko/s = \e[1;32mENTER\e[0m / \e[1mOR Download with this one =\e[0m \e[1;32mANY CHARACTER + ENTER\e[0m" | |
read -r -p "?> " choice
[[ -z "$choice" ]] && {
echo -e "\n\e[1;32mTrying to get a fast download connection\e[0m"
# Limiting tries to 50 ... for finding a circuit matching the minimal download_rate limit
tries="50"
inc="0"
while :; do
[[ "${inc}" -ne "${tries}" ]] || {
echo -e "\n\e[1;31mNot able to get a circuit matching the download rate_limit for ...\n-> ${ba_url}\n... after ${tries} tries ...\e[0m"
exit 1
}
avg_speed=$(tcurl -qfsS --connect-timeout 5 --max-time 10 -w '%{speed_download}' -o /dev/null -e "$ba_url" "${adrss}")
avg_speed="${avg_speed%%[.,]*}" # strip any decimal part (dot or comma) so the integer compare below works
[[ "$avg_speed" -lt "${min_rate}000" ]] && { | |
echo -e "\e[1mDownload_Rate = ${avg_speed} ... Retrying ...\e[0m" | |
cuser="cu${RANDOM}" | |
cpass="cp${RANDOM}" | |
((inc++)) | |
continue | |
} || { | |
echo -e "\n\e[1mSTARTING ${avg_speed} download for ...\n-> ${ba_url}\e[0m" | |
break | |
} | |
done | |
} | |
} || echo -e "\n\e[1mSTARTING ${avg_speed} bytes per sec download for ...\n-> ${ba_url}\e[0m"
tcurl -e "$ba_url" "${adrss}" -J -O
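# -J -O saves the download under the server-supplied Content-Disposition filename,
# falling back to the last path segment of the URL.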
1fichier.sh
#!/bin/bash
# Dependencies: system Tor or Tor Browser running, curl.
# Usage ./1fichier.sh "https://1fichier.com/?URL"
# Or run and paste the URL to the prompt ...
tcurl(){
curl -x "socks5h://${cuser}:${cpass}@127.0.0.1:${tsport}" -A "Mozilla/5.0 (Windows NT 10.0; rv:78.0) Gecko/20100101 Firefox/78.0" -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8" -H "Accept-Language: en-US,en;q=0.5" -H "Accept-Encoding: gzip, deflate" --compressed "$@"
}
for test in 9150 9050 ''; do
{ >/dev/tcp/127.0.0.1/$test; } 2>/dev/null && { tsport="$test"; break; }
[ $test ] || { echo -e "\n\e[1;31m### NO OPEN TOR PORT ...\e[0m\n"; exit 1; }
done
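# Note: /dev/tcp/<host>/<port> is a bash-specific redirection, so run this with bash
# rather than sh/dash; the loop above only checks that something is listening on
# 9150 (Tor Browser) or 9050 (system Tor) and keeps the first port that answers.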
trap 'rm -f "${cookies}"; exit' INT
[[ $1 ]] && FILE="${1}" || {
echo; read -p "1FICHIER URL?> " FILE
}
echo -e "\n\e[1;32m### Fetching a usable Tor_Exit_Node ...\e[0m"
# count = max number of circuit changes ...
count="50"; iter="0" | |
while :; do | |
((iter++)) | |
[[ "$iter" -le "${count}" ]] && echo -e "\nRound ${iter}" || { | |
echo -e "\n\e[1;31mNot able to get a slot after ${count} tries...\e[0m\n" | |
exit 1 | |
} | |
cookies=$(mktemp "$PWD/cookies.XXXXXX") | |
cuser="cu${RANDOM}" | |
cpass="cp${RANDOM}" | |
PAGE=$(tcurl -k -c "${cookies}" -sS "${FILE}") | |
grep -Eq '<span style="color:red">Warning !</span>|<span style="color:red">Attention !</span>' <<< "${PAGE}" && { | |
echo "NO DOWNLOAD SLOT!" | |
rm -f "${cookies}" | |
} || { | |
get_me=$(grep -oP 'name="adz" value="\K[^"]+' <<< "${PAGE}") | |
[[ $get_me ]] && { | |
echo -e "SLOT OK! ... \e[1mDOWNLOADING\e[0m ...\n" | |
break | |
} | |
} | |
done | |
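# "adz" is a hidden form field on the 1fichier download page; its value has to be posted
# back (together with submit=Download) from the same cookie session to obtain the
# actual CDN download link extracted below.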
file_link=$(tcurl -kL -b "${cookies}" -c "${cookies}" -F "submit=Download" -F "adz=${get_me}" "${FILE}" | grep -A 2 '<div style="width:600px;height:80px;margin:auto;text-align:center;vertical-align:middle">' | grep -oP '<a href="\K[^"]+')
[[ $file_link ]] || {
echo -e "\n\e[1;31mFailed extracting CDN SLOT URL ...\e[0m\n"
rm -f "${cookies}"
exit 1
}
echo -e "\nCDN SLOT > ${file_link}\n"
tcurl -k -b "${cookies}" -e "${FILE}" "${file_link}" -J -O
rm -f "${cookies}"