
@FransUrbo
Forked from mmpx12/leakedzone.sh
Last active October 12, 2025 19:42
leakedzone downloader
#!/usr/bin/env bash
function wait_for_new_cookie() {
    # NOTE: 'stat --format' is GNU stat; on macOS/BSD the equivalent is 'stat -f %m'.
    local start="$(stat --format="%Y" "${HOME}/.leakedzone.cookie")"
    local end=0

    # Check every 30s whether the user has updated the cookie file.
    echo "ERROR: Update cookie"
    while [[ "${end}" -le "${start}" ]]; do
        sleep 30
        end="$(stat --format="%Y" "${HOME}/.leakedzone.cookie")"
    done
    printf "\r "
}
function do_curl() {
    local url="${1}"
    local out=''

    # ${out} is intentionally left unquoted in the curl call below,
    # so that '-o' and the filename become two separate arguments.
    [[ -n "${2}" ]] && out="-o ${2}"

    # Leakedzone and/or CloudFlare is making this difficult for us :( :(
    #
    # You need to go to the user's page in a browser, open the View->Developer->JavaScript
    # Console (that's where it is in Chrome anyway) and then the 'Network' tab.
    #
    # Then reload the page, verify that you're a human, and at the very top of the list
    # there's an entry with the name of the user. Click that one, and in the right part of
    # the console, make sure that the 'Headers' tab is selected, then scroll down to the
    # 'cookie' entry (the value should start with '_ga='). Copy that value and put it in
    # a file named '~/.leakedzone.cookie'.
    #
    # UNFORTUNATELY, you'll have to keep doing that every few minutes :( :(.
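    #
    # As a purely made-up example of what that file ends up containing (a single
    # line holding the full value of the 'cookie' request header):
    #
    #   _ga=GA1.2.1234567890.1700000000; <the rest of the copied header value>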
    if [[ -e "${HOME}/.leakedzone.cookie" ]]; then
        COOKIE="$(cat "${HOME}/.leakedzone.cookie")"
    else
        echo "ERROR: You must first go to 'https://leakedzone.com/<user>' and get the cookie"
        exit 1
    fi
    curl "${url}" ${out} \
        -s --compressed \
        -H 'X-Requested-With: XMLHttpRequest' \
        -H 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/141.0.0.0 Safari/537.36' \
        -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7' \
        -H 'Accept-Encoding: gzip, deflate, br, zstd' \
        -H 'Accept-Language: en-GB,en-US;q=0.9,en;q=0.8,sv;q=0.7' \
        -H 'Connection: keep-alive' \
        -H 'Sec-Ch-Ua: "Google Chrome";v="141", "Not?A_Brand";v="8", "Chromium";v="141"' \
        -H 'Sec-Ch-Ua-Arch: "x86"' \
        -H 'Sec-Ch-Ua-Bitness: "64"' \
        -H 'Sec-Ch-Ua-Full-Version: "141.0.7390.67"' \
        -H 'Sec-Ch-Ua-Full-Version-List: "Google Chrome";v="141.0.7390.67", "Not?A_Brand";v="8.0.0.0", "Chromium";v="141.0.7390.67"' \
        -H 'Sec-Ch-Ua-Mobile: ?0' \
        -H 'Sec-Ch-Ua-Platform: "macOS"' \
        -H 'Sec-Ch-Ua-Platform-Version: "12.7.6"' \
        -H 'Sec-Fetch-Dest: document' \
        -H 'Sec-Fetch-Mode: navigate' \
        -H 'Sec-Fetch-Site: none' \
        -H 'Sec-Fetch-User: ?1' \
        -H 'Sec-Gpc: 1' \
        -H 'Upgrade-Insecure-Requests: 1' \
        -H "Cookie: ${COOKIE}"
}
function DownloadVideo() {
    local video="${1}"
    local out="${2}"
    local try="${3:-1}"

    # The $2 suffix is something like '.m3u8'. When concatenating, 'ffmpeg' creates
    # a whole bunch of '.ts' files, but doesn't output an actual concatenated video!
    # Not sure why, but better to just overwrite the suffix with '.mp4'. We'll deal
    # with any fallout of that if there is any..
    out="$(echo "${out}" | sed 's@\..*@@').mp4"
    [[ -e "videos/${out}" ]] && return

    # Get the part list.
    do_curl "${video}" ./.tmp1
    if [[ ! -e ./.tmp1 ]] || grep -q '^{' ./.tmp1 2> /dev/null; then
        # A JSON body here means an error response, such as:
        #   First message:  Too Many Requests
        #   Second message: Unauthorized
        # We really don't care either way, we just wait..
        rm -f ./.tmp*
        echo "FAIL/retr: try='${try}' (${video})" >> .log-error
        if [[ "${try}" -le 5 ]]; then
            # Try five times with a staggered wait, the site is throttling connections.
            # Start with 40s; it seems to always succeed after the second attempt (total: 1m30s).
            sleep $((30 + try * 10))
            ((try++))
            # Yes, recursive! Could be dangerous, but we just fall through early on
            # in this function, because if *this* call succeeds, the file will exist..
            DownloadVideo "${video}" "${out}" "${try}"
        else
            echo "FAIL/list: '${video}'" >> .log-error
        fi
        return
    fi
    # Download each individual piece. Read from the playlist directly (not via a
    # pipeline subshell) so that 'return' below actually leaves this function on failure.
    while read -r url; do
        name="$(echo "${url}" | sed -e 's@?.*@@' -e 's@.*/@@')"
        do_curl "${url}" ".tmp-${name}"
        if [[ ! -e ".tmp-${name}" ]]; then
            echo -e "FAIL/part: '${url}'\n '${video}'" >> .log-error
            rm -f ./.tmp*
            return
        fi
        echo "file .tmp-${name}" >> ./.tmp2
    done < <(grep '^http' ./.tmp1)

    # Merge the individual pieces downloaded into one video.
    if grep -Eq '^http.*/icon' .tmp1; then
        # Re-encode each part and concatenate them over a pipe.
        for file in $(sed 's@^file @@' ./.tmp2); do
            ffmpeg -y -f mpegts -i "${file}" -c:a copy -c:v libx264 -pix_fmt yuv420p \
                -f mpegts pipe: 2> /dev/null
        done | \
            ffmpeg -f mpegts -i pipe: "videos/${out}" 2> /dev/null
    else
        ffmpeg -f concat -safe 0 -i ./.tmp2 -c copy "videos/${out}" > /dev/null 2>&1
    fi
    rm -f ./.tmp*
}
function GetVideos() {
    local username="$1"
    page=1 ; total=0

    rm -f ./.log* ./.tmp*
    [[ ! -d "videos" ]] && mkdir videos
    while true; do
        do_curl "https://leakedzone.com/$username?page=$page&type=videos&order=0" ./.tmp1
        if grep -q 'Just a moment' ./.tmp1; then
            wait_for_new_cookie
            continue
        fi

        a=($(jq -r '.[]|(.slug + "/" + .stream_url_play)' ./.tmp1))
        n=1
        if [[ "${#a[@]}" -eq 0 ]]; then
            echo
            return
        fi
        for i in "${a[@]}"; do
            slug="$(cut -d "/" -f1 <<< "$i")"
            path="$(cut -d "/" -f2 <<< "$i")"
            # The stream URL is obfuscated: drop the 16-character junk padding, reverse
            # the string and base64-decode what's left.
            if echo "${i}" | grep -qE '/.*/'; then
                # New (?) format - URI and PATH separated..
                base="$(cut -d "/" -f3 <<< "$i")"
                url="$(echo -n "$base" | rev | cut -c17- | base64 -d)?$(echo -n "$path" | \
                    cut -c17- | rev | base64 -d)"
            else
                url="$(echo -n "$path" | cut -c17- | rev | cut -c17- | base64 -d)"
            fi
            out="$(echo "$url" | sed -e 's@?.*@@' -e 's@.*/@@')"
            DownloadVideo "${url}" "${out}"
            ((n++)) ; ((total++))
            printf "\rpage: %4d, video: %2d/${#a[@]} (${total})" "${page}" "${n}"
        done
        ((page++))
    done
}
function GetPhotos() {
    local username="$1"

    [[ ! -d "photos" ]] && mkdir photos
    page=1 ; total=0
    while true; do
        do_curl "https://leakedzone.com/$username?page=$page&type=photos&order=0" ./.tmp1
        if grep -q 'Just a moment' ./.tmp1; then
            wait_for_new_cookie
            continue
        fi

        a=($(jq -r '.[].image' ./.tmp1))
        if [[ "${#a[@]}" -eq 0 ]]; then
            echo
            return
        fi
        n=1
        for i in "${a[@]}"; do
            if [[ ! -e "photos/$(basename "${i}")" ]]; then
                url="https://leakedzone.com/storage/$i"
                out="photos/$(basename "${url}")"
                do_curl "${url}" "${out}"
            fi
            ((n++)) ; ((total++))
            printf "\rpage: %4d, image: %2d/${#a[@]} (${total})" "${page}" "${n}"
        done
        ((page++))
    done
}
# Simple argument check so we fail with a usage message instead of odd curl errors.
if [[ -z "${1}" ]]; then
    echo "Usage: $(basename "${0}") <username>"
    exit 1
fi

GetPhotos "$1"
GetVideos "$1"
@goldenfreddynecro-boop

how do i use this? can you please make a video

@FransUrbo
Author

No point, just use it exactly like the original - leakedzone.sh <user_id>.
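For instance, assuming you've saved it as leakedzone.sh and made it executable (the username below is just a placeholder; downloads end up in ./photos and ./videos under whatever directory you run it from):

chmod +x leakedzone.sh
./leakedzone.sh some_username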

@stephondoestech

I think you need to update the first line to #!/usr/bin/env bash otherwise you'll get an execution error.

@FransUrbo
Author

Ok, so LeakedZone (or is it CloudFlare?!) has made things difficult for us again :( :(.

They're now trying to detect bots (i.e. curl!). So we have to fake it with more headers, and can't use wget to get images, so we use curl for that as well. Which is just as well, it makes the script more .. uniform :).

What's even more troubling is that you have to get a working (personal!) cookie every few minutes :(. I've put instructions on how to do that in the script; hopefully they're not too complex to understand..
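As a rough sketch of that last step (assuming macOS, and that the copied header value is still on the clipboard; pbpaste is not part of the script itself):

# paste the copied 'cookie' request-header value into the file the script reads
pbpaste > ~/.leakedzone.cookie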

@FransUrbo
Author

Every 48 images!! Dang, that's annoying!!

@FransUrbo
Author

Oh, sorry.. My fault!

Variables were overwriting each other. Solved by making important ones local (Rev20).
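
A tiny illustration of that failure mode, with made-up names rather than anything from the script: without local, a helper function silently clobbers its caller's variable.

bump() { n=0; }    # adding 'local n' here keeps the caller's n intact
n=5
bump
echo "$n"          # prints 0 instead of 5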

@FransUrbo
Author

Update for videos: need to accept more (video) encodings etc. Oh, and somewhere an extra space had snuck into the actual video URL..

@FransUrbo
Author

New update: try to detect when the cookie needs updating.. Still experimental.

@FransUrbo
Author

Fixed the wait-for-cookie-update and added some code comments (Rev23).

@FransUrbo
Author

Fix the wait-for-cookie-update. AGAIN!
