@v01pe, forked from adamwulf/slack-files-downloader.sh (last active January 17, 2025)
Download all files from a Slack workspace export folder.
#!/bin/bash
#
# This script browses a Slack export folder and downloads all referenced files:
# channel files into EXPORT/<channel>/<year>/<month>/, canvases into export/CANVAS/,
# and user avatars into export/USER/<user>/.
#
# HOW TO:
# 1. As a Workspace admin, download an export of your Slack history (https://www.slack.com/services/export)
# 2. Make sure you have jq installed (https://stedolan.github.io/jq/)
# 3. Place this file at the root of your Slack export folder, next to channels.json
# 4. Run `bash slack-files-downloader.sh` in your terminal
#
# OPTIONS
# -o Overwrite files that already exist in the destination folder (default: skip them)
# -s Do not print a message when a file is skipped
# -l For external files, store the link in an .html file at the expected path instead of only printing it
# -v Verbose logging of file paths (repeat for more detail)
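#
# EXAMPLES (run from the export root; the flags above can be combined):
#   bash slack-files-downloader.sh          # interactive: prompts for a file type and a channel
#   bash slack-files-downloader.sh -o -v    # overwrite existing files, verbose logging
#   bash slack-files-downloader.sh -s -l    # skip silently, write .html link stubs for external files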
loglevel=0
while getopts "oslv" flag
do
case $flag in
o) overwrite=true;;
s) silent=true;;
l) link_external=true;;
v) loglevel=$((loglevel+1));;
esac
done
printf "\nSelect one specific file type to download or leave empty for any (e.g. mp3, binary, jpg, png):\n"
read usertype
printf "\nSelect a channel to look into or leave empty for all channels:\n"
read userchannel
printf "\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\nExporting channels\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n"
if [[ -z $userchannel ]]
then
export_channels=$(cat channels.json | jq -rc '.[].name')
else
export_channels=$userchannel
fi
for channel in $export_channels
do
if [[ -d "$channel" ]]
then
printf "\n============================================\nLooking into #$channel...\n============================================\n"
for file in "$channel"/*.json
do
(( $loglevel >= 1 )) && printf "Checking channel file '$file'\n"
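# Each entry below is a compact JSON array of
#   [file_access, name, is_external, filetype, timestamp, url_private_download, external_url, url_private];
# del(..|nulls) drops missing fields, so when url_private_download is absent (and the earlier
# fields are present) index 5 shifts to external_url, then url_private. Spaces are stripped so
# the unquoted for-loop can split entries on whitespace without breaking on filenames.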
for a in $(cat $file | jq -c '.[].files[]? | [.file_access, .name, .is_external, .filetype, .timestamp, .url_private_download, .external_url, .url_private] | del(..|nulls)' | sed 's/ //g')
do
filetype=$(echo $a | jq -r '.[3]')
if [[ $filetype == $usertype ]] || [[ -z $usertype ]] || [[ -z $filetype ]]
then
file_access=$(echo $a | jq -r '.[0]')
if [[ $file_access == "not_visible" ]]
then
(( $loglevel >= 1 )) && printf "Skipping non visible file '$filename_raw': $a\n"
continue
fi
filename_raw=$(echo $a | jq -r '.[1]')
if [[ ! -z $filename_raw ]] && [[ $filename_raw != "null" ]]
then
(( $loglevel >= 2 )) && printf "Checking file '$filename_raw': $a\n"
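# transliterate accented characters, replace ':' with '_', and strip any path components
# so the name is safe to use as a local filename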
filename=$(echo $filename_raw | sed -e 'y/āáǎàçēéěèīíǐìōóǒòūúǔùǖǘǚǜüĀÁǍÀĒÉĚÈĪÍǏÌŌÓǑÒŪÚǓÙǕǗǙǛÜ:/aaaaceeeeiiiioooouuuuuuuuuAAAAEEEEIIIIOOOOUUUUUUUUU_/')
filename="${filename##*/}"
ts=$(echo $a | jq -r '.[4]')
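# NOTE: 'date -r <seconds>' is BSD/macOS syntax; on GNU/Linux replace it with: date -d "@$ts"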
year=$(date -r $ts '+%Y')
month=$(date -r $ts '+%m')
# prefix filenames with date/time, as the same filename could be used
# multiple times in different slack messages with different file contents.
datetime=$(date -r $ts '+%Y%m%d-%H%M%S')
filename="${datetime}-${filename}"
base_folder="EXPORT/$channel/$year/$month"
file_path="$base_folder/$filename"
if [ -f "$file_path" ] && [[ $overwrite != true ]]
then
if [ ! -s "$file_path" ]
then
printf "$file_path exists, but file is empty. Retrying!\n"
rm "$file_path"
else
if [[ $silent != true ]]
then
printf "$file_path already exists in destination folder. Skipping!\n"
fi
continue
fi
fi
is_external=$(echo $a | jq -r '.[2]')
url=$(echo $a | jq -rc '.[5]')
if [[ $is_external == "true" ]]
then
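# only handle this external file if no file with the same dated basename (any extension)
# exists yet; with several matches the test errors out and the entry is skipped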
if [ ! -f "${file_path%.*}".* ]
then
if [[ $link_external == "true" ]]
then
file_path_link="${file_path%.*}.html"
mkdir -p "$base_folder"
echo "download external file at <a href=\"$url\">$url</a> to <tt>$file_path</tt>" > "$file_path_link"
echo "Stored link to external file as link in '$file_path_link'"
else
echo "download external file at '$url' manually to '$file_path'"
fi
fi
continue
fi
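# not an external file: create the dated folder and download the private Slack URL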
mkdir -p "$base_folder"
printf "downloading to $file_path with $ts\n"
curl --progress-bar $url -o "$file_path"
fi
fi
done
done
fi
done
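# when a single channel was requested, stop here and skip the canvas and avatar export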
if [[ ! -z $userchannel ]]
then
exit 0
fi
printf "\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\nExporting canvases\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n"
root_folder_canvases="export/CANVAS"
# split command substitutions on newlines only, so canvas names containing spaces stay intact
IFS=$'\n'
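# canvases.json lists the workspace canvases; each entry below is a compact JSON array of
#   [name, id, filetype, created, url_private_download]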
for canvas in $(cat canvases.json | jq -c '.[] | [.name, .id, .filetype, .created, .url_private_download]')
do
filetype=$(echo $canvas | jq -r '.[2]')
if [[ $filetype != $usertype ]] && [[ ! -z $usertype ]] && [[ ! -z $filetype ]]
then
continue
fi
filename_raw=$(echo $canvas | jq -r '.[0]')
canvas_id=$(echo $canvas | jq -r '.[1]')
ts=$(echo $canvas | jq -r '.[3]')
year=$(date -r $ts '+%Y')
month=$(date -r $ts '+%m')
datetime=$(date -r $ts '+%Y%m%d-%H%M%S')
filename="${datetime}_${canvas_id}-${filename_raw}"
file_path="$root_folder_canvases/$filename"
if [ -f "$file_path" ] && [[ $overwrite != true ]]
then
if [[ $silent != true ]]
then
printf "$filename already exists in destination folder. Skipping!\n"
fi
continue
fi
mkdir -p "$root_folder_canvases"
url=$(echo $canvas | jq -rc '.[4]')
printf "downloading to $file_path with $ts\n"
curl --progress-bar "$url" -o "$file_path"
done
printf "\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\nExporting avatars\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n"
root_folder_users="export/USER"
IFS=$'\n'
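# users.json entries pair the user name with a profile object; the inner loop picks every
# non-empty profile key starting with "image" (e.g. image_24 ... image_512, image_original),
# i.e. the avatar URLs at their different sizes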
for user in $(cat users.json | jq -c '.[] | [.name, .profile]')
do
user_name=$(echo $user | jq -r '.[0]')
profile=$(echo $user | jq -rc '.[1]')
root_folder_user="$root_folder_users/$user_name"
mkdir -p "$root_folder_user"
for image in $(echo $profile | jq -rc 'to_entries[] | select(.key | startswith("image")) | select(.value != "") | [.key, .value]')
do
image_name=$(echo $image | jq -rc '.[0]')
image_path="$root_folder_user/$image_name.png"
if [ -f "$image_path" ] && [[ $overwrite != true ]]
then
if [[ $silent != true ]]
then
printf "$image_path already exists in destination folder. Skipping!\n"
fi
continue
fi
image_url=$(echo $image | jq -rc '.[1]')
echo "Downloading $user_name/$image_name: $image_url"
curl --progress-bar "$image_url" -o "$image_path"
done
done