Recursively compress files with xz using multiple parallel xz processes
# With Bash 4.3 I'd rather use wait -n to keep N
# processes running at all times. As it is now you
# unfortunately have to wait for the last process
# to finish.

# Recursively compress all files in $dir,
# running up to $count xz processes at a time.
compress () {
  dir=$1
  count=${2:-2}
  num_procs=${3:-0}

  if [ "$count" -le 0 ]; then
    exit 1
  fi

  if [ ! -d "$dir" ]; then
    return 1
  fi

  for file in "$dir"/*; do
    if [ -f "$file" ]; then
      xz "$file" &
      # $! is the PID of the xz job just started.
      if [ $! -gt 0 ]; then
        ((num_procs++))
      fi
      # Once $count jobs have been started, wait for all of them
      # before starting more.
      if [ "$num_procs" -ge "$count" ]; then
        wait
        num_procs=0
        # In bash 4.3 I would try (see the sketch below):
        #wait -n
        #((num_procs--))
      fi
    fi
    if [ -d "$file" ]; then
      compress "$file" "$count" "$num_procs"
    fi
  done
}

# Example:
# compress /var/log/remote/2016/11
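
# A minimal sketch of the wait -n variant mentioned in the comments above,
# assuming Bash 4.3 or newer. wait -n returns as soon as any one background
# job exits, so the pool is topped back up to $count running xz processes
# instead of draining completely between batches. The function name
# compress_waitn is made up for this sketch; it is untested and kept
# separate from the original compress function.
compress_waitn () {
  dir=$1
  count=${2:-2}
  num_procs=${3:-0}

  if [ "$count" -le 0 ]; then
    return 1
  fi

  if [ ! -d "$dir" ]; then
    return 1
  fi

  for file in "$dir"/*; do
    if [ -f "$file" ]; then
      # When $count jobs are already running, block until any one of
      # them finishes, then free up that slot.
      if [ "$num_procs" -ge "$count" ]; then
        wait -n
        ((num_procs--))
      fi
      xz "$file" &
      ((num_procs++))
    fi
    if [ -d "$file" ]; then
      compress_waitn "$file" "$count" "$num_procs"
    fi
  done
}

# Example (as with the original, the caller waits for the last jobs):
# compress_waitn /var/log/remote/2016/11; wait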