Skip to content

Instantly share code, notes, and snippets.

@vaibhaw
Last active March 6, 2025 19:26
Show Gist options
  • Select an option

  • Save vaibhaw/b12e36a753f446474717 to your computer and use it in GitHub Desktop.

Select an option

Save vaibhaw/b12e36a753f446474717 to your computer and use it in GitHub Desktop.
Linux Utils
## debian package manager tips
# source: http://askubuntu.com/a/17829/258167
# source: http://askubuntu.com/a/423556/258167
# To check whether a package is installed or not:
dpkg -l
dpkg -l libprotobuf # OR
dpkg -l | grep libprotobuf
# Search which installed package owns files matching the pattern
# (note: -S searches file ownership; use `dpkg -L <package>` to list where a package's files are installed)
sudo dpkg -S libprotobuf
## create directory structure (replicate a dir tree without copying files)
# http://serverfault.com/a/204320
# rsync filter rules: "+ */" keeps all directories, "- *" excludes everything else
rsync -a -f"+ */" -f"- *" source/ destination/ # OR
# source: http://stackoverflow.com/a/4073992/925216
find . -type d > dirtree.txt
# -d '\n' makes xargs split on newlines only, so directory names
# containing spaces survive (plain xargs word-splits on any whitespace)
xargs -d '\n' mkdir -p < dirtree.txt # OR
# source: http://serverfault.com/a/204320
cd /path/to/directories &&
find . -type d -exec mkdir -p -- /path/to/backup/{} \;
## list number of files in subdirectories of a given dir
# source : http://unix.stackexchange.com/a/4176/62741
# source : http://unix.stackexchange.com/a/23139/62741
for f in *; do [ -d ./"$f" ] && find ./"$f" -maxdepth 1 -exec echo \; | wc -l && echo "$f"; done
# newer script
find -maxdepth 1 -type d | while read -r dir; do printf "%20s:\t" "$dir"; find "$dir" -type f | wc -l; done
# better script: same as above, but sorted by file count (descending).
# The sort must come AFTER the whole loop; piping it after `wc -l` inside
# the loop would sort a single line per directory and do nothing.
find -maxdepth 1 -type d | while read -r dir; do printf "%20s:\t" "$dir"; find "$dir" -type f | wc -l; done | sort -t"$(printf '\t')" -k2 -nr
## flatten a directory (collect matching files into one dir, dropping the subdir structure)
# source: http://stackoverflow.com/a/9801972/925216
# source: http://unix.stackexchange.com/a/52816/62741
find /src -type f -iname '*.jpg' -exec cp --target-directory /dest/ {} ';' # or
find /src -mindepth 2 -type f -iname '*.jpg' -exec cp '{}' /dest ';'
# -mindepth to skip files not in the top-level directory
# -t or --target-directory, -i ignore case
# NOTE: files with the same basename in different subdirs overwrite each other in /dest
## copy all files including hidden ones preserving the file attributes
# source http://askubuntu.com/a/86891/258167
cp -a /source/. /dest/
# The -a option is an improved recursive option that preserves all file attributes, and also preserves symlinks.
# The . at the end of the source path is specific cp syntax that copies all files and folders, including hidden ones.
## resize images using bash and imagemagick
# NOTE: overwrites each image in place; \! forces exact 256x256, ignoring aspect ratio
for name in /path/to/folder/*.jpg; do
# quote "$name" so paths containing spaces survive word-splitting
convert -resize 256x256\! "$name" "$name"
done
## generate md5sum recursively
# md5deep (third-party tool): -r recursive, -e show progress estimate, -l use relative paths
md5deep -rel folder > folder.md5
## select N random lines from a file (replace N with a number)
# source: http://stackoverflow.com/a/15065490/925216
shuf -n N input > output
## copy 200 random files from source to destination
# Ref http://unix.stackexchange.com/questions/38335
# If your system has shuf, you can use this quite conveniently (even handling ugly file names):
# -z NUL-terminates output, -e treats each argument as an input line, -n200 picks 200
shuf -zen200 source/* | xargs -0 cp -t destination
# If you don't have shuf but have a sort that takes -R, this should work.
# cut cannot split on NUL bytes ($'\0' collapses to an empty string in bash),
# so take the first 200 NUL-terminated entries with GNU head -z instead:
find source -type f -print0 | sort -Rz | head -zn 200 | xargs -0 cp -t dest
# to actually *move* the files, replace `cp -t` with `mv -t`
## find and delete all Mac OS generated ._.DS_Store and .DS_Store files recursively
cd <folder>
# -delete is find's built-in removal; the pattern *.DS_Store also matches ._.DS_Store
find . -name '*.DS_Store' -type f -delete
# alternative using -exec rm (this pattern matches only the exact name .DS_Store)
find . "-name" ".DS_Store" -exec rm {} \;
## reattach to already attached screen due to lost connection
# -D forcibly detaches the session from the other terminal, -r reattaches it here
screen -D -r '1234.somescreensession'
## compress a folder to multiple smaller files (20 MB chunks: folder.tar.gz.aa, .ab, ...)
tar cvzf - folder/ | split --bytes=20MB - folder.tar.gz.
## extract files from tar parts file
# cat joins the chunks in name order and streams the result into tar
cat pictures.tar.gz.* | tar xzvf -
## search a string in files of type .cc and .h within a folder
# n -> line number, r -> recursive, include -> include filetypes
# (the unquoted {cc,h} is brace-expanded by the shell into two --include options)
grep -nr --include=*.{cc,h} "string_to_search" folder
## find files with a specific extension under a folder
find folder -type f -name "*.cpp"
## Copy a command's output in terminal
# Ref: http://stackoverflow.com/q/5130968/925216
# Install a utility called xclip
sudo apt-get install xclip
# To copy into X (the primary X selection, pasted with middle-click)
pwd | xclip
# To use copied content
cd `xclip -o` # command within `` gets executed
# To copy into clipboard (pasted with Ctrl-V)
pwd | xclip -selection clipboard
# Now you can paste it anywhere using regular paste commands
# Put this alias in .bashrc to avoid writing long command
alias cclip='xclip -selection clipboard'
# tmux / screen key bindings
^b or ^a # Escape (prefix) sequence: ^b is tmux's default, ^a is screen's
^a-d # detach
^a-c # create new window
^a-n # navigate to next window
^a-p # navigate to previous window
## file transfer
scp -Cvpr <source> <destination>
# C -> compress, v -> verbose, p -> retain modification time, r -> recursive
## file transfer using rsync
# a -> archive mode (recursive, preserves attributes), v -> verbose,
# u -> skip files that are newer at the destination
rsync -avu --progress <source> <destination>
# use exclude to skip a folder. path of dir_or_file is relative to source folder
rsync -avu --progress <source> --exclude dir_or_file <destination>
# use exclude-from to skip many folders and files
# create a txt file. put folder names and files on separate lines.
# all paths in this file should be relative to source folder.
# Its structure is more or less like a .gitignore file
rsync -avu --progress <source> --exclude-from exclude.txt <destination>
# find jpg files recursively and resize them to 63% (overwrites each file in place)
# pre-requisite ImageMagick
find . -name \*.jpg -exec convert {} -resize 63% {} \;
## rename extension
# reference: http://unix.stackexchange.com/q/19654
for f in *.jpeg; do
# ${f%.jpeg} strips the suffix; -- guards against names starting with '-'
mv -- "$f" "${f%.jpeg}.jpg"
done
# OR (perl rename; anchor the pattern with \.ext$ so only the trailing
# extension is replaced, not the first occurrence anywhere in the name)
rename 's/\.oldExtension$/.newExtension/' *.oldExtension
## check md5sums
md5sum -c some_file.md5
# It will read all the lines in some_file.md5, create md5sum hash for each file
# and compare it with hash specified in this file.
# Format of file some_file.md5 should be:
# md5sum_hash1 *filename1
# md5sum_hash2 *filename2
# (the leading * marks binary mode; a plain space before the name means text mode)
## check disk information
# print the mount point (column 6) of every mounted filesystem;
# -P forces POSIX output so long device names don't wrap and break awk
df -Ph | grep -v "Used" | awk '{ print $6}'
## disk space utilities
# display dirs whose size > 1 GB
# (grep -P is a GNU extension; pattern keeps lines whose human size ends in G)
du -h | grep -P "^[0-9]+([,.][0-9]+)?G"
# source: http://superuser.com/a/397662/307710
# find largest 10 files
# sort numerically by size, keep the 10 biggest, then re-run du -sh on them
# for human-readable sizes. NOTE(review): the cut/xargs -I{} stages break on
# filenames containing newlines — acceptable for interactive use
find . -type f -print0 | xargs -0 du | sort -n | tail -10 | cut -f2 | xargs -I{} du -sh {}
# find largest 10 dirs
find . -type d -print0 | xargs -0 du | sort -n | tail -10 | cut -f2 | xargs -I{} du -sh {}
## copy specific file type keeping the folder structure
# source: http://unix.stackexchange.com/q/83593/62741
# cp --parents recreates each file's directory path under /target
find . -name '*.csv' -exec cp --parents '{}' /target ';'
# rsync: include all dirs and *.csv, exclude everything else;
# --prune-empty-dirs drops directories that end up containing no files
rsync -a --prune-empty-dirs --include '*/' --include '*.csv' --exclude '*' source/ target/
rsync -a --include '*/' --include '*.csv' --exclude '*' source/ target/ # keep empty directories
# If you do not want symlinks, modification dates, file permissions, owners etc. preserved
# replace -a with another combination of -rlptgoD
## Count number of lines across files matched by find
# -exec ... {} + batches the files onto one wc invocation and, unlike
# wc -l `find ...`, does not word-split paths containing spaces
find . -name "image_paths.txt" -exec wc -l {} +
## Get N random but unique lines from a text file. here unique means there should be no duplicates
# sort -u replaces the `cat file | sort | uniq` pipeline in one process
sort -u text_file.txt | shuf -n N > text_file_N.txt
# source: http://stackoverflow.com/a/618381/925216
# check source for more `clever` solutions
# In conjunction with find: pass the filename as $1 instead of splicing {}
# into the bash -c string (splicing breaks on special chars in the path)
find . -name "img_paths.txt" -exec bash -c 'sort -u "$1" | shuf -n 500 >> img_500.txt' _ {} ';'
## List all dirs which contain filenames of certain pattern
# %h prints each match's parent directory; sort -u removes duplicates
find . -name "*install*" -type f -printf "%h\n" | sort -u
# printf has a lot of options.
# %f - filename(w/o leading directories)
# %h - leading directories path without filename
# %p - filepath
## cat -> find -> cp
# source: http://superuser.com/a/180252/307710
cat list.txt | xargs -I % echo find . -type f -name % -exec cp '{}' new_folder/ ';'
# % is one line on list.txt
# echo is used just to check what command will be executed
# (drop the `echo` to actually perform the copy)
## use filename from the remote server
# -O writes to a file instead of stdout; -J honours the server-suggested
# filename from the Content-Disposition header
curl -J -O url
# OR if you know the filename
wget -O filename.ext url
## select N random lines from a text file (same shuf recipe as above)
shuf -n N input_file > output_file
## rename file.png to file.jpg
# source http://stackoverflow.com/q/1224766/925216
for file in *.png; do
[ -e "$file" ] || continue # skip the literal '*.png' when nothing matches
# $( ) instead of backticks; -- guards against names starting with '-'
mv -- "$file" "$(basename -- "$file" .png).jpg"
done
# or, using pure parameter expansion (/%.png replaces the suffix only)
for file in *.png; do
[ -e "$file" ] || continue
mv -- "${file}" "${file/%.png/.jpg}"
done
## check if a file exists
# source: http://stackoverflow.com/q/638975/925216
if [ ! -f /tmp/foo.txt ]; then
echo "File not found!"
fi
# OR
# Always quote "$FILE": unquoted, an empty/unset FILE collapses the test to
# `[ -f ]`, which is TRUE (it tests the non-empty string "-f") and falsely
# reports that the file exists.
if [ -f "$FILE" ]; then
echo "File '$FILE' Exists"
else
echo "The File '$FILE' Does Not Exist"
fi
## time elapsed shell script
# source: http://stackoverflow.com/a/8903280/925216
# bash's builtin SECONDS counts seconds since it was last assigned
SECONDS=0
# do some work
elapsed=$SECONDS
echo "$((elapsed / 60)) minutes and $((elapsed % 60)) seconds elapsed."
## remove all lines in file B from file A
# source: http://stackoverflow.com/a/32747544/925216
# NR==FNR is true only while reading the first file (fileB): each of its
# lines is stored as a key in array a, then `next` skips the second pattern.
# For fileA, print only lines that are NOT keys in a (exact full-line matches).
awk 'NR==FNR{a[$0];next} !($0 in a)' fileB fileA
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment