Skip to content

Instantly share code, notes, and snippets.

@jbutko
Last active May 8, 2024 13:50
Show Gist options
  • Save jbutko/23289d1dde538ec53803a24ae17c4cab to your computer and use it in GitHub Desktop.
Save jbutko/23289d1dde538ec53803a24ae17c4cab to your computer and use it in GitHub Desktop.
Unix commands
# show the 10 biggest files/dirs under /var (du -a lists files too; sort numeric, descending)
sudo du -a /var | sort -n -r | head -n 10
# check available disk space (human-readable)
df -h
# check biggest folders/directories in GB (current dir only, -x stays on one filesystem)
du -hsx * | sort -rh | head -10
# create user non-interactively and assign a password
sudo adduser admin --gecos "First Last,RoomNumber,WorkPhone,HomePhone" --disabled-password
# NOTE(review): the plaintext password ends up in shell history — prefer feeding chpasswd from a file or interactive stdin
echo "admin:password123" | sudo chpasswd
# add sudo privileges to a user
sudo usermod -aG sudo jenkins
# or add user to the sudoers group
sudo adduser <username> sudo
# change the current user's password
passwd
# list files with ownership and human-readable size
ls -lh
# list files in dir sorted by size (largest first)
ls -lhaS /dir
# to recursively give directories read & execute (traverse) privileges:
find /path/to/base/dir -type d -exec chmod 755 {} +
# to recursively give files read privileges:
find /path/to/base/dir -type f -exec chmod 644 {} +
# synchronize the server clock (one-shot NTP sync)
sudo ntpdate -s time.nist.gov
# cron job to keep the clock in sync
sudo crontab -u root -e
# and then add this line to the crontab:
1 1 * * * sudo ntpdate -s time.nist.gov # synchronize time
# check cron log
sudo grep CRON /var/log/syslog
# crontab field format (left to right):
#   min          (0 - 59)
#   hour         (0 - 23)
#   day of month (1 - 31)
#   month        (1 - 12)
#   day of week  (0 - 6) (Sunday=0)
# run cron job every hour, e.g. 01:01, 02:01, etc.
1 * * * * /usr/bin/wget -O - -q -t 1 http://localhost/cron.php
# list contents of a tar archive without extracting
tar -ztvf my-data.tar.gz
# copy dir recursively
cp -R source destination/
# set default shell to bash (for yourself)
chsh -s /bin/bash
# or for another user (needs root)
sudo chsh -s /bin/bash yourusername
# change owner of ssh folder
sudo chown username .ssh
# generate ssh keys
# https://kb.iu.edu/d/aews
# NOTE: DSA (`-t dsa`) is deprecated and disabled in modern OpenSSH — use ed25519
ssh-keygen -t ed25519
# login over ssh with a specific private key
ssh -i ~/.ssh/jsresponsive_scaleway.ppk [email protected]
# restart bash / re-read ~/.bashrc
exec bash
# or
source ~/.bashrc
# or
. ~/.bashrc
# check listening ports (numeric, all sockets)
netstat -ano|grep LISTEN
# change the owner of the folder recursively (trailing ':' also sets the user's default group)
sudo chown -R username: ~/folder
# find a directory by name anywhere on the filesystem
find / -name "dir-name-here"
# add execute permission to all sh files in the current dir
chmod +x *.sh
# remove a symbolic link (removes the link only, not its target)
sudo rm path/to/the/link
# nginx - check config for syntax errors
sudo nginx -t
# nginx - restart nginx service
sudo service nginx restart
# list all local users
cut -d: -f1 /etc/passwd
# remember ssh passphrase (add the key to the ssh agent)
ssh-add -k ~/.ssh/root_rsa
# start mongo as service (on startup)
sudo systemctl enable mongod.service
# watch memory consumption (refresh every 2 s, values in MB)
watch -n 2 free -m
# cd into the directory of the running bash script
cd "$(dirname "$0")"
# delete all files older than 5 days
find ./my_dir -mtime +5 -type f -delete
# get number of files in a directory (recursive)
sudo find /dirname -type f | wc -l
# copy folder and maintain attributes (permissions, timestamps, symlinks)
cp -dpR source/ destination/
# list of all users (getent also includes LDAP/NIS users)
getent passwd | cut -d':' -f1
# add user to group
sudo usermod -aG groupname username
# list all members of a group
grep 'group-name-here' /etc/group
# lock user:
# https://www.thegeekdiary.com/unix-linux-how-to-lock-or-disable-an-user-account/
# check folder size (summary, human-readable)
du -sh dir
# Basically, .git/objects does not have write permissions. The line below grants write permission to all the files and folders in the directory.
sudo chmod -R ug+w .;
#or
cd .git/objects
ls -al
sudo chown -R yourname:yourgroup *
# or
cd .git
sudo chown -R your-username:staff .git
# edit startup service script
sudo nano /etc/systemd/system/mongodb.service
# kill process
ps -ef | grep nginx # returns PID
kill -9 19506 # PID from ps command; try plain `kill` (SIGTERM) first — -9 skips cleanup
# via https://serverfault.com/a/565494
# add read/write access for a group to a folder
sudo chgrp webgroup /var/www # add owner of folder to group first
sudo chmod g+rwX /var/www -R # add read/write access recursively to group
# via https://askubuntu.com/a/751311
# get server public ip
curl ipinfo.io/ip
# find files containing a string in a folder (list filenames only)
grep -rl "string" /path
# processes sorted by CPU usage
top
# grep in whole folder and all files (recursive, line numbers, case-insensitive)
grep -rni "string" *
# search for a string in multiple files
grep -R --include=out-0.log --include=out-7.log '26.11.2018 14:41 +00:00'
# search for multiple string occurrences on one line in multiple files
grep -R --include=out-0.log --include=out-7.log "26.11.2018 14:41 +00:00" | grep INVALID
# grep exclude (-v inverts the match) and count results
grep -R --include=out-0.log --include=out-7.log --include=error-0.log --include=error-7.log "26.11.2018 19:" | grep "url.start() " | grep -v "before url.start()" | wc -l
# zip multiple files into a split (multi-part) archive, 500 MB per part
zip -s 500m logs.zip error-0.log.gz error-2.log.gz error-3.log.gz out-0.log.gz out-2.log out-3.log
# unzip specific files from archive
unzip ARCHIVE_NAME PATH_OF_FILE_INSIDE_ARCHIVE
# to list all files in archive
unzip -l archive.zip
# example
unzip logs.zip out.log
# zip selected files in current directory into one archive (-@ reads names from stdin)
find . -maxdepth 1 | egrep "(error)" | zip -@ error.logs.zip
# zip file and folder into one archive
zip -r backup.zip thisisfile.log somedir/
# zip files and folders in current dir and exclude some directories
zip -r archive.zip . -x "*build/*" "*cache/*" "*logs/*" "*node_modules/*"
# sum numbers extracted via grep (paste joins lines with '+', bc evaluates the sum)
grep -R "02.05.2020" | grep "(kB):" | cut -f6 -d ":" | paste -sd+ | bc
# check linux version
cat /etc/os-release
# find 10 largest directories in current folder
du -a | sort -n -r | head -n 10
# find 10 largest directories in current folder with dir exclusion
sudo du -a --exclude=./proc | sort -n -r | head -n 10
# ls-like listing of directories and their total sizes
du -sh *
# zip all files in current dir
zip -r logs.zip .
# view file in less, last line first (tac reverses line order)
tac filename | less
# to scroll inside less:
#   ctrl+f - forward one whole page
#   100j   - next 100 lines
#   100b   - previous 100 lines
# split a large file into smaller ones by size (1000 MB chunks, numeric suffixes)
split -b 1000m --numeric-suffixes error-2.log
# zip all files with a given extension into one archive
# (find -exec handles filenames with spaces, unlike backtick word-splitting)
find . -name "*.log" -exec zip log_name.zip {} +
# uninstall package, its dependencies and config files/data
sudo apt-get autoremove --purge nameofthepackage
# print a file's contents between two line numbers (inclusive)
awk 'FNR>=1395919 && FNR<=1396268' error-2.log
# grep with line numbers
grep "some text" *.log -n
# resize extended volume in digital ocean
sudo resize2fs /dev/disk/by-id/scsi-XYZ
# get ipv6 address of the server (interface eth0)
ip addr show dev eth0 | awk '{if ($1=="inet6") {print $2}}'
# search shell history (case-insensitive)
history | grep -i searchterm
# empty file content (truncate to zero bytes)
> nameoffile.log
# page through a file (use G to jump to the end, see keys below)
less filename.log
# G goes to the bottom of the file
# ^b goes up one page
# ? searches backwards.
# list all loaded services
systemctl list-units --type=service --all
# list installed packages (paginated)
sudo dpkg -l | more
# clear directory contents (keeps the directory itself; glob skips dotfiles)
rm -rf /path/to/directory/*
# multi grep (chained filter: lines matching both patterns)
grep -E 'pattern1' filename | grep -E 'pattern2'
# check which groups the current user belongs to
groups
# identify current user (uid, gid, groups)
id
# find files that do NOT contain a substring (-L), filtered by filename pattern
grep -IRiL "component: " . | grep -i .*stories.tsx
# how to install redis:
# https://www.digitalocean.com/community/tutorials/how-to-install-and-secure-redis-on-ubuntu-18-04
# filter out pm2 logs
pm2 logs app-name or pid | grep -i "Keyword keyword"
# delete all files starting with string
find /path/to/files -name 'name*' -exec rm {} \;
# list which user runs proces
ps aux | grep mongodb
# clear log file (make it empty)
truncate -s 0 /path/to/file
# get unix arch type
arch
# or
uname -a
# amd vs arm type (e.g. amd64 vs arm64)
dpkg --print-architecture
# grep near the end of a file with 15 context lines before (-B) and after (-A) each match
tail file.log --lines=1000000 | grep "string to find" -B 15 -A 15
# check killed processes (e.g. by the OOM killer) in system logs
egrep -i -r 'killed process' /var/log/
# show size of used swap
sudo swapon --show
# find whole-word matches in folder files, with line numbers
grep -rwn '/path/to/somewhere/' -e "pattern"
# find first occurrence in a text file (-m1 stops after one match)
grep -m1 whattofindgoeshere myfile.txt
# fix too big journalctl:
# https://askubuntu.com/a/1012913
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment