Add to the beginning of ~/.bashrc:
[ -d ~/bin ] && PATH=~/bin:$PATH
mysql and mysqldump without typing user:password
#!/usr/bin/env bash
PASSWORD=password
USER=user
DATABASE=database
DB_FILE=/tmp/${DATABASE}.sql.gz
EXCLUDED_TABLES=(
  table_1
  table_2
  table_3
)
# build an --ignore-table flag for every excluded table
IGNORED_TABLES_STRING=''
for TABLE in "${EXCLUDED_TABLES[@]}"; do
  IGNORED_TABLES_STRING+=" --ignore-table=${DATABASE}.${TABLE}"
done
mysqldump -u${USER} -p${PASSWORD} ${DATABASE} ${IGNORED_TABLES_STRING} | gzip > ${DB_FILE}
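For the mysql half of that tip, a minimal wrapper dropped into ~/bin (which the .bashrc line above puts on PATH) could look like this; the /usr/bin/mysql path and the credentials are assumptions to adjust:
#!/usr/bin/env bash
# ~/bin/mysql - forward to the real client with stored credentials
exec /usr/bin/mysql -uuser -ppassword "$@"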
# process a huge single-line file in fixed 32 KB chunks (setting $/ to a reference makes perl read fixed-size records instead of lines)
perl -e 'BEGIN{$/=\32768}' -pe "s/a/b/g" < one-line-250-mb.txt
#!/usr/bin/env bash
# extract a phar archive into the "extracted" directory
phar extract -f php_util.phar extracted
# how to check (in PHP) whether the code is running from inside a phar:
# if (strpos(__DIR__, 'phar://') === 0) {}
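To inspect the archive before extracting it (same php_util.phar assumed), the phar tool also has a list command:
phar list -f php_util.phar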
#!/usr/bin/env bash
USER=homestead
PASSWORD=secret
DATABASE=database
DB_FILE=/tmp/${DATABASE}.sql.gz
# dump the database (append a table name after ${DATABASE} to dump just one table) and gzip it
mysqldump --extended-insert --skip-lock-tables -u${USER} -p${PASSWORD} ${DATABASE} | gzip > ${DB_FILE}
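To load such a dump back in (standard gunzip/mysql pipeline, reusing the same variables):
gunzip < ${DB_FILE} | mysql -u${USER} -p${PASSWORD} ${DATABASE}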
vagrant plugin install vagrant-vbguest vagrant-bindfs vagrant-hostsupdater
# after.sh (Homestead's provisioning script):
git config --global user.email [email protected]

-- size of a specific table in MB:
SELECT
table_name AS `Table`,
round(((data_length + index_length) / 1024 / 1024), 2) `Size in MB`
FROM information_schema.TABLES
WHERE table_schema = "$DB_NAME"
AND table_name = "$TABLE_NAME"
ORDER BY ROUND(((data_length + index_length) / 1024 / 1024), 2) DESC;
-- or this query to list the size of every table in every database, largest first:
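-- (one common form of that query, using the same information_schema columns)
SELECT table_schema AS `Database`,
       table_name AS `Table`,
       ROUND(((data_length + index_length) / 1024 / 1024), 2) `Size in MB`
FROM information_schema.TABLES
ORDER BY (data_length + index_length) DESC;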
use Symfony\Component\Filesystem\Filesystem;

// recursively copy $source into $target
$fileSystem = new Filesystem();
$fileSystem->mkdir($target);
$directoryIterator = new \RecursiveDirectoryIterator($source, \RecursiveDirectoryIterator::SKIP_DOTS);
$iterator = new \RecursiveIteratorIterator($directoryIterator, \RecursiveIteratorIterator::SELF_FIRST);
foreach ($iterator as $item) {
    if ($item->isDir()) {
        $targetDir = $target.DIRECTORY_SEPARATOR.$iterator->getSubPathName();
        $fileSystem->mkdir($targetDir);
    } else {
        $fileSystem->copy($item->getPathname(), $target.DIRECTORY_SEPARATOR.$iterator->getSubPathName());
    }
}
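The Symfony Filesystem component also provides a mirror() method, so when no per-item logic is needed the whole loop can be replaced by a single call:
$fileSystem->mirror($source, $target);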
# find files older than 180 days and report their total size in MB
find ./logs/* -type f -mtime +180 -exec du -ks {} \; | cut -f1 | awk '{total=total+$1}END{print total/1024}'
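If the point of measuring them is to clean them up afterwards (an assumption; the note above only measures), GNU find can remove the same set directly:
find ./logs/* -type f -mtime +180 -delete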
# normalize permissions: 755 for directories, 644 for files
find <folder> -type d -exec chmod 755 {} \;
find <folder> -type f -exec chmod 644 {} \;