bash_profile
# MACOS VS LINUX CONFIG
if [[ $OSTYPE == *"darwin"* ]]; then
    echo LAUNCHED MACOS BASH PROFILE
    VS_CODE_PATH="/Applications/Visual Studio Code.app/Contents/Resources/app/bin"
    USER=`id -un`
    IS_MAC=1
    # pidof does not exist on macOS; emulate it with a function (an alias cannot take arguments)
    pidof() { ps -A | grep "$1" | awk '{print $1}'; }
    copy_suffix=" copy"
    sed_no_backup="''"
    pip3Path="/Users/$USER/anaconda3/bin/pip"
    cp_backup=""
    SITE_PACKS="/Users/$USER/anaconda3/lib/python3.7/site-packages"
else
    echo LAUNCHED LINUX BASH PROFILE
    VS_CODE_PATH="/usr/bin"
    IS_MAC=0
    # single quotes so $(pidof firefox) is evaluated when the alias is used, not when it is defined
    alias killff='kill $(pidof firefox)'
    copy_suffix=" (copy)"
    sed_no_backup=
    pip3Path="/usr/local/bin/pip3"
    cp_backup="--backup=t"
    SITE_PACKS="/home/$USER/.local/lib/python3.6/site-packages"
fi
# NOTE by default linux runs a non-login shell via bashrc, mac runs a login shell via bash_profile
# http://www.joshstaiger.org/archives/2005/07/bash_profile_vs.html
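# A minimal sketch (not part of the original setup) of how a Linux box could pick this
# file up from its interactive non-login shells, by adding to ~/.bashrc:
#   if [ -f "$HOME/.bash_profile" ]; then . "$HOME/.bash_profile"; fi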
# https://stackoverflow.com/editing-help#syntax-highlighting
# <!-- language: lang-py -->
# <!-- language: lang-cpp -->
# https://meta.stackexchange.com/questions/184108/what-is-syntax-highlighting-and-how-does-it-work/184109#184109
#DEBUG_BASH=
#unset DEBUG_BASH
#*********************************************************************************************************************************************
# CONVENIENCE ALIASES
#*********************************************************************************************************************************************
if [ -n "$DEBUG_BASH" ]; then
    echo DEBUG_CONVENIENCE_STUFF
fi
ED_PACKS=~/.editable_packages.txt
BASH_PRO=~/.bash_profile
alias code="'$VS_CODE_PATH/code'"
alias sbp="source $BASH_PRO"
alias editBP="code $BASH_PRO"
alias desk="ssh $DESKTOP_IP"
alias launch_vnc="ssh $DESKTOP_IP 'x11vnc -usepw'"
alias sync_bash="scp ~/.bash_profile $USER@$DESKTOP_IP:/home/$USER"
alias diskSpace="df -h"
alias chrome="/Applications/Google\ Chrome.app/Contents/MacOS/Google\ Chrome"
alias wt="chrome --new-window https://calendar.google.com/ https://mail.google.com/"
alias clear_syslog="sudo truncate -s 0 /var/log/syslog"
alias funcs="compgen -A function"
alias paths='echo $PATH | tr : "\n" | sed "s/^/    /g"'
alias all_extensions="find . | sed s#.*/##g | grep [.] | sed \"s/.*\.//g\" | sort | uniq"
alias files_w_ext="find . -regex \".*\..*\""
alias t1="tree -L 1"
alias lsys="less /var/log/syslog"
alias all_shell_commands="compgen -c"
alias r="reset"
function grep_all_files {
    pattern=$1
    grep -rni "$pattern" *
}
function grep_all_ext_files {
    ext=$1
    pattern=$2
    dir=$3
    grep -rni --include=$ext $pattern $dir
}
function child_dirs {
    target_dir=$1
    if [ -z "$target_dir" ]; then
        target_dir=.
    fi
    echo -e "target_dir=$target_dir\n"
    find "$target_dir" -type d -maxdepth 1 -mindepth 1 | sort
}
function scpRemoteFile {
    # copies remote file to my local machine
    remote_file=$1
    local_dir=$2
    scp $USER@$DESKTOP_IP:$remote_file $local_dir
}
function scpLocalFileToRemoteDir {
    # copies local file to the remote machine
    local_file=$1
    remote_dir=$2
    scp "$local_file" $USER@$DESKTOP_IP:"$remote_dir"
    #$USER@$DESKTOP_IP:/home/$USER
}
function rsyncRemoteDir {
    extension=$1
    remote_dir=$2
    local_dir=$3
    echo Syncing remote_dir=$remote_dir to local_dir=$local_dir
    rsync -zarv --prune-empty-dirs --include "*/" --include="$extension" --exclude="*" $USER@$DESKTOP_IP:"$remote_dir" "$local_dir"
}
function join_by {
    # carries out a str.join operation where the first argument is the delimiter.
    local delim=$1; shift; echo -n "$1"; shift; printf "%s" "${@/#/$delim}";
}
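# Usage sketch (hypothetical values, not part of the original file):
#   join_by , a b c        ->  a,b,c
#   join_by " - " 1 2 3    ->  1 - 2 - 3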
function pre_n_suffix {
    # Prepends and appends the prefix and suffix to each argument, space delimited
    local PREFIX=$1;
    local SUFFIX=$2;
    shift
    shift
    set -- "${@/#/$PREFIX}" && set -- "${@/%/$SUFFIX}"
    echo "$@"
}
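# Usage sketch (hypothetical values, not part of the original file):
#   pre_n_suffix "deps(" ")" //foo:bar //baz:qux   ->   deps(//foo:bar) deps(//baz:qux)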
function dot2gml {
    for i in `find . -name '*.dot'`; do
        echo CONVERTING $i...
        gmlFile=${i%.dot}.gml
        gv2gml -o $gmlFile $i
        # replace names with labels and box types with roundrectangle to be supported in yED
        sed -i '.bak' -E -e "s/^ name / label /g" -e "s/type \"box\"/type \"roundrectangle\"/g" $gmlFile
    done
}
function tar_only_files {
    # Creates a tar file out of target_dir including only files of the given type
    target_dir=$1
    file_type=$2
    find $target_dir -name "$file_type" | tar -cf subset_tar.tar -T -
}
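# Usage sketch (hypothetical values, not part of the original file):
#   tar_only_files ./configs "*.yaml"   ->   writes subset_tar.tar containing only the yaml files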
function clean_shared_mem {
    # kills all shared memory segments
    # (verified on clean boot that ipcs is empty, may want to repeat on new machines)
    ipcs -m | cut -d' ' -f2 | grep '^[0-9]' | while read x; do ipcrm -m $x; done
    # QNX
    find /dev/shmem/
    # Linux
    find /dev/shm/
}
function dup {
    # Duplicate a file in its present location, suffixed with the platform's copy_suffix (e.g. " (copy)")
    target_file=$1
    # Test if filename has an extension
    extension=""
    if [[ "`basename $target_file`" == *"."* ]]; then
        extension=".${target_file##*.}"
    fi
    # Copy duplicate file
    cp $cp_backup "$target_file" "${target_file%.*}${copy_suffix}${extension}"
}
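# Usage sketch (hypothetical file, not part of the original file):
#   dup notes.txt   ->   "notes (copy).txt" on Linux, "notes copy.txt" on macOS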
function get_linux_versions {
    # Guard clause
    if [ $IS_MAC -eq 1 ]
    then
        echo "MUST RUN FROM LINUX ENVIRONMENT ONLY!!"
        return
    fi
    cat /proc/version
    ldd --version | grep ldd
    lsb_release -a
}
function git_hub_branch {
    # Opens in chrome the current branch's github page
    git_hub_url=$GITHUB/tree/`curBranch`
    echo $git_hub_url
    chrome $git_hub_url
}
function hunt_docker_images {
    # grep the entire repo for any docker images found in our registry
    grep -rF $COMPANY_DOCKER_REGISTRY/ --exclude-dir "bazel-*" --exclude-dir "experimental" . | sed "s/.*$COMPANY_DOCKER_REGISTRY/$COMPANY_DOCKER_REGISTRY/; s/[ \"\`].*//" | sort -u
}
#*********************************************************************************************************************************************
# PYTHON STUFF
#*********************************************************************************************************************************************
if [ -n "$DEBUG_BASH" ]; then
    echo DEBUG_PYTHON_STUFF
fi
alias listReqs="grep -R --no-filename --include=requirements.txt [a-z] . | sort | uniq | grep -v \#"
#alias allReqs="echo pip install ${listReqs | tr '\n' ' '}"
alias apip="$pip3Path"
alias py3="/Users/$USER/anaconda3/bin/python"
alias pip3="$pip3Path"
alias edpacks="find $SITE_PACKS -name '*.egg-link' | sed 's/^/    /g'"
alias sps="open $SITE_PACKS"
alias supported_pip="sudo pip3 install --force-reinstall pip==9.0.1"
alias find_pythons="find /usr/bin | grep -E python[.0-9]*$"
alias site_packs="python -m site"
# edit package
function edpk {
    touch $ED_PACKS
    echo $1 >> $ED_PACKS
    apip install -e $1
}
function black_diff {
    # Runs black formatting on any modified python files
    errs=0
    for i in `git diff --name-only | grep -E "\.py$"`; do
        black "$i"
        if [ $? -ne 0 ]
        then
            errs=1
            echo "FAILED TO FORMAT $i"
        fi
    done
    if [ $errs -eq 1 ]
    then
        echo "BLACK FORMATTING FAILED"
    fi
    return $errs
}
function install_python36 {
    sudo apt-get update
    sudo apt-get install python3.6
}
function pytest2 {
    python2.7 -m pytest $*
}
function site_pack_search {
    pattern=$1
    for p in `python -m site | grep -F " '/" | tr -d "',"`; do
        find $p | grep $pattern
    done
}
# Upgrade desktop pip
# sudo pip3.6 install --upgrade pip
# pip3.6 --version
#
#*********************************************************************************************************************************************
# GIT STUFF
#*********************************************************************************************************************************************
if [ -n "$DEBUG_BASH" ]; then
    echo DEBUG_GIT_STUFF
fi
COMMIT_FILE=~/git_commit_message.txt
BRANCH_REGEX="user/$USER/([A-Z]+-[0-9]+)__.*"
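# Example branch name matching BRANCH_REGEX (hypothetical ticket, not part of the original file):
#   user/$USER/ABC-123__fix_null_deref   ->   Jira ticket "ABC-123" is extracted by ticket_in_br below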
alias gb="git branch"
alias gd="git diff"
alias gp="git pull"
alias gf="git fetch"
alias gm="git mv"
alias cb="curBranch"
alias gitCM='git commit -F $COMMIT_FILE'
alias gitU='git pull origin master'
alias gitPOM='git push origin master'
alias logtop='git log -n 1'
alias gl1='logtop'
alias lastmsg='logtop | sed 1,4d | sed "s/^    //g"'
alias switchGM='lastmsg > $COMMIT_FILE'
alias switch2GM='switchGM ; editGM'
alias gitBranches='git branch | sed "s/^. //g"'
alias cfs='git status --porcelain | sed "s#^.*/#* #g">> $COMMIT_FILE'
# diff a single file between two revisions (takes arguments, so a function rather than an alias)
function filediff { git diff "$1" "$2" -- "$3"; }
alias all_authors='git shortlog -n -s --'
alias git_clean_junk="git clean --force -d -x"
alias git_commit_author_counts="git shortlog -sn"
alias git_dont_ignore="git ls-files --other"
alias git_staged_files="git diff --name-only --staged"
alias git_modified_files="git diff --name-only"
alias git_shallow_clone="git clone --depth=1"
alias git_diff_changes_only="git diff --unified=0"
alias git_prune_origin="git remote prune origin"
alias git_last_files="git show --pretty=\"format:\" --name-only HEAD"
function curBranch {
    git branch | grep -E "^[*]" | awk '{print $2}'
}
function ticket_in_br {
    # Prints the Jira ticket in the current branch name
    curBranch | sed -E "s#$BRANCH_REGEX#\1#g"
}
function newCM {
    echo "[`ticket_in_br`]" > $COMMIT_FILE
    editGM
}
function gitC {
    # Run pre commit stuff
    pre_commit_rules
    if [ $? -ne 0 ]; then
echo "ERROR pre commit errors, need to resolve before commiting." | |
        return 1
    fi
    # Success, proceed to commit message
    echo gitCM
}
function git_reset {
    git_clean_junk
    git fetch origin
    git reset --hard origin/master
}
function git_reset_branch {
    #do this after forcing a push and synching on another clone that hasn't pulled yet.
    git fetch origin
    git reset --hard origin/`curBranch`
    git prune
}
function git_force_push {
    git push -f origin `curBranch`
}
function rebase_on_target_base {
    target_base=$1
    CUR_BRANCH=`curBranch`
    echo ==============================================
    echo GIT PULL CURRENT BRANCH: $CUR_BRANCH
    echo ==============================================
    git pull
    echo ==============================================
    echo GIT CHECKOUT $target_base
    echo ==============================================
    git checkout $target_base
    echo ==============================================
    echo GIT HARD RESET $target_base
    echo ==============================================
    git_reset_branch
    echo ==============================================
    echo GIT CHECKOUT CURRENT BRANCH: $CUR_BRANCH
    echo ==============================================
    git checkout $CUR_BRANCH
    echo ==============================================
    echo GIT REBASE
    echo ==============================================
    git rebase $target_base
    if [ $(git status | grep "rebasing" | wc -l) -ne 0 ]; then
        echo ==============================================
        echo GIT FORCE PUSH
        echo ==============================================
        echo git_force_push
    fi
}
function rebase_on_dev_br {
    rebase_on_target_base $DEV_BRANCH
}
function rebase_on_master {
    rebase_on_target_base master
}
function selbr {
    git checkout `gitBranches | grep $1`
    switchGM
}
function gitua {
    # git update all
    echo GIT UPDATE ALL
    CURRENT_BRANCH=`curBranch`
    git fetch
    echo =======================================================================
    echo GIT UPDATE MASTER
    git checkout master
    git pull origin master
    echo =======================================================================
    echo GIT UPDATE $DEV_BRANCH
    git checkout $DEV_BRANCH
    git pull origin $DEV_BRANCH
    echo =======================================================================
    echo GIT UPDATE CURRENT BRANCH
    git checkout $CURRENT_BRANCH
    git pull origin $CURRENT_BRANCH
    git branch
    git status
}
function glac {
    # git_line_author_counts
    git blame -w "$1" | sed -E "s/.*\((.*) +[0-9]{4}-[0-9]{2}.*/\1/g" | sort | uniq -c | sort -nr
}
function git_log_change_set {
    # Prints the files changed for a given git sha
    git_sha=$1
    git log --name-only --oneline -n 1 $git_sha
}
function tar_stat {
    # Creates a tar file out of the current changes to the repository
    git status --porcelain | cut -c 4- | tar -cf /tmp/tar_stat.tar -T -
    mv /tmp/tar_stat.tar .
}
function coSubDir {
    # Used to clone a third party repo shallowly to have local copy around to do
    # code studies upon
    localRepo=$1
    remoteRepo=$2
    subDir=$3
    # Create local repository for subdirectory checkout, make it hidden to avoid having to drill down to the subfolder
    mkdir ./.$localRepo
    cd ./.$localRepo
    git init
    git remote add -f origin $remoteRepo
    git config core.sparseCheckout true
    # Add the subdirectory of interest to the sparse checkout.
    echo $subDir >> .git/info/sparse-checkout
    git pull --depth=1 origin master
    # Create convenience symlink to the subdirectory of interest
    cd ..
    ln -s ./.$localRepo/$subDir $localRepo
}
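# Usage sketch (hypothetical arguments, not part of the original file):
#   coSubDir buildtools https://github.com/bazelbuild/buildtools.git buildifier
#   -> sparse-checks-out only buildifier/ into ./.buildtools and symlinks it as ./buildtools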
function git_rename_branch {
    new_name=$1
    old_name=`curBranch`
    echo "Renaming branch $old_name to $new_name"
    git pull
    echo "Rename local branch"
    git branch -m $new_name
    echo "Delete the '$old_name' remote branch and push the '$new_name' local branch."
    git push origin :$old_name $new_name
    # Reset the upstream branch for the $new_name local branch.
    # Switch to the branch and then:
    #git push origin -u $new_name
}
function dup_stage {
    # Duplicate all files in the stage
    for i in `git_staged_files`; do
        dup "$i"
    done
}
function dup_last {
    # Duplicate all files in the last commit
    for i in `git_last_files`; do
        dup "$i"
    done
}
function get_dev_merge_base {
    # Returns the git sha of the point in the development branch where the
    # current branch was cut from.
    git merge-base `curBranch` $DEV_BRANCH
}
function get_master_merge_base {
    git merge-base `curBranch` master
}
function git_head_sha {
    git log --format="%H" -n 1
}
function git_my_changed_files {
    # Enumerates all the files modified from the BASE development branch that this
    # current branch was cut from.
    git diff --name-only `get_dev_merge_base`...`git_head_sha`
}
function nbr {
    # New branch
    branch=$1
    # Validate Branch pattern (used for Jira parsing)
    is_valid_form=`echo $branch | grep -E $BRANCH_REGEX`
    if [ -z "$is_valid_form" ]; then
        echo NBR--Invalid branch form: must match "$BRANCH_REGEX"
        return
    fi
    echo NBR--Creating branch=$branch
    # Stash any changes (only if changes exist)
    has_modified_changes=`git status --porcelain | grep -v "^?? "`
    if [ ! -z "$has_modified_changes" ]; then
        echo NBR--Stashing changes...
        git stash push
    fi
    # cut from development branch
    git fetch origin
    git checkout $DEV_BRANCH
    git reset --hard origin/$DEV_BRANCH
    #Create a new branch:
    git checkout -b $branch $DEV_BRANCH
    # Push new branch to remote
    git push -u origin $branch
    # Unstash (pop if changes did exist)
    if [ ! -z "$has_modified_changes" ]; then
        echo NBR--Popping stashed changes...
        git stash pop
    fi
    echo NBR--Created branch="$branch"
}
function bfcb {
    # branch from branch
    branch=$1
    git checkout -b $branch `curBranch`
    # Push new branch to remote
    git push -u origin $branch
}
function fco {
    # fetch then checkout. Especially useful for new branches added on remote.
    branch=$1
    git fetch
    git checkout $branch
}
function undo_rebase {
    echo "git reset --hard <last sha of branch, check github history>"
    echo "git push --force-with-lease"
}
function copy_br {
    echo start
    pbcopy <<< `curBranch`
    echo stop
}
function del_br {
    git branch -D `gitBranches | grep $1`
}
function uniq_owners {
    grep -r "." --include=OWNERS . | sed -E "s/.*OWNERS:|per-file.* = //g" | tr ' ' '\n' | sort | uniq > owners.txt
}
function editGM {
    # Mac OS
    if [ $IS_MAC -eq 1 ]; then
        code $COMMIT_FILE
    else
        echo "Must manually open on linux"
        find $COMMIT_FILE
    fi
}
#*********************************************************************************************************************************************
# CPP STUFF
#*********************************************************************************************************************************************
if [ -n "$DEBUG_BASH" ]; then
    echo DEBUG_CPP_STUFF
fi
# DOXYGEN TODO COMMENT: /// @todo @jira{FW-xxxx}
CPP_SOURCE="--include=*.cpp --include=*.h --include=*.c"
CPP_EXCLUDE="(third_party)"
COND_COMP_PATTERN="^ *# *(el)?if"
COND_COMP_CLEAN1="s/.*://g; s/[!()]/ /g; s/ +/ /g"
COND_COMP_CLEAN2="s%(#ifn?def|#if|#elif|#endif|defined|/\*.*\*/|//.*)%%g"
IGNORE_MACROS="^(true|false|[0-9]+[UL]*)$"
DYLD_LIBRARY_PATH="/usr/local/Cellar/llvm/9.0.0_1/lib"
alias macro_hits="grep $CPP_SOURCE -rE \"$COND_COMP_PATTERN\" . | grep -vE \"$CPP_EXCLUDE\""
alias all_macros="macro_hits | sed -E \"$COND_COMP_CLEAN1\" | tr '|&%*/<>=' '\n' | sed -E \"$COND_COMP_CLEAN2\" | grep . | tr -d ' ' | sort | uniq"
alias proto_hits="grep --include=*.proto -rE \"syntax +=\" . | tr \' '\"'"
alias proto_syntax="proto_hits | sed \"s#.*syntax##g\" | tr -d ' ;\"=' | sort | uniq"
alias rm_ach="rm /dev/shm/achshm*"
function linkerErrors() {
    grep -F "Linking of rule " *.log | sed -E "s#.*Linking of rule '(.*)' failed.*#\1#g"
}
macro_users() {
    filename='all_macros.txt'
    all_macros | grep -vE "$IGNORE_MACROS" > $filename
    while read p; do
        echo -e "\n=================================================================================================================================================="
        echo MACRO: $p
        grep $CPP_SOURCE -rFl $p . | grep -vE "$CPP_EXCLUDE"
    done < $filename
}
strace_dbg() {
    # debugging with strace
    # edit source to use fprintf(stderr, …); fflush(stderr); to delimit your traces and bracket them out.
    build_target=$1
    strace -f -tt -T -s 500 -o out.trace "$build_target"
}
run_w_tsan() {
    # run bazel target with thread sanitizer
    build_target=$1
    bazel run --config=tsan "$build_target"
}
run_w_helgrind() {
    # run bazel binary target with valgrind helgrind tool
    # Showing names of stack and global variables by reading the relevant debug info, --read-var-info=yes
    build_target=$1
    valgrind --tool=helgrind "$build_target"
}
run_w_dbg_helgrind() {
    build_target=$1
    valgrind --tool=helgrind --read-var-info=yes "$build_target"
}
#*********************************************************************************************************************************************
# JIRA STUFF
#*********************************************************************************************************************************************
print_ticket() {
    curBranch | sed -E "s#user/$USER/(.*)__.*#\1#"
}
open_ticket() {
    if [ -z "$1" ]; then
        ticket=`print_ticket`
    else
        ticket=$1
    fi
    chrome $JIRA_URL/browse/$ticket
}
new_ticket() {
    chrome $JIRA_URL/secure/CreateIssue!default.jspa
}
#*********************************************************************************************************************************************
# BAZEL STUFF
#*********************************************************************************************************************************************
if [ -n "$DEBUG_BASH" ]; then
    echo DEBUG_BAZEL_STUFF
fi
PUBLIC_BAZEL_RELEASES=https://github.com/bazelbuild/bazel/releases
local_bazel_cache=/private/var/tmp/_bazel_$USER
user_bazel_cache=/home/$USER/.cache/bazel/_bazel_$USER/
BAZEL_GREP="--include=BUILD --include=*.bazel --include=*.bzl --include=WORKSPACE"
BAZEL_REGEX="(BUILD|\.bazel|\.bzl|WORKSPACE)$"
# VSCode bazel: filter BUILD,*.bazel,*.bzl,WORKSPACE
# run tests locally --strategy=TestRunner=local
alias bazel_outs="find . -maxdepth 1 -mindepth 1 -type l"
alias bazel_externs="bazel_outs | grep -vE \"bazel-(testlogs|genfiles|bin|out)\""
alias bazel_source="find . -name 'BUILD' -o -name '*.bazel' -o -name '*.bzl' -o -name WORKSPACE"
alias bazel_gen_source="find `bazel_externs`/ -name 'BUILD' -o -name '*.bazel' -o -name '*.bzl' -o -name WORKSPACE"
alias bazel_del_cache="rm -rf ~/.cache/bazel/_bazel_$USER/"
alias bazel_expunge="bazel clean --expunge_async"
alias bbclang="bazel build --config=clang"
alias bazel_debug_build="bazel build --compilation_mode=dbg"
alias bazel_setup="./scripts/setup/setup.sh dev --tags bazel"
function bazel_hits() {
    grep $BAZEL_GREP -nrE "$1" . | grep -vE "experimental/"
}
function bazel_file_hits() {
    grep $BAZEL_GREP -lrE "$1" . | grep -vE "experimental/"
}
function convert_asts() {
    # Iterates under the root directory finding all starlark files
    root_dir=$1
    for i in `find $root_dir | grep -E "(BUILD|bzl|bazel|WORKSPACE)$"`; do
        convert_ast "$i"
    done
}
function convert_ast() {
    echo "CONVERTING AST for $1"
    go run $USER/bazel_tools/bazel_analyzer.go "$1"
}
function bq_dirs {
    target=$1
    bazel query "kind( 'source file',deps($target))" | xargs dirname | sort -u
}
function bdeps() {
    # enumerate all package dependencies for the given target
    target=$1
    bazel query "deps($target)" --output package
}
function all_bdeps() {
    # all bazel dependencies for a list of targets
    # enumerate all package dependencies for the given target
    targets=`pre_n_suffix deps\( \) $@`
    bazel query "`join_by ' union ' $targets`" --output package
}
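# Expansion sketch for all_bdeps (hypothetical targets, not part of the original file):
#   all_bdeps //foo:bar //baz:qux
#   -> pre_n_suffix/join_by build the expression "deps(//foo:bar) union deps(//baz:qux)"
#   -> bazel query "deps(//foo:bar) union deps(//baz:qux)" --output package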
function print_kinds() {
    awk '{print $1}' "$1" | sort | uniq -c | sort -nr
}
function bazel_hits_in_outs() {
    # uncover any generated BUILD files and otherwise in the bazel out symlinks.
    # NOTE only hits expected in externals
    for i in `bazel_outs`; do
        find $i/ | grep -E "$BAZEL_REGEX"
    done
}
function tar_bazel_files {
    # Creates a tar file from the working directory including only bazel files.
    bazel_source | grep -vE "/experimental/" | tar -cf bazel_files.tar -T -
    bazel_gen_source | grep -vE "/experimental/" | tar --dereference --append -f bazel_files.tar -T -
}
function find_target() {
    # prints a bazel query that finds targets whose name matches the given pattern
    target_match=$1
    echo "bazel query 'attr(\"name\", \".*$target_match.*\", //...)'"
}
function python_deps {
    target=$1
    bazel query "kind('py_.*', deps($target))"
}
function minrankquery {
    bazel query "deps($1) except kind('source file', deps($1))" --output minrank
}
function bazel_link_opts_all {
    grep $BAZEL_GREP -rE '"[-]l' . | sed "s/.*://g" | sed "s/linkopts//g" | sed "s/#.*//g" | tr -d "[],= \"" | sort | uniq
}
function bazel_derivatives {
    target=$1
    find -L bazel-*/ | grep $target
}
function bazel_py_derivatives {
    target=$1
    find -L bazel-*/ | grep $target | grep -vE "(third_party|pypi__|_solib|pycache)"
}
function bazel_test_error_feedback {
    # runs bazel test on the given target(s) with error feedback only
    bazel test --test_output=errors "$@"
}
function bazel_test_feedback {
    # runs bazel test on the given target(s) with full test output
    bazel test --test_output=all "$@"
}
function bazel_all_pytests {
    # Run all bazel py_tests
    bazel query "kind('py_test', '//...')" | xargs bazel test
}
function bazel_stress {
    # Runs the given bazel test num_runs times to shake out flakiness
    target=$1
    num_runs=$2
    bazel test --runs_per_test $num_runs $target
}
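# Usage sketch (hypothetical target, not part of the original file):
#   bazel_stress //foo/bar:some_test 50   ->   bazel test --runs_per_test 50 //foo/bar:some_test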
function remove_bazel_outs {
    # Removes from the root of the repo all those bazel-* folders
    for j in `bazel_outs`; do
        echo removing $j
        rm -rf $j
    done
}
# >>> statements = ast["Stmts"]
# >>> osal_libs = [ s for s in statements if "X" in s and "Fn" in s["X"] and s["X"]["Fn"]["Name"] == "osal_library" ]
# go get -u github.com/bazelbuild/buildtools/build
# go get -u github.com/bazelbuild/buildtools/tables
# OUTPUT FORMAT (--output)
# https://docs.bazel.build/versions/master/query.html#output-formats
# --output: build, label, label_kind, location, package, proto, record or xml
#     the outputs will be printed in arbitrary order, the fastest option
# --output: graph, minrank or maxrank
#     with these formats, results ordered by the dependency order or rank.
#     topological ordering: graph nodes appear earlier than all of their successors.
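# Example query using the graph output (hypothetical target, not part of the original file):
#   bazel query "deps(//foo:bar)" --output graph > deps.dot && dot -Tpng deps.dot -o deps.png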
#*********************************************************************************************************************************************
# ENVIRONMENT BOOTSTRAP
#*********************************************************************************************************************************************
if [ -n "$DEBUG_BASH" ]; then
    echo DEBUG_ENV_BOOTSTRAP_STUFF
fi
# Input RC Settings
if [ ! -f "$HOME/.inputrc" ]; then
    echo Generating inputrc file...
    echo "# ↑" > ~/.inputrc
    echo "\"\\e[A\":history-search-backward" >> ~/.inputrc
    echo "# ↓" >> ~/.inputrc
    echo "\"\\e[B\":history-search-forward" >> ~/.inputrc
echo "# Try to stay at the same the cursor position when moving through the history." >> ~/.inputrc | |
echo "set history-preserve-point on" >> ~/.inputrc | |
bind -f ~/.inputrc | |
fi | |
# brew install graphviz |