Skip to content

Instantly share code, notes, and snippets.

@eramax
Forked from johndpope/killer.sh
Created May 4, 2026 08:47
Show Gist options
  • Select an option

  • Save eramax/e3974e95e141e51af07d1bfa0fa268ca to your computer and use it in GitHub Desktop.

Select an option

Save eramax/e3974e95e141e51af07d1bfa0fa268ca to your computer and use it in GitHub Desktop.
#!/usr/bin/env bash
# Author: Oleh Pshenychnyi
# Date: 13.02.2021
#
# Kill all processes matching a provided pattern.
#
# Usage:
#
# >> bash killer.sh celery
#
# or better to alias this script in your .bashrc/.zshrc
# so you can use it like:
#
# >> killer npm
# >> killer celery
# >> killer fuckingJava
victim_name=${1-}
if [ -z "$victim_name" ]
then
echo "Nope! Gimme a victim name."
exit 1  # was a bare 'exit' (status 0); misuse should report failure
fi
# ps ax columns used: $1 = PID, $3 = STAT.
# Quote the pattern (was unquoted: glob/word-splitting hazard) and drop the
# grep process itself, which always matches its own command line in ps output.
output="$(ps ax | grep -- "$victim_name" | grep -v grep | awk '{print $1,$3}')"
# at this point output looks like this:
# 254214 S
# 254215 S
# 254216 S
# 259206 S+
# 259207 S+
# pids to be killed
victim_pids=()
# Read "PID STATE" pairs line by line; this replaces the fragile
# IFS save/restore + unquoted array-expansion dance of the original.
while read -r pid state; do
[ -n "$pid" ] || continue  # skip the empty line produced by empty output
# we ignore the current process and foreground ("S+") processes,
# i.e. the pipeline that produced this very listing
if [ "$pid" != "$$" ] && [ "$state" != "S+" ]
then
victim_pids+=("$pid")
fi
done <<< "$output"
if [ "${#victim_pids[@]}" -eq 0 ]
then
echo "Nothing found for '${victim_name}'."
exit 0
fi
echo "Got them: ${victim_pids[*]}"
# Send the signal directly; the old 'echo "$(kill ...)"' wrapper only ever
# printed a blank line. NOTE(review): SIGKILL gives processes no chance to
# clean up — consider trying SIGTERM first.
kill -9 "${victim_pids[@]}" >/dev/null 2>&1 || true
echo ".. and smashed!"
#!/bin/bash
# llama.cpp Updater & Downloader Script (Improved)
# Automatically downloads the latest pre-built llama.cpp binaries from GitHub releases
# Supports Linux (Ubuntu x64 with optional Vulkan) and macOS
# Version: 2026-04-30
set -e
# ANSI escape sequences used to color the log-level prefixes below.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
# Leveled loggers: each prints a colored "[LEVEL] message" line to stdout.
# %b expands the backslash escapes stored in the color variables (and in the
# message, matching the original 'echo -e' behavior).
print_info()    { printf '%b[INFO]%b %b\n' "$GREEN" "$NC" "$1"; }
print_warning() { printf '%b[WARN]%b %b\n' "$YELLOW" "$NC" "$1"; }
print_error()   { printf '%b[ERROR]%b %b\n' "$RED" "$NC" "$1"; }
print_success() { printf '%b[SUCCESS]%b %b\n' "$GREEN" "$NC" "$1"; }
# Detect system
detect_system() {
# Echo a platform tag such as "ubuntu-x64", "ubuntu-vulkan-x64" or
# "macos-arm64"; exits the script on an unsupported OS or CPU.
local os arch variant=""
local kernel machine
kernel=$(uname -s)
machine=$(uname -m)
case "$kernel" in
Linux*) os="ubuntu" ;;
Darwin*) os="macos" ;;
*) print_error "Unsupported OS: $kernel"; exit 1 ;;
esac
case "$machine" in
x86_64) arch="x64" ;;
arm64|aarch64) arch="arm64" ;;
*) print_error "Unsupported architecture: $machine"; exit 1 ;;
esac
# Vulkan auto-detection applies only to Ubuntu x64; it can be forced on
# (FORCE_VULKAN=1, via --vulkan) or off (FORCE_REGULAR=1, via --force-regular).
if [ "$os" = "ubuntu" ] && [ "$arch" = "x64" ] && [ "$FORCE_REGULAR" != "1" ]; then
if [ "$FORCE_VULKAN" = "1" ] || check_vulkan_support; then
variant="vulkan"
fi
fi
# Join the non-empty parts with dashes (variant may be empty).
echo "${os}-${variant:+${variant}-}${arch}"
}
check_vulkan_support() {
# Return 0 when Vulkan looks usable on this machine, 1 otherwise.
# A working 'vulkaninfo' is the strongest signal.
if command -v vulkaninfo >/dev/null 2>&1 && vulkaninfo >/dev/null 2>&1; then
return 0
fi
# Fall back to looking for the Vulkan loader library in the usual spots.
local lib
for lib in /usr/lib/x86_64-linux-gnu/libvulkan.so.1 /usr/lib/libvulkan.so.1; do
if [ -f "$lib" ]; then
return 0
fi
done
# Last resort: ask the dynamic-linker cache.
if ldconfig -p 2>/dev/null | grep -q "libvulkan.so"; then
return 0
fi
return 1
}
get_latest_release() {
# Fetch the latest-release JSON from the GitHub API and write it to stdout.
# Prefers curl, falls back to wget; aborts if neither is installed.
local api_url="https://api.github.com/repos/ggml-org/llama.cpp/releases/latest"
if command -v curl >/dev/null 2>&1; then
curl -sL "$api_url"
return
fi
if command -v wget >/dev/null 2>&1; then
wget -qO- "$api_url"
return
fi
print_error "curl or wget is required"
exit 1
}
get_version() {
# Extract the first "tag_name" value (e.g. "b1234") from the release JSON
# passed as $1; prints nothing if the field is absent.
grep -o '"tag_name": *"[^"]*"' <<< "$1" | head -n1 | cut -d'"' -f4
}
get_download_url() {
# Print the browser_download_url of the release asset matching $2
# (a tag from detect_system, e.g. "ubuntu-vulkan-x64" or "macos-arm64").
# $1 is the raw release JSON. Returns 1 when no asset matches.
local release_data="$1"
local system_arch="$2"
local candidates=()
# Ordered asset-name patterns: most specific first, then fallbacks.
# FIX: the original routed every non-ubuntu tag (including macOS) to the
# ubuntu-x64 .tar.gz pattern, so macOS never found its .zip asset.
case "$system_arch" in
ubuntu-vulkan-x64)
candidates=("llama-.*-bin-ubuntu-vulkan-x64\.tar\.gz" "llama-.*-bin-ubuntu-x64\.tar\.gz") ;;
macos-arm64)
candidates=("llama-.*-bin-macos-arm64\.zip" "llama-.*-bin-macos-x64\.zip") ;;
macos-x64)
candidates=("llama-.*-bin-macos-x64\.zip") ;;
*)
candidates=("llama-.*-bin-ubuntu-x64\.tar\.gz") ;;
esac
for pattern in "${candidates[@]}"; do
local url
url=$(echo "$release_data" | grep -o '"browser_download_url": *"[^"]*"' | \
grep -E "$pattern" | head -n1 | sed 's/.*"browser_download_url": *"\([^"]*\)".*/\1/')
if [ -n "$url" ]; then
echo "$url"
return 0
fi
done
return 1
}
download_file() {
# Download URL $1 to local path $2, preferring curl over wget.
local url="$1"
local dest="$2"
print_info "Downloading: $(basename "$url")"
if command -v curl >/dev/null 2>&1; then
curl -L --progress-bar -o "$dest" "$url"
else
wget --show-progress -O "$dest" "$url"
fi
}
extract_archive() {
# Unpack archive $1 into directory $2 (created if missing).
# Supports .tar.gz and .zip; anything else is a fatal error.
local archive="$1"
local dest="$2"
mkdir -p "$dest"
case "$archive" in
*.tar.gz) tar -xzf "$archive" -C "$dest" ;;
*.zip)    unzip -q "$archive" -d "$dest" ;;
*)
print_error "Unknown archive type: $archive"
exit 1
;;
esac
}
move_binaries() {
# Install the extracted release: find the single "llama-b*" folder under
# src ($1) and make it available at dst ($2).
# FIX: the original created a symlink pointing INTO the temp extraction
# dir, which the EXIT trap deletes — leaving dst as a dangling link.
# We now move the tree to the destination instead.
local src="$1"
local dst="$2"
local version_dir
version_dir=$(find "$src" -maxdepth 1 -type d -name "llama-b*" | head -n1)
if [ -z "$version_dir" ]; then
print_error "Could not find llama-b* folder inside archive"
exit 1
fi
print_info "Using extracted folder: $version_dir"
# Remove anything that exists (file, symlink, or directory)
rm -rf "$dst" 2>/dev/null || true
# Move the extracted tree out of the temp dir so it survives cleanup.
mv "$version_dir" "$dst"
print_success "Installed: $dst (from $(basename "$version_dir"))"
}
make_executable() {
# Mark every regular file under $1 executable, except obvious
# documentation/config files (*.txt, *.md, *.json). Errors are ignored.
local root="$1" f
while IFS= read -r -d '' f; do
chmod +x "$f" 2>/dev/null || true
done < <(find "$root" -type f ! -name "*.txt" ! -name "*.md" ! -name "*.json" -print0 2>/dev/null)
}
cleanup() {
# Remove the temp directory $1 if it still exists (used by the EXIT trap).
local dir="$1"
[ -d "$dir" ] && rm -rf "$dir"
}
main() {
# Orchestrates the whole update: detect platform -> query latest release ->
# download matching asset -> extract -> install into $1 (default: cwd).
local target_dir="${1:-$(pwd)}"
# FIX: create the target before resolving it — the original resolved with
# 'cd' first, so a not-yet-existing target dir aborted under 'set -e'.
mkdir -p "$target_dir"
target_dir=$(cd "$target_dir" && pwd)
print_info "llama.cpp Updater (Improved) — Target: $target_dir"
# Detect system
local system_arch
system_arch=$(detect_system)
print_info "Detected: $system_arch"
if [[ "$system_arch" == *"vulkan"* ]]; then
print_success "Vulkan support will be used"
fi
# Get latest release
print_info "Fetching latest release info..."
local release_data
release_data=$(get_latest_release)
local version
version=$(get_version "$release_data")
print_info "Latest version: $version"
# Get download URL
local download_url
download_url=$(get_download_url "$release_data" "$system_arch") || {
print_error "No matching binary found for $system_arch"
echo "Available assets (first 15):"
echo "$release_data" | grep -o '"name": *"[^"]*"' | head -n15 | sed 's/.*"name": *"\([^"]*\)".*/ \1/'
exit 1
}
print_info "Download URL: $download_url"
# Download into a temp dir that is removed on any exit path.
local temp_dir
temp_dir=$(mktemp -d)
trap 'cleanup "$temp_dir"' EXIT
# FIX: keep the asset's real filename — macOS assets are .zip, and
# extract_archive dispatches on the extension; the original hard-coded
# a ".tar.gz" name for everything.
local archive="$temp_dir/${download_url##*/}"
download_file "$download_url" "$archive"
# Extract
local extract_dir="$temp_dir/extract"
print_info "Extracting..."
extract_archive "$archive" "$extract_dir"
# Install
print_info "Installing to $target_dir"
move_binaries "$extract_dir" "$target_dir"
make_executable "$target_dir"
print_success "Successfully installed llama.cpp $version"
print_info "Binaries location: $target_dir"
echo ""
echo "Installed files:"
ls -lh "$target_dir" | grep -E '\.(sh|bin|so|dylib)$' | head -n 20
}
# === Argument parsing ===
# Flags are exported to detect_system via plain globals; the one positional
# argument (if any) is the install target directory.
BUILD_FROM_SOURCE=""
FORCE_REGULAR=""
FORCE_VULKAN=""
TARGET_DIR=""
while [ $# -gt 0 ]; do
case "$1" in
--build-from-source)
BUILD_FROM_SOURCE="1"
;;
--force-regular)
FORCE_REGULAR="1"
;;
--vulkan)
FORCE_VULKAN="1"
;;
-h|--help)
echo "Usage: $0 [options] [target_directory]"
echo ""
echo "Options:"
echo " --vulkan Force Vulkan build (Ubuntu x64)"
echo " --force-regular Force CPU-only build (disable Vulkan)"
echo " --build-from-source Build from source instead of downloading"
echo " -h, --help Show this help"
exit 0
;;
-*)
print_error "Unknown option: $1"
exit 1
;;
*)
TARGET_DIR="$1"
;;
esac
shift
done
# Default to the current directory when no target was given.
TARGET_DIR="${TARGET_DIR:-.}"
# The two Vulkan flags are mutually exclusive.
if [ "$FORCE_REGULAR" = "1" ] && [ "$FORCE_VULKAN" = "1" ]; then
print_error "--force-regular and --vulkan cannot be used together"
exit 1
fi
# Source builds are not supported by this simplified script.
if [ "$BUILD_FROM_SOURCE" = "1" ]; then
print_warning "Build-from-source mode is not implemented in this simplified version."
print_info "Please use the original script or build manually."
exit 1
fi
main "$TARGET_DIR"
# --- Usage recipe (interactive shell session, not part of the scripts above) ---
# 1) stop any running server, 2) install/update llama.cpp, 3) launch it.
~/killer.sh llama-server
mkdir -p ~/Documents/GitHub/llama
cd ~/Documents/GitHub/llama
cp ~/llama-cpp-updater.sh . # cd ~/ wget https://gist.githubusercontent.com/johndpope/a77b179c4f0013adb2a50e13e56b7929/raw/519c216171c7bcc2b684cd012034e9472f64c348/llama-cpp-updater.sh
./llama-cpp-updater.sh --vulkan
cd ~/Documents/GitHub/llama
# Launch the OpenAI-compatible server (exec replaces this shell).
# NOTE(review): the -hf tag repeats ":Q4_K_M" twice — the usual form is
# repo:quant; confirm the duplicate is intentional. Also verify that the
# 225536-token context fits your GPU memory with q4_0 KV cache.
exec ./llama-server \
-hf unsloth/Qwen3.6-27B-GGUF:Q4_K_M:Q4_K_M \
--host 0.0.0.0 \
--port 8080 \
--ctx-size 225536 \
--n-gpu-layers 99 \
--flash-attn on \
--cache-type-k q4_0 \
--cache-type-v q4_0 \
--device Vulkan0 \
--cache-ram 8192 \
--api-key 1234567890 \
--verbose \
--grammar-file ~/min-think.gbnf \
--reasoning-format none # important so <think> tags aren't stripped
# /provider
# add provider > llama-server
# base url > http://127.0.0.1:8080
# Step 3 of 7: Default model
# ❯ unsloth/Qwen3.6-27B-GGUF:Q4_K_M:Q4_K_M
# > api key 1234567890
# curl -H "Authorization: Bearer 1234567890" http://127.0.0.1:8080/v1/models
# Smoke-test the chat endpoint (non-streaming) using the API key configured
# above; expects an OpenAI-style JSON response from the local server.
curl http://127.0.0.1:8080/v1/chat/completions \
-H "Authorization: Bearer 1234567890" \
-H "Content-Type: application/json" \
-d '{
"model": "Qwen3.6-27B-Q4_K_M",
"messages": [{"role": "user", "content": "Say a random number between 1 and 1000"}],
"stream": false
}'
# GBNF grammar variants for ~/min-think.gbnf. These are TWO alternative
# grammars pasted together — a grammar file may define only one `root`
# rule, so copy exactly one variant into min-think.gbnf.

# Variant 1: minimal — a short free-form <think> block, then the answer.
root ::= "<think>\n" [A-Za-z0-9 ,.;:]{10,80} "\n</think>\n\n" answer
answer ::= [^\x00-\x08\x0B\x0C\x0E-\x1F]+

# Variant 2: structured — forces Plan/Approach/Edge-cases lines inside <think>.
root ::= thought answer
thought ::= "<think>\n" plan approach edge "</think>\n\n"
plan ::= "Plan: " shortline "\n"
approach ::= "Approach: " shortline "\n"
edge ::= "Edge cases: " shortline "\n"
shortline ::= [A-Za-z0-9 ,.;:/_()'"+=-]{1,120} "\n"
answer ::= [^\x00-\x08\x0B\x0C\x0E-\x1F]+
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment