Skip to content

Instantly share code, notes, and snippets.

View SteelPh0enix's full-sized avatar
🅱️
yeet

Wojciech Olech SteelPh0enix

🅱️
yeet
View GitHub Profile
@SteelPh0enix
SteelPh0enix / .bash_completion
Last active April 16, 2025 17:32
Jenkins lockable-resources CLI manager.
_lockres_completions()
{
local actions
actions="list lock unlock"
case $COMP_CWORD in
1)
COMPREPLY=( $(compgen -W "${actions}" -- "${COMP_WORDS[COMP_CWORD]}") )
;;
2)
names=$(lockres list-names)
@SteelPh0enix
SteelPh0enix / docker-compose.yml
Last active March 9, 2025 16:20
Ollama + tika + OpenWebUI Docker-compose
services:
ollama:
image: ollama/ollama:rocm
container_name: ollama
pull_policy: always
devices:
- /dev/kfd
- /dev/dri
volumes:
- /home/steelph0enix/LLMs:/models/external
@SteelPh0enix
SteelPh0enix / docker-compose.yml
Last active February 8, 2025 22:15
llama.cpp & OpenWebUI management functions
services:
open-webui:
image: ghcr.io/open-webui/open-webui:dev
container_name: open-webui
ports:
- ${OPEN_WEBUI_PORT}:${OPEN_WEBUI_PORT}
volumes:
- //f/openwebui/data:/app/backend/data
environment:
- ENV=dev
@SteelPh0enix
SteelPh0enix / pull-hf-repo.py
Last active September 25, 2024 19:52
clone huggingface repo and pull Git LFS files via HTTP
import argparse
import shutil
import subprocess
import os
from pathlib import Path
def get_hf_repo_path(url: str) -> str:
return url.removeprefix("http://huggingface.co/").removeprefix(
"https://huggingface.co/"
@SteelPh0enix
SteelPh0enix / .zshrc
Last active April 3, 2025 14:21
My ZSH init file (oh-my-zsh required)
# ~/.zshrc fragment: oh-my-zsh bootstrap (oh-my-zsh must be installed first).
# If you come from bash you might have to change your $PATH.
# export PATH=$HOME/bin:$HOME/.local/bin:/usr/local/bin:$PATH
# Path to your oh-my-zsh installation.
export ZSH="$HOME/.oh-my-zsh"
# Set name of the theme to load --- if set to "random", it will
# load a random theme each time oh-my-zsh is loaded, in which case,
# to know which specific one was loaded, run: echo $RANDOM_THEME
# See https://github.com/ohmyzsh/ohmyzsh/wiki/Themes
@SteelPh0enix
SteelPh0enix / Microsoft.PowerShell_profile.ps1
Last active January 30, 2025 14:23
My PowerShell Profile script (put in $HOME/Documents/PowerShell)
# PowerShell profile fragment: PATH setup, module imports, and eza-based
# directory-listing helpers. Lives in $HOME/Documents/PowerShell.
$PROFILE_DIR = Split-Path -Parent $profile
# Prepend the user-local bin dir. Use $env:USERPROFILE instead of a
# hardcoded "C:\Users\phoen" so the profile works under any account name
# (consistent with the other scripts in this collection).
$env:Path = "$env:USERPROFILE\.local\bin;$env:Path"
# Suggests a winget package when an unknown command is typed.
Import-Module -Name Microsoft.WinGet.CommandNotFound
# Emacs-style keybindings for the interactive line editor.
Set-PSReadLineOption -EditMode Emacs
# eza wrappers: colorized, icon-annotated listings; $args forwards any extra flags.
Function superls { eza --header --classify=always --color=always --icons=always --group $args }
Function superls_list { eza --header --classify=always --color=always --icons=always --group --long $args }
Function superls_list_all { eza --header --classify=always --color=always --icons=always --group --long --all $args }
# Force-remove wrapper (rm is PowerShell's built-in alias for Remove-Item).
Function remove_force { rm -Force $args }
@SteelPh0enix
SteelPh0enix / .wezterm.lua
Last active February 8, 2025 22:16
My WezTerm config
-- WezTerm terminal emulator configuration fragment.
local wezterm = require('wezterm')
-- config_builder() (vs a plain table) makes WezTerm warn on misspelled option names.
local config = wezterm.config_builder()
config.color_scheme = 'Kanagawa (Gogh)'
-- Default shell: PowerShell 7 (pwsh.exe must be resolvable on PATH).
config.default_prog = { 'pwsh.exe' }
config.font_size = 10.5
config.font = wezterm.font 'MonaspiceKr NF'
-- Initial window geometry, in character cells (not pixels).
config.initial_cols = 120
config.initial_rows = 30
@SteelPh0enix
SteelPh0enix / llama_cpp_utils.ps1
Last active March 17, 2025 12:40
llama.cpp PowerShell utils
# Collection of variables, aliases and Functions to work w/ locally hosted LLMs
# Source to activate.
# Tracks whether the dedicated LLM python venv is active (0 = inactive).
$env:LLM_VENV_ACTIVE = 0
# Location of the python venv used by the LLM tooling below.
$env:LLM_PYTHON_VENV_PATH = "$env:USERPROFILE\.llm.venv"
# Host/port exported for llama.cpp tools, which read LLAMA_ARG_* env vars
# as CLI-argument defaults. NOTE(review): the int is coerced to a string
# on assignment to $env: — that is the intended behavior here.
$env:LLAMA_ARG_HOST = "steelph0enix.pc"
$env:LLAMA_ARG_PORT = 51536
# llm python venv stuff
Function llm-venv-activate {
@SteelPh0enix
SteelPh0enix / clean_rebuild_llama_cpp.bat
Last active October 4, 2024 06:50
Script for building llama.cpp under Windows for ROCm
REM execute via VS native tools command line prompt
REM make sure to clone the repo first, put this script next to the repo dir
REM this script is configured for building llama.cpp w/ ROCm support
REM for a system with Ryzen 9 5900X and RX 7900XT.
REM Unless you have the exact same setup, you may need to change some flags
REM and/or strings here.
REM NOTE: in cmd.exe, `set VAR="value"` stores the quotes as PART of the
REM value, so anything reading the variable from the environment (e.g. the
REM HIP runtime reading HSA_OVERRIDE_GFX_VERSION) would see `"11.0.0"`
REM instead of `11.0.0`. The `set "VAR=value"` form assigns the bare string
REM while still protecting against special characters and trailing spaces.
set "AMDGPU_TARGETS=gfx1100"
set "HSA_OVERRIDE_GFX_VERSION=11.0.0"
set "ROCM_VERSION=6.1.2"
@SteelPh0enix
SteelPh0enix / llama-cpp-utils.sh
Last active March 6, 2025 19:22
llama.cpp shell utils
#!/bin/zsh
# Collection of variables, aliases and functions to work w/ llama.cpp
# Source to activate.
# HARDCODED VALUES - MAKE SURE TO TUNE THEM FOR YOUR SYSTEM!
# These settings are for RX 7900 XT & latest Arch Linux
# ROCm toolkit version installed on this machine — presumably consumed by
# the llama.cpp build steps defined later in this file (not visible here).
export ROCM_VERSION="6.0.2"
# Enable the ROCm (AMD GPU) backend for llama.cpp builds.
export USE_ROCM=1
# Select the AMD platform for HIP (as opposed to the NVIDIA compatibility layer).
export HIP_PLATFORM="amd"