ai_stuff
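# Shell helpers that wrap local Ollama models:
#   ai_explain <filename>                                   - ask deepseek-r1:8b to explain a file's purpose and content
#   ai_create <output_file> "<prompt>" <context_file ...>   - ask qwen2.5-coder:14b to generate a file from a prompt plus context files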
ai_explain() {
  local filename="$1"
  local model="deepseek-r1:8b"

  if [ -z "$filename" ]; then
    echo "Usage: ai_explain <filename>" >&2; return 1
  fi
  if [ ! -f "$filename" ] || [ ! -r "$filename" ]; then
    echo "Error: File '$filename' not found or is not readable." >&2; return 1
  fi

  echo "Asking AI ($model) to explain '$filename'..." >&2

  local prompt
  prompt=$(cat <<EOF
Explain the purpose and content of the following file named '$filename'. Describe what it likely does or what data it contains.
File Content:
\`\`\`
$(cat "$filename")
\`\`\`
EOF
)

  ollama run "$model" "$prompt"
  if [ $? -ne 0 ]; then
    echo "Error: Ollama command failed." >&2; return 1
  fi
  return 0
}
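# Example usage (a minimal sketch; assumes ollama is installed, the deepseek-r1:8b
# model has already been pulled, and ./notes.txt is a hypothetical readable file):
#   ai_explain ./notes.txt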
ai_create() {
  local output_file="$1"
  local user_prompt="$2"
  local model="qwen2.5-coder:14b"

  if [ $# -lt 3 ]; then
    echo "Usage: ai_create <output_file> \"<prompt>\" <context_file1> [context_file2 ...]" >&2
    echo "Error: Not enough arguments provided." >&2
    return 1
  fi
  shift 2
  local context_files=("$@")

  # Validate every context file before calling the model.
  local all_files_valid=true
  local file_list_for_prompt=""
  for file in "${context_files[@]}"; do
    if [ ! -f "$file" ] || [ ! -r "$file" ]; then
      echo "Error: Context path '$file' is not a readable file. Only files are accepted as context." >&2
      all_files_valid=false
    fi
    file_list_for_prompt+="'$file' "
  done
  file_list_for_prompt=${file_list_for_prompt% }
  if [ "$all_files_valid" = false ]; then
    echo "Error: One or more context files are invalid. Aborting." >&2
    return 1
  fi

  # Make sure the output location is usable before spending time on generation.
  local output_dir
  output_dir=$(dirname "$output_file")
  if ! mkdir -p "$output_dir" || [ ! -w "$output_dir" ]; then
    echo "Error: Output directory '$output_dir' could not be created or is not writable." >&2
    return 1
  fi

  echo "Asking AI ($model) to create '$output_file'..." >&2
  echo " Prompt: \"$user_prompt\"" >&2
  echo " Using context from files: ${file_list_for_prompt}" >&2

  # Build the prompt from the user's instructions plus the concatenated context files.
  local context_content
  context_content=$(cat "${context_files[@]}")

  local final_prompt
  final_prompt=$(cat <<EOF
You are an AI assistant programmed to generate file content based on user instructions and provided context.
Your task is to generate the content for the file named: '$output_file'
Follow these instructions precisely:
$user_prompt
Use the following concatenated content from the provided context file(s) (${file_list_for_prompt}) as reference material:
\`\`\`
$context_content
\`\`\`
CRITICAL OUTPUT REQUIREMENTS:
1. Generate *only* the raw content required for the target file '$output_file'.
2. Your response must *not* contain any introductory phrases, explanations, apologies, or summaries.
3. Your response must *not* include markdown formatting like \`\`\` unless it's explicitly part of the requested file content itself.
4. Your response must *not* contain any metadata or diagnostic tags like <think>...</think>.
Begin the raw file content now:
EOF
)

  ollama run "$model" "$final_prompt" > "$output_file"
  local exit_status=$?
  if [ $exit_status -eq 0 ]; then
    if [ -s "$output_file" ]; then
      echo "File '$output_file' created successfully." >&2
    else
      echo "Warning: Ollama command succeeded, but the output file '$output_file' is empty. The AI might not have generated usable content." >&2
    fi
  else
    echo "Error: Ollama command failed (Exit Status: $exit_status). Output file '$output_file' might be incomplete or empty." >&2
    return 1
  fi
  return 0
}
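# Example usage (a minimal sketch; assumes ollama is installed, the qwen2.5-coder:14b
# model has already been pulled, and the file names below are hypothetical):
#   ai_create ./README.md "Write a README describing this project" main.sh config.yaml
#
# To make both helpers available in interactive shells, source this file from your
# shell startup file (e.g. ~/.bashrc or ~/.zshrc):
#   source ~/ai_stuff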