Raycast Command for Refining Text with Ollama LLMs
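This gist contains two Raycast script commands. The first refines whatever is on the clipboard using either a local Ollama model or the Mistral API; the second is a simpler, Ollama-only variant that takes the text to refine as a Raycast argument. Both assume Ollama is installed and serving on its default port (11434); a quick sanity check, using Ollama's standard endpoints and CLI, looks like this:

# List the models the local Ollama server has pulled
curl -s http://localhost:11434/api/tags
# or, via the CLI
ollama list

The clipboard script additionally expects jq and python3 on the PATH, and a Mistral API token if one of the Mistral models is selected.
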
#!/bin/bash
# Required parameters:
# @raycast.schemaVersion 1
# @raycast.title Ollama & Mistral Refine-clipboard
# @raycast.mode fullOutput
# @raycast.packageName Ollama-mistral-refine-clipboard
# Optional parameters:
# @raycast.icon 🤖
# @raycast.argument1 { "type": "dropdown", "placeholder": "Model", "data": [{"title": "gpt-oss:latest", "value": "ollama:gpt-oss:latest"}, {"title": "qwen3:4b", "value": "ollama:qwen3:4b", "default": true}, {"title": "llama3:latest", "value": "ollama:llama3:latest"}, {"title": "qwen3:8b", "value": "ollama:qwen3:8b"}, {"title": "deepseek-r1:latest", "value": "ollama:deepseek-r1:latest"}, {"title": "llama3.2-vision:latest", "value": "ollama:llama3.2-vision:latest"}, {"title": "wizardlm-uncensored:13b", "value": "ollama:wizardlm-uncensored:13b"}, {"title": "gemma:latest", "value": "ollama:gemma:latest"}, {"title": "Mistral Small", "value": "mistral:mistral-small-latest"}, {"title": "Mistral Large", "value": "mistral:mistral-large-latest"}, {"title": "Mistral Medium", "value": "mistral:mistral-medium-latest"}, {"title": "Mistral Nemo", "value": "mistral:open-mistral-nemo"}] }
# @raycast.argument2 { "type": "dropdown", "placeholder": "Tone", "data": [{"title": "Casual", "value": "Casual"}, {"title": "Friendly", "value": "Friendly"}, {"title": "Professional", "value": "Professional"}, {"title": "Supportive", "value": "Supportive"}, {"title": "Empathetic", "value": "Empathetic"}, {"title": "Technical", "value": "Technical"}], "optional": true }
# @raycast.argument3 { "type": "dropdown", "placeholder": "Format", "data": [{"title": "General", "value": "General"}, {"title": "Slack message", "value": "Slack message"}, {"title": "Email", "value": "Email"}, {"title": "Product note", "value": "Product note"}, {"title": "Code comment", "value": "Code comment"}, {"title": "Customer-facing", "value": "Customer-facing"}, {"title": "Chat interface", "value": "Chat interface"}], "optional": true }
# @raycast.argument4 { "type": "text", "placeholder": "Custom instructions (optional)", "optional": true }
# Documentation:
# @raycast.description Refines text from clipboard using Ollama or Mistral AI with different models, tones, and formats
# @raycast.author Dipen Parmar
# @raycast.authorURL https://github.com/dipenparmar12
# Define color codes for better readability
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[0;33m'
BLUE='\033[0;34m'
PURPLE='\033[0;35m'
CYAN='\033[0;36m'
WHITE='\033[0;37m'
BOLD='\033[1m'
UNDERLINE='\033[4m'
RESET='\033[0m'
# Function to truncate text with ellipsis if it's too long
truncate_text() {
  local text="$1"
  local max_length=${2:-500}
  # tr strips the leading padding macOS wc adds to its counts
  local line_count=$(echo "$text" | wc -l | tr -d ' ')
  local char_count=$(echo "$text" | wc -c | tr -d ' ')
  if [ "$char_count" -gt "$max_length" ]; then
    echo "$text" | head -c "$max_length"
    echo -e "${YELLOW}... (${line_count} lines, $(( (char_count - 1) / 1000 ))k+ characters)${RESET}"
  else
    echo "$text"
  fi
}
# Get text from clipboard
TEXT=$(pbpaste)
MODEL_FULL="$1"
TONE="$2"
FORMAT="${3:-General}"
CUSTOM_INSTRUCTIONS="$4"
# If MISTRAL_API_TOKEN is not set, try to read it from ~/.zshrc.
# The token is only required for the Mistral provider; the provider-specific
# check below reports an error if a Mistral model is selected without one.
if [ -z "$MISTRAL_API_TOKEN" ]; then
  MISTRAL_API_TOKEN=$(grep '^export MISTRAL_API_TOKEN=' ~/.zshrc | sed 's/export MISTRAL_API_TOKEN="//; s/"$//')
fi
# Parse provider and model from the combined value
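# e.g. MODEL_FULL="ollama:qwen3:4b" -> PROVIDER="ollama", MODEL="qwen3:4b"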
PROVIDER=$(echo "$MODEL_FULL" | cut -d':' -f1)
MODEL=$(echo "$MODEL_FULL" | cut -d':' -f2-)
# Check if clipboard is empty
if [ -z "$TEXT" ]; then
echo -e "${RED}${BOLD}Error: Clipboard is empty. Please copy text to refine before running this command.${RESET}"
exit 1
fi
# Check provider-specific requirements
if [ "$PROVIDER" = "ollama" ]; then
# Check if Ollama is running
if ! pgrep -x "ollama" > /dev/null; then
echo -e "${YELLOW}${BOLD}Ollama is not running. Starting Ollama...${RESET}"
open -a Ollama
sleep 2
fi
elif [ "$PROVIDER" = "mistral" ]; then
# Check if Mistral API token is set
if [ -z "$MISTRAL_API_TOKEN" ]; then
echo -e "${RED}${BOLD}Error: MISTRAL_API_TOKEN environment variable is not set.${RESET}"
echo -e "${YELLOW}Please set your Mistral API token:${RESET}"
echo -e "${CYAN}export MISTRAL_API_TOKEN='your-api-token-here'${RESET}"
exit 1
fi
fi
# Print processing information
echo -e "${BLUE}${BOLD}Settings:${RESET}"
echo -e "${BLUE}Provider:${RESET} $PROVIDER ${BLUE}Model:${RESET} $MODEL ${BLUE}Tone:${RESET} ${TONE:-None} ${BLUE}Format:${RESET} $FORMAT"
if [ -n "$CUSTOM_INSTRUCTIONS" ]; then
echo -e "${BLUE}Custom Instructions:${RESET} $CUSTOM_INSTRUCTIONS"
fi
echo ""
# Display original text preview
echo -e "${PURPLE}${BOLD}Original Text:${RESET} ${UNDERLINE}(Preview)${RESET}"
echo -e "${YELLOW}${BOLD}▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼▼${RESET}"
truncate_text "$TEXT"
echo -e "${YELLOW}${BOLD}▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲▲${RESET}"
echo ""
# Show processing indicator
echo -e "${GREEN}${BOLD}🤖 Processing with $PROVIDER:$MODEL...${RESET}"
echo ""
# Build the prompt based on the selected tone and format
PROMPT="Refine this text to make it clear and well-structured while keeping the EXACT original meaning. DO NOT add any extra information or change the original intent in any way:"
PROMPT="$PROMPT\n\n\"$TEXT\""
if [ -n "$TONE" ]; then
PROMPT="$PROMPT\n\nTone: $TONE"
fi
# Add format if it's not General
if [ "$FORMAT" != "General" ]; then
PROMPT="$PROMPT\n\nMake it suitable for: $FORMAT"
fi
# Add custom instructions if provided
if [ -n "$CUSTOM_INSTRUCTIONS" ]; then
PROMPT="$PROMPT\n\nAdditional instructions: $CUSTOM_INSTRUCTIONS"
fi
# Add instruction for refinement notes
PROMPT="$PROMPT\n\n--- Refinement Notes ---\n Do not List what was improved (e.g., clarity, grammar, structure). Do NOT mention any added content, as none should be added."
# Process based on provider
if [ "$PROVIDER" = "ollama" ]; then
# Ollama API call
JSON_PAYLOAD=$(jq -n --arg model "$MODEL" --arg prompt "$PROMPT" '{
model: $model,
prompt: $prompt,
stream: false,
options: { temperature: 1 }
}')
RESPONSE=$(curl -s "http://localhost:11434/api/generate" \
-H "Content-Type: application/json" \
-d "$JSON_PAYLOAD")
# Check if the response contains the expected field
if echo "$RESPONSE" | grep -q "response"; then
REFINED_TEXT=$(echo "$RESPONSE" | python3 -c "
import sys, json
try:
data = json.load(sys.stdin)
if 'response' in data:
print(data['response'])
else:
print('Error: Response field missing in API response')
except json.JSONDecodeError as e:
print('Error parsing JSON response:', e)
")
else
echo -e "${RED}${BOLD}Error: Unexpected response format from Ollama API${RESET}"
echo -e "${RED}Raw response: $RESPONSE${RESET}"
exit 1
fi
elif [ "$PROVIDER" = "mistral" ]; then
# Mistral API call
JSON_PAYLOAD=$(cat <<EOF
{
"model": "$MODEL",
"messages": [
{
"role": "user",
"content": $(echo "$PROMPT" | python3 -c "import sys, json; print(json.dumps(sys.stdin.read()))")
}
],
"temperature": 0.7
}
EOF
)
RESPONSE=$(curl -s "https://api.mistral.ai/v1/chat/completions" \
-H "Content-Type: application/json" \
-H "Authorization: Bearer $MISTRAL_API_TOKEN" \
-d "$JSON_PAYLOAD")
# Check if the response contains the expected field
if echo "$RESPONSE" | grep -q "choices"; then
REFINED_TEXT=$(echo "$RESPONSE" | python3 -c "
import sys, json
try:
data = json.load(sys.stdin)
if 'choices' in data and len(data['choices']) > 0:
print(data['choices'][0]['message']['content'])
elif 'error' in data:
print('Error from Mistral API:', data['error'].get('message', 'Unknown error'))
else:
print('Error: Unexpected response structure')
except json.JSONDecodeError as e:
print('Error parsing JSON response:', e)
")
else
echo -e "${RED}${BOLD}Error: Unexpected response format from Mistral API${RESET}"
echo -e "${RED}Raw response: $RESPONSE${RESET}"
exit 1
fi
fi
# Clean up the refined text
# CLEANED_TEXT=$(echo "$REFINED_TEXT" | sed -E '1s/^[[:space:]]*(Here is the refined text:?)?[[:space:]]*//I')
# CLEANED_TEXT=$(echo "$CLEANED_TEXT" | sed -E '1s/^"//' | sed -E '$s/"$//')
CLEANED_TEXT=$(echo "$REFINED_TEXT" | sed 's/^[[:space:]]*//; s/[[:space:]]*$//')
# Display the refined text
echo -e "$CLEANED_TEXT"
# Copy refined text to clipboard
echo -n "$CLEANED_TEXT" | pbcopy
echo ""
echo -e "${GREEN}${BOLD}✓ Refined text copied to clipboard${RESET}"
#!/bin/bash
# Required parameters:
# @raycast.schemaVersion 1
# @raycast.title Ollama Refine-Text
# @raycast.mode fullOutput
# @raycast.packageName Ollama
# Optional parameters:
# @raycast.icon 🤖
# @raycast.argument1 { "type": "text", "placeholder": "Text to refine", "optional": false }
# @raycast.argument2 { "type": "dropdown", "placeholder": "Model", "data": [{"title": "llama3", "value": "llama3"},{"title": "qwen2.5:14b", "value": "qwen2.5:14b"},{"title": "wizardlm-uncensored:13b", "value": "wizardlm-uncensored:13b"},{"title": "gemma", "value": "gemma"},{"title": "deepseek-r1:14b", "value": "deepseek-r1:14b"},{"title": "llama3.2-vision:11b", "value": "llama3.2-vision:11b"},{"title": "wojtek/wavecoder:6.7b-ultra-Q6_K", "value": "wojtek/wavecoder:6.7b-ultra-Q6_K"},{"title": "qwen2.5-coder:14b", "value": "qwen2.5-coder:14b"}] }
# @raycast.argument3 { "type": "dropdown", "placeholder": "Tone", "data": [{"title": "Casual", "value": "Casual"}, {"title": "Friendly", "value": "Friendly"}, {"title": "Professional", "value": "Professional"}, {"title": "Supportive", "value": "Supportive"}, {"title": "Empathetic", "value": "Empathetic"}, {"title": "Technical", "value": "Technical"}], "optional": true }
# @raycast.argument4 { "type": "dropdown", "placeholder": "Format", "data": [{"title": "General", "value": "General"}, {"title": "Slack message", "value": "Slack message"}, {"title": "Email", "value": "Email"}, {"title": "Product note", "value": "Product note"}, {"title": "Code comment", "value": "Code comment"}, {"title": "Customer-facing", "value": "Customer-facing"}, {"title": "Chat interface", "value": "Chat interface"}], "optional": true }
# Documentation:
# @raycast.description Refine text using Ollama with different models, tones, and formats
# @raycast.author Dipen Parmar
# @raycast.authorURL https://github.com/dipenparmar12
TEXT="$1"
MODEL="${2:-llama3}"
TONE="${3:-}"
FORMAT="${4:-General}"
# Check if Ollama is running
if ! pgrep -x "ollama" > /dev/null; then
echo "Ollama is not running. Starting Ollama..."
open -a Ollama
sleep 2
fi
echo "🤖 Processing with $MODEL..."
echo ""
echo "Refining text with model: $MODEL, tone: $TONE and format: $FORMAT"
echo ""
echo ""
# Build the prompt based on the selected tone and format with strict instructions
PROMPT="Refine this text to make it clear and well-structured while keeping the EXACT original meaning. DO NOT add any extra information or change the original intent in any way:"
PROMPT="$PROMPT\n\n\"$TEXT\""
if [ -n "$TONE" ]; then
PROMPT="$PROMPT\n\nTone: $TONE"
fi
# Add format if it's not General
if [ "$FORMAT" != "General" ]; then
PROMPT="$PROMPT\n\nMake it suitable for: $FORMAT"
fi
# Add instruction for refinement notes
PROMPT="$PROMPT\n\n--- Refinement Notes ---\nList only what was improved (e.g., clarity, grammar, structure). Do NOT mention any added content, as none should be added."
# Prepare JSON payload with proper escaping for the Ollama API
JSON_PAYLOAD=$(printf '{"model": "%s", "prompt": "%s", "stream": false}' "$MODEL" "$(echo "$PROMPT" | sed 's/"/\\"/g' | perl -pe 's/\n/\\n/g')")
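# Note: the sed/perl escaping above only handles double quotes and newlines;
# backslashes or other control characters in the input can still break the JSON.
# If jq is available (the clipboard script above already requires it), building
# the payload with jq is more robust, e.g.:
#   JSON_PAYLOAD=$(jq -n --arg model "$MODEL" --arg prompt "$PROMPT" \
#     '{model: $model, prompt: $prompt, stream: false}')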
# Use curl to query the Ollama API with proper error handling
RESPONSE=$(curl -s "http://localhost:11434/api/generate" \
-H "Content-Type: application/json" \
-d "$JSON_PAYLOAD")
echo "Response"
echo "-----------------------"
echo ""
# Check if the response contains the expected field
if echo "$RESPONSE" | grep -q "response"; then
echo "$RESPONSE" | python3 -c "
import sys, json
try:
data = json.load(sys.stdin)
if 'response' in data:
print(data['response'])
else:
print('Error: Response field missing in API response')
print('Full response:', data)
except json.JSONDecodeError as e:
print('Error parsing JSON response:', e)
print('Raw response:', sys.stdin.read())
"
else
echo "Error: Unexpected response format from Ollama API"
echo "Raw response: $RESPONSE"
fi
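
This variant can be exercised from a terminal in the same way, passing the text directly as the first argument (filename assumed):

chmod +x ollama-refine-text.sh
./ollama-refine-text.sh "pls refine this sentence, thx" "llama3" "Professional" "Email"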