Forked from jmcdice/gist:4e36ef159d79fc0ea48a771f6829573a
Last active
July 10, 2024 18:32
-
-
Save svrc/fa46102fbcb49f8419fdcf7bf3622286 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env bash
#
# Minimal CLI chat client for an OpenAI-compatible GenAI proxy.
# Usage: ./chat.sh "Your prompt here"
set -euo pipefail

# API_KEY is intentionally blank here; fill in before use.
readonly API_KEY=''
readonly API_BASE='https://genai-proxy.sys.tas.vmtanzu.com/v1'
# Space before '#' is required: without it, bash parses the comment as a
# command run with MODEL in its environment, and MODEL is never set.
readonly MODEL="text-embedding-ada-002" # this is actually vicuna 1.5 7b on my TAS
readonly TEMPERATURE=0
readonly MAX_TOKENS=1024
#######################################
# Send a single-turn chat prompt to the /chat/completions endpoint and
# print the assistant's reply.
# Globals:   API_BASE, API_KEY, MODEL, TEMPERATURE, MAX_TOKENS (read)
# Arguments: $1 - user prompt text
# Outputs:   assistant message content on stdout
# Returns:   non-zero if jq or curl fails
#######################################
function chat_with_model() {
  local prompt=$1
  local request_body response

  # Build the JSON payload with jq so the prompt is safely escaped.
  # Declaration and assignment are split so a jq failure isn't masked.
  request_body=$(jq -n \
    --arg model "$MODEL" \
    --arg prompt "$prompt" \
    --argjson temperature "$TEMPERATURE" \
    --argjson max_tokens "$MAX_TOKENS" \
    '{model: $model, messages: [{"content": $prompt, role: "user"}], temperature: $temperature, max_tokens: $max_tokens}') || return

  # -k: the internal TAS proxy presents a self-signed cert (intentional);
  # -s: suppress the progress meter so only the reply reaches stdout.
  # NOTE(review): the bearer token on argv is visible to `ps`; consider
  # reading headers from a file (curl -H @file) for real deployments.
  response=$(curl -sk "${API_BASE}/chat/completions" \
    -H "Content-Type: application/json" \
    -H "Authorization: Bearer ${API_KEY}" \
    -d "$request_body") || return

  # Chat-completions responses carry the reply in .choices[].message.content.
  # (The original read .choices[].text, which is the legacy /completions
  # shape and yields null for chat responses.)
  printf '%s\n' "$response" | jq -r '.choices[].message.content'
}
# A prompt argument is mandatory; show usage and bail out without one.
if [[ -z "${1:-}" ]]; then
  printf '%s\n' "Usage: $0 \"Your prompt here\""
  exit 1
fi

# Hand the prompt off to the model.
chat_with_model "$1"
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment