@andrewnc
Created April 28, 2025 21:38
Find which model you're running to test, then chat with it from the command line.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"strings"

	"github.com/peterh/liner"
)

// Message represents a single chat message.
type Message struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

// ChatCompletionRequest is the payload for the /v1/chat/completions endpoint.
type ChatCompletionRequest struct {
	Model    string    `json:"model"`
	Messages []Message `json:"messages"`
}

// ChatCompletionResponse parses the server's response.
type ChatCompletionResponse struct {
	Choices []struct {
		Message Message `json:"message"`
	} `json:"choices"`
}

// ModelsResponse parses the /v1/models response.
type ModelsResponse struct {
	Data []struct {
		ID string `json:"id"`
	} `json:"data"`
}

func main() {
	// Server URL (hardcoded for simplicity).
	serverURL := "http://localhost:8000"

	// Step 1: Detect the server and list the available models.
	resp, err := http.Get(serverURL + "/v1/models")
	if err != nil {
		fmt.Println("Error connecting to server:", err)
		return
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		body, _ := io.ReadAll(resp.Body)
		fmt.Printf("Server returned non-200 status: %s, body: %s\n", resp.Status, string(body))
		return
	}

	var models ModelsResponse
	if err := json.NewDecoder(resp.Body).Decode(&models); err != nil {
		fmt.Println("Error decoding models response:", err)
		return
	}
	if len(models.Data) == 0 {
		fmt.Println("No models available on the server")
		return
	}

	// Select the first model.
	modelID := models.Data[0].ID
	fmt.Println("Using model:", modelID)

	// Initialize chat history.
	messages := []Message{}

	// Initialize liner for enhanced command-line input (history and arrow-key support).
	line := liner.NewLiner()
	defer line.Close()
	line.SetCtrlCAborts(true)

	fmt.Println("Enter messages to chat with the model. Type 'exit' or press Ctrl+D to quit.")

	for {
		// Use liner's Prompt method, which supports history and arrow keys.
		input, err := line.Prompt("User: ")
		if err == liner.ErrPromptAborted || err == io.EOF {
			fmt.Println("\nGoodbye!")
			break
		} else if err != nil {
			fmt.Println("Error reading input:", err)
			continue
		}

		// Trim spaces and check for the exit command.
		input = strings.TrimSpace(input)
		if input == "exit" {
			fmt.Println("Goodbye!")
			break
		}

		// Append input to the prompt history if non-empty.
		if input != "" {
			line.AppendHistory(input)
		}

		// Append the user message to the chat history.
		messages = append(messages, Message{Role: "user", Content: input})

		// Create the request payload.
		requestPayload := ChatCompletionRequest{
			Model:    modelID,
			Messages: messages,
		}
		requestBody, err := json.Marshal(requestPayload)
		if err != nil {
			fmt.Println("Error marshaling request:", err)
			continue
		}

		// Send a POST request to /v1/chat/completions.
		chatResp, err := http.Post(serverURL+"/v1/chat/completions", "application/json", bytes.NewReader(requestBody))
		if err != nil {
			fmt.Println("Error sending request:", err)
			continue
		}

		// Close the body explicitly on every path; a defer inside the loop
		// would not run until main returns and would leak connections.
		if chatResp.StatusCode != http.StatusOK {
			body, _ := io.ReadAll(chatResp.Body)
			chatResp.Body.Close()
			fmt.Printf("Server returned error: %s, body: %s\n", chatResp.Status, string(body))
			continue
		}

		var response ChatCompletionResponse
		decodeErr := json.NewDecoder(chatResp.Body).Decode(&response)
		chatResp.Body.Close()
		if decodeErr != nil {
			fmt.Println("Error decoding response:", decodeErr)
			continue
		}
		if len(response.Choices) == 0 {
			fmt.Println("No response from the model")
			continue
		}

		// Print the assistant's reply.
		assistantMessage := response.Choices[0].Message.Content
		fmt.Println("Assistant:", assistantMessage)

		// Append the assistant's message to the chat history.
		messages = append(messages, Message{Role: "assistant", Content: assistantMessage})
	}
}
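A brief usage note (a sketch of one possible setup, not part of the original gist): initialize a module and fetch the dependency with "go mod init chat" followed by "go get github.com/peterh/liner", then start the program with "go run main.go". The program assumes an OpenAI-compatible API server, for example a local vLLM instance, listening on http://localhost:8000 and exposing /v1/models and /v1/chat/completions; adjust serverURL if yours runs elsewhere.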