Skip to content

Instantly share code, notes, and snippets.

@andrevdm
Last active April 19, 2025 12:57
Show Gist options
  • Save andrevdm/e05abe400b2a9a5d9d6258bb03bd3a15 to your computer and use it in GitHub Desktop.
Haskell Ollama chat example showing how to respond to a tool call from the model
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedRecordDot #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE QuasiQuotes #-}
module App where
import Verset
import qualified Data.Aeson as Ae
import qualified Data.List.NonEmpty as NE
import qualified Ollama as O
import Text.Pretty.Simple (pPrint)
import Text.RawString.QQ (r)
import Control.Exception.Safe (throwString)
-- | Two-round Ollama chat demonstrating tool (function) calling.
--
-- Round 1: send the user's question together with a tool definition; a
-- tool-capable model should answer with a @tool_calls@ entry instead of text.
-- Round 2: replay the full history — the original user message, the model's
-- tool-call response, and a 'O.Tool'-role message carrying the (faked here)
-- tool result — so the model can produce the final natural-language answer.
app :: IO ()
app = do
  -- Model to use. Install with `ollama pull <model_name>`.
  -- Make sure it has the tool capability (use `ollama show <model_name>` to check),
  -- and consult the Berkeley function-calling leaderboard for good candidates:
  -- https://gorilla.cs.berkeley.edu/leaderboard.html
  let modelName = "qwen2.5:7b-instruct"

  -- Example tool definition (OpenAI-style JSON function schema).
  -- The raw-string content is kept verbatim; JSON whitespace is irrelevant
  -- to the decoder below.
  let toolsJson' = [r|
[
{
"type": "function",
"function": {
"name": "get_current_weather",
"description": "Get the current weather for a location",
"parameters": {
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The location to get the weather for"
},
"format": {
"type": "string",
"description": "The format to return the weather in, e.g. 'celsius' or 'fahrenheit', default is 'celsius'",
"enum": ["celsius", "fahrenheit"]
}
},
"required": ["location", "format"]
}
}
}
]
|]

  -- Fail fast (with the parser's message) if the example JSON is malformed.
  toolsJson <-
    case Ae.eitherDecode toolsJson' :: Either [Char] [Ae.Value] of
      Right v -> pure v
      Left e -> throwString e

  -- Shared chat options; per-call fields are overridden via record update.
  -- You could start from O.defaultChatOptions instead.
  let mkCopt msg =
        O.ChatOps
          { chatModelName = modelName
          , messages = msg
          , tools = Nothing
          , format = Nothing
          , keepAlive = Nothing
          , hostUrl = Nothing
          , responseTimeOut = Nothing
          , options = Nothing
          , stream = Nothing
          }

  -- First message, i.e. "from the user".
  let msg1 =
        O.Message
          { role = O.User
          , content = "What is the weather in Dublin?"
          , images = Nothing
          , tool_calls = Nothing
          }

  -- Options for the first call: send the first message and attach the tools.
  -- NB: built with the total NonEmpty constructor (:|) rather than the
  -- partial NE.fromList.
  let chatOpts1 =
        (mkCopt (msg1 NE.:| []))
          { O.tools = Just toolsJson
          }

  -- First chat round.
  chatRes1' <- O.chat chatOpts1

  -- Check for errors.
  chatRes1 <-
    case chatRes1' of
      Left e -> throwString e
      Right r' -> pure r'

  putText ""
  putText "=================="
  pPrint chatRes1

  -- In a real application we would inspect the response for a tool call and,
  -- if present, run the tool with the given parameters. The next request then
  -- carries the tool's answer back to the chat.
  -- NB: the model's own tool-call message must also be replayed in the history.
  -- Lift the optional response message into a (possibly empty) list.
  let chatRes1Message = maybe [] (:[]) chatRes1.message

  -- Tool result (faked in this example) as a Tool-role message.
  let msg2 =
        O.Message
          { role = O.Tool
          , content = [r| {"get_current_weather": "cloudy"} |]
          , images = Nothing
          , tool_calls = Nothing
          }

  -- Second round: full history (user question, tool-call response, tool
  -- result). Again built totally with (:|) instead of NE.fromList.
  chatRes2 <- O.chat $ mkCopt (msg1 NE.:| (chatRes1Message <> [msg2]))
  putText ""
  putText "=================="
  pPrint chatRes2
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment