# Install the notebook's dependencies: the Ollama HTTP client and Kino widgets.
Mix.install([:ollama, :kino])
This is a demo of a tiny fraction of Livebook's capabilities.
In this example, we create a simple two-input form that allows setting a system prompt and sending a message to an LLM hosted with Ollama.
You can see an example of me toying with this Livebook in my Twitter post.
# Connect to the local Ollama server with default settings.
client = Ollama.init()

# Ask Ollama which models are installed and shape them into
# `{label, value}` pairs, the format Kino.Input.select expects.
# A failed listing raises here (fail fast — this is a notebook).
{:ok, %{"models" => listed}} = Ollama.list_models(client)

models = Enum.map(listed, fn %{"name" => name} -> {name, name} end)
# Dropdown of locally available Ollama models.
model_input = Kino.Input.select("Model", models)
# Free-form system prompt that steers the model's behavior.
system_prompt_input = Kino.Input.textarea("System prompt")
# The actual user message sent to the model.
instruction_input = Kino.Input.textarea("Instruction")
# Combine the three inputs into a single submit-on-click form and
# render it into the notebook. `Kino.render/1` returns its argument,
# so `form` is still the control we can listen to below.
form =
  [model: model_input, system_prompt: system_prompt_input, instruction: instruction_input]
  |> Kino.Control.form(submit: "Start")
  |> Kino.render()
# A frame is a placeholder output we can clear and append to later;
# the model's streamed reply will be rendered into it.
reply_frame = Kino.render(Kino.Frame.new())
# On every form submission, stream a chat completion from Ollama into
# the reply frame. Runs in the listener process spawned by Kino.listen.
Kino.listen(form, fn %{
                       data: %{
                         model: model,
                         system_prompt: system_prompt,
                         instruction: instruction
                       }
                     } ->
  # Chat transcript: the system message steers the model, the user
  # message carries the instruction typed into the form.
  messages = [
    %{role: "system", content: system_prompt},
    %{role: "user", content: instruction}
  ]

  # Drop the previous reply before streaming a new one.
  Kino.Frame.clear(reply_frame)

  # Previously this was `{:ok, streaming} = Ollama.chat(...)`, which
  # crashed the listener process on any API error (unknown model,
  # server down). Surface the error in the UI instead.
  case Ollama.chat(client, model: model, messages: messages, stream: true) do
    {:ok, streaming} ->
      # Render each streamed chunk as it arrives; `chunk: true` tells
      # Kino.Markdown to append to the previously rendered fragment.
      streaming
      |> Stream.each(fn %{"message" => %{"content" => chunk}} ->
        Kino.Frame.append(reply_frame, Kino.Markdown.new(chunk, chunk: true))
      end)
      |> Stream.run()

    {:error, reason} ->
      Kino.Frame.append(reply_frame, Kino.Text.new("Ollama error: #{inspect(reason)}"))
  end
end)
# Suppress the cell's return value so nothing extra is rendered below the frame.
Kino.nothing()