Sample R Shiny app that runs local models with an Ollama server. Coded mostly by various LLMs, including Posit's Shiny Assistant.
# This is a sample R Shiny chatbot app that runs local models with an Ollama server.
# You also need Ollama installed and an Ollama server running, plus at least one local model pulled.
# I hard-coded a few local models; if you use this, you'll want to hard-code yours
# (including descriptions, or take those out).
# Coded with lots of LLM help + Posit's Shiny Assistant.
library(shiny)
library(shinychat)
library(bslib)
# ellmer is called below via ellmer::chat_ollama(), so it must be installed too
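# Optional: instead of hard-coding model names, you can discover whatever
# models are pulled on the local server. A minimal sketch (assumes Ollama's
# default port 11434 and that the jsonlite package is installed); its result
# could be passed as the `choices` argument of selectInput() below:
#
# local_models <- function(base_url = "http://localhost:11434") {
#   # Ollama's /api/tags endpoint lists every locally pulled model
#   tags <- jsonlite::fromJSON(paste0(base_url, "/api/tags"))
#   tags$models$name
# }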
ui <- page_sidebar(
  title = "Chat with LLMs",
  sidebar = sidebar(
    selectInput("model_choice",
                "Select Model:",
                choices = c("gemma2", "mistral-small:latest", "phi4:latest"),
                selected = "gemma2"),
    # Add some helpful info about the models
    card(
      card_header("About Models"),
      tags$div(
        style = "line-height: 1.2;",
        p(tags$b("gemma2:"), "Google's lightweight and efficient yet capable model.", style = "margin-bottom: 8px;"),
        p(tags$b("mistral-small:"), "Mistral Small 3 sets a new benchmark in the 'small' LLM category below 70B.", style = "margin-bottom: 8px;"),
        p(tags$b("phi4:"), "14B parameter, state-of-the-art open model from Microsoft.", style = "margin-bottom: 0;")
      )
    ),
    # Add a button to download the chat
    actionButton("downloadChat", "Download Chat")
  ),
  chat_ui("chat")
)
# Attach a JavaScript snippet to handle the download.
# When the download button is clicked, the script extracts the text inside
# the chat's <shiny-chat-messages> element, creates a Blob from it, and triggers a download.
ui <- tagList(
  ui,
  tags$script(HTML("
    document.addEventListener('DOMContentLoaded', function() {
      document.getElementById('downloadChat').addEventListener('click', function() {
        // Find the chat container by the id you provided to chat_ui()
        var chatContainer = document.getElementById('chat');
        if (!chatContainer) return;
        // The chat messages are rendered inside the <shiny-chat-messages> element
        var chatMessages = chatContainer.querySelector('shiny-chat-messages');
        if (!chatMessages) return;
        // Get the visible text (this will strip extra formatting)
        var text = chatMessages.innerText;
        // Create a blob and a temporary anchor to trigger the download
        var blob = new Blob([text], { type: 'text/plain' });
        var url = URL.createObjectURL(blob);
        var a = document.createElement('a');
        a.href = url;
        a.download = 'chat.txt';
        document.body.appendChild(a);
        a.click();
        document.body.removeChild(a);
        URL.revokeObjectURL(url);
      });
    });
  "))
)
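# An all-R alternative to the JavaScript snippet above: put
# downloadButton("downloadChat", "Download Chat") in the sidebar and serialize
# the chat history server-side. Hedged sketch only; it assumes ellmer's
# $get_turns() method and the Turn @role/@text accessors:
#
# output$downloadChat <- downloadHandler(
#   filename = function() "chat.txt",
#   content = function(file) {
#     turns <- chat_rv()$get_turns()
#     lines <- vapply(turns, function(t) paste0(t@role, ": ", t@text), character(1))
#     writeLines(lines, file)
#   }
# )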
server <- function(input, output, session) {
  # Reactive value holding the current ellmer chat object
  chat_rv <- reactiveVal()

  # (Re)initialize the chat whenever the selected model changes.
  # ignoreInit = FALSE ensures a chat object exists on startup; note that
  # switching models starts a fresh conversation (history is not carried over).
  observeEvent(input$model_choice, {
    chat_rv(ellmer::chat_ollama(
      system_prompt = "You're a helpful assistant",
      model = input$model_choice
    ))
  }, ignoreInit = FALSE)

  # Stream the model's reply into the chat UI as it is generated
  observeEvent(input$chat_user_input, {
    stream <- chat_rv()$stream_async(input$chat_user_input)
    chat_append("chat", stream)
  })
}

shinyApp(ui, server)
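To try it out: install the shiny, bslib, shinychat, and ellmer packages, make sure an Ollama server is running (ollama serve, or the desktop app) with at least one of the hard-coded models pulled (for example, ollama pull gemma2), then save the code as app.R and launch it with shiny::runApp().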