Skip to content

Instantly share code, notes, and snippets.

@jacob-ebey
Created October 12, 2024 05:22
Show Gist options
  • Save jacob-ebey/a08c49a55a7735fe63be174dfb754a76 to your computer and use it in GitHub Desktop.
Micro HTMX Streaming Chat App
// <chat-app>: client-side behavior for the streaming chat UI.
// Wires up optimistic rendering of the user's message, input validation,
// auto-scroll while the AI reply streams in, and refocus after each
// htmx request settles.
customElements.define(
  "chat-app",
  class extends HTMLElement {
    connectedCallback() {
      if (!this.isConnected) return;
      // `querySelectorExt` supports htmx's extended "find ..." selector
      // syntax relative to this element — assumed to be exposed on
      // `window` by the bundle; TODO confirm.
      const form = window.querySelectorExt(
        this,
        "find form",
      ) as HTMLFormElement;
      const messageInput = (
        form.elements as unknown as { message: HTMLInputElement }
      ).message;
      const messageArea = window.querySelectorExt(
        this,
        "find .chat-messages",
      ) as Element;

      // Handle form submissions: optimistically append the user's message
      // before htmx posts it to the server.
      form.addEventListener("submit", (event) => {
        const message = messageInput.value.trim();
        // Show a validation message if the input is empty.
        if (!message) {
          messageInput.setCustomValidity("Please provide a message.");
          // reportValidity() is required for the custom message to be
          // displayed, since we prevent the submit ourselves.
          messageInput.reportValidity();
          // Prevent the form from submitting.
          event.preventDefault();
          return;
        }
        // Build the message bubble with DOM APIs and `textContent` so the
        // user's input is never parsed as HTML (interpolating it into
        // innerHTML would allow markup/script injection).
        const bubble = document.createElement("div");
        bubble.className = "message user-message";
        const icon = document.createElement("img");
        icon.src = "/user-icon.svg";
        icon.alt = "";
        icon.width = 24;
        icon.height = 24;
        const text = document.createElement("p");
        text.textContent = message;
        bubble.append(icon, text);
        // Insert the message before the hidden "..." placeholder that the
        // htmx response targets.
        messageArea.insertBefore(
          bubble,
          messageArea.querySelector(".pending-ai-message"),
        );
        // Defer so clearing the input does not race the form serialization
        // htmx performs for this same submit event.
        setTimeout(() => {
          messageInput.value = "";
          messageArea.scrollTop = messageArea.scrollHeight;
        });
      });

      // Focus the input field again once htmx has swapped in the response.
      form.addEventListener("htmx:afterSettle", () => {
        messageInput.focus();
      });

      // Auto-scroll while content streams in, but only if the user is
      // already near the bottom (within 50px) so we never yank the view
      // away from older messages. Checks are debounced to one per 200ms
      // burst of mutations.
      let timeout: ReturnType<typeof setTimeout> | undefined;
      const observer = new MutationObserver(() => {
        if (timeout === undefined) {
          timeout = setTimeout(() => {
            if (
              messageArea.scrollHeight -
                (messageArea.scrollTop + messageArea.clientHeight) <
              50
            ) {
              messageArea.scrollTop = messageArea.scrollHeight;
            }
            timeout = undefined;
          }, 200);
        }
      });
      observer.observe(messageArea, {
        childList: true,
        characterData: true,
        subtree: true,
      });
    }
  },
);
import { renderToReadableStream } from "micro-htmx/jsx/server";
import { Ollama } from "ollama";
import { Shell } from "./shell.js";
import chatAppElement from "bridge:./chat-app-element.ts";
/**
 * HTTP handler for the chat demo.
 *
 * Routes:
 * - GET  "/"             → streams the full HTML shell for the chat UI.
 * - POST "/send-message" → streams an HTML fragment with the AI reply,
 *   which htmx swaps in before the pending-message placeholder.
 *
 * Returns `undefined` for any other path (or a non-POST "/send-message")
 * so an outer router can fall through to other handlers.
 */
export async function chatApp(request: Request) {
  const url = new URL(request.url);
  switch (url.pathname) {
    case "/":
      return new Response(
        renderToReadableStream(
          <Shell
            head={
              <>
                <title>Chat App</title>
                <meta name="description" content="A simple chat app." />
                <script defer type="module" src={chatAppElement} />
                <style>
                  {`
                    body,
                    html {
                      height: 100%;
                    }
                    .chat-container {
                      display: flex;
                      flex-direction: column;
                      height: 100%;
                      width: 100%;
                    }
                    .chat-messages {
                      flex-grow: 1;
                      overflow-y: auto;
                      padding: 20px;
                      border: 1px solid var(--border);
                    }
                    .message {
                      margin-bottom: 10px;
                      text-wrap: wrap;
                    }
                    .user-message {
                      text-align: right;
                    }
                    .ai-message {
                      text-align: left;
                    }
                    .ai-message pre {
                      text-wrap: wrap;
                    }
                    .pending-ai-message {
                      display: none;
                    }
                    .htmx-request .user-message + .pending-ai-message {
                      display: block;
                    }
                    .input-area {
                      display: flex;
                      padding: 20px;
                    }
                    .input-area input {
                      flex-grow: 1;
                      margin-right: 10px;
                    }
                    .htmx-indicator-show {
                      display: none;
                    }
                    .htmx-request .htmx-indicator-show,
                    .htmx-request.htmx-indicator-show {
                      display: inline;
                    }
                    .htmx-request .htmx-indicator-hide,
                    .htmx-request.htmx-indicator-hide {
                      display: none;
                    }
                  `}
                </style>
              </>
            }
          >
            <noscript>
              <p>
                <strong>Note:</strong> JavaScript is required for this app.
                Please enable it to continue.
              </p>
            </noscript>
            <chat-app>
              <div class="chat-container">
                <div class="chat-messages">
                  <div class="message ai-message">
                    <img src="/bot-icon.svg" alt="" width="24" height="24" />
                    <pre>
                      <p>Hello! How can I assist you today?</p>
                    </pre>
                  </div>
                  {/* Hidden "..." placeholder: shown while a request is in
                      flight and used as the htmx insertion anchor. */}
                  <div class="message pending-ai-message">
                    <img src="/bot-icon.svg" alt="" width="24" height="24" />
                    <pre>
                      <p>...</p>
                    </pre>
                  </div>
                </div>
                <form
                  class="input-area"
                  hx-post="/send-message"
                  hx-target="previous .chat-messages > .pending-ai-message"
                  hx-swap="beforebegin"
                  hx-disabled-elt="input, button"
                  hx-indicator="closest .chat-container"
                >
                  <input
                    disabled
                    required
                    name="message"
                    type="text"
                    placeholder="Type your message..."
                    onchange="
                      // Clear the validation message when the input changes
                      this.setCustomValidity('');
                    "
                  />
                  <button disabled type="submit">
                    <span class="htmx-indicator-hide">Send</span>
                    <img
                      class="htmx-indicator-show"
                      src="/spinner.svg"
                      alt=""
                      width="10"
                      height="10"
                    />
                  </button>
                </form>
                <script>
                  {/* JS is required, so only enable the form once we know we have JS available */}
                  {`
                    for (const disabled of document.currentScript.previousElementSibling.querySelectorAll("[disabled]")) {
                      disabled.removeAttribute("disabled");
                    }
                  `}
                </script>
              </div>
            </chat-app>
          </Shell>,
        ),
        {
          headers: {
            "Content-Type": "text/html",
          },
        },
      );
    case "/send-message": {
      if (request.method !== "POST") return;
      const formData = await request.formData();
      // formData.get() can return a File or null — only accept a non-empty
      // string (a File would otherwise be interpolated as
      // "[object Object]" into the prompt).
      const raw = formData.get("message");
      const message =
        typeof raw === "string" && raw.trim()
          ? `You said: ${raw}`
          : "Please provide a message.";
      return new Response(
        await renderToReadableStream(<StreamChatResponse message={message} />),
        {
          headers: {
            "Content-Type": "text/html",
          },
        },
      );
    }
  }
}
/**
 * Async-generator JSX component that streams an LLM reply as HTML.
 *
 * Yields the opening markup for an AI message bubble, then one <span>
 * per streamed chunk from Ollama, then the closing markup.
 *
 * @param message - the user prompt to send to the model.
 * @param model   - Ollama model name; defaults to the original
 *                  hard-coded "llama3.2:1b".
 */
async function* StreamChatResponse({
  message,
  model = "llama3.2:1b",
}: {
  message: string;
  model?: string;
}) {
  const ollama = new Ollama({ host: process.env.OLLAMA_HOST });
  const response = await ollama.chat({
    stream: true,
    model,
    messages: [
      {
        role: "system",
        content: "You are a helpful AI assistant in the form of a chat bot.",
      },
      {
        role: "user",
        content: message,
      },
    ],
  });
  yield '<div class="message ai-message"><img src="/bot-icon.svg" alt="" width="24" height="24" /><pre>';
  try {
    for await (const chunk of response) {
      if (chunk.message.content) {
        yield <span>{chunk.message.content}</span>;
      }
    }
  } finally {
    // Always emit the closing markup so a mid-stream failure does not
    // leave an unterminated <pre>/<div> in the swapped-in fragment.
    yield "</pre></div>";
  }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment