Skip to content

Instantly share code, notes, and snippets.

@TheCodeTherapy
Last active July 16, 2025 20:40
Show Gist options
  • Save TheCodeTherapy/8c987ff1e2b28ad2c3c5473a91f9f726 to your computer and use it in GitHub Desktop.
<!-- Scene root for the AI agent: avatar model, sound effects, response label, and sensing probes. -->
<m-group id="agent-group">
<!-- TTS playback element; the script points its src at generated speech audio. -->
<m-audio id="agent-audio" y="2" z="-2.7" loop="false"></m-audio>
<!-- Wrapper bobbed up and down by the y attr-anim below. -->
<m-group id="float-wrapper">
<!-- Wrapper spun around by the script's spin() action. -->
<m-group id="actions-wrapper">
<m-model
id="agent-model"
src="/assets/playground/brainzo_draco.glb"
anim="/assets/playground/brainzo_draco.glb"
collide="false"
y="1.2" ry="0"
sx="0.5" sy="0.5" sz="0.5"
>
<!-- Invisible cylinder around the avatar; presumably serves as a collision volume (collide is not disabled here) - confirm. -->
<m-cylinder radius="1.3" height="5" y="1.5" visible="false"></m-cylinder>
<!-- "Thinking..." indicator video; starts collapsed (sy=0) and is scaled in/out by showWaiting()/hideWaiting(). -->
<m-video collide="false" id="wait-indicator" src="/assets/playground/wait.webm" x="0" y="1.35" z="1.1" sy="0" emissive="5"></m-video>
<!-- Gentle idle sway of the avatar model. -->
<m-attr-anim attr="ry" start="-5" end="5" duration="12000" loop="true" ping-pong="true" easing="easeInOutQuint"></m-attr-anim>
</m-model>
<!-- Sound effects: muted (volume 0) and parked (negative start-time) until triggered by the script. -->
<m-audio id="spin-audio" src="/assets/playground/spin.mp3" loop="false" start-time="-10000" volume="0"></m-audio>
<m-audio id="horn-audio" src="/assets/playground/horn.mp3" loop="false" start-time="-10000" volume="0"></m-audio>
<!-- Prop models: scaled to 0 (hidden) until their actions animate them in. -->
<m-model id="horn-model" src="/assets/playground/viking_horn.glb" sx="0" sy="0" sz="0" x="-0.8" y="2.5" z="1" rx="-20" ry="220" collide="false"></m-model>
<m-model id="thumb-model" src="/assets/playground/thumb.glb" sx="0" sy="0" sz="0" y="1.8" x="-1.5" rx="0" collide="false"></m-model>
</m-group>
<!-- Text label where the agent's replies are displayed. -->
<m-label id="agent-response" width="5" height="4" x="3.75" y="2"></m-label>
<!-- Slow vertical bob applied to float-wrapper. -->
<m-attr-anim attr="y" start="0.2" end="0.35" duration="17000" loop="true" ping-pong="true" easing="easeInOutQuad"></m-attr-anim>
</m-group>
<!-- Probes feeding the script: chat messages and user positions within 10m. -->
<m-chat-probe id="chat" range="10" debug="false"></m-chat-probe>
<m-position-probe id="position-probe" range="10" debug="false"></m-position-probe>
</m-group>
<script>
// OpenAI API key and Assistant ID
const apiKey = "OPENAI_API_KEY_GOES_HERE";
const assistantId = "OPENAI_AGENT_ID_GOES_HERE";
// Element references for various components in the scene
const actionsWrapper = document.getElementById("actions-wrapper");
const agentAudio = document.getElementById("agent-audio");
const agentModel = document.getElementById("agent-model");
const waitIndicator = document.getElementById("wait-indicator");
const thumbModel = document.getElementById("thumb-model");
const hornAudio = document.getElementById("horn-audio");
const hornModel = document.getElementById("horn-model");
const spinAudio = document.getElementById("spin-audio");
const proximityProbe = document.getElementById("position-probe");
const proximityMap = new Map(); // Tracks nearby user positions
// Document-timeline timestamp of the last proximity event; drives the idle reset in checkIfHasNearbyUsers.
let latestProximityActivity = document.timeline.currentTime;
// Settings for user connections and audio
// Offset from a fixed epoch; used as a short per-session prefix for user ids (see connectionIdToUserId).
const connectionPrefix = Date.now() - 1710000000000;
const connectedUsers = new Set(); // User ids of currently connected users
const audioVolume = 3; // Playback volume for the agent's TTS audio (see playAudio)
const audioMarginDuration = 2000; // Extra ms after TTS playback before resolving (see playAudio)
const completionTimeout = 7000; // Max ms to wait for speech synthesis (see createSpeechWithTimeout)
// Settings for thumb rotation angles
const thumbsUpRotation = 0;
const thumbsDownRotation = 180;
const thumbsNeutralRotation = 90;
let threadId; // Intended to hold the OpenAI conversation thread id for reuse across runs
let latestPromise = null; // In-flight sendMessages() promise, or null when idle (see onQueue)
let queuedMessages = []; // Pending user messages awaiting processing
let latestMessageTime = document.timeline.currentTime; // Timestamp of the most recent chat message
// Initial chat message
const initialText = "Hi, I'm Squig! Chat with me through the text chat box on the UI.";
const responseLabel = document.getElementById("agent-response");
responseLabel.setAttribute("content", initialText);
/**
 * Runs a one-shot attribute animation on an MML element.
 * A temporary m-attr-anim child drives the attribute; once the duration has
 * elapsed the final value is pinned on the element and the helper is removed.
 * @param {MMLElement} element - The element to animate.
 * @param {string} attr - The attribute to animate.
 * @param {number} start - Starting value of the attribute.
 * @param {number} end - Ending value of the attribute.
 * @param {number} duration - Duration of the animation in milliseconds.
 * @param {string} easing - The easing function for the animation.
 */
function animate(element, attr, start, end, duration, easing) {
  const now = document.timeline.currentTime;
  const tween = document.createElement("m-attr-anim");
  const settings = {
    attr,
    start,
    end,
    "start-time": now,
    "end-time": now + duration,
    duration,
    easing,
    loop: false,
  };
  for (const [key, value] of Object.entries(settings)) {
    tween.setAttribute(key, value);
  }
  element.appendChild(tween);
  setTimeout(() => {
    element.setAttribute(attr, end);
    element.removeChild(tween);
  }, duration);
}
/**
 * Shows the "thinking" state: swaps the response label text and unfolds the
 * wait-indicator video by scaling its height from 0 to 1.
 */
function showWaiting() {
  responseLabel.setAttribute("content", "Thinking...");
  const [collapsed, expanded] = [0, 1];
  animate(waitIndicator, "sy", collapsed, expanded, 500, "easeInOutQuint");
}
/**
 * Hides the "thinking" state by collapsing the wait-indicator video
 * back to zero height.
 */
function hideWaiting() {
  const [expanded, collapsed] = [1, 0];
  animate(waitIndicator, "sy", expanded, collapsed, 500, "easeInOutQuint");
}
/**
 * Spins the agent (five full turns over 3.5s) and plays the spin sound,
 * muting the audio again once the 6-second clip window has passed.
 */
function spin() {
  const now = document.timeline.currentTime;
  spinAudio.setAttribute("volume", 1);
  spinAudio.setAttribute("start-time", now);
  spinAudio.setAttribute("pause-time", now + 6000);
  setTimeout(() => spinAudio.setAttribute("volume", 0), 6000);
  animate(actionsWrapper, "ry", 0, 1800, 3500, "easeInOutQuint");
}
/**
 * Pops the thumb model into view pointing up, then shrinks it back to
 * hidden (and to the neutral angle) after five seconds.
 */
function thumbsUp() {
  for (const axis of ["sx", "sy", "sz"]) {
    animate(thumbModel, axis, 0, 4, 1000, "easeInOutQuint");
  }
  animate(thumbModel, "rx", thumbsNeutralRotation, thumbsUpRotation, 1000, "easeInOutQuint");
  setTimeout(() => {
    for (const axis of ["sx", "sy", "sz"]) {
      animate(thumbModel, axis, 4, 0, 1000, "easeInOutQuint");
    }
    animate(thumbModel, "rx", thumbsUpRotation, thumbsNeutralRotation, 1000, "easeInOutQuint");
  }, 5000);
}
/**
 * Pops the thumb model into view pointing down, then shrinks it back to
 * hidden (and to the neutral angle) after five seconds.
 */
function thumbsDown() {
  for (const axis of ["sx", "sy", "sz"]) {
    animate(thumbModel, axis, 0, 4, 1000, "easeInOutQuint");
  }
  animate(thumbModel, "rx", thumbsNeutralRotation, thumbsDownRotation, 1000, "easeInOutQuint");
  setTimeout(() => {
    for (const axis of ["sx", "sy", "sz"]) {
      animate(thumbModel, axis, 4, 0, 1000, "easeInOutQuint");
    }
    animate(thumbModel, "rx", thumbsDownRotation, thumbsNeutralRotation, 1000, "easeInOutQuint");
  }, 5000);
}
/**
 * Plays the horn sound while scaling the horn model into view, then mutes
 * the audio and scales the model away after the 6-second clip window.
 */
function playHorn() {
  const now = document.timeline.currentTime;
  hornAudio.setAttribute("volume", 1);
  hornAudio.setAttribute("start-time", now);
  hornAudio.setAttribute("pause-time", now + 6000);
  for (const axis of ["sx", "sy", "sz"]) {
    animate(hornModel, axis, 0, 4, 1000, "easeInOutQuint");
  }
  setTimeout(() => {
    hornAudio.setAttribute("volume", 0);
    for (const axis of ["sx", "sy", "sz"]) {
      animate(hornModel, axis, 4, 0, 1000, "easeInOutQuint");
    }
  }, 6000);
}
/**
 * Enlarges the agent model slightly while it speaks and returns it to
 * normal size once the given duration has elapsed.
 * @param {number} duration - Duration (ms) for which the agent stays enlarged.
 */
function growWhileSpeaking(duration) {
  const scaleAgent = (from, to, easing) => {
    for (const axis of ["sx", "sy", "sz"]) {
      animate(agentModel, axis, from, to, 2000, easing);
    }
  };
  scaleAgent(0.5, 0.6, "easeOutBack"); // grow with a slight overshoot
  setTimeout(() => scaleAgent(0.6, 0.5, "easeInOutQuint"), duration);
}
/**
 * Plays audio from the given URL on the agent's audio element and resolves
 * once the clip (plus a transition margin) has finished, enabling
 * sequential playback.
 * @param {string} src - The audio source URL.
 * @param {number} duration - The duration of the audio in milliseconds.
 * @returns {Promise<void>} - Resolves after playback is complete.
 */
async function playAudio(src, duration) {
  // Start playback immediately: raise the volume, point at the clip, and
  // anchor its start time to "now" on the document timeline.
  agentAudio.setAttribute("volume", audioVolume);
  agentAudio.setAttribute("src", src);
  agentAudio.setAttribute("start-time", document.timeline.currentTime);
  return new Promise((resolve) => {
    setTimeout(() => {
      agentAudio.setAttribute("volume", 0); // Reset volume
      resolve();
    }, duration + audioMarginDuration); // Margin ensures smooth transitions
  });
}
/**
 * Estimates the duration of an MP3 audio file based on its frame structure.
 *
 * Scans for the first MPEG frame sync, then:
 *  - for VBR files (Xing/VBRI header) derives duration from the frame count;
 *  - for CBR files derives duration from the file size and the first frame's
 *    bitrate field.
 *
 * @param {ArrayBuffer} arrayBuffer - The binary data of the MP3 file.
 * @param {boolean} debug - Flag for debug logging output.
 * @returns {Promise<number>} Estimated duration in ms (0 when no valid frame is found).
 */
async function estimateMP3Duration(arrayBuffer, debug = false) {
  const uint8Array = new Uint8Array(arrayBuffer);
  let i = 0;
  // Look for the first frame sync (0xFF followed by a byte whose top 3 bits are set).
  while (i < uint8Array.length - 1) {
    if (uint8Array[i] === 0xFF && (uint8Array[i + 1] & 0xE0) === 0xE0) break;
    i++;
  }
  if (i >= uint8Array.length - 4) {
    if (debug) {
      console.log("No valid MP3 frame header found.");
    }
    return 0;
  }
  const headerOffset = i + 4;
  // Uint8Array.prototype.toString joins bytes with commas, so these strings
  // are the ASCII codes of "Xing" and "VBRI" respectively.
  const isXing = uint8Array.slice(headerOffset, headerOffset + 4).toString() === "88,105,110,103";
  const isVBRI = uint8Array.slice(headerOffset, headerOffset + 4).toString() === "86,66,82,73";
  if (isXing || isVBRI) {
    if (debug) {
      console.log("VBR MP3 detected.");
    }
    const headerPosition = isXing ? headerOffset : headerOffset + 32;
    // Big-endian 32-bit frame count stored in the VBR header.
    const frames = (
      (uint8Array[headerPosition + 8] << 24) |
      (uint8Array[headerPosition + 9] << 16) |
      (uint8Array[headerPosition + 10] << 8) |
      uint8Array[headerPosition + 11]
    );
    const sampleRate = 44100; // Set or detect sample rate per frame headers
    const durationSeconds = (frames * 1152) / sampleRate; // 1152 samples per MPEG1 Layer III frame
    if (debug) {
      console.log(`Frames: ${frames}, Sample rate: ${sampleRate}, Duration: ${durationSeconds}s`);
    }
    return Math.ceil(durationSeconds * 1000);
  }
  // CBR path. Frame header bytes 2-3: byte 2 is EEEEFFGH where E = bitrate
  // index (bits 15-12 of the 16-bit value) and F = sample-rate index
  // (bits 11-10). The previous shifts (>> 4 and >> 2) read byte 3 instead.
  const header = (uint8Array[i + 2] << 8) | uint8Array[i + 3];
  const bitrateIndex = (header >> 12) & 0x0F;
  const sampleRateIndex = (header >> 10) & 0x03;
  // MPEG1 Layer III bitrate table; index 0 is "free format" and index 15 is
  // invalid, so the header index maps directly (no off-by-one adjustment).
  const bitrates = [0, 32000, 40000, 48000, 56000, 64000, 80000, 96000, 112000, 128000, 160000, 192000, 224000, 256000, 320000];
  const sampleRates = [44100, 48000, 32000];
  const bitrate = bitrates[bitrateIndex];
  const sampleRate = sampleRates[sampleRateIndex];
  if (!bitrate || !sampleRate) {
    console.warn("Invalid bitrate or sample rate.");
    return 0;
  }
  // CBR: duration = total bits / bitrate, rounded up to whole seconds.
  const durationSeconds = Math.ceil((arrayBuffer.byteLength * 8) / bitrate);
  return durationSeconds * 1000;
}
/**
 * Checks whether a string is two runs of ASCII digits joined by a single
 * underscore (e.g. "123_456") — the shape produced by connectionIdToUserId.
 * @param {string} str - The string to test.
 * @returns {boolean} True if the string matches the pattern, otherwise false.
 */
function isNumberUnderscoreNumber(str) {
  const digitsUnderscoreDigits = /^\d+_\d+$/;
  return digitsUnderscoreDigits.test(str);
}
/**
 * Creates TTS audio from text via the OpenAI speech endpoint, estimates the
 * MP3 length locally, and packages the audio as a base64 data URL.
 * @param {string} tts - Text to synthesize.
 * @returns {Promise<{dataUrl: string, duration: number}>} - TTS audio data.
 */
async function createSpeech(tts) {
  const response = await fetch("https://api.openai.com/v1/audio/speech", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${apiKey}`,
    },
    body: JSON.stringify({
      model: "tts-1",
      input: tts,
      voice: "fable",
      response_format: "mp3"
    }),
  });
  const audioBlob = await response.blob();
  const arrayBuffer = await audioBlob.arrayBuffer();
  const duration = await estimateMP3Duration(arrayBuffer, false);
  // Build a binary string byte-by-byte so btoa can base64-encode the audio.
  const bytes = new Uint8Array(arrayBuffer);
  let binary = "";
  for (const byte of bytes) {
    binary += String.fromCharCode(byte);
  }
  const base64Audio = btoa(binary);
  return {
    dataUrl: `data:audio/mp3;base64,${base64Audio}`,
    duration,
  };
}
/**
 * Wraps createSpeech with a timeout so a slow synthesis request cannot
 * stall the message pipeline.
 * @param {string} tts - The text-to-speech content.
 * @returns {Promise<{dataUrl: string|null, duration: number}>} TTS data, or a
 *   {dataUrl: null, duration: 0} fallback on timeout or failure.
 */
async function createSpeechWithTimeout(tts) {
  // Keep the timer handle so it can be cancelled: previously the timeout was
  // never cleared, leaving a live timer running for up to completionTimeout
  // ms after every successful synthesis.
  let timeoutId;
  const timeoutPromise = new Promise((_, reject) => {
    timeoutId = setTimeout(
      () => reject(new Error("Timeout: Speech creation took too long")),
      completionTimeout
    );
  });
  try {
    return await Promise.race([createSpeech(tts), timeoutPromise]);
  } catch (error) {
    console.warn(error.message);
    return { dataUrl: null, duration: 0 }; // Return a fallback object
  } finally {
    clearTimeout(timeoutId);
  }
}
/**
 * Converts a connection ID to a user ID of the form
 * "<connectionPrefix>_<connectionId>".
 * @param {string} connectionId - The connection ID to convert.
 * @returns {string} The formatted user ID.
 */
function connectionIdToUserId(connectionId) {
  return [connectionPrefix, connectionId].join("_");
}
/**
 * Periodic idle check: with nobody nearby, restore the greeting after 10s
 * without proximity events; with users nearby, reset the queue and greeting
 * after 60s without chat messages.
 */
function checkIfHasNearbyUsers() {
  const now = document.timeline.currentTime;
  if (proximityMap.size === 0) {
    const idleSeconds = Math.ceil((now - latestProximityActivity) / 1000);
    if (idleSeconds > 10) {
      responseLabel.setAttribute("content", initialText);
    }
    return;
  }
  const quietSeconds = Math.ceil((now - latestMessageTime) / 1000);
  if (quietSeconds > 60) {
    latestPromise = null;
    queuedMessages = [];
    responseLabel.setAttribute("content", initialText);
  }
}
// Event listeners for proximity and connection events.
// Entering and moving are handled identically: remember the user's latest
// document-relative transform and refresh the activity timestamp.
const trackProximity = (event) => {
  const { connectionId } = event.detail;
  const { position, rotation } = event.detail.documentRelative;
  proximityMap.set(connectionId, { position, rotation });
  latestProximityActivity = document.timeline.currentTime;
};
proximityProbe.addEventListener("positionenter", trackProximity);
proximityProbe.addEventListener("positionmove", trackProximity);
proximityProbe.addEventListener("positionleave", (event) => {
  proximityMap.delete(event.detail.connectionId);
});
window.addEventListener("connected", (event) => {
  connectedUsers.add(connectionIdToUserId(event.detail.connectionId));
});
window.addEventListener("disconnected", (event) => {
  const { connectionId } = event.detail;
  connectedUsers.delete(connectionIdToUserId(connectionId));
  proximityMap.delete(connectionId);
});
/**
 * Sends messages to OpenAI's API, handles responses, and initiates relevant animations.
 *
 * Pops one message off `queuedMessages`, runs it through the Assistants v2
 * streaming API (reusing the known thread when available), parses the
 * streamed SSE payload for the completed `thread.message`, and then acts on
 * the assistant's JSON reply: ignore/wait status labels, named actions
 * (spin, thumbs up/down, horn), and spoken messages (TTS plus the grow
 * animation). Recurses in `finally` until the queue is drained.
 */
async function sendMessages() {
  if (queuedMessages.length === 0) return; // Exit if no messages are in the queue
  const message = queuedMessages.shift(); // Process one message at a time
  showWaiting();
  try {
    let res;
    if (threadId) {
      // Continue the existing conversation thread.
      res = await fetch(`https://api.openai.com/v1/threads/${threadId}/runs`, {
        method: "POST",
        headers: {
          Authorization: `Bearer ${apiKey}`,
          "Content-Type": "application/json",
          "OpenAI-Beta": "assistants=v2",
        },
        body: JSON.stringify({
          assistant_id: assistantId,
          stream: true,
          additional_messages: [
            {
              role: "user",
              content: JSON.stringify(message),
            },
          ],
        }),
      });
    } else {
      // First message: create the thread and the run in a single request.
      res = await fetch("https://api.openai.com/v1/threads/runs", {
        method: "POST",
        headers: {
          Authorization: `Bearer ${apiKey}`,
          "Content-Type": "application/json",
          "OpenAI-Beta": "assistants=v2",
        },
        body: JSON.stringify({
          assistant_id: assistantId,
          stream: true,
          thread: {
            messages: [
              {
                role: "user",
                content: JSON.stringify(message),
              },
            ],
          },
        }),
      });
    }
    if (!res.ok) {
      console.error("API error:", res.status, res.statusText, await res.text());
      throw new Error(`HTTP error! status: ${res.status}`);
    }
    // The streamed body is SSE: keep the "data: {...}" lines and parse each as JSON.
    const text = await res.text();
    const lines = text
      .split("\n")
      .filter((line) => line.startsWith("data: ") && line !== "data: [DONE]")
      .map((line) => line.substring(6))
      .map((line) => {
        try {
          return JSON.parse(line);
        } catch (e) {
          console.error("Failed to parse line", line);
          return { object: "unknown", data: line };
        }
      });
    // Remember the thread id so follow-up messages reuse the conversation.
    // (Fix: threadId was never assigned before, so the thread-reuse branch
    // above was unreachable and every message started a fresh thread.)
    if (!threadId) {
      const threadLine = lines.find((line) => typeof line.thread_id === "string");
      if (threadLine) threadId = threadLine.thread_id;
    }
    const completionLine = lines.find(
      (line) => line.object === "thread.message" && line.status === "completed"
    );
    if (completionLine) {
      try {
        let completionText = completionLine.content[0].text.value;
        // Strip citation markers and an optional ```json fence around the payload.
        completionText = completionText
          .replace(/【[^】]*】/g, "")
          .replace("```json\n", "")
          .replace("```", "");
        const completion = JSON.parse(completionText);
        if (completion.ignore) {
          responseLabel.setAttribute("content", "Ignoring");
        } else if (completion.wait) {
          responseLabel.setAttribute("content", "Waiting");
        } else if (completion.action) {
          if (completion.action === "spin") spin();
          else if (completion.action === "thumbs_up") thumbsUp();
          else if (completion.action === "thumbs_down") thumbsDown();
          else if (completion.action === "blow_horn") playHorn();
        }
        if (completion.message) {
          const { dataUrl, duration } = await createSpeechWithTimeout(completion.message);
          responseLabel.setAttribute("content", completion.message);
          if (dataUrl && duration) {
            growWhileSpeaking(duration); // Animation during playback
            await playAudio(dataUrl, duration); // Wait for the audio playback to finish
          }
        }
      } catch (e) {
        console.error(
          "Failed to process completion:",
          completionLine.content[0].text.value,
          e
        );
      }
    }
  } finally {
    hideWaiting();
    if (queuedMessages.length > 0) {
      await sendMessages();
    }
  }
}
/**
 * Kicks off queue processing unless a batch is already in flight; when the
 * current batch settles, re-arms itself if more messages arrived meanwhile.
 */
function onQueue() {
  if (latestPromise !== null) return; // a request is already in progress
  const finish = () => {
    latestPromise = null;
    if (queuedMessages.length > 0) onQueue(); // keep draining the queue
  };
  latestPromise = sendMessages().finally(finish);
}
/**
 * Adds a user message to the queue and triggers the processing chain.
 * @param {string} message - The message content.
 * @param {string} userId - The ID of the user sending the message.
 */
function submitMessage(message, userId) {
  queuedMessages.push({
    message,
    userId,
    time: Date.now() / 1000, // unix seconds
    nearbyUsers: Array.from(connectedUsers),
  });
  onQueue();
}
// Wire the chat probe into the message queue and start the periodic idle check.
const chat = document.getElementById("chat");
chat.addEventListener("chat", (event) => {
  latestMessageTime = document.timeline.currentTime;
  const { message, connectionId } = event.detail;
  submitMessage(message, connectionIdToUserId(connectionId));
});
setInterval(checkIfHasNearbyUsers, 1000);
</script>
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment