WebLLM test page
<!doctype html>
<html>
  <head>
    <script>
      webLLMGlobal = {};
    </script>
    <style>
      .error {
        font-weight: bold;
        color: red;
      }
    </style>
  </head>
  <body>
    <h2>WebLLM Test Page</h2>
    <p>Based on the <a href="https://github.com/mlc-ai/web-llm/tree/main/examples/get-started">web-llm/get-started</a>
      example.</p>
    <p>See <a href="https://webgpureport.org/">webgpureport.org</a> for WebGPU support information for your browser.</p>
    <p>Model: <b id="model"></b></p>
    <p><em id="progress">Model will be loaded..</em></p>
    <h3>Prompt</h3>
    <p id="prompt"></p>
    <h3>Response</h3>
    <p id="stats"></p>
    <p><em id="response">&lt;no response yet&gt;</em></p>
    <script type="module">
      import * as webllm from "https://esm.run/@mlc-ai/web-llm";

      // Write text into the page element with the given id,
      // optionally adding a CSS class (used for error display).
      function setInfo(id, text, cssClass) {
        const e = document.getElementById(id);
        if (e == null) {
          throw Error("Cannot find label " + id);
        }
        if (cssClass) {
          e.classList.add(cssClass);
        }
        e.innerText = text;
      }

      async function main() {
        setInfo('response', 'Initializing..');
        // Show model download/compilation progress while the engine loads
        const initProgressCallback = (report) => {
          setInfo("progress", report.text);
        };
        const selectedModel = 'SmolLM-135M-Instruct-q4f16_1-MLC';
        setInfo('model', selectedModel);
        const engine = await webllm.CreateMLCEngine(
          selectedModel,
          {
            initProgressCallback: initProgressCallback,
            logLevel: "INFO",
          },
          {
            context_window_size: 512,
          },
        );

        // Send a single prompt and measure how long the reply takes
        const start = performance.now();
        const prompt = "Hello, what can you tell me about kangaroos?";
        setInfo('prompt', prompt);
        setInfo('response', 'Querying chat engine..');
        const reply = await engine.chat.completions.create({
          messages: [{ role: "user", content: prompt }],
        });
        const duration = performance.now() - start;
        const msg = reply?.choices[0]?.message?.content;
        setInfo('stats', `Chat engine query took ${Math.floor(duration / 1000)} seconds`);
        setInfo('response', msg);
        console.log('usage', reply.usage);
      }

      main()
        .catch(e => { setInfo('progress', e, 'error'); });
    </script>
  </body>
</html>
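
The single-shot call above leaves the response paragraph empty until the full answer is ready. web-llm also exposes an OpenAI-style streaming API; the sketch below shows how the query step could be rewritten to display the answer incrementally. It is a minimal sketch, assuming the `engine` and `setInfo` from the page above and a web-llm version whose streaming chunks carry `choices[0].delta.content`; the `streamReply` helper is not part of the original file.

// Sketch only: streaming variant of the chat call, assuming the `engine`
// and `setInfo` defined in the page above.
async function streamReply(engine, prompt) {
  // stream: true asks the engine for an async iterable of partial results
  const chunks = await engine.chat.completions.create({
    messages: [{ role: "user", content: prompt }],
    stream: true,
  });
  let text = "";
  for await (const chunk of chunks) {
    // each chunk carries a small delta of the answer; append it as it arrives
    text += chunk.choices[0]?.delta?.content || "";
    setInfo('response', text);
  }
  return text;
}

Calling `await streamReply(engine, prompt)` in place of the `engine.chat.completions.create` block would update the response paragraph as tokens arrive instead of after the whole reply is generated.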