Skip to content

Instantly share code, notes, and snippets.

@ochafik
Last active September 12, 2024 10:50
Show Gist options
  • Select an option

  • Save ochafik/69c6c0effb56a24853f603c2d7d4cc3b to your computer and use it in GitHub Desktop.

Select an option

Save ochafik/69c6c0effb56a24853f603c2d7d4cc3b to your computer and use it in GitHub Desktop.
RAG Example - MediaPipe + hnswlib-wasm + Gemma 2b it + json summary of ochafik.com
<!DOCTYPE html>
<!--
https://developer.chrome.com/docs/workbox
https://ai.google.dev/edge/mediapipe/solutions/genai/llm_inference
To develop:
- Download locally:
- https://www.kaggle.com/models/google/gemma/tfLite/gemma-2b-it-gpu-int4
- https://storage.googleapis.com/mediapipe-models/text_embedder/universal_sentence_encoder/float32/1/universal_sentence_encoder.tflite
- Run locally:
python3 -m http.server 8000
- WIP for Linux users: to get a local LLM server w/ llama.cpp:
( git clone https://github.com/ggerganov/llama.cpp /tmp/llama.cpp && cd /tmp/llama.cpp && make -j LLAMA_CURL=1 )
/tmp/llama.cpp/llama-server -mu https://huggingface.co/bartowski/gemma-2-2b-it-GGUF/resolve/main/gemma-2-2b-it-Q4_K_S.gguf -fa -c 4096
-->
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<style>
html, body {
display: flex;
flex-direction: column;
flex: 1;
position: fixed;
top: 0;
left: 0;
width: 100%;
height: 100%;
margin: 0;
padding: 0;
}
.chatter {
position: fixed;
bottom: 20px;
right: 20px;
width: 100%;
max-width: 400px;
height: 100%;
max-height: 600px;
overflow: auto;
background-color: white;
border: 2px solid #ccc;
border-radius: 10px;
box-shadow: 0 0 10px rgba(0,0,0,0.1);
z-index: 1000;
display: flex;
flex-direction: column;
}
/*
.chatter {
flex: 1;
width: 100%;
height: 100%;
display: flex;
flex-direction: column;
flex-wrap: nowrap;
}
*/
.chatter-footer {
display: flex;
flex-direction: column;
padding: 10px;
background-color: #f9f9f9;
border-top: 1px solid #ccc;
}
.messages-scroll-container {
flex: 1;
overflow-y: auto;
min-height: 0;
/* shadow around the messages */
box-shadow: 0 0 10px rgba(0, 0, 0, 0.1);
}
.messages, .thinking {
padding: 20px;
display: flex;
flex-direction: column;
}
.message {
margin-bottom: 15px;
display: flex;
}
.message-content {
padding: 10px 15px;
border-radius: 18px;
max-width: 80%;
}
.user {
justify-content: flex-end;
}
.user .message-content {
background-color: #007bff;
color: white;
}
.assistant {
justify-content: flex-start;
}
.assistant .message-content {
background-color: #e9e9eb;
color: black;
}
.memory {
justify-content: flex-start;
}
.memory .message-content {
background-color: #ffb1ed;
color: black;
}
.input-container {
display: flex;
flex-direction: row;
}
#thinking {
margin-left: 20px;
}
.input-container input {
flex: 1;
padding: 10px;
font-size: 16px;
border: 1px solid #ccc;
border-radius: 4px;
margin-right: 10px;
margin-left: 10px;
}
button {
padding: 10px 20px;
font-size: 16px;
border: 1px solid #ccc;
border-radius: 4px;
cursor: pointer;
}
.vertically-disappearing {
max-height: 30px;
transition: max-height 0.3s ease-out, opacity 0.3s ease-out, margin-bottom 0.3s ease-out;
opacity: 1;
overflow: hidden;
}
.vertically-disappearing.hidden {
max-height: 0;
opacity: 0;
margin-bottom: 0;
}
.progress-container {
width: 100%;
background-color: #f0f0f0;
border-radius: 3px;
box-shadow: inset 0 1px 3px rgba(0, 0, 0, .2);
margin-bottom: 20px;
}
.progress-bar {
display: flex;
height: 20px;
background-color: #4CAF50;
border-radius: 2px;
transition: width 0.4s ease-in-out;
align-items: center;
justify-content: center;
color: white;
font-family: Arial, sans-serif;
font-size: 12px;
}
</style>
<title>GenAI</title>
<script type="module">
import * as text_tasks from 'https://cdn.jsdelivr.net/npm/@mediapipe/tasks-text/+esm'
import * as genai_tasks from 'https://cdn.jsdelivr.net/npm/@mediapipe/tasks-genai/+esm'
import { loadHnswlib } from 'https://cdn.jsdelivr.net/npm/hnswlib-wasm@0.8.2/+esm'
//import closevectorWeb from 'https://cdn.jsdelivr.net/npm/closevector-web@0.1.6/+esm'
// Local model files: both must be downloaded and served next to this page
// (e.g. via `python3 -m http.server 8000`, see header comment).
// Download from https://www.kaggle.com/models/google/gemma/tfLite/gemma-2b-it-gpu-int4
const llmUrl = './gemma-2b-it-gpu-int4.bin';
// Download from https://storage.googleapis.com/mediapipe-models/text_embedder/universal_sentence_encoder/float32/1/universal_sentence_encoder.tflite
const embeddingModelUrl = './universal_sentence_encoder.tflite';
/**
 * Looks up a DOM element by id, throwing instead of returning null.
 * @param {string} id - The element id to look up.
 * @returns {Element} The matching element (never null).
 * @throws {Error} If no element with the given id exists.
 */
function getElement(id) {
  const found = document.getElementById(id);
  if (found) {
    return found;
  }
  throw new Error(`Element with id ${id} not found`);
}
/**
 * Updates a progress-bar element's width and label to the given percentage,
 * and hides its container once complete.
 * @param {string} elementId - Id of the progress-bar element.
 * @param {number} progress - Percentage in [0, 100].
 */
function updateProgress(elementId, progress) {
  // getElement() throws if the element is missing, so the original's
  // `if (progressBar)` null-check was dead code and has been removed.
  const progressBar = getElement(elementId);
  progressBar.style.width = `${progress}%`;
  progressBar.textContent = `${progress}%`;
  if (progress === 100) {
    // Collapse the surrounding .progress-container once loading completes.
    progressBar.parentElement.classList.add('hidden');
  }
}
/**
 * Fetches `url` (warming the service-worker cache) while reporting download
 * progress to a progress bar and a status line.
 * @param {string} title - Human-readable name shown in the status line.
 * @param {string} url - URL to fetch.
 * @param {string} progressElementId - Id of the progress-bar element.
 * @param {string} statsElementId - Id of the status-text element.
 * @throws {Error} On network failure or a non-2xx HTTP status.
 */
async function precache(title, url, progressElementId, statsElementId) {
  try {
    getElement(statsElementId).textContent = `⏳ ${title} loading...`;
    const response = await fetch(url);
    // fetch() does not reject on HTTP errors; without this check a 404 page
    // would be silently streamed as if it were the model file.
    if (!response.ok) {
      throw new Error(`HTTP ${response.status} fetching ${url}`);
    }
    const total = Number(response.headers.get('content-length'));
    if (!total) {
      // Can't compute a percentage without knowing the total size.
      getElement(statsElementId).textContent = '⚠️ Content-Length not available';
      return;
    }
    const reader = response.body.getReader();
    let bytesReceived = 0;
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      bytesReceived += value.length;
      updateProgress(progressElementId, Math.round((bytesReceived / total) * 100));
    }
  } catch (error) {
    getElement(statsElementId).textContent = '⚠️ Error during fetch: ' + error;
    throw error;
  }
}
/*
{
modelUrl: llmUrl,
options: {
maxTokens: 1000,
topK: 40,
temperature: 0.8,
randomSeed: 101
}
}
*/
/*
Workers need to return responses for every message, in the type `{result:any} | {error: any}`
If createInitialMessage is set, it needs to be a function that returns a message that will be sent before
any task is sent to the worker.
*/
/**
 * Creates a dispatcher that load-balances messages over a pool of Workers.
 * Each worker must answer every message with `{result: any} | {error: any}`.
 * @param {{url: string, options?: WorkerOptions, createInitialMessage?: Function}} config
 * @param {number} [minWorkers=0] - Workers to spawn eagerly.
 * @param {number} [maxWorkers=4] - Pool cap; extra tasks are queued.
 * @returns {Promise<(message: any) => Promise<any>>} Dispatch function that
 *   resolves with the worker's `result` or rejects with its `error`.
 */
async function createWorkerDispatcher({url, options, createInitialMessage}, minWorkers=0, maxWorkers=4) {
  // Pool entries: { worker: Worker, busy: boolean }.
  // Bug fix vs. the original: the pool previously mixed up these wrapper
  // entries with raw Worker objects (postMessage was called on a wrapper,
  // and `busy` was written on one object but read off the other), which
  // broke dispatch as soon as a worker was reused.
  const workers = [];
  const workerResolvers = new Map(); // Worker -> {resolve, reject} of the in-flight task
  const queue = []; // tasks waiting for a free worker: {message, resolve, reject}
  // TODO(review): createInitialMessage is documented above but was never wired
  // up in the original implementation; it remains unimplemented here.
  let initialPromise;
  const addWorker = () => {
    const worker = new Worker(url, options ?? { type: 'module' });
    const entry = { worker, busy: false };
    worker.onmessage = e => {
      const { result, error } = e.data;
      // Resolvers for the task this worker just finished.
      const { resolve, reject } = workerResolvers.get(worker);
      if (queue.length === 0) {
        entry.busy = false;
        workerResolvers.delete(worker);
      } else {
        // Keep the worker hot: immediately pick up the next queued task.
        const next = queue.shift();
        workerResolvers.set(worker, { resolve: next.resolve, reject: next.reject });
        worker.postMessage(next.message);
      }
      if (error == null) {
        resolve(result);
      } else {
        reject(error);
      }
    };
    worker.onerror = e => {
      const resolvers = workerResolvers.get(worker);
      workerResolvers.delete(worker);
      entry.busy = false;
      resolvers?.reject(e);
    };
    workers.push(entry);
    return entry;
  };
  for (let i = 0; i < minWorkers; i++) {
    addWorker();
  }
  return async message => {
    if (initialPromise) {
      await initialPromise;
      initialPromise = undefined;
    }
    let entry = workers.find(w => !w.busy);
    if (!entry && maxWorkers != null && workers.length >= maxWorkers) {
      // Pool exhausted: park the task until a worker frees up.
      return new Promise((resolve, reject) => {
        queue.push({ message, resolve, reject });
      });
    }
    entry ??= addWorker();
    entry.busy = true;
    return new Promise((resolve, reject) => {
      workerResolvers.set(entry.worker, { resolve, reject });
      entry.worker.postMessage(message);
    });
  };
}
const statsDisappearanceDelay = 3000;
/**
 * App entry point: loads the models and the RAG index in parallel, restores
 * any chat history from the URL hash, and wires up the chat UI.
 */
async function main() {
  // Kick off all heavyweight downloads in parallel: embedding model, LLM
  // weights, and the hnswlib WASM runtime.
  const loadEmbeddingModelFuture = precache('Embedding model', embeddingModelUrl, 'embeddings-load', 'embeddings-stats');
  const loadLLMFuture = precache('LLM', llmUrl, 'genai-load', 'genai-stats');
  const hnswlibFuture = loadHnswlib();
  const textEmbedderFuture = (async () => {
    await loadEmbeddingModelFuture;
    const text = await text_tasks.FilesetResolver.forTextTasks("https://cdn.jsdelivr.net/npm/@mediapipe/tasks-text/wasm");
    const textEmbedder = await text_tasks.TextEmbedder.createFromModelPath(text, embeddingModelUrl);
    // NOTE: setFinalDisappearingStatus is a `const` declared further down; by
    // the time the awaits above resolve, that declaration has been evaluated.
    setFinalDisappearingStatus('embeddings-stats', '✅ Embeddings Model loaded')
    return textEmbedder;
  })();
  const llmInferenceFuture = (async () => {
    await loadLLMFuture;
    const genai = await genai_tasks.FilesetResolver.forGenAiTasks("https://cdn.jsdelivr.net/npm/@mediapipe/tasks-genai@latest/wasm");
    const llmInference = await genai_tasks.LlmInference.createFromOptions(genai, {
      baseOptions: {
        modelAssetPath: llmUrl
      },
      // Low temperature + small topK keep answers close to the RAG memories.
      maxTokens: 4000,
      topK: 10,
      temperature: 0.1,
      randomSeed: 101
    });
    getElement('genai-stats').innerText = '✅ LLM loaded';
    return llmInference;
  })();
  // Model-agnostic chat formatter (currently always Gemma).
  function formatChat(messages, addGenerationPrompt = true) {
    // Fix: forward addGenerationPrompt (the original dropped it, so callers
    // could never suppress the trailing generation prompt).
    return formatChatForGemma(messages, addGenerationPrompt);
  }
  // Gemma chat template: <bos> + one <start_of_turn>role ... <end_of_turn>
  // per message; the role "assistant" is spelled "model" in this template.
  function formatChatForGemma(messages, addGenerationPrompt = true) {
    return '<bos>' +
      messages.map(message =>
        `<start_of_turn>${message.role === 'assistant' ? 'model' : message.role}\n` +
        `${message.content.trim()}<end_of_turn>\n`
      ).join('') +
      (addGenerationPrompt ? '<start_of_turn>model\n' : '');
  }
  const state = {
    messages: [], // full chat history: {role, content}[]
  };
  // Persist the chat history in the URL hash so reloads keep the conversation.
  function saveState() {
    window.location.hash = encodeURIComponent(JSON.stringify(state));
  }
  // Very light markdown-ish rendering: paragraphs separated by blank lines.
  const formatMarkdown = (text, out) => {
    let first = true;
    for (let paragraph of text.split('\n\n')) {
      paragraph = paragraph.trim();
      if (paragraph === '') continue;
      if (first) first = false;
      else out.appendChild(document.createElement('p'))
      const div = document.createElement('div');
      // innerText (not innerHTML): model output is untrusted, avoid injection.
      div.innerText = paragraph;
      out.appendChild(div);
    }
  };
  const messagesScrollContainer = getElement('messages-scroll-container');
  let isScrolledToBottom = true;
  // Appends a message to both the state and the DOM, then persists the state.
  const addMessage = ({role, content}) => {
    state.messages.push({ role, content });
    const messageDiv = Object.assign(document.createElement('div'), {
      className: 'message ' + role,
    })
    const messageContentDiv = Object.assign(document.createElement('div'), {
      className: 'message-content',
    });
    formatMarkdown(content, messageContentDiv);
    messageDiv.appendChild(messageContentDiv);
    getElement('messages').appendChild(messageDiv);
    // Only auto-scroll if the user hasn't scrolled up to read older messages.
    if (isScrolledToBottom) {
      scrollToBottom();
    }
    saveState();
  };
  // Track whether the user is at the bottom of the scrollback (±1px tolerance).
  messagesScrollContainer.addEventListener('scroll', () => {
    isScrolledToBottom = messagesScrollContainer.scrollHeight - messagesScrollContainer.clientHeight <= messagesScrollContainer.scrollTop + 1;
  });
  function scrollToBottom() {
    messagesScrollContainer.scrollTop = messagesScrollContainer.scrollHeight;
  }
  // Restore a previous conversation from the URL hash, if any.
  if (window.location.hash !== '') {
    try {
      const parsedState = JSON.parse(decodeURIComponent(window.location.hash.slice(1)));
      parsedState.messages.forEach(({ role, content }) => addMessage({role, content}));
    } catch (error) {
      console.error('Error parsing messages from URL hash:', error);
    }
  }
  if (state.messages.length === 0) {
    addMessage({role: 'assistant', content: "Hi, I'm ochafik.com's assistant! How can I help you today?"});
    // Clear that initial message from the history as it confuses Gemma.
    state.messages.length = 0;
  }
  // Pre-computed Q&A summary of ochafik.com, used as the RAG corpus.
  const indexDataFuture = (async () => {
    const response = await fetch("ochafik.com-index.json");
    return response.json();
  })();
  // Yields to the event loop so the UI can repaint during long loops.
  const asyncBreak = () => new Promise(resolve => setTimeout(resolve, 0));
  // Shows a final status message, then fades it out after a delay.
  const setFinalDisappearingStatus = (elementId, text) => {
    console.log(text);
    getElement(elementId).innerText = text;
    setTimeout(() => getElement(elementId).classList.add('hidden'), statsDisappearanceDelay);
  };
  // Runs an async action, returning its result and elapsed time in ms.
  async function timed(title, asyncAction) {
    const startTime = performance.now();
    const result = await asyncAction();
    const elapsedTime = performance.now() - startTime;
    console.log(`[${title}]: executed in ${elapsedTime.toFixed(2)}ms`);
    return {result, elapsedTime};
  }
  /** Embeds `text`, caching results in localStorage (keyed by text + model). */
  function embed(text, textEmbedder) {
    const key = `embedding(${text}, ${embeddingModelUrl})`;
    try {
      const cached = localStorage.getItem(key);
      if (cached != null) {
        return JSON.parse(cached);
      }
    } catch (e) {
      // Cache read failure is non-fatal; fall through and recompute.
      console.error(e);
    }
    const embeddings = textEmbedder.embed(text);
    const floatEmbeddings = embeddings.embeddings.map(e => e.floatEmbedding)
    if (floatEmbeddings.length !== 1) {
      throw new Error('Expected 1 embedding, found ' + floatEmbeddings.length);
    }
    const result = floatEmbeddings[0];
    localStorage.setItem(key, JSON.stringify(result));
    return result;
  }
  /**
   * Embeds every item's texts and builds an HNSW index over them.
   * @param {any[]} items - Corpus entries.
   * @param {(item: any) => (string | null)[]} getTexts - Texts to index per item.
   * @returns {Promise<(text: string, numResults?: number) => {distance: number, item: any}[]>}
   */
  async function indexQuestions(items, getTexts) {
    const textEmbedder = await textEmbedderFuture;
    // Probe the model once to discover the embedding dimensionality.
    const numDimensions = embed("test", textEmbedder).length;
    const hnswlib = await hnswlibFuture;
    // See https://github.com/ShravanSunder/hnswlib-wasm/blob/master/test/HierarchicalNSW.test.ts
    const metric = 'cosine';
    const vectorIndex = new hnswlib.HierarchicalNSW(metric, numDimensions, "index.data");
    // Capacity is items.length * 3 because each item may contribute several texts.
    vectorIndex.initIndex(items.length * 3 , 48, 128, 100);
    const {result: embeddings, elapsedTime: embeddingTime} = await timed("embedding all", async () => {
      const embeddings = []; // {text, embedding: number[], index: number}[]
      for (let i = 0; i < items.length; i++) {
        const texts = getTexts(items[i]);
        for (const text of texts) {
          if (text != null) {
            console.log(`Embedding text: ${text}`);
            embeddings.push({text, embedding: embed(text, textEmbedder), index: i});
          }
        }
        updateProgress('compute-embeddings-load', Math.round((i+1) / items.length * 100));
        await asyncBreak();
      }
      return embeddings;
    });
    setFinalDisappearingStatus('compute-embeddings-stats', `✅ Embeddings computed (${items.length} entries) in ${embeddingTime.toFixed(2)}ms`);
    const {elapsedTime: indexCreationTime} = await timed("index creation", async () => {
      for (let i = 0; i < embeddings.length; i++) {
        const {text, embedding, index} = embeddings[i];
        vectorIndex.addPoint(embedding, index, false);
        updateProgress('indexing-load', Math.round((i+1) / embeddings.length * 100));
        await asyncBreak();
      }
    });
    setFinalDisappearingStatus('indexing-stats', `✅ Index built (${embeddings.length} entries) in ${indexCreationTime.toFixed(2)}ms`);
    // The returned search function maps a query to its nearest corpus items.
    return (text, numResults = 3) => {
      const embedding = embed(text, textEmbedder);
      const result = vectorIndex.searchKnn(embedding, numResults, undefined);
      return result.neighbors.map((neighbourIndex, i) => ({
        distance: result.distances[i],
        item: items[neighbourIndex],
      }));
    }
  }
  const [
    llmInference,
    findClosestQuestions,
  ] = await Promise.all([
    llmInferenceFuture,
    (async () => indexQuestions((await indexDataFuture).questions_answers,
      qa => [qa.question]
    ))(),
  ]);
  // TODO: move to Worker
  // Sends the user's input — augmented with relevant RAG memories — to the LLM.
  async function execute() {
    const userMessage = getElement('input').value;
    getElement('input').value = '';
    getElement('thinking').style.display = 'block';
    addMessage({
      role: 'user',
      content: userMessage
    });
    await asyncBreak();
    const matches = findClosestQuestions(userMessage);
    // Only keep memories whose cosine distance is below this cutoff.
    const distanceCutoff = 0.2;
    console.log(`Found ${matches.length} memories relevant to "${userMessage}" (note: distanceCutoff = ${distanceCutoff}): ${JSON.stringify(matches, null, 2)}`);
    const relevantMemories = matches.filter(m => m.distance < distanceCutoff);
    // Mix RAG "memories" relevant to the last question with message history,
    // presented to the model as earlier Q/A turns.
    // (Removed: an unused `promptQAs` computation that duplicated this.)
    const augmentedMessages = [];
    for (const {item: {question, conciseAnswer, rationale}} of relevantMemories) {
      augmentedMessages.push({role: 'user', content: question});
      augmentedMessages.push({role: 'assistant', content: conciseAnswer});
    }
    state.messages.forEach(m => augmentedMessages.push(m));
    const prompt = formatChat(augmentedMessages);
    console.log("state.messages", state.messages);
    console.log("prompt", prompt);
    const genaiStartTime = performance.now();
    const response = await llmInference.generateResponse(prompt);
    const genaiElapsedTime = performance.now() - genaiStartTime;
    getElement('thinking').style.display = 'none';
    addMessage({role: 'assistant', content: response});
    getElement('genai-stats').innerText = `💬 Generated response in ${genaiElapsedTime.toFixed(2)}ms`;
    console.log(response);
    getElement('input').focus();
  }
  getElement('input').addEventListener('keypress', async (e) => {
    if (e.key === 'Enter') {
      // Surface failures instead of leaving an unhandled promise rejection.
      execute().catch(console.error);
    }
  });
  getElement('run').addEventListener('click', execute);
  getElement('clear-chat').addEventListener('click', () => {
    getElement('messages').innerHTML = '';
    state.messages.length = 0;
    saveState();
  });
}
window.addEventListener('load', async () => {
  // Register the Workbox service worker (sw.js) so model downloads get
  // cached; once an updated worker is installed and controlling, reload so
  // its caching applies to this page.
  if ('serviceWorker' in navigator) {
    try {
      const registration = await navigator.serviceWorker.register('./sw.js');
      console.log('ServiceWorker registration successful with scope: ', registration.scope);
      registration.onupdatefound = () => {
        const installing = registration.installing;
        installing.onstatechange = () => {
          const readyToControl = installing.state === 'installed' && navigator.serviceWorker.controller;
          if (readyToControl) {
            window.location.reload();
          }
        };
      };
    } catch (err) {
      console.log('ServiceWorker registration failed: ', err);
    }
  }
  await main();
});
</script>
</head>
<body>
<div class="chatter">
<div id="messages-scroll-container" class="messages-scroll-container">
<div id="messages" class="messages"></div>
<div id="thinking" style="display: none">
<div class="message assistant">
<div class="message-content">💬</div>
</div>
</div>
</div>
<div class="chatter-footer">
<div class="input-container">
<button id="clear-chat">New</button>
<input id="input" type="text" placeholder="Type a sentence here then hit Enter or press Run" />
<button id="run">Run</button>
</div>
<hr>
<div class="info-container">
<div id="embeddings-stats" class="vertically-disappearing"> ⏳ Embeddings Model loading...</div>
<div class="progress-container vertically-disappearing">
<span id="embeddings-load" class="progress-bar" style="width: 0%;">0%</span>
</div>
<div id="compute-embeddings-stats" class="stats vertically-disappearing"> ⏳ Computing embeddings...</div>
<div class="progress-container vertically-disappearing">
<span id="compute-embeddings-load" class="progress-bar" style="width: 0%;">0%</span>
</div>
<div id="indexing-stats" class="vertically-disappearing"> ⏳ Building index...</div>
<div class="progress-container vertically-disappearing">
<span id="indexing-load" class="progress-bar" style="width: 0%;">0%</span>
</div>
<div id="genai-stats" class="vertically-disappearing"> ⏳ LLM loading...</div>
<div class="progress-container vertically-disappearing">
<span id="genai-load" class="progress-bar" style="width: 0%;">0%</span>
</div>
</div>
</div>
</div>
</body>
{
"questions_answers": [
{
"question": "What is the capital of France?",
"conciseAnswer": "Paris"
},
{
"question": "What is the capital of Germany?",
"conciseAnswer": "Berlin"
},
{
"question": "What is the capital of Italy?",
"conciseAnswer": "Rome"
},
{
"question": "Who is zOlive?",
"conciseAnswer": "zOlive is Olivier Chafik, a software engineer passionate about cool tech and ambitious UX projects.",
"rationale": "The page starts with 'Who is zOlive?' and immediately identifies Olivier Chafik as zOlive, providing information about his interests."
},
{
"question": "What's Olivier Chafik's educational background?",
"conciseAnswer": "He has a Diplôme d'Ingénieur from École Centrale Paris, equivalent to a Master of Science degree.",
"rationale": "The page mentions his educational background, specifying the degree and institution."
},
{
"question": "Where has Olivier Chafik worked?",
"conciseAnswer": "He's worked at Google, Deutsche Bank, a startup, Sophis (Misys), and Cimmetry Systems.",
"rationale": "The page lists his work experience chronologically, mentioning these companies."
},
{
"question": "How long has he been at Google?",
"conciseAnswer": "Since mid-2012.",
"rationale": "The page states 'Since mid-2012: Google (London)'."
},
{
"question": "What has he worked on at Google?",
"conciseAnswer": "YouTube for iOS, Swiffy, and AdSense.",
"rationale": "These projects are listed under his Google experience."
},
{
"question": "What programming languages has he used at Google?",
"conciseAnswer": "Objective-C, JavaScript / ActionScript, Java / GWT, and Dart.",
"rationale": "These languages are listed in parentheses under his Google experience."
},
{
"question": "What did he do at Deutsche Bank?",
"conciseAnswer": "He worked on a financial valuation service system and its computation grids for 6 months.",
"rationale": "This information is provided in his work experience section."
},
{
"question": "What technologies did he use at Deutsche Bank?",
"conciseAnswer": "Spring and Netty.",
"rationale": "These technologies are mentioned in parentheses under his Deutsche Bank experience."
},
{
"question": "Did he work on any startups?",
"conciseAnswer": "Yes, he co-created a startup for a year, incubated at Advancia & NeuillyLab in Paris.",
"rationale": "This information is provided in his work experience section."
},
{
"question": "What happened to the startup?",
"conciseAnswer": "The team ended up splitting, but he considers the project great.",
"rationale": "This detail is mentioned in the startup experience description."
},
{
"question": "What technologies did he use in the startup?",
"conciseAnswer": "Scala, Vaadin, and Logistics Simulations.",
"rationale": "These technologies are listed in parentheses under his startup experience."
},
{
"question": "What was his role at Sophis?",
"conciseAnswer": "He led a front office development team for 3 years.",
"rationale": "This information is provided in his work experience section."
},
{
"question": "What technologies did he use at Sophis?",
"conciseAnswer": "C++ / C#, Oracle, with a focus on Team Management and Usability.",
"rationale": "These details are mentioned in parentheses under his Sophis experience."
},
{
"question": "What was his first job?",
"conciseAnswer": "He worked at Cimmetry Systems in Montréal for 2 years, doing 3D graphics.",
"rationale": "This is the last job mentioned in his work experience, implying it was his first job."
},
{
"question": "What technologies did he use at Cimmetry Systems?",
"conciseAnswer": "C++, OpenGL, with a focus on Performance Optimization.",
"rationale": "These technologies are listed in parentheses under his Cimmetry Systems experience."
},
{
"question": "Is Olivier involved in open-source development?",
"conciseAnswer": "Yes, he's a prolific open-source developer with several projects that have attracted active contributors.",
"rationale": "The page mentions his open-source involvement and lists several projects."
},
{
"question": "What are some of Olivier's open-source projects?",
"conciseAnswer": "ScalaCL, Scalaxy, JavaCL, BridJ, and JNAerator.",
"rationale": "These projects are listed in the open-source section of the page."
},
{
"question": "What does ScalaCL do?",
"conciseAnswer": "ScalaCL runs Scala on graphic cards (GPU-backed collections + AST voodoo).",
"rationale": "This description is provided for ScalaCL in the open-source projects list."
},
{
"question": "What is Scalaxy?",
"conciseAnswer": "Scalaxy is a collection of macro-based experiments including Streams, Reified, and more.",
"rationale": "Scalaxy is described as various macro-based experiments with several sub-projects listed."
},
{
"question": "What does JavaCL do?",
"conciseAnswer": "JavaCL is the oldest OpenCL bindings for Java, popular within academia.",
"rationale": "This description is provided for JavaCL in the open-source projects list."
},
{
"question": "What is BridJ?",
"conciseAnswer": "BridJ is a modern C / C++ interoperability library for Java (dynamic bindings).",
"rationale": "This description is provided for BridJ in the open-source projects list."
},
{
"question": "What does JNAerator do?",
"conciseAnswer": "JNAerator is a native bindings generator that parses C / C++ / Objective-C headers to create JNA, Rococoa, BridJ or Node.js bindings.",
"rationale": "This detailed description is provided for JNAerator in the open-source projects list."
},
{
"question": "Has Olivier given any talks about his projects?",
"conciseAnswer": "Yes, he has given several talks at various conferences.",
"rationale": "The page lists several talks he has given about his projects."
},
{
"question": "What talks has Olivier given?",
"conciseAnswer": "He's given talks on macros & compiler plugins, Scalaxy/Streams, reified trees for GPGPU, JavaCL / ScalaCL, and ScalaCL at various conferences.",
"rationale": "The page lists these talks with their titles, years, and locations."
},
{
"question": "Has Olivier been involved in any book projects?",
"conciseAnswer": "Yes, he reviewed Matthew Scarpino's 'OpenCL in Action' book, which features a full chapter on JavaCL.",
"rationale": "This information is mentioned towards the end of the page."
},
{
"question": "How can I contact Olivier?",
"conciseAnswer": "You can find him on GitHub (ochafik@), Mastodon (@ochafik@fosstodon.org and @ochafik@mstdn.social), LinkedIn (ochafik), and Twitter (@ochafik).",
"rationale": "These contact methods are provided near the beginning of the page."
},
{
"question": "What's Olivier's email address?",
"conciseAnswer": "His email is olivier.chafik@gmail.com.",
"rationale": "This email address is listed at the bottom of the page."
},
{
"question": "Is Olivier open to new opportunities?",
"conciseAnswer": "Yes, he states he's always open to new challenges.",
"rationale": "The page ends with 'I'm always open to new challenges: come and challenge me!'"
},
{
"question": "What's the significance of 'Ché zOlive'?",
"conciseAnswer": "It appears to be a play on words or nickname for Olivier, possibly combining 'Ché' (like Che Guevara) with 'zOlive' (a stylized version of 'Olive' from Olivier).",
"rationale": "This phrase appears at the top and bottom of the page, seeming to be a personal branding element."
},
{
"question": "Does Olivier have any disclaimers about his online presence?",
"conciseAnswer": "Yes, he states 'Opinions expressed are my own.'",
"rationale": "This disclaimer is mentioned at the very end of the page."
}
]
}
// Service worker: caches the app's assets with Workbox.
importScripts('https://storage.googleapis.com/workbox-cdn/releases/6.4.1/workbox-sw.js');
// Shared caching plugins: accept opaque (status 0) and OK responses only,
// and cap the cache size, evicting on quota errors.
const plugins = [
  new workbox.cacheableResponse.CacheableResponsePlugin({statuses: [0, 200]}),
  new workbox.expiration.ExpirationPlugin({maxEntries: 1000, purgeOnQuotaError: true}),
];
// Model weights are identified by file extension. Fix: match on the URL
// *path* so a query string or fragment doesn't defeat the extension check
// (the original called endsWith on the full URL string).
const isModelUrl = url => {
  const { pathname } = new URL(url);
  return pathname.endsWith('.bin') || pathname.endsWith('.tflite');
};
// Model files are huge and immutable: serve them cache-first.
workbox.routing.registerRoute(
  ({ request }) => isModelUrl(request.url),
  new workbox.strategies.CacheFirst({
    cacheName: 'bin-cache',
    plugins,
  })
);
// Everything else: serve from cache, refreshing in the background.
workbox.routing.registerRoute(
  ({ request }) => !isModelUrl(request.url),
  new workbox.strategies.StaleWhileRevalidate({
    cacheName: 'other-cache',
    plugins,
  })
);
// Activate updated workers immediately instead of waiting for old tabs to close.
self.addEventListener('install', event => self.skipWaiting());
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment