Last active
June 26, 2024 23:56
-
-
Save ggorlen/8960728e37a29c9f00a64e642abccb08 to your computer and use it in GitHub Desktop.
OpenAI API request with fetch
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Credentials and chat-completions endpoint shared by both fetchers below.
// NOTE(review): the key is hard-coded for demo purposes — in real code load
// it from the environment, never commit it.
const apiKey = "YOUR_OPENAI_API_KEY";
const endpoint = "https://api.openai.com/v1/chat/completions";
/**
 * Request a (non-streaming) chat completion from the OpenAI API.
 *
 * @param {Array<{role: string, content: string}>} messages - Chat history.
 * @returns {Promise<string>} The content of the first completion choice.
 * @throws {Error} With the API's error message (or the HTTP status when the
 *   error payload is malformed) when the response is not OK.
 */
const fetchCompletion = async (messages) => {
  const response = await fetch(endpoint, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${apiKey}`,
    },
    body: JSON.stringify({
      messages,
      model: "gpt-4o",
      temperature: 0,
    }),
  });
  const data = await response.json();
  if (!response.ok) {
    // Guard the error shape: a proxy or gateway error may not include
    // {error: {message}}, and `new Error` (not bare `Error`) is idiomatic.
    throw new Error(data?.error?.message ?? `HTTP ${response.status}`);
  }
  return data.choices[0].message.content;
};
/**
 * Stream a chat completion from the OpenAI API, invoking `onContent` with
 * the accumulated text each time a new token arrives.
 *
 * @param {Array<{role: string, content: string}>} messages - Chat history.
 * @param {(text: string) => void} onContent - Called with the full content
 *   accumulated so far after each token.
 * @returns {Promise<string>} The complete completion text.
 * @throws {Error} With the API's error message (or the HTTP status when the
 *   error payload is malformed) when the response is not OK.
 */
const fetchStreamingCompletion = async (messages, onContent) => {
  const response = await fetch(endpoint, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Accept: "text/event-stream",
      Authorization: `Bearer ${apiKey}`,
    },
    body: JSON.stringify({
      messages,
      model: "gpt-4o",
      temperature: 0,
      stream: true,
    }),
  });
  if (!response.ok) {
    const data = await response.json();
    throw new Error(data?.error?.message ?? `HTTP ${response.status}`);
  }
  let result = "";
  let lastSubChunk = "";
  // Reuse ONE decoder with {stream: true}: a fresh TextDecoder per network
  // chunk corrupts multi-byte UTF-8 sequences that happen to be split
  // across chunk boundaries into U+FFFD replacement characters.
  const decoder = new TextDecoder();
  for (const reader = response.body.getReader(); ; ) {
    const { value, done } = await reader.read();
    if (done) {
      break;
    }
    const chunk = decoder.decode(value, { stream: true });
    // SSE frames look like `data: {...}`; split on that prefix, keeping
    // only splits followed by a JSON object/array start.
    const subChunks = chunk.split(/^data: (?=[{\[])/gm);
    for (const subChunk of subChunks) {
      if (!subChunk.trim()) {
        continue;
      } else if (subChunk.trim() === "[DONE]") {
        // End-of-stream sentinel: stop reading entirely (the original
        // `break` only exited the inner loop).
        return result;
      }
      let data;
      try {
        data = JSON.parse(subChunk);
      } catch (err) {
        // A JSON frame can be split across two network chunks; retry with
        // the previous partial fragment prepended.
        try {
          data = JSON.parse(lastSubChunk + subChunk);
        } catch (err2) {
          console.warn(err2);
          lastSubChunk = subChunk;
          continue;
        }
      }
      // The final usage frame can carry an empty `choices` array, so
      // optional-chain rather than destructure unconditionally.
      const content = data.choices[0]?.delta?.content;
      if (content) {
        result += content;
        onContent(result);
      }
      lastSubChunk = subChunk;
    }
  }
  return result;
};
// Demo driver: one blocking completion, then a streaming one.
(async () => {
  const messages = [{ role: "user", content: "ping" }];
  console.log(await fetchCompletion(messages));
  const result = await fetchStreamingCompletion(messages, (token) =>
    console.log(token),
  );
  console.log(result);
})().catch((err) => {
  // Surface failures instead of leaving an unhandled promise rejection.
  console.error(err);
});
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment