Working example of streaming responses from OpenAI and simultaneously saving them to the DB as they stream in.
from openai import AsyncOpenAI

from fastapi import Depends, HTTPException, Path
from fastapi.responses import StreamingResponse

# get_settings, log, crud, router, AiChatDB, UserInDB, get_current_active_user,
# validate_aichat, UserActionLevel, and UserAction are provided elsewhere in the
# surrounding application; they are not defined in this gist.

client = AsyncOpenAI(
    api_key=get_settings().OPENAI_API_KEY,
)

# ----------------------------------------------------------------------------------------------
async def get_response_openai(aichat: AiChatDB):
    """Stream a chat completion from OpenAI, saving the partial reply to the DB as it arrives."""
    if aichat.model == "gpt-3.5-turbo" or aichat.model == "gpt-4":
        try:
            openai_stream = await client.chat.completions.create(
                model=aichat.model,
                messages=[
                    {"role": "system", "content": aichat.prePrompt},
                    {"role": "user", "content": aichat.prompt},
                ],
                temperature=0,
                max_tokens=500,
                top_p=1,
                frequency_penalty=0.0,
                presence_penalty=0.0,
                stream=True,
            )
        except Exception as e:
            msg = f"Error in communicating with OpenAI: {str(e)}"
            log.info(msg)
            raise HTTPException(status_code=503, detail=msg)

        try:
            async for event in openai_stream:
                current_content = ""
                if event.choices[0].delta.content is not None:
                    current_content = event.choices[0].delta.content
                    # accumulate the reply and persist the partial response as it streams in
                    aichat.reply += current_content
                    await crud.put_aichat(aichat)
                if event.choices[0].finish_reason is not None:
                    return
                # log.info(f"Streamed from OpenAI: {current_content}")
                yield current_content
        except Exception as e:
            # note: once streaming has begun, the 503 status can no longer reach the client;
            # raising here mainly stops the generator and records the failure
            msg = f"Streaming error with OpenAI: {str(e)}"
            log.info(msg)
            raise HTTPException(status_code=503, detail=msg)
    else:
        msg = "get_response_openai: unsupported OpenAI model"
        log.info(msg)
        raise HTTPException(status_code=503, detail=msg)


@router.get("/stream/{id}")
async def stream_aichatExchange(id: int = Path(..., gt=0),
                                current_user: UserInDB = Depends(get_current_active_user)) -> StreamingResponse:
    aichat: AiChatDB = await validate_aichat(id, current_user.userid,
                                             UserActionLevel.index('SITEBUG'),
                                             UserAction.index('FAILED_GET_AICHAT'),
                                             f"AIChat {id}, not found")
    return StreamingResponse(get_response_openai(aichat), media_type='text/event-stream')
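For completeness, here is a minimal sketch of a client consuming the /stream/{id} endpoint with httpx. The base URL, port, route prefix, and bearer-token authentication are assumptions for illustration; the gist does not show how the router is mounted or how the site authenticates users, so adapt these details to your deployment.

import asyncio
import httpx

async def consume_stream(aichat_id: int, token: str) -> str:
    """Read the streamed reply chunk by chunk, printing it as it arrives."""
    reply = ""
    async with httpx.AsyncClient(timeout=None) as http:
        async with http.stream(
            "GET",
            f"http://localhost:8000/stream/{aichat_id}",   # assumed base URL and route prefix
            headers={"Authorization": f"Bearer {token}"},  # assumed auth scheme
        ) as response:
            response.raise_for_status()
            # chunks arrive in the order they are yielded by get_response_openai()
            async for chunk in response.aiter_text():
                print(chunk, end="", flush=True)
                reply += chunk
    return reply

if __name__ == "__main__":
    asyncio.run(consume_stream(1, "YOUR_TOKEN"))

Because the server also writes each partial reply to the DB via crud.put_aichat, a client that disconnects mid-stream can still recover whatever portion of the reply had been generated up to that point.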