import asyncio

import openai


async def dispatch_openai_requests(llm_kwargs, prompts):
    """Dispatches requests to the OpenAI API asynchronously.

    Args:
        llm_kwargs: Dictionary with options such as max_tokens, temperature, etc.
        prompts: List of prompt strings to send to the OpenAI Completion API.

    Returns:
        List of responses from the OpenAI API.
    """
    async_responses = []
    for x in prompts:
        kwargs = llm_kwargs.copy()
        kwargs.update(dict(prompt=x))
        # acreate is the async variant of Completion.create (openai<1.0 SDK).
        async_responses.append(openai.Completion.acreate(**kwargs))
    return await asyncio.gather(*async_responses)


if __name__ == '__main__':
    # First, set up openai (API key, etc.).
    # ...

    # Set up your prompts.
    prompts = ["translate from english to russian: ...", "where is the best coffee in nyc?"]

    # Then send multiple requests concurrently.
    llm_kwargs = ...
    responses = asyncio.run(dispatch_openai_requests(llm_kwargs, prompts))

    # Finally, extract the response data, such as the logprobs.
    logprobs = [x.choices[0].logprobs.token_logprobs for x in responses]
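
For context, the setup and llm_kwargs placeholders above might be filled in roughly as follows. This is a minimal sketch assuming the legacy openai<1.0 SDK (where openai.Completion.acreate exists); the environment-variable name, model name, and parameter values are illustrative assumptions, not part of the gist. Note that logprobs must be requested explicitly for choices[0].logprobs to be populated.

import os

import openai

# Assumption: read the API key from an environment variable.
openai.api_key = os.environ["OPENAI_API_KEY"]

# Assumption: example Completion parameters; swap in whatever model/settings you use.
llm_kwargs = dict(
    model="text-davinci-003",  # illustrative completions-style model
    max_tokens=64,
    temperature=0.0,
    logprobs=1,  # needed so token_logprobs is returned
)

Since asyncio.gather launches every request at once, for large prompt lists you may want to batch the prompts or bound concurrency (e.g., with an asyncio.Semaphore) to stay within the API's rate limits.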