@vman
Created December 7, 2024 08:41
// Required usings for this snippet (System.ClientModel supplies ApiKeyCredential,
// Azure.AI.OpenAI supplies AzureOpenAIClient, OpenAI.Chat supplies the chat types).
using System;
using System.ClientModel;
using System.Collections.Generic;
using System.Text.Json;
using System.Threading.Tasks;
using Azure.AI.OpenAI;
using OpenAI.Chat;

static async Task Main(string[] args)
{
    string endpoint = "<azure-openai-endpoint>";
    string key = "<azure-openai-key>";
    string deploymentName = "gpt-4o";

    var azureOpenAIClient = new AzureOpenAIClient(new Uri(endpoint), new ApiKeyCredential(key));
    // Get the user's question from the console.
    Console.WriteLine("What would you like to search?: ");
    string userQuestion = Console.ReadLine();

    // 1. Call the Azure OpenAI Chat API with the user's question.
    var chatCompletionResponse = await CallOpenAIAPI(userQuestion, deploymentName, azureOpenAIClient);

    // 2. Check whether the Chat API decided that a function call to the MS Graph is needed to answer the question.
    if (chatCompletionResponse.Value.FinishReason == ChatFinishReason.ToolCalls)
    {
        string functionName = chatCompletionResponse.Value.ToolCalls[0].FunctionName;
        BinaryData functionArguments = chatCompletionResponse.Value.ToolCalls[0].FunctionArguments;
        string toolCallId = chatCompletionResponse.Value.ToolCalls[0].Id;

        Console.WriteLine($"Function Name: {functionName}, Params: {functionArguments}");

        if (functionName == "search_microsoft365_documents")
        {
            // 3. When the MS Graph function call needs to be made, the Chat API also provides the parameters to pass to the function.
            var searchParams = JsonSerializer.Deserialize<M365SearchQueryParams>(functionArguments);

            // 4. Call the MS Graph with the parameters provided by the Chat API.
            var functionResponse = await ExecuteMicrosoft365SearchWithGraph(searchParams.searchQuery);
            Console.WriteLine($"Graph Response: {functionResponse}");

            // 5. Call the Chat API again with the function response.
            var functionMessages = new List<OpenAI.Chat.ChatMessage>
            {
                new AssistantChatMessage(new List<ChatToolCall>() { ChatToolCall.CreateFunctionToolCall(toolCallId, functionName, functionArguments) }),
                new ToolChatMessage(toolCallId, functionResponse)
            };
            chatCompletionResponse = await CallOpenAIAPI(userQuestion, deploymentName, azureOpenAIClient, functionMessages);

            // 6. Print the final response from the Chat API.
            Console.WriteLine("------------------");
            Console.WriteLine(chatCompletionResponse.Value.Content[0].Text);
        }
    }
    else
    {
        // If the LLM decided that a function call is not needed, print the final response from the Chat API.
        Console.WriteLine(chatCompletionResponse.Value.Content[0].Text);
    }
}
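
The gist references a CallOpenAIAPI helper and an M365SearchQueryParams type that are not shown. The sketch below is an assumption of what they could look like, not the author's original code: it declares the search_microsoft365_documents tool with ChatTool.CreateFunctionTool, sends the conversation through AzureOpenAIClient.GetChatClient, and appends the tool-call and tool-result messages on the second round trip. The system prompt, the JSON schema wording, and the exact shape of M365SearchQueryParams are guesses inferred from how Main uses them.

// Minimal sketch (assumption, not the original helper): declares the
// search_microsoft365_documents tool and calls the Chat API, optionally
// appending the tool-call/tool-result messages for the second round trip.
static async Task<ClientResult<ChatCompletion>> CallOpenAIAPI(
    string userQuestion,
    string deploymentName,
    AzureOpenAIClient azureOpenAIClient,
    List<OpenAI.Chat.ChatMessage> functionMessages = null)
{
    ChatClient chatClient = azureOpenAIClient.GetChatClient(deploymentName);

    // Describe the MS Graph search function so the model can decide to call it.
    var searchTool = ChatTool.CreateFunctionTool(
        functionName: "search_microsoft365_documents",
        functionDescription: "Search the user's Microsoft 365 documents.",
        functionParameters: BinaryData.FromString("""
        {
          "type": "object",
          "properties": {
            "searchQuery": { "type": "string", "description": "The search query" }
          },
          "required": ["searchQuery"]
        }
        """));

    var options = new ChatCompletionOptions();
    options.Tools.Add(searchTool);

    var messages = new List<OpenAI.Chat.ChatMessage>
    {
        new SystemChatMessage("You are a helpful assistant that can search the user's Microsoft 365 documents."),
        new UserChatMessage(userQuestion)
    };

    // On the second call, include the assistant tool call and the tool result.
    if (functionMessages != null)
    {
        messages.AddRange(functionMessages);
    }

    return await chatClient.CompleteChatAsync(messages, options);
}

// Shape assumed from the deserialization in Main: the property name must match
// the "searchQuery" parameter declared in the tool's JSON schema.
public class M365SearchQueryParams
{
    public string searchQuery { get; set; }
}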
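
ExecuteMicrosoft365SearchWithGraph is also not included in the gist. A minimal sketch follows, assuming the Microsoft Graph .NET SDK v5 and a delegated (interactive) credential; the scopes, entity types, and the way hits are flattened into a string are illustrative assumptions rather than the original implementation.

// Minimal sketch (assumption, not the original helper): queries the Microsoft
// Search API via the Graph SDK and flattens the hit summaries into a plain-text
// string that is sent back to the Chat API as the tool result.
// Requires the Microsoft.Graph and Azure.Identity packages, plus:
// using System.Text;
// using Azure.Identity;
// using Microsoft.Graph;
// using Microsoft.Graph.Models;
// using Microsoft.Graph.Search.Query;
static async Task<string> ExecuteMicrosoft365SearchWithGraph(string searchQuery)
{
    // The Search API needs a delegated token; an interactive sign-in is used here for simplicity.
    var credential = new InteractiveBrowserCredential();
    var graphClient = new GraphServiceClient(credential, new[] { "Sites.Read.All", "Files.Read.All" });

    var requestBody = new QueryPostRequestBody
    {
        Requests = new List<SearchRequest>
        {
            new SearchRequest
            {
                EntityTypes = new List<EntityType?> { EntityType.DriveItem },
                Query = new SearchQuery { QueryString = searchQuery }
            }
        }
    };

    var searchResponse = await graphClient.Search.Query.PostAsQueryPostResponseAsync(requestBody);

    // Flatten the hit summaries into a single string for the tool message.
    var results = new StringBuilder();
    foreach (var response in searchResponse.Value ?? new List<SearchResponse>())
    {
        foreach (var container in response.HitsContainers ?? new List<SearchHitsContainer>())
        {
            foreach (var hit in container.Hits ?? new List<SearchHit>())
            {
                results.AppendLine(hit.Summary);
            }
        }
    }

    return results.ToString();
}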