@yorek
Created November 8, 2024 23:27
Azure Logic App + Azure SQL RAG Template
{
  "definition": {
    "$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
    "actions": {
      "Azure_OpenAI_-_Get_an_embedding": {
        "type": "ServiceProvider",
        "inputs": {
          "parameters": {
            "deploymentId": "@parameters('OpenAI_TextEmbedding_Deployment_Identifier_ingesttosql')",
            "input": "@triggerBody()?['question']"
          },
          "serviceProviderConfiguration": {
            "connectionName": "openai",
            "operationId": "getSingleEmbedding",
            "serviceProviderId": "/serviceProviders/openai"
          }
        },
        "runAfter": {}
      },
      "Response": {
        "type": "Response",
        "kind": "Http",
        "inputs": {
          "statusCode": 200,
          "body": "@body('Azure_OpenAI_-_Get_chat_completions')?['content']"
        },
        "runAfter": {
          "Azure_OpenAI_-_Get_chat_completions": [
            "SUCCEEDED"
          ]
        }
      },
"SQL_Server_-_Vector_Search": {
"type": "ServiceProvider",
"description": "Use this query to retrieve vector embeddings from your SQL vector table. Update the query based on your SQL table and schema",
"inputs": {
"parameters": {
"query": "SELECT TOP(2) filename, chunkid, chunk,\n 1-vector_distance('cosine', CAST(@embedding AS VECTOR(1536)), embedding) AS similarity_score,\n vector_distance('cosine', CAST(@embedding AS VECTOR(1536)), embedding) AS distance_score\n FROM dbo.resumedocs\n ORDER BY distance_score ",
"queryParameters": {
"embedding ": "@{body('Azure_OpenAI_-_Get_an_embedding')['embedding']}"
}
},
"serviceProviderConfiguration": {
"connectionName": "sql",
"operationId": "executeQuery",
"serviceProviderId": "/serviceProviders/sql"
}
},
"runAfter": {
"Azure_OpenAI_-_Get_an_embedding": [
"SUCCEEDED"
]
}
},
"Azure_OpenAI_-_Get_chat_completions": {
"type": "ServiceProvider",
"description": "Update the system message for the LLM based on your requirements",
"inputs": {
"parameters": {
"deploymentId": "@parameters('OpenAIChatModel')",
"messages": [
{
"role": "System",
"content": "You are an intelligent & funny assistant who will exclusively answer based on the data provided in the search_results:\n- Use the information from search_results to generate your top 3 responses. If the data is not a perfect match for the user's query, use your best judgment to provide helpful suggestions and include the following format:\n File: {filename}\n Chunk ID: {chunkid}\n Similarity Score: {similarity_score}\n Add a small snippet from the Relevant Text: {chunktext}\n Do not use the entire chunk\n- Avoid any other external data sources.\n- Add a summary about why the candidate maybe a goodfit even if exact skills and the role being hired for are not matching , at the end of the recommendations. Ensure you call out which skills match the description and which ones are missing. If the candidate doesnt have prior experience for the hiring role which we may need to pay extra attention to during the interview process.\n- Add a Microsoft related interesting fact about the technology that was searched \n\nsearch_results = @{body('SQL_Server_-_Vector_Search')}"
},
{
"role": "User",
"content": "@triggerBody()?['question']",
"user": "Divya"
}
],
"temperature": 1
},
"serviceProviderConfiguration": {
"connectionName": "openai",
"operationId": "getChatCompletions",
"serviceProviderId": "/serviceProviders/openai"
}
},
"runAfter": {
"SQL_Server_-_Vector_Search": [
"SUCCEEDED"
]
}
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"triggers": {
"When_a_HTTP_request_is_received": {
"type": "Request",
"kind": "Http",
"inputs": {
"schema": {
"type": "object",
"properties": {
"question": {
"type": "string"
}
}
}
}
}
}
},
"kind": "Stateful"
}
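
The SQL_Server_-_Vector_Search action assumes a table named dbo.resumedocs with filename, chunkid, and chunk columns plus an embedding column stored as the Azure SQL native VECTOR(1536) type. A minimal sketch of a compatible table follows; the column names and the 1536 dimension come from the query above, while the other data types and the primary key are assumptions to adjust to your own schema.

-- Minimal sketch of a table the vector search query can run against.
-- Column names and VECTOR(1536) match the query above; other types and the key are assumptions.
CREATE TABLE dbo.resumedocs
(
    chunkid   INT            NOT NULL PRIMARY KEY,  -- unique id for each chunk
    filename  NVARCHAR(260)  NOT NULL,              -- source document name
    chunk     NVARCHAR(MAX)  NOT NULL,              -- chunk text returned to the LLM
    embedding VECTOR(1536)   NOT NULL               -- must match the embedding model's dimensionality
);

The workflow only reads from this table; populating it with chunk text and embeddings (for example via a separate ingestion workflow) is assumed to happen elsewhere.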
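
To invoke the workflow, POST a JSON body matching the trigger schema (a single question string) to the HTTP trigger's callback URL; the Response action returns the chat completion text with status 200. A sample request body, with an illustrative question:

{
  "question": "Which candidates have experience with Azure SQL and Logic Apps?"
}

The callback URL itself is generated by Logic Apps when the workflow is saved, so it is not part of this template.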