Semantic Kernel Python Sample
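A minimal chat sample for the Semantic Kernel Python SDK (the pre-1.0 API this gist was written against, April 2023): it keeps per-user chat history in ContextVariables, runs a semantic "ChatBot" function backed by gpt-3.5-turbo, and saves each turn into a volatile memory store so it can later be searched with text-embedding-ada-002 embeddings.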
import asyncio
import uuid

import semantic_kernel as sk
import semantic_kernel.ai.open_ai as sk_oai

KV = {}  # stands in for a database: user_id -> ContextVariables

def init_kernel():
    k = sk.Kernel()
    api_key, org_id = sk.openai_settings_from_dot_env()
    # Chat backend for the ChatBot function, embedding backend for semantic memory
    k.config.add_chat_backend(
        "chat-gpt", sk_oai.OpenAIChatCompletion("gpt-3.5-turbo", api_key, org_id)
    )
    k.config.add_embedding_backend(
        "ada", sk_oai.OpenAITextEmbedding("text-embedding-ada-002", api_key, org_id)
    )
    # Non-persistent, in-process memory store
    k.register_memory_store(memory_store=sk.memory.VolatileMemoryStore())
    return k

def build_chat_function(k):
    prompt_config = sk.PromptTemplateConfig.from_completion_parameters(
        max_tokens=2000, temperature=0.7, top_p=0.8
    )
    # Per-user chat history is injected via {{$chat_history}}
    prompt_template = sk.ChatPromptTemplate(
        """Chat:
{{$chat_history}}
User: {{$user_input}}
ChatBot:""",
        k.prompt_template_engine,
        prompt_config,
    )
    prompt_template.add_system_message("""
You are a ChatBot. Answer the User's questions in a friendly way. Keep each answer to about 10 words.
""")
    function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template)
    return k.register_semantic_function("ChatBot", "Chat", function_config)

async def main() -> None:
    kernel = init_kernel()
    chat_function = build_chat_function(kernel)

    async def chat(user_id, message):
        # Load this user's context (or start a fresh one) from the KV store
        context_vars = KV.get(user_id, sk.ContextVariables(variables=dict(
            user_input="",
            chat_history="",
        )))
        context_vars["user_input"] = message
        talk = f'{user_id}: ' + context_vars["user_input"]
        print(talk)
        answer = await kernel.run_async(chat_function, input_vars=context_vars)
        print('Assistant: ' + answer.result)
        # Save this turn into semantic memory, one collection per user
        await kernel.memory.save_information_async(user_id, talk, str(uuid.uuid4()))
        context_vars["chat_history"] += f"\n{talk}\nAssistant:> {answer.result}\n"
        KV[user_id] = context_vars
        return "\n".join([talk, 'Assistant: ' + answer.result])

    await chat('user-1', 'I ate a tomato omurice today.')
    await chat('user-2', 'I played tennis today.')
    await chat('user-1', 'What did I eat?')
    await chat('user-2', 'What did I eat?')

    # Semantic search over each user's own memory collection
    memories = await kernel.memory.search_async('user-1', "the User's activities today")
    print("(´ー`)...: " + memories[0].text)
    memories = await kernel.memory.search_async('user-2', "the User's activities today")
    print("(´ー`)...: " + memories[0].text)


if __name__ == "__main__":
    asyncio.run(main())
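To run it: at the time of this gist, sk.openai_settings_from_dot_env() read OpenAI credentials from a .env file in the working directory. The key names below are what the early 0.2.x SDK expected (double-check against your installed version), and the script file name is hypothetical.

# .env
OPENAI_API_KEY=sk-...
OPENAI_ORG_ID=org-...

# run
python semantic_kernel_sample.py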