Demos a Llama 2 agent
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "82e7374e-1c35-44ae-90b5-f20e21ab62b1",
"metadata": {},
"outputs": [],
"source": [
"from llama_cpp import Llama\n",
"import re\n",
"import json\n",
"from IPython.display import display, HTML"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "e3be0971-7abb-4c96-872c-e261ce6d5d5c",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"AVX = 1 | AVX2 = 1 | AVX512 = 0 | AVX512_VBMI = 0 | AVX512_VNNI = 0 | FMA = 1 | NEON = 0 | ARM_FMA = 0 | F16C = 1 | FP16_VA = 0 | WASM_SIMD = 0 | BLAS = 1 | SSE3 = 1 | VSX = 0 | \n"
]
}
],
"source": [
"# Create the model using Llama 2 7B Chat (runs on CPU).\n",
"# The GGML .bin was downloaded from: https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGML\n",
"# The original Llama 2 weights are also available from: https://ai.meta.com/llama/\n",
"llm = Llama(model_path=\"./llama-2-7b-chat.ggmlv3.q8_0.bin\")"
]
},
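{
"cell_type": "code",
"execution_count": null,
"id": "added-constructor-options-sketch",
"metadata": {},
"outputs": [],
"source": [
"# A hedged sketch (left commented out so the 7B model is not loaded twice):\n",
"# the same constructor with a few llama-cpp-python options spelled out.\n",
"# The values below are illustrative assumptions, not the settings used above.\n",
"# Note: GGML .bin files like this one require an older llama-cpp-python;\n",
"# newer releases of llama.cpp have moved to the GGUF format.\n",
"#\n",
"# llm = Llama(\n",
"#     model_path=\"./llama-2-7b-chat.ggmlv3.q8_0.bin\",\n",
"#     n_ctx=2048,     # context window; the default is small for a long system prompt\n",
"#     n_threads=8,    # CPU threads to use for inference\n",
"#     seed=0,         # fix the sampling seed for repeatable demos\n",
"# )"
]
},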
{
"cell_type": "code",
"execution_count": 3,
"id": "9319d21e-351a-4118-970e-d4dee5d75a6e",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{'id': 'cmpl-776feafd-95fb-451a-9db9-5388e4f40024',\n",
" 'object': 'text_completion',\n",
" 'created': 1692903378,\n",
" 'model': './llama-2-7b-chat.ggmlv3.q8_0.bin',\n",
" 'choices': [{'text': '\\n\\nThe days of the week in order from Monday to Sunday are:\\n\\n1. Monday\\n2. Tuesday\\n3. Wednesday\\n4. Thursday\\n5. Friday\\n6. Saturday\\n7. Sunday',\n",
" 'index': 0,\n",
" 'logprobs': None,\n",
" 'finish_reason': 'stop'}],\n",
" 'usage': {'prompt_tokens': 11, 'completion_tokens': 50, 'total_tokens': 61}}"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"llm(\"What are the days of the week in order?\")"
]
},
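{
"cell_type": "code",
"execution_count": null,
"id": "added-generation-params-sketch",
"metadata": {},
"outputs": [],
"source": [
"# A minimal sketch (not part of the recorded run) of passing generation\n",
"# parameters to the completion call. max_tokens, temperature, stop and echo\n",
"# are standard llama-cpp-python arguments; the values here are illustrative.\n",
"output = llm(\n",
"    \"Q: Name the planets in the solar system. A: \",\n",
"    max_tokens=64,      # cap the completion length\n",
"    temperature=0.2,    # lower temperature for a more deterministic answer\n",
"    stop=[\"Q:\"],        # stop if the model starts a new question\n",
"    echo=False          # do not repeat the prompt in the output\n",
")\n",
"print(output[\"choices\"][0][\"text\"])"
]
},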
{
"cell_type": "code",
"execution_count": 4,
"id": "f72c4723-cd1c-4c62-8c7e-49cafce07777",
"metadata": {},
"outputs": [],
"source": [
"# Create a prompt template that uses an engineered system prompt.\n",
"\n",
"# Based on:\n",
"# https://www.pinecone.io/learn/llama-2/\n",
"# and\n",
"# https://docs.langchain.com/docs/components/agents\n",
"\n",
"prompt_template = '''<s>[INST] <<SYS>>\n",
"Assistant is an expert JSON builder designed to assist with a wide range of tasks.\n",
"\n",
"Assistant is able to trigger actions for User by responding with JSON strings that contain \"action\" and \"action_input\" parameters.\n",
"\n",
"Actions available to Assistant are:\n",
"\n",
"- \"set_room_color\": Useful for when Assistant is asked to set the color of User's lighting.\n",
" - To use the set_room_color tool, Assistant should respond like so:\n",
" {{\"action\": \"set_room_color\", \"action_input\": \"#FF0000\"}}\n",
"\n",
"Here are some previous conversations between the Assistant and User:\n",
"\n",
"User: Hey how are you today?\n",
"Assistant: I'm good thanks, how are you?\n",
"User: Can you set the color of my room to be red?\n",
"Assistant: {{\"action\": \"set_room_color\", \"action_input\": \"#FF0000\"}}\n",
"User: That looks great, but can you set room color to green instead?\n",
"Assistant: {{\"action\": \"set_room_color\", \"action_input\": \"#00FF00\"}}\n",
"User: Maybe my room would look better if the color was blue.\n",
"Assistant: {{\"action\": \"set_room_color\", \"action_input\": \"#0000FF\"}}\n",
"User: Please set my room lighting to the color of a tree.\n",
"Assistant: {{\"action\": \"set_room_color\", \"action_input\": \"#31D45B\"}}\n",
"User: Thanks, Bye!\n",
"Assistant: See you later.\n",
"<</SYS>>\n",
"\n",
"{0}[/INST]'''"
]
},
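{
"cell_type": "code",
"execution_count": null,
"id": "added-template-format-check",
"metadata": {},
"outputs": [],
"source": [
"# Quick check of the template wiring (illustrative; not part of the recorded run).\n",
"# The doubled braces {{ }} above are how str.format() escapes literal { and },\n",
"# so only the {0} placeholder is substituted when the template is filled in.\n",
"print(prompt_template.format(\"User: Can you set the color of my room to be red?\"))"
]
},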
{
"cell_type": "code",
"execution_count": 5,
"id": "1279aff0-48d9-4587-8fdc-6c0800cb6133",
"metadata": {},
"outputs": [],
"source": [
"def set_room_color(hex_color):\n",
"    # We would call our smart-home API here; for the demo, just draw a colored square.\n",
"    display(HTML('<div style=\"width: 100px; height: 100px; background-color: ' + hex_color + '\"></div>'))"
]
},
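{
"cell_type": "code",
"execution_count": null,
"id": "added-set-room-color-check",
"metadata": {},
"outputs": [],
"source": [
"# Quick manual check of the helper (illustrative; not part of the recorded run):\n",
"# render a red square without involving the model at all.\n",
"set_room_color(\"#FF0000\")"
]
},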
{
"cell_type": "code",
"execution_count": 6,
"id": "7ec49b3a-74ca-4714-b591-43c76076add3",
"metadata": {},
"outputs": [],
"source": [
"def process_command(command):\n",
"    # Put the user command into the prompt (a future version could re-inject the whole chat history here)\n",
"    prompt = prompt_template.format(\"User: \" + command)\n",
"    # Send the prompt to the model\n",
"    output = llm(prompt, stop=[\"User:\"])\n",
"    response = output['choices'][0]['text']\n",
"\n",
"    # Try to find JSON in the response\n",
"    try:\n",
"        # Extract JSON from the model response by finding the first and last braces {}\n",
"        first_brace_index = response.index(\"{\")\n",
"        last_brace_index = len(response) - response[::-1].index(\"}\")\n",
"        json_string = response[first_brace_index:last_brace_index]\n",
"        response_json = json.loads(json_string)\n",
"        if response_json['action'] == 'set_room_color':\n",
"            set_room_color(response_json['action_input'])\n",
"            return 'Room color set to ' + response_json['action_input'] + '.'\n",
"    except Exception as e:\n",
"        print(e)\n",
"    # No JSON found; just return the raw response\n",
"    return response"
]
},
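{
"cell_type": "code",
"execution_count": null,
"id": "added-regex-extraction-sketch",
"metadata": {},
"outputs": [],
"source": [
"# A hedged alternative to the brace-index slicing above, using the re module\n",
"# imported at the top. extract_action_json is a hypothetical helper (nothing\n",
"# else in this notebook calls it) and it assumes the model emits a single,\n",
"# flat JSON object with no nested braces.\n",
"def extract_action_json(response):\n",
"    match = re.search(r\"\\{.*?\\}\", response, re.DOTALL)\n",
"    if match is None:\n",
"        return None\n",
"    try:\n",
"        return json.loads(match.group(0))\n",
"    except json.JSONDecodeError:\n",
"        return None\n",
"\n",
"extract_action_json('Assistant: {\"action\": \"set_room_color\", \"action_input\": \"#FF0000\"}')"
]
},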
{
"cell_type": "code",
"execution_count": 7,
"id": "c5d21d64-7000-4e22-801f-fce6c831ec00",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Llama.generate: prefix-match hit\n"
]
},
{
"data": {
"text/html": [
"<div style=\"width: 100px; height: 100px; background-color: #FFA07A\"></div>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"'Room color set to #FFA07A.'"
]
},
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"process_command(\"Can you make my room lighting orange please?\")"
]
},
{
"cell_type": "code",
"execution_count": 8,
"id": "075d87cf-bb59-4461-93bd-68cea780c304",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Llama.generate: prefix-match hit\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"substring not found\n"
]
},
{
"data": {
"text/plain": [
"' Hello! My name is Assistant. How can I assist you today? 😊'"
]
},
"execution_count": 8,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"process_command(\"What is your name?\")"
]
},
{
"cell_type": "code",
"execution_count": 9,
"id": "4a54b933-e408-4a6a-8115-f9bee860592d",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Llama.generate: prefix-match hit\n"
]
},
{
"data": {
"text/html": [
"<div style=\"width: 100px; height: 100px; background-color: #FFC900\"></div>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"'Room color set to #FFC900.'"
]
},
"execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"process_command(\"Please set the lights to a happy color.\")"
]
},
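{
"cell_type": "code",
"execution_count": null,
"id": "added-chat-loop-sketch",
"metadata": {},
"outputs": [],
"source": [
"# A possible interactive wrapper around the agent (sketch; not executed in this\n",
"# run). chat_loop is a hypothetical helper that keeps feeding typed commands to\n",
"# process_command until the user types 'quit' or 'exit'.\n",
"def chat_loop():\n",
"    while True:\n",
"        command = input(\"You: \")\n",
"        if command.strip().lower() in ('quit', 'exit'):\n",
"            return\n",
"        print('Agent:', process_command(command))"
]
},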
{
"cell_type": "code",
"execution_count": null,
"id": "525e4520-1968-43c8-9eab-a65afbe04f28",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.4"
}
},
"nbformat": 4,
"nbformat_minor": 5
}