Created October 17, 2023 07:46
Simple chat using dolphin-2.1-mistral-7b with a 16k context window, streamlit and llama_cpp
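For reference, the Dolphin models expect the ChatML prompt format, which the script below assembles by hand. A single exchange looks roughly like this (illustrative values, not output from the script):

<|im_start|>system
You are Dolphin, a helpful AI assistant.<|im_end|>
<|im_start|>user
Hello!<|im_end|>
<|im_start|>assistant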
# -*- coding: utf-8 -*-
"""
Spyder Editor

@author: thekitchenscientist
"""
# Import the libraries
import streamlit as st
from llama_cpp import Llama

# Load the GGUF model with a 16k context window; a raw string keeps the
# backslashes in the Windows path from being read as escape sequences
llm = Llama(model_path=r"C:\Models\dolphin-2.1-mistral-7b.Q5_K_M.gguf", n_ctx=16384)
# Initialize variables in session state
if "chat_history_list" not in st.session_state:
    st.session_state.chat_history_list = []
if "chat_prompt_list" not in st.session_state:
    st.session_state.chat_prompt_list = []
# Build the ChatML prompt from the stored history, call the model and
# return the assistant's reply as a string
def generate_reply(system_prompt_text, user_prompt_text):
    # Rebuild the running prompt from the stored turns
    history_template = ""
    for chat in st.session_state.chat_prompt_list:
        history_template = history_template + chat + "\n"
    # ChatML system message; its closing <|im_end|> tag is supplied by the
    # start of the user template below
    system_template = f"""<|im_start|>system
{system_prompt_text}"""
    user_template = f"""<|im_end|>
<|im_start|>user
{user_prompt_text}<|im_end|>
<|im_start|>assistant
"""
    if len(st.session_state.chat_prompt_list) == 0:
        # First turn: the prompt is just the system message plus the user turn
        st.session_state.chat_prompt_list.append(system_template)
        output = llm(system_template + user_template, stop=["<|im_end|>"], echo=False, stream=False, max_tokens=250)
    else:
        # Later turns: prepend the accumulated history instead
        output = llm(history_template + user_template, stop=["<|im_end|>"], echo=False, stream=False, max_tokens=250)
    system_reply = output['choices'][0]['text']
    # Store the turn so it is included in the next prompt
    st.session_state.chat_prompt_list.append(user_template)
    st.session_state.chat_prompt_list.append(system_reply)
    return system_reply
system_name = "Dolphin AI"
system_prompt = """You are Dolphin, a helpful AI assistant. Try to give helpful, concise responses."""

system_name_text = st.text_input(label="Bot Name", value=system_name, label_visibility="collapsed")
system_prompt_text = st.text_area(label="System Prompt", value=system_prompt, height=100)
user_prompt_text = st.text_input(label="Enter your chat:")
if user_prompt_text and system_prompt_text:
    system_reply = generate_reply(system_prompt_text, user_prompt_text)
    st.session_state.chat_history_list.append("\nYou: " + user_prompt_text)
    # Use the editable bot name from the text input, not the static default
    st.session_state.chat_history_list.append("\n" + system_name_text + ": " + system_reply)
# Use st.text_area with the value parameter to display the conversation,
# newest messages at the top
chat_history = ""
for chat in reversed(st.session_state.chat_history_list):
    chat_history = chat_history + chat
st.text_area(label="Chat", value=chat_history, height=1000)
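To try the gist locally, save the script (the filename chat.py below is just a placeholder) and launch it through Streamlit after installing the two dependencies, for example:

pip install streamlit llama-cpp-python
streamlit run chat.py

Adjust model_path to wherever your GGUF file actually lives; the C:\Models location in the script is specific to the author's machine.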