from langchain_groq import ChatGroq
from langchain_core.prompts import ChatPromptTemplate
from langchain.schema import StrOutputParser
from langchain.schema.runnable import Runnable
from langchain.schema.runnable.config import RunnableConfig
from typing import Dict, Optional
import chainlit as cl
import logging
from chainlit import logger
import chainlit.data as cl_data
from chainlit.data.dynamodb import DynamoDBDataLayer
from chainlit.data.storage_clients import S3StorageClient

# Persist chat history in DynamoDB; store elements (e.g. images) in S3.
storage_client = S3StorageClient(bucket="necodb")
cl_data._data_layer = DynamoDBDataLayer(
    table_name="neco-dynamodb", storage_provider=storage_client,
)
logger.getChild("DynamoDB").setLevel(logging.DEBUG)

@cl.oauth_callback
def oauth_callback(
    provider_id: str,
    token: str,
    raw_user_data: Dict[str, str],
    default_user: cl.User,
) -> Optional[cl.User]:
    # Accept every authenticated user as-is.
    return default_user

@cl.on_chat_start
async def on_chat_start():
    # Send a greeting with an image loaded from a local file path.
    elements = [cl.Image(name="image", display="inline", size="large", path="groq.jpeg")]
    await cl.Message(
        content="Hello there, I am Groq. How can I help you?",
        elements=elements,
    ).send()

    model = ChatGroq(temperature=0, model_name="llama3-70b-8192")
    prompt = ChatPromptTemplate.from_messages(
        [
            (
                "system",
                "You're an intergalactic AWS Machine Learning Engineer like Eric Riddoch.",
            ),
            ("human", "{question}"),
        ]
    )
    # Build the chain and store it in the user session so on_message can stream from it.
    runnable = prompt | model | StrOutputParser()
    cl.user_session.set("runnable", runnable)

@cl.on_message
async def on_message(message: cl.Message):
    runnable = cl.user_session.get("runnable")  # type: Runnable

    msg = cl.Message(content="")
    # Stream the model's tokens into the reply as they arrive.
    async for chunk in runnable.astream(
        {"question": message.content},
        config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
    ):
        await msg.stream_token(chunk)

    await msg.send()