Skip to content

Instantly share code, notes, and snippets.

@waqasajaz
Created July 21, 2023 19:00
Show Gist options
  • Save waqasajaz/3a3cebb2b36717d2b9f4b22a31de28c8 to your computer and use it in GitHub Desktop.
Save waqasajaz/3a3cebb2b36717d2b9f4b22a31de28c8 to your computer and use it in GitHub Desktop.
import type { HttpContextContract } from "@ioc:Adonis/Core/HttpContext";
import { chatTypes } from "App/Enums/ChatTypeEunm";
import ChathistorySourceLog from "App/Models/ChathistorySourceLog";
import {
chatDetails,
createChat,
deleteChat,
deleteUserChats,
getChatHistory,
populateChatRequestId,
listChat,
updateChatDetails,
} from "App/Service/chatGptService";
import {
construct_prompt,
getVectors,
get_embedding,
pinecone_query,
} from "App/Service/EmbeddingService";
import { agentChat } from "App/Service/LangchainService";
import { failResponseHandler } from "App/Service/ResponseHandlerService";
import { getSettings } from "App/Service/SettingService";
import ChatValidator from "App/Validators/ChatValidator";
import UpdateChatDetailValidator from "App/Validators/UpdateChatDetailValidator";
import { defaultSettingsKeys, pineconeConfig } from "Config/services";
import { Configuration, OpenAIApi } from "openai"
import fs from "fs"
import Application from '@ioc:Adonis/Core/Application'
import { openAIConfig } from 'Config/services'
import { getCustomToolById } from "App/Service/CustomToolService";
import { generateMessagesHistoryArray } from "App/Service/ChatService";
/**
 * HTTP controller for all chat endpoints: listing, detail, create (the main
 * multi-mode chat dispatcher), update, delete, admin views, and Whisper
 * audio-to-text transcription.
 *
 * Every handler follows the same contract: JSON response, with failures
 * funnelled through `failResponseHandler` so clients always get a
 * `{ status, message, ... }` shape.
 */
export default class ChatGptsController {
  /**
   * Paginated list of the authenticated user's chats.
   * Reads `page` / `page_size` from the request body.
   */
  public async chatListing({ response, request, params }: HttpContextContract) {
    try {
      const { page, page_size } = request.body();
      let result = await listChat(params!.user!.id, page, page_size);
      return response.json(result);
    } catch (error) {
      console.log(error);
      return response.json(await failResponseHandler(error.message, error));
    }
  }

  /**
   * Paginated message history of one chat (`params.chat_id`) for the
   * authenticated user.
   */
  public async chatDetails({ response, request, params }: HttpContextContract) {
    try {
      const { page, page_size } = request.body();
      const result = await chatDetails(
        params.chat_id,
        params.user!.id,
        page,
        page_size
      );
      return response.json(result);
    } catch (error) {
      console.log(error);
      return response.json(await failResponseHandler(error.message, error));
    }
  }

  /**
   * Create a chat message and produce a model response. Dispatches on
   * `chat_type`:
   *  - WEBSEARCH: LangChain agent with web tools.
   *  - ACCENTSEARCH / ACCENT16K / GPT4 / CUSTOMTOOL: optional Pinecone
   *    knowledge-base retrieval + OpenAI completion, with per-source
   *    similarity logging.
   *  - CHATGPT: plain completion, no retrieval.
   *  - SEARCH: retrieval only — returns matching sources, no completion.
   */
  public async create({ response, request, params }: HttpContextContract) {
    try {
      let res: any = {}
      const data = await request.validate(ChatValidator);
      let message = data.message
      // Default to the standard knowledge-base search when no type is given.
      data.chat_type = data.chat_type ? data.chat_type : chatTypes.ACCENTSEARCH
      let user = params.user!
      // Strip punctuation and underscores from the query.
      // FIX: the original regex had no `g` flag, so only the FIRST matching
      // character was removed.
      message = message.replace(/[^\w\s]|_/g, "").trim();

      switch (data.chat_type) {
        case chatTypes.WEBSEARCH:
          res = await agentChat(data, message, user)
          if (res.status) {
            // Promote the assistant's response row to the payload and drop
            // the raw history array from the client response.
            const db_response_object = res.data?.chat_history_data?.filter((chat_history) => chat_history.textType === 'response')[0]
            res.data.id = db_response_object.chatId
            res.data.text = db_response_object.text
            res.data.chat_type = db_response_object.chatType
            delete res.data?.chat_history_data
            return response.json(res)
          }
          else {
            return response.json(res)
          }

        case chatTypes.ACCENTSEARCH:
        case chatTypes.CUSTOMTOOL:
        case chatTypes.ACCENT16K:
        case chatTypes.GPT4: {
          let prompt: any
          let customTool: any
          let related_vectors: any
          let gpt_response_object
          let model
          // Non-default models only for the 16k / GPT-4 variants; otherwise
          // `model` stays undefined and createChat uses its own default.
          if (data.chat_type === chatTypes.ACCENT16K) {
            model = openAIConfig.OPENAI_MODEL_16K
          }
          if (data.chat_type === chatTypes.GPT4) {
            model = openAIConfig.OPENAI_MODEL_GPT4
          }
          if (data.chat_type === chatTypes.CUSTOMTOOL) {
            if (data.tool_id) {
              let customToolData = await getCustomToolById(data.tool_id!)
              if (customToolData.status) {
                customTool = customToolData.data!
              }
            }
          }
          if (customTool?.useKnowledgeBase || data.chat_type === chatTypes.ACCENTSEARCH || data.chat_type === chatTypes.ACCENT16K || data.chat_type === chatTypes.GPT4) {
            // Knowledge-base flow: embed the query, fetch nearest vectors,
            // and build a retrieval-grounded prompt.
            const vector = await get_embedding(message);
            related_vectors = await pinecone_query(vector)
            prompt = await construct_prompt(related_vectors, data.chat_type, customTool)
          } else {
            // Custom tool without a knowledge base: use its system prompt.
            // FIX: the original dereferenced customTool.systemPrompt unguarded
            // and threw an opaque TypeError when the tool lookup failed.
            if (!customTool) {
              throw new Error('Custom tool not found')
            }
            prompt = customTool.systemPrompt
          }
          // `tool_id` is validator-only metadata; never persist it.
          let { tool_id, ...processedData } = data
          const result = await createChat(
            processedData,
            message,
            user,
            prompt?.trim()!,
            model
          )
          if (!result.status) {
            return response.json({
              status: false,
              message: 'ChatGPT is facing some issues. Please Try Again later '
            })
          }
          res = result
          res.sources = []
          let settings = (await getSettings()).data
          // FIX: the response row was previously extracted only when vectors
          // existed, so a non-knowledge-base custom tool crashed on
          // `gpt_response_object.id` below. Extract it (and drop the raw
          // history) unconditionally.
          gpt_response_object = res.data?.chat_history_data?.filter((chat_history) => chat_history.textType === 'response')[0]
          delete res.data?.chat_history_data
          if (related_vectors) {
            let source_data = related_vectors['matches']
            let chatSourceLog: any = []
            // Threshold lookup is loop-invariant — hoisted out of the loop.
            let SIMILARITY_SCORE = (settings.find(element => element.key === defaultSettingsKeys.SIMILARITY_SCORE))
            let threshold = SIMILARITY_SCORE ? Number(SIMILARITY_SCORE.value) : 0.77
            for (const match of source_data) {
              let sourceLog: any = {}
              let source: any = match['metadata']
              source.similarity_score = match.score
              if (!match['metadata'].fileName) {
                // Normalise older vector metadata that used `source` in
                // place of `fileName`.
                source.fileName = source.source
                delete source.source
              }
              sourceLog.user_chat_histories_id = gpt_response_object?.id
              sourceLog.similarity_score = source.similarity_score
              sourceLog.file_name = source.fileName
              sourceLog.text = source.text
              // Only surface / log sources above the configured similarity
              // threshold (defaults to 0.77 when no setting exists).
              if (match.score >= threshold) {
                res.sources.push(
                  source
                )
                chatSourceLog.push(sourceLog)
              }
            }
            await ChathistorySourceLog.createMany(chatSourceLog)
          }
          res.data.chat_type = data.chat_type
          let messages = await generateMessagesHistoryArray(user!.id, data.chat_id, data.chat_type, gpt_response_object?.id)
          res.data.user_prompt = data.message
          if (data.chat_type === chatTypes.CUSTOMTOOL) {
            res.data.system_prompt = customTool.systemPrompt
          } else {
            let system_prompt = (settings.find(element => element.key === defaultSettingsKeys.PERSONALITY_PROMPT))
            res.data.system_prompt = system_prompt ? system_prompt.value : prompt
          }
          res.data.messages = messages.status ? messages.data : []
          return response.json(res)
        }

        case chatTypes.CHATGPT:
          // Plain ChatGPT flow: no retrieval, default model, no prompt.
          res = await createChat(
            data,
            message,
            user
          )
          if (!res.status) {
            return response.json({
              status: false,
              message: 'ChatGPT is facing some issues. Please Try Again later '
            })
          }
          delete res.data?.chat_history_data
          res.data.chat_type = data.chat_type
          return response.json(res)

        case chatTypes.SEARCH: {
          // Retrieval-only flow: return matching sources without running a
          // completion or persisting a chat (hence id: null).
          let sources: any = []
          const vector = await get_embedding(message);
          let pineconeVectors: any = await getVectors(vector, pineconeConfig.PINECONE_QUER_TOP_K_LIMIT_SEARCH_TOOL)
          let source_data = pineconeVectors['matches']
          for (const match of source_data) {
            let source: any = match['metadata']
            source.similarity_score = match.score
            if (!match['metadata'].fileName) {
              source.fileName = source.source
              delete source.source
            }
            sources.push(
              source
            )
          }
          res.status = true
          res.data = {}
          res.data.id = null
          res.data.chat_type = data.chat_type
          res.data.pinecone_data = sources
          return response.json(res)
        }
      }
    } catch (error) {
      console.log(error);
      return response.json(await failResponseHandler(error.message, error));
    }
  }

  /**
   * Update a chat's editable details (validated by
   * UpdateChatDetailValidator) for the authenticated user.
   */
  public async updateChatDetails({
    response,
    request,
    params,
  }: HttpContextContract) {
    try {
      const data = await request.validate(UpdateChatDetailValidator);
      const result = await updateChatDetails(
        params.chat_id,
        data,
        params.user?.id
      );
      return response.json(result);
    } catch (error) {
      console.log(error);
      return response.json(await failResponseHandler(error.message, error));
    }
  }

  /** Delete a single chat belonging to the authenticated user. */
  public async chatDelete({ response, params }: HttpContextContract) {
    try {
      const result = await deleteChat(params.chat_id, params.user!.id);
      return response.json(result);
    } catch (error) {
      console.log(error);
      return response.json(await failResponseHandler(error.message, error));
    }
  }

  /** Delete ALL chats belonging to the authenticated user. */
  public async deleteUserChats({ response, params }: HttpContextContract) {
    try {
      const result = await deleteUserChats(params.user!.id);
      return response.json(result);
    } catch (error) {
      console.log(error);
      return response.json(await failResponseHandler(error.message, error));
    }
  }

  /** Admin view: list chats of an arbitrary user (`params.user_id`). */
  public async chatDetailsAdmin({ response, params }: HttpContextContract) {
    try {
      const result = await listChat(params.user_id);
      return response.json(result);
    } catch (error) {
      console.log(error);
      return response.json(await failResponseHandler(error.message, error));
    }
  }

  /** Admin view: paginated chat history of an arbitrary user. */
  public async chatHistoryAdmin({
    request,
    response,
    params,
  }: HttpContextContract) {
    try {
      const { page, page_size } = request.body();
      const result = await getChatHistory(params.user_id, page, page_size);
      return response.json(result);
    } catch (error) {
      console.log(error);
      return response.json(await failResponseHandler(error.message, error));
    }
  }

  /** One-off maintenance endpoint: backfill chat request ids. */
  public async populateChatRequestId({ response
  }: HttpContextContract) {
    try {
      const result = await populateChatRequestId();
      return response.json(result);
    } catch (error) {
      console.log(error);
      return response.json(await failResponseHandler(error.message, error));
    }
  }

  /**
   * Transcribe an uploaded mp3 (max 25mb) with OpenAI Whisper.
   * The upload is moved to tmp storage, streamed to the API, and always
   * unlinked afterwards — on both the success and the failure path.
   */
  public async audioToText({ request, response }: HttpContextContract) {
    let finalResponse = { status: false, message: "nothing run yet", data: "" }
    try {
      const file = request.file('file', {
        size: '25mb',
        extnames: ['mp3'],
      })
      if (!file) {
        finalResponse.message = "Provide a Mp3 File"
        return response.json(finalResponse)
      }
      if (!file.isValid) {
        finalResponse.message = "Provide a Valid Mp3 File that should not be larger than 25mb"
        return response.json(finalResponse)
      }
      await file.moveToDisk('./')
      let fileName = file.fileName
      let transcription
      const configuration = new Configuration({
        apiKey: openAIConfig.OPENAI_API_KEY
      });
      const openai = new OpenAIApi(configuration);
      try {
        transcription = await openai.createTranscription(
          fs.createReadStream(Application.tmpPath(`./uploads/${fileName}`)),
          "whisper-1",
          undefined,
          undefined,
          undefined,
          undefined,
          {
            // Audio uploads can exceed axios' default body-size limits.
            maxBodyLength: Infinity,
            maxContentLength: Infinity,
          }
        );
      } catch (error) {
        // FIX: guard with `?.` — non-HTTP errors (e.g. stream failures) have
        // no `response` property and previously threw inside this catch.
        if (error.response?.status === 429) {
          finalResponse.message = "We're experiencing exceptionally high demand. Please hang tight as we work on scaling our systems. Please try again"
        } else {
          finalResponse.message = "Unable to transcribe Audio file Try again later"
        }
        finalResponse.status = false
        console.log("error in catch of transcribing in ChatGptsController", error);
        await fs.promises.unlink(Application.tmpPath(`./uploads/${fileName}`))
        return response.json(finalResponse)
      }
      await fs.promises.unlink(Application.tmpPath(`./uploads/${fileName}`))
      finalResponse.status = true
      finalResponse.message = `Transcription Completed of ${file.clientName}`
      finalResponse.data = transcription.data.text
      return response.json(finalResponse)
    } catch (error) {
      finalResponse.status = false;
      finalResponse.message = error.message;
      console.log("======>error in catch block during audio_to_text in ChatGptsController", error)
      return response.json(finalResponse);
    }
  }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment