import json
from threading import Thread
from time import sleep

import gooey_gui as gui
import requests
from decouple import config
from fastapi import FastAPI
from starlette.requests import Request

app = FastAPI()

GOOEY_API_KEY = config("GOOEY_API_KEY")


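# Page handler: collects a plain-text description of the desired bot, kicks off an
# async Gooey "bot generator" run, and embeds the generated bot on the page.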
@gui.route(app, "/pycon")
def pycon_page(request: Request):
    gui.write("# PyCon 2025 Demo")

    description = gui.text_area(
        "#### Requirements\nWhat do you want your bot to do?"
    )

    result, set_result = gui.use_state({})
    status = result.get("status")

    if status in ["standby", "starting", "running"]:
        gui.write("Generating...")
    else:
        if gui.button("✨ Generate Bot"):
            # clear the link to any previous run before starting a new one
            gui.session_state.pop("web_url", None)
            run_bot_generator(description, set_result)
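
    # Link to the underlying Gooey run while it is in progress.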
    web_url = gui.session_state.get("web_url")
    if web_url:
        gui.caption(f"[View Generation URL]({web_url})")

    if not result:
        return
    if status == "failed":
        gui.error(result.get("detail"))
        return

    output_text = result.get("output") and result["output"]["output_text"]
    if not output_text:
        return
    try:
        # merge the generated widget config with the embed settings for this page
        config = json.loads(output_text[0]) | dict(
            integration_id="Kbo", mode="inline", target="#gooey-embed"
        )
    except json.JSONDecodeError:
        gui.error("Failed to parse JSON")
        return
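
    # Container + script tag for the Gooey web widget, loaded from the jsDelivr CDN.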
    gui.html(
        # language=html
        """
        <div id="gooey-embed" class="border rounded mb-5" style="height: 80vh"></div>
        <script id="gooey-embed-script" src="https://cdn.jsdelivr.net/gh/GooeyAI/gooey-web-widget@2/dist/lib.js"></script>
        """
    )
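    # Mount the widget once the page has hydrated and the embed script has loaded.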
    gui.js(
        # language=javascript
        """
        async function loadGooeyEmbed() {
            await window.waitUntilHydrated;
            if (typeof GooeyEmbed === 'undefined') return;
            GooeyEmbed.unmount();
            GooeyEmbed.mount(config);
        }
        const script = document.getElementById("gooey-embed-script");
        if (script) script.onload = loadGooeyEmbed;
        loadGooeyEmbed();
        """,
        config=config,
    )


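# Prompt template: the user's description is substituted for %s, and the model is
# asked to reply with a single JSON object describing the widget branding and the
# copilot payload.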
PROMPT_FORMAT = '''\
Description: """
%s
"""
Respond with the following JSON object:
{
  // branding for the web widget
  branding: {
    // (required)
    name: string,
    byLine: string,
    description: string,
    conversationStarters: string[],
    // (optional)
    fabLabel?: string,
    photoUrl?: string,
    websiteUrl?: string,
    showPoweredByGooey?: boolean,
  },
  // payload for the AI backend
  payload: {
    // (required) instructions for the bot
    bot_script: string
    // (optional) selected LLM model
    selected_model?: string Enum - "gpt_4_1" "gpt_4_1_mini" "gpt_4_1_nano" "gpt_4_5" "o4_mini" "o3" "o3_mini" "o1" "o1_preview" "o1_mini" "gpt_4_o" "gpt_4_o_mini" "gpt_4_o_audio" "gpt_4_o_mini_audio" "chatgpt_4_o" "gpt_4_turbo_vision" "gpt_4_vision" "gpt_4_turbo" "gpt_4" "gpt_4_32k" "gpt_3_5_turbo" "gpt_3_5_turbo_16k" "gpt_3_5_turbo_instruct" "deepseek_r1" "llama4_maverick_17b_128e" "llama4_scout_17b_16e" "llama3_3_70b" "llama3_2_90b_vision" "llama3_2_11b_vision" "llama3_2_3b" "llama3_2_1b" "llama3_1_405b" "llama3_1_70b" "llama3_1_8b" "llama3_70b" "llama3_8b" "pixtral_large" "mistral_large" "mistral_small_24b_instruct" "mixtral_8x7b_instruct_0_1" "gemma_2_9b_it" "gemma_7b_it" "gemini_2_5_pro_preview" "gemini_2_5_flash_preview" "gemini_2_flash_lite" "gemini_2_flash" "gemini_1_5_flash" "gemini_1_5_pro" "gemini_1_pro_vision" "gemini_1_pro" "palm2_chat" "palm2_text" "claude_3_7_sonnet" "claude_3_5_sonnet" "claude_3_opus" "claude_3_sonnet" "claude_3_haiku" "afrollama_v1" "llama3_8b_cpt_sea_lion_v2_1_instruct" "sarvam_2b" "llama_3_groq_70b_tool_use" "llama_3_groq_8b_tool_use" "llama2_70b_chat" "sea_lion_7b_instruct" "llama3_8b_cpt_sea_lion_v2_instruct" "text_davinci_003" "text_davinci_002" "code_davinci_002" "text_curie_001" "text_babbage_001" "text_ada_001"
    // (optional) speech recognition model
    asr_model?: string Enum - "whisper_large_v2" "whisper_large_v3" "gpt_4_o_audio" "gpt_4_o_mini_audio" "gcp_v1" "usm" "deepgram" "azure" "seamless_m4t_v2" "mms_1b_all" "ghana_nlp_asr_v2" "lelapa" "seamless_m4t" "whisper_chichewa_large_v3" "nemo_english" "nemo_hindi" "whisper_hindi_large_v2" "whisper_telugu_large_v2" "vakyansh_bhojpuri"
    // (optional) translation model
    translation_model?: string Enum - "google" "ghana_nlp" "lelapa" "whisper_large_v2" "whisper_large_v3" "seamless_m4t_v2"
    // (optional) user language code
    user_language?: string
    // (optional) text to speech provider
    tts_provider?: string Enum - "GOOGLE_TTS" "ELEVEN_LABS" "UBERDUCK" "BARK" "AZURE_TTS" "OPEN_AI" "GHANA_NLP"
    // (optional) url to a face image for lipsync
    input_face?: string
    // (optional) knowledge base document URLs
    documents?: string[]
  },
}\
'''


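# Submits the description to the bot generator workflow via the async API and
# starts a background thread that polls for the result.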
def run_bot_generator(description: str, set_result):
    payload = dict(
        input_prompt=PROMPT_FORMAT % (description,),
        messages=[],
        response_format_type="json_object",
    )
    # runs https://gooey.ai/copilot/bot-generator-gsvm4rw9tus0/
    response = requests.post(
        "https://api.gooey.ai/v3/video-bots/async/?example_id=gsvm4rw9tus0",
        headers={"Authorization": f"Bearer {GOOEY_API_KEY}"},
        json=payload,
    )
    assert response.ok, response.content
    ret = response.json()
    status_url = ret["status_url"]
    gui.session_state["web_url"] = ret["web_url"]
    Thread(target=poll_result, args=[status_url, set_result]).start()


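# Polls the run's status URL every few seconds, pushing each response into the
# page's state until the run completes or fails.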
def poll_result(status_url: str, set_result):
    while True:
        response = requests.get(
            status_url,
            headers={"Authorization": f"Bearer {GOOEY_API_KEY}"},
        )
        assert response.ok, response.content
        result = response.json()
        set_result(result)
        if result["status"] == "completed":
            print(response.status_code, result)
            break
        elif result["status"] == "failed":
            print(response.status_code, result)
            break
        else:
            sleep(3)
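

# To try this locally (assumptions: the file is saved as main.py; gooey-gui, fastapi,
# uvicorn, requests and python-decouple are installed; GOOEY_API_KEY is set in the
# environment or a .env file), serve the FastAPI app the usual way, e.g.:
#   uvicorn main:app --reload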