# mohsinmubaraksk — Update app.py (commit 12129f8, verified)
import gradio as gr
from groq import Groq
import os
from dotenv import load_dotenv
load_dotenv()
# Initialize the Groq client
# API key comes from the API_KEY environment variable (loaded from .env above).
# NOTE(review): os.getenv returns None if unset — the client is still built and
# calls fail later at request time.
client = Groq(api_key=os.getenv("API_KEY"))
# Synchronous function to handle responses
def generate_response(prompt, history, temperature=0.5, max_tokens=1024, top_p=1.0):
    """
    Generate a chat completion synchronously using the Groq client.

    Args:
        prompt: The latest user message.
        history: Prior turns as (user_input, bot_response) pairs, re-sent each
            call because the chat API is stateless.
        temperature: Sampling temperature (0.0-1.0).
        max_tokens: Maximum number of tokens to generate.
        top_p: Nucleus-sampling probability cutoff (0.0-1.0).

    Returns:
        The assistant's reply as a string, or an ``"Error: ..."`` string if
        anything raises — errors are returned, not raised, so the Gradio UI
        can display them instead of crashing.
    """
    try:
        # Rebuild the full conversation for the stateless chat-completions API.
        messages = [{"role": "system", "content": "You are a helpful assistant."}]
        for user_input, bot_response in history:
            messages.append({"role": "user", "content": user_input})
            messages.append({"role": "assistant", "content": bot_response})
        messages.append({"role": "user", "content": prompt})

        # Request a streamed completion from Groq.
        stream = client.chat.completions.create(
            messages=messages,
            model="llama3-8b-8192",
            temperature=temperature,
            max_tokens=max_tokens,
            top_p=top_p,
            stream=True,
        )

        # Accumulate chunk text, skipping chunks whose delta content is None
        # (e.g. the final stop chunk). The previous str(...) cast appended the
        # literal string "None" to every reply.
        parts = []
        for chunk in stream:
            content = chunk.choices[0].delta.content
            if content is not None:
                parts.append(content)
        return "".join(parts)
    except Exception as e:
        # Surface any failure (auth, network, bad params) in the chat window.
        return f"Error: {str(e)}"
# Optional generation controls shown in the UI; Gradio passes these
# positionally to generate_response as (temperature, max_tokens, top_p).
additional_inputs = [
    gr.Slider(label="Temperature", minimum=0.0, maximum=1.0, value=0.5, step=0.05),
    gr.Slider(label="Max Tokens", minimum=1, maximum=2048, value=1024, step=64),
    gr.Slider(label="Top-p", minimum=0.0, maximum=1.0, value=1.0, step=0.05),
]
# Define the Gradio chat UI, wiring generate_response to the chatbox and the
# three sliders above to its temperature / max_tokens / top_p parameters.
interface = gr.ChatInterface(
    fn=generate_response,
    chatbot=gr.Chatbot(show_label=False),
    additional_inputs=additional_inputs,
    title="Sync APX Copilot Chatbot 🦜",
    description="An AI chatbot powered by Groq for synchronous responses.",
)
# Launch the Gradio app.
# show_api=False hides the auto-generated API docs; share=True requests a
# public tunnel URL (a no-op warning on Hugging Face Spaces).
interface.launch(show_api=False, share=True)