File size: 1,977 Bytes
79a44da
d84fafa
887493d
d5fe603
 
 
 
d84fafa
887493d
90bdb1a
d84fafa
 
143b55f
d84fafa
143b55f
 
db7045a
 
 
 
 
 
143b55f
d84fafa
 
db7045a
 
 
 
 
 
143b55f
 
d84fafa
143b55f
d84fafa
e33768e
d84fafa
db7045a
143b55f
d84fafa
143b55f
db7045a
143b55f
db7045a
 
 
79a44da
 
db7045a
 
d84fafa
143b55f
79a44da
d84fafa
 
db7045a
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
import gradio as gr
from groq import Groq
import os
from dotenv import load_dotenv  # fixed: python-dotenv exports load_dotenv, not load_env

# Load environment variables (e.g. API_KEY) from a local .env file.
load_dotenv()

# Initialize the Groq client with the key from the environment.
# NOTE(review): the conventional variable name is GROQ_API_KEY — confirm the
# deployment's .env really uses API_KEY before renaming.
client = Groq(api_key=os.getenv("API_KEY"))

# Synchronous function to handle responses
def generate_response(prompt, history, temperature=0.5, max_tokens=1024, top_p=1.0):
    """
    Generate a chat completion synchronously via the Groq API.

    Args:
        prompt: The latest user message.
        history: List of (user_input, bot_response) pairs from earlier turns,
            as supplied by gr.ChatInterface.
        temperature: Sampling temperature passed through to the API.
        max_tokens: Maximum number of tokens to generate.
        top_p: Nucleus-sampling probability mass.

    Returns:
        The assistant's full reply as a string, or an "Error: ..." string
        if the API call fails.
    """
    try:
        # Rebuild the message list the API expects from the Gradio history.
        messages = [{"role": "system", "content": "You are a helpful assistant."}]
        for user_input, bot_response in history:
            messages.append({"role": "user", "content": user_input})
            messages.append({"role": "assistant", "content": bot_response})
        messages.append({"role": "user", "content": prompt})

        # Call the Groq API with streaming enabled.
        stream = client.chat.completions.create(
            messages=messages,
            model="llama3-8b-8192",
            temperature=temperature,
            max_tokens=max_tokens,
            top_p=top_p,
            stream=True,
        )

        # Accumulate the streamed deltas. The terminal chunk's delta.content
        # is None; the old str(...) cast appended a literal "None" to every
        # reply, so skip falsy content instead.
        output = ""
        for chunk in stream:
            content = chunk.choices[0].delta.content
            if content:
                output += content
        return output

    except Exception as e:
        # Surface the failure in the chat UI rather than crashing the app.
        return f"Error: {str(e)}"

# Sliders rendered under the chat box for tuning the generation parameters.
additional_inputs = [
    gr.Slider(minimum=0.0, maximum=1.0, step=0.05, value=0.5, label="Temperature"),
    gr.Slider(minimum=1, maximum=2048, step=64, value=1024, label="Max Tokens"),
    gr.Slider(minimum=0.0, maximum=1.0, step=0.05, value=1.0, label="Top-p"),
]

# Chat UI wired to the synchronous response generator above.
interface = gr.ChatInterface(
    generate_response,
    chatbot=gr.Chatbot(show_label=False),
    additional_inputs=additional_inputs,
    title="Sync APX Copilot Chatbot 🦜",
    description="An AI chatbot powered by Groq for synchronous responses.",
)

# Start the app: share=True exposes a public link, show_api=False hides the REST docs.
interface.launch(show_api=False, share=True)