akhaliq (HF staff) committed
Commit a73fe7b · verified · 1 Parent(s): ccda521

Update app.py

Files changed (1)
  1. app.py +68 -57
app.py CHANGED
@@ -1,67 +1,78 @@
- import os
- import google.generativeai as genai
  import gradio as gr

- # Configure the API key
- genai.configure(api_key=os.environ["YOUR_API_KEY"])
-
- # Create the model
- generation_config = {
-     "temperature": 1,
-     "top_p": 0.95,
-     "top_k": 64,
-     "max_output_tokens": 8192,
-     "response_mime_type": "text/plain",
- }

- model = genai.GenerativeModel(
-     model_name="gemini-1.5-flash",
-     generation_config=generation_config,
-     # safety_settings can be adjusted
-     # See https://ai.google.dev/gemini-api/docs/safety-settings
- )

- # Function to handle chat
- def chat_with_model(user_input, history):
-     # Update the history with the user's input
-     history.append({"role": "user", "content": user_input})

-     # Convert history to expected format (list of protos.Content)
-     formatted_history = []
-     for entry in history:
-         # Assuming the message content is a string
-         content = genai.protos.Content(text=entry["content"], role=entry["role"])
-         formatted_history.append(content)

-     # Start or continue the chat session using the formatted history
-     chat_session = model.start_chat(history=formatted_history)
-     response = chat_session.send_message(user_input)
-     # Add the assistant's response to the history
-     history.append({"role": "assistant", "content": response.text})
-     # Format the history for display in Gradio Chatbot
-     messages = []
-     for i in range(0, len(history), 2):
-         user_msg = history[i]["content"]
-         assistant_msg = history[i+1]["content"] if i+1 < len(history) else ""
-         messages.append((user_msg, assistant_msg))
-     return messages, history

- # Create Gradio app
- with gr.Blocks() as demo:
-     gr.Markdown("# Chat with Gemini Model")
      chatbot = gr.Chatbot()
-     state = gr.State([])
      with gr.Row():
-         user_input = gr.Textbox(
-             show_label=False,
-             placeholder="Type your message and press Enter"
-         )
-         send_btn = gr.Button("Send")
-     # Event handlers
-     send_btn.click(chat_with_model, [user_input, state], [chatbot, state])
-     user_input.submit(chat_with_model, [user_input, state], [chatbot, state])
-     # Clear input after sending
-     send_btn.click(lambda: "", None, user_input)
-     user_input.submit(lambda: "", None, user_input)

- demo.launch()

  import gradio as gr
+ import google.generativeai as genai
+ import os

+ # Configure the Gemini API with your API key
+ genai.configure(api_key=os.environ["API_KEY"])

+ # Initialize the Gemini Generative Model
+ model = genai.GenerativeModel("gemini-1.5-flash")

+ def chat_with_gemini(user_input, history):
+     """
+     Generates a response from the Gemini API based on user input and conversation history.

+     Args:
+         user_input (str): The latest message from the user.
+         history (list): The conversation history as a list of tuples.

+     Returns:
+         tuple: The chatbot's reply and the updated history.
+     """
+     try:
+         # Send the latest message to the Gemini API
+         response = model.generate_content(
+             user_input,
+             generation_config=genai.GenerationConfig(
+                 max_output_tokens=150,
+                 temperature=0.7
+             )
+         )
+
+         chatbot_reply = response.text.strip()
+
+         # Append the user input and chatbot reply to the history
+         history.append((user_input, chatbot_reply))
+
+         return history, history
+     except Exception as e:
+         error_message = f"An error occurred: {e}"
+         history.append((user_input, error_message))
+         return history, history

+ with gr.Blocks() as iface:
+     gr.Markdown("# 🗣️ Google Gemini Chatbot")
+
      chatbot = gr.Chatbot()
+
      with gr.Row():
+         with gr.Column(scale=0.85):
+             user_input = gr.Textbox(
+                 placeholder="Type your message here...",
+                 show_label=False
+             )
+         with gr.Column(scale=0.15):
+             send_button = gr.Button("Send")
+
+     # State to hold the conversation history
+     history = gr.State([])
+
+     def respond(message, history_state):
+         """
+         Handles the user message, generates a response, and updates the conversation history.
+
+         Args:
+             message (str): The user's message.
+             history_state (list): The current conversation history.
+
+         Returns:
+             tuple: Updated conversation history for display.
+         """
+         updated_history, new_history = chat_with_gemini(message, history_state)
+         return updated_history, new_history
+
+     send_button.click(respond, inputs=[user_input, history], outputs=[chatbot, history])
+     user_input.submit(respond, inputs=[user_input, history], outputs=[chatbot, history])

+ if __name__ == "__main__":
+     iface.launch()
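
Note on trying the updated handler outside Gradio: the new app.py reads the key from the API_KEY environment variable (renamed from YOUR_API_KEY) and calls genai.configure at import time, so the variable must be set before the module is imported. A minimal smoke-test sketch, assuming google-generativeai and gradio are installed and the placeholder key below is replaced with a real one:

# Minimal sketch: exercise chat_with_gemini directly, outside the Gradio UI.
# Assumes API_KEY is exported (placeholder shown here) before app.py is imported,
# because app.py calls genai.configure(...) at import time.
import os

os.environ.setdefault("API_KEY", "your-gemini-api-key")  # placeholder value

from app import chat_with_gemini  # iface.launch() is guarded by __main__, so importing does not start the server

history = []  # same tuple-based history that gr.State([]) holds
display, history = chat_with_gemini("Hello, Gemini!", history)
for user_msg, bot_msg in display:  # each entry is a (user, assistant) pair
    print(f"user: {user_msg}\nassistant: {bot_msg}")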