rudra0410hf committed · verified
Commit bb8b382 · 1 Parent(s): 8425f69

Upload 4 files

Files changed (4)
  1. .env +1 -0
  2. .gitignore +15 -0
  3. main.py +53 -0
  4. requirements.txt +3 -0
.env ADDED
@@ -0,0 +1 @@
+ GOOGLE_API_KEY=AIzaSyA3a4WthBzGofQ_zGto2vxTdYuHtPpOBCA
.gitignore ADDED
@@ -0,0 +1,15 @@
+ # Python
+ *.pyc
+ *.pyo
+ __pycache__/
+
+ # Environment Variables
+ .env
+
+ # Streamlit
+ .streamlit/
+
+ # Virtual Environments
+ venv/
+ .env/
+ *.venv
main.py ADDED
@@ -0,0 +1,53 @@
+ import os
+
+ import streamlit as st
+ from dotenv import load_dotenv
+ import google.generativeai as gen_ai
+
+ # Load environment variables
+ load_dotenv()
+
+ # Configure Streamlit page settings
+ st.set_page_config(
+     page_title="Chat with Gemini-Pro!",
+     page_icon=":brain:",  # Favicon emoji
+     layout="centered",  # Page layout option
+ )
+
+ GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
+
+ # Set up the Google Gemini AI model
+ gen_ai.configure(api_key=GOOGLE_API_KEY)
+ model = gen_ai.GenerativeModel('gemini-2.0-flash-exp')
+
+ # Translate roles between Gemini and Streamlit terminology
+ def translate_role_for_streamlit(user_role):
+     if user_role == "model":
+         return "assistant"
+     else:
+         return user_role
+
+ # Initialize the chat session in Streamlit if not already present
+ if "chat_session" not in st.session_state:
+     st.session_state.chat_session = model.start_chat(history=[])
+
+ # Display the chatbot's title on the page
+ st.title("🤖 Gemini-Pro Boty😎")
+
+ # Display the chat history
+ for message in st.session_state.chat_session.history:
+     with st.chat_message(translate_role_for_streamlit(message.role)):
+         st.markdown(message.parts[0].text)
+
+ # Input field for the user
+ user_prompt = st.chat_input("Ask Gemini-pro.. ")
+ if user_prompt:
+     # Add the user's message to the chat and display it
+     st.chat_message("user").markdown(user_prompt)
+
+     # Send the user's message to Gemini and get the response
+     gemini_response = st.session_state.chat_session.send_message(user_prompt)
+
+     # Display Gemini's response
+     with st.chat_message("assistant"):
+         st.markdown(gemini_response.text)
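
For reference, a minimal sketch of the google-generativeai chat flow that main.py relies on, run outside Streamlit. It assumes GOOGLE_API_KEY is already set in the environment and reuses the same model name and calls (configure, GenerativeModel, start_chat, send_message, history) as the file above; it is an illustration, not part of this commit.

import os

import google.generativeai as gen_ai

# Configure the client with the key from the environment (assumption: key is exported)
gen_ai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
model = gen_ai.GenerativeModel('gemini-2.0-flash-exp')

# start_chat keeps a running history, the same object main.py stores in st.session_state
chat = model.start_chat(history=[])
response = chat.send_message("Hello!")
print(response.text)

# Each history entry carries a role ("user" or "model") and text parts,
# which main.py maps to Streamlit's "user"/"assistant" chat roles.
for message in chat.history:
    print(message.role, message.parts[0].text)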
requirements.txt ADDED
@@ -0,0 +1,3 @@
+ python-dotenv
+ google-generativeai
+ streamlit
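
To try this commit locally, install the three dependencies with pip install -r requirements.txt, provide GOOGLE_API_KEY via a .env file (as above) or the environment, and launch the app with streamlit run main.py (standard Streamlit CLI usage, not spelled out in this commit).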