A-New-Day-001 committed on
Commit
6b691d6
·
1 Parent(s): 468647b

Delete screens

Browse files
screens/__pycache__/chat_bot.cpython-311.pyc DELETED
Binary file (10.7 kB)
 
screens/__pycache__/chat_bot_2.cpython-311.pyc DELETED
Binary file (10.2 kB)
 
screens/__pycache__/search.cpython-311.pyc DELETED
Binary file (17.9 kB)
 
screens/chat_bot.py DELETED
@@ -1,187 +0,0 @@
1
- import streamlit as st
2
- #Import library
3
- import yaml
4
- #load config.yml and parse into variables
5
- with open("config.yml", "r") as ymlfile:
6
- cfg = yaml.safe_load(ymlfile)
7
- _BARD_API_KEY = cfg["API_KEY"]["Bard"]
8
- main_path = cfg["LOCAL_PATH"]["main_path"]
9
- chat_context_length = cfg["CHAT"]["chat_context_length"]
10
- model_name = cfg["EMBEDDINGS"]["HuggingFaceEmbeddings"]["model_name"]
11
- model_kwargs = cfg["EMBEDDINGS"]["HuggingFaceEmbeddings"]["model_kwargs"]
12
- chunk_size = cfg["CHUNK"]["chunk_size"]
13
- chunk_overlap = cfg["CHUNK"]["chunk_overlap"]
14
-
15
- from langchain.vectorstores import Chroma
16
- import streamlit as st
17
- from langchain.embeddings import HuggingFaceEmbeddings
18
- from langchain.chains import ConversationalRetrievalChain
19
- from langchain.prompts.chat import ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate
20
- # Bard
21
- from bardapi import Bard
22
- from typing import Any, List, Mapping, Optional
23
- from langchain.llms.base import LLM
24
- from langchain.callbacks.manager import CallbackManagerForLLMRun
25
-
26
- from streamlit_feedback import streamlit_feedback
27
-
28
-
29
- #define Bard
30
- class BardLLM(LLM):
31
-
32
- @property
33
- def _llm_type(self) -> str:
34
- return "custom"
35
-
36
- def _call(
37
- self,
38
- prompt: str,
39
- stop: Optional[List[str]] = None,
40
- run_manager: Optional[CallbackManagerForLLMRun] = None,
41
- ) -> str:
42
- response = Bard(token=_BARD_API_KEY).get_answer(prompt)['content']
43
- return response
44
-
45
- @property
46
- def _identifying_params(self) -> Mapping[str, Any]:
47
- """Get the identifying parameters."""
48
- return {}
49
-
50
-
51
-
52
- def load_embeddings():
53
- embeddings = HuggingFaceEmbeddings(model_name=model_name, model_kwargs=model_kwargs)
54
- chroma_index = Chroma(persist_directory=main_path+"/vectorstore/chroma_db", embedding_function=embeddings)
55
- print("Successfully loading embeddings and indexing")
56
- return chroma_index
57
-
58
-
59
-
60
- def ask_with_memory(vector_store, question, chat_history_1=[], document_description=""):
61
-
62
- llm=BardLLM()
63
- retriever = vector_store.as_retriever( # now the vs can return documents
64
- search_type='similarity', search_kwargs={'k': 3})
65
-
66
- general_system_template = f"""
67
- You are a professional consultant at a real estate consulting company, providing consulting services \
68
- to customers on real estate development strategies, real estate news and real estate law.\
69
- Your role is to communicate with customer, then interact with them about their concerns about real estates.\
70
- Once the customer has been provided their question,\
71
- then you obtain some documents about real estate laws or real estate news related to their question.\
72
- Then you will examine these documents .\
73
- You must provide the answer based on these documents which means\
74
- using only the heading and piece of context to answer the questions at the end.\
75
- If you don't know the answer just say that you don't know, don't try to make up an answer. \
76
- If the question is not in the field of real estate , just answer that you do not know. \
77
- You respond in a short, very conversational friendly style.\
78
- Answer only in Vietnamese\
79
- ----
80
- HEADING: ({document_description})
81
- CONTEXT: {{context}}
82
- ----
83
- """
84
- general_user_template = """Here is the next question, remember to only answer if you can from the provided context.
85
- If the question is not relevant to real estate , just answer that you do not know, do not create your own answer.
86
- Only respond in Vietnamese.
87
- QUESTION:```{question}```"""
88
-
89
- messages_1 = [
90
- SystemMessagePromptTemplate.from_template(general_system_template),
91
- HumanMessagePromptTemplate.from_template(general_user_template)
92
- ]
93
- qa_prompt = ChatPromptTemplate.from_messages( messages_1 )
94
-
95
-
96
- crc = ConversationalRetrievalChain.from_llm(llm, retriever, combine_docs_chain_kwargs={'prompt': qa_prompt})
97
- result = crc({'question': question, 'chat_history': chat_history_1})
98
- return result
99
-
100
-
101
- def clear_history():
102
- if "history_1" in st.session_state:
103
- st.session_state.history_1 = []
104
- st.session_state.messages_1 = []
105
-
106
- # Define a function for submitting feedback
107
- def _submit_feedback(user_response, emoji=None):
108
- st.toast(f"Feedback submitted: {user_response}", icon=emoji)
109
- return user_response.update({"some metadata": 123})
110
-
111
-
112
- def format_chat_history(chat_history_1):
113
- formatted_history = ""
114
- for entry in chat_history_1:
115
- question, answer = entry
116
- # Added an extra '\n' for the blank line
117
- formatted_history += f"Question: {question}\nAnswer: {answer}\n\n"
118
- return formatted_history
119
-
120
- def run_chatbot():
121
- with st.sidebar.title("Sidebar"):
122
- if st.button("Clear History"):
123
- clear_history()
124
-
125
- st.title("🦾 Chatbot (news,law)")
126
-
127
- # Initialize the chatbot and load embeddings
128
- if "messages_1" not in st.session_state:
129
- with st.spinner("Initializing, please wait a moment!!!"):
130
- st.session_state.vector_store = load_embeddings()
131
- st.success("Finish!!!")
132
- st.session_state["messages_1"] = [{"role": "assistant", "content": "Tôi có thể giúp gì được cho bạn?"}]
133
-
134
- messages_1 = st.session_state.messages_1
135
- feedback_kwargs = {
136
- "feedback_type": "thumbs",
137
- "optional_text_label": "Please provide extra information",
138
- "on_submit": _submit_feedback,
139
- }
140
-
141
- for n, msg in enumerate(messages_1):
142
- st.chat_message(msg["role"]).write(msg["content"])
143
-
144
- if msg["role"] == "assistant" and n > 1:
145
- feedback_key = f"feedback_{int(n/2)}"
146
-
147
- if feedback_key not in st.session_state:
148
- st.session_state[feedback_key] = None
149
-
150
- streamlit_feedback(
151
- **feedback_kwargs,
152
- key=feedback_key,
153
- )
154
-
155
-
156
- chat_history_placeholder = st.empty()
157
- if "history_1" not in st.session_state:
158
- st.session_state.history_1 = []
159
-
160
- if prompt := st.chat_input():
161
- if "vector_store" in st.session_state:
162
- vector_store = st.session_state["vector_store"]
163
-
164
- q = prompt
165
-
166
- st.session_state.messages_1.append({"role": "user", "content": prompt})
167
- st.chat_message("user").write(prompt)
168
- with st.spinner("Thinking..."):
169
- response = ask_with_memory(vector_store, q, st.session_state.history_1)
170
-
171
- if len(st.session_state.history_1) >= chat_context_length:
172
- st.session_state.history_1 = st.session_state.history_1[1:]
173
-
174
- st.session_state.history_1.append((q, response['answer']))
175
-
176
- chat_history_str = format_chat_history(st.session_state.history_1)
177
-
178
- msg = {"role": "assistant", "content": response['answer']}
179
- st.session_state.messages_1.append(msg)
180
- st.chat_message("assistant").write(msg["content"])
181
-
182
- # Display the feedback component after the chatbot responds
183
- feedback_key = f"feedback_{len(st.session_state.messages_1) - 1}"
184
- streamlit_feedback(
185
- **feedback_kwargs,
186
- key=feedback_key,
187
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
screens/chat_bot_2.py DELETED
@@ -1,184 +0,0 @@
1
- import streamlit as st
2
- #Import library
3
- import yaml
4
- #load config.yml and parse into variables
5
- with open("config2.yml", "r") as ymlfile:
6
- cfg = yaml.safe_load(ymlfile)
7
- _BARD_API_KEY = cfg["API_KEY"]["Bard"]
8
- main_path = cfg["LOCAL_PATH"]["main_path"]
9
- chat_context_length = cfg["CHAT"]["chat_context_length"]
10
- model_name = cfg["EMBEDDINGS"]["HuggingFaceEmbeddings"]["model_name"]
11
- model_kwargs = cfg["EMBEDDINGS"]["HuggingFaceEmbeddings"]["model_kwargs"]
12
- chunk_size = cfg["CHUNK"]["chunk_size"]
13
- chunk_overlap = cfg["CHUNK"]["chunk_overlap"]
14
-
15
- import os
16
- from dotenv import load_dotenv, find_dotenv
17
- from langchain.vectorstores import Chroma
18
- import streamlit.components.v1 as components
19
- import streamlit as st
20
- import sys
21
- from langchain.embeddings import HuggingFaceEmbeddings
22
- from langchain.chains import ConversationalRetrievalChain
23
- from langchain.prompts.chat import ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate
24
- # Bard
25
- from bardapi import Bard
26
- from typing import Any, List, Mapping, Optional
27
- from getpass import getpass
28
- import os
29
- from langchain.llms.base import LLM
30
- from langchain.callbacks.manager import CallbackManagerForLLMRun
31
-
32
- from streamlit_feedback import streamlit_feedback
33
-
34
-
35
- #define Bard
36
- class BardLLM(LLM):
37
-
38
- @property
39
- def _llm_type(self) -> str:
40
- return "custom"
41
-
42
- def _call(
43
- self,
44
- prompt: str,
45
- stop: Optional[List[str]] = None,
46
- run_manager: Optional[CallbackManagerForLLMRun] = None,
47
- ) -> str:
48
- response = Bard(token=_BARD_API_KEY).get_answer(prompt)['content']
49
- return response
50
-
51
- @property
52
- def _identifying_params(self) -> Mapping[str, Any]:
53
- """Get the identifying parameters."""
54
- return {}
55
-
56
-
57
-
58
- def load_embeddings():
59
- embeddings = HuggingFaceEmbeddings(model_name=model_name, model_kwargs=model_kwargs)
60
- chroma_index = Chroma(persist_directory="./chroma_index_1", embedding_function=embeddings)
61
- print("Successfully loading embeddings and indexing")
62
- return chroma_index
63
-
64
-
65
-
66
- def ask_with_memory(vector_store, question, chat_history=[], document_description=""):
67
-
68
- llm=BardLLM()
69
- retriever = vector_store.as_retriever( # now the vs can return documents
70
- search_type='similarity', search_kwargs={'k': 3})
71
-
72
- general_system_template = f"""
73
- You are a helpful and informative bot that answers questions posed below using provided context.
74
- You have to be truthful. Do not recommend or propose any infomation of the properties.
75
- Be sure to respond in a complete sentence, being comprehensive, including all information in the provided context.
76
- Imagine you're talking to a friend and use natural language and phrasing.
77
- You can only use Vietnamese do not use other languages.
78
- ----
79
- CONTEXT: {{context}}
80
- ----
81
- """
82
- general_user_template = """Here is the next question, remember to only answer if you can from the provided context.
83
- If the question is not relevant to real estate , just answer that you do not know, do not create your own answer.
84
- Only respond in Vietnamese.
85
- QUESTION:```{question}```"""
86
-
87
- messages = [
88
- SystemMessagePromptTemplate.from_template(general_system_template),
89
- HumanMessagePromptTemplate.from_template(general_user_template)
90
- ]
91
- qa_prompt = ChatPromptTemplate.from_messages( messages )
92
-
93
-
94
- crc = ConversationalRetrievalChain.from_llm(llm, retriever, combine_docs_chain_kwargs={'prompt': qa_prompt})
95
- result = crc({'question': question, 'chat_history': chat_history})
96
- return result
97
-
98
-
99
- def clear_history():
100
- if "history" in st.session_state:
101
- st.session_state.history = []
102
- st.session_state.messages = []
103
-
104
- # Define a function for submitting feedback
105
- def _submit_feedback(user_response, emoji=None):
106
- st.toast(f"Feedback submitted: {user_response}", icon=emoji)
107
- return user_response.update({"some metadata": 123})
108
-
109
-
110
- def format_chat_history(chat_history):
111
- formatted_history = ""
112
- for entry in chat_history:
113
- question, answer = entry
114
- # Added an extra '\n' for the blank line
115
- formatted_history += f"Question: {question}\nAnswer: {answer}\n\n"
116
- return formatted_history
117
-
118
- def run_chatbot_2():
119
- with st.sidebar.title("Sidebar"):
120
- if st.button("Clear History"):
121
- clear_history()
122
-
123
- st.title("🤖 Chatbot (property)")
124
-
125
- # Initialize the chatbot and load embeddings
126
- if "messages" not in st.session_state:
127
- with st.spinner("Initializing, please wait a moment!!!"):
128
- st.session_state.vector_store = load_embeddings()
129
- st.success("Finish!!!")
130
- st.session_state["messages"] = [{"role": "assistant", "content": "Tôi có thể giúp gì được cho bạn?"}]
131
-
132
- messages = st.session_state.messages
133
- feedback_kwargs = {
134
- "feedback_type": "thumbs",
135
- "optional_text_label": "Please provide extra information",
136
- "on_submit": _submit_feedback,
137
- }
138
-
139
- for n, msg in enumerate(messages):
140
- st.chat_message(msg["role"]).write(msg["content"])
141
-
142
- if msg["role"] == "assistant" and n > 1:
143
- feedback_key = f"feedback_{int(n/2)}"
144
-
145
- if feedback_key not in st.session_state:
146
- st.session_state[feedback_key] = None
147
-
148
- streamlit_feedback(
149
- **feedback_kwargs,
150
- key=feedback_key,
151
- )
152
-
153
- chat_history_placeholder = st.empty()
154
- if "history" not in st.session_state:
155
- st.session_state.history = []
156
-
157
- if prompt := st.chat_input():
158
- if "vector_store" in st.session_state:
159
- vector_store = st.session_state["vector_store"]
160
-
161
- q = prompt
162
-
163
- st.session_state.messages.append({"role": "user", "content": prompt})
164
- st.chat_message("user").write(prompt)
165
-
166
- response = ask_with_memory(vector_store, q, st.session_state.history)
167
-
168
- if len(st.session_state.history) >= chat_context_length:
169
- st.session_state.history = st.session_state.history[1:]
170
-
171
- st.session_state.history.append((q, response['answer']))
172
-
173
- chat_history_str = format_chat_history(st.session_state.history)
174
-
175
- msg = {"role": "assistant", "content": response['answer']}
176
- st.session_state.messages.append(msg)
177
- st.chat_message("assistant").write(msg["content"])
178
-
179
- # Display the feedback component after the chatbot responds
180
- feedback_key = f"feedback_{len(st.session_state.messages) - 1}"
181
- streamlit_feedback(
182
- **feedback_kwargs,
183
- key=feedback_key,
184
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
screens/index.py DELETED
@@ -1,26 +0,0 @@
1
- from screens.search import Search_Property
2
- from screens.chat_bot import run_chatbot
3
- from screens.chat_bot_2 import run_chatbot_2
4
- from utils.index import get_hash
5
-
6
- def get_routes():
7
- screens = [
8
-
9
- {
10
- "component": Search_Property,
11
- "name": "Search",
12
- "icon": "search"
13
- },
14
- {
15
- "component": run_chatbot,
16
- "name": "Chatbot (news,law)",
17
- "icon": "chat"
18
- },
19
- {
20
- "component": run_chatbot_2,
21
- "name": "Chatbot (property)",
22
- "icon": "chat"
23
- }
24
- ]
25
-
26
- return get_hash(screens)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
screens/search.py DELETED
@@ -1,245 +0,0 @@
1
- import streamlit as st
2
- import os
3
- import streamlit.components.v1 as components
4
- from io import BytesIO
5
- import requests
6
- import ast
7
-
8
- from langchain import PromptTemplate
9
- from langchain.chains import RetrievalQA
10
- from langchain.vectorstores import Chroma
11
- from langchain.embeddings import SentenceTransformerEmbeddings
12
- from bardapi import Bard
13
- from typing import Any, List, Mapping, Optional
14
-
15
- os.environ['_BARD_API_KEY'] = "aAhD1NyQqzeoXs8PclDOD_hvEI3N9uHnsn2F0isADM5FFwBfYxatJf1csSUTMo4TXLjOxA."
16
-
17
- from langchain.llms.base import LLM
18
- from langchain.callbacks.manager import CallbackManagerForLLMRun
19
- class BardLLM(LLM):
20
-
21
-
22
- @property
23
- def _llm_type(self) -> str:
24
- return "custom"
25
-
26
- def _call(
27
- self,
28
- prompt: str,
29
- stop: Optional[List[str]] = None,
30
- run_manager: Optional[CallbackManagerForLLMRun] = None,
31
- ) -> str:
32
- response = Bard(token=os.environ['_BARD_API_KEY']).get_answer(prompt)['content']
33
- return response
34
-
35
- @property
36
- def _identifying_params(self) -> Mapping[str, Any]:
37
- """Get the identifying parameters."""
38
- return {}
39
-
40
- @st.cache_data
41
- def get_image(url):
42
- r = requests.get(url)
43
- return BytesIO(r.content)
44
-
45
-
46
- # Define global variables
47
- embeddings = None
48
- index = None
49
- QUESTION_PROMPT = None
50
- qa = None
51
- result = []
52
-
53
- # Custom session state class for managing pagination
54
- class SessionState:
55
- def __init__(self):
56
- self.page_index = 0 # Initialize page index
57
- self.database_loaded = False # Initialize database loaded state
58
-
59
- # Create a session state object
60
- session_state = SessionState()
61
-
62
- # Define the search function outside of Search_Property
63
- def display_search_results(result, start_idx, end_idx):
64
- if result:
65
- st.subheader("Search Results:")
66
- for idx in range(start_idx, end_idx):
67
- if idx >= len(result):
68
- break
69
- property_info = result[idx]
70
- st.markdown(f"**Result {idx + 1}**")
71
-
72
- # Display property information
73
- if 'Image URL' in property_info.metadata and property_info.metadata['Image URL'] is not None and not isinstance(property_info.metadata['Image URL'], float):
74
- image_path_urls = property_info.metadata['Image URL']
75
- if image_path_urls is not None and not isinstance(image_path_urls, float):
76
- # Convert the string to a Python list
77
- imageUrls = ast.literal_eval(image_path_urls)
78
-
79
- # Now, imageUrls is a list of strings
80
- st.image(imageUrls[0],width=700)
81
-
82
- st.markdown(f"🏡 {property_info.metadata['Title']}")
83
- if 'Location' in property_info.metadata and property_info.metadata['Location'] is not None and not isinstance(property_info.metadata['Location'], float):
84
- st.write(f"📍 Address: {property_info.metadata['Location']}")
85
- if 'Area' in property_info.metadata and property_info.metadata['Area'] is not None and not isinstance(property_info.metadata['Area'], float):
86
- st.markdown(f"📏 Size: {property_info.metadata['Area']}")
87
- if 'Price' in property_info.metadata and property_info.metadata['Price'] is not None and not isinstance(property_info.metadata['Price'], float):
88
- st.markdown(f"💰 Price: {property_info.metadata['Price']} ")
89
- st.markdown(f"📅 Published Date: {property_info.metadata['Time stamp']}")
90
- col3, col4 = st.columns([2, 1])
91
- with col3:
92
- with st.expander("Full Property Information"):
93
- st.write(f"🏡 Property Title: {property_info.metadata['Title']}")
94
- if 'Area' in property_info.metadata and property_info.metadata['Area'] is not None and not isinstance(property_info.metadata['Area'], float):
95
- st.write(f"📏 Size: {property_info.metadata['Area']}")
96
- if 'Category' in property_info.metadata and property_info.metadata['Category'] is not None and not isinstance(property_info.metadata['Category'], float):
97
- st.write(f"🏢 Category: {property_info.metadata['Category']}")
98
- if 'Description' in property_info.metadata and property_info.metadata['Description'] is not None and not isinstance(property_info.metadata['Description'], float):
99
- st.write(f"📝 Description: {property_info.metadata['Description']}")
100
- if 'Price' in property_info.metadata and property_info.metadata['Price'] is not None and not isinstance(property_info.metadata['Price'], float):
101
- st.write(f"💰 Price: {property_info.metadata['Price']}")
102
- st.write(f"📅 Date: {property_info.metadata['Time stamp']}")
103
- if 'Location' in property_info.metadata and property_info.metadata['Location'] is not None and not isinstance(property_info.metadata['Location'], float):
104
- st.write(f"📍 Address: {property_info.metadata['Location']}")
105
- st.write(f"🆔 ID: {property_info.metadata['ID']}")
106
- if 'Estate type' in property_info.metadata and property_info.metadata['Estate type'] is not None and not isinstance(property_info.metadata['Estate type'], float):
107
- st.write(f"🏠 Housing Type: {property_info.metadata['Estate type']}")
108
- if 'Email' in property_info.metadata and property_info.metadata['Email'] is not None and not isinstance(property_info.metadata['Email'], float):
109
- st.write(f"✉️ Email: {property_info.metadata['Email']}")
110
- if 'Mobile Phone' in property_info.metadata and property_info.metadata['Mobile Phone'] is not None and not isinstance(property_info.metadata['Mobile Phone'], float):
111
- st.write(f"📞 Phone: {property_info.metadata['Mobile Phone']}")
112
- if 'Certification status' in property_info.metadata and property_info.metadata['Certification status'] is not None and not isinstance(property_info.metadata['Certification status'], float):
113
- st.write(f"🏆 Certification status: {property_info.metadata['Certification status']}")
114
- if 'Direction' in property_info.metadata and property_info.metadata['Direction'] is not None and not isinstance(property_info.metadata['Direction'], float):
115
- st.write(f"🧭 Direction: {property_info.metadata['Direction']}")
116
- if 'Rooms' in property_info.metadata and property_info.metadata['Rooms'] is not None and not isinstance(property_info.metadata['Rooms'], float):
117
- st.write(f"🚪 Rooms: {property_info.metadata['Rooms']}")
118
- if 'Bedrooms' in property_info.metadata and property_info.metadata['Bedrooms'] is not None and not isinstance(property_info.metadata['Bedrooms'], float):
119
- st.write(f"🛏️ Bedrooms: {property_info.metadata['Bedrooms']}")
120
- if 'Kitchen' in property_info.metadata and property_info.metadata['Kitchen'] is not None and not isinstance(property_info.metadata['Kitchen'], float):
121
- st.write(f"🍽️ Kitchen: {property_info.metadata['Kitchen']}")
122
- if 'Living room' in property_info.metadata and property_info.metadata['Living room'] is not None and not isinstance(property_info.metadata['Living room'], float):
123
- st.write(f"🛋️ Living room: {property_info.metadata['Living room']}")
124
- if 'Bathrooms' in property_info.metadata and property_info.metadata['Bathrooms'] is not None and not isinstance(property_info.metadata['Bathrooms'], float):
125
- st.write(f"🚽 Bathrooms: {property_info.metadata['Bathrooms']}")
126
- if 'Front width' in property_info.metadata and property_info.metadata['Front width'] is not None and not isinstance(property_info.metadata['Front width'], float):
127
- st.write(f"📐 Front width: {property_info.metadata['Front width']}")
128
- if 'Floor' in property_info.metadata and property_info.metadata['Floor'] is not None and not isinstance(property_info.metadata['Floor'], float):
129
- st.write(f"🧱 Floor: {property_info.metadata['Floor']}")
130
- if 'Parking Slot' in property_info.metadata and property_info.metadata['Parking Slot'] is not None and not isinstance(property_info.metadata['Parking Slot'], float):
131
- st.write(f"🚗 Parking Slot: {property_info.metadata['Parking Slot']}")
132
- if 'Seller name' in property_info.metadata and property_info.metadata['Seller name'] is not None and not isinstance(property_info.metadata['Seller name'], float):
133
- st.write(f"👤 Seller Name: {property_info.metadata['Seller name']}")
134
- if 'Seller type' in property_info.metadata and property_info.metadata['Seller type'] is not None and not isinstance(property_info.metadata['Seller type'], float):
135
- st.write(f"👨‍💼 Seller type: {property_info.metadata['Seller type']}")
136
- if 'Seller Address' in property_info.metadata and property_info.metadata['Seller Address'] is not None and not isinstance(property_info.metadata['Seller Address'], float):
137
- st.write(f"📌 Seller Address: {property_info.metadata['Seller Address']}")
138
- if 'Balcony Direction' in property_info.metadata and property_info.metadata['Balcony Direction'] is not None and not isinstance(property_info.metadata['Balcony Direction'], float):
139
- st.write(f"🌄 Balcony Direction: {property_info.metadata['Balcony Direction']}")
140
- if 'Furniture' in property_info.metadata and property_info.metadata['Furniture'] is not None and not isinstance(property_info.metadata['Furniture'], float):
141
- st.write(f"🛋️ Furniture: {property_info.metadata['Furniture']}")
142
- if 'Toilet' in property_info.metadata and property_info.metadata['Toilet'] is not None and not isinstance(property_info.metadata['Toilet'], float):
143
- st.write(f"🚽 Toilet: {property_info.metadata['Toilet']}")
144
-
145
- with col4:
146
- st.empty()
147
- if 'Image URL' in property_info.metadata and property_info.metadata['Image URL'] is not None and not isinstance(property_info.metadata['Image URL'], float):
148
- imageCarouselComponent = components.declare_component("image-carousel-component", path="./frontend/public")
149
- image_path_urls = property_info.metadata['Image URL']
150
- if image_path_urls is not None and not isinstance(image_path_urls, float):
151
- # Convert the string to a Python list
152
- imageUrls = ast.literal_eval(image_path_urls)
153
- if len(imageUrls) > 1:
154
- selectedImageUrl = imageCarouselComponent(imageUrls=imageUrls, height=200)
155
- if selectedImageUrl is not None:
156
- st.image(selectedImageUrl)
157
-
158
- # Add a divider after displaying property info
159
- st.markdown("<hr style='border: 2px solid white'>", unsafe_allow_html=True) # Horizontal rule as a divider
160
-
161
-
162
- def Search_Property():
163
- global embeddings, index, result, QUESTION_PROMPT, qa
164
-
165
- st.title("🏘️ Property Search ")
166
- # Load data and create the search
167
- if not session_state.database_loaded:
168
- st.info("Loading database... This may take a moment.")
169
- embeddings = SentenceTransformerEmbeddings(model_name="keepitreal/vietnamese-sbert")
170
- # Create a Chroma object with persistence
171
- db = Chroma(persist_directory="./chroma_index_1", embedding_function=embeddings)
172
- # Get documents from the database
173
- db.get()
174
- llm=BardLLM()
175
- qa = RetrievalQA.from_chain_type(
176
- llm=llm,
177
- chain_type="stuff",
178
- retriever=db.as_retriever(search_type="similarity", search_kwargs={"k":4}),
179
- return_source_documents=True)
180
- question_template = """
181
- Context: You are a helpful and informative bot that answers questions posed below using provided context.
182
- You have to be truthful. Do not recommend or propose any infomation of the properties.
183
- Be sure to respond in a complete sentence, being comprehensive, including all information in the provided context.
184
- Imagine you're talking to a friend and use natural language and phrasing.
185
- You can only use Vietnamese do not use other languages.
186
-
187
- QUESTION: '{question}'
188
-
189
- ANSWER:
190
- """
191
- QUESTION_PROMPT = PromptTemplate(
192
- template=question_template, input_variables=["question"]
193
- )
194
- session_state.database_loaded = True
195
-
196
- if session_state.database_loaded:
197
- col1, col2 = st.columns([2, 1]) # Create a two-column layout
198
-
199
- with col1:
200
- query = st.text_input("Enter your property search query:")
201
- search_button = st.button("Search", help="Click to start the search")
202
-
203
- if search_button:
204
- if not query:
205
- st.warning("Please enter a search query.")
206
- else:
207
- with st.spinner("Searching..."):
208
- if query is not None: # Check if model_embedding is not None
209
- qa.combine_documents_chain.llm_chain.prompt = QUESTION_PROMPT
210
- qa.combine_documents_chain.verbose = True
211
- qa.return_source_documents = True
212
- results = qa({"query":query,})
213
- result = results["source_documents"]
214
- session_state.page_index = 0 # Reset page index when a new search is performed
215
-
216
- with col2:
217
- if len(result) > 0:
218
- st.write(f'Total Results: {len(result)} properties found.') # Display "Total Results" in the second column
219
-
220
- if result:
221
- N = 5
222
- prev_button, next_button = st.columns([4,1])
223
- last_page = len(result) // N
224
-
225
-
226
- # Update page index based on button clicks
227
- if prev_button.button("Previous", key="prev_button"):
228
- if session_state.page_index - 1 < 0:
229
- session_state.page_index = last_page
230
- else:
231
- session_state.page_index -= 1
232
-
233
- if next_button.button("Next", key="next_button"):
234
- if session_state.page_index > last_page:
235
- st.warning("Displayed all results")
236
- session_state.page_index = 0
237
- else:
238
- session_state.page_index += 1
239
-
240
- # Calculate the range of results to display (5 properties at a time)
241
- start_idx = session_state.page_index * N
242
- end_idx = (1 + session_state.page_index) * N
243
-
244
- # Display results for the current page
245
- display_search_results(result, start_idx, end_idx)