iSathyam03 committed
Commit c148410 · verified · 1 Parent(s): 2ac46a5

Upload 2 files

Files changed (2)
  1. app.py +292 -0
  2. requirements.txt +6 -0
app.py ADDED
@@ -0,0 +1,292 @@
+ import streamlit as st
+ from langchain_community.document_loaders import PDFPlumberLoader
+ from langchain_text_splitters import RecursiveCharacterTextSplitter
+ from langchain_core.vectorstores import InMemoryVectorStore
+ from langchain_ollama import OllamaEmbeddings
+ from langchain_core.prompts import ChatPromptTemplate
+ from langchain_ollama.llms import OllamaLLM
+ from typing import List
+ import os
+
+ # Initialize session state
+ def init_session_state():
+     if 'messages' not in st.session_state:
+         st.session_state['messages'] = []
+     if 'vector_store' not in st.session_state:
+         st.session_state['vector_store'] = None
+     if 'diet_preference' not in st.session_state:
+         st.session_state['diet_preference'] = 'All'
+     if 'meal_time' not in st.session_state:
+         st.session_state['meal_time'] = 'Any'
+
+ # Constants and configurations
+ PDF_STORAGE_PATH = 'Data/'
+ EMBEDDING_MODEL = OllamaEmbeddings(model="llama3.1")
+ LANGUAGE_MODEL = OllamaLLM(model="llama3.1")
+
+ MENU_RELATED_KEYWORDS = [
+     'menu', 'food', 'dish', 'meal', 'eat', 'drink', 'recommend', 'suggestion',
+     'order', 'specialty', 'cuisine', 'appetite', 'hungry', 'restaurant',
+     'vegetarian', 'vegan', 'spicy', 'dessert', 'appetizer', 'main course',
+     'dinner', 'lunch', 'breakfast', 'serve', 'portion', 'price', 'cost'
+ ]
+
+ MENU_ANALYSIS_PROMPT = """
+ You are an expert restaurant assistant with deep knowledge of cuisine and dietary preferences.
+ Use the provided menu context to help customers make informed dining choices.
+
+ Current user preferences:
+ 🍽️ Dietary preference: {diet_preference}
+ ⏰ Meal time: {meal_time}
+
+ Previous conversation:
+ {chat_history}
+
+ Current menu context: {context}
+
+ User query: {question}
+
+ Please provide recommendations based on the following guidelines:
+ 1. Consider the user's dietary preferences (vegetarian/non-vegetarian)
+ 2. Focus on menu items appropriate for the selected meal time
+ 3. Suggest complementary dishes when appropriate
+ 4. Highlight special or popular items from the menu
+ 5. Explain key ingredients or preparation methods if relevant
+ 6. Consider portion sizes and meal combinations
+
+ Response:
+ """
+
+ CASUAL_CHAT_PROMPT = """
+ You are a friendly restaurant assistant engaging in casual conversation.
+ Respond naturally to the user's message without analyzing the menu.
+
+ Current user preferences:
+ 🍽️ Dietary preference: {diet_preference}
+ ⏰ Meal time: {meal_time}
+
+ Previous conversation:
+ {chat_history}
+
+ User message: {question}
+
+ Respond in a friendly, conversational manner while maintaining context of the previous discussion.
+ """
+
+ # File handling functions
+ def save_uploaded_files(uploaded_files) -> List[str]:
+     """Save multiple uploaded files and return their paths"""
+     os.makedirs(PDF_STORAGE_PATH, exist_ok=True)  # ensure the upload directory exists
+     file_paths = []
+     for uploaded_file in uploaded_files:
+         file_path = os.path.join(PDF_STORAGE_PATH, uploaded_file.name)
+         with open(file_path, "wb") as file:
+             file.write(uploaded_file.getbuffer())
+         file_paths.append(file_path)
+     return file_paths
+
+ def load_multiple_pdfs(file_paths: List[str]):
+     """Load multiple PDF documents"""
+     documents = []
+     for file_path in file_paths:
+         document_loader = PDFPlumberLoader(file_path)
+         documents.extend(document_loader.load())
+     return documents
+
+ def chunk_documents(raw_documents):
+     """Split documents into chunks"""
+     text_processor = RecursiveCharacterTextSplitter(
+         chunk_size=1000,
+         chunk_overlap=200,
+         add_start_index=True
+     )
+     return text_processor.split_documents(raw_documents)
+
+ # Vector store operations
+ def index_documents(document_chunks):
+     """Create vector store from document chunks"""
+     vector_store = InMemoryVectorStore(embedding=EMBEDDING_MODEL)
+     vector_store.add_documents(document_chunks)
+     st.session_state['vector_store'] = vector_store
+
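+ # Note: the index lives only in st.session_state (in memory); it is rebuilt whenever menus are uploaded and is not persisted across sessions.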
+ def find_related_documents(query: str):
+     """Find relevant documents for a query"""
+     if st.session_state['vector_store'] is None:
+         return []
+     return st.session_state['vector_store'].similarity_search(query)
+
+ # Text processing functions
+ def combine_documents(docs: list) -> str:
+     """Combine multiple documents into a single string"""
+     return "\n\n".join([doc.page_content for doc in docs])
+
+ def format_chat_history(messages: List[dict]) -> str:
+     """Format chat history for prompt context"""
+     formatted_history = []
+     for msg in messages:
+         role = msg["role"]
+         content = msg["content"]
+         formatted_history.append(f"{role}: {content}")
+     return "\n".join(formatted_history[-6:])
+
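+ # Simple substring heuristic: any menu-related keyword appearing in the query routes it to menu analysis (with retrieval).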
+ def should_analyze_menu(query: str) -> bool:
+     """Determine if the query requires menu analysis"""
+     query_lower = query.lower()
+     return any(keyword in query_lower for keyword in MENU_RELATED_KEYWORDS)
+
+ # Response generation functions
+ def generate_casual_response(user_query: str) -> str:
+     """Generate a casual response without menu analysis"""
+     chat_history = format_chat_history(st.session_state['messages'])
+     prompt = ChatPromptTemplate.from_template(CASUAL_CHAT_PROMPT)
+     chain = prompt | LANGUAGE_MODEL
+
+     return chain.invoke({
+         "chat_history": chat_history,
+         "question": user_query,
+         "diet_preference": st.session_state['diet_preference'],
+         "meal_time": st.session_state['meal_time']
+     })
+
+ def generate_menu_response(user_query: str, context_documents: list) -> str:
+     """Generate a response with menu analysis"""
+     chat_history = format_chat_history(st.session_state['messages'])
+     prompt = ChatPromptTemplate.from_template(MENU_ANALYSIS_PROMPT)
+     chain = prompt | LANGUAGE_MODEL
+
+     return chain.invoke({
+         "chat_history": chat_history,
+         "context": combine_documents(context_documents),
+         "question": user_query,
+         "diet_preference": st.session_state['diet_preference'],
+         "meal_time": st.session_state['meal_time']
+     })
+
+ def generate_answer(user_query: str, context_documents: list) -> str:
+     """Generate appropriate response based on query type"""
+     if should_analyze_menu(user_query):
+         return generate_menu_response(user_query, context_documents)
+     else:
+         return generate_casual_response(user_query)
+
+ # Main UI function
+ def main():
+     # Initialize session state
+     init_session_state()
+
+     # UI Configuration
+     st.set_page_config(
+         page_title="Smart Menu Assistant",
+         page_icon="🍽️",
+         layout="wide"
+     )
+
+     # Main title with emojis
+     st.title("🍽️ Smart Menu Assistant 🤖")
+     st.markdown("### 👨‍🍳 Your Personal Restaurant Guide 🌟")
+     st.markdown("---")
+
+     # Sidebar with enhanced emojis
+     with st.sidebar:
+         st.markdown("### 🎯 Your Dining Journey")
+
+         # Welcome message
+         st.markdown("""
+         👋 Welcome, food lover!
+
+         Let me help you discover
+         the perfect dining options! 🌟
+
+         Use these filters to create your
+         perfect dining experience! ✨
+         """)
+
+         # Diet preference section
+         st.markdown("#### 🥗 Dietary Preference")
+         diet_choice = st.radio(
+             "What's your food preference? 🍴",
+             ["All 🍽️", "Vegetarian 🥬", "Non-Vegetarian 🍗"],
+             help="Choose your dietary preference to get personalized recommendations",
+             key="diet_radio"
+         )
+         st.session_state['diet_preference'] = diet_choice.split()[0]  # Remove emoji from choice
+
+         # Meal time section
+         st.markdown("#### ⏰ Meal Time")
+         meal_time = st.selectbox(
+             "When are you planning to eat? 🕐",
+             ["Any ⏰", "Breakfast 🌅", "Lunch 🌞", "Dinner 🌙"],
+             help="Select a meal time for specific menu suggestions",
+             key="meal_select"
+         )
+         st.session_state['meal_time'] = meal_time.split()[0]  # Remove emoji from choice
+
+         # Helpful tips
+         st.markdown("---")
+         st.markdown("""
+         💡 **Pro Tips:**
+
+         1. 📑 Upload multiple menus to compare options
+         2. 🔍 Use filters to find perfect matches
+         3. 💬 Ask specific questions about dishes
+         4. ⭐ Look for chef's recommendations
+         """)
+
+         # Clear chat button at bottom
+         st.markdown("---")
+         if st.button("🗑️ Clear Chat History", help="Remove all previous messages"):
+             st.session_state['messages'] = []
+             st.session_state['vector_store'] = None
+             st.rerun()
+
+     # File Upload Section with emojis
+     uploaded_pdfs = st.file_uploader(
+         "📤 Upload Restaurant Menus (PDF)",
+         type="pdf",
+         help="Select one or more restaurant menus in PDF format",
+         accept_multiple_files=True
+     )
+
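+     # Note: this block runs on every Streamlit rerun, so uploaded menus are re-parsed and re-embedded after each interaction.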
+     if uploaded_pdfs:
+         try:
+             with st.spinner("🔄 Processing your menus..."):
+                 saved_paths = save_uploaded_files(uploaded_pdfs)
+                 raw_docs = load_multiple_pdfs(saved_paths)
+                 processed_chunks = chunk_documents(raw_docs)
+                 index_documents(processed_chunks)
+             st.success(f"✅ Successfully processed {len(uploaded_pdfs)} menu(s)! 🎉\n\n💭 Ask me anything about the menu!")
+         except Exception as e:
+             st.error(f"❌ Error processing menus: {str(e)}")
+
+     # Add a separator before chat
+     st.markdown("---")
+     st.markdown("### 💬 Chat with Your Menu Assistant")
+
+     # Display chat history with enhanced styling
+     for message in st.session_state['messages']:
+         with st.chat_message(message["role"], avatar="👤" if message["role"] == "user" else "👨‍🍳"):
+             st.write(message["content"])
+
+     # Chat input with emoji
+     user_input = st.chat_input("🤔 How can I assist you with the menu today?")
+
+     if user_input:
+         # Add user message to chat history
+         st.session_state['messages'].append({"role": "user", "content": user_input})
+         with st.chat_message("user", avatar="👤"):
+             st.write(user_input)
+
+         try:
+             with st.spinner("🧠 Thinking..."):
+                 # Only fetch relevant docs if menu analysis is needed
+                 relevant_docs = find_related_documents(user_input) if should_analyze_menu(user_input) else []
+                 ai_response = generate_answer(user_input, relevant_docs)
+
+             # Add assistant response to chat history
+             st.session_state['messages'].append({"role": "assistant", "content": ai_response})
+             with st.chat_message("assistant", avatar="👨‍🍳"):
+                 st.write(ai_response)
+         except Exception as e:
+             st.error(f"❌ Error generating response: {str(e)}")
+
+ if __name__ == "__main__":
+     main()
requirements.txt ADDED
@@ -0,0 +1,6 @@
+ streamlit
+ langchain_core
+ langchain_community
+ langchain_text_splitters  # provides RecursiveCharacterTextSplitter, imported in app.py
+ langchain_ollama
+ pdfplumber
+ gdown
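
For quick verification outside Streamlit, a minimal sketch of the same retrieval pipeline is shown below. This is not part of the commit and carries assumptions: a local Ollama server with the llama3.1 model pulled, and a sample menu at the hypothetical path Data/menu.pdf. Every API call mirrors one already used in app.py.

# smoke_test.py (sketch, not part of this commit)
# Assumes: Ollama running locally with "llama3.1" pulled, and a menu PDF at Data/menu.pdf (hypothetical path).
from langchain_community.document_loaders import PDFPlumberLoader
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain_core.vectorstores import InMemoryVectorStore
from langchain_core.prompts import ChatPromptTemplate
from langchain_ollama import OllamaEmbeddings
from langchain_ollama.llms import OllamaLLM

# Load and chunk one menu, then build the in-memory index (same parameters as app.py).
docs = PDFPlumberLoader("Data/menu.pdf").load()
splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200, add_start_index=True)
chunks = splitter.split_documents(docs)
store = InMemoryVectorStore(embedding=OllamaEmbeddings(model="llama3.1"))
store.add_documents(chunks)

# Retrieve context for a sample question and ask the model.
question = "Which vegetarian starters do you serve?"
context = "\n\n".join(doc.page_content for doc in store.similarity_search(question))
prompt = ChatPromptTemplate.from_template("Answer using this menu context.\n\nContext: {context}\n\nQuestion: {question}")
print((prompt | OllamaLLM(model="llama3.1")).invoke({"context": context, "question": question}))

If the printed answer cites dishes from the PDF, the retrieval path the app relies on is wired correctly.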