Abid Ali Awan committed on
Commit
41a9d8b
·
1 Parent(s): 925aaa4

Refactor UIHandler to utilize UUID for unique user IDs per session, enhancing user identification during regulatory query processing. Update streaming_chatbot and delayed_clear methods to handle user_id_state, ensuring consistent user context throughout interactions.

Browse files
Files changed (1) hide show
  1. agents/ui_handler.py +38 -27
agents/ui_handler.py CHANGED
@@ -1,6 +1,6 @@
1
- import random
2
  import threading
3
  import time
 
4
 
5
  import gradio as gr
6
  from gradio import ChatMessage
@@ -13,14 +13,16 @@ from tools.llm import stream_llm
13
  class UIHandler:
14
  def __init__(self):
15
  self.agent = RegRadarAgent()
16
- self.user_id = (
17
- f"user-{random.randint(1000, 9999)}" # User ID per session, small number
18
- )
19
 
20
- def streaming_chatbot(self, message, history):
21
  """Process messages with tool visibility and lock input during response generation"""
 
 
 
 
 
22
  if not message.strip():
23
- return history, "", gr.update(interactive=True)
24
 
25
  # Add user message
26
  history.append(ChatMessage(role="user", content=message))
@@ -29,7 +31,7 @@ class UIHandler:
29
  start_time = time.time()
30
 
31
  # Disable input box at the start
32
- yield history, "", gr.update(interactive=False)
33
 
34
  # Detect if this is a regulatory query
35
  is_regulatory = self.agent.is_regulatory_query(message)
@@ -39,7 +41,7 @@ class UIHandler:
39
  history.append(
40
  ChatMessage(role="assistant", content="💬 Processing general query...")
41
  )
42
- yield history, "", gr.update(interactive=False)
43
 
44
  # Clear processing message and stream response
45
  history.pop()
@@ -49,10 +51,10 @@ class UIHandler:
49
  for chunk in stream_llm(message):
50
  streaming_content += chunk
51
  history[-1] = ChatMessage(role="assistant", content=streaming_content)
52
- yield history, "", gr.update(interactive=False)
53
 
54
  # Re-enable input box at the end
55
- yield history, "", gr.update(interactive=True)
56
  return
57
 
58
  # Show tool detection
@@ -61,7 +63,7 @@ class UIHandler:
61
  # Initial processing message with tool info
62
  status_msg = f"🔍 Using **{tool_name}** to analyze your query (estimated 10-20 seconds)..."
63
  history.append(ChatMessage(role="assistant", content=status_msg))
64
- yield history, "", gr.update(interactive=False)
65
 
66
  # Extract parameters and process query
67
  params = self.agent.extract_parameters(message)
@@ -81,12 +83,10 @@ class UIHandler:
81
  🔄 **Executing {tool_name}...**
82
  """
83
  history.append(ChatMessage(role="assistant", content=tool_status))
84
- yield history, "", gr.update(interactive=False)
85
 
86
  # Process the regulatory query
87
- results = self.agent.process_regulatory_query(
88
- message, params, user_id=self.user_id
89
- )
90
  crawl_results = results["crawl_results"]
91
  memory_results = results["memory_results"]
92
 
@@ -96,7 +96,7 @@ class UIHandler:
96
  content=tool_status
97
  + f"\n\n✅ **Found {crawl_results['total_found']} regulatory updates**",
98
  )
99
- yield history, "", gr.update(interactive=False)
100
 
101
  # Show collapsible raw results
102
  if crawl_results["results"]:
@@ -130,7 +130,7 @@ class UIHandler:
130
  else:
131
  collapsible_results = "<details><summary><strong>📋 Raw Regulatory Data</strong> - Click to expand</summary>\nNo unique regulatory updates found.\n</details>"
132
  history.append(ChatMessage(role="assistant", content=collapsible_results))
133
- yield history, "", gr.update(interactive=False)
134
 
135
  # Display memory results if available
136
  if memory_results:
@@ -148,7 +148,7 @@ Found {len(memory_results)} similar past queries in memory. Top 3 shown below:
148
  </details>
149
  """
150
  history.append(ChatMessage(role="assistant", content=memory_msg))
151
- yield history, "", gr.update(interactive=False)
152
 
153
  # Generate final analysis
154
  history.append(
@@ -156,7 +156,7 @@ Found {len(memory_results)} similar past queries in memory. Top 3 shown below:
156
  role="assistant", content="📝 **Generating Compliance Report...**"
157
  )
158
  )
159
- yield history, "", gr.update(interactive=False)
160
 
161
  # Clear generating message and stream final report
162
  history.pop()
@@ -167,7 +167,7 @@ Found {len(memory_results)} similar past queries in memory. Top 3 shown below:
167
  for chunk in self.agent.generate_report(params, crawl_results, memory_results):
168
  streaming_content += chunk
169
  history[-1] = ChatMessage(role="assistant", content=streaming_content)
170
- yield history, "", gr.update(interactive=False)
171
 
172
  # Show completion time (before saving to memory)
173
  elapsed = time.time() - start_time
@@ -177,18 +177,18 @@ Found {len(memory_results)} similar past queries in memory. Top 3 shown below:
177
  )
178
  )
179
  # Re-enable input box at the end
180
- yield history, "", gr.update(interactive=True)
181
 
182
  # Save to memory in the background
183
  threading.Thread(
184
  target=self.agent.memory_tools.save_to_memory,
185
- args=(self.user_id, message, streaming_content),
186
  daemon=True,
187
  ).start()
188
 
189
- def delayed_clear(self):
190
  time.sleep(0.1) # 100ms delay to allow generator cancellation
191
- return [], "", gr.update(interactive=True)
192
 
193
  def create_ui(self):
194
  """Create Gradio interface"""
@@ -231,6 +231,9 @@ Found {len(memory_results)} similar past queries in memory. Top 3 shown below:
231
  stop = gr.Button("Stop", variant="stop", scale=1, min_width=60)
232
  clear = gr.Button("Clear", scale=1, min_width=60)
233
 
 
 
 
234
  # Example queries
235
  example_queries = [
236
  "Show me the latest SEC regulations for fintech",
@@ -279,13 +282,21 @@ Found {len(memory_results)} similar past queries in memory. Top 3 shown below:
279
 
280
  # Event handlers
281
  submit_event = msg.submit(
282
- self.streaming_chatbot, [msg, chatbot], [chatbot, msg, msg]
 
 
283
  )
284
  click_event = submit.click(
285
- self.streaming_chatbot, [msg, chatbot], [chatbot, msg, msg]
 
 
286
  )
287
  stop.click(None, cancels=[submit_event, click_event])
288
- clear.click(self.delayed_clear, outputs=[chatbot, msg, msg])
 
 
 
 
289
 
290
  # Footer
291
  gr.HTML("""
 
 
1
  import threading
2
  import time
3
+ import uuid
4
 
5
  import gradio as gr
6
  from gradio import ChatMessage
 
13
  class UIHandler:
14
  def __init__(self):
15
  self.agent = RegRadarAgent()
 
 
 
16
 
17
+ def streaming_chatbot(self, message, history, user_id_state):
18
  """Process messages with tool visibility and lock input during response generation"""
19
+ # Initialize user_id if not set
20
+ if not user_id_state:
21
+ user_id_state = f"user-{uuid.uuid4().hex[:4]}"
22
+ user_id = user_id_state
23
+
24
  if not message.strip():
25
+ return history, "", gr.update(interactive=True), user_id_state
26
 
27
  # Add user message
28
  history.append(ChatMessage(role="user", content=message))
 
31
  start_time = time.time()
32
 
33
  # Disable input box at the start
34
+ yield history, "", gr.update(interactive=False), user_id_state
35
 
36
  # Detect if this is a regulatory query
37
  is_regulatory = self.agent.is_regulatory_query(message)
 
41
  history.append(
42
  ChatMessage(role="assistant", content="💬 Processing general query...")
43
  )
44
+ yield history, "", gr.update(interactive=False), user_id_state
45
 
46
  # Clear processing message and stream response
47
  history.pop()
 
51
  for chunk in stream_llm(message):
52
  streaming_content += chunk
53
  history[-1] = ChatMessage(role="assistant", content=streaming_content)
54
+ yield history, "", gr.update(interactive=False), user_id_state
55
 
56
  # Re-enable input box at the end
57
+ yield history, "", gr.update(interactive=True), user_id_state
58
  return
59
 
60
  # Show tool detection
 
63
  # Initial processing message with tool info
64
  status_msg = f"🔍 Using **{tool_name}** to analyze your query (estimated 10-20 seconds)..."
65
  history.append(ChatMessage(role="assistant", content=status_msg))
66
+ yield history, "", gr.update(interactive=False), user_id_state
67
 
68
  # Extract parameters and process query
69
  params = self.agent.extract_parameters(message)
 
83
  🔄 **Executing {tool_name}...**
84
  """
85
  history.append(ChatMessage(role="assistant", content=tool_status))
86
+ yield history, "", gr.update(interactive=False), user_id_state
87
 
88
  # Process the regulatory query
89
+ results = self.agent.process_regulatory_query(message, params, user_id=user_id)
 
 
90
  crawl_results = results["crawl_results"]
91
  memory_results = results["memory_results"]
92
 
 
96
  content=tool_status
97
  + f"\n\n✅ **Found {crawl_results['total_found']} regulatory updates**",
98
  )
99
+ yield history, "", gr.update(interactive=False), user_id_state
100
 
101
  # Show collapsible raw results
102
  if crawl_results["results"]:
 
130
  else:
131
  collapsible_results = "<details><summary><strong>📋 Raw Regulatory Data</strong> - Click to expand</summary>\nNo unique regulatory updates found.\n</details>"
132
  history.append(ChatMessage(role="assistant", content=collapsible_results))
133
+ yield history, "", gr.update(interactive=False), user_id_state
134
 
135
  # Display memory results if available
136
  if memory_results:
 
148
  </details>
149
  """
150
  history.append(ChatMessage(role="assistant", content=memory_msg))
151
+ yield history, "", gr.update(interactive=False), user_id_state
152
 
153
  # Generate final analysis
154
  history.append(
 
156
  role="assistant", content="📝 **Generating Compliance Report...**"
157
  )
158
  )
159
+ yield history, "", gr.update(interactive=False), user_id_state
160
 
161
  # Clear generating message and stream final report
162
  history.pop()
 
167
  for chunk in self.agent.generate_report(params, crawl_results, memory_results):
168
  streaming_content += chunk
169
  history[-1] = ChatMessage(role="assistant", content=streaming_content)
170
+ yield history, "", gr.update(interactive=False), user_id_state
171
 
172
  # Show completion time (before saving to memory)
173
  elapsed = time.time() - start_time
 
177
  )
178
  )
179
  # Re-enable input box at the end
180
+ yield history, "", gr.update(interactive=True), user_id_state
181
 
182
  # Save to memory in the background
183
  threading.Thread(
184
  target=self.agent.memory_tools.save_to_memory,
185
+ args=(user_id, message, streaming_content),
186
  daemon=True,
187
  ).start()
188
 
189
+ def delayed_clear(self, user_id_state):
190
  time.sleep(0.1) # 100ms delay to allow generator cancellation
191
+ return [], "", gr.update(interactive=True), user_id_state
192
 
193
  def create_ui(self):
194
  """Create Gradio interface"""
 
231
  stop = gr.Button("Stop", variant="stop", scale=1, min_width=60)
232
  clear = gr.Button("Clear", scale=1, min_width=60)
233
 
234
+ # Add user_id_state for session
235
+ user_id_state = gr.State()
236
+
237
  # Example queries
238
  example_queries = [
239
  "Show me the latest SEC regulations for fintech",
 
282
 
283
  # Event handlers
284
  submit_event = msg.submit(
285
+ self.streaming_chatbot,
286
+ [msg, chatbot, user_id_state],
287
+ [chatbot, msg, msg, user_id_state],
288
  )
289
  click_event = submit.click(
290
+ self.streaming_chatbot,
291
+ [msg, chatbot, user_id_state],
292
+ [chatbot, msg, msg, user_id_state],
293
  )
294
  stop.click(None, cancels=[submit_event, click_event])
295
+ clear.click(
296
+ self.delayed_clear,
297
+ inputs=[user_id_state],
298
+ outputs=[chatbot, msg, msg, user_id_state],
299
+ )
300
 
301
  # Footer
302
  gr.HTML("""