KarthikaRajagopal committed
Commit 252c5c2 · verified · 1 Parent(s): 9c6910b

Upload 3 files

Files changed (3)
  1. Dockerfile.txt +17 -0
  2. customer_support_agent.py +164 -0
  3. requirements.txt +3 -0
Dockerfile.txt ADDED
@@ -0,0 +1,17 @@
+ # Use the official Python image
+ FROM python:3.10
+
+ # Set the working directory
+ WORKDIR /app
+
+ # Copy all files to the container
+ COPY . .
+
+ # Install dependencies
+ RUN pip install --no-cache-dir -r requirements.txt
+
+ # Expose the Streamlit port
+ EXPOSE 8501
+
+ # Run the Streamlit app
+ CMD ["streamlit", "run", "customer_support_agent.py", "--server.port=8501", "--server.address=0.0.0.0"]
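
Usage note: because the file is committed as Dockerfile.txt rather than Dockerfile, the image has to be built with an explicit -f flag. A minimal build-and-run sketch (the image name support-agent is a placeholder):

    docker build -f Dockerfile.txt -t support-agent .
    docker run -p 8501:8501 support-agent

The app expects Qdrant on localhost:6333; inside the container, localhost refers to the container itself, so Qdrant must either run in the same container, be reachable over a shared Docker network (for example via docker compose), or the container must be started with host networking.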
customer_support_agent.py ADDED
@@ -0,0 +1,164 @@
+ """
+ Optimized AI Customer Support Agent with Memory
+ ------------------------------------------------
+ This Streamlit application integrates an AI-powered customer support agent
+ that remembers past interactions using memory storage (Qdrant via Mem0).
+
+ Key Features:
+ - Uses OpenAI's GPT-4 for generating responses.
+ - Stores and retrieves relevant user interactions from memory.
+ - Generates synthetic customer data for testing.
+ - Allows users to view their stored memory and customer profile.
+
+ Enhancements in this optimized version:
+ - Improved readability and structure.
+ - Better error handling and logging.
+ - Removed redundant checks and streamlined memory retrieval.
+ - Clearer separation of query handling, memory management, and synthetic data generation.
+ """
+
+ import streamlit as st
+ from openai import OpenAI
+ from qdrant_client import QdrantClient
+ from mem0 import Memory
+ import os
+ import json
+ from datetime import datetime, timedelta
+
+ # Streamlit UI setup
+ st.title("AI Customer Support Agent with Memory")
+ st.caption("Chat with a customer support assistant who recalls past interactions.")
+
+ # OpenAI API key input
+ openai_api_key = st.text_input("Enter OpenAI API Key", type="password")
+ if openai_api_key:
+     os.environ['OPENAI_API_KEY'] = openai_api_key
+
+
+ class CustomerSupportAIAgent:
+     def __init__(self):
+         self.app_id = "customer-support"
+
+         # Create a Qdrant client to confirm the local instance is available
+         try:
+             self.qdrant_client = QdrantClient(host="localhost", port=6333)
+         except Exception as e:
+             st.error(f"Failed to connect to Qdrant: {e}")
+             st.stop()
+
+         # Initialize Mem0 memory backed by the local Qdrant instance
+         try:
+             self.memory = Memory.from_config({
+                 "vector_store": {
+                     "provider": "qdrant",
+                     "config": {"host": "localhost", "port": 6333},
+                 }
+             })
+         except Exception as e:
+             st.error(f"Failed to initialize memory: {e}")
+             st.stop()
+
+         # Initialize OpenAI client
+         self.client = OpenAI()
+
+     def handle_query(self, query, user_id):
+         """Processes user queries by searching memory and generating AI responses."""
+         try:
+             # Retrieve relevant past memories
+             relevant_memories = self.memory.search(query=query, user_id=user_id)
+             context = "\n".join(f"- {m['memory']}" for m in relevant_memories.get("results", []) if "memory" in m)
+             full_prompt = f"Relevant past information:\n{context}\nCustomer: {query}\nSupport Agent:"
+
+             # Generate AI response
+             response = self.client.chat.completions.create(
+                 model="gpt-4",
+                 messages=[
+                     {"role": "system", "content": "You are a customer support AI for TechGadgets.com."},
+                     {"role": "user", "content": full_prompt},
+                 ],
+             )
+             answer = response.choices[0].message.content
+
+             # Store both sides of the conversation in memory
+             for text, role in [(query, "user"), (answer, "assistant")]:
+                 self.memory.add(text, user_id=user_id, metadata={"app_id": self.app_id, "role": role})
+
+             return answer
+         except Exception as e:
+             st.error(f"Error handling query: {e}")
+             return "Sorry, I encountered an issue. Please try again."
+
+     def generate_synthetic_data(self, user_id):
+         """Creates and stores synthetic customer data for testing purposes."""
+         try:
+             today = datetime.now()
+             order_date = (today - timedelta(days=10)).strftime("%B %d, %Y")
+             expected_delivery = (today + timedelta(days=2)).strftime("%B %d, %Y")
+             prompt = f"""
+             Generate a realistic customer profile for TechGadgets.com user {user_id} with:
+             - Basic details
+             - A recent order (placed on {order_date}, delivery by {expected_delivery})
+             - Order history, shipping address, and past customer service interactions
+             - Shopping preferences
+             Return JSON format.
+             """
+
+             response = self.client.chat.completions.create(
+                 model="gpt-4",
+                 messages=[
+                     {"role": "system", "content": "Generate realistic customer profiles in JSON."},
+                     {"role": "user", "content": prompt},
+                 ],
+             )
+
+             # Assumes the model returns valid JSON; json.loads raises otherwise
+             customer_data = json.loads(response.choices[0].message.content)
+             for key, value in customer_data.items():
+                 if isinstance(value, list):
+                     for item in value:
+                         self.memory.add(json.dumps(item), user_id=user_id, metadata={"app_id": self.app_id, "role": "system"})
+                 else:
+                     self.memory.add(f"{key}: {json.dumps(value)}", user_id=user_id, metadata={"app_id": self.app_id, "role": "system"})
+
+             return customer_data
+         except Exception as e:
+             st.error(f"Error generating synthetic data: {e}")
+             return None
+
+
+ # Initialize AI agent
+ if openai_api_key:
+     support_agent = CustomerSupportAIAgent()
+
+     # Sidebar - customer ID input & actions
+     st.sidebar.title("Customer ID")
+     customer_id = st.sidebar.text_input("Enter Customer ID")
+
+     if customer_id:
+         # Synthetic data generation
+         if st.sidebar.button("Generate Synthetic Data"):
+             with st.spinner("Generating data..."):
+                 st.session_state.customer_data = support_agent.generate_synthetic_data(customer_id)
+             if st.session_state.customer_data:
+                 st.sidebar.success("Data Generated!")
+             else:
+                 st.sidebar.error("Generation Failed.")
+
+         # View stored customer data
+         if st.sidebar.button("View Profile"):
+             st.sidebar.json(st.session_state.get("customer_data", "No data available."))
+
+         # View memory
+         if st.sidebar.button("View Memory"):
+             memories = support_agent.memory.get_all(user_id=customer_id)
+             st.sidebar.write("\n".join(f"- {m['memory']}" for m in memories.get("results", []) if "memory" in m))
+     else:
+         st.sidebar.error("Enter a Customer ID.")
+
+     # Chat interface
+     if "messages" not in st.session_state:
+         st.session_state.messages = []
+
+     for msg in st.session_state.messages:
+         with st.chat_message(msg["role"]):
+             st.markdown(msg["content"])
+
+     query = st.chat_input("How can I assist you today?")
+     if query and customer_id:
+         st.session_state.messages.append({"role": "user", "content": query})
+         with st.chat_message("user"):
+             st.markdown(query)
+         with st.spinner("Generating response..."):
+             answer = support_agent.handle_query(query, user_id=customer_id)
+         st.session_state.messages.append({"role": "assistant", "content": answer})
+         with st.chat_message("assistant"):
+             st.markdown(answer)
+ else:
+     st.warning("Enter OpenAI API key to use the agent.")
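
The agent's memory layer reduces to three Mem0 calls: Memory.from_config() to bind the store to the local Qdrant instance, add() to persist each conversation turn, and search()/get_all() to pull context back. A minimal round-trip sketch outside Streamlit, assuming Qdrant is running on localhost:6333, OPENAI_API_KEY is set in the environment (Mem0's defaults use OpenAI for the LLM and embeddings), and a recent mem0 release whose search() returns a dict with a "results" list, the same shape the app's code expects; the user ID and stored text are purely illustrative:

    from mem0 import Memory

    # Point Mem0 at the same local Qdrant instance the Streamlit app uses
    memory = Memory.from_config({
        "vector_store": {
            "provider": "qdrant",
            "config": {"host": "localhost", "port": 6333},
        }
    })

    user_id = "customer-123"  # hypothetical test customer

    # Store an interaction, tagged the same way the agent tags conversation turns
    memory.add(
        "Ordered an UltraView 27-inch monitor on May 3.",
        user_id=user_id,
        metadata={"app_id": "customer-support", "role": "user"},
    )

    # Retrieve memories relevant to a new query
    hits = memory.search(query="When did I order my monitor?", user_id=user_id)
    for m in hits.get("results", []):
        print("-", m["memory"])

By default Mem0 runs LLM-based fact extraction on add(), so the stored memory text may be a condensed paraphrase of the input rather than the raw string.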
requirements.txt ADDED
@@ -0,0 +1,3 @@
+ streamlit
+ openai
+ mem0
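
Two dependency caveats: the module imported as mem0 is distributed on PyPI as mem0ai, so the last line likely needs to read mem0ai rather than mem0; and customer_support_agent.py imports qdrant_client directly, so qdrant-client should be listed explicitly unless the installed mem0 build already pulls it in. Pinning versions would also make the Docker build reproducible.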