Spaces:
Sleeping
Sleeping
Delete app.py
Browse files
app.py
DELETED
@@ -1,180 +0,0 @@
|
|
1 |
-
import openai
|
2 |
-
from pinecone import Pinecone, ServerlessSpec
|
3 |
-
import pandas as pd
|
4 |
-
import gradio as gr
|
5 |
-
from typing import List, Tuple
|
6 |
-
|
7 |
-
# Function to get embeddings from OpenAI's model
def get_embedding(text: str, openai_api_key: str, model: str = "text-embedding-ada-002") -> List[float]:
    """Embed *text* with the given OpenAI embedding model.

    Args:
        text: The text to embed.
        openai_api_key: API key, set on the openai module before the call.
        model: Embedding model name (defaults to text-embedding-ada-002).

    Returns:
        The embedding vector, or an empty list when the API call (or
        response parsing) fails — callers treat [] as "no embedding".
    """
    openai.api_key = openai_api_key
    try:
        # Response parsing stays inside the try so a malformed payload is
        # swallowed the same way a transport error is.
        result = openai.Embedding.create(model=model, input=text)
        return result['data'][0]['embedding']
    except Exception as exc:
        print(f"Error getting embedding: {exc}")
        return []
|
19 |
-
|
20 |
-
# Function to process the uploaded CSV and store embeddings in Pinecone
def process_csv(file, openai_api_key: str, pinecone_api_key: str, pinecone_env: str) -> str:
    """Read a product-catalog CSV, embed each description, and upsert to Pinecone.

    The CSV must contain 'product_id', 'product_name', 'description' and
    'image_url' columns.

    Args:
        file: Path string (gr.File with type="filepath" passes a plain str)
            or a file-like object exposing a ``.name`` path attribute.
        openai_api_key: Key used for the embedding calls.
        pinecone_api_key: Key used to create/open the Pinecone index.
        pinecone_env: AWS region for the serverless index (e.g. "us-east-1").

    Returns:
        A human-readable status string; never raises.
    """
    try:
        # Bug fix: gr.File(type="filepath") hands the callback a plain str,
        # so the original `file.name` raised AttributeError and every upload
        # fell into the broad except below. Accept both str and file-like.
        path = file if isinstance(file, str) else file.name
        df = pd.read_csv(path)

        # Initialize Pinecone
        pc = Pinecone(api_key=pinecone_api_key)
        index_name = "product-recommendations"

        # Create the index only if it does not already exist.
        if index_name not in pc.list_indexes().names():
            try:
                pc.create_index(
                    name=index_name,
                    dimension=1536,  # output size of text-embedding-ada-002
                    spec=ServerlessSpec(cloud="aws", region=pinecone_env)
                )
            except Exception as e:
                print(f"Error creating Pinecone index: {e}")
                return "Failed to create Pinecone index."

        index = pc.Index(index_name)

        embeddings = []
        for _, row in df.iterrows():
            embedding = get_embedding(row['description'], openai_api_key)
            # get_embedding returns [] on failure; skip those rows.
            if embedding:
                embeddings.append((
                    str(row['product_id']),
                    embedding,
                    {'product_name': row['product_name'], 'image_url': row['image_url']},
                ))

        if embeddings:
            try:
                # Upsert in batches: a single request with a large catalog
                # exceeds Pinecone's per-request size limits.
                for start in range(0, len(embeddings), 100):
                    index.upsert(embeddings[start:start + 100])
            except Exception as e:
                print(f"Error upserting embeddings to Pinecone: {e}")
                return "Failed to upsert embeddings."

        return "Product catalog processed and embeddings stored in Pinecone."
    except Exception as e:
        print(f"Error processing CSV file: {e}")
        return "Failed to process CSV file."
|
60 |
-
|
61 |
-
# Recommendation logic
def recommend_products(query: str, openai_api_key: str, pinecone_api_key: str, pinecone_env: str, top_k: int = 10) -> List[Tuple[str, str]]:
    """Return up to *top_k* (image_url, caption) pairs matching *query*.

    The caption is the product name with its similarity score appended.
    Returns [] when the query cannot be embedded or the Pinecone query fails.
    """
    query_embedding = get_embedding(query, openai_api_key)
    if not query_embedding:
        return []

    try:
        # Initialize Pinecone and open the shared catalog index.
        pc = Pinecone(api_key=pinecone_api_key)
        index = pc.Index("product-recommendations")

        results = index.query(vector=query_embedding, top_k=top_k, include_metadata=True)

        recommended = []
        for match in results['matches']:
            meta = match['metadata']
            recommended.append((meta['image_url'], f"{meta['product_name']} (Score: {match['score']})"))
        return recommended
    except Exception as e:
        print(f"Error querying Pinecone: {e}")
        return []
|
79 |
-
|
80 |
-
# Function to generate contextual message
def generate_contextual_message(query: str, recommendations: List[Tuple[str, str]], openai_api_key: str, system_prompt: str) -> str:
    """Ask the chat model for a message tying *recommendations* to *query*.

    Args:
        query: The user's original free-text request.
        recommendations: (image_url, caption) pairs from recommend_products.
        openai_api_key: Key set on the openai module before the call.
        system_prompt: Caller-supplied system instruction for the assistant.

    Returns:
        The model's reply text, or a fixed failure string on any error.
    """
    openai.api_key = openai_api_key
    # rec is (image_url, caption); the caption carries the product name.
    product_names = [rec[1] for rec in recommendations]
    prompt = f"User query: {query}\nRecommended products: {', '.join(product_names)}"

    try:
        response = openai.ChatCompletion.create(
            model="gpt-4",  # or use "gpt-3.5-turbo" if preferred
            messages=[
                # Bug fix: system_prompt used to be appended to the *user*
                # prompt while the system role was hard-coded; route the
                # caller's prompt to the system role (default when empty).
                {"role": "system", "content": system_prompt or "You are a helpful assistant."},
                {"role": "user", "content": prompt},
            ]
        )
        return response['choices'][0]['message']['content']
    except Exception as e:
        print(f"Error generating contextual message: {e}")
        return "Failed to generate contextual message."
|
96 |
-
|
97 |
-
# Gradio interface
def handle_file_upload(file, openai_api_key, pinecone_api_key, pinecone_env):
    """Gradio upload callback: forward the catalog file to process_csv."""
    status = process_csv(file, openai_api_key, pinecone_api_key, pinecone_env)
    return status
|
100 |
-
|
101 |
-
def display_recommendations(user_input, openai_api_key, pinecone_api_key, pinecone_env, system_prompt):
    """Fetch gallery recommendations for *user_input* and a chat message about them."""
    recs = recommend_products(user_input, openai_api_key, pinecone_api_key, pinecone_env)
    message = generate_contextual_message(user_input, recs, openai_api_key, system_prompt)
    return recs, message
|
105 |
-
|
106 |
-
# Function to update outputs
def update_outputs(query_input, openai_api_key, pinecone_api_key, pinecone_env, chat_history, system_prompt):
    """Recommend-button callback.

    Returns the gallery items, the chat log extended with the new exchange,
    and an update that clears the query textbox.
    """
    recs, message = display_recommendations(
        query_input, openai_api_key, pinecone_api_key, pinecone_env, system_prompt
    )

    # Concatenate rather than append so the stored history is not mutated.
    updated_history = chat_history + [[query_input, message]]

    return recs, updated_history, gr.update(value="")
|
114 |
-
|
115 |
-
# Custom CSS targeting gradio's generated (svelte-hashed) class names.
# NOTE(review): the svelte-* hash suffixes are specific to one gradio build
# and will silently stop matching after an upgrade — verify against the
# deployed gradio version.
css = """
.lg.svelte-cmf5ev {background-color: #8A2BE2 !important;}
.user.svelte-1pjfiar.svelte-1pjfiar.svelte-1pjfiar {padding: 7px !important;border-radius: 10px 10px 0px 10px;width: fit-content;background-color: #E6E6FA !important ;border-color:#E6E6FA !important}
.bot.svelte-1pjfiar.svelte-1pjfiar.svelte-1pjfiar {padding: 7px !important;border-radius: 10px 10px 10px 0px;width : fit-content !important;border: 1.5px solid #9370DB !important;background: #FFFFFF 0% 0% no-repeat padding-box !important;box-shadow: 0px 3px 6px #0000001A !important;border: 2px solid #9370DB !important;}
.primary.svelte-cmf5ev {box-shadow: 0px 3px 6px #0000001A !important;border: 2px solid #9370DB !important;background: #8A2BE2 !important;width: fit-content;}
.primary.svelte-cmf5ev {color: white !important}
textarea.scroll-hide.svelte-1f354aw {font-family:'Roboto','Arial',sans-serif;font-size:14px}
label.svelte-1b6s6s { background: #9370DB 0% 0% no-repeat padding-box;color: white;width: 100%;}
label.svelte-1b6s6s {background: #9370DB 0% 0% no-repeat padding-box;color: white;width: 100%;font-size:20px;font-family:'Roboto','Arial',sans-serif; border-radius: 0px 0px 10px 10px;}
.wrapper.svelte-nab2ao{background-color : #F7F7F7 }
svg.iconify.iconify--carbon{width:15px; height:15px}
.thumbnail-item.svelte-fiatpe.svelte-fiatpe:hover {--ring-color: #9370DB !important;}
"""

# Initial greeting shown in the chatbot before the user sends anything.
default_chat = [["Welcome! I'm your AI-powered product recommendation bot. Ask me anything about finding the perfect product for you.", "I'm here to assist you with any product-related inquiries. Let's find what you need!"]]
|
130 |
-
|
131 |
-
# Create Gradio Interface
def build_interface():
    """Assemble the three-tab Gradio Blocks app and return it (not launched).

    Tabs: API keys/config, catalog upload, and the recommendation chat +
    gallery view wired to update_outputs / handle_file_upload.
    """
    with gr.Blocks(title="AI Smart Shopper", head="True", css=css) as interface:
        gr.Markdown("""<div style="text-align: center; font-weight: bold;"> <h1>AI Smart Shopper</h1> </div>""")

        # Tab 1: credentials and the assistant's system prompt.
        with gr.Tab("API Keys"):
            openai_api_key_input = gr.Textbox(label="OpenAI API Key", type="password")
            pinecone_api_key_input = gr.Textbox(label="Pinecone API Key", type="password")
            pinecone_env_input = gr.Textbox(label="Pinecone Environment", placeholder="e.g., us-east-1")
            system_prompt_input = gr.Textbox(label="System Prompt", placeholder="Enter a system prompt for the assistant...")

        # Tab 2: catalog CSV upload; type="filepath" passes a path string.
        with gr.Tab("Upload Catalog"):
            upload_button = gr.File(label="Upload CSV", type="filepath")
            output = gr.Textbox()
            upload_button.upload(handle_file_upload, inputs=[upload_button, openai_api_key_input, pinecone_api_key_input, pinecone_env_input], outputs=output)

        # Tab 3: chat-driven recommendations with a result gallery.
        with gr.Tab("Get Recommendations"):
            with gr.Row():
                with gr.Column(scale=1):
                    chatbot = gr.Chatbot(value=default_chat, label="Recommender Chatbot", show_label=True)
                    query_input = gr.Textbox(label="Enter your product preference...", show_label=False, placeholder="Type your query here...")
                    with gr.Row():
                        with gr.Column(scale=1, min_width=150):
                            recommend_button = gr.Button("Get Recommendations")
                        with gr.Column(scale=1, min_width=150):
                            clear_button = gr.Button("Clear")
                    # Define state for chat history
                    # NOTE(review): chat_history is read by update_outputs but
                    # never listed in the click outputs below, so the state
                    # stays [] and each reply replaces the chatbot contents
                    # (the default greeting disappears) — confirm intended.
                    chat_history = gr.State([])

                # Define outputs
                with gr.Column(scale=1):
                    recommendations_output = gr.Gallery(label="Recommendations For You", show_label=False, elem_id="gallery", columns=[3], rows=[1], object_fit="contain", height="auto", scale=5)

            recommend_button.click(
                update_outputs,
                inputs=[query_input, openai_api_key_input, pinecone_api_key_input, pinecone_env_input, chat_history, system_prompt_input],
                outputs=[recommendations_output, chatbot, query_input]
            )

            # Clear resets the chat to the greeting and empties box/state/gallery.
            clear_button.click(
                lambda: (gr.update(value=default_chat), gr.update(value=""), gr.update(value=[]), gr.update(value=[])),
                outputs=[chatbot, query_input, chat_history, recommendations_output]
            )

    return interface
|
176 |
-
|
177 |
-
# Run the interface
if __name__ == "__main__":
    # Build and launch in one step; launch() blocks until the server stops.
    build_interface().launch()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|