zayeem00 commited on
Commit
35845f5
·
verified ·
1 Parent(s): 5fb886e

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +154 -0
app.py ADDED
@@ -0,0 +1,154 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import openai
2
+ from pinecone import Pinecone, ServerlessSpec
3
+ import pandas as pd
4
+ import gradio as gr
5
+ from typing import List, Tuple
6
+
7
# Embed a piece of text with OpenAI's embeddings endpoint.
def get_embedding(text: str, openai_api_key: str, model: str = "text-embedding-ada-002") -> List[float]:
    """Return the embedding vector for *text*, or an empty list on any API failure.

    Uses the legacy (pre-1.0) ``openai.Embedding`` interface; the key is set
    globally on the module before each call.
    """
    openai.api_key = openai_api_key
    try:
        result = openai.Embedding.create(model=model, input=text)
    except Exception as e:
        # Best-effort: log and signal failure with an empty vector so callers
        # can simply skip this item.
        print(f"Error getting embedding: {e}")
        return []
    return result['data'][0]['embedding']
20
# Function to process the uploaded CSV and store embeddings in Pinecone
def process_csv(file, openai_api_key: str, pinecone_api_key: str, pinecone_env: str) -> str:
    """Embed every product description in the uploaded catalog and upsert into Pinecone.

    Parameters
    ----------
    file : str | file-like
        Path to the CSV. Gradio's ``gr.File(type="filepath")`` passes a plain
        string path; older Gradio versions pass a tempfile wrapper exposing
        ``.name``. Both are accepted.
    openai_api_key : str
        Key used to embed each row's ``description`` column.
    pinecone_api_key, pinecone_env : str
        Pinecone credentials; ``pinecone_env`` is used as the serverless region.

    Returns
    -------
    str
        Human-readable status message for the UI textbox.
    """
    try:
        # BUGFIX: with gr.File(type="filepath") the callback receives a str,
        # so the original `file.name` raised AttributeError and every upload
        # reported "Failed to process CSV file." Accept either form.
        csv_path = file if isinstance(file, str) else getattr(file, "name", file)
        df = pd.read_csv(csv_path)

        # Initialize Pinecone
        pc = Pinecone(api_key=pinecone_api_key)
        index_name = "product-recommendations"

        # Create the index on first use; 1536 is the output dimension of
        # text-embedding-ada-002.
        if index_name not in pc.list_indexes().names():
            try:
                pc.create_index(
                    name=index_name,
                    dimension=1536,
                    spec=ServerlessSpec(cloud="aws", region=pinecone_env)
                )
            except Exception as e:
                print(f"Error creating Pinecone index: {e}")
                return "Failed to create Pinecone index."

        index = pc.Index(index_name)

        embeddings = []
        for _, row in df.iterrows():
            embedding = get_embedding(row['description'], openai_api_key)
            if embedding:  # skip rows whose embedding call failed
                embeddings.append((
                    str(row['product_id']),
                    embedding,
                    {'product_name': row['product_name'], 'image_url': row['image_url']},
                ))

        if embeddings:
            try:
                index.upsert(embeddings)
            except Exception as e:
                print(f"Error upserting embeddings to Pinecone: {e}")
                return "Failed to upsert embeddings."

        return "Product catalog processed and embeddings stored in Pinecone."
    except Exception as e:
        # Catch-all boundary for the UI: report failure instead of crashing Gradio.
        print(f"Error processing CSV file: {e}")
        return "Failed to process CSV file."
61
# Recommendation logic
def recommend_products(query: str, openai_api_key: str, pinecone_api_key: str, pinecone_env: str, top_k: int = 5) -> List[Tuple[str, str]]:
    """Return up to *top_k* ``(image_url, caption)`` pairs most similar to *query*.

    An empty list signals failure at any stage (embedding or Pinecone query).
    ``pinecone_env`` is accepted for signature symmetry with the other
    callbacks but is not needed to query an existing index.
    """
    query_embedding = get_embedding(query, openai_api_key)
    if not query_embedding:
        return []

    try:
        # Connect to the index populated by process_csv().
        pc = Pinecone(api_key=pinecone_api_key)
        index = pc.Index("product-recommendations")
        results = index.query(vector=query_embedding, top_k=top_k, include_metadata=True)
        matches = []
        for match in results['matches']:
            caption = f"{match['metadata']['product_name']} (Score: {match['score']})"
            matches.append((match['metadata']['image_url'], caption))
        return matches
    except Exception as e:
        print(f"Error querying Pinecone: {e}")
        return []
80
# Function to generate contextual message
def generate_contextual_message(query: str, recommendations: List[Tuple[str, str]], openai_api_key: str) -> str:
    """Ask the chat model for a personalized blurb about *recommendations*.

    Only the caption half of each ``(image_url, caption)`` pair is fed to the
    prompt. Returns a fixed error string if the API call fails.
    """
    openai.api_key = openai_api_key
    product_names = [item[1] for item in recommendations]
    prompt = f"User query: {query}\nRecommended products: {', '.join(product_names)}\nGenerate a personalized message for the user based on these recommendations."

    messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": prompt},
    ]
    try:
        # Legacy (pre-1.0) openai chat interface.
        response = openai.ChatCompletion.create(
            model="gpt-4",  # or use "gpt-3.5-turbo" if preferred
            messages=messages,
        )
    except Exception as e:
        print(f"Error generating contextual message: {e}")
        return "Failed to generate contextual message."
    return response['choices'][0]['message']['content']
97
# Gradio interface
def handle_file_upload(file, openai_api_key, pinecone_api_key, pinecone_env):
    """Upload-event adapter: hand the catalog file straight to process_csv."""
    status = process_csv(file, openai_api_key, pinecone_api_key, pinecone_env)
    return status
101
def display_recommendations(user_input, openai_api_key, pinecone_api_key, pinecone_env):
    """Return ``(gallery_items, contextual_message)`` for a single user query."""
    recs = recommend_products(user_input, openai_api_key, pinecone_api_key, pinecone_env)
    message = generate_contextual_message(user_input, recs, openai_api_key)
    return recs, message
106
# Function to update outputs
def update_outputs(query_input, openai_api_key, pinecone_api_key, pinecone_env, chat_history):
    """Button callback: fetch recommendations and append one chat turn.

    Parameters
    ----------
    chat_history : list
        Existing conversation as a list of ``(user_message, bot_message)``
        pairs (the format ``gr.Chatbot`` renders).

    Returns
    -------
    tuple
        ``(gallery_items, updated_chat_history)``.
    """
    recommendations, contextual_message = display_recommendations(
        query_input, openai_api_key, pinecone_api_key, pinecone_env
    )

    # BUGFIX: gr.Chatbot expects (user_msg, bot_msg) pairs. The original code
    # appended ("user", query) and ("assistant", message), which displayed the
    # literal role strings as user messages. Store one pair per turn instead.
    new_chat_history = chat_history + [(query_input, contextual_message)]

    return recommendations, new_chat_history
115
# Create Gradio Interface
def build_interface():
    """Assemble the three-tab Gradio app: API keys, catalog upload, recommendations.

    Returns the ``gr.Blocks`` instance (caller is responsible for ``launch()``).
    """
    with gr.Blocks() as interface:
        gr.Markdown("## Product Recommender System")

        with gr.Tab("API Keys"):
            openai_api_key_input = gr.Textbox(label="OpenAI API Key", type="password")
            pinecone_api_key_input = gr.Textbox(label="Pinecone API Key", type="password")
            pinecone_env_input = gr.Textbox(label="Pinecone Environment", placeholder="e.g., us-west1-gcp")

        with gr.Tab("Upload Catalog"):
            upload_button = gr.File(label="Upload CSV", type="filepath")
            output = gr.Textbox()
            upload_button.upload(
                handle_file_upload,
                inputs=[upload_button, openai_api_key_input, pinecone_api_key_input, pinecone_env_input],
                outputs=output,
            )

        with gr.Tab("Get Recommendations"):
            with gr.Row():
                with gr.Column(scale=1):
                    chatbot = gr.Chatbot(label="Chat")
                    query_input = gr.Textbox(label="Enter your product preference...", show_label=False, placeholder="Type your query here...")
                    recommend_button = gr.Button("Get Recommendations")

                with gr.Column(scale=1):
                    recommendations_output = gr.Gallery(label="Recommendations")

            # BUGFIX: the old gr.State history was passed as an input but never
            # listed in `outputs`, so it was never written back and the
            # conversation reset on every click. The chatbot component itself
            # now carries the running history: its current value feeds the
            # callback's chat_history parameter and the updated list is
            # written back to it.
            recommend_button.click(
                update_outputs,
                inputs=[query_input, openai_api_key_input, pinecone_api_key_input, pinecone_env_input, chatbot],
                outputs=[recommendations_output, chatbot],
            )

    return interface
151
# Run the interface
if __name__ == "__main__":
    # Build the Gradio app and start serving it when run as a script.
    build_interface().launch()