Prathamesh1420 committed on
Commit 69b18c2 · verified · 1 Parent(s): 2122a17

Update main.py

Files changed (1):
  1. main.py  +100  -55
main.py CHANGED
@@ -1,5 +1,4 @@
  import streamlit as st
- import altair as alt
  import os
  from PIL import Image
  import numpy as np
@@ -7,74 +6,120 @@ import pickle
  import tensorflow
  from tensorflow.keras.preprocessing import image
  from tensorflow.keras.layers import GlobalMaxPooling2D
- from tensorflow.keras.applications.resnet50 import ResNet50,preprocess_input
+ from tensorflow.keras.applications.resnet50 import ResNet50, preprocess_input
  from sklearn.neighbors import NearestNeighbors
  from numpy.linalg import norm
+ from chatbot import Chatbot  # Assuming you have a chatbot module

- feature_list = np.array(pickle.load(open('embeddings.pkl','rb')))
- filenames = pickle.load(open('filenames.pkl','rb'))
-
- model = ResNet50(weights='imagenet',include_top=False,input_shape=(224,224,3))
- model.trainable = False
-
- model = tensorflow.keras.Sequential([
-     model,
-     GlobalMaxPooling2D()
- ])
-
- st.title('Fashion Recommender System')
-
- def save_uploaded_file(uploaded_file):
-     try:
-         with open(os.path.join('uploads',uploaded_file.name),'wb') as f:
-             f.write(uploaded_file.getbuffer())
-         return 1
-     except:
-         return 0
-
- def feature_extraction(img_path,model):
+ # Define function for feature extraction
+ def feature_extraction(img_path, model):
      img = image.load_img(img_path, target_size=(224, 224))
      img_array = image.img_to_array(img)
      expanded_img_array = np.expand_dims(img_array, axis=0)
      preprocessed_img = preprocess_input(expanded_img_array)
      result = model.predict(preprocessed_img).flatten()
      normalized_result = result / norm(result)
-
      return normalized_result

- def recommend(features,feature_list):
+ # Define function for recommendation
+ def recommend(features, feature_list):
      neighbors = NearestNeighbors(n_neighbors=6, algorithm='brute', metric='euclidean')
      neighbors.fit(feature_list)
-
      distances, indices = neighbors.kneighbors([features])
-
      return indices

- # steps
- # file upload -> save
- uploaded_file = st.file_uploader("Choose an image")
- if uploaded_file is not None:
-     if save_uploaded_file(uploaded_file):
-         # display the file
-         display_image = Image.open(uploaded_file)
-         st.image(display_image)
-         # feature extract
-         features = feature_extraction(os.path.join("uploads",uploaded_file.name),model)
-         #st.text(features)
-         # recommendention
-         indices = recommend(features,feature_list)
-         # show
-         col1,col2,col3,col4,col5 = st.beta_columns(5)
+ # Function to save uploaded file
+ def save_uploaded_file(uploaded_file):
+     try:
+         # Ensure the uploads directory exists
+         if not os.path.exists('uploads'):
+             os.makedirs('uploads')
+
+         file_path = os.path.join('uploads', uploaded_file.name)
+         with open(file_path, 'wb') as f:
+             f.write(uploaded_file.getbuffer())
+         st.success(f"File saved to {file_path}")
+         return True
+     except Exception as e:
+         st.error(f"Error saving file: {e}")
+         return False
+
+ # Function to show dashboard content
+ def show_dashboard():
+     st.header("Fashion Recommender System")
+     chatbot = Chatbot()
+     # Load ResNet model for image feature extraction
+     model = ResNet50(weights='imagenet', include_top=False, input_shape=(224, 224, 3))
+     model.trainable = False
+     model = tensorflow.keras.Sequential([
+         model,
+         GlobalMaxPooling2D()
+     ])
+
+     try:
+         feature_list = np.array(pickle.load(open('embeddings.pkl', 'rb')))
+         filenames = pickle.load(open('filenames.pkl', 'rb'))
+     except Exception as e:
+         st.error(f"Error loading pickle files: {e}")
+         return
+
+     # File upload section
+     uploaded_file = st.file_uploader("Choose an image")
+     if uploaded_file is not None:
+         if save_uploaded_file(uploaded_file):
+             # Display the uploaded image
+             display_image = Image.open(uploaded_file)
+             st.image(display_image)
+
+             # Feature extraction
+             features = feature_extraction(os.path.join("uploads", uploaded_file.name), model)
+
+             # Recommendation
+             indices = recommend(features, feature_list)
+
+             # Display recommended products
+             col1, col2, col3, col4, col5 = st.columns(5)
+             columns = [col1, col2, col3, col4, col5]
+
+             for col, idx in zip(columns, indices[0]):
+                 file_path = filenames[idx]
+                 try:
+                     if os.path.exists(file_path):
+                         with col:
+                             st.image(file_path)
+                     else:
+                         st.error(f"File does not exist: {file_path}")
+                 except Exception as e:
+                     st.error(f"Error opening file {file_path}: {e}")
+         else:
+             st.error("Some error occurred in file upload")
+
+     # Chatbot section
+     user_question = st.text_input("Ask a question:")
+     if user_question:
+         bot_response, recommended_products = chatbot.generate_response(user_question)
+         st.write("Chatbot:", bot_response)
+
+         # Display recommended products
+         for result in recommended_products:
+             pid = result['corpus_id']
+             product_info = chatbot.product_data[pid]
+             st.write("Product Name:", product_info['productDisplayName'])
+             st.write("Category:", product_info['masterCategory'])
+             st.write("Article Type:", product_info['articleType'])
+             st.write("Usage:", product_info['usage'])
+             st.write("Season:", product_info['season'])
+             st.write("Gender:", product_info['gender'])
+             st.image(chatbot.images[pid])
+
+ # Main Streamlit app
+ def main():
+     # Give title to the app
+     st.title("Fashion Recommender System")
+
+     # Show dashboard content directly
+     show_dashboard()

-         with col1:
-             st.image(filenames[indices[0][0]])
-         with col2:
-             st.image(filenames[indices[0][1]])
-         with col3:
-             st.image(filenames[indices[0][2]])
-         with col4:
-             st.image(filenames[indices[0][3]])
-         with col5:
-             st.image(filenames[indices[0][4]])
-     else:
-         st.header("Some error occured in file upload")
+ # Run the main app
+ if __name__ == "__main__":
+     main()
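
Note: this commit imports `Chatbot` from a `chatbot` module that is not part of the diff; the inline comment itself marks it as assumed. Judging only from the calls made in `show_dashboard()`, that module needs roughly the interface sketched below. This is a hypothetical stand-in for reference, not the repository's actual implementation, and the example product entry is made up.

# chatbot.py (hypothetical stub, inferred from how show_dashboard() uses it)

class Chatbot:
    def __init__(self):
        # product_data: indexed by corpus_id; keys match what main.py reads
        self.product_data = [
            {
                'productDisplayName': 'Example Slim Fit Shirt',
                'masterCategory': 'Apparel',
                'articleType': 'Shirts',
                'usage': 'Casual',
                'season': 'Summer',
                'gender': 'Men',
            }
        ]
        # images: indexed by corpus_id; values are anything st.image accepts
        self.images = ['uploads/example.jpg']

    def generate_response(self, question):
        # Must return (reply_text, results), where each result carries a
        # 'corpus_id' that indexes product_data and images, as main.py assumes.
        results = [{'corpus_id': 0, 'score': 1.0}]
        return f"Here is something that matches '{question}'.", results

Any module exposing these three members (`generate_response`, `product_data`, `images`) keeps the Streamlit app importable and runnable.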
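Note: `show_dashboard()` reads `embeddings.pkl` and `filenames.pkl`, but the commit does not show how those files are produced. Presumably they come from an offline pass that runs the same ResNet50 + GlobalMaxPooling2D extractor over the catalog images. A minimal sketch under that assumption follows; the script name `build_embeddings.py` and the `images/` folder are illustrative, not taken from the repository.

# build_embeddings.py (assumed offline indexing step, not part of this commit)
import os
import pickle

import numpy as np
import tensorflow
from numpy.linalg import norm
from tensorflow.keras.applications.resnet50 import ResNet50, preprocess_input
from tensorflow.keras.layers import GlobalMaxPooling2D
from tensorflow.keras.preprocessing import image

# Same frozen ResNet50 + global-max-pooling feature extractor as main.py
base = ResNet50(weights='imagenet', include_top=False, input_shape=(224, 224, 3))
base.trainable = False
model = tensorflow.keras.Sequential([base, GlobalMaxPooling2D()])

def extract_features(img_path, model):
    img = image.load_img(img_path, target_size=(224, 224))
    img_array = image.img_to_array(img)
    preprocessed = preprocess_input(np.expand_dims(img_array, axis=0))
    result = model.predict(preprocessed).flatten()
    return result / norm(result)  # L2-normalize, as in feature_extraction()

# 'images/' is an assumed catalog folder of product pictures
filenames = [os.path.join('images', f) for f in sorted(os.listdir('images'))]
feature_list = [extract_features(f, model) for f in filenames]

pickle.dump(np.array(feature_list), open('embeddings.pkl', 'wb'))
pickle.dump(filenames, open('filenames.pkl', 'wb'))

Running a step like this once before launching the app gives the `NearestNeighbors` search in `recommend()` a feature matrix and a file list that line up index-for-index.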