JeCabrera committed
Commit 9f73263 (verified)
Parent(s): 3657c66

Update app.py

Files changed (1)
  1. app.py +29 -17
app.py CHANGED
@@ -148,7 +148,7 @@ def chat(message, history):
     except Exception as e:
         return f"Error: {e}"
 
-# Remove the first chat function as it's duplicated
+# Eliminamos la primera función chat duplicada
 def process_file(file):
     file_path = file.name
     file_extension = os.path.splitext(file_path)[1].lower()
@@ -158,33 +158,44 @@ def process_file(file):
         with open(file_path, 'r', encoding='utf-8') as f:
             content = f.read()
     else:
-        content = f"Archivo subido: {os.path.basename(file_path)} (Este tipo de archivo no puede ser procesado para mostrar su contenido)"
+        content = f"Archivo subido: {os.path.basename(file_path)}"
 
     return content
 
 def chat(message, history, file=None):
     try:
-        file_content = ""
-        if file is not None:
-            file_content = f"\n\nArchivo subido: {os.path.basename(file.name)}\n"
-            file_content += process_file(file)
-
-        # Combinar el mensaje del usuario con el contenido del archivo
+        # Preparamos el mensaje combinado solo si hay un archivo y el mensaje está vacío
         combined_message = message
-        if file_content:
-            combined_message += f"\n\nContenido del archivo:\n{file_content}"
+        if file is not None and message.strip() == "":
+            combined_message = "Analiza este archivo"
+        elif file is not None:
+            # Solo añadimos una nota sobre el archivo, no su contenido completo
+            combined_message += f"\n\n(Se ha subido un archivo: {os.path.basename(file.name)})"
+
+        # Construimos el historial de mensajes para el modelo
+        formatted_history = []
+        for msg in history:
+            if isinstance(msg, dict):
+                if msg["role"] == "user":
+                    formatted_history.append({"role": "user", "parts": [msg["content"]]})
+                else:
+                    formatted_history.append({"role": "model", "parts": [msg["content"]]})
+            else:
+                formatted_history.append({"role": "user", "parts": [msg[0]]})
+                formatted_history.append({"role": "model", "parts": [msg[1]]})
 
         messages = [
            {"role": "user", "parts": [system_prompt]},
-           *[{"role": "user", "parts": [msg["content"]]} if isinstance(msg, dict) else {"role": "user", "parts": [msg[0]]} for msg in history],
+           *formatted_history,
            {"role": "user", "parts": [combined_message]}
         ]
+
         response = model.generate_content(messages)
 
-        # Return in the format expected by gr.Chatbot with type="messages"
-        return [{"role": "user", "content": combined_message}, {"role": "assistant", "content": response.text}]
+        # Devolvemos en el formato esperado por gr.Chatbot con type="messages"
+        return history + [{"role": "user", "content": message}, {"role": "assistant", "content": response.text}]
     except Exception as e:
-        return [{"role": "user", "content": combined_message}, {"role": "assistant", "content": f"Error: {e}"}]
+        return history + [{"role": "user", "content": message}, {"role": "assistant", "content": f"Error: {e}"}]
 
 with gr.Blocks(title="馃Chucho Bot - CopyXpert Sales Assistant") as demo:
     gr.Markdown("# 馃Chucho Bot - CopyXpert Sales Assistant")
@@ -192,7 +203,7 @@ with gr.Blocks(title="馃Chucho Bot - CopyXpert Sales Assistant") as demo:
 
     with gr.Row():
         with gr.Column(scale=4):
-            chatbot = gr.Chatbot(type="messages")
+            chatbot = gr.Chatbot(type="messages", height=500)
             msg = gr.Textbox(
                 placeholder="Escribe tu pregunta aquí...",
                 container=False,
@@ -207,7 +218,8 @@ with gr.Blocks(title="馃Chucho Bot - CopyXpert Sales Assistant") as demo:
             file_upload = gr.File(
                 label="Sube un archivo (opcional)",
                 file_types=["text", "pdf", "docx", "csv", "xlsx", "image"],
-                type="filepath"
+                type="filepath",
+                visible=True
             )
 
             examples = gr.Examples(
@@ -234,7 +246,7 @@ with gr.Blocks(title="馃Chucho Bot - CopyXpert Sales Assistant") as demo:
         queue=False
     )
 
-    clear.click(lambda: None, None, chatbot, queue=False)
+    clear.click(lambda: [], None, chatbot, queue=False)
     msg.submit(
         chat,
         inputs=[msg, chatbot, file_upload],
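
For reference, the core of this commit is the conversion of the Gradio chat history into the role/parts structure that Gemini's generate_content expects, plus returning the full updated history for gr.Chatbot(type="messages"). The sketch below isolates that mapping so it can be run on its own; the helper name format_history and the sample history are illustrative only (in app.py the loop is inlined in chat()), and no Gemini call is made.

# Standalone sketch of the history conversion added in this commit.
# It mirrors the formatted_history loop from chat(); the sample history
# below is hypothetical and no API call is made, so it runs offline.

def format_history(history):
    """Map gr.Chatbot(type="messages") history (or legacy [user, bot] pairs)
    to the {"role", "parts"} messages expected by model.generate_content."""
    formatted = []
    for msg in history:
        if isinstance(msg, dict):
            # Messages-style entry: {"role": "user"/"assistant", "content": ...}
            role = "user" if msg["role"] == "user" else "model"
            formatted.append({"role": role, "parts": [msg["content"]]})
        else:
            # Tuple-style entry: (user_message, bot_reply)
            formatted.append({"role": "user", "parts": [msg[0]]})
            formatted.append({"role": "model", "parts": [msg[1]]})
    return formatted

if __name__ == "__main__":
    sample_history = [
        {"role": "user", "content": "Hola"},
        {"role": "assistant", "content": "¡Hola! ¿En qué puedo ayudarte?"},
    ]
    print(format_history(sample_history))
    # [{'role': 'user', 'parts': ['Hola']},
    #  {'role': 'model', 'parts': ['¡Hola! ¿En qué puedo ayudarte?']}]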