patrickbdevaney committed on
Commit 8db52a5 · verified · 1 Parent(s): c901ab2

Update app.py

Files changed (1)
  1. app.py +19 -59
app.py CHANGED
@@ -3,13 +3,11 @@ import json
 import chromadb
 import numpy as np
 from dotenv import load_dotenv
-from fastapi import FastAPI, HTTPException
-from pydantic import BaseModel
+import gradio as gr
+from groq import Groq
 import torch
 from transformers import AutoTokenizer, AutoModel
-from groq import Groq
-import gradio as gr
-import httpx  # Used to make async HTTP requests to FastAPI
+import logging

 # Load environment variables
 load_dotenv()
@@ -29,9 +27,6 @@ if not any(api_keys):
 current_key_index = 0
 client = Groq(api_key=api_keys[current_key_index])

-# FastAPI app
-app = FastAPI()
-
 # Define Groq-based model with fallback
 class GroqChatbot:
     def __init__(self, api_keys):
@@ -177,56 +172,21 @@ embedding_store = LocalEmbeddingStore(storage_dir="./chromadb_storage")
 chatbot = GroqChatbot(api_keys=api_keys)
 rag_system = RAGSystem(groq_client=chatbot, embedding_store=embedding_store)

-# Pydantic models for API request and response
-class UserInput(BaseModel):
-    input_text: str
-
-class ChatResponse(BaseModel):
-    response: str
-
-@app.get("/")
-async def read_root():
-    return {"message": "Welcome to the Groq and ChromaDB integration API!"}
-
-@app.post("/chat", response_model=ChatResponse)
-async def chat(user_input: UserInput):
-    """Handle chat interactions with Groq and ChromaDB."""
-    ai_response = rag_system.chat_with_rag(user_input.input_text)
-    return ChatResponse(response=ai_response)
-
-@app.post("/embed", response_model=ChatResponse)
-async def embed_text(user_input: UserInput):
-    """Handle text embedding."""
-    embedding = chatbot.text_to_embedding(user_input.input_text)
-    if embedding is not None:
-        return ChatResponse(response="Text embedded successfully.")
-    else:
-        raise HTTPException(status_code=400, detail="Embedding generation failed.")
-
-@app.post("/add_document", response_model=ChatResponse)
-async def add_document(user_input: UserInput):
-    """Add a document embedding to ChromaDB."""
-    embedding = chatbot.text_to_embedding(user_input.input_text)
-    if embedding is not None:
-        doc_id = "sample_document"  # You can generate or pass a doc ID
-        embedding_store.add_embedding(doc_id, embedding, metadata={"source": "user_input"})
-        return ChatResponse(response="Document added to the database.")
-    else:
-        raise HTTPException(status_code=400, detail="Embedding generation failed.")
-
-# Gradio Interface for querying the FastAPI /chat endpoint
-async def gradio_chatbot(input_text: str):
-    async with httpx.AsyncClient() as client:
-        response = await client.post(
-            "http://127.0.0.1:7860/chat",  # FastAPI endpoint
-            json={"input_text": input_text}
-        )
-        response_data = response.json()
-        return response_data["response"]
-
-# Gradio Interface
-iface = gr.Interface(fn=gradio_chatbot, inputs="text", outputs="text")
+# Gradio UI
+def chat_ui(user_input, chat_history):
+    """Handle chat interactions and update history."""
+    if not user_input.strip():
+        return chat_history
+    ai_response = rag_system.chat_with_rag(user_input)
+    chat_history.append((user_input, ai_response))
+    return chat_history
+
+# Gradio interface
+with gr.Blocks() as demo:
+    chat_history = gr.Chatbot(label="Groq Chatbot with RAG", elem_id="chatbox")
+    user_input = gr.Textbox(placeholder="Enter your prompt here...")
+    submit_button = gr.Button("Submit")
+    submit_button.click(chat_ui, inputs=[user_input, chat_history], outputs=chat_history)

 if __name__ == "__main__":
-    # Launch the Gradio interface
-    iface.launch()
+    demo.launch()
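
A note on the new wiring added by this commit: when a gr.Chatbot component is listed in inputs, the callback receives its current list of (user, assistant) message pairs, and whatever the callback returns for that component becomes the re-rendered history. The sketch below is a minimal, self-contained illustration of the same pattern, not the committed code: fake_rag_response is a hypothetical stand-in for rag_system.chat_with_rag, and the `or []` guard is an added safety measure for an empty initial history.

import gradio as gr

def fake_rag_response(prompt: str) -> str:
    # Hypothetical placeholder for rag_system.chat_with_rag(prompt).
    return f"(stub) You said: {prompt}"

def chat_ui(user_input, chat_history):
    """Append the new (user, assistant) pair to the history shown in gr.Chatbot."""
    chat_history = chat_history or []   # guard against an uninitialized history
    if not user_input.strip():
        return chat_history             # ignore empty submissions
    ai_response = fake_rag_response(user_input)
    chat_history.append((user_input, ai_response))
    return chat_history

with gr.Blocks() as demo:
    chat_history = gr.Chatbot(label="Demo Chatbot")
    user_input = gr.Textbox(placeholder="Enter your prompt here...")
    submit_button = gr.Button("Submit")
    # The Chatbot component serves as both an input (current history) and the output (updated history).
    submit_button.click(chat_ui, inputs=[user_input, chat_history], outputs=chat_history)

if __name__ == "__main__":
    demo.launch()

If Enter-key submission were also wanted, a user_input.submit(chat_ui, inputs=[user_input, chat_history], outputs=chat_history) binding could be registered alongside the button click.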