rumaisa1054 committed · verified
Commit f59186c · 1 Parent(s): c2adf1f

Create app.py

Files changed (1)
  1. app.py +59 -0
app.py ADDED
@@ -0,0 +1,59 @@
+ import os
+ import faiss
+ import numpy as np
+ import streamlit as st
+ from groq import Groq
+ from sentence_transformers import SentenceTransformer
+
+ # Paths and model names
+ VECTOR_DB_PATH = "vector_database.faiss"
+ EMBEDDING_MODEL = "sentence-transformers/all-MiniLM-L6-v2"
+
+ # Initialize the embedding model used to encode queries
+ embedding_model = SentenceTransformer(EMBEDDING_MODEL)
+
+ # Load the FAISS index from disk, if it has been created
+ def load_faiss():
+     if not os.path.exists(VECTOR_DB_PATH):
+         st.error("Vector database not found! Ensure the FAISS index is created.")
+         return None
+     return faiss.read_index(VECTOR_DB_PATH)
+
+ faiss_index = load_faiss()
+
+ # Groq API setup: read the key from the environment (set it in Hugging Face secrets)
+ GROQ_API_KEY = os.environ.get("GROQ_API_KEY")
+ client = Groq(api_key=GROQ_API_KEY)
+
+ MODEL_ID = "deepseek-r1-distill-llama-70b"
+
+ # Retrieve the indices of the top_k nearest neighbors from FAISS
+ def search_faiss(query, top_k=3):
+     query_embedding = embedding_model.encode(query, convert_to_numpy=True).reshape(1, -1)
+     distances, indices = faiss_index.search(query_embedding, top_k)
+     return indices
+
+ # Call the DeepSeek model on Groq with the retrieved context
+ def generate_response(context, query):
+     prompt = f"Use the following retrieved context to answer the question:\n\nContext:\n{context}\n\nQuestion: {query}\nAnswer:"
+     response = client.chat.completions.create(
+         model=MODEL_ID,
+         messages=[{"role": "user", "content": prompt}],
+     )
+     return response.choices[0].message.content
+
+ # Streamlit UI
+ st.title("💡 AI Chat with FAISS & GROQ")
+ st.write("Ask a question and get responses based on stored knowledge!")
+
+ query = st.text_input("🔍 Enter your query:")
+ if query:
+     if faiss_index is None:
+         st.error("FAISS database not loaded. Please check deployment.")
+     else:
+         indices = search_faiss(query)
+         # Placeholder context: the chunk texts are not stored alongside the index in this commit
+         retrieved_context = "\n".join([f"Chunk {i}: Retrieved text" for i in indices[0]])
+         response = generate_response(retrieved_context, query)
+         st.write("### 🤖 AI Response:")
+         st.write(response)
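
Note: app.py expects vector_database.faiss to already exist, but the commit does not include the script that builds it, and the retrieved context is only a placeholder because the chunk texts are not persisted anywhere. The following is a minimal sketch of how such an index could be built with the same embedding model; the build_index.py name, the chunks.json side file, the sample chunk strings, and the choice of IndexFlatL2 are all assumptions, not part of this commit.

# build_index.py - hypothetical helper, not part of this commit
import json
import faiss
from sentence_transformers import SentenceTransformer

VECTOR_DB_PATH = "vector_database.faiss"
CHUNKS_PATH = "chunks.json"  # assumed side file mapping FAISS ids back to text
EMBEDDING_MODEL = "sentence-transformers/all-MiniLM-L6-v2"

# Example chunks; in practice these would come from your documents
chunks = [
    "FAISS is a library for efficient similarity search over dense vectors.",
    "Sentence-transformers models map sentences to fixed-size embeddings.",
    "Groq serves open-weight LLMs behind an OpenAI-compatible chat API.",
]

model = SentenceTransformer(EMBEDDING_MODEL)
embeddings = model.encode(chunks, convert_to_numpy=True).astype("float32")

# IndexFlatL2 performs the exact L2 search that faiss_index.search() runs in app.py
index = faiss.IndexFlatL2(embeddings.shape[1])
index.add(embeddings)
faiss.write_index(index, VECTOR_DB_PATH)

# Persist the chunk texts so app.py could map returned indices back to real context
with open(CHUNKS_PATH, "w") as f:
    json.dump(chunks, f)

With a side file like this, app.py could load the chunk list at startup and replace the placeholder "Chunk {i}: Retrieved text" strings with the actual retrieved passages. The GROQ_API_KEY is read from the environment per the "Set in Hugging Face secrets" note; on a Space that means adding it as a repository secret so it is exposed to the app as an environment variable.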