Priyank1001 committed on
Commit
467ed40
·
verified ·
1 Parent(s): fbdf5d6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -6
app.py CHANGED
@@ -41,17 +41,17 @@ You are an AI tutor helping students prepare for the GATE CSE exam. Use the prov
41
  @st.cache_resource(show_spinner=False)
42
  def get_retriever_and_llm():
43
  # embeddings = HuggingFaceEmbeddings(model_name=EMBEDDING_MODEL_NAME, model_kwargs={"device": "cpu"})
44
- if os.path.exists(EMBEDDING_MODEL_NAME):
45
- tokenizer = AutoTokenizer.from_pretrained(EMBEDDING_MODEL_NAME,trust_remote_code = True)
46
- else:
47
- tokenizer = AutoTokenizer.from_pretrained("intfloat/e5-small",trust_remote_code = True)
48
 
49
  embeddings = HuggingFaceEmbeddings(
50
  model_name = EMBEDDING_MODEL_NAME,
51
  model_kwargs ={"device":"cpu"},
52
  )
53
  vectorstore = FAISS.load_local(FAISS_DB_PATH, embeddings, allow_dangerous_deserialization=True)
54
- retriever = vectorstore.as_retriever(search_kwargs={"k": 3})
55
 
56
  llm = Llama(
57
  model_path=LLM_MODEL_PATH,
@@ -67,7 +67,7 @@ def generate_answer(question, retriever, llm):
67
  docs = retriever.invoke(question)
68
  context = "\n\n".join(doc.page_content for doc in docs)
69
  prompt = PROMPT_TEMPLATE.format(context=context, question=question)
70
- response = llm(prompt, max_tokens=1024, stop=["Question:", "Context:"])
71
  return response["choices"][0]["text"].strip()
72
 
73
  # --- Streamlit UI ---
 
41
  @st.cache_resource(show_spinner=False)
42
  def get_retriever_and_llm():
43
  # embeddings = HuggingFaceEmbeddings(model_name=EMBEDDING_MODEL_NAME, model_kwargs={"device": "cpu"})
44
+ # if os.path.exists(EMBEDDING_MODEL_NAME):
45
+ # tokenizer = AutoTokenizer.from_pretrained(EMBEDDING_MODEL_NAME,trust_remote_code = True)
46
+ # else:
47
+ # tokenizer = AutoTokenizer.from_pretrained("intfloat/e5-small",trust_remote_code = True)
48
 
49
  embeddings = HuggingFaceEmbeddings(
50
  model_name = EMBEDDING_MODEL_NAME,
51
  model_kwargs ={"device":"cpu"},
52
  )
53
  vectorstore = FAISS.load_local(FAISS_DB_PATH, embeddings, allow_dangerous_deserialization=True)
54
+ retriever = vectorstore.as_retriever(search_kwargs={"k": 8})
55
 
56
  llm = Llama(
57
  model_path=LLM_MODEL_PATH,
 
67
  docs = retriever.invoke(question)
68
  context = "\n\n".join(doc.page_content for doc in docs)
69
  prompt = PROMPT_TEMPLATE.format(context=context, question=question)
70
+ response = llm(prompt, max_tokens=8096, stop=["Question:", "Context:"])
71
  return response["choices"][0]["text"].strip()
72
 
73
  # --- Streamlit UI ---