Canstralian committed on
Commit
175787a
·
verified ·
1 Parent(s): 557cf37

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +16 -16
app.py CHANGED
@@ -1,15 +1,19 @@
1
import streamlit as st
from transformers import pipeline

# Page header and a short description of what the app does.
st.title("WhiteRabbitNEO Q&A App")
st.write("Ask any question, and WhiteRabbitNEO will provide an answer.")


@st.cache_resource
def load_model():
    # Build the question-answering pipeline once per session; on failure
    # report the problem and hand back None so the caller can show a
    # fallback message instead of crashing.
    try:
        return pipeline("question-answering", model="WhiteRabbitNEO")
    except Exception as e:
        # NOTE(review): the diff view hides the original error-reporting
        # line here; reconstructed as a plain st.error — confirm wording.
        st.error(f"Error loading model: {e}")
        return None


qa_pipeline = load_model()

if not qa_pipeline:
    st.error("Model could not be loaded. Please check your configuration.")
else:
    # Collect the question and the background text the model answers from.
    question = st.text_input("Your question:")
    context = st.text_area("Context (provide background info for the question):")

    if st.button("Get Answer"):
        if not (question and context):
            st.warning("Please fill both the question and context fields.")
        else:
            try:
                result = qa_pipeline(question=question, context=context)
                st.write(f"**Answer:** {result['answer']}")
                st.write(f"**Confidence Score:** {result['score']:.2f}")
            except Exception as e:
                st.error(f"Error generating answer: {e}")
 
 
1
import streamlit as st
from transformers import pipeline
from huggingface_hub import login

# Title of the Streamlit app
st.title("WhiteRabbitNEO Q&A App")

# Hugging Face API token input (only required if the model is private)
token = st.text_input("Enter your Hugging Face token (if required):", type="password")


@st.cache_resource
def load_model(hf_token=None):
    """Load the question-answering pipeline, logging in first if a token is given.

    The token is a parameter rather than a read of the module-level variable:
    st.cache_resource keys the cached resource on the function's arguments,
    so entering or changing the token triggers a fresh load instead of
    reusing a pipeline (or a cached None failure) from a run made before
    the user authenticated.

    Returns the pipeline, or None if loading failed (the error is shown
    in the UI so the caller only has to check for None).
    """
    try:
        if hf_token:
            login(token=hf_token)
        # NOTE(review): "WhiteRabbitNEO" looks like an incomplete Hub id
        # (usually "namespace/name") — confirm the exact model id.
        return pipeline("question-answering", model="WhiteRabbitNEO")
    except Exception as e:
        # The original error-reporting line is hidden in the diff view;
        # reconstructed as a plain st.error — confirm wording.
        st.error(f"Error loading model: {e}")
        return None


# Load the model; passing the token makes the cache re-key when it changes.
model = load_model(token)

if model:
    question = st.text_input("Ask a question:")
    context = st.text_area("Provide context for your question:")

    if st.button("Get Answer"):
        if question and context:
            try:
                answer = model(question=question, context=context)
                st.write("Answer:", answer['answer'])
            except Exception as e:
                st.error(f"Error generating answer: {e}")
        else:
            st.warning("Please provide both a question and context.")
else:
    st.error("Model could not be loaded. Please check your configuration.")