Canstralian committed
Commit 557cf37 · verified · 1 Parent(s): dff4d8c

Update app.py

Files changed (1)
  1. app.py +16 -15
app.py CHANGED
@@ -9,32 +9,33 @@ st.write("Ask any question, and WhiteRabbitNEO will provide an answer.")
 @st.cache_resource
 def load_model():
     try:
-        # Replace with the correct model if needed (e.g., ensure WhiteRabbitNEO exists)
+        # Simple model pipeline initialization
         model = pipeline("question-answering", model="WhiteRabbitNEO")
         return model
     except Exception as e:
         st.error(f"Failed to load model: {e}")
         return None
 
+# Load the model
 qa_pipeline = load_model()
 
-# Input Section
+# Simple input section: Ask question and provide context
 if qa_pipeline:
-    question = st.text_input("Enter your question:")
-    context = st.text_area("Provide some context for the question:")
+    question = st.text_input("Your question:")
+    context = st.text_area("Context (provide background info for the question):")
 
-    # Output Section
+    # Button to trigger the model for prediction
     if st.button("Get Answer"):
         if question and context:
-            with st.spinner("Thinking..."):
-                try:
-                    result = qa_pipeline(question=question, context=context)
-                    st.success("Answer:")
-                    st.write(result["answer"])
-                    st.write(f"Confidence: {result['score']:.2f}")
-                except Exception as e:
-                    st.error(f"Error generating answer: {e}")
+            try:
+                # Get prediction from the model
+                result = qa_pipeline(question=question, context=context)
+                # Display the answer
+                st.write(f"**Answer:** {result['answer']}")
+                st.write(f"**Confidence Score:** {result['score']:.2f}")
+            except Exception as e:
+                st.error(f"Error generating answer: {e}")
         else:
-            st.warning("Please provide both a question and context.")
+            st.warning("Please fill both the question and context fields.")
 else:
-    st.error("Failed to load the question-answering model. Please try again later.")
+    st.error("Model could not be loaded. Please check your configuration.")