import streamlit as st
from transformers import pipeline

# Title and Description
st.title("WhiteRabbitNEO Q&A App")
st.write("Ask any question, and WhiteRabbitNEO will provide an answer.")

# Initialize the model pipeline
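# @st.cache_resource keeps the loaded pipeline in memory across Streamlit reruns,
# so the model is downloaded and initialized only once per session.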
@st.cache_resource
def load_model():
    try:
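        # "WhiteRabbitNEO" is assumed to resolve to an extractive
        # question-answering checkpoint on the Hugging Face Hub (or a local
        # path); otherwise pipeline() raises and the error is surfaced below.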
        return pipeline("question-answering", model="WhiteRabbitNEO")
    except Exception as e:
        st.error(f"Failed to load model: {e}")
        return None

qa_pipeline = load_model()

# Input Section
if qa_pipeline:
    question = st.text_input("Enter your question:")
    context = st.text_area("Provide some context for the question:")

    # Output Section
    if st.button("Get Answer"):
        if question and context:
            with st.spinner("Thinking..."):
                try:
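                    # The QA pipeline returns a dict with "answer", "score",
                    # and the character "start"/"end" offsets within the context.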
                    result = qa_pipeline(question=question, context=context)
                    st.success("Answer:")
                    st.write(result["answer"])
                    st.write(f"Confidence: {result['score']:.2f}")
                except Exception as e:
                    st.error(f"Error generating answer: {e}")
        else:
            st.warning("Please provide both a question and context.")
else:
    st.error("Failed to load the question-answering model. Please try again later.")