|
import streamlit as st |
|
from transformers import pipeline |
|
|
|
|
|
# Page chrome: title and one-line usage hint shown above the input widgets.
st.title("WhiteRabbitNEO Q&A App")

st.write("Ask any question, and WhiteRabbitNEO will provide an answer.")
|
|
|
|
|
@st.cache_resource
def load_model(model_name: str = "WhiteRabbitNEO"):
    """Load and cache a Hugging Face question-answering pipeline.

    The result is memoized across reruns by ``st.cache_resource``, so the
    (potentially large) model is only downloaded/instantiated once per
    distinct ``model_name``.

    Args:
        model_name: Hugging Face Hub model id. Defaults to "WhiteRabbitNEO"
            to preserve the original behavior.

    Returns:
        The pipeline object, or ``None`` if loading failed (the error is
        surfaced in the Streamlit UI instead of raising).
    """
    # NOTE(review): "WhiteRabbitNEO" has no org prefix (Hub ids are usually
    # "org/model", e.g. "WhiteRabbitNeo/WhiteRabbitNeo-33B-v1"), and
    # WhiteRabbitNeo releases are causal LMs, which the "question-answering"
    # (extractive QA) pipeline task may not support — confirm this id loads.
    try:
        return pipeline("question-answering", model=model_name)
    except Exception as e:
        # Best-effort: report the failure in the UI; callers check for None.
        st.error(f"Failed to load model: {e}")
        return None
|
|
|
# Build (or fetch from cache) the QA pipeline once per session.
qa_pipeline = load_model()

if not qa_pipeline:
    # Loading failed; load_model already displayed the underlying exception.
    st.error("Failed to load the question-answering model. Please try again later.")
else:
    # Collect the user's question and the passage to extract the answer from.
    user_question = st.text_input("Enter your question:")
    user_context = st.text_area("Provide some context for the question:")

    if st.button("Get Answer"):
        if not (user_question and user_context):
            # Extractive QA needs both fields; nudge the user instead of erroring.
            st.warning("Please provide both a question and context.")
        else:
            with st.spinner("Thinking..."):
                try:
                    prediction = qa_pipeline(question=user_question, context=user_context)
                    st.success("Answer:")
                    st.write(prediction["answer"])
                    st.write(f"Confidence: {prediction['score']:.2f}")
                except Exception as e:
                    # Inference can fail (bad inputs, model errors); show it inline.
                    st.error(f"Error generating answer: {e}")
|
|