import streamlit as st
from transformers import pipeline

# Title and Description
st.title("WhiteRabbitNEO Q&A App")
st.write("Ask any question, and WhiteRabbitNEO will provide an answer.")

# Initialize the model pipeline
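# st.cache_resource caches the loaded pipeline across Streamlit reruns, so the model is only loaded once.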
@st.cache_resource
def load_model():
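    """Load the Hugging Face question-answering pipeline, returning None on failure."""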
    try:
        # Replace with a valid Hugging Face model id if needed: the question-answering
        # pipeline expects a model fine-tuned for extractive QA, and the bare name
        # "WhiteRabbitNEO" may not resolve to an existing repo.
        model = pipeline("question-answering", model="WhiteRabbitNEO")
        return model
    except Exception as e:
        st.error(f"Failed to load model: {e}")
        return None
qa_pipeline = load_model()

# Input Section
if qa_pipeline:
    question = st.text_input("Enter your question:")
    context = st.text_area("Provide some context for the question:")

    # Output Section
    if st.button("Get Answer"):
        if question and context:
            with st.spinner("Thinking..."):
                try:
                    # The QA pipeline returns a dict with "answer", "score", "start", and "end" keys.
                    result = qa_pipeline(question=question, context=context)
                    st.success("Answer:")
                    st.write(result["answer"])
                    st.write(f"Confidence: {result['score']:.2f}")
                except Exception as e:
                    st.error(f"Error generating answer: {e}")
        else:
            st.warning("Please provide both a question and context.")
else:
    st.error("Failed to load the question-answering model. Please try again later.")