# app.py — WhiteRabbitNEO Q&A App (Streamlit)
# Last change: commit 557cf37 ("Update app.py") by Canstralian. File size ~1.44 kB.
import streamlit as st
from transformers import pipeline
# Title and Description
# Rendered once per script run; Streamlit reruns this file on every interaction.
st.title("WhiteRabbitNEO Q&A App")
st.write("Ask any question, and WhiteRabbitNEO will provide an answer.")
# Initialize the model pipeline.
# st.cache_resource caches the returned pipeline across Streamlit reruns,
# so the model weights are loaded only once per server process.
@st.cache_resource
def load_model():
    """Load the question-answering pipeline.

    Returns:
        A transformers ``Pipeline`` for the ``question-answering`` task,
        or ``None`` if loading failed (the error is shown in the UI).
    """
    try:
        # NOTE(review): "WhiteRabbitNEO" is not an ``org/name`` Hub repo id,
        # and the WhiteRabbitNeo checkpoints are causal LMs — confirm this id
        # and its compatibility with the "question-answering" task.
        return pipeline("question-answering", model="WhiteRabbitNEO")
    except Exception as e:
        # Surface the failure in the UI instead of crashing the app.
        st.error(f"Failed to load model: {e}")
        return None
# Load the (cached) model once at script start.
qa_pipeline = load_model()

# Simple input section: ask a question and provide supporting context.
if qa_pipeline:
    question = st.text_input("Your question:")
    context = st.text_area("Context (provide background info for the question):")

    # Button to trigger the model for prediction.
    if st.button("Get Answer"):
        if question and context:
            try:
                # Get prediction from the model; extractive QA returns a dict
                # with at least "answer" and "score" keys.
                result = qa_pipeline(question=question, context=context)
                # Display the answer and its confidence.
                st.write(f"**Answer:** {result['answer']}")
                st.write(f"**Confidence Score:** {result['score']:.2f}")
            except Exception as e:
                # Inference can fail on malformed input; show the error, don't crash.
                st.error(f"Error generating answer: {e}")
        else:
            st.warning("Please fill both the question and context fields.")
else:
    st.error("Model could not be loaded. Please check your configuration.")