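"""Streamlit Q&A app that answers questions over user-provided context using
the WhiteRabbitNEO model loaded through the Hugging Face transformers pipeline."""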
import streamlit as st
from transformers import pipeline
from huggingface_hub import login

# Title of the Streamlit app
st.title("WhiteRabbitNEO Q&A App")

# Hugging Face API token input (only required if the model is private)
token = st.text_input("Enter your Hugging Face token (if required):", type="password")

# Load the model, caching it across reruns. The token is passed as an argument
# so the cache is keyed on it and a failed anonymous load is not reused after
# a token has been entered.
@st.cache_resource
def load_model(hf_token: str):
    try:
        if hf_token:
            login(token=hf_token)
        model = pipeline("question-answering", model="WhiteRabbitNEO")
        return model
    except Exception as e:
        st.error(f"Failed to load model: {e}")
        return None

# Instantiate the cached model
model = load_model(token)

if model:
    question = st.text_input("Ask a question:")
    context = st.text_area("Provide context for your question:")

    if st.button("Get Answer"):
        if question and context:
            try:
                answer = model(question=question, context=context)
                st.write("Answer:", answer["answer"])
            except Exception as e:
                st.error(f"Error generating answer: {e}")
        else:
            st.warning("Please provide both a question and context.")
else:
    st.error("Model could not be loaded. Please check your configuration.")