# Hugging Face Space app.py — author: hgdgng, commit cb3dd0a (verified)
import gradio as gr
from transformers import pipeline
# Load the question-answering model at startup. If loading fails for any
# reason (missing weights, no network, incompatible transformers version),
# fall back to qa_pipeline = None so the UI can report a friendly error
# instead of crashing the whole app.
qa_pipeline = None
try:
    print("Loading the model...")
    qa_pipeline = pipeline(
        "question-answering",
        model="distilbert-base-uncased-distilled-squad",
    )
    print("Model loaded successfully.")
except Exception as e:
    # Surface the failure in the logs for debugging; keep the app alive.
    print(f"Error loading model: {e}")
# Define the function that takes inputs and returns the answer
def answer_question(context: str, question: str) -> str:
    """Answer *question* from the given *context* using the QA pipeline.

    Args:
        context: The passage of text to search for an answer.
        question: The question to answer from the passage.

    Returns:
        The extracted answer string, or a human-readable error message
        when the inputs are empty or the model failed to load.
    """
    # Guard empty/whitespace inputs first: the pipeline raises on an
    # empty context, and this check also works when the model is absent.
    if not context or not context.strip() or not question or not question.strip():
        return "Error: Please provide both a context and a question."
    if qa_pipeline is None:
        return "Error: Model not loaded."
    result = qa_pipeline(question=question, context=context)
    # The pipeline returns a dict with 'answer', 'score', 'start', 'end'.
    return result['answer']
# Build the Gradio UI: a large textbox for the passage, a small one for
# the question, and a plain-text output showing the extracted answer.
context_box = gr.Textbox(lines=7, label="Context (Enter the passage)")
question_box = gr.Textbox(lines=2, label="Question")

interface = gr.Interface(
    fn=answer_question,
    inputs=[context_box, question_box],
    outputs="text",
    title="Question Answering Model",
    description="Ask a question based on the given context.",
)

# Confirm startup in the logs, then start the web server (blocking call).
print("Launching the Gradio interface...")
interface.launch()