import gradio as gr
from transformers import pipeline

# Load the InLegalBERT model and tokenizer for Question Answering
qa_pipeline = pipeline(
    "question-answering",
    model="law-ai/InLegalBERT",
    tokenizer="law-ai/InLegalBERT",
)

# Example context to help the model reason (you can replace this with a full Bare Act or user-provided text)
default_context = """
Section 420 of the Indian Penal Code deals with cheating and dishonestly inducing delivery of property.
Whoever cheats and thereby dishonestly induces the person deceived to deliver any property to any person,
or to make, alter or destroy the whole or any part of a valuable security, shall be punished with
imprisonment of up to 7 years and shall also be liable to fine.
"""


# Function to generate a legal answer from the question and (optional) context
def get_legal_answer(user_question, context_text):
    if not user_question.strip():
        return "Please enter a valid legal question."

    # Use the default context if none is provided
    context = context_text if context_text.strip() else default_context

    try:
        result = qa_pipeline(question=user_question, context=context)
        return result["answer"]
    except Exception as e:
        return f"Error: {str(e)}"


# Gradio UI
with gr.Blocks() as app:
    gr.Markdown("## 🧑‍⚖️ InLegalBERT Legal Engine - Ask Legal Questions")
    with gr.Row():
        question_input = gr.Textbox(
            label="Enter Legal Question",
            placeholder="e.g., What is Section 420?",
        )
        context_input = gr.Textbox(
            label="Context (Optional)",
            placeholder="Leave empty to use default legal context",
        )
    output = gr.Textbox(label="Legal Output")
    submit_btn = gr.Button("Submit")
    # The handler returns a single answer string, matching the single output component
    submit_btn.click(fn=get_legal_answer, inputs=[question_input, context_input], outputs=output)

app.launch()
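
# --- Usage sketch (assumption, not part of the original script) ---
# A minimal way to run this locally, assuming the file is saved as app.py
# (the filename is hypothetical). Gradio serves the UI on
# http://127.0.0.1:7860 by default:
#
#   pip install gradio transformers torch
#   python app.py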