import os
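# Install dependencies at runtime (a requirements.txt is the usual approach on Hugging Face Spaces,
# but installing here also works for a quick demo)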
os.system("pip install transformers")
os.system("pip install torch")
os.system("pip install -U gradio")
import gradio as gr
from transformers import pipeline
# Use a pipeline as a high-level helper
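# The model weights are downloaded from the Hugging Face Hub on first run and cached locally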
pipe = pipeline("text-generation", model="MAsad789565/GPT2-medium-Finetuned")
def generate_text(prompt):
    # do_sample=True is needed for temperature/top_k/top_p to take effect (greedy decoding ignores them)
    result = pipe(prompt, max_length=100, temperature=1.0, top_k=20, top_p=0.9, do_sample=True)
    return result[0]["generated_text"]
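# Example (one of the prompts listed below):
#   generate_text("User: What is SSL certificate?")
# returns the prompt followed by the model's continuation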
# Function called when the user submits the prompt
def send_to_model(textbox_value):
    # Gradio passes the input Textbox value in and routes the returned string to the
    # output Textbox, so no manual interface updates are needed
    return generate_text(textbox_value)
# Create a Gradio interface
iface = gr.Interface(
    fn=send_to_model,
    inputs=gr.Textbox(label="Prompt", placeholder="Enter your prompt here..."),
    outputs=gr.Textbox(label="Generated text", placeholder="Generated text will appear here..."),
    live=False,  # generate on submit rather than on every keystroke
title="GPT-2 Medium Fine-tuned Model",
description="Generate text using a fine-tuned GPT-2 medium model.",
examples=[
["User: What is SSL certificate?"],
["User: Explain the concept of artificial intelligence."],
["User: Write python code to find factorial of a number."],
],
theme="compact",
# Add a button to send text to the model
button="Generate",
button_click=send_to_model # Specify the function to be called when the button is clicked
)
# Launch the Gradio interface
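# share=True is unnecessary when the app is hosted on a Hugging Face Space (it is already publicly reachable)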
iface.launch(share=True)