from transformers import pipeline
import torch
import gradio as gr
# Load the text-generation pipeline once so the model is not reloaded on every request.
generator = pipeline("text-generation", model="openai-community/gpt2")

def predict(text):
    # Seed the RNG so the generated continuation is reproducible.
    torch.manual_seed(0)
    output = generator(text, max_length=30)
    # The pipeline returns a list of dicts; return only the generated string.
    return output[0]["generated_text"]

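# Optional sanity check (a sketch, not part of the original snippet): call the
# handler directly to see the generated text before wiring up the Gradio UI.
# print(predict("Hello, I'm a language model,"))
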
demo = gr.Interface(
    fn=predict,
    inputs=[
        gr.Textbox(placeholder="Enter text here..."),
    ],
    outputs="text",
)
demo.launch()