# app.py — HuggingFace Space by shenzye46 (commit 5efc175)
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM
# Load model and tokenizer
# Fine-tuned checkpoint to generate from (replace with your own model name).
model_name = "shenzye46/SmolLM-135M-fine-tuned-recepie" # Replace with your model name
# Tokenizer comes from the base model the checkpoint was fine-tuned from.
tokenizer_name = "HuggingFaceTB/SmolLM-135M"
def load_model_and_tokenizer():
    """Load the fine-tuned causal LM and its base tokenizer from the Hub.

    Returns:
        A ``(tokenizer, model)`` tuple ready for text generation.
    """
    tok = AutoTokenizer.from_pretrained(tokenizer_name)
    lm = AutoModelForCausalLM.from_pretrained(model_name)
    # Causal LMs often ship without a pad token; fall back to EOS so
    # generate() has a valid padding id to work with.
    if tok.pad_token is None:
        tok.pad_token = tok.eos_token
        tok.pad_token_id = tok.eos_token_id
    return tok, lm
# Load once at import time so every request reuses the same objects.
tokenizer, model = load_model_and_tokenizer()
def generate_recipe(recipe_name):
    """Generate cooking instructions for a given recipe name.

    Args:
        recipe_name: Name of the dish, e.g. "Chocolate Cake".

    Returns:
        The model's generated text after the "Recipe Name:" prompt marker,
        stripped of surrounding whitespace.
    """
    prompt = f"Recipe Name: {recipe_name}\nInstructions: "
    inputs = tokenizer(prompt, return_tensors="pt")
    outputs = model.generate(
        **inputs,  # forwards attention_mask too, not just input_ids
        max_new_tokens=512,  # bounds NEW tokens; max_length would also count the prompt
        temperature=0.7,  # sampling randomness
        top_p=0.9,  # nucleus sampling
        do_sample=True,
        num_return_sequences=1,
        pad_token_id=tokenizer.pad_token_id,  # use the EOS-based pad set at load time
    )
    generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
    # Drop only the prompt's leading marker (maxsplit=1): if the model emits
    # another "Recipe Name:" later, [-1] without maxsplit would discard the
    # earlier instructions.
    return generated_text.split("Recipe Name:", 1)[-1].strip()
# --- Gradio UI ---
# Name the input/output components first, then wire them into the Interface.
recipe_name_box = gr.Textbox(
    label="Recipe Name",
    placeholder="Enter the recipe name, e.g., Chocolate Cake",
)
method_box = gr.Textbox(label="Cooking Method")

interface = gr.Interface(
    fn=generate_recipe,
    inputs=recipe_name_box,
    outputs=method_box,
    title="Recipe Generator",
    description="Enter the name of a recipe, and the model will generate the method to cook it!",
)

# Start the web server for the app.
interface.launch()