import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the model and tokenizer.
# Note: "meta-llama/Llama-2-7b-hf" is the Transformers-compatible checkpoint;
# the plain "meta-llama/Llama-2-7b" repo holds the original (non-HF) weights.
# The Llama-2 weights are gated: accept the license on the Hugging Face Hub
# and authenticate (e.g. `huggingface-cli login`) before downloading.
model_name = "meta-llama/Llama-2-7b-hf"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

def generate(prompt):
    # Tokenize the prompt; passing **inputs forwards the attention mask
    # along with the input IDs.
    inputs = tokenizer(prompt, return_tensors="pt")
    # Cap the generated length with max_new_tokens; the original
    # max_length=100000 far exceeded Llama-2's 4096-token context window.
    outputs = model.generate(**inputs, max_new_tokens=256)
    response = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return response

# Create a Gradio interface and launch the app.
gr.Interface(fn=generate, inputs="text", outputs="text").launch()