# from transformers import AutoTokenizer, AutoModelForCausalLM
#
# def load_model(model_name="meta-llama/Llama-3.1-8B"):
#     """
#     Load the Hugging Face Llama model and tokenizer locally.
#     """
#     tokenizer = AutoTokenizer.from_pretrained(model_name)
#     model = AutoModelForCausalLM.from_pretrained(model_name, device_map="auto", torch_dtype="auto")
#     return tokenizer, model
#
# # Initialize model and tokenizer
# tokenizer, model = load_model()
from transformers import pipeline

# Replace with your Hugging Face API token
# (required because meta-llama/Llama-3.1-8B is a gated model)
api_token = "your_huggingface_api_token"

# Load the model through the text-generation pipeline
generator = pipeline(
    "text-generation",
    model="meta-llama/Llama-3.1-8B",
    token=api_token,  # `use_auth_token` is deprecated in recent transformers releases
)
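
# Example usage: a minimal sketch of calling the pipeline. The prompt and the
# generation parameters below are illustrative assumptions, not part of the
# original Space.
output = generator(
    "Explain what a language model is in one sentence.",
    max_new_tokens=50,
    do_sample=True,
    temperature=0.7,
)
print(output[0]["generated_text"])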