Generated with:

import torch
from transformers import AutoModelForCausalLM

repo_id = "gg-hf-gm/gemma-3n-E4B"
model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype=torch.bfloat16, device_map="auto")

print(f"Total Parameters: {model.num_parameters():,}") # 7,849,435,920
osanseviero changed pull request status to closed
