File size: 208 Bytes
{
  "architectures": [
    "LlamaForCausalLM"
  ],
  "model_type": "llama",
  "torch_dtype": "float16",
  "transformers_version": "4.36.0",
  "use_cache": true,
  "_name_or_path": "meta-llama/Llama-2-7b-hf"
}
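As a sanity check, this configuration can be loaded with the transformers library; the sketch below assumes the JSON above is saved as config.json inside a local directory (the path "./llama-2-7b-hf" is a hypothetical example, not taken from the source).

# Minimal sketch, assuming transformers is installed and the config.json above
# sits in a local directory named "./llama-2-7b-hf" (hypothetical path).
from transformers import AutoConfig

config = AutoConfig.from_pretrained("./llama-2-7b-hf")

# Fields from the file become attributes on the resulting LlamaConfig object.
print(config.model_type)     # "llama"
print(config.architectures)  # ["LlamaForCausalLM"]
print(config.use_cache)      # True

AutoConfig resolves the config class from the "model_type" field ("llama" here), so the file can be inspected without downloading the model weights themselves.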