nm-autobot committed (verified)
Commit dd020ca · 1 Parent(s): 77174f2

Upload folder using huggingface_hub

chat_template.jinja ADDED
@@ -0,0 +1,15 @@
+ {% for message in messages %}
+ {% if message['role'] == 'user' %}
+ {{ '<|user|>
+ ' + message['content'] + eos_token }}
+ {% elif message['role'] == 'system' %}
+ {{ '<|system|>
+ ' + message['content'] + eos_token }}
+ {% elif message['role'] == 'assistant' %}
+ {{ '<|assistant|>
+ ' + message['content'] + eos_token }}
+ {% endif %}
+ {% if loop.last and add_generation_prompt %}
+ {{ '<|assistant|>' }}
+ {% endif %}
+ {% endfor %}
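
For context, this chat_template.jinja is what transformers' apply_chat_template renders: each message becomes a <|role|> header line, followed by the content and the eos token, and add_generation_prompt=True appends a bare <|assistant|> header for generation. A minimal sketch, assuming the tokenizer is loaded from this repository (the repo id below is a placeholder, not necessarily the model's actual name):

from transformers import AutoTokenizer

# Placeholder repo id; replace with the actual model repository.
tokenizer = AutoTokenizer.from_pretrained("nm-autobot/example-model")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

# Renders the Jinja template above; with add_generation_prompt=True the
# rendered string ends with a trailing "<|assistant|>" so the model can
# start its reply.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)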
config.json CHANGED
@@ -65,7 +65,7 @@
  "rope_theta": 10000.0,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
- "transformers_version": "4.51.3",
+ "transformers_version": "4.52.3",
  "use_cache": true,
  "vocab_size": 32000
  }
generation_config.json CHANGED
@@ -3,5 +3,5 @@
  "eos_token_id": 2,
  "max_length": 2048,
  "pad_token_id": 0,
- "transformers_version": "4.51.3"
+ "transformers_version": "4.52.3"
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:8f64f967ac53b5ef7d6f72c297ca70d065b2c8c2b368be961770b4340a691308
+ oid sha256:30e0f0e98891d68a67aa7d4c8b9d9bb4ea4eee4353746e1d0404f4c9421640a0
  size 1232041608
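
The model.safetensors entry is a Git LFS pointer, so only the object hash (oid) changed while the declared size stayed the same. A minimal sketch for checking a locally downloaded copy against the new oid (the local path is an assumption):

import hashlib

# Hypothetical local path to the downloaded weights file.
path = "model.safetensors"

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

# Expected value taken from the updated LFS pointer above.
expected = "30e0f0e98891d68a67aa7d4c8b9d9bb4ea4eee4353746e1d0404f4c9421640a0"
print(sha.hexdigest() == expected)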
tokenizer_config.json CHANGED
@@ -29,7 +29,6 @@
  }
  },
  "bos_token": "<s>",
- "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
  "clean_up_tokenization_spaces": false,
  "eos_token": "</s>",
  "extra_special_tokens": {},