drjosh committed
Commit 5b73337 · verified · 1 Parent(s): d5debf3

Upload model trained with Unsloth 2x faster

special_tokens_map.json CHANGED
@@ -21,11 +21,5 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": {
-    "content": "<|PAD_TOKEN|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
+  "pad_token": "<|im_end|>"
 }
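This hunk drops the dedicated <|PAD_TOKEN|> entry and points pad_token at the existing <|im_end|> special token, so padding reuses a token the Qwen2 vocabulary already defines rather than a separately added one. A minimal sketch of checking the result after loading; the repo id below is a hypothetical placeholder, since this commit page does not show the full repository name:

from transformers import AutoTokenizer

# "drjosh/model" is a hypothetical placeholder -- substitute the actual repo id.
tokenizer = AutoTokenizer.from_pretrained("drjosh/model", revision="5b73337")

# After this commit, pad_token is the existing <|im_end|> special token
# instead of a dedicated <|PAD_TOKEN|> entry.
print(tokenizer.pad_token)                            # <|im_end|>
print(tokenizer.convert_tokens_to_ids("<|im_end|>"))  # same id as tokenizer.pad_token_id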
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:fab42efe8d17406525a9154b728cf9e957629a8ed7ce997770efdd71128c6a1a
-size 11422086
+oid sha256:6491529bdbb1841267c8124f124d453ac6da35baa42ab3df788b6ef9d1af6a96
+size 11422185
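tokenizer.json is stored via Git LFS, so only the pointer file changes here: the new oid is the SHA-256 of the re-serialized tokenizer file and size is its length in bytes (11,422,185, which is 99 bytes larger than before). A sketch of verifying the downloaded file against the pointer, again with a placeholder repo id:

import hashlib
from huggingface_hub import hf_hub_download

# "drjosh/model" is a hypothetical placeholder -- substitute the actual repo id.
path = hf_hub_download("drjosh/model", "tokenizer.json", revision="5b73337")

with open(path, "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()

# Should match the oid recorded in the LFS pointer above.
assert digest == "6491529bdbb1841267c8124f124d453ac6da35baa42ab3df788b6ef9d1af6a96"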
tokenizer_config.json CHANGED
@@ -209,7 +209,7 @@
     "errors": "replace",
     "extra_special_tokens": {},
     "model_max_length": 32768,
-    "pad_token": "<|PAD_TOKEN|>",
+    "pad_token": "<|im_end|>",
     "padding_side": "right",
     "split_special_tokens": false,
     "tokenizer_class": "Qwen2Tokenizer",