Commit 824b7cf (verified) by quwsarohi · 1 Parent(s): 206d602

websearch tool trained

Files changed (4):
  1. config.json +1 -1
  2. model.safetensors +1 -1
  3. tokenizer.json +3 -3
  4. tokenizer_config.json +7 -2
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "HuggingFaceTB/SmolLM2-360M-Instruct",
+  "_name_or_path": "quwsarohi/SmolThink",
   "architectures": [
     "LlamaForCausalLM"
   ],
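The only config.json change is the provenance field: the checkpoint now identifies itself as the fine-tuned repo rather than its SmolLM2-360M-Instruct base. A minimal loading sketch, assuming a standard transformers install and using the repo id written into this commit:

from transformers import AutoModelForCausalLM, AutoTokenizer

# "_name_or_path" is provenance metadata; loading still resolves the
# LlamaForCausalLM architecture declared in the same config.
model = AutoModelForCausalLM.from_pretrained("quwsarohi/SmolThink")
tokenizer = AutoTokenizer.from_pretrained("quwsarohi/SmolThink")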
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:cb4f31ca7ffc3f7adf764fbaa72db6b6d2b0ca72498db76a544ed22a8d01d618
+oid sha256:823bfaad03aad8aa42842405944ab43846ba8b97734e8e28877124b6604d0c73
 size 723674912
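model.safetensors is stored through Git LFS, so the diff touches only the pointer file: the size stays at 723,674,912 bytes while the sha256 oid changes to the retrained weights. A small standard-library sketch for checking a downloaded copy against the new pointer oid (the local path is an assumption):

import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    # Stream in 1 MiB chunks so the ~724 MB checkpoint never sits fully in memory.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):
            digest.update(chunk)
    return digest.hexdigest()

# True iff the local file matches the oid introduced in this commit.
print(sha256_of("model.safetensors")
      == "823bfaad03aad8aa42842405944ab43846ba8b97734e8e28877124b6604d0c73")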
tokenizer.json CHANGED
@@ -2,13 +2,13 @@
   "version": "1.0",
   "truncation": {
     "direction": "Right",
-    "max_length": 832,
+    "max_length": 3072,
     "strategy": "LongestFirst",
-    "stride": 104
+    "stride": 768
   },
   "padding": {
     "strategy": {
-      "Fixed": 832
+      "Fixed": 3072
     },
     "direction": "Right",
     "pad_to_multiple_of": null,
tokenizer_config.json CHANGED
@@ -1,6 +1,4 @@
 {
-  "add_bos_token": true,
-  "add_eos_token": true,
   "add_prefix_space": false,
   "added_tokens_decoder": {
     "0": {
@@ -149,9 +147,16 @@
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|im_end|>",
   "extra_special_tokens": {},
+  "max_length": 832,
   "model_max_length": 8192,
+  "pad_to_multiple_of": null,
   "pad_token": "<|endoftext|>",
+  "pad_token_type_id": 0,
+  "padding_side": "right",
+  "stride": 104,
   "tokenizer_class": "GPT2Tokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "<|endoftext|>",
   "vocab_size": 49152
 }
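tokenizer_config.json drops the blanket add_bos_token/add_eos_token flags, presumably so special tokens come from the <|im_start|>/<|im_end|> chat template rather than being injected on every encode, and persists the truncation/padding kwargs instead. Note these keep the older 832/104 values while tokenizer.json above moved to 3072/768. A minimal sketch of how the persisted settings surface on a loaded tokenizer, using standard transformers attribute names:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("quwsarohi/SmolThink")

# Persisted kwargs become tokenizer state rather than per-call arguments.
print(tok.model_max_length)                   # 8192
print(tok.padding_side, tok.truncation_side)  # right right
print(tok.pad_token, tok.eos_token)           # <|endoftext|> <|im_end|>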