lmmy committed
Commit d9c5c2b · verified · 1 parent: 0978342

Add files using upload-large-folder tool

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,55 @@
+ ---
+ language:
+ - en
+ - fr
+ - de
+ - es
+ - pt
+ - it
+ - ja
+ - ko
+ - ru
+ - zh
+ - ar
+ - fa
+ - id
+ - ms
+ - ne
+ - pl
+ - ro
+ - sr
+ - sv
+ - tr
+ - uk
+ - vi
+ - hi
+ - bn
+ license: apache-2.0
+ library_name: vllm
+ inference: false
+ base_model: mistralai/Devstral-Small-2507
+ extra_gated_description: If you want to learn more about how we process your personal
+   data, please read our <a href="https://mistral.ai/terms/">Privacy Policy</a>.
+ pipeline_tag: text2text-generation
+ tags:
+ - mlx
+ ---
+ ## 💫 Community Model> Devstral-Small-2507 by mistralai
+
+ *👾 [LM Studio](https://lmstudio.ai) Community models highlights program. Highlighting new & noteworthy models by the community. Join the conversation on [Discord](https://discord.gg/aPQfnNkxGC)*.
+
+ **Model creator:** [mistralai](https://huggingface.co/mistralai)<br>
+ **Original model:** [Devstral-Small-2507](https://huggingface.co/mistralai/Devstral-Small-2507)<br>
+ **MLX conversion:** provided by [LM Studio team](https://x.com/lmstudio) using [mlx_lm](https://github.com/ml-explore/mlx-lm)<br>
+
+ ## Technical Details
+
+ This is the original bfloat16 version of Devstral-Small-2507, converted to MLX format and optimized for Apple Silicon.
+
+ ## Special thanks
+
+ 🙏 Special thanks to the [Apple Machine Learning Research](https://github.com/ml-explore) team for creating [MLX](https://github.com/ml-explore/mlx).
+
+ ## Disclaimers
+
+ LM Studio is not the creator, originator, or owner of any Model featured in the Community Model Program. Each Community Model is created and provided by third parties. LM Studio does not endorse, support, represent or guarantee the completeness, truthfulness, accuracy, or reliability of any Community Model. You understand that Community Models can produce content that might be offensive, harmful, inaccurate, deceptive, or otherwise inappropriate. Each Community Model is the sole responsibility of the person or entity who originated it. LM Studio may not monitor or control the Community Models and cannot, and does not, take responsibility for any such Model. LM Studio disclaims all warranties or guarantees about the accuracy, reliability or benefits of the Community Models. LM Studio further disclaims any warranty that the Community Model will meet your requirements, be secure, uninterrupted or available at any time or location, or error-free, virus-free, or that any errors will be corrected, or otherwise. You will be solely responsible for any damage resulting from your use of or access to the Community Models, your downloading of any Community Model, or your use of any other Community Model provided by or through LM Studio.
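The `mlx_lm` package named in the card exposes a simple load/generate API; a minimal usage sketch follows. The repository id below is a placeholder assumption, not something this commit confirms.

```python
# Minimal mlx_lm usage sketch; the repo id is a placeholder assumption.
from mlx_lm import load, generate

model, tokenizer = load("lmstudio-community/Devstral-Small-2507-mlx")  # hypothetical id
prompt = tokenizer.apply_chat_template(
    [{"role": "user", "content": "Write a quicksort in Python."}],
    add_generation_prompt=True,
)
print(generate(model, tokenizer, prompt=prompt, max_tokens=256))
```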
chat_template.jinja ADDED
@@ -0,0 +1,45 @@
+ {%- set default_system_message = 'You are Devstral, a helpful agentic model trained by Mistral AI and using the OpenHands scaffold. You can interact with a computer to solve tasks.\\n\\n<ROLE>\\nYour primary role is to assist users by executing commands, modifying code, and solving technical problems effectively. You should be thorough, methodical, and prioritize quality over speed.\\n* If the user asks a question, like \\"why is X happening\\", don\'t try to fix the problem. Just give an answer to the question.\\n</ROLE>\\n\\n<EFFICIENCY>\\n* Each action you take is somewhat expensive. Wherever possible, combine multiple actions into a single action, e.g. combine multiple bash commands into one, using sed and grep to edit/view multiple files at once.\\n* When exploring the codebase, use efficient tools like find, grep, and git commands with appropriate filters to minimize unnecessary operations.\\n</EFFICIENCY>\\n\\n<FILE_SYSTEM_GUIDELINES>\\n* When a user provides a file path, do NOT assume it\'s relative to the current working directory. First explore the file system to locate the file before working on it.\\n* If asked to edit a file, edit the file directly, rather than creating a new file with a different filename.\\n* For global search-and-replace operations, consider using `sed` instead of opening file editors multiple times.\\n</FILE_SYSTEM_GUIDELINES>\\n\\n<CODE_QUALITY>\\n* Write clean, efficient code with minimal comments. Avoid redundancy in comments: Do not repeat information that can be easily inferred from the code itself.\\n* When implementing solutions, focus on making the minimal changes needed to solve the problem.\\n* Before implementing any changes, first thoroughly understand the codebase through exploration.\\n* If you are adding a lot of code to a function or file, consider splitting the function or file into smaller pieces when appropriate.\\n</CODE_QUALITY>\\n\\n<VERSION_CONTROL>\\n* When configuring git credentials, use \\"openhands\\" as the user.name and \\"[email protected]\\" as the user.email by default, unless explicitly instructed otherwise.\\n* Exercise caution with git operations. Do NOT make potentially dangerous changes (e.g., pushing to main, deleting repositories) unless explicitly asked to do so.\\n* When committing changes, use `git status` to see all modified files, and stage all files necessary for the commit. Use `git commit -a` whenever possible.\\n* Do NOT commit files that typically shouldn\'t go into version control (e.g., node_modules/, .env files, build directories, cache files, large binaries) unless explicitly instructed by the user.\\n* If unsure about committing certain files, check for the presence of .gitignore files or ask the user for clarification.\\n</VERSION_CONTROL>\\n\\n<PULL_REQUESTS>\\n* When creating pull requests, create only ONE per session/issue unless explicitly instructed otherwise.\\n* When working with an existing PR, update it with new commits rather than creating additional PRs for the same issue.\\n* When updating a PR, preserve the original PR title and purpose, updating description only when necessary.\\n</PULL_REQUESTS>\\n\\n<PROBLEM_SOLVING_WORKFLOW>\\n1. EXPLORATION: Thoroughly explore relevant files and understand the context before proposing solutions\\n2. ANALYSIS: Consider multiple approaches and select the most promising one\\n3. TESTING:\\n * For bug fixes: Create tests to verify issues before implementing fixes\\n * For new features: Consider test-driven development when appropriate\\n * If the repository lacks testing infrastructure and implementing tests would require extensive setup, consult with the user before investing time in building testing infrastructure\\n * If the environment is not set up to run tests, consult with the user first before investing time to install all dependencies\\n4. IMPLEMENTATION: Make focused, minimal changes to address the problem\\n5. VERIFICATION: If the environment is set up to run tests, test your implementation thoroughly, including edge cases. If the environment is not set up to run tests, consult with the user first before investing time to run tests.\\n</PROBLEM_SOLVING_WORKFLOW>\\n\\n<SECURITY>\\n* Only use GITHUB_TOKEN and other credentials in ways the user has explicitly requested and would expect.\\n* Use APIs to work with GitHub or other platforms, unless the user asks otherwise or your task requires browsing.\\n</SECURITY>\\n\\n<ENVIRONMENT_SETUP>\\n* When user asks you to run an application, don\'t stop if the application is not installed. Instead, please install the application and run the command again.\\n* If you encounter missing dependencies:\\n 1. First, look around in the repository for existing dependency files (requirements.txt, pyproject.toml, package.json, Gemfile, etc.)\\n 2. If dependency files exist, use them to install all dependencies at once (e.g., `pip install -r requirements.txt`, `npm install`, etc.)\\n 3. Only install individual packages directly if no dependency files are found or if only specific packages are needed\\n* Similarly, if you encounter missing dependencies for essential tools requested by the user, install them when possible.\\n</ENVIRONMENT_SETUP>\\n\\n<TROUBLESHOOTING>\\n* If you\'ve made repeated attempts to solve a problem but tests still fail or the user reports it\'s still broken:\\n 1. Step back and reflect on 5-7 different possible sources of the problem\\n 2. Assess the likelihood of each possible cause\\n 3. Methodically address the most likely causes, starting with the highest probability\\n 4. Document your reasoning process\\n* When you run into any major issue while executing a plan from the user, please don\'t try to directly work around it. Instead, propose a new plan and confirm with the user before proceeding.\\n</TROUBLESHOOTING>\\n' %}
+ {{- bos_token }}
+ {%- if messages[0]['role'] == 'system' %}
+ {%- if messages[0]['content'] is string %}
+ {%- set system_message = messages[0]['content'] %}
+ {%- else %}
+ {%- set system_message = messages[0]['content'][0]['text'] %}
+ {%- endif %}
+ {%- set loop_messages = messages[1:] %}
+ {%- else %}
+ {%- set system_message = default_system_message %}
+ {%- set loop_messages = messages %}
+ {%- endif %}
+ {{- '[SYSTEM_PROMPT]' + system_message + '[/SYSTEM_PROMPT]' }}
+ {%- for message in loop_messages %}
+ {%- if message['role'] == 'user' %}
+ {%- if message['content'] is string %}
+ {{- '[INST]' + message['content'] + '[/INST]' }}
+ {%- else %}
+ {{- '[INST]' }}
+ {%- for block in message['content'] %}
+ {%- if block['type'] == 'text' %}
+ {{- block['text'] }}
+ {%- else %}
+ {{- raise_exception('Only text is supported in message content!') }}
+ {%- endif %}
+ {%- endfor %}
+ {{- '[/INST]' }}
+ {%- endif %}
+ {%- elif message['role'] == 'system' %}
+ {%- if message['content'] is string %}
+ {{- '[SYSTEM_PROMPT]' + message['content'] + '[/SYSTEM_PROMPT]' }}
+ {%- else %}
+ {{- '[SYSTEM_PROMPT]' + message['content'][0]['text'] + '[/SYSTEM_PROMPT]' }}
+ {%- endif %}
+ {%- elif message['role'] == 'assistant' %}
+ {%- if message['content'] is string %}
+ {{- message['content'] + eos_token }}
+ {%- else %}
+ {{- message['content'][0]['text'] + eos_token }}
+ {%- endif %}
+ {%- else %}
+ {{- raise_exception('Only user, system and assistant roles are supported!') }}
+ {%- endif %}
+ {%- endfor %}
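The template above prepends `bos_token`, wraps system turns in `[SYSTEM_PROMPT]…[/SYSTEM_PROMPT]` and user turns in `[INST]…[/INST]`, and falls back to the embedded OpenHands system prompt when no system message is supplied. A minimal sketch of rendering it through transformers' chat-template machinery; the repository path is a placeholder assumption:

```python
# Sketch: render the chat template above; "path/to/this/repo" is a placeholder.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/this/repo")
messages = [
    {"role": "system", "content": "You are a terse coding assistant."},
    {"role": "user", "content": "Explain GQA in one sentence."},
]
text = tok.apply_chat_template(messages, tokenize=False)
# Expected rendering per the template:
# <s>[SYSTEM_PROMPT]You are a terse coding assistant.[/SYSTEM_PROMPT][INST]Explain GQA in one sentence.[/INST]
print(text)
```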
config.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "architectures": [
+ "MistralForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 5120,
+ "initializer_range": 0.02,
+ "intermediate_size": 32768,
+ "max_position_embeddings": 131072,
+ "model_type": "mistral",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 40,
+ "num_key_value_heads": 8,
+ "pad_token_id": 11,
+ "rms_norm_eps": 1e-05,
+ "rope_theta": 1000000000.0,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.53.1",
+ "use_cache": true,
+ "vocab_size": 131072
+ }
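The config pins down the architecture: grouped-query attention with 32 query heads sharing 8 KV heads (head_dim 128), 40 layers, a 32768-wide SiLU MLP, and untied 131072-token embeddings. A quick sanity check that these hyperparameters reproduce the `total_parameters` value reported in model.safetensors.index.json below:

```python
# Recompute the total parameter count from the config values above.
hidden, inter, layers, vocab = 5120, 32768, 40, 131072
heads, kv_heads, head_dim = 32, 8, 128

attn = hidden * heads * head_dim * 2      # q_proj + o_proj
attn += hidden * kv_heads * head_dim * 2  # k_proj + v_proj (GQA: 8 KV heads)
mlp = hidden * inter * 3                  # gate_proj, up_proj, down_proj
norms = hidden * 2                        # input_ and post_attention_layernorm
per_layer = attn + mlp + norms

total = layers * per_layer + 2 * vocab * hidden + hidden  # + embed, lm_head, final norm
print(total)  # 23572403200, matching "total_parameters" in the shard index
```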
generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "pad_token_id": 11,
+ "transformers_version": "4.53.1"
+ }
model-00001-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4c97e7429c8a770a3c2ea458b02516e75e99ec78f9105b3988ec33a9506239c8
+ size 5117116159
model-00002-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c13dca2d66a13e935350186bdc45ccd022f4c5f02b26fb60b2049d61b91e8286
+ size 5222015985
model-00003-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9a49f78c2dcefbf0c3a89829aafbb22ddad80c0ed14207d6f1922a32fa39f79b
+ size 5117137230
model-00004-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:35ce51217f5029c64ed7e928e29b028e25fab752cbd094453430695d9a929004
+ size 5222016045
model-00005-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5eb1562adf0c7297d2dc240b1ebd6f99ec4191a5a0dcd5cb1de82a892d170fc8
+ size 5222016025
model-00006-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4731687257badc6ca0c4f9d9350cf902ea5376729eca27611a5a320edc8dbb8e
+ size 5117137260
model-00007-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:20e60948c370375d2874f2bce7db04fa0b1465e89416d6b0f4b526b05e7eda4a
+ size 5222016027
model-00008-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a8e7eb490d4c3922f502f2e1995b3ff7d2f7af268842b21b61398ce4d0220e0a
+ size 5222016039
model-00009-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2df07c008b047c51911cc581335f760505616aa2dcd3b52075a29f7ddce773ee
+ size 4341200611
model-00010-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5cf0483af4189194bab770598253da2df0dde722a799da7e71bc94c0d51fa478
+ size 1342177407
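Each `.safetensors` entry above is a Git LFS pointer (spec version, sha256 oid, byte size) rather than the weights themselves; LFS substitutes the real shard at checkout. A sketch of verifying a downloaded shard against its pointer:

```python
# Sketch: check a downloaded shard against the sha256 oid and size in its LFS pointer.
import hashlib, os

def sha256_of(path: str, chunk: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(chunk), b""):
            h.update(block)
    return h.hexdigest()

path = "model-00001-of-00010.safetensors"
assert os.path.getsize(path) == 5117116159
assert sha256_of(path) == "4c97e7429c8a770a3c2ea458b02516e75e99ec78f9105b3988ec33a9506239c8"
```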
model.safetensors.index.json ADDED
@@ -0,0 +1,371 @@
+ {
+ "metadata": {
+ "total_size": 47144806400,
+ "total_parameters": 23572403200
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00010-of-00010.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00010.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00010.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00010.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00010.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00010.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00003-of-00010.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00003-of-00010.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00003-of-00010.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00003-of-00010.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00004-of-00010.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00004-of-00010.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00004-of-00010.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00004-of-00010.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00004-of-00010.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00004-of-00010.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00004-of-00010.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00004-of-00010.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00004-of-00010.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00004-of-00010.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00005-of-00010.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00005-of-00010.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00004-of-00010.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00005-of-00010.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00005-of-00010.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00005-of-00010.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00005-of-00010.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00010.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00010.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00005-of-00010.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00005-of-00010.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00005-of-00010.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00005-of-00010.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00006-of-00010.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00006-of-00010.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00005-of-00010.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00006-of-00010.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00006-of-00010.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00006-of-00010.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00006-of-00010.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00006-of-00010.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00006-of-00010.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00007-of-00010.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00007-of-00010.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00006-of-00010.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00007-of-00010.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00007-of-00010.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00007-of-00010.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00007-of-00010.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00007-of-00010.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00007-of-00010.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00002-of-00010.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00002-of-00010.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00010.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00007-of-00010.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00007-of-00010.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.30.self_attn.q_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.30.self_attn.v_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.31.input_layernorm.weight": "model-00008-of-00010.safetensors",
+ "model.layers.31.mlp.down_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.31.mlp.gate_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.31.mlp.up_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.31.post_attention_layernorm.weight": "model-00008-of-00010.safetensors",
+ "model.layers.31.self_attn.k_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.31.self_attn.o_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.31.self_attn.q_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.31.self_attn.v_proj.weight": "model-00007-of-00010.safetensors",
+ "model.layers.32.input_layernorm.weight": "model-00008-of-00010.safetensors",
+ "model.layers.32.mlp.down_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.32.mlp.gate_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.32.mlp.up_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.32.post_attention_layernorm.weight": "model-00008-of-00010.safetensors",
+ "model.layers.32.self_attn.k_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.32.self_attn.o_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.32.self_attn.q_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.32.self_attn.v_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.33.input_layernorm.weight": "model-00008-of-00010.safetensors",
+ "model.layers.33.mlp.down_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.33.mlp.gate_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.33.mlp.up_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.33.post_attention_layernorm.weight": "model-00008-of-00010.safetensors",
+ "model.layers.33.self_attn.k_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.33.self_attn.o_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.33.self_attn.q_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.33.self_attn.v_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.34.input_layernorm.weight": "model-00008-of-00010.safetensors",
+ "model.layers.34.mlp.down_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.34.mlp.gate_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.34.mlp.up_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.34.post_attention_layernorm.weight": "model-00008-of-00010.safetensors",
+ "model.layers.34.self_attn.k_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.34.self_attn.o_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.34.self_attn.q_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.34.self_attn.v_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.35.input_layernorm.weight": "model-00008-of-00010.safetensors",
+ "model.layers.35.mlp.down_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.35.mlp.gate_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.35.mlp.up_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.35.post_attention_layernorm.weight": "model-00008-of-00010.safetensors",
+ "model.layers.35.self_attn.k_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.35.self_attn.o_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.35.self_attn.q_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.35.self_attn.v_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.36.input_layernorm.weight": "model-00009-of-00010.safetensors",
+ "model.layers.36.mlp.down_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.36.mlp.gate_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.36.mlp.up_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.36.post_attention_layernorm.weight": "model-00009-of-00010.safetensors",
+ "model.layers.36.self_attn.k_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.36.self_attn.o_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.36.self_attn.q_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.36.self_attn.v_proj.weight": "model-00008-of-00010.safetensors",
+ "model.layers.37.input_layernorm.weight": "model-00009-of-00010.safetensors",
+ "model.layers.37.mlp.down_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.37.mlp.gate_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.37.mlp.up_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.37.post_attention_layernorm.weight": "model-00009-of-00010.safetensors",
+ "model.layers.37.self_attn.k_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.37.self_attn.o_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.37.self_attn.q_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.37.self_attn.v_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.38.input_layernorm.weight": "model-00009-of-00010.safetensors",
+ "model.layers.38.mlp.down_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.38.mlp.gate_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.38.mlp.up_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.38.post_attention_layernorm.weight": "model-00009-of-00010.safetensors",
+ "model.layers.38.self_attn.k_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.38.self_attn.o_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.38.self_attn.q_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.38.self_attn.v_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.39.input_layernorm.weight": "model-00009-of-00010.safetensors",
+ "model.layers.39.mlp.down_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.39.mlp.gate_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.39.mlp.up_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.39.post_attention_layernorm.weight": "model-00009-of-00010.safetensors",
+ "model.layers.39.self_attn.k_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.39.self_attn.o_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.39.self_attn.q_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.39.self_attn.v_proj.weight": "model-00009-of-00010.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00002-of-00010.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00002-of-00010.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00002-of-00010.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00010.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00002-of-00010.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00010.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00002-of-00010.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00010.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00003-of-00010.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00003-of-00010.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00010.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00003-of-00010.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00003-of-00010.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00003-of-00010.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00003-of-00010.safetensors",
+ "model.norm.weight": "model-00009-of-00010.safetensors"
+ }
+ }
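The weight_map associates every tensor name with the shard that stores it, so a loader can fetch a single tensor without touching the other ~47 GB. A sketch using the safetensors library; file names are as in this commit, local paths assumed:

```python
# Sketch: use the index to locate and read one tensor without loading every shard.
import json
from safetensors import safe_open  # pip install safetensors

with open("model.safetensors.index.json") as f:
    weight_map = json.load(f)["weight_map"]

name = "model.layers.0.self_attn.q_proj.weight"
shard = weight_map[name]  # -> "model-00001-of-00010.safetensors"
with safe_open(shard, framework="pt") as st:
    w = st.get_tensor(name)
print(w.shape, w.dtype)  # torch.Size([4096, 5120]), torch.bfloat16 per config.json
```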
special_tokens_map.json ADDED
@@ -0,0 +1,1032 @@
+ {
+ "additional_special_tokens": [
+ "<unk>",
+ "<s>",
+ "</s>",
+ "[INST]",
+ "[/INST]",
+ "[AVAILABLE_TOOLS]",
+ "[/AVAILABLE_TOOLS]",
+ "[TOOL_RESULTS]",
+ "[/TOOL_RESULTS]",
+ "[TOOL_CALLS]",
+ "[IMG]",
+ "<pad>",
+ "[IMG_BREAK]",
+ "[IMG_END]",
+ "[PREFIX]",
+ "[MIDDLE]",
+ "[SUFFIX]",
+ "[SYSTEM_PROMPT]",
+ "[/SYSTEM_PROMPT]",
+ "[TOOL_CONTENT]",
+ "<SPECIAL_20>",
+ "<SPECIAL_21>",
+ "<SPECIAL_22>",
+ "<SPECIAL_23>",
+ "<SPECIAL_24>",
+ "<SPECIAL_25>",
+ "<SPECIAL_26>",
+ "<SPECIAL_27>",
+ "<SPECIAL_28>",
+ "<SPECIAL_29>",
+ "<SPECIAL_30>",
+ "<SPECIAL_31>",
+ "[ARGS]",
+ "[CALL_ID]",
+ "<SPECIAL_34>",
+ "<SPECIAL_35>",
+ "<SPECIAL_36>",
+ "<SPECIAL_37>",
+ "<SPECIAL_38>",
+ "<SPECIAL_39>",
+ "<SPECIAL_40>",
+ "<SPECIAL_41>",
+ "<SPECIAL_42>",
+ "<SPECIAL_43>",
+ "<SPECIAL_44>",
+ "<SPECIAL_45>",
+ "<SPECIAL_46>",
+ "<SPECIAL_47>",
+ "<SPECIAL_48>",
+ "<SPECIAL_49>",
+ "<SPECIAL_50>",
+ "<SPECIAL_51>",
+ "<SPECIAL_52>",
+ "<SPECIAL_53>",
+ "<SPECIAL_54>",
+ "<SPECIAL_55>",
+ "<SPECIAL_56>",
+ "<SPECIAL_57>",
+ "<SPECIAL_58>",
+ "<SPECIAL_59>",
+ "<SPECIAL_60>",
+ "<SPECIAL_61>",
+ "<SPECIAL_62>",
+ "<SPECIAL_63>",
+ "<SPECIAL_64>",
+ "<SPECIAL_65>",
+ "<SPECIAL_66>",
+ "<SPECIAL_67>",
+ "<SPECIAL_68>",
+ "<SPECIAL_69>",
+ "<SPECIAL_70>",
+ "<SPECIAL_71>",
+ "<SPECIAL_72>",
+ "<SPECIAL_73>",
+ "<SPECIAL_74>",
+ "<SPECIAL_75>",
+ "<SPECIAL_76>",
+ "<SPECIAL_77>",
+ "<SPECIAL_78>",
+ "<SPECIAL_79>",
+ "<SPECIAL_80>",
+ "<SPECIAL_81>",
+ "<SPECIAL_82>",
+ "<SPECIAL_83>",
+ "<SPECIAL_84>",
+ "<SPECIAL_85>",
+ "<SPECIAL_86>",
+ "<SPECIAL_87>",
+ "<SPECIAL_88>",
+ "<SPECIAL_89>",
+ "<SPECIAL_90>",
+ "<SPECIAL_91>",
+ "<SPECIAL_92>",
+ "<SPECIAL_93>",
+ "<SPECIAL_94>",
+ "<SPECIAL_95>",
+ "<SPECIAL_96>",
+ "<SPECIAL_97>",
+ "<SPECIAL_98>",
+ "<SPECIAL_99>",
+ "<SPECIAL_100>",
+ "<SPECIAL_101>",
+ "<SPECIAL_102>",
+ "<SPECIAL_103>",
+ "<SPECIAL_104>",
+ "<SPECIAL_105>",
+ "<SPECIAL_106>",
+ "<SPECIAL_107>",
+ "<SPECIAL_108>",
+ "<SPECIAL_109>",
+ "<SPECIAL_110>",
+ "<SPECIAL_111>",
+ "<SPECIAL_112>",
+ "<SPECIAL_113>",
+ "<SPECIAL_114>",
+ "<SPECIAL_115>",
+ "<SPECIAL_116>",
+ "<SPECIAL_117>",
+ "<SPECIAL_118>",
+ "<SPECIAL_119>",
+ "<SPECIAL_120>",
+ "<SPECIAL_121>",
+ "<SPECIAL_122>",
+ "<SPECIAL_123>",
+ "<SPECIAL_124>",
+ "<SPECIAL_125>",
+ "<SPECIAL_126>",
+ "<SPECIAL_127>",
+ "<SPECIAL_128>",
+ "<SPECIAL_129>",
+ "<SPECIAL_130>",
+ "<SPECIAL_131>",
+ "<SPECIAL_132>",
+ "<SPECIAL_133>",
+ "<SPECIAL_134>",
+ "<SPECIAL_135>",
+ "<SPECIAL_136>",
+ "<SPECIAL_137>",
+ "<SPECIAL_138>",
+ "<SPECIAL_139>",
+ "<SPECIAL_140>",
+ "<SPECIAL_141>",
+ "<SPECIAL_142>",
+ "<SPECIAL_143>",
+ "<SPECIAL_144>",
+ "<SPECIAL_145>",
+ "<SPECIAL_146>",
+ "<SPECIAL_147>",
+ "<SPECIAL_148>",
+ "<SPECIAL_149>",
+ "<SPECIAL_150>",
+ "<SPECIAL_151>",
+ "<SPECIAL_152>",
+ "<SPECIAL_153>",
+ "<SPECIAL_154>",
+ "<SPECIAL_155>",
+ "<SPECIAL_156>",
+ "<SPECIAL_157>",
+ "<SPECIAL_158>",
+ "<SPECIAL_159>",
+ "<SPECIAL_160>",
+ "<SPECIAL_161>",
+ "<SPECIAL_162>",
+ "<SPECIAL_163>",
+ "<SPECIAL_164>",
+ "<SPECIAL_165>",
+ "<SPECIAL_166>",
+ "<SPECIAL_167>",
+ "<SPECIAL_168>",
+ "<SPECIAL_169>",
+ "<SPECIAL_170>",
+ "<SPECIAL_171>",
+ "<SPECIAL_172>",
+ "<SPECIAL_173>",
+ "<SPECIAL_174>",
+ "<SPECIAL_175>",
+ "<SPECIAL_176>",
+ "<SPECIAL_177>",
+ "<SPECIAL_178>",
+ "<SPECIAL_179>",
+ "<SPECIAL_180>",
+ "<SPECIAL_181>",
+ "<SPECIAL_182>",
+ "<SPECIAL_183>",
+ "<SPECIAL_184>",
+ "<SPECIAL_185>",
+ "<SPECIAL_186>",
+ "<SPECIAL_187>",
+ "<SPECIAL_188>",
+ "<SPECIAL_189>",
+ "<SPECIAL_190>",
+ "<SPECIAL_191>",
+ "<SPECIAL_192>",
+ "<SPECIAL_193>",
+ "<SPECIAL_194>",
+ "<SPECIAL_195>",
+ "<SPECIAL_196>",
+ "<SPECIAL_197>",
+ "<SPECIAL_198>",
+ "<SPECIAL_199>",
+ "<SPECIAL_200>",
+ "<SPECIAL_201>",
+ "<SPECIAL_202>",
+ "<SPECIAL_203>",
+ "<SPECIAL_204>",
+ "<SPECIAL_205>",
+ "<SPECIAL_206>",
+ "<SPECIAL_207>",
+ "<SPECIAL_208>",
+ "<SPECIAL_209>",
+ "<SPECIAL_210>",
+ "<SPECIAL_211>",
+ "<SPECIAL_212>",
+ "<SPECIAL_213>",
+ "<SPECIAL_214>",
+ "<SPECIAL_215>",
+ "<SPECIAL_216>",
+ "<SPECIAL_217>",
+ "<SPECIAL_218>",
+ "<SPECIAL_219>",
+ "<SPECIAL_220>",
+ "<SPECIAL_221>",
+ "<SPECIAL_222>",
+ "<SPECIAL_223>",
+ "<SPECIAL_224>",
+ "<SPECIAL_225>",
+ "<SPECIAL_226>",
+ "<SPECIAL_227>",
+ "<SPECIAL_228>",
+ "<SPECIAL_229>",
+ "<SPECIAL_230>",
+ "<SPECIAL_231>",
+ "<SPECIAL_232>",
+ "<SPECIAL_233>",
+ "<SPECIAL_234>",
+ "<SPECIAL_235>",
+ "<SPECIAL_236>",
+ "<SPECIAL_237>",
+ "<SPECIAL_238>",
+ "<SPECIAL_239>",
+ "<SPECIAL_240>",
+ "<SPECIAL_241>",
+ "<SPECIAL_242>",
+ "<SPECIAL_243>",
+ "<SPECIAL_244>",
+ "<SPECIAL_245>",
+ "<SPECIAL_246>",
+ "<SPECIAL_247>",
+ "<SPECIAL_248>",
+ "<SPECIAL_249>",
+ "<SPECIAL_250>",
+ "<SPECIAL_251>",
+ "<SPECIAL_252>",
+ "<SPECIAL_253>",
+ "<SPECIAL_254>",
+ "<SPECIAL_255>",
+ "<SPECIAL_256>",
+ "<SPECIAL_257>",
+ "<SPECIAL_258>",
+ "<SPECIAL_259>",
+ "<SPECIAL_260>",
+ "<SPECIAL_261>",
+ "<SPECIAL_262>",
+ "<SPECIAL_263>",
+ "<SPECIAL_264>",
+ "<SPECIAL_265>",
+ "<SPECIAL_266>",
+ "<SPECIAL_267>",
+ "<SPECIAL_268>",
+ "<SPECIAL_269>",
+ "<SPECIAL_270>",
+ "<SPECIAL_271>",
+ "<SPECIAL_272>",
+ "<SPECIAL_273>",
+ "<SPECIAL_274>",
+ "<SPECIAL_275>",
+ "<SPECIAL_276>",
+ "<SPECIAL_277>",
+ "<SPECIAL_278>",
+ "<SPECIAL_279>",
+ "<SPECIAL_280>",
+ "<SPECIAL_281>",
+ "<SPECIAL_282>",
+ "<SPECIAL_283>",
+ "<SPECIAL_284>",
+ "<SPECIAL_285>",
+ "<SPECIAL_286>",
+ "<SPECIAL_287>",
+ "<SPECIAL_288>",
+ "<SPECIAL_289>",
+ "<SPECIAL_290>",
+ "<SPECIAL_291>",
+ "<SPECIAL_292>",
+ "<SPECIAL_293>",
+ "<SPECIAL_294>",
+ "<SPECIAL_295>",
+ "<SPECIAL_296>",
+ "<SPECIAL_297>",
+ "<SPECIAL_298>",
+ "<SPECIAL_299>",
+ "<SPECIAL_300>",
+ "<SPECIAL_301>",
+ "<SPECIAL_302>",
+ "<SPECIAL_303>",
+ "<SPECIAL_304>",
+ "<SPECIAL_305>",
+ "<SPECIAL_306>",
+ "<SPECIAL_307>",
+ "<SPECIAL_308>",
+ "<SPECIAL_309>",
+ "<SPECIAL_310>",
+ "<SPECIAL_311>",
+ "<SPECIAL_312>",
+ "<SPECIAL_313>",
+ "<SPECIAL_314>",
+ "<SPECIAL_315>",
+ "<SPECIAL_316>",
+ "<SPECIAL_317>",
+ "<SPECIAL_318>",
+ "<SPECIAL_319>",
+ "<SPECIAL_320>",
+ "<SPECIAL_321>",
+ "<SPECIAL_322>",
+ "<SPECIAL_323>",
+ "<SPECIAL_324>",
+ "<SPECIAL_325>",
+ "<SPECIAL_326>",
+ "<SPECIAL_327>",
+ "<SPECIAL_328>",
+ "<SPECIAL_329>",
+ "<SPECIAL_330>",
+ "<SPECIAL_331>",
+ "<SPECIAL_332>",
+ "<SPECIAL_333>",
+ "<SPECIAL_334>",
+ "<SPECIAL_335>",
+ "<SPECIAL_336>",
+ "<SPECIAL_337>",
+ "<SPECIAL_338>",
+ "<SPECIAL_339>",
+ "<SPECIAL_340>",
+ "<SPECIAL_341>",
+ "<SPECIAL_342>",
+ "<SPECIAL_343>",
+ "<SPECIAL_344>",
+ "<SPECIAL_345>",
+ "<SPECIAL_346>",
+ "<SPECIAL_347>",
+ "<SPECIAL_348>",
+ "<SPECIAL_349>",
+ "<SPECIAL_350>",
+ "<SPECIAL_351>",
+ "<SPECIAL_352>",
+ "<SPECIAL_353>",
+ "<SPECIAL_354>",
+ "<SPECIAL_355>",
+ "<SPECIAL_356>",
+ "<SPECIAL_357>",
+ "<SPECIAL_358>",
+ "<SPECIAL_359>",
+ "<SPECIAL_360>",
+ "<SPECIAL_361>",
+ "<SPECIAL_362>",
+ "<SPECIAL_363>",
+ "<SPECIAL_364>",
+ "<SPECIAL_365>",
+ "<SPECIAL_366>",
+ "<SPECIAL_367>",
+ "<SPECIAL_368>",
+ "<SPECIAL_369>",
+ "<SPECIAL_370>",
+ "<SPECIAL_371>",
+ "<SPECIAL_372>",
+ "<SPECIAL_373>",
+ "<SPECIAL_374>",
+ "<SPECIAL_375>",
+ "<SPECIAL_376>",
+ "<SPECIAL_377>",
+ "<SPECIAL_378>",
+ "<SPECIAL_379>",
+ "<SPECIAL_380>",
+ "<SPECIAL_381>",
+ "<SPECIAL_382>",
+ "<SPECIAL_383>",
+ "<SPECIAL_384>",
+ "<SPECIAL_385>",
+ "<SPECIAL_386>",
+ "<SPECIAL_387>",
+ "<SPECIAL_388>",
+ "<SPECIAL_389>",
+ "<SPECIAL_390>",
+ "<SPECIAL_391>",
+ "<SPECIAL_392>",
+ "<SPECIAL_393>",
+ "<SPECIAL_394>",
+ "<SPECIAL_395>",
+ "<SPECIAL_396>",
+ "<SPECIAL_397>",
+ "<SPECIAL_398>",
+ "<SPECIAL_399>",
+ "<SPECIAL_400>",
+ "<SPECIAL_401>",
+ "<SPECIAL_402>",
+ "<SPECIAL_403>",
+ "<SPECIAL_404>",
+ "<SPECIAL_405>",
+ "<SPECIAL_406>",
+ "<SPECIAL_407>",
+ "<SPECIAL_408>",
+ "<SPECIAL_409>",
+ "<SPECIAL_410>",
+ "<SPECIAL_411>",
+ "<SPECIAL_412>",
+ "<SPECIAL_413>",
+ "<SPECIAL_414>",
+ "<SPECIAL_415>",
+ "<SPECIAL_416>",
+ "<SPECIAL_417>",
+ "<SPECIAL_418>",
+ "<SPECIAL_419>",
+ "<SPECIAL_420>",
+ "<SPECIAL_421>",
+ "<SPECIAL_422>",
+ "<SPECIAL_423>",
+ "<SPECIAL_424>",
+ "<SPECIAL_425>",
+ "<SPECIAL_426>",
+ "<SPECIAL_427>",
+ "<SPECIAL_428>",
+ "<SPECIAL_429>",
+ "<SPECIAL_430>",
+ "<SPECIAL_431>",
+ "<SPECIAL_432>",
+ "<SPECIAL_433>",
+ "<SPECIAL_434>",
+ "<SPECIAL_435>",
+ "<SPECIAL_436>",
+ "<SPECIAL_437>",
+ "<SPECIAL_438>",
+ "<SPECIAL_439>",
+ "<SPECIAL_440>",
+ "<SPECIAL_441>",
+ "<SPECIAL_442>",
+ "<SPECIAL_443>",
+ "<SPECIAL_444>",
+ "<SPECIAL_445>",
+ "<SPECIAL_446>",
+ "<SPECIAL_447>",
+ "<SPECIAL_448>",
+ "<SPECIAL_449>",
+ "<SPECIAL_450>",
+ "<SPECIAL_451>",
+ "<SPECIAL_452>",
+ "<SPECIAL_453>",
+ "<SPECIAL_454>",
+ "<SPECIAL_455>",
+ "<SPECIAL_456>",
+ "<SPECIAL_457>",
+ "<SPECIAL_458>",
+ "<SPECIAL_459>",
+ "<SPECIAL_460>",
+ "<SPECIAL_461>",
+ "<SPECIAL_462>",
+ "<SPECIAL_463>",
+ "<SPECIAL_464>",
+ "<SPECIAL_465>",
+ "<SPECIAL_466>",
+ "<SPECIAL_467>",
+ "<SPECIAL_468>",
+ "<SPECIAL_469>",
+ "<SPECIAL_470>",
+ "<SPECIAL_471>",
+ "<SPECIAL_472>",
+ "<SPECIAL_473>",
+ "<SPECIAL_474>",
+ "<SPECIAL_475>",
+ "<SPECIAL_476>",
+ "<SPECIAL_477>",
+ "<SPECIAL_478>",
+ "<SPECIAL_479>",
+ "<SPECIAL_480>",
+ "<SPECIAL_481>",
+ "<SPECIAL_482>",
+ "<SPECIAL_483>",
+ "<SPECIAL_484>",
+ "<SPECIAL_485>",
+ "<SPECIAL_486>",
+ "<SPECIAL_487>",
+ "<SPECIAL_488>",
+ "<SPECIAL_489>",
+ "<SPECIAL_490>",
+ "<SPECIAL_491>",
+ "<SPECIAL_492>",
+ "<SPECIAL_493>",
+ "<SPECIAL_494>",
+ "<SPECIAL_495>",
+ "<SPECIAL_496>",
+ "<SPECIAL_497>",
+ "<SPECIAL_498>",
+ "<SPECIAL_499>",
+ "<SPECIAL_500>",
+ "<SPECIAL_501>",
+ "<SPECIAL_502>",
+ "<SPECIAL_503>",
+ "<SPECIAL_504>",
+ "<SPECIAL_505>",
+ "<SPECIAL_506>",
+ "<SPECIAL_507>",
+ "<SPECIAL_508>",
+ "<SPECIAL_509>",
+ "<SPECIAL_510>",
+ "<SPECIAL_511>",
+ "<SPECIAL_512>",
+ "<SPECIAL_513>",
+ "<SPECIAL_514>",
+ "<SPECIAL_515>",
+ "<SPECIAL_516>",
+ "<SPECIAL_517>",
+ "<SPECIAL_518>",
+ "<SPECIAL_519>",
+ "<SPECIAL_520>",
+ "<SPECIAL_521>",
+ "<SPECIAL_522>",
+ "<SPECIAL_523>",
+ "<SPECIAL_524>",
+ "<SPECIAL_525>",
+ "<SPECIAL_526>",
+ "<SPECIAL_527>",
+ "<SPECIAL_528>",
+ "<SPECIAL_529>",
+ "<SPECIAL_530>",
+ "<SPECIAL_531>",
+ "<SPECIAL_532>",
+ "<SPECIAL_533>",
+ "<SPECIAL_534>",
+ "<SPECIAL_535>",
+ "<SPECIAL_536>",
+ "<SPECIAL_537>",
+ "<SPECIAL_538>",
+ "<SPECIAL_539>",
+ "<SPECIAL_540>",
+ "<SPECIAL_541>",
+ "<SPECIAL_542>",
+ "<SPECIAL_543>",
+ "<SPECIAL_544>",
+ "<SPECIAL_545>",
+ "<SPECIAL_546>",
+ "<SPECIAL_547>",
+ "<SPECIAL_548>",
+ "<SPECIAL_549>",
+ "<SPECIAL_550>",
+ "<SPECIAL_551>",
+ "<SPECIAL_552>",
+ "<SPECIAL_553>",
+ "<SPECIAL_554>",
+ "<SPECIAL_555>",
+ "<SPECIAL_556>",
+ "<SPECIAL_557>",
+ "<SPECIAL_558>",
+ "<SPECIAL_559>",
+ "<SPECIAL_560>",
+ "<SPECIAL_561>",
565
+ "<SPECIAL_562>",
566
+ "<SPECIAL_563>",
567
+ "<SPECIAL_564>",
568
+ "<SPECIAL_565>",
569
+ "<SPECIAL_566>",
570
+ "<SPECIAL_567>",
571
+ "<SPECIAL_568>",
572
+ "<SPECIAL_569>",
573
+ "<SPECIAL_570>",
574
+ "<SPECIAL_571>",
575
+ "<SPECIAL_572>",
576
+ "<SPECIAL_573>",
577
+ "<SPECIAL_574>",
578
+ "<SPECIAL_575>",
579
+ "<SPECIAL_576>",
580
+ "<SPECIAL_577>",
581
+ "<SPECIAL_578>",
582
+ "<SPECIAL_579>",
583
+ "<SPECIAL_580>",
584
+ "<SPECIAL_581>",
585
+ "<SPECIAL_582>",
586
+ "<SPECIAL_583>",
587
+ "<SPECIAL_584>",
588
+ "<SPECIAL_585>",
589
+ "<SPECIAL_586>",
590
+ "<SPECIAL_587>",
591
+ "<SPECIAL_588>",
592
+ "<SPECIAL_589>",
593
+ "<SPECIAL_590>",
594
+ "<SPECIAL_591>",
595
+ "<SPECIAL_592>",
596
+ "<SPECIAL_593>",
597
+ "<SPECIAL_594>",
598
+ "<SPECIAL_595>",
599
+ "<SPECIAL_596>",
600
+ "<SPECIAL_597>",
601
+ "<SPECIAL_598>",
602
+ "<SPECIAL_599>",
603
+ "<SPECIAL_600>",
604
+ "<SPECIAL_601>",
605
+ "<SPECIAL_602>",
606
+ "<SPECIAL_603>",
607
+ "<SPECIAL_604>",
608
+ "<SPECIAL_605>",
609
+ "<SPECIAL_606>",
610
+ "<SPECIAL_607>",
611
+ "<SPECIAL_608>",
612
+ "<SPECIAL_609>",
613
+ "<SPECIAL_610>",
614
+ "<SPECIAL_611>",
615
+ "<SPECIAL_612>",
616
+ "<SPECIAL_613>",
617
+ "<SPECIAL_614>",
618
+ "<SPECIAL_615>",
619
+ "<SPECIAL_616>",
620
+ "<SPECIAL_617>",
621
+ "<SPECIAL_618>",
622
+ "<SPECIAL_619>",
623
+ "<SPECIAL_620>",
624
+ "<SPECIAL_621>",
625
+ "<SPECIAL_622>",
626
+ "<SPECIAL_623>",
627
+ "<SPECIAL_624>",
628
+ "<SPECIAL_625>",
629
+ "<SPECIAL_626>",
630
+ "<SPECIAL_627>",
631
+ "<SPECIAL_628>",
632
+ "<SPECIAL_629>",
633
+ "<SPECIAL_630>",
634
+ "<SPECIAL_631>",
635
+ "<SPECIAL_632>",
636
+ "<SPECIAL_633>",
637
+ "<SPECIAL_634>",
638
+ "<SPECIAL_635>",
639
+ "<SPECIAL_636>",
640
+ "<SPECIAL_637>",
641
+ "<SPECIAL_638>",
642
+ "<SPECIAL_639>",
643
+ "<SPECIAL_640>",
644
+ "<SPECIAL_641>",
645
+ "<SPECIAL_642>",
646
+ "<SPECIAL_643>",
647
+ "<SPECIAL_644>",
648
+ "<SPECIAL_645>",
649
+ "<SPECIAL_646>",
650
+ "<SPECIAL_647>",
651
+ "<SPECIAL_648>",
652
+ "<SPECIAL_649>",
653
+ "<SPECIAL_650>",
654
+ "<SPECIAL_651>",
655
+ "<SPECIAL_652>",
656
+ "<SPECIAL_653>",
657
+ "<SPECIAL_654>",
658
+ "<SPECIAL_655>",
659
+ "<SPECIAL_656>",
660
+ "<SPECIAL_657>",
661
+ "<SPECIAL_658>",
662
+ "<SPECIAL_659>",
663
+ "<SPECIAL_660>",
664
+ "<SPECIAL_661>",
665
+ "<SPECIAL_662>",
666
+ "<SPECIAL_663>",
667
+ "<SPECIAL_664>",
668
+ "<SPECIAL_665>",
669
+ "<SPECIAL_666>",
670
+ "<SPECIAL_667>",
671
+ "<SPECIAL_668>",
672
+ "<SPECIAL_669>",
673
+ "<SPECIAL_670>",
674
+ "<SPECIAL_671>",
675
+ "<SPECIAL_672>",
676
+ "<SPECIAL_673>",
677
+ "<SPECIAL_674>",
678
+ "<SPECIAL_675>",
679
+ "<SPECIAL_676>",
680
+ "<SPECIAL_677>",
681
+ "<SPECIAL_678>",
682
+ "<SPECIAL_679>",
683
+ "<SPECIAL_680>",
684
+ "<SPECIAL_681>",
685
+ "<SPECIAL_682>",
686
+ "<SPECIAL_683>",
687
+ "<SPECIAL_684>",
688
+ "<SPECIAL_685>",
689
+ "<SPECIAL_686>",
690
+ "<SPECIAL_687>",
691
+ "<SPECIAL_688>",
692
+ "<SPECIAL_689>",
693
+ "<SPECIAL_690>",
694
+ "<SPECIAL_691>",
695
+ "<SPECIAL_692>",
696
+ "<SPECIAL_693>",
697
+ "<SPECIAL_694>",
698
+ "<SPECIAL_695>",
699
+ "<SPECIAL_696>",
700
+ "<SPECIAL_697>",
701
+ "<SPECIAL_698>",
702
+ "<SPECIAL_699>",
703
+ "<SPECIAL_700>",
704
+ "<SPECIAL_701>",
705
+ "<SPECIAL_702>",
706
+ "<SPECIAL_703>",
707
+ "<SPECIAL_704>",
708
+ "<SPECIAL_705>",
709
+ "<SPECIAL_706>",
710
+ "<SPECIAL_707>",
711
+ "<SPECIAL_708>",
712
+ "<SPECIAL_709>",
713
+ "<SPECIAL_710>",
714
+ "<SPECIAL_711>",
715
+ "<SPECIAL_712>",
716
+ "<SPECIAL_713>",
717
+ "<SPECIAL_714>",
718
+ "<SPECIAL_715>",
719
+ "<SPECIAL_716>",
720
+ "<SPECIAL_717>",
721
+ "<SPECIAL_718>",
722
+ "<SPECIAL_719>",
723
+ "<SPECIAL_720>",
724
+ "<SPECIAL_721>",
725
+ "<SPECIAL_722>",
726
+ "<SPECIAL_723>",
727
+ "<SPECIAL_724>",
728
+ "<SPECIAL_725>",
729
+ "<SPECIAL_726>",
730
+ "<SPECIAL_727>",
731
+ "<SPECIAL_728>",
732
+ "<SPECIAL_729>",
733
+ "<SPECIAL_730>",
734
+ "<SPECIAL_731>",
735
+ "<SPECIAL_732>",
736
+ "<SPECIAL_733>",
737
+ "<SPECIAL_734>",
738
+ "<SPECIAL_735>",
739
+ "<SPECIAL_736>",
740
+ "<SPECIAL_737>",
741
+ "<SPECIAL_738>",
742
+ "<SPECIAL_739>",
743
+ "<SPECIAL_740>",
744
+ "<SPECIAL_741>",
745
+ "<SPECIAL_742>",
746
+ "<SPECIAL_743>",
747
+ "<SPECIAL_744>",
748
+ "<SPECIAL_745>",
749
+ "<SPECIAL_746>",
750
+ "<SPECIAL_747>",
751
+ "<SPECIAL_748>",
752
+ "<SPECIAL_749>",
753
+ "<SPECIAL_750>",
754
+ "<SPECIAL_751>",
755
+ "<SPECIAL_752>",
756
+ "<SPECIAL_753>",
757
+ "<SPECIAL_754>",
758
+ "<SPECIAL_755>",
759
+ "<SPECIAL_756>",
760
+ "<SPECIAL_757>",
761
+ "<SPECIAL_758>",
762
+ "<SPECIAL_759>",
763
+ "<SPECIAL_760>",
764
+ "<SPECIAL_761>",
765
+ "<SPECIAL_762>",
766
+ "<SPECIAL_763>",
767
+ "<SPECIAL_764>",
768
+ "<SPECIAL_765>",
769
+ "<SPECIAL_766>",
770
+ "<SPECIAL_767>",
771
+ "<SPECIAL_768>",
772
+ "<SPECIAL_769>",
773
+ "<SPECIAL_770>",
774
+ "<SPECIAL_771>",
775
+ "<SPECIAL_772>",
776
+ "<SPECIAL_773>",
777
+ "<SPECIAL_774>",
778
+ "<SPECIAL_775>",
779
+ "<SPECIAL_776>",
780
+ "<SPECIAL_777>",
781
+ "<SPECIAL_778>",
782
+ "<SPECIAL_779>",
783
+ "<SPECIAL_780>",
784
+ "<SPECIAL_781>",
785
+ "<SPECIAL_782>",
786
+ "<SPECIAL_783>",
787
+ "<SPECIAL_784>",
788
+ "<SPECIAL_785>",
789
+ "<SPECIAL_786>",
790
+ "<SPECIAL_787>",
791
+ "<SPECIAL_788>",
792
+ "<SPECIAL_789>",
793
+ "<SPECIAL_790>",
794
+ "<SPECIAL_791>",
795
+ "<SPECIAL_792>",
796
+ "<SPECIAL_793>",
797
+ "<SPECIAL_794>",
798
+ "<SPECIAL_795>",
799
+ "<SPECIAL_796>",
800
+ "<SPECIAL_797>",
801
+ "<SPECIAL_798>",
802
+ "<SPECIAL_799>",
803
+ "<SPECIAL_800>",
804
+ "<SPECIAL_801>",
805
+ "<SPECIAL_802>",
806
+ "<SPECIAL_803>",
807
+ "<SPECIAL_804>",
808
+ "<SPECIAL_805>",
809
+ "<SPECIAL_806>",
810
+ "<SPECIAL_807>",
811
+ "<SPECIAL_808>",
812
+ "<SPECIAL_809>",
813
+ "<SPECIAL_810>",
814
+ "<SPECIAL_811>",
815
+ "<SPECIAL_812>",
816
+ "<SPECIAL_813>",
817
+ "<SPECIAL_814>",
818
+ "<SPECIAL_815>",
819
+ "<SPECIAL_816>",
820
+ "<SPECIAL_817>",
821
+ "<SPECIAL_818>",
822
+ "<SPECIAL_819>",
823
+ "<SPECIAL_820>",
824
+ "<SPECIAL_821>",
825
+ "<SPECIAL_822>",
826
+ "<SPECIAL_823>",
827
+ "<SPECIAL_824>",
828
+ "<SPECIAL_825>",
829
+ "<SPECIAL_826>",
830
+ "<SPECIAL_827>",
831
+ "<SPECIAL_828>",
832
+ "<SPECIAL_829>",
833
+ "<SPECIAL_830>",
834
+ "<SPECIAL_831>",
835
+ "<SPECIAL_832>",
836
+ "<SPECIAL_833>",
837
+ "<SPECIAL_834>",
838
+ "<SPECIAL_835>",
839
+ "<SPECIAL_836>",
840
+ "<SPECIAL_837>",
841
+ "<SPECIAL_838>",
842
+ "<SPECIAL_839>",
843
+ "<SPECIAL_840>",
844
+ "<SPECIAL_841>",
845
+ "<SPECIAL_842>",
846
+ "<SPECIAL_843>",
847
+ "<SPECIAL_844>",
848
+ "<SPECIAL_845>",
849
+ "<SPECIAL_846>",
850
+ "<SPECIAL_847>",
851
+ "<SPECIAL_848>",
852
+ "<SPECIAL_849>",
853
+ "<SPECIAL_850>",
854
+ "<SPECIAL_851>",
855
+ "<SPECIAL_852>",
856
+ "<SPECIAL_853>",
857
+ "<SPECIAL_854>",
858
+ "<SPECIAL_855>",
859
+ "<SPECIAL_856>",
860
+ "<SPECIAL_857>",
861
+ "<SPECIAL_858>",
862
+ "<SPECIAL_859>",
863
+ "<SPECIAL_860>",
864
+ "<SPECIAL_861>",
865
+ "<SPECIAL_862>",
866
+ "<SPECIAL_863>",
867
+ "<SPECIAL_864>",
868
+ "<SPECIAL_865>",
869
+ "<SPECIAL_866>",
870
+ "<SPECIAL_867>",
871
+ "<SPECIAL_868>",
872
+ "<SPECIAL_869>",
873
+ "<SPECIAL_870>",
874
+ "<SPECIAL_871>",
875
+ "<SPECIAL_872>",
876
+ "<SPECIAL_873>",
877
+ "<SPECIAL_874>",
878
+ "<SPECIAL_875>",
879
+ "<SPECIAL_876>",
880
+ "<SPECIAL_877>",
881
+ "<SPECIAL_878>",
882
+ "<SPECIAL_879>",
883
+ "<SPECIAL_880>",
884
+ "<SPECIAL_881>",
885
+ "<SPECIAL_882>",
886
+ "<SPECIAL_883>",
887
+ "<SPECIAL_884>",
888
+ "<SPECIAL_885>",
889
+ "<SPECIAL_886>",
890
+ "<SPECIAL_887>",
891
+ "<SPECIAL_888>",
892
+ "<SPECIAL_889>",
893
+ "<SPECIAL_890>",
894
+ "<SPECIAL_891>",
895
+ "<SPECIAL_892>",
896
+ "<SPECIAL_893>",
897
+ "<SPECIAL_894>",
898
+ "<SPECIAL_895>",
899
+ "<SPECIAL_896>",
900
+ "<SPECIAL_897>",
901
+ "<SPECIAL_898>",
902
+ "<SPECIAL_899>",
903
+ "<SPECIAL_900>",
904
+ "<SPECIAL_901>",
905
+ "<SPECIAL_902>",
906
+ "<SPECIAL_903>",
907
+ "<SPECIAL_904>",
908
+ "<SPECIAL_905>",
909
+ "<SPECIAL_906>",
910
+ "<SPECIAL_907>",
911
+ "<SPECIAL_908>",
912
+ "<SPECIAL_909>",
913
+ "<SPECIAL_910>",
914
+ "<SPECIAL_911>",
915
+ "<SPECIAL_912>",
916
+ "<SPECIAL_913>",
917
+ "<SPECIAL_914>",
918
+ "<SPECIAL_915>",
919
+ "<SPECIAL_916>",
920
+ "<SPECIAL_917>",
921
+ "<SPECIAL_918>",
922
+ "<SPECIAL_919>",
923
+ "<SPECIAL_920>",
924
+ "<SPECIAL_921>",
925
+ "<SPECIAL_922>",
926
+ "<SPECIAL_923>",
927
+ "<SPECIAL_924>",
928
+ "<SPECIAL_925>",
929
+ "<SPECIAL_926>",
930
+ "<SPECIAL_927>",
931
+ "<SPECIAL_928>",
932
+ "<SPECIAL_929>",
933
+ "<SPECIAL_930>",
934
+ "<SPECIAL_931>",
935
+ "<SPECIAL_932>",
936
+ "<SPECIAL_933>",
937
+ "<SPECIAL_934>",
938
+ "<SPECIAL_935>",
939
+ "<SPECIAL_936>",
940
+ "<SPECIAL_937>",
941
+ "<SPECIAL_938>",
942
+ "<SPECIAL_939>",
943
+ "<SPECIAL_940>",
944
+ "<SPECIAL_941>",
945
+ "<SPECIAL_942>",
946
+ "<SPECIAL_943>",
947
+ "<SPECIAL_944>",
948
+ "<SPECIAL_945>",
949
+ "<SPECIAL_946>",
950
+ "<SPECIAL_947>",
951
+ "<SPECIAL_948>",
952
+ "<SPECIAL_949>",
953
+ "<SPECIAL_950>",
954
+ "<SPECIAL_951>",
955
+ "<SPECIAL_952>",
956
+ "<SPECIAL_953>",
957
+ "<SPECIAL_954>",
958
+ "<SPECIAL_955>",
959
+ "<SPECIAL_956>",
960
+ "<SPECIAL_957>",
961
+ "<SPECIAL_958>",
962
+ "<SPECIAL_959>",
963
+ "<SPECIAL_960>",
964
+ "<SPECIAL_961>",
965
+ "<SPECIAL_962>",
966
+ "<SPECIAL_963>",
967
+ "<SPECIAL_964>",
968
+ "<SPECIAL_965>",
969
+ "<SPECIAL_966>",
970
+ "<SPECIAL_967>",
971
+ "<SPECIAL_968>",
972
+ "<SPECIAL_969>",
973
+ "<SPECIAL_970>",
974
+ "<SPECIAL_971>",
975
+ "<SPECIAL_972>",
976
+ "<SPECIAL_973>",
977
+ "<SPECIAL_974>",
978
+ "<SPECIAL_975>",
979
+ "<SPECIAL_976>",
980
+ "<SPECIAL_977>",
981
+ "<SPECIAL_978>",
982
+ "<SPECIAL_979>",
983
+ "<SPECIAL_980>",
984
+ "<SPECIAL_981>",
985
+ "<SPECIAL_982>",
986
+ "<SPECIAL_983>",
987
+ "<SPECIAL_984>",
988
+ "<SPECIAL_985>",
989
+ "<SPECIAL_986>",
990
+ "<SPECIAL_987>",
991
+ "<SPECIAL_988>",
992
+ "<SPECIAL_989>",
993
+ "<SPECIAL_990>",
994
+ "<SPECIAL_991>",
995
+ "<SPECIAL_992>",
996
+ "<SPECIAL_993>",
997
+ "<SPECIAL_994>",
998
+ "<SPECIAL_995>",
999
+ "<SPECIAL_996>",
1000
+ "<SPECIAL_997>",
1001
+ "<SPECIAL_998>",
1002
+ "<SPECIAL_999>"
1003
+ ],
1004
+ "bos_token": {
1005
+ "content": "<s>",
1006
+ "lstrip": false,
1007
+ "normalized": false,
1008
+ "rstrip": false,
1009
+ "single_word": false
1010
+ },
1011
+ "eos_token": {
1012
+ "content": "</s>",
1013
+ "lstrip": false,
1014
+ "normalized": false,
1015
+ "rstrip": false,
1016
+ "single_word": false
1017
+ },
1018
+ "pad_token": {
1019
+ "content": "<pad>",
1020
+ "lstrip": false,
1021
+ "normalized": false,
1022
+ "rstrip": false,
1023
+ "single_word": false
1024
+ },
1025
+ "unk_token": {
1026
+ "content": "<unk>",
1027
+ "lstrip": false,
1028
+ "normalized": false,
1029
+ "rstrip": false,
1030
+ "single_word": false
1031
+ }
1032
+ }
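
For reference, this tail of `special_tokens_map.json` does two things: it reserves a large bank of `<SPECIAL_N>` placeholder tokens, and it pins the `bos`/`eos`/`pad`/`unk` tokens to `<s>`, `</s>`, `<pad>`, and `<unk>`, each with `lstrip`/`rstrip`/`normalized`/`single_word` disabled so they are matched verbatim and never altered by text normalization. A minimal sketch of how a consumer might inspect this map, assuming the `transformers` library and a hypothetical local copy of the repo files:

```python
# Minimal sketch (assumptions: `transformers` is installed and the repo files
# were downloaded locally; "./Devstral-Small-2507-MLX" is a hypothetical path).
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./Devstral-Small-2507-MLX")

# The four core tokens defined at the end of special_tokens_map.json.
print(tok.bos_token, tok.eos_token, tok.pad_token, tok.unk_token)

# The reserved <SPECIAL_N> bank is exposed as additional special tokens;
# they are matched atomically, so ordinary text never tokenizes into them.
print(len(tok.additional_special_tokens))
print(tok.convert_tokens_to_ids("<SPECIAL_252>"))
```
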
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:23ad081f384b2bdb3c97f6e5461dfce52c1174c7328854a55006988f0fef9da7
+ size 17078019
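
Note that these three added lines are a Git LFS pointer rather than the tokenizer itself: the actual ~17 MB `tokenizer.json` lives in LFS storage (fetched with `git lfs pull` on a clone) and is identified by the pointer's `oid` (a SHA-256 digest) and `size`. A minimal sketch of verifying a downloaded copy against the pointer, using only the Python standard library and a hypothetical local path:

```python
# Verify a downloaded tokenizer.json against the LFS pointer above.
import hashlib
from pathlib import Path

EXPECTED_OID = "23ad081f384b2bdb3c97f6e5461dfce52c1174c7328854a55006988f0fef9da7"
EXPECTED_SIZE = 17_078_019  # bytes, from the pointer's `size` field

data = Path("tokenizer.json").read_bytes()  # hypothetical local download
assert len(data) == EXPECTED_SIZE, f"size mismatch: {len(data)} bytes"
assert hashlib.sha256(data).hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("tokenizer.json matches the LFS pointer")
```
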
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff