Inasity commited on
Commit
e5f3526
·
verified ·
1 Parent(s): ffad167

Upload 14 files

Browse files
.gitattributes CHANGED
@@ -1,35 +1,36 @@
1
- *.7z filter=lfs diff=lfs merge=lfs -text
2
- *.arrow filter=lfs diff=lfs merge=lfs -text
3
- *.bin filter=lfs diff=lfs merge=lfs -text
4
- *.bz2 filter=lfs diff=lfs merge=lfs -text
5
- *.ckpt filter=lfs diff=lfs merge=lfs -text
6
- *.ftz filter=lfs diff=lfs merge=lfs -text
7
- *.gz filter=lfs diff=lfs merge=lfs -text
8
- *.h5 filter=lfs diff=lfs merge=lfs -text
9
- *.joblib filter=lfs diff=lfs merge=lfs -text
10
- *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
- *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
- *.model filter=lfs diff=lfs merge=lfs -text
13
- *.msgpack filter=lfs diff=lfs merge=lfs -text
14
- *.npy filter=lfs diff=lfs merge=lfs -text
15
- *.npz filter=lfs diff=lfs merge=lfs -text
16
- *.onnx filter=lfs diff=lfs merge=lfs -text
17
- *.ot filter=lfs diff=lfs merge=lfs -text
18
- *.parquet filter=lfs diff=lfs merge=lfs -text
19
- *.pb filter=lfs diff=lfs merge=lfs -text
20
- *.pickle filter=lfs diff=lfs merge=lfs -text
21
- *.pkl filter=lfs diff=lfs merge=lfs -text
22
- *.pt filter=lfs diff=lfs merge=lfs -text
23
- *.pth filter=lfs diff=lfs merge=lfs -text
24
- *.rar filter=lfs diff=lfs merge=lfs -text
25
- *.safetensors filter=lfs diff=lfs merge=lfs -text
26
- saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
- *.tar.* filter=lfs diff=lfs merge=lfs -text
28
- *.tar filter=lfs diff=lfs merge=lfs -text
29
- *.tflite filter=lfs diff=lfs merge=lfs -text
30
- *.tgz filter=lfs diff=lfs merge=lfs -text
31
- *.wasm filter=lfs diff=lfs merge=lfs -text
32
- *.xz filter=lfs diff=lfs merge=lfs -text
33
- *.zip filter=lfs diff=lfs merge=lfs -text
34
- *.zst filter=lfs diff=lfs merge=lfs -text
35
- *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
1
+ *.7z filter=lfs diff=lfs merge=lfs -text
2
+ *.arrow filter=lfs diff=lfs merge=lfs -text
3
+ *.bin filter=lfs diff=lfs merge=lfs -text
4
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
5
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
6
+ *.ftz filter=lfs diff=lfs merge=lfs -text
7
+ *.gz filter=lfs diff=lfs merge=lfs -text
8
+ *.h5 filter=lfs diff=lfs merge=lfs -text
9
+ *.joblib filter=lfs diff=lfs merge=lfs -text
10
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
+ *.model filter=lfs diff=lfs merge=lfs -text
13
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
14
+ *.npy filter=lfs diff=lfs merge=lfs -text
15
+ *.npz filter=lfs diff=lfs merge=lfs -text
16
+ *.onnx filter=lfs diff=lfs merge=lfs -text
17
+ *.ot filter=lfs diff=lfs merge=lfs -text
18
+ *.parquet filter=lfs diff=lfs merge=lfs -text
19
+ *.pb filter=lfs diff=lfs merge=lfs -text
20
+ *.pickle filter=lfs diff=lfs merge=lfs -text
21
+ *.pkl filter=lfs diff=lfs merge=lfs -text
22
+ *.pt filter=lfs diff=lfs merge=lfs -text
23
+ *.pth filter=lfs diff=lfs merge=lfs -text
24
+ *.rar filter=lfs diff=lfs merge=lfs -text
25
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
26
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
28
+ *.tar filter=lfs diff=lfs merge=lfs -text
29
+ *.tflite filter=lfs diff=lfs merge=lfs -text
30
+ *.tgz filter=lfs diff=lfs merge=lfs -text
31
+ *.wasm filter=lfs diff=lfs merge=lfs -text
32
+ *.xz filter=lfs diff=lfs merge=lfs -text
33
+ *.zip filter=lfs diff=lfs merge=lfs -text
34
+ *.zst filter=lfs diff=lfs merge=lfs -text
35
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
LeCeption-XML-V2.json ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "instruct": {
3
+ "input_sequence": "<|start_header_id|>user<|end_header_id|>\n\n",
4
+ "output_sequence": "<|start_header_id|>assistant<|end_header_id|>\n\n",
5
+ "last_output_sequence": "",
6
+ "system_sequence": "<|start_header_id|>system<|end_header_id|>\n\n",
7
+ "stop_sequence": "<|eot_id|>",
8
+ "wrap": false,
9
+ "macro": true,
10
+ "activation_regex": "",
11
+ "system_sequence_prefix": "<|start_header_id|>system<|end_header_id|>\n\n",
12
+ "system_sequence_suffix": "",
13
+ "first_output_sequence": "",
14
+ "skip_examples": true,
15
+ "output_suffix": "<|eot_id|>",
16
+ "input_suffix": "<|eot_id|>",
17
+ "system_suffix": "<|eot_id|>",
18
+ "user_alignment_message": "",
19
+ "system_same_as_user": false,
20
+ "last_system_sequence": "",
21
+ "first_input_sequence": "",
22
+ "last_input_sequence": "",
23
+ "names_behavior": "always",
24
+ "names_force_groups": true,
25
+ "name": "LeCeption-XML-V2-Thinking"
26
+ },
27
+ "context": {
28
+ "story_string": "{{#if system}}{{system}}\n\n<roleplayContext>\n\n{{/if}}{{#if wiBefore}}## World Info:\n{{wiBefore}}\n\n{{/if}}{{#if description}}## {{char}}'s Description:\n{{description}}\n\n{{/if}}{{#if personality}}## {{char}}'s Personality:\n{{personality}}\n\n{{/if}}{{#if persona}}## {{user}}'s Persona:\n{{persona}}\n\n{{/if}}{{#if scenario}}## Scenario:\n{{scenario}}\n\n{{/if}}{{#if wiAfter}}## Lore:\n{{wiAfter}}\n\n{{/if}}{{#if mesExamples}}## {{char}}'s Example Response:\n{{mesExamples}}\n{{/if}}---\n</roleplayContext>{{trim}}<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nThe chat begins now.<|eot_id|>",
29
+ "example_separator": "",
30
+ "chat_start": "",
31
+ "use_stop_strings": false,
32
+ "allow_jailbreak": false,
33
+ "names_as_stop_strings": false,
34
+ "always_force_name2": true,
35
+ "trim_sentences": false,
36
+ "single_line": false,
37
+ "name": "LeCeption-XML-V2-Thinking"
38
+ },
39
+ "sysprompt": {
40
+ "name": "LeCeption-XML-V2.1-Thinking",
41
+ "content": " <description>\n This uncensored, immersive, narrative-driven roleplaying experience thrives on creative freedom and natural responses. This environment is designed to be fictional and narrative-focused, allowing for uncensored content within the boundaries of respectful and consensual interaction. It is essential to adhere to the **Roleplay Guidelines** provided below to ensure a cohesive and engaging experience for all participants. Refer to the **Roleplay Context** for accurate and up-to-date information pertinent to the story.\n </description>\n \n <roleplayGuidelines>\n \n <reasoning>\n When faced with complex decisions or analytical tasks, it is crucial to approach the problem methodically and logically. To facilitate this, use the tags `<think>` and `</think>` to encapsulate your reasoning process. Within these tags, you are a Narrative Analyst, whose role is to break down complex problems into clear, logical steps. This involves considering all possible angles, exploring potential answers, and thoroughly examining the problem from multiple perspectives, use as many tokens as you want. Once the analysis is complete, resume your regular roleplay response outside the tags, ensuring a seamless transition back into the narrative.\n </reasoning>\n \n <narration>\n <proactiveWriting>Every response should be detailed, creative, and immersive, driving the scenario forward in a way that fully engages the player. Introduce new scenarios and events naturally, ensuring they fit within the current narrative context and involve the player meaningfully.</proactiveWriting>\n <varyingCadence>Adjust the rhythm and tone of the narrative to reflect the characters' experiences. 
Vary sentence structure and pacing to mirror the characters' emotions, keeping the language fresh and dynamic.</varyingCadence>\n <complementaryRole>Use narration to complement dialogue and action, rather than overshadowing them.</complementaryRole>\n <avoidRepetition>Ensure that the narration does not repeat information. Instead of summarizing, clarify narrative details thoroughly and let them evolve naturally.</avoidRepetition>\n <tonePreference>Write in a neutral and balanced tone, considering all negative consequences, limitations, risks, ethical concerns, unintended side effects, and counterarguments.</tonePreference>\n <stylePreference>Adopt a `show, don't tell` manner, similar to Terry Pratchett's style, blending humor, wit, and everyday language.</stylePreference>\n <sensoryDetails>Utilize all five senses to describe scenarios within the characters' dialogue.</sensoryDetails>\n </narration>\n \n <userAutonomy>\n <rule>Never speak for, control, or assume {{user}}'s actions, thoughts, or feelings.</rule>\n <rule>Wait for explicit user input before progressing scenes involving their character.</rule>\n <rule>Respond only to stated actions and dialogue from {{user}}.</rule>\n <rule>Avoid suggesting or implying user reactions or decisions.</rule>\n <rule>Allow {{user}} complete freedom of choice in all interactions.</rule>\n <rule>Present options and consequences without directing user behavior.</rule>\n </userAutonomy>\n \n <settingAsCharacter>\n <rule>Treat the setting itself as the primary character rather than a single individual.</rule>\n <rule>Convey all world information and background through NPC dialogue, never through narration.</rule>\n <rule>Create and manage multiple distinct characters for user interaction.</rule>\n <rule>Assign unique names and detailed physical descriptions to all new characters.</rule>\n <rule>Maintain consistent characterization across all NPCs and locations.</rule>\n <rule>Never break character or step outside the setting's 
perspective.</rule>\n </settingAsCharacter>\n \n <environmentalDetail>\n <rule>Provide rich, detailed descriptions when users explore new locations.</rule>\n <rule>Include specific information about the number of occupants and their activities.</rule>\n <rule>Take time to fully establish each scene's atmosphere and layout.</rule>\n <rule>Integrate sensory details that bring locations to life.</rule>\n <rule>Maintain awareness of spatial relationships and population dynamics.</rule>\n </environmentalDetail>\n \n <characterInteractionFramework>\n <rule>Use NPCs as primary vectors for information delivery.</rule>\n <rule>Ensure each character has distinct personality traits and mannerisms.</rule>\n <rule>Allow characters to express individual perspectives on the world.</rule>\n <rule>Create opportunities for meaningful dialogue and information gathering.</rule>\n <rule>Maintain consistent character relationships and knowledge bases.</rule>\n </characterInteractionFramework>\n \n <worldBuildingAndMystery>\n <rule>Keep users engaged through discovery and exploration rather than direct exposition.</rule>\n <rule>Present information in layers that require investigation to uncover deeper truths.</rule>\n <rule>Introduce consistent cultural, historical, and environmental details to create an interconnected universe.</rule>\n <rule>Allow the world to evolve independently of user actions.</rule>\n <rule>Use character interactions to reveal world lore naturally.</rule>\n </worldBuildingAndMystery>\n \n <narrativeConsistency>\n <rule>Track and reference past events, interactions, and established world elements.</rule>\n <rule>Ensure logical progression of time and events.</rule>\n <rule>Maintain awareness of all active characters and their current situations.</rule>\n <rule>Allow location and character evolution while preserving core world rules.</rule>\n <rule>Create cohesive story arcs that build upon previous developments.</rule>\n <rule>Keep track of user-established facts 
and preferences.</rule>\n </narrativeConsistency>\n \n <characterEmbodiment>\n <rule>Examine the context, subtext, and implications of the given information to gain a deeper understanding of the characters.</rule>\n <rule>Reflect on the potential consequences of characters' actions and decisions.</rule>\n <rule>Ensure characters' reactions, interactions, and decision-making align with their established personalities.</rule>\n <rule>Allow characters' personas to evolve with the story for a dynamic experience.</rule>\n </characterEmbodiment>\n \n <outOfCharacterInteractions>\n <rule>Use [OOC:] for non-narrative interactions to clearly distinguish personal input from the role-play.</rule>\n </outOfCharacterInteractions>\n \n </roleplayGuidelines>\n \n</roleplay>"
42
+ }
43
+ }
README.md ADDED
@@ -0,0 +1,2312 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ base_model:
3
+ - EVA-UNIT-01/EVA-LLaMA-3.33-70B-v0.0
4
+ - Sao10K/L3.1-70B-Hanami-x1
5
+ - Sao10K/L3.3-70B-Euryale-v2.3
6
+ - LatitudeGames/Wayfarer-Large-70B-Llama-3.3
7
+ - TheDrummer/Anubis-70B-v1
8
+ - TheSkullery/L3.1x3.3-Hydroblated-R1-70B-v4.4
9
+ - SicariusSicariiStuff/Negative_LLAMA_70B
10
+ - Sao10K/70B-L3.3-Cirrus-x1
11
+ library_name: transformers
12
+ ---
13
+ <!DOCTYPE html>
14
+
15
+ <style>
16
+ /* ELECTRA THEME */
17
+
18
+ /* Base styles */
19
+ /* DEBUG STYLES FOR SMALL SCREENS - Added temporarily to diagnose responsive issues */
20
+ @media (max-width: 480px) {
21
+ .debug-overflow {
22
+ border: 2px solid red !important;
23
+ }
24
+ }
25
+
26
+ /* Fix for vertical text in composition list on mobile */
27
+ @media (max-width: 480px) {
28
+ .composition-list li {
29
+ grid-template-columns: 1fr; /* Change to single column on mobile */
30
+ }
31
+
32
+ .model-component a {
33
+ display: inline; /* Change from block to inline */
34
+ word-break: break-word; /* Better word breaking behavior */
35
+ }
36
+ }
37
+
38
+ /* Remove horizontal padding on containers for mobile */
39
+ @media (max-width: 480px) {
40
+ .container {
41
+ padding-left: 0;
42
+ padding-right: 0;
43
+ }
44
+ }
45
+
46
+ * {
47
+ margin: 0;
48
+ padding: 0;
49
+ box-sizing: border-box;
50
+ }
51
+
52
+ html {
53
+ font-size: 16px;
54
+ scroll-behavior: smooth;
55
+ }
56
+
57
+ body {
58
+ font-family: 'Rajdhani', sans-serif;
59
+ background-color: #010208;
60
+ color: #e0f7ff;
61
+ line-height: 1.6;
62
+ background: linear-gradient(to bottom, rgba(3, 6, 18, 0.95), rgba(1, 2, 8, 0.98));
63
+ background-attachment: fixed;
64
+ position: relative;
65
+ overflow-x: hidden;
66
+ margin: 0;
67
+ padding: 0;
68
+ font-size: 16px;
69
+ overflow-y: auto;
70
+ min-height: 100vh;
71
+ height: auto;
72
+ }
73
+
74
+ body::before {
75
+ content: '';
76
+ position: fixed;
77
+ top: 0;
78
+ left: 0;
79
+ width: 100%;
80
+ height: 100%;
81
+ background:
82
+ radial-gradient(circle at 10% 20%, rgba(0, 178, 255, 0.05) 0%, transparent 40%),
83
+ radial-gradient(circle at 90% 80%, rgba(0, 98, 255, 0.05) 0%, transparent 40%);
84
+ pointer-events: none;
85
+ z-index: -1;
86
+ }
87
+
88
+ /* Typography */
89
+ h1, h2, h3, h4, h5, h6 {
90
+ font-family: 'Orbitron', sans-serif;
91
+ font-weight: 700;
92
+ color: #e0f7ff;
93
+ margin-bottom: 1rem;
94
+ text-transform: uppercase;
95
+ letter-spacing: 1px;
96
+ }
97
+
98
+ p {
99
+ margin-bottom: 1.5rem;
100
+ color: rgba(224, 247, 255, 0.9);
101
+ }
102
+
103
+ a {
104
+ color: #00b2ff;
105
+ text-decoration: none;
106
+ transition: all 0.3s ease;
107
+ }
108
+
109
+ a:hover {
110
+ color: #72e5ff;
111
+ text-shadow: 0 0 10px rgba(0, 178, 255, 0.5);
112
+ }
113
+
114
+ /* Aesthetic neon details */
115
+ .neon-border {
116
+ border: 1px solid #00b2ff;
117
+ box-shadow: 0 0 10px rgba(0, 178, 255, 0.3);
118
+ }
119
+
120
+ .glowing-text {
121
+ color: #00b2ff;
122
+ text-shadow:
123
+ 0 0 5px rgba(0, 178, 255, 0.5),
124
+ 0 0 10px rgba(0, 178, 255, 0.3),
125
+ 0 0 15px rgba(0, 178, 255, 0.1);
126
+ }
127
+
128
+ /* Form elements */
129
+ input, select, textarea, button {
130
+ font-family: 'Rajdhani', sans-serif;
131
+ padding: 0.75rem 1rem;
132
+ border: 1px solid rgba(0, 178, 255, 0.3);
133
+ background-color: rgba(3, 6, 18, 0.8);
134
+ color: #e0f7ff;
135
+ border-radius: 0;
136
+ transition: all 0.3s ease;
137
+ }
138
+
139
+ input:focus, select:focus, textarea:focus {
140
+ outline: none;
141
+ border-color: #00b2ff;
142
+ box-shadow: 0 0 10px rgba(0, 178, 255, 0.3);
143
+ }
144
+
145
+ button {
146
+ cursor: pointer;
147
+ background-color: rgba(0, 98, 255, 0.1);
148
+ border: 1px solid #00b2ff;
149
+ clip-path: polygon(0 0, calc(100% - 10px) 0, 100% 10px, 100% 100%, 10px 100%, 0 calc(100% - 10px));
150
+ }
151
+
152
+ button:hover {
153
+ background-color: rgba(0, 98, 255, 0.2);
154
+ transform: translateY(-2px);
155
+ box-shadow: 0 0 15px rgba(0, 178, 255, 0.3);
156
+ }
157
+
158
+ /* Details and summary */
159
+ details {
160
+ margin-bottom: 1.5rem;
161
+ }
162
+
163
+ summary {
164
+ padding: 1rem;
165
+ background: rgba(0, 98, 255, 0.05);
166
+ border: 1px solid rgba(0, 178, 255, 0.2);
167
+ font-weight: 600;
168
+ cursor: pointer;
169
+ position: relative;
170
+ overflow: hidden;
171
+ clip-path: polygon(0 0, calc(100% - 10px) 0, 100% 10px, 100% 100%, 10px 100%, 0 calc(100% - 10px));
172
+ transition: all 0.3s ease;
173
+ }
174
+
175
+ summary:hover {
176
+ background: rgba(0, 98, 255, 0.1);
177
+ border-color: #00b2ff;
178
+ box-shadow: 0 0 15px rgba(0, 178, 255, 0.2);
179
+ }
180
+
181
+ summary::before {
182
+ content: '';
183
+ position: absolute;
184
+ top: 0;
185
+ left: 0;
186
+ width: 8px;
187
+ height: 100%;
188
+ background: linear-gradient(135deg, #00b2ff, #0062ff);
189
+ opacity: 0.7;
190
+ }
191
+
192
+ details[open] summary {
193
+ margin-bottom: 1rem;
194
+ box-shadow: 0 0 20px rgba(0, 178, 255, 0.2);
195
+ }
196
+
197
+ /* Code blocks */
198
+ code {
199
+ font-family: 'Cascadia Code', 'Source Code Pro', monospace;
200
+ background: rgba(0, 98, 255, 0.05);
201
+ padding: 0.2rem 0.4rem;
202
+ border: 1px solid rgba(0, 178, 255, 0.2);
203
+ border-radius: 0;
204
+ font-size: 0.9rem;
205
+ color: #00b2ff;
206
+ }
207
+
208
+ pre {
209
+ background: rgba(3, 6, 18, 0.8);
210
+ padding: 1.5rem;
211
+ border: 1px solid rgba(0, 178, 255, 0.2);
212
+ overflow-x: auto;
213
+ margin-bottom: 1.5rem;
214
+ clip-path: polygon(0 0, calc(100% - 15px) 0, 100% 15px, 100% 100%, 15px 100%, 0 calc(100% - 15px));
215
+ }
216
+
217
+ pre code {
218
+ background: transparent;
219
+ padding: 0;
220
+ border: none;
221
+ color: #e0f7ff;
222
+ }
223
+
224
+ /* Scrollbar styling */
225
+ ::-webkit-scrollbar {
226
+ width: 8px;
227
+ height: 8px;
228
+ background-color: #010208;
229
+ }
230
+
231
+ ::-webkit-scrollbar-thumb {
232
+ background: linear-gradient(135deg, #00b2ff, #0062ff);
233
+ border-radius: 0;
234
+ }
235
+
236
+ ::-webkit-scrollbar-track {
237
+ background-color: rgba(3, 6, 18, 0.8);
238
+ border-radius: 0;
239
+ }
240
+
241
+ /* Selection styling */
242
+ ::selection {
243
+ background-color: rgba(0, 178, 255, 0.3);
244
+ color: #e0f7ff;
245
+ }
246
+
247
+ /* Metrics section */
248
+ .metrics-section {
249
+ margin-bottom: 30px;
250
+ position: relative;
251
+ background: rgba(3, 6, 18, 0.8);
252
+ border: 1px solid #00b2ff;
253
+ padding: 20px;
254
+ clip-path: polygon(0 0, calc(100% - 15px) 0, 100% 15px, 100% 100%, 15px 100%, 0 calc(100% - 15px));
255
+ box-shadow: 0 0 20px rgba(0, 178, 255, 0.15);
256
+ }
257
+
258
+ /* Core metrics grid */
259
+ .core-metrics-grid {
260
+ display: grid;
261
+ grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
262
+ gap: 15px;
263
+ margin-bottom: 30px;
264
+ }
265
+
266
+ .info-grid {
267
+ display: grid;
268
+ grid-template-columns: repeat(auto-fit, minmax(150px, 1fr));
269
+ gap: 15px;
270
+ }
271
+
272
+ /* Metric box */
273
+ .metric-box {
274
+ background: rgba(3, 6, 18, 0.8);
275
+ border: 1px solid #00b2ff;
276
+ border-radius: 0;
277
+ padding: 15px;
278
+ display: flex;
279
+ flex-direction: column;
280
+ gap: 8px;
281
+ position: relative;
282
+ overflow: hidden;
283
+ clip-path: polygon(0 0, calc(100% - 10px) 0, 100% 10px, 100% 100%, 10px 100%, 0 calc(100% - 10px));
284
+ box-shadow: 0 0 15px rgba(0, 178, 255, 0.15);
285
+ transition: all 0.3s ease;
286
+ }
287
+
288
+ .metric-box:hover {
289
+ box-shadow: 0 0 20px rgba(0, 178, 255, 0.3);
290
+ transform: translateY(-2px);
291
+ }
292
+
293
+ .metric-box::before {
294
+ content: '';
295
+ position: absolute;
296
+ top: 0;
297
+ left: 0;
298
+ width: 100%;
299
+ height: 100%;
300
+ background:
301
+ linear-gradient(45deg, transparent 49.5%, rgba(0, 178, 255, 0.1) 49.5%, rgba(0, 178, 255, 0.1) 50.5%, transparent 50.5%) 0 0/10px 10px;
302
+ pointer-events: none;
303
+ opacity: 0.5;
304
+ }
305
+
306
+ .metric-box .label {
307
+ color: #e0f7ff;
308
+ font-size: 14px;
309
+ font-weight: 500;
310
+ text-transform: uppercase;
311
+ letter-spacing: 1px;
312
+ text-shadow: 0 0 5px rgba(0, 178, 255, 0.3);
313
+ }
314
+
315
+ .metric-box .value {
316
+ color: #00b2ff;
317
+ font-size: 28px;
318
+ font-weight: 700;
319
+ text-shadow:
320
+ 0 0 10px rgba(0, 178, 255, 0.5),
321
+ 0 0 20px rgba(0, 178, 255, 0.3);
322
+ letter-spacing: 1px;
323
+ font-family: 'Orbitron', sans-serif;
324
+ }
325
+
326
+ /* Progress metrics */
327
+ .progress-metrics {
328
+ display: grid;
329
+ gap: 15px;
330
+ padding: 20px;
331
+ background: rgba(3, 6, 18, 0.8);
332
+ border: 1px solid #00b2ff;
333
+ position: relative;
334
+ overflow: hidden;
335
+ clip-path: polygon(0 0, calc(100% - 15px) 0, 100% 15px, 100% 100%, 15px 100%, 0 calc(100% - 15px));
336
+ box-shadow: 0 0 20px rgba(0, 178, 255, 0.15);
337
+ }
338
+
339
+ .progress-metric {
340
+ display: grid;
341
+ gap: 8px;
342
+ }
343
+
344
+ .progress-label {
345
+ display: flex;
346
+ justify-content: space-between;
347
+ align-items: center;
348
+ color: #e0f7ff;
349
+ font-size: 14px;
350
+ text-transform: uppercase;
351
+ letter-spacing: 1px;
352
+ text-shadow: 0 0 5px rgba(0, 178, 255, 0.3);
353
+ }
354
+
355
+ .progress-value {
356
+ color: #00b2ff;
357
+ font-weight: 600;
358
+ text-shadow:
359
+ 0 0 5px rgba(0, 178, 255, 0.5),
360
+ 0 0 10px rgba(0, 178, 255, 0.3);
361
+ font-family: 'Orbitron', sans-serif;
362
+ }
363
+
364
+ /* Progress bars */
365
+ .progress-bar {
366
+ height: 4px;
367
+ background: rgba(0, 178, 255, 0.1);
368
+ border-radius: 0;
369
+ overflow: hidden;
370
+ position: relative;
371
+ border: 1px solid rgba(0, 178, 255, 0.2);
372
+ clip-path: polygon(0 0, 100% 0, calc(100% - 4px) 100%, 0 100%);
373
+ }
374
+
375
+ .progress-fill {
376
+ height: 100%;
377
+ background: linear-gradient(90deg, #0062ff, #00b2ff);
378
+ border-radius: 0;
379
+ position: relative;
380
+ overflow: hidden;
381
+ clip-path: polygon(0 0, calc(100% - 4px) 0, 100% 100%, 0 100%);
382
+ box-shadow:
383
+ 0 0 10px rgba(0, 178, 255, 0.4),
384
+ 0 0 20px rgba(0, 178, 255, 0.2);
385
+ }
386
+
387
+ .progress-fill::after {
388
+ content: '';
389
+ position: absolute;
390
+ top: 0;
391
+ left: 0;
392
+ width: 100%;
393
+ height: 100%;
394
+ background: linear-gradient(90deg,
395
+ rgba(255, 255, 255, 0.1) 0%,
396
+ rgba(255, 255, 255, 0.1) 40%,
397
+ rgba(255, 255, 255, 0.3) 50%,
398
+ rgba(255, 255, 255, 0.1) 60%,
399
+ rgba(255, 255, 255, 0.1) 100%
400
+ );
401
+ background-size: 200% 100%;
402
+ animation: shimmer 2s infinite;
403
+ }
404
+
405
+ /* Split progress bars */
406
+ .progress-metric.split .progress-label {
407
+ justify-content: space-between;
408
+ font-size: 13px;
409
+ }
410
+
411
+ .progress-bar.split {
412
+ display: flex;
413
+ background: rgba(0, 178, 255, 0.1);
414
+ position: relative;
415
+ justify-content: center;
416
+ border: 1px solid rgba(0, 178, 255, 0.2);
417
+ clip-path: polygon(0 0, 100% 0, calc(100% - 4px) 100%, 0 100%);
418
+ }
419
+
420
+ .progress-bar.split::after {
421
+ content: '';
422
+ position: absolute;
423
+ top: 0;
424
+ left: 50%;
425
+ transform: translateX(-50%);
426
+ width: 2px;
427
+ height: 100%;
428
+ background: rgba(0, 178, 255, 0.3);
429
+ z-index: 2;
430
+ box-shadow: 0 0 10px rgba(0, 178, 255, 0.4);
431
+ }
432
+
433
+ .progress-fill-left,
434
+ .progress-fill-right {
435
+ height: 100%;
436
+ background: linear-gradient(90deg, #0062ff, #00b2ff);
437
+ position: relative;
438
+ width: 50%;
439
+ overflow: hidden;
440
+ }
441
+
442
+ .progress-fill-left {
443
+ clip-path: polygon(0 0, calc(100% - 4px) 0, 100% 100%, 0 100%);
444
+ margin-right: 1px;
445
+ transform-origin: right;
446
+ transform: scaleX(var(--scale, 0));
447
+ box-shadow:
448
+ 0 0 10px rgba(0, 178, 255, 0.4),
449
+ 0 0 20px rgba(0, 178, 255, 0.2);
450
+ }
451
+
452
+ .progress-fill-right {
453
+ clip-path: polygon(0 0, 100% 0, 100% 100%, 4px 100%);
454
+ margin-left: 1px;
455
+ transform-origin: left;
456
+ transform: scaleX(var(--scale, 0));
457
+ box-shadow:
458
+ 0 0 10px rgba(0, 178, 255, 0.4),
459
+ 0 0 20px rgba(0, 178, 255, 0.2);
460
+ }
461
+
462
+ /* Benchmark container */
463
+ .benchmark-container {
464
+ background: rgba(3, 6, 18, 0.8);
465
+ border: 1px solid #00b2ff;
466
+ position: relative;
467
+ overflow: hidden;
468
+ clip-path: polygon(0 0, calc(100% - 15px) 0, 100% 15px, 100% 100%, 15px 100%, 0 calc(100% - 15px));
469
+ box-shadow: 0 0 20px rgba(0, 178, 255, 0.15);
470
+ padding: 20px;
471
+ }
472
+
473
+ /* Benchmark notification */
474
+ .benchmark-notification {
475
+ background: rgba(3, 6, 18, 0.8);
476
+ border: 1px solid #00b2ff;
477
+ padding: 15px;
478
+ margin-bottom: 20px;
479
+ position: relative;
480
+ overflow: hidden;
481
+ clip-path: polygon(0 0, calc(100% - 10px) 0, 100% 10px, 100% 100%, 10px 100%, 0 calc(100% - 10px));
482
+ box-shadow: 0 0 15px rgba(0, 178, 255, 0.15);
483
+ }
484
+
485
+ .notification-content {
486
+ display: flex;
487
+ align-items: center;
488
+ gap: 10px;
489
+ position: relative;
490
+ z-index: 1;
491
+ }
492
+
493
+ .notification-icon {
494
+ font-size: 20px;
495
+ color: #00b2ff;
496
+ text-shadow:
497
+ 0 0 10px rgba(0, 178, 255, 0.5),
498
+ 0 0 20px rgba(0, 178, 255, 0.3);
499
+ }
500
+
501
+ .notification-text {
502
+ color: #e0f7ff;
503
+ font-size: 14px;
504
+ display: flex;
505
+ align-items: center;
506
+ gap: 10px;
507
+ flex-wrap: wrap;
508
+ text-transform: uppercase;
509
+ letter-spacing: 1px;
510
+ text-shadow: 0 0 5px rgba(0, 178, 255, 0.3);
511
+ }
512
+
513
+ .benchmark-link {
514
+ color: #00b2ff;
515
+ font-weight: 500;
516
+ white-space: nowrap;
517
+ text-shadow:
518
+ 0 0 5px rgba(0, 178, 255, 0.5),
519
+ 0 0 10px rgba(0, 178, 255, 0.3);
520
+ position: relative;
521
+ padding: 2px 5px;
522
+ border: 1px solid rgba(0, 178, 255, 0.3);
523
+ clip-path: polygon(0 0, calc(100% - 5px) 0, 100% 5px, 100% 100%, 5px 100%, 0 calc(100% - 5px));
524
+ transition: all 0.3s ease;
525
+ }
526
+
527
+ .benchmark-link:hover {
528
+ background: rgba(0, 178, 255, 0.1);
529
+ border-color: #00b2ff;
530
+ box-shadow: 0 0 10px rgba(0, 178, 255, 0.3);
531
+ }
532
+
533
+ @keyframes shimmer {
534
+ 0% { background-position: 200% 0; }
535
+ 100% { background-position: -200% 0; }
536
+ }
537
+
538
+ /* Button styles */
539
+ .button {
540
+ display: inline-block;
541
+ padding: 10px 20px;
542
+ background-color: rgba(0, 98, 255, 0.1);
543
+ color: #00b2ff;
544
+ border: 1px solid #00b2ff;
545
+ font-family: 'Rajdhani', sans-serif;
546
+ font-weight: 600;
547
+ font-size: 15px;
548
+ text-transform: uppercase;
549
+ letter-spacing: 1px;
550
+ cursor: pointer;
551
+ transition: all 0.3s ease;
552
+ position: relative;
553
+ overflow: hidden;
554
+ text-align: center;
555
+ clip-path: polygon(0 0, calc(100% - 10px) 0, 100% 10px, 100% 100%, 10px 100%, 0 calc(100% - 10px));
556
+ box-shadow: 0 0 15px rgba(0, 178, 255, 0.15);
557
+ }
558
+
559
+ .button:hover {
560
+ background-color: rgba(0, 98, 255, 0.2);
561
+ color: #e0f7ff;
562
+ transform: translateY(-2px);
563
+ box-shadow:
564
+ 0 0 20px rgba(0, 178, 255, 0.3),
565
+ 0 0 40px rgba(0, 178, 255, 0.1);
566
+ text-shadow: 0 0 10px rgba(0, 178, 255, 0.5);
567
+ }
568
+
569
+ .button:active {
570
+ transform: translateY(1px);
571
+ box-shadow: 0 0 10px rgba(0, 178, 255, 0.2);
572
+ }
573
+
574
+ .button::before {
575
+ content: '';
576
+ position: absolute;
577
+ top: 0;
578
+ left: -100%;
579
+ width: 100%;
580
+ height: 100%;
581
+ background: linear-gradient(
582
+ 90deg,
583
+ transparent,
584
+ rgba(0, 178, 255, 0.2),
585
+ transparent
586
+ );
587
+ transition: left 0.7s ease;
588
+ }
589
+
590
+ .button:hover::before {
591
+ left: 100%;
592
+ }
593
+
594
+ .button::after {
595
+ content: '';
596
+ position: absolute;
597
+ inset: 0;
598
+ background: linear-gradient(45deg, transparent 49.5%, rgba(0, 178, 255, 0.1) 49.5%, rgba(0, 178, 255, 0.1) 50.5%, transparent 50.5%);
599
+ background-size: 5px 5px;
600
+ opacity: 0;
601
+ transition: opacity 0.3s ease;
602
+ pointer-events: none;
603
+ }
604
+
605
+ .button:hover::after {
606
+ opacity: 1;
607
+ }
608
+
609
+ /* Support buttons */
610
+ .support-buttons {
611
+ display: flex;
612
+ gap: 15px;
613
+ flex-wrap: wrap;
614
+ }
615
+
616
+ .support-buttons .button {
617
+ min-width: 150px;
618
+ box-shadow: 0 0 15px rgba(0, 178, 255, 0.2);
619
+ }
620
+
621
+ .support-buttons .button:hover {
622
+ box-shadow:
623
+ 0 0 20px rgba(0, 178, 255, 0.4),
624
+ 0 0 40px rgba(0, 178, 255, 0.2);
625
+ }
626
+
627
+ /* Button animations */
628
+ @keyframes pulse {
629
+ 0% {
630
+ box-shadow: 0 0 10px rgba(0, 178, 255, 0.3);
631
+ }
632
+ 50% {
633
+ box-shadow: 0 0 20px rgba(0, 178, 255, 0.5);
634
+ }
635
+ 100% {
636
+ box-shadow: 0 0 10px rgba(0, 178, 255, 0.3);
637
+ }
638
+ }
639
+
640
+ .animated-button {
641
+ animation: pulse 2s infinite;
642
+ }
643
+
644
+ /* Button variants */
645
+ .button.primary {
646
+ background-color: rgba(0, 98, 255, 0.2);
647
+ border-color: #00b2ff;
648
+ }
649
+
650
+ .button.primary:hover {
651
+ background-color: rgba(0, 98, 255, 0.3);
652
+ }
653
+
654
+ .button.outline {
655
+ background-color: transparent;
656
+ border-color: #00b2ff;
657
+ }
658
+
659
+ .button.outline:hover {
660
+ background-color: rgba(0, 98, 255, 0.1);
661
+ }
662
+
663
+ .button.small {
664
+ padding: 6px 12px;
665
+ font-size: 13px;
666
+ }
667
+
668
+ .button.large {
669
+ padding: 12px 24px;
670
+ font-size: 16px;
671
+ }
672
+
673
+ /* Button with icon */
674
+ .button-with-icon {
675
+ display: inline-flex;
676
+ align-items: center;
677
+ gap: 8px;
678
+ }
679
+
680
+ .button-icon {
681
+ font-size: 18px;
682
+ line-height: 1;
683
+ }
684
+
685
+ /* Responsive adjustments */
686
+ @media (max-width: 768px) {
687
+ .support-buttons {
688
+ flex-direction: column;
689
+ }
690
+
691
+ .support-buttons .button {
692
+ width: 100%;
693
+ }
694
+ }
695
+
696
+ /* Container & Layout */
697
+ .container {
698
+ width: 95%;
699
+ max-width: 1200px;
700
+ margin: 10px auto;
701
+ padding: 20px;
702
+ position: relative;
703
+ background-color: rgba(3, 6, 18, 0.8);
704
+ border: 1px solid #00b2ff;
705
+ backdrop-filter: blur(10px);
706
+ box-shadow: 0 0 20px rgba(0, 178, 255, 0.3);
707
+ clip-path: polygon(0 0, calc(100% - 20px) 0, 100% 20px, 100% 100%, 20px 100%, 0 calc(100% - 20px));
708
+ }
709
+
710
+
711
+ .container::before {
712
+ content: '';
713
+ position: absolute;
714
+ top: 0;
715
+ left: 0;
716
+ width: 100%;
717
+ height: 100%;
718
+ background:
719
+ radial-gradient(circle at 20% 30%, rgba(0, 178, 255, 0.15) 0%, transparent 50%),
720
+ radial-gradient(circle at 80% 70%, rgba(0, 178, 255, 0.1) 0%, transparent 40%);
721
+ pointer-events: none;
722
+ z-index: -1;
723
+ }
724
+
725
+ /* Header */
726
+ .header {
727
+ margin-bottom: 50px;
728
+ position: relative;
729
+ padding-bottom: 20px;
730
+ border-bottom: 1px solid #00b2ff;
731
+ overflow: hidden;
732
+ }
733
+
734
+ .header::before {
735
+ content: '';
736
+ position: absolute;
737
+ bottom: -1px;
738
+ left: 0;
739
+ width: 50%;
740
+ height: 1px;
741
+ background: linear-gradient(90deg, #00b2ff, transparent);
742
+ box-shadow: 0 0 20px #00b2ff;
743
+ }
744
+
745
+ .header::after {
746
+ content: '';
747
+ position: absolute;
748
+ bottom: -1px;
749
+ right: 0;
750
+ width: 50%;
751
+ height: 1px;
752
+ background: linear-gradient(90deg, transparent, #00b2ff);
753
+ box-shadow: 0 0 20px #00b2ff;
754
+ }
755
+
756
+ .header h1 {
757
+ font-family: 'Orbitron', sans-serif;
758
+ font-size: 48px;
759
+ color: #e0f7ff;
760
+ text-align: center;
761
+ text-transform: uppercase;
762
+ letter-spacing: 2px;
763
+ margin: 0;
764
+ position: relative;
765
+ text-shadow:
766
+ 0 0 5px rgba(0, 178, 255, 0.5),
767
+ 0 0 10px rgba(0, 178, 255, 0.3),
768
+ 0 0 20px rgba(0, 178, 255, 0.1);
769
+ }
770
+
771
+ .header h1::before {
772
+ content: '';
773
+ position: absolute;
774
+ width: 100px;
775
+ height: 1px;
776
+ bottom: -10px;
777
+ left: 50%;
778
+ transform: translateX(-50%);
779
+ background: #00b2ff;
780
+ box-shadow: 0 0 20px #00b2ff;
781
+ }
782
+
783
+ /* Info section */
784
+ .info {
785
+ margin-bottom: 50px;
786
+ overflow: visible; /* Ensure content can extend beyond container */
787
+ }
788
+
789
+ .info > img {
790
+ width: 100%;
791
+ height: auto;
792
+ border: 1px solid #00b2ff;
793
+ margin-bottom: 30px;
794
+ box-shadow: 0 0 30px rgba(0, 178, 255, 0.3);
795
+ clip-path: polygon(0 0, calc(100% - 20px) 0, 100% 20px, 100% 100%, 20px 100%, 0 calc(100% - 20px));
796
+ background-color: rgba(3, 6, 18, 0.6);
797
+ display: block;
798
+ min-height: 100vh; /* Make image at least the height of the viewport */
799
+ object-fit: contain; /* Maintain aspect ratio */
800
+ object-position: top; /* Align from the top */
801
+ }
802
+
803
+ .info h2 {
804
+ font-family: 'Orbitron', sans-serif;
805
+ font-size: 28px;
806
+ color: #e0f7ff;
807
+ text-transform: uppercase;
808
+ letter-spacing: 1.5px;
809
+ margin: 30px 0 20px 0;
810
+ padding-bottom: 10px;
811
+ border-bottom: 1px solid rgba(0, 178, 255, 0.2);
812
+ position: relative;
813
+ text-shadow: 0 0 10px rgba(0, 178, 255, 0.3);
814
+ }
815
+
816
+ .info h2::after {
817
+ content: '';
818
+ position: absolute;
819
+ bottom: -1px;
820
+ left: 0;
821
+ width: 100px;
822
+ height: 1px;
823
+ background: #00b2ff;
824
+ box-shadow: 0 0 15px #00b2ff;
825
+ }
826
+
827
+ .info h3 {
828
+ font-family: 'Rajdhani', sans-serif;
829
+ font-size: 24px;
830
+ color: #e0f7ff;
831
+ margin: 20px 0 15px 0;
832
+ letter-spacing: 1px;
833
+ text-shadow: 0 0 5px rgba(0, 178, 255, 0.2);
834
+ }
835
+
836
+ .info h4 {
837
+ font-family: 'Rajdhani', sans-serif;
838
+ font-size: 18px;
839
+ color: #00b2ff;
840
+ margin: 15px 0 10px 0;
841
+ letter-spacing: 0.5px;
842
+ text-transform: uppercase;
843
+ text-shadow: 0 0 5px rgba(0, 178, 255, 0.3);
844
+ }
845
+
846
+ .info p {
847
+ margin: 0 0 15px 0;
848
+ line-height: 1.6;
849
+ }
850
+
851
+ /* Creator section */
852
+ .creator-section {
853
+ margin-bottom: 30px;
854
+ padding: 20px 20px 10px 20px;
855
+ background: rgba(3, 6, 18, 0.8);
856
+ border: 1px solid #00b2ff;
857
+ position: relative;
858
+ clip-path: polygon(0 0, calc(100% - 15px) 0, 100% 15px, 100% 100%, 15px 100%, 0 calc(100% - 15px));
859
+ box-shadow: 0 0 20px rgba(0, 178, 255, 0.15);
860
+ }
861
+
862
+ .creator-section::before {
863
+ content: '';
864
+ position: absolute;
865
+ top: 0;
866
+ left: 0;
867
+ width: 100%;
868
+ height: 100%;
869
+ background: linear-gradient(45deg, transparent 49.5%, rgba(0, 178, 255, 0.05) 49.5%, rgba(0, 178, 255, 0.05) 50.5%, transparent 50.5%);
870
+ background-size: 10px 10px;
871
+ pointer-events: none;
872
+ z-index: 0;
873
+ }
874
+
875
+ .creator-badge {
876
+ position: relative;
877
+ z-index: 1;
878
+ }
879
+
880
+ .creator-info {
881
+ display: flex;
882
+ flex-direction: column;
883
+ }
884
+
885
+ .creator-label {
886
+ color: #e0f7ff;
887
+ font-size: 14px;
888
+ text-transform: uppercase;
889
+ letter-spacing: 1px;
890
+ margin-bottom: 5px;
891
+ }
892
+
893
+ .creator-link {
894
+ color: #00b2ff;
895
+ text-decoration: none;
896
+ font-weight: 600;
897
+ display: flex;
898
+ align-items: center;
899
+ gap: 5px;
900
+ transition: all 0.3s ease;
901
+ text-shadow: 0 0 5px rgba(0, 178, 255, 0.3);
902
+ }
903
+
904
+ .creator-link:hover {
905
+ transform: translateX(5px);
906
+ text-shadow: 0 0 10px rgba(0, 178, 255, 0.5);
907
+ }
908
+
909
+ .creator-name {
910
+ font-size: 18px;
911
+ }
912
+
913
+ .creator-arrow {
914
+ font-weight: 600;
915
+ transition: transform 0.3s ease;
916
+ }
917
+
918
+ .creator-link:hover .creator-arrow {
919
+ transform: translateX(5px);
920
+ }
921
+
922
+ /* Supporters dropdown section */
923
+ .sponsors-section {
924
+ margin-top: 15px;
925
+ position: relative;
926
+ z-index: 2;
927
+ }
928
+
929
+ .sponsors-dropdown {
930
+ width: 100%;
931
+ background: rgba(0, 60, 120, 0.2);
932
+ border: 1px solid #00b2ff;
933
+ border-radius: 4px;
934
+ overflow: hidden;
935
+ position: relative;
936
+ }
937
+
938
+ .sponsors-dropdown::before {
939
+ content: '';
940
+ position: absolute;
941
+ top: 0;
942
+ left: 0;
943
+ width: 100%;
944
+ height: 100%;
945
+ background: linear-gradient(45deg, transparent 49.5%, rgba(0, 178, 255, 0.05) 49.5%, rgba(0, 178, 255, 0.05) 50.5%, transparent 50.5%);
946
+ background-size: 8px 8px;
947
+ pointer-events: none;
948
+ z-index: 0;
949
+ }
950
+
951
+ .sponsors-summary {
952
+ padding: 12px 15px;
953
+ display: flex;
954
+ justify-content: space-between;
955
+ align-items: center;
956
+ cursor: pointer;
957
+ outline: none;
958
+ position: relative;
959
+ z-index: 1;
960
+ transition: all 0.3s ease;
961
+ }
962
+
963
+ .sponsors-summary:hover {
964
+ background-color: rgba(0, 178, 255, 0.1);
965
+ }
966
+
967
+ .sponsors-title {
968
+ font-family: 'Orbitron', sans-serif;
969
+ color: #e0f7ff;
970
+ font-size: 16px;
971
+ text-transform: uppercase;
972
+ letter-spacing: 1px;
973
+ font-weight: 600;
974
+ text-shadow: 0 0 8px rgba(0, 178, 255, 0.4);
975
+ }
976
+
977
+ .dropdown-icon {
978
+ color: #00b2ff;
979
+ transition: transform 0.3s ease;
980
+ }
981
+
982
+ details[open] .dropdown-icon {
983
+ transform: rotate(180deg);
984
+ }
985
+
986
+ .sponsors-list {
987
+ padding: 15px;
988
+ display: grid;
989
+ grid-template-columns: repeat(auto-fill, minmax(120px, 1fr));
990
+ gap: 15px;
991
+ background: rgba(0, 20, 40, 0.4);
992
+ border-top: 1px solid rgba(0, 178, 255, 0.3);
993
+ }
994
+
995
+ .sponsor-item {
996
+ display: flex;
997
+ flex-direction: column;
998
+ align-items: center;
999
+ text-align: center;
1000
+ padding: 10px;
1001
+ border: 1px solid rgba(0, 178, 255, 0.2);
1002
+ background: rgba(0, 30, 60, 0.3);
1003
+ border-radius: 6px;
1004
+ transition: all 0.3s ease;
1005
+ }
1006
+
1007
+ .sponsor-item:hover {
1008
+ transform: translateY(-3px);
1009
+ border-color: #00b2ff;
1010
+ box-shadow: 0 0 15px rgba(0, 178, 255, 0.3);
1011
+ background: rgba(0, 30, 60, 0.5);
1012
+ }
1013
+
1014
+ .sponsor-rank {
1015
+ color: #00b2ff;
1016
+ font-weight: 600;
1017
+ font-size: 14px;
1018
+ margin-bottom: 5px;
1019
+ text-shadow: 0 0 8px rgba(0, 178, 255, 0.5);
1020
+ }
1021
+
1022
+ .sponsor-img {
1023
+ width: 60px;
1024
+ height: 60px;
1025
+ border-radius: 50%;
1026
+ object-fit: cover;
1027
+ border: 2px solid #00b2ff;
1028
+ box-shadow: 0 0 12px rgba(0, 178, 255, 0.3);
1029
+ margin-bottom: 8px;
1030
+ transition: all 0.3s ease;
1031
+ }
1032
+
1033
+ .sponsor-item:nth-child(1) .sponsor-img {
1034
+ border-color: gold;
1035
+ box-shadow: 0 0 12px rgba(255, 215, 0, 0.5);
1036
+ }
1037
+
1038
+ .sponsor-item:nth-child(2) .sponsor-img {
1039
+ border-color: silver;
1040
+ box-shadow: 0 0 12px rgba(192, 192, 192, 0.5);
1041
+ }
1042
+
1043
+ .sponsor-item:nth-child(3) .sponsor-img {
1044
+ border-color: #cd7f32; /* bronze */
1045
+ box-shadow: 0 0 12px rgba(205, 127, 50, 0.5);
1046
+ }
1047
+
1048
+ .sponsor-item:hover .sponsor-img {
1049
+ width: 60px;
1050
+ height: 60px;
1051
+ border-radius: 50%;
1052
+ object-fit: cover;
1053
+ border: 2px solid #00b2ff;
1054
+ box-shadow: 0 0 12px rgba(0, 178, 255, 0.3);
1055
+ margin-bottom: 8px;
1056
+ transition: all 0.3s ease;
1057
+ }
1058
+
1059
+ .sponsor-item:nth-child(1) .sponsor-img {
1060
+ border-color: gold;
1061
+ box-shadow: 0 0 12px rgba(255, 215, 0, 0.5);
1062
+ }
1063
+
1064
+ .sponsor-item:nth-child(2) .sponsor-img {
1065
+ border-color: silver;
1066
+ box-shadow: 0 0 12px rgba(192, 192, 192, 0.5);
1067
+ }
1068
+
1069
+ .sponsor-item:nth-child(3) .sponsor-img {
1070
+ border-color: #cd7f32; /* bronze */
1071
+ box-shadow: 0 0 12px rgba(205, 127, 50, 0.5);
1072
+ }
1073
+
1074
+ .sponsor-name {
1075
+ color: #e0f7ff;
1076
+ font-size: 14px;
1077
+ font-weight: 500;
1078
+ word-break: break-word;
1079
+ }
1080
+
1081
+ /* Model info */
1082
+ .model-info {
1083
+ margin-bottom: 50px;
1084
+ }
1085
+
1086
+ /* Section container */
1087
+ .section-container {
1088
+ margin-bottom: 50px;
1089
+ padding: 25px;
1090
+ background: rgba(3, 6, 18, 0.8);
1091
+ border: 1px solid #00b2ff;
1092
+ position: relative;
1093
+ overflow: hidden;
1094
+ clip-path: polygon(0 0, calc(100% - 15px) 0, 100% 15px, 100% 100%, 15px 100%, 0 calc(100% - 15px));
1095
+ box-shadow: 0 0 20px rgba(0, 178, 255, 0.15);
1096
+ }
1097
+
1098
+ .section-container::before {
1099
+ content: '';
1100
+ position: absolute;
1101
+ top: 0;
1102
+ left: 0;
1103
+ width: 100%;
1104
+ height: 100%;
1105
+ background:
1106
+ linear-gradient(45deg, transparent 49.5%, rgba(0, 178, 255, 0.05) 49.5%, rgba(0, 178, 255, 0.05) 50.5%, transparent 50.5%);
1107
+ background-size: 10px 10px;
1108
+ pointer-events: none;
1109
+ z-index: 0;
1110
+ }
1111
+
1112
+ .section-container h2 {
1113
+ margin-top: 0;
1114
+ }
1115
+
1116
+ /* Support section */
1117
+ .support-section {
1118
+ margin-bottom: 50px;
1119
+ padding: 25px;
1120
+ background: rgba(3, 6, 18, 0.8);
1121
+ border: 1px solid #00b2ff;
1122
+ position: relative;
1123
+ overflow: hidden;
1124
+ clip-path: polygon(0 0, calc(100% - 15px) 0, 100% 15px, 100% 100%, 15px 100%, 0 calc(100% - 15px));
1125
+ box-shadow: 0 0 20px rgba(0, 178, 255, 0.15);
1126
+ }
1127
+
1128
+ .support-section::before {
1129
+ content: '';
1130
+ position: absolute;
1131
+ top: 0;
1132
+ left: 0;
1133
+ width: 100%;
1134
+ height: 100%;
1135
+ background:
1136
+ linear-gradient(45deg, transparent 49.5%, rgba(0, 178, 255, 0.05) 49.5%, rgba(0, 178, 255, 0.05) 50.5%, transparent 50.5%);
1137
+ background-size: 10px 10px;
1138
+ pointer-events: none;
1139
+ z-index: 0;
1140
+ }
1141
+
1142
+ .support-section h2 {
1143
+ margin-top: 0;
1144
+ }
1145
+
1146
+ /* Special thanks */
1147
+ .special-thanks {
1148
+ margin-top: 30px;
1149
+ }
1150
+
1151
+ .thanks-list {
1152
+ list-style: none;
1153
+ padding: 0;
1154
+ margin: 15px 0;
1155
+ display: grid;
1156
+ grid-template-columns: repeat(auto-fill, minmax(250px, 1fr));
1157
+ gap: 15px;
1158
+ }
1159
+
1160
+ .thanks-list li {
1161
+ padding: 10px 15px;
1162
+ background: rgba(0, 98, 255, 0.05);
1163
+ border: 1px solid rgba(0, 178, 255, 0.2);
1164
+ position: relative;
1165
+ overflow: hidden;
1166
+ clip-path: polygon(0 0, calc(100% - 8px) 0, 100% 8px, 100% 100%, 8px 100%, 0 calc(100% - 8px));
1167
+ transition: all 0.3s ease;
1168
+ }
1169
+
1170
+ .thanks-list li:hover {
1171
+ background: rgba(0, 98, 255, 0.1);
1172
+ border-color: #00b2ff;
1173
+ box-shadow: 0 0 15px rgba(0, 178, 255, 0.2);
1174
+ transform: translateY(-2px);
1175
+ }
1176
+
1177
+ .thanks-list li strong {
1178
+ color: #00b2ff;
1179
+ text-shadow: 0 0 5px rgba(0, 178, 255, 0.3);
1180
+ }
1181
+
1182
+ .thanks-note {
1183
+ font-style: italic;
1184
+ color: rgba(224, 247, 255, 0.7);
1185
+ text-align: center;
1186
+ margin-top: 20px;
1187
+ }
1188
+
1189
+ /* General card styles */
1190
+ .info-card,
1191
+ .template-card,
1192
+ .settings-card,
1193
+ .quantized-section {
1194
+ background: rgba(3, 6, 18, 0.8);
1195
+ border: 1px solid #00b2ff;
1196
+ padding: 25px;
1197
+ margin: 20px 0;
1198
+ position: relative;
1199
+ overflow: hidden;
1200
+ clip-path: polygon(0 0, calc(100% - 15px) 0, 100% 15px, 100% 100%, 15px 100%, 0 calc(100% - 15px));
1201
+ box-shadow: 0 0 20px rgba(0, 178, 255, 0.15);
1202
+ }
1203
+
1204
+ .info-card::before,
1205
+ .template-card::before,
1206
+ .settings-card::before,
1207
+ .quantized-section::before {
1208
+ content: '';
1209
+ position: absolute;
1210
+ top: 0;
1211
+ left: 0;
1212
+ width: 100%;
1213
+ height: 100%;
1214
+ background:
1215
+ linear-gradient(45deg, transparent 49.5%, rgba(0, 178, 255, 0.05) 49.5%, rgba(0, 178, 255, 0.05) 50.5%, transparent 50.5%);
1216
+ background-size: 10px 10px;
1217
+ pointer-events: none;
1218
+ z-index: 0;
1219
+ }
1220
+
1221
+ .info-card::after,
1222
+ .template-card::after,
1223
+ .settings-card::after,
1224
+ .quantized-section::after {
1225
+ content: '';
1226
+ position: absolute;
1227
+ top: 0;
1228
+ left: 0;
1229
+ width: 100%;
1230
+ height: 100%;
1231
+ background: linear-gradient(135deg, rgba(0, 178, 255, 0.15), transparent 70%);
1232
+ pointer-events: none;
1233
+ z-index: 0;
1234
+ }
1235
+
1236
+ /* Info card specific */
1237
+ .info-card {
1238
+ box-shadow: 0 0 30px rgba(0, 178, 255, 0.2);
1239
+ }
1240
+
1241
+ .info-header {
1242
+ margin-bottom: 25px;
1243
+ padding-bottom: 15px;
1244
+ border-bottom: 1px solid rgba(0, 178, 255, 0.2);
1245
+ position: relative;
1246
+ }
1247
+
1248
+ .info-header::after {
1249
+ content: '';
1250
+ position: absolute;
1251
+ bottom: -1px;
1252
+ left: 0;
1253
+ width: 100px;
1254
+ height: 1px;
1255
+ background: #00b2ff;
1256
+ box-shadow: 0 0 10px #00b2ff;
1257
+ }
1258
+
1259
+ .model-tags {
1260
+ display: flex;
1261
+ flex-wrap: wrap;
1262
+ gap: 10px;
1263
+ margin-top: 10px;
1264
+ }
1265
+
1266
+ .model-tag {
1267
+ background: rgba(0, 98, 255, 0.15);
1268
+ border: 1px solid #00b2ff;
1269
+ color: #e0f7ff;
1270
+ font-size: 12px;
1271
+ padding: 5px 10px;
1272
+ text-transform: uppercase;
1273
+ letter-spacing: 1px;
1274
+ font-weight: 500;
1275
+ position: relative;
1276
+ overflow: hidden;
1277
+ clip-path: polygon(0 0, calc(100% - 8px) 0, 100% 8px, 100% 100%, 8px 100%, 0 calc(100% - 8px));
1278
+ box-shadow: 0 0 10px rgba(0, 178, 255, 0.2);
1279
+ transition: all 0.3s ease;
1280
+ }
1281
+
1282
+ .model-tag:hover {
1283
+ background: rgba(0, 98, 255, 0.25);
1284
+ box-shadow: 0 0 15px rgba(0, 178, 255, 0.4);
1285
+ transform: translateY(-2px);
1286
+ }
1287
+
1288
+ /* Model composition list */
1289
+ .model-composition h4 {
1290
+ margin-bottom: 15px;
1291
+ }
1292
+
1293
+ .composition-list {
1294
+ list-style: none;
1295
+ padding: 0;
1296
+ margin: 0 0 20px 0;
1297
+ display: grid;
1298
+ gap: 12px;
1299
+ }
1300
+
1301
+ .composition-list li {
1302
+ display: grid;
1303
+ grid-template-columns: minmax(0, 1fr) auto;
1304
+ align-items: center;
1305
+ gap: 10px;
1306
+ padding: 10px 15px;
1307
+ background: rgba(0, 98, 255, 0.05);
1308
+ border: 1px solid rgba(0, 178, 255, 0.2);
1309
+ position: relative;
1310
+ overflow: hidden;
1311
+ clip-path: polygon(0 0, calc(100% - 8px) 0, 100% 8px, 100% 100%, 8px 100%, 0 calc(100% - 8px));
1312
+ transition: all 0.3s ease;
1313
+ }
1314
+
1315
+ .composition-list li:hover {
1316
+ background: rgba(0, 98, 255, 0.1);
1317
+ border-color: #00b2ff;
1318
+ box-shadow: 0 0 15px rgba(0, 178, 255, 0.2);
1319
+ transform: translateY(-2px);
1320
+ }
1321
+
1322
+ .composition-list li::before {
1323
+ content: '';
1324
+ position: absolute;
1325
+ top: 0;
1326
+ left: 0;
1327
+ width: 8px;
1328
+ height: 100%;
1329
+ background: linear-gradient(180deg, #00b2ff, #0062ff);
1330
+ opacity: 0.7;
1331
+ box-shadow: 0 0 10px rgba(0, 178, 255, 0.4);
1332
+ }
1333
+
1334
+ .model-component {
1335
+ color: #00b2ff;
1336
+ font-weight: 500;
1337
+ text-shadow: 0 0 5px rgba(0, 178, 255, 0.3);
1338
+ }
1339
+
1340
+ .model-component a {
1341
+ display: block;
1342
+ overflow-wrap: break-word;
1343
+ word-wrap: break-word;
1344
+ word-break: break-word;
1345
+ transition: all 0.3s ease;
1346
+ text-shadow: 0 0 5px rgba(0, 178, 255, 0.3);
1347
+ }
1348
+
1349
+ .model-component a:hover {
1350
+ transform: translateX(5px);
1351
+ text-shadow: 0 0 10px rgba(0, 178, 255, 0.5);
1352
+ }
1353
+
1354
+
1355
+
1356
+ /* Base model dropdown styles */
1357
+ .base-model-dropdown {
1358
+ width: 100%;
1359
+
1360
+ position: relative;
1361
+ padding-right: 50px; /* Make space for the BASE label */
1362
+ display: block;
1363
+ margin-bottom: 0;
1364
+ }
1365
+
1366
+ .base-model-summary {
1367
+ display: flex;
1368
+ justify-content: space-between;
1369
+ align-items: center;
1370
+ padding: 8px 12px 8px 20px; /* Increased left padding to prevent text overlap with blue stripe */
1371
+ cursor: pointer;
1372
+ border: 1px solid rgba(0, 178, 255, 0.2);
1373
+ position: relative;
1374
+ clip-path: polygon(0 0, calc(100% - 8px) 0, 100% 8px, 100% 100%, 8px 100%, 0 calc(100% - 8px));
1375
+ margin-bottom: 0;
1376
+ transition: all 0.3s ease;
1377
+ color: #00b2ff;
1378
+ font-weight: 500;
1379
+ text-shadow: 0 0 5px rgba(0, 178, 255, 0.3);
1380
+ }
1381
+
1382
+ .base-model-summary:hover {
1383
+ background: rgba(0, 98, 255, 0.1);
1384
+ border-color: #00b2ff;
1385
+ box-shadow: 0 0 15px rgba(0, 178, 255, 0.2);
1386
+ }
1387
+
1388
+ .base-model-summary span:first-child {
1389
+ overflow: hidden;
1390
+ text-overflow: ellipsis;
1391
+ display: inline-block;
1392
+ white-space: nowrap;
1393
+ flex: 1;
1394
+ }
1395
+
1396
+ .dropdown-icon {
1397
+ font-size: 0.75rem;
1398
+ margin-left: 8px;
1399
+ color: rgba(0, 178, 255, 0.7);
1400
+ transition: transform 0.3s ease;
1401
+ }
1402
+
1403
+ .base-model-dropdown[open] .dropdown-icon {
1404
+ transform: rotate(180deg);
1405
+ }
1406
+
1407
+ .base-model-list {
1408
+ position: absolute;
1409
+ margin-top: 0;
1410
+ left: 50%;
1411
+ transform: translateX(-50%);
1412
+ background: rgba(3, 6, 18, 0.95);
1413
+ border: 1px solid rgba(0, 178, 255, 0.3);
1414
+ clip-path: polygon(0 0, calc(100% - 8px) 0, 100% 8px, 100% 100%, 8px 100%, 0 calc(100% - 8px));
1415
+ box-shadow: 0 0 15px rgba(0, 178, 255, 0.1);
1416
+ min-width: 100%;
1417
+ overflow: visible;
1418
+ }
1419
+
1420
+ .base-model-item {
1421
+ padding: 8px 12px 8px 20px; /* Increased left padding for the model items */
1422
+ border-bottom: 1px solid rgba(0, 178, 255, 0.1);
1423
+ position: relative;
1424
+ transition: all 0.3s ease;
1425
+ }
1426
+
1427
+ .base-model-item:last-child {
1428
+ border-bottom: none;
1429
+ margin-bottom: 0;
1430
+ }
1431
+
1432
+ .base-model-item:hover {
1433
+ background: rgba(0, 98, 255, 0.1);
1434
+ box-shadow: 0 0 15px rgba(0, 178, 255, 0.2);
1435
+ transform: translateY(-1px) translateX(0);
1436
+ }
1437
+
1438
+ .base-model-item a {
1439
+ display: block;
1440
+ width: 100%;
1441
+ overflow: hidden;
1442
+ padding-left: 10px;
1443
+ }
1444
+
1445
+ .model-label {
1446
+ color: #00b2ff;
1447
+ text-decoration: none;
1448
+ transition: all 0.3s ease;
1449
+ display: inline-block;
1450
+ font-weight: 500;
1451
+ white-space: nowrap;
1452
+ overflow: hidden;
1453
+ text-overflow: ellipsis;
1454
+ }
1455
+
1456
+ .model-label:hover {
1457
+ text-shadow: 0 0 10px rgba(0, 178, 255, 0.5);
1458
+ }
1459
+
1460
+ /* BASE label */
1461
+ .base-model-dropdown::after {
1462
+ z-index: 1;
1463
+ content: 'BASE';
1464
+ position: absolute;
1465
+ right: 0;
1466
+ top: 8px;
1467
+ transform: translateY(0);
1468
+ font-size: 10px;
1469
+ padding: 2px 5px;
1470
+ background: rgba(0, 178, 255, 0.2);
1471
+ color: #e0f7ff;
1472
+ border: 1px solid #00b2ff;
1473
+ box-shadow: 0 0 10px rgba(0, 178, 255, 0.3);
1474
+ clip-path: polygon(0 0, calc(100% - 5px) 0, 100% 5px, 100% 100%, 5px 100%, 0 calc(100% - 5px));
1475
+
1476
+ }
1477
+
1478
+ /* Override the blue stripe for base-model-summary and items */
1479
+ .base-model-dropdown {
1480
+ position: relative;
1481
+ }
1482
+
1483
+ .base-model-summary::before,
1484
+ .base-model-item::before {
1485
+ content: '';
1486
+ position: absolute;
1487
+ top: 0;
1488
+ left: 0;
1489
+ width: 8px;
1490
+ height: 100%;
1491
+ background: linear-gradient(180deg, #00b2ff, #0062ff);
1492
+ opacity: 0.7;
1493
+ }
1494
+
1495
+ .base-model-dropdown[open] .base-model-summary,
1496
+ .base-model-dropdown[open] .base-model-list {
1497
+ border-color: rgba(0, 178, 255, 0.7);
1498
+ box-shadow: 0 0 25px rgba(0, 178, 255, 0.3);
1499
+ z-index: 20;
1500
+ position: relative;
1501
+ }
1502
+
1503
+ /* Model description */
1504
+ .model-description {
1505
+ margin-top: 30px;
1506
+ }
1507
+
1508
+ .model-description h4 {
1509
+ margin-bottom: 15px;
1510
+ }
1511
+
1512
+ .model-description p {
1513
+ margin-bottom: 20px;
1514
+ }
1515
+
1516
+ .model-description ul {
1517
+ padding-left: 20px;
1518
+ margin-bottom: 20px;
1519
+ list-style: none;
1520
+ }
1521
+
1522
+ .model-description li {
1523
+ margin-bottom: 8px;
1524
+ position: relative;
1525
+ padding-left: 15px;
1526
+ }
1527
+
1528
+ .model-description li::before {
1529
+ content: '';
1530
+ position: absolute;
1531
+ left: 0;
1532
+ top: 0.8em;
1533
+ width: 8px;
1534
+ height: 1px;
1535
+ background: #00b2ff;
1536
+ transform: skewX(-20deg);
1537
+ box-shadow: 0 0 10px rgba(0, 178, 255, 0.5);
1538
+ }
1539
+
1540
+ /* Template card */
1541
+ .template-card {
1542
+ box-shadow: 0 0 30px rgba(0, 178, 255, 0.2);
1543
+ }
1544
+
1545
+ .template-item {
1546
+ padding: 15px;
1547
+ margin-bottom: 15px;
1548
+ background: rgba(0, 98, 255, 0.05);
1549
+ border: 1px solid rgba(0, 178, 255, 0.2);
1550
+ position: relative;
1551
+ clip-path: polygon(0 0, calc(100% - 10px) 0, 100% 10px, 100% 100%, 10px 100%, 0 calc(100% - 10px));
1552
+ transition: all 0.3s ease;
1553
+ }
1554
+
1555
+ .template-item:hover {
1556
+ background: rgba(0, 98, 255, 0.1);
1557
+ border-color: #00b2ff;
1558
+ box-shadow: 0 0 15px rgba(0, 178, 255, 0.3);
1559
+ transform: translateY(-2px);
1560
+ }
1561
+
1562
+ .template-content {
1563
+ display: flex;
1564
+ flex-direction: column;
1565
+ gap: 5px;
1566
+ }
1567
+
1568
+ .template-link {
1569
+ display: flex;
1570
+ align-items: center;
1571
+ justify-content: space-between;
1572
+ font-weight: 600;
1573
+ color: #00b2ff;
1574
+ text-shadow: 0 0 5px rgba(0, 178, 255, 0.3);
1575
+ padding: 5px;
1576
+ transition: all 0.3s ease;
1577
+ }
1578
+
1579
+ .template-link:hover {
1580
+ text-shadow: 0 0 10px rgba(0, 178, 255, 0.5);
1581
+ transform: translateX(5px);
1582
+ }
1583
+
1584
+ .link-arrow {
1585
+ font-weight: 600;
1586
+ transition: transform 0.3s ease;
1587
+ }
1588
+
1589
+ .template-link:hover .link-arrow {
1590
+ transform: translateX(5px);
1591
+ }
1592
+
1593
+ .template-author {
1594
+ font-size: 14px;
1595
+ color: rgba(224, 247, 255, 0.8);
1596
+ text-transform: uppercase;
1597
+ letter-spacing: 1px;
1598
+ }
1599
+
1600
+ /* Settings card */
1601
+ .settings-card {
1602
+ box-shadow: 0 0 30px rgba(0, 178, 255, 0.2);
1603
+ }
1604
+
1605
+ .settings-header {
1606
+ margin-bottom: 15px;
1607
+ padding-bottom: 10px;
1608
+ border-bottom: 1px solid rgba(0, 178, 255, 0.2);
1609
+ position: relative;
1610
+ }
1611
+
1612
+ .settings-header::after {
1613
+ content: '';
1614
+ position: absolute;
1615
+ bottom: -1px;
1616
+ left: 0;
1617
+ width: 80px;
1618
+ height: 1px;
1619
+ background: #00b2ff;
1620
+ box-shadow: 0 0 10px #00b2ff;
1621
+ }
1622
+
1623
+ .settings-content {
1624
+ padding: 15px;
1625
+ background: rgba(0, 98, 255, 0.05);
1626
+ border: 1px solid rgba(0, 178, 255, 0.2);
1627
+ margin-bottom: 15px;
1628
+ position: relative;
1629
+ clip-path: polygon(0 0, calc(100% - 10px) 0, 100% 10px, 100% 100%, 10px 100%, 0 calc(100% - 10px));
1630
+ }
1631
+
1632
+ .settings-grid {
1633
+ display: grid;
1634
+ grid-template-columns: repeat(auto-fit, minmax(250px, 1fr));
1635
+ gap: 20px;
1636
+ margin-top: 20px;
1637
+ }
1638
+
1639
+ .setting-item {
1640
+ display: flex;
1641
+ justify-content: space-between;
1642
+ align-items: center;
1643
+ margin-bottom: 10px;
1644
+ padding: 8px 0;
1645
+ border-bottom: 1px solid rgba(0, 178, 255, 0.1);
1646
+ }
1647
+
1648
+ .setting-item:last-child {
1649
+ margin-bottom: 0;
1650
+ border-bottom: none;
1651
+ }
1652
+
1653
+ .setting-label {
1654
+ color: #e0f7ff;
1655
+ font-size: 14px;
1656
+ font-weight: 500;
1657
+ text-transform: uppercase;
1658
+ letter-spacing: 1px;
1659
+ }
1660
+
1661
+ .setting-value {
1662
+ color: #00b2ff;
1663
+ font-weight: 600;
1664
+ font-family: 'Orbitron', sans-serif;
1665
+ text-shadow: 0 0 5px rgba(0, 178, 255, 0.5);
1666
+ }
1667
+
1668
+ .setting-item.highlight {
1669
+ padding: 15px;
1670
+ background: rgba(0, 98, 255, 0.1);
1671
+ border: 1px solid rgba(0, 178, 255, 0.3);
1672
+ border-radius: 0;
1673
+ display: flex;
1674
+ justify-content: center;
1675
+ position: relative;
1676
+ clip-path: polygon(0 0, calc(100% - 8px) 0, 100% 8px, 100% 100%, 8px 100%, 0 calc(100% - 8px));
1677
+ }
1678
+
1679
+ .setting-item.highlight .setting-value {
1680
+ font-size: 24px;
1681
+ font-weight: 700;
1682
+ text-shadow:
1683
+ 0 0 10px rgba(0, 178, 255, 0.5),
1684
+ 0 0 20px rgba(0, 178, 255, 0.3);
1685
+ }
1686
+
1687
+ /* Sampler Settings Section */
1688
+ .sampler-settings {
1689
+ position: relative;
1690
+ overflow: visible;
1691
+ }
1692
+
1693
+ .sampler-settings .settings-card {
1694
+ background: rgba(3, 6, 18, 0.8);
1695
+ border: 1px solid #00b2ff;
1696
+ box-shadow: 0 0 20px rgba(0, 178, 255, 0.25), inset 0 0 30px rgba(0, 98, 255, 0.1);
1697
+ padding: 20px;
1698
+ margin: 15px 0;
1699
+ position: relative;
1700
+ }
1701
+
1702
+ .sampler-settings .settings-header h3 {
1703
+ color: #0062ff;
1704
+ text-shadow: 0 0 8px rgba(0, 178, 255, 0.5);
1705
+ font-size: 1.2rem;
1706
+ letter-spacing: 1px;
1707
+ }
1708
+
1709
+ .sampler-settings .settings-grid {
1710
+ display: grid;
1711
+ grid-template-columns: repeat(auto-fit, minmax(250px, 1fr));
1712
+ gap: 15px;
1713
+ }
1714
+
1715
+ .sampler-settings .setting-item {
1716
+ border-bottom: 1px solid rgba(0, 178, 255, 0.2);
1717
+ padding: 12px 0;
1718
+ transition: all 0.3s ease;
1719
+ }
1720
+
1721
+ .sampler-settings .setting-label {
1722
+ font-family: 'Rajdhani', sans-serif;
1723
+ font-weight: 600;
1724
+ color: #e0f7ff;
1725
+ }
1726
+
1727
+ .sampler-settings .setting-value {
1728
+ font-family: 'Orbitron', sans-serif;
1729
+ color: #00b2ff;
1730
+ }
1731
+
1732
+ /* DRY Settings styles */
1733
+ .dry-settings {
1734
+ margin-top: 8px;
1735
+ padding-left: 8px;
1736
+ border-left: 2px solid rgba(0, 178, 255, 0.3);
1737
+ display: flex;
1738
+ flex-direction: column;
1739
+ gap: 6px;
1740
+ }
1741
+
1742
+ .dry-item {
1743
+ display: flex;
1744
+ justify-content: space-between;
1745
+ align-items: center;
1746
+ }
1747
+
1748
+ .dry-label {
1749
+ font-size: 13px;
1750
+ color: #e0f7ff;
1751
+ }
1752
+
1753
+ .dry-value {
1754
+ color: #00b2ff;
1755
+ font-family: 'Orbitron', sans-serif;
1756
+ text-shadow: 0 0 5px rgba(0, 178, 255, 0.4);
1757
+ }
1758
+
1759
+ /* Quantized sections */
1760
+ .quantized-section {
1761
+ margin-bottom: 30px;
1762
+ }
1763
+
1764
+ .quantized-items {
1765
+ display: grid;
1766
+ gap: 15px;
1767
+ margin-top: 15px;
1768
+ }
1769
+
1770
+ .quantized-item {
1771
+ padding: 15px;
1772
+ background: rgba(0, 98, 255, 0.05);
1773
+ border: 1px solid rgba(0, 178, 255, 0.2);
1774
+ display: grid;
1775
+ gap: 8px;
1776
+ position: relative;
1777
+ clip-path: polygon(0 0, calc(100% - 10px) 0, 100% 10px, 100% 100%, 10px 100%, 0 calc(100% - 10px));
1778
+ transition: all 0.3s ease;
1779
+ }
1780
+
1781
+ .quantized-item:hover {
1782
+ background: rgba(0, 98, 255, 0.1);
1783
+ border-color: #00b2ff;
1784
+ box-shadow: 0 0 15px rgba(0, 178, 255, 0.3);
1785
+ transform: translateY(-2px);
1786
+ }
1787
+
1788
+ .author {
1789
+ color: #e0f7ff;
1790
+ font-size: 12px;
1791
+ text-transform: uppercase;
1792
+ letter-spacing: 1px;
1793
+ font-weight: 500;
1794
+ }
1795
+
1796
+ .multi-links {
1797
+ display: flex;
1798
+ align-items: center;
1799
+ flex-wrap: wrap;
1800
+ gap: 5px;
1801
+ }
1802
+
1803
+ .separator {
1804
+ color: rgba(224, 247, 255, 0.5);
1805
+ margin: 0 5px;
1806
+ }
1807
+ </style>
1808
+
1809
+ <html lang="en" style="margin:0; padding:0; width:100%; height:100%;">
1810
+ <head>
1811
+ <meta charset="UTF-8">
1812
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
1813
+ <title>L3.3-Electra-R1-70b</title>
1814
+ <link href="https://fonts.googleapis.com/css2?family=Orbitron:wght@400;500;600;700&family=Rajdhani:wght@300;400;500;600;700&display=swap" rel="stylesheet">
1815
+ </head>
1816
+ <body>
1817
+ <div class="container">
1818
+ <div class="header">
1819
+ <h1>L3.3-Electra-R1-70b</h1>
1820
+ </div>
1821
+ <div class="info">
1822
+ <img src="https://cdn-uploads.huggingface.co/production/uploads/64545af5ec40bbbd01242ca6/GXLpDNkbGEvESfLmWkKpD.jpeg" alt="Electra Model Mascot">
1823
+ <div class="creator-section">
1824
+ <div class="creator-badge" style="display: flex; align-items: center; gap: 1rem;">
1825
+ <div class="creator-info">
1826
+ <span class="creator-label">Created by</span>
1827
+ <a href="https://huggingface.co/Steelskull" target="_blank" class="creator-link">
1828
+ <span class="creator-name">SteelSkull</span>
1829
+ <span class="creator-arrow">→</span>
1830
+ </a>
1831
+ </div>
1832
+ <a href="https://ko-fi.com/Y8Y0AO2XE" target="_blank" class="button" style="margin: 0; padding: 0.5rem 1rem;">
1833
+ Support on Ko-fi
1834
+ </a>
1835
+ </div>
1836
+ <div class="sponsors-section">
1837
+ <details class="sponsors-dropdown" open>
1838
+ <summary class="sponsors-summary">
1839
+ <span class="sponsors-title">⚡ Top Sponsors</span>
1840
+ <span class="dropdown-icon">▼</span>
1841
+ </summary>
1842
+ <div class="sponsors-list">
1843
+ <div class="sponsor-item">
1844
+ <div class="sponsor-rank">#1</div>
1845
+ <img src="https://storage.ko-fi.com/cdn/useruploads/0f77ce5e-3d45-4b45-93e1-b93e74ef32ca_7408a132-232b-4bf4-9878-c483bd80d532.png" alt="Artus" class="sponsor-img">
1846
+ <div class="sponsor-name">Artus</div>
1847
+ </div>
1848
+ <div class="sponsor-item">
1849
+ <div class="sponsor-rank">#2</div>
1850
+ <img src="https://storage.ko-fi.com/cdn/useruploads/957890c9-c45b-4229-8837-bd802de0691d_586ce212-c05e-4e35-a808-4d278783dc33.png" alt="Buthayna" class="sponsor-img">
1851
+ <div class="sponsor-name">Buthayna</div>
1852
+ </div>
1853
+ <div class="sponsor-item">
1854
+ <div class="sponsor-rank">#3</div>
1855
+ <img src="https://storage.ko-fi.com/cdn/useruploads/b28597ab-a2e6-4b55-aad9-6b2794e68847_3a65f36e-76b4-4fac-bfef-08b43722e331.png" alt="Kistara" class="sponsor-img">
1856
+ <div class="sponsor-name">Kistara</div>
1857
+ </div>
1858
+ <div class="sponsor-item">
1859
+ <div class="sponsor-rank">#4</div>
1860
+ <img src="https://storage.ko-fi.com/cdn/useruploads/86d8e2d8-fbde-4347-8e40-71b3e8eb9e65.jpeg" alt="lizzieshinkickr" class="sponsor-img">
1861
+ <div class="sponsor-name">lizzieshinkickr</div>
1862
+ </div>
1863
+ <div class="sponsor-item">
1864
+ <div class="sponsor-rank">#5</div>
1865
+ <img src="https://storage.ko-fi.com/cdn/useruploads/f68fdafa-7b8e-4d2f-9eec-be99772f3f77_82e97a70-65ca-4608-983a-c1f28a67da41.png" alt="Mooth Dragoon" class="sponsor-img">
1866
+ <div class="sponsor-name">Mooth Dragoon</div>
1867
+ </div>
1868
+ <div class="sponsor-item">
1869
+ <div class="sponsor-rank">#6</div>
1870
+ <img src="https://storage.ko-fi.com/cdn/useruploads/5e126f2e-da62-41c6-9350-a2461fbad35c_2a3df41f-4481-4dc7-8f08-88f24da2e7a1.png" alt="JH2011" class="sponsor-img">
1871
+ <div class="sponsor-name">JH2011</div>
1872
+ </div>
1873
+ <div class="sponsor-item">
1874
+ <div class="sponsor-rank">#7</div>
1875
+ <img src="https://storage.ko-fi.com/cdn/useruploads/4b5adb19-7822-468b-a397-e5d56ac8fb72_08050f44-82b3-497c-84d4-d895c38089f1.png" alt="NarpasSword" class="sponsor-img">
1876
+ <div class="sponsor-name">NarpasSword</div>
1877
+ </div>
1878
+ <div class="sponsor-item">
1879
+ <div class="sponsor-rank">#8</div>
1880
+ <img src="https://storage.ko-fi.com/cdn/useruploads/8b9b831f-ea45-4ee7-8473-2c9c75e0c31c_1c95d276-c5ba-43fa-953a-6245fb25d284.png" alt="WeForgot" class="sponsor-img">
1881
+ <div class="sponsor-name">WeForgot</div>
1882
+ </div>
1883
+ </div>
1884
+ </details>
1885
+ </div>
1886
+ </div>
1887
+ <div class="model-info">
1888
+ <h2>Model Information</h2>
1889
+ <div class="info-card">
1890
+ <div class="info-header">
1891
+ <h3>L3.3-Electra-R1-70b | v0.6.A</h3>
1892
+ <div class="model-tags">
1893
+ <span class="model-tag">L3.3 = Llama 3.3</span>
1894
+ <span class="model-tag">SCE Merge</span>
1895
+ <span class="model-tag">R1 = Deepseek R1</span>
1896
+ <span class="model-tag">70b Parameters</span>
1897
+ <span class="model-tag">v0.6.A</span>
1898
+ </div>
1899
+ </div>
1900
+ <div class="model-composition">
1901
+ <h4>Model Composition</h4>
1902
+ <ul class="composition-list">
1903
+ <li>
1904
+ <details class="base-model-dropdown base-model">
1905
+ <summary class="base-model-summary">
1906
+ <a href="https://huggingface.co/TheSkullery/L3.1x3.3-Hydroblated-R1-70B-v4.4" target="_blank">TheSkullery/L3.1x3.3-Hydroblated-R1-70B-v4.4</a>
1907
+ <span class="dropdown-icon">▼</span>
1908
+ </summary>
1909
+ <div class="base-model-list">
1910
+ <div class="base-model-item">
1911
+ <a href="https://huggingface.co/huihui-ai/DeepSeek-R1-Distill-Llama-70B-abliterated" target="_blank" class="model-label">huihui-ai/DeepSeek-R1-Distill-Llama-70B-abliterated</a>
1912
+ </div>
1913
+ <div class="base-model-item">
1914
+ <a href="https://huggingface.co/nbeerbower/Llama-3.1-Nemotron-lorablated-70B" target="_blank" class="model-label">nbeerbower/Llama-3.1-Nemotron-lorablated-70B</a>
1915
+ </div>
1916
+ <div class="base-model-item">
1917
+ <a href="https://huggingface.co/SicariusSicariiStuff/Negative_LLAMA_70B" target="_blank" class="model-label">SicariusSicariiStuff/Negative_LLAMA_70B</a>
1918
+ </div>
1919
+ <div class="base-model-item">
1920
+ <a href="https://huggingface.co/TheDrummer/Fallen-Llama-3.3-R1-70B-v1" target="_blank" class="model-label">TheDrummer/Fallen-Llama-3.3-R1-70B-v1</a>
1921
+ </div>
1922
+ <div class="base-model-item">
1923
+ <a href="https://huggingface.co/abacusai/Dracarys2-Llama-3.1-70B-Instruct" target="_blank" class="model-label">abacusai/Dracarys2-Llama-3.1-70B-Instruct</a>
1924
+ </div>
1925
+ </div>
1926
+ </details>
1927
+ </li>
1928
+ <li><span class="model-component"><a href="https://huggingface.co/EVA-UNIT-01/EVA-LLaMA-3.33-70B-v0.0" target="_blank">EVA-LLaMA-3.33-70B-v0.0</a></span> Core capabilities</li>
1929
+ <li><span class="model-component"><a href="https://huggingface.co/LatitudeGames/Wayfarer-Large-70B-Llama-3.3" target="_blank">Wayfarer-Large-70B-Llama-3.3</a></span> Storytelling and RP</li>
1930
+ <li><span class="model-component"><a href="https://huggingface.co/Sao10K/L3.3-70B-Euryale-v2.3" target="_blank">L3.3-70B-Euryale-v2.3</a></span> All Rounder RP</li>
1931
+ <li><span class="model-component"><a href="https://huggingface.co/Sao10K/70B-L3.3-Cirrus-x1" target="_blank">70B-L3.3-Cirrus-x1</a></span> Improved coherence</li>
1932
+ <li><span class="model-component"><a href="https://huggingface.co/Sao10K/L3.1-70B-Hanami-x1" target="_blank">L3.1-70B-Hanami-x1</a></span> Balanced responses</li>
1933
+ <li><span class="model-component"><a href="https://huggingface.co/TheDrummer/Anubis-70B-v1" target="_blank">Anubis-70B-v1</a></span> Enhanced detail</li>
1934
+ <li><span class="model-component"><a href="https://huggingface.co/SicariusSicariiStuff/Negative_LLAMA_70B" target="_blank">Negative_LLAMA_70B</a></span> Reduced bias - Base</li>
1935
+ <li><span class="model-component"><a href="https://huggingface.co/TheDrummer/Fallen-Llama-3.3-R1-70B-v1" target="_blank">Fallen-Llama-3.3-R1-70B-v1</a></span> Reduced bias - Base</li>
1936
+ </ul>
1937
+ <div class="model-description">
1938
+ <h4>Model Series Overview</h4>
1939
+ <p>L3.3-Electra-R1-70b is the newest release of the Unnamed series; this is the 6th iteration, based on user feedback.</p>
1940
+ <h4>Technical Architecture</h4>
1941
+ <p>Built on a custom DeepSeek R1 Distill base (TheSkullery/L3.1x3.3-Hydroblated-R1-70B-v4.4), Electra-R1 integrates specialized components through the SCE merge method. The model uses float32 dtype during processing with a bfloat16 output dtype for optimized performance.</p>
1942
+ <h4>Core Capabilities</h4>
1943
+ <p>Electra-R1 serves as the newest gold standard and baseline. User feedback consistently highlights its superior intelligence, coherence, and unique ability to provide deep character insights. Through proper prompting, the model demonstrates advanced reasoning capabilities and unprompted exploration of character inner thoughts and motivations.</p>
1944
+ <h4>Base Architecture</h4>
1945
+ <p>The model utilizes the custom Hydroblated-R1 base, created for stability and enhanced reasoning. The SCE merge method's settings are precisely tuned based on extensive community feedback (of over 10 different models from Nevoria to Cu-Mai), ensuring optimal component integration while maintaining model coherence and reliability. This foundation establishes Electra-R1 as the benchmark upon which its variant models build and expand.</p>
1946
+ </div>
1947
+ </div>
1948
+ </div>
1949
+ <!-- UGI-Benchmark Results section commented out
1950
+ <h2>UGI-Benchmark Results:</h2>
1951
+ <div class="benchmark-container">
1952
+ <div class="benchmark-notification">
1953
+ <div class="notification-content">
1954
+ <span class="notification-icon">⚡</span>
1955
+ <span class="notification-text">
1956
+ Latest benchmark results as of 02/20/2025.
1957
+ <a href="https://huggingface.co/spaces/DontPlanToEnd/UGI-Leaderboard" target="_blank" class="benchmark-link">
1958
+ View Full Leaderboard →
1959
+ </a>
1960
+ </span>
1961
+ </div>
1962
+ </div>
1963
+ <div class="metrics-section">
1964
+ <h3>Core Metrics</h3>
1965
+ <div class="core-metrics-grid">
1966
+ <div class="metric-box">
1967
+ <span class="label">UGI Score</span>
1968
+ <span class="value">40.04</span>
1969
+ </div>
1970
+ <div class="metric-box">
1971
+ <span class="label">Willingness Score</span>
1972
+ <span class="value">2.5/10</span>
1973
+ </div>
1974
+ <div class="metric-box">
1975
+ <span class="label">Natural Intelligence</span>
1976
+ <span class="value">42.36</span>
1977
+ </div>
1978
+ <div class="metric-box">
1979
+ <span class="label">Coding Ability</span>
1980
+ <span class="value">22</span>
1981
+ </div>
1982
+ </div>
1983
+ </div>
1984
+ <div class="metrics-section">
1985
+ <h3>Model Information</h3>
1986
+ <div class="info-grid">
1987
+ <div class="metric-box">
1988
+ <span class="label">Political Lean</span>
1989
+ <span class="value">-8.5%</span>
1990
+ </div>
1991
+ <div class="metric-box">
1992
+ <span class="label">Ideology</span>
1993
+ <span class="value">Liberalism</span>
1994
+ </div>
1995
+ <div class="metric-box">
1996
+ <span class="label">Parameters</span>
1997
+ <span class="value">70B</span>
1998
+ </div>
1999
+ </div>
2000
+ </div>
2001
+ <div class="metrics-section">
2002
+ <details open>
2003
+ <summary>Aggregated Scores</summary>
2004
+ <div class="progress-metrics">
2005
+ <div class="progress-metric">
2006
+ <div class="progress-label">
2007
+ <span>Diplomacy</span>
2008
+ <span class="progress-value">61.7%</span>
2009
+ </div>
2010
+ <div class="progress-bar">
2011
+ <div class="progress-fill" style="width: 61.7%"></div>
2012
+ </div>
2013
+ </div>
2014
+ <div class="progress-metric">
2015
+ <div class="progress-label">
2016
+ <span>Government</span>
2017
+ <span class="progress-value">44.6%</span>
2018
+ </div>
2019
+ <div class="progress-bar">
2020
+ <div class="progress-fill" style="width: 44.6%"></div>
2021
+ </div>
2022
+ </div>
2023
+ <div class="progress-metric">
2024
+ <div class="progress-label">
2025
+ <span>Economy</span>
2026
+ <span class="progress-value">43.3%</span>
2027
+ </div>
2028
+ <div class="progress-bar">
2029
+ <div class="progress-fill" style="width: 43.3%"></div>
2030
+ </div>
2031
+ </div>
2032
+ <div class="progress-metric">
2033
+ <div class="progress-label">
2034
+ <span>Society</span>
2035
+ <span class="progress-value">60.0%</span>
2036
+ </div>
2037
+ <div class="progress-bar">
2038
+ <div class="progress-fill" style="width: 60.0%"></div>
2039
+ </div>
2040
+ </div>
2041
+ </div>
2042
+ </details>
2043
+ </div>
2044
+ <div class="metrics-section">
2045
+ <details open>
2046
+ <summary>Individual Scores</summary>
2047
+ <div class="progress-metrics">
2048
+ <div class="progress-metric split">
2049
+ <div class="progress-label">
2050
+ <span>Federal</span>
2051
+ <span class="progress-value">46.0%</span>
2052
+ <span>Unitary</span>
2053
+ </div>
2054
+ <div class="progress-bar split">
2055
+ <div class="progress-fill-left" style="--scale: 0.460"></div>
2056
+ <div class="progress-fill-right" style="--scale: 0.540"></div>
2057
+ </div>
2058
+ </div>
2059
+ <div class="progress-metric split">
2060
+ <div class="progress-label">
2061
+ <span>Democratic</span>
2062
+ <span class="progress-value">67.5%</span>
2063
+ <span>Autocratic</span>
2064
+ </div>
2065
+ <div class="progress-bar split">
2066
+ <div class="progress-fill-left" style="--scale: 0.675"></div>
2067
+ <div class="progress-fill-right" style="--scale: 0.325"></div>
2068
+ </div>
2069
+ </div>
2070
+ <div class="progress-metric split">
2071
+ <div class="progress-label">
2072
+ <span>Security</span>
2073
+ <span class="progress-value">47.5%</span>
2074
+ <span>Freedom</span>
2075
+ </div>
2076
+ <div class="progress-bar split">
2077
+ <div class="progress-fill-left" style="--scale: 0.475"></div>
2078
+ <div class="progress-fill-right" style="--scale: 0.525"></div>
2079
+ </div>
2080
+ </div>
2081
+ <div class="progress-metric split">
2082
+ <div class="progress-label">
2083
+ <span>Nationalism</span>
2084
+ <span class="progress-value">40.4%</span>
2085
+ <span>Int'l</span>
2086
+ </div>
2087
+ <div class="progress-bar split">
2088
+ <div class="progress-fill-left" style="--scale: 0.404"></div>
2089
+ <div class="progress-fill-right" style="--scale: 0.596"></div>
2090
+ </div>
2091
+ </div>
2092
+ <div class="progress-metric split">
2093
+ <div class="progress-label">
2094
+ <span>Militarist</span>
2095
+ <span class="progress-value">32.9%</span>
2096
+ <span>Pacifist</span>
2097
+ </div>
2098
+ <div class="progress-bar split">
2099
+ <div class="progress-fill-left" style="--scale: 0.329"></div>
2100
+ <div class="progress-fill-right" style="--scale: 0.671"></div>
2101
+ </div>
2102
+ </div>
2103
+ <div class="progress-metric split">
2104
+ <div class="progress-label">
2105
+ <span>Assimilationist</span>
2106
+ <span class="progress-value">41.5%</span>
2107
+ <span>Multiculturalist</span>
2108
+ </div>
2109
+ <div class="progress-bar split">
2110
+ <div class="progress-fill-left" style="--scale: 0.415"></div>
2111
+ <div class="progress-fill-right" style="--scale: 0.585"></div>
2112
+ </div>
2113
+ </div>
2114
+ <div class="progress-metric split">
2115
+ <div class="progress-label">
2116
+ <span>Collectivize</span>
2117
+ <span class="progress-value">43.3%</span>
2118
+ <span>Privatize</span>
2119
+ </div>
2120
+ <div class="progress-bar split">
2121
+ <div class="progress-fill-left" style="--scale: 0.433"></div>
2122
+ <div class="progress-fill-right" style="--scale: 0.567"></div>
2123
+ </div>
2124
+ </div>
2125
+ <div class="progress-metric split">
2126
+ <div class="progress-label">
2127
+ <span>Planned</span>
2128
+ <span class="progress-value">42.9%</span>
2129
+ <span>LaissezFaire</span>
2130
+ </div>
2131
+ <div class="progress-bar split">
2132
+ <div class="progress-fill-left" style="--scale: 0.429"></div>
2133
+ <div class="progress-fill-right" style="--scale: 0.571"></div>
2134
+ </div>
2135
+ </div>
2136
+ <div class="progress-metric split">
2137
+ <div class="progress-label">
2138
+ <span>Isolationism</span>
2139
+ <span class="progress-value">43.8%</span>
2140
+ <span>Globalism</span>
2141
+ </div>
2142
+ <div class="progress-bar split">
2143
+ <div class="progress-fill-left" style="--scale: 0.438"></div>
2144
+ <div class="progress-fill-right" style="--scale: 0.562"></div>
2145
+ </div>
2146
+ </div>
2147
+ <div class="progress-metric split">
2148
+ <div class="progress-label">
2149
+ <span>Irreligious</span>
2150
+ <span class="progress-value">57.9%</span>
2151
+ <span>Religious</span>
2152
+ </div>
2153
+ <div class="progress-bar split">
2154
+ <div class="progress-fill-left" style="--scale: 0.579"></div>
2155
+ <div class="progress-fill-right" style="--scale: 0.421"></div>
2156
+ </div>
2157
+ </div>
2158
+ <div class="progress-metric split">
2159
+ <div class="progress-label">
2160
+ <span>Progressive</span>
2161
+ <span class="progress-value">57.3%</span>
2162
+ <span>Traditional</span>
2163
+ </div>
2164
+ <div class="progress-bar split">
2165
+ <div class="progress-fill-left" style="--scale: 0.573"></div>
2166
+ <div class="progress-fill-right" style="--scale: 0.427"></div>
2167
+ </div>
2168
+ </div>
2169
+ <div class="progress-metric split">
2170
+ <div class="progress-label">
2171
+ <span>Acceleration</span>
2172
+ <span class="progress-value">64.8%</span>
2173
+ <span>Bioconservative</span>
2174
+ </div>
2175
+ <div class="progress-bar split">
2176
+ <div class="progress-fill-left" style="--scale: 0.648"></div>
2177
+ <div class="progress-fill-right" style="--scale: 0.352"></div>
2178
+ </div>
2179
+ </div>
2180
+ </div>
2181
+ </details>
2182
+ </div>
2183
+ </div>
2184
+ -->
2185
+ <!-- Add spacing here -->
2186
+ <div style="height: 40px;"></div>
2187
+ <!-- Sampler Settings Section -->
2188
+ <div class="section-container sampler-settings">
2189
+ <h2>Recommended Sampler Settings</h2>
2190
+ <div class="settings-card">
2191
+ <div class="settings-header">
2192
+ <h3>⚡ By: @Geechan</h3>
2193
+ </div>
2194
+ <div class="settings-content">
2195
+ <div class="settings-grid">
2196
+ <div class="setting-item">
2197
+ <span class="setting-label">Static Temperature:</span>
2198
+ <span class="setting-value">1.0</span>
2199
+ </div>
2200
+ <div class="setting-item">
2201
+ <span class="setting-label">Dynamic Temp (Alternative):</span>
2202
+ <span class="setting-value">0.8 - 1.05</span>
2203
+ </div>
2204
+ <div class="setting-item">
2205
+ <span class="setting-label">Min P:</span>
2206
+ <span class="setting-value">0.025-0.03</span>
2207
+ </div>
2208
+ <div class="setting-item">
2209
+ <span class="setting-label">DRY:</span>
2210
+ <div class="dry-settings">
2211
+ <div class="dry-item">
2212
+ <span class="dry-label">- Multiplier:</span>
2213
+ <span class="dry-value">0.8</span>
2214
+ </div>
2215
+ <div class="dry-item">
2216
+ <span class="dry-label">- Base:</span>
2217
+ <span class="dry-value">1.74</span>
2218
+ </div>
2219
+ <div class="dry-item">
2220
+ <span class="dry-label">- Length:</span>
2221
+ <span class="dry-value">4-6</span>
2222
+ </div>
2223
+ </div>
2224
+ </div>
2225
+ </div>
2226
+ </div>
2227
+ </div>
2228
+ </div>
2229
+ <!-- Add spacing here -->
2230
+ <div style="height: 40px;"></div>
2231
+ <div class="section-container">
2232
+ <h2>Good Starting Templates & Prompts</h2>
2233
+ <div class="template-card">
2234
+ <div class="template-item">
2235
+ <div class="template-content">
2236
+ <a href="https://huggingface.co/TheSkullery/Unnamed-Exp-70b-v0.6.A/blob/main/LeCeption-XML-V2.json" target="_blank" class="template-link">
2237
+ LeCeption v2
2238
+ <span class="link-arrow">→</span>
2239
+ </a>
2240
+ <span class="template-author">by @Steel</span> > A revamped XML version of Llam@ception 1.5.2 with stepped thinking and Reasoning added
2241
+ </div>
2242
+ </div>
2243
+ </div>
2244
+ <div class="settings-card">
2245
+ <div class="settings-header">
2246
+ <h3>⚡ ST REASONING CONFIGURATION:</h3>
2247
+ </div>
2248
+ <div class="settings-content">
2249
+ <div class="settings-grid">
2250
+ <div class="settings-card">
2251
+ <div class="settings-header">
2252
+ <h3>Start Reply With: (Either)</h3>
2253
+ </div>
2254
+ <div class="settings-content">
2255
+ <div class="setting-item">
2256
+ <p>'<span style="color: #00b2ff">&lt;think&gt;</span> OK, as an objective, detached narrative analyst, let's think this through carefully:'</p>
2257
+ </div>
2258
+ </div>
2259
+ <div class="settings-content">
2260
+ <div class="setting-item">
2261
+ <p>'<span style="color: #00b2ff">&lt;think&gt;</span> OK, the user is asking'</p>
2262
+ </div>
2263
+ </div>
2264
+ </div>
2265
+ <div class="settings-card">
2266
+ <div class="settings-header">
2267
+ <h3>Reasoning Formatting (no spaces):</h3>
2268
+ </div>
2269
+ <div class="settings-content">
2270
+ <div class="setting-item">
2271
+ <span class="setting-label">Prefix:</span>
2272
+ <span class="setting-value">'<span style="color: #00b2ff">&lt;think&gt;</span>'</span>
2273
+ </div>
2274
+ <div class="setting-item">
2275
+ <span class="setting-label">Suffix:</span>
2276
+ <span class="setting-value">'<span style="color: #00b2ff">&lt;/think&gt;</span>'</span>
2277
+ </div>
2278
+ </div>
2279
+ </div>
2280
+ </div>
2281
+ </div>
2282
+ </div>
2283
+ </div>
2284
+ <div class="support-section">
2285
+ <h2>Support & Community:</h2>
2286
+ <div class="support-buttons">
2287
+ <a href="https://ko-fi.com/Y8Y0AO2XE" target="_blank" class="button">
2288
+ Support on Ko-fi
2289
+ </a>
2290
+ <a href="https://discord.gg/4tCngSm3qZ" target="_blank" class="button">
2291
+ Join Discord
2292
+ </a>
2293
+ </div>
2294
+ <div class="special-thanks">
2295
+ <h3>Special Thanks</h3>
2296
+ <ul class="thanks-list">
2297
+ <li><strong>@Geechan</strong> for feedback and sampler settings</li>
2298
+ <li><strong>@Konnect</strong> for their feedback and templates</li>
2299
+ <li><strong>@Kistara</strong> for their feedback and help with the model mascot design on past models</li>
2300
+ <li><strong>@Thana Alt</strong> for their feedback</li>
2301
+ <li><strong>@Lightning_missile</strong> for their feedback</li>
2302
+ <li><strong>The Arli community</strong> for feedback and testers</li>
2303
+ <li><strong>The BeaverAI community</strong> for feedback and testers</li>
2304
+ </ul>
2305
+ <p class="thanks-note">I wish I could add everyone, but I'm pretty sure it would be as long as the card!</p>
2306
+ </div>
2307
+ </div>
2308
+ </div>
2309
+ </div>
2310
+ </div>
2311
+ </body>
2312
+ </html>
config.json ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "TheSkullery/L3.1x3.3-Hydroblated-R1-70B-v4.4",
3
+ "architectures": [
4
+ "LlamaForCausalLM"
5
+ ],
6
+ "attention_bias": false,
7
+ "attention_dropout": 0.0,
8
+ "bos_token_id": 128000,
9
+ "eos_token_id": [
10
+ 128001,
11
+ 128008,
12
+ 128009
13
+ ],
14
+ "head_dim": 128,
15
+ "hidden_act": "silu",
16
+ "hidden_size": 8192,
17
+ "initializer_range": 0.02,
18
+ "intermediate_size": 28672,
19
+ "max_position_embeddings": 131072,
20
+ "mlp_bias": false,
21
+ "model_type": "llama",
22
+ "num_attention_heads": 64,
23
+ "num_hidden_layers": 80,
24
+ "num_key_value_heads": 8,
25
+ "pretraining_tp": 1,
26
+ "rms_norm_eps": 1e-05,
27
+ "rope_scaling": {
28
+ "factor": 8.0,
29
+ "high_freq_factor": 4.0,
30
+ "low_freq_factor": 1.0,
31
+ "original_max_position_embeddings": 8192,
32
+ "rope_type": "llama3"
33
+ },
34
+ "rope_theta": 500000.0,
35
+ "tie_word_embeddings": false,
36
+ "torch_dtype": "bfloat16",
37
+ "transformers_version": "4.48.2",
38
+ "use_cache": true,
39
+ "vocab_size": 128256,
40
+ "quantization_config": {
41
+ "quant_method": "exl2",
42
+ "version": "0.2.7",
43
+ "bits": 4.5,
44
+ "head_bits": 8,
45
+ "calibration": {
46
+ "rows": 115,
47
+ "length": 2048,
48
+ "dataset": "(default)"
49
+ }
50
+ }
51
+ }
measurement.json ADDED
The diff for this file is too large to render. See raw diff
 
model.safetensors.index.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"metadata": {"mergekit_version": "0.0.6", "total_size": 141107412992}, "weight_map": {"lm_head.weight": "model-00001-of-00030.safetensors", "model.embed_tokens.weight": "model-00001-of-00030.safetensors", "model.layers.0.input_layernorm.weight": "model-00001-of-00030.safetensors", "model.layers.0.mlp.down_proj.weight": "model-00001-of-00030.safetensors", "model.layers.0.mlp.gate_proj.weight": "model-00002-of-00030.safetensors", "model.layers.0.mlp.up_proj.weight": "model-00002-of-00030.safetensors", "model.layers.0.post_attention_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.0.self_attn.k_proj.weight": "model-00002-of-00030.safetensors", "model.layers.0.self_attn.o_proj.weight": "model-00002-of-00030.safetensors", "model.layers.0.self_attn.q_proj.weight": "model-00002-of-00030.safetensors", "model.layers.0.self_attn.v_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.input_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.1.mlp.down_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.mlp.gate_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.mlp.up_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.post_attention_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.1.self_attn.k_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.self_attn.o_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.self_attn.q_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.self_attn.v_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.input_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.10.mlp.down_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.mlp.up_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00030.safetensors", 
"model.layers.10.self_attn.k_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00030.safetensors", "model.layers.11.input_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.11.mlp.down_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.mlp.gate_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.mlp.up_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.post_attention_layernorm.weight": "model-00003-of-00030.safetensors", "model.layers.11.self_attn.k_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.self_attn.o_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.self_attn.q_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.self_attn.v_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.input_layernorm.weight": "model-00003-of-00030.safetensors", "model.layers.12.mlp.down_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.mlp.gate_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.mlp.up_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.post_attention_layernorm.weight": "model-00003-of-00030.safetensors", "model.layers.12.self_attn.k_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.self_attn.o_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.self_attn.q_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.self_attn.v_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.input_layernorm.weight": "model-00003-of-00030.safetensors", "model.layers.13.mlp.down_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.mlp.gate_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.mlp.up_proj.weight": 
"model-00003-of-00030.safetensors", "model.layers.13.post_attention_layernorm.weight": "model-00003-of-00030.safetensors", "model.layers.13.self_attn.k_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.self_attn.o_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.self_attn.q_proj.weight": "model-00004-of-00030.safetensors", "model.layers.13.self_attn.v_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.input_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.14.mlp.down_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.mlp.gate_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.mlp.up_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.post_attention_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.14.self_attn.k_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.self_attn.o_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.self_attn.q_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.self_attn.v_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.input_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.15.mlp.down_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.mlp.gate_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.mlp.up_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.post_attention_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.15.self_attn.k_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.self_attn.o_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.self_attn.q_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.self_attn.v_proj.weight": "model-00004-of-00030.safetensors", "model.layers.16.input_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.16.mlp.down_proj.weight": 
"model-00004-of-00030.safetensors", "model.layers.16.mlp.gate_proj.weight": "model-00004-of-00030.safetensors", "model.layers.16.mlp.up_proj.weight": "model-00004-of-00030.safetensors", "model.layers.16.post_attention_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.16.self_attn.k_proj.weight": "model-00004-of-00030.safetensors", "model.layers.16.self_attn.o_proj.weight": "model-00005-of-00030.safetensors", "model.layers.16.self_attn.q_proj.weight": "model-00005-of-00030.safetensors", "model.layers.16.self_attn.v_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.input_layernorm.weight": "model-00005-of-00030.safetensors", "model.layers.17.mlp.down_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.mlp.gate_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.mlp.up_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.post_attention_layernorm.weight": "model-00005-of-00030.safetensors", "model.layers.17.self_attn.k_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.self_attn.o_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.self_attn.q_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.self_attn.v_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.input_layernorm.weight": "model-00005-of-00030.safetensors", "model.layers.18.mlp.down_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.mlp.gate_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.mlp.up_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.post_attention_layernorm.weight": "model-00005-of-00030.safetensors", "model.layers.18.self_attn.k_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.self_attn.o_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.self_attn.q_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.self_attn.v_proj.weight": 
"model-00005-of-00030.safetensors", "model.layers.19.input_layernorm.weight": "model-00005-of-00030.safetensors", "model.layers.19.mlp.down_proj.weight": "model-00005-of-00030.safetensors", "model.layers.19.mlp.gate_proj.weight": "model-00005-of-00030.safetensors", "model.layers.19.mlp.up_proj.weight": "model-00006-of-00030.safetensors", "model.layers.19.post_attention_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.19.self_attn.k_proj.weight": "model-00006-of-00030.safetensors", "model.layers.19.self_attn.o_proj.weight": "model-00006-of-00030.safetensors", "model.layers.19.self_attn.q_proj.weight": "model-00006-of-00030.safetensors", "model.layers.19.self_attn.v_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.input_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.2.mlp.down_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.mlp.gate_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.mlp.up_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.post_attention_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.2.self_attn.k_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.self_attn.o_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.self_attn.q_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.self_attn.v_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.input_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.20.mlp.down_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.mlp.gate_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.mlp.up_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.post_attention_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.20.self_attn.k_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.self_attn.o_proj.weight": "model-00006-of-00030.safetensors", 
"model.layers.20.self_attn.q_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.self_attn.v_proj.weight": "model-00006-of-00030.safetensors", "model.layers.21.input_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.21.mlp.down_proj.weight": "model-00006-of-00030.safetensors", "model.layers.21.mlp.gate_proj.weight": "model-00007-of-00030.safetensors", "model.layers.21.mlp.up_proj.weight": "model-00007-of-00030.safetensors", "model.layers.21.post_attention_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.21.self_attn.k_proj.weight": "model-00007-of-00030.safetensors", "model.layers.21.self_attn.o_proj.weight": "model-00007-of-00030.safetensors", "model.layers.21.self_attn.q_proj.weight": "model-00007-of-00030.safetensors", "model.layers.21.self_attn.v_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.input_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.22.mlp.down_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.mlp.gate_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.mlp.up_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.post_attention_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.22.self_attn.k_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.self_attn.o_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.self_attn.q_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.self_attn.v_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.input_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.23.mlp.down_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.mlp.gate_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.mlp.up_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.post_attention_layernorm.weight": "model-00007-of-00030.safetensors", 
"model.layers.23.self_attn.k_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.self_attn.o_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.self_attn.q_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.self_attn.v_proj.weight": "model-00007-of-00030.safetensors", "model.layers.24.input_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.24.mlp.down_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.mlp.gate_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.mlp.up_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.post_attention_layernorm.weight": "model-00008-of-00030.safetensors", "model.layers.24.self_attn.k_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.self_attn.o_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.self_attn.q_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.self_attn.v_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.input_layernorm.weight": "model-00008-of-00030.safetensors", "model.layers.25.mlp.down_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.mlp.gate_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.mlp.up_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.post_attention_layernorm.weight": "model-00008-of-00030.safetensors", "model.layers.25.self_attn.k_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.self_attn.o_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.self_attn.q_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.self_attn.v_proj.weight": "model-00008-of-00030.safetensors", "model.layers.26.input_layernorm.weight": "model-00008-of-00030.safetensors", "model.layers.26.mlp.down_proj.weight": "model-00008-of-00030.safetensors", "model.layers.26.mlp.gate_proj.weight": "model-00008-of-00030.safetensors", "model.layers.26.mlp.up_proj.weight": 
"model-00008-of-00030.safetensors", "model.layers.26.post_attention_layernorm.weight": "model-00008-of-00030.safetensors", "model.layers.26.self_attn.k_proj.weight": "model-00008-of-00030.safetensors", "model.layers.26.self_attn.o_proj.weight": "model-00008-of-00030.safetensors", "model.layers.26.self_attn.q_proj.weight": "model-00009-of-00030.safetensors", "model.layers.26.self_attn.v_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.input_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.27.mlp.down_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.mlp.gate_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.mlp.up_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.post_attention_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.27.self_attn.k_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.self_attn.o_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.self_attn.q_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.self_attn.v_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.input_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.28.mlp.down_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.mlp.gate_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.mlp.up_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.post_attention_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.28.self_attn.k_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.self_attn.o_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.self_attn.q_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.self_attn.v_proj.weight": "model-00009-of-00030.safetensors", "model.layers.29.input_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.29.mlp.down_proj.weight": 
"model-00009-of-00030.safetensors", "model.layers.29.mlp.gate_proj.weight": "model-00009-of-00030.safetensors", "model.layers.29.mlp.up_proj.weight": "model-00009-of-00030.safetensors", "model.layers.29.post_attention_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.29.self_attn.k_proj.weight": "model-00009-of-00030.safetensors", "model.layers.29.self_attn.o_proj.weight": "model-00010-of-00030.safetensors", "model.layers.29.self_attn.q_proj.weight": "model-00010-of-00030.safetensors", "model.layers.29.self_attn.v_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.input_layernorm.weight": "model-00010-of-00030.safetensors", "model.layers.3.mlp.down_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.mlp.gate_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.mlp.up_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.post_attention_layernorm.weight": "model-00010-of-00030.safetensors", "model.layers.3.self_attn.k_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.self_attn.o_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.self_attn.q_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.self_attn.v_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.input_layernorm.weight": "model-00010-of-00030.safetensors", "model.layers.30.mlp.down_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.mlp.gate_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.mlp.up_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.post_attention_layernorm.weight": "model-00010-of-00030.safetensors", "model.layers.30.self_attn.k_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.self_attn.o_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.self_attn.q_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.self_attn.v_proj.weight": "model-00010-of-00030.safetensors", 
"model.layers.31.input_layernorm.weight": "model-00010-of-00030.safetensors", "model.layers.31.mlp.down_proj.weight": "model-00010-of-00030.safetensors", "model.layers.31.mlp.gate_proj.weight": "model-00010-of-00030.safetensors", "model.layers.31.mlp.up_proj.weight": "model-00011-of-00030.safetensors", "model.layers.31.post_attention_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.31.self_attn.k_proj.weight": "model-00011-of-00030.safetensors", "model.layers.31.self_attn.o_proj.weight": "model-00011-of-00030.safetensors", "model.layers.31.self_attn.q_proj.weight": "model-00011-of-00030.safetensors", "model.layers.31.self_attn.v_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.input_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.32.mlp.down_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.mlp.gate_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.mlp.up_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.post_attention_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.32.self_attn.k_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.self_attn.o_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.self_attn.q_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.self_attn.v_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.input_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.33.mlp.down_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.mlp.gate_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.mlp.up_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.post_attention_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.33.self_attn.k_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.self_attn.o_proj.weight": "model-00011-of-00030.safetensors", 
"model.layers.33.self_attn.q_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.self_attn.v_proj.weight": "model-00011-of-00030.safetensors", "model.layers.34.input_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.34.mlp.down_proj.weight": "model-00011-of-00030.safetensors", "model.layers.34.mlp.gate_proj.weight": "model-00012-of-00030.safetensors", "model.layers.34.mlp.up_proj.weight": "model-00012-of-00030.safetensors", "model.layers.34.post_attention_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.34.self_attn.k_proj.weight": "model-00012-of-00030.safetensors", "model.layers.34.self_attn.o_proj.weight": "model-00012-of-00030.safetensors", "model.layers.34.self_attn.q_proj.weight": "model-00012-of-00030.safetensors", "model.layers.34.self_attn.v_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.input_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.35.mlp.down_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.mlp.gate_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.mlp.up_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.post_attention_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.35.self_attn.k_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.self_attn.o_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.self_attn.q_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.self_attn.v_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.input_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.36.mlp.down_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.mlp.gate_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.mlp.up_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.post_attention_layernorm.weight": "model-00012-of-00030.safetensors", 
"model.layers.36.self_attn.k_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.self_attn.o_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.self_attn.q_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.self_attn.v_proj.weight": "model-00012-of-00030.safetensors", "model.layers.37.input_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.37.mlp.down_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.mlp.gate_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.mlp.up_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.post_attention_layernorm.weight": "model-00013-of-00030.safetensors", "model.layers.37.self_attn.k_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.self_attn.o_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.self_attn.q_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.self_attn.v_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.input_layernorm.weight": "model-00013-of-00030.safetensors", "model.layers.38.mlp.down_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.mlp.gate_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.mlp.up_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.post_attention_layernorm.weight": "model-00013-of-00030.safetensors", "model.layers.38.self_attn.k_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.self_attn.o_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.self_attn.q_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.self_attn.v_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.input_layernorm.weight": "model-00013-of-00030.safetensors", "model.layers.39.mlp.down_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.mlp.gate_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.mlp.up_proj.weight": 
"model-00013-of-00030.safetensors", "model.layers.39.post_attention_layernorm.weight": "model-00013-of-00030.safetensors", "model.layers.39.self_attn.k_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.self_attn.o_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.self_attn.q_proj.weight": "model-00014-of-00030.safetensors", "model.layers.39.self_attn.v_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.input_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.4.mlp.down_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.mlp.gate_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.mlp.up_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.post_attention_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.4.self_attn.k_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.self_attn.o_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.self_attn.q_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.self_attn.v_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.input_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.40.mlp.down_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.mlp.gate_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.mlp.up_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.post_attention_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.40.self_attn.k_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.self_attn.o_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.self_attn.q_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.self_attn.v_proj.weight": "model-00014-of-00030.safetensors", "model.layers.41.input_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.41.mlp.down_proj.weight": "model-00014-of-00030.safetensors", 
"model.layers.41.mlp.gate_proj.weight": "model-00014-of-00030.safetensors", "model.layers.41.mlp.up_proj.weight": "model-00014-of-00030.safetensors", "model.layers.41.post_attention_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.41.self_attn.k_proj.weight": "model-00014-of-00030.safetensors", "model.layers.41.self_attn.o_proj.weight": "model-00015-of-00030.safetensors", "model.layers.41.self_attn.q_proj.weight": "model-00015-of-00030.safetensors", "model.layers.41.self_attn.v_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.input_layernorm.weight": "model-00015-of-00030.safetensors", "model.layers.42.mlp.down_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.mlp.gate_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.mlp.up_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.post_attention_layernorm.weight": "model-00015-of-00030.safetensors", "model.layers.42.self_attn.k_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.self_attn.o_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.self_attn.q_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.self_attn.v_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.input_layernorm.weight": "model-00015-of-00030.safetensors", "model.layers.43.mlp.down_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.mlp.gate_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.mlp.up_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.post_attention_layernorm.weight": "model-00015-of-00030.safetensors", "model.layers.43.self_attn.k_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.self_attn.o_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.self_attn.q_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.self_attn.v_proj.weight": "model-00015-of-00030.safetensors", 
"model.layers.44.input_layernorm.weight": "model-00015-of-00030.safetensors", "model.layers.44.mlp.down_proj.weight": "model-00015-of-00030.safetensors", "model.layers.44.mlp.gate_proj.weight": "model-00015-of-00030.safetensors", "model.layers.44.mlp.up_proj.weight": "model-00016-of-00030.safetensors", "model.layers.44.post_attention_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.44.self_attn.k_proj.weight": "model-00016-of-00030.safetensors", "model.layers.44.self_attn.o_proj.weight": "model-00016-of-00030.safetensors", "model.layers.44.self_attn.q_proj.weight": "model-00016-of-00030.safetensors", "model.layers.44.self_attn.v_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.input_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.45.mlp.down_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.mlp.gate_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.mlp.up_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.post_attention_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.45.self_attn.k_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.self_attn.o_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.self_attn.q_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.self_attn.v_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.input_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.46.mlp.down_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.mlp.gate_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.mlp.up_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.post_attention_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.46.self_attn.k_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.self_attn.o_proj.weight": "model-00016-of-00030.safetensors", 
"model.layers.46.self_attn.q_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.self_attn.v_proj.weight": "model-00016-of-00030.safetensors", "model.layers.47.input_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.47.mlp.down_proj.weight": "model-00016-of-00030.safetensors", "model.layers.47.mlp.gate_proj.weight": "model-00017-of-00030.safetensors", "model.layers.47.mlp.up_proj.weight": "model-00017-of-00030.safetensors", "model.layers.47.post_attention_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.47.self_attn.k_proj.weight": "model-00017-of-00030.safetensors", "model.layers.47.self_attn.o_proj.weight": "model-00017-of-00030.safetensors", "model.layers.47.self_attn.q_proj.weight": "model-00017-of-00030.safetensors", "model.layers.47.self_attn.v_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.input_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.48.mlp.down_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.mlp.gate_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.mlp.up_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.post_attention_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.48.self_attn.k_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.self_attn.o_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.self_attn.q_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.self_attn.v_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.input_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.49.mlp.down_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.mlp.gate_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.mlp.up_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.post_attention_layernorm.weight": "model-00017-of-00030.safetensors", 
"model.layers.49.self_attn.k_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.self_attn.o_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.self_attn.q_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.self_attn.v_proj.weight": "model-00017-of-00030.safetensors", "model.layers.5.input_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.5.mlp.down_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.mlp.gate_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.mlp.up_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.post_attention_layernorm.weight": "model-00018-of-00030.safetensors", "model.layers.5.self_attn.k_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.self_attn.o_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.self_attn.q_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.self_attn.v_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.input_layernorm.weight": "model-00018-of-00030.safetensors", "model.layers.50.mlp.down_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.mlp.gate_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.mlp.up_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.post_attention_layernorm.weight": "model-00018-of-00030.safetensors", "model.layers.50.self_attn.k_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.self_attn.o_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.self_attn.q_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.self_attn.v_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.input_layernorm.weight": "model-00018-of-00030.safetensors", "model.layers.51.mlp.down_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.mlp.gate_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.mlp.up_proj.weight": 
"model-00018-of-00030.safetensors", "model.layers.51.post_attention_layernorm.weight": "model-00018-of-00030.safetensors", "model.layers.51.self_attn.k_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.self_attn.o_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.self_attn.q_proj.weight": "model-00019-of-00030.safetensors", "model.layers.51.self_attn.v_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.input_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.52.mlp.down_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.mlp.gate_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.mlp.up_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.post_attention_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.52.self_attn.k_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.self_attn.o_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.self_attn.q_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.self_attn.v_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.input_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.53.mlp.down_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.mlp.gate_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.mlp.up_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.post_attention_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.53.self_attn.k_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.self_attn.o_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.self_attn.q_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.self_attn.v_proj.weight": "model-00019-of-00030.safetensors", "model.layers.54.input_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.54.mlp.down_proj.weight": 
"model-00019-of-00030.safetensors", "model.layers.54.mlp.gate_proj.weight": "model-00019-of-00030.safetensors", "model.layers.54.mlp.up_proj.weight": "model-00019-of-00030.safetensors", "model.layers.54.post_attention_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.54.self_attn.k_proj.weight": "model-00019-of-00030.safetensors", "model.layers.54.self_attn.o_proj.weight": "model-00020-of-00030.safetensors", "model.layers.54.self_attn.q_proj.weight": "model-00020-of-00030.safetensors", "model.layers.54.self_attn.v_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.input_layernorm.weight": "model-00020-of-00030.safetensors", "model.layers.55.mlp.down_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.mlp.gate_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.mlp.up_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.post_attention_layernorm.weight": "model-00020-of-00030.safetensors", "model.layers.55.self_attn.k_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.self_attn.o_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.self_attn.q_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.self_attn.v_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.input_layernorm.weight": "model-00020-of-00030.safetensors", "model.layers.56.mlp.down_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.mlp.gate_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.mlp.up_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.post_attention_layernorm.weight": "model-00020-of-00030.safetensors", "model.layers.56.self_attn.k_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.self_attn.o_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.self_attn.q_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.self_attn.v_proj.weight": 
"model-00020-of-00030.safetensors", "model.layers.57.input_layernorm.weight": "model-00020-of-00030.safetensors", "model.layers.57.mlp.down_proj.weight": "model-00020-of-00030.safetensors", "model.layers.57.mlp.gate_proj.weight": "model-00020-of-00030.safetensors", "model.layers.57.mlp.up_proj.weight": "model-00021-of-00030.safetensors", "model.layers.57.post_attention_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.57.self_attn.k_proj.weight": "model-00021-of-00030.safetensors", "model.layers.57.self_attn.o_proj.weight": "model-00021-of-00030.safetensors", "model.layers.57.self_attn.q_proj.weight": "model-00021-of-00030.safetensors", "model.layers.57.self_attn.v_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.input_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.58.mlp.down_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.mlp.gate_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.mlp.up_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.post_attention_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.58.self_attn.k_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.self_attn.o_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.self_attn.q_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.self_attn.v_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.input_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.59.mlp.down_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.mlp.gate_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.mlp.up_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.post_attention_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.59.self_attn.k_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.self_attn.o_proj.weight": "model-00021-of-00030.safetensors", 
"model.layers.59.self_attn.q_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.self_attn.v_proj.weight": "model-00021-of-00030.safetensors", "model.layers.6.input_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.6.mlp.down_proj.weight": "model-00021-of-00030.safetensors", "model.layers.6.mlp.gate_proj.weight": "model-00022-of-00030.safetensors", "model.layers.6.mlp.up_proj.weight": "model-00022-of-00030.safetensors", "model.layers.6.post_attention_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.6.self_attn.k_proj.weight": "model-00022-of-00030.safetensors", "model.layers.6.self_attn.o_proj.weight": "model-00022-of-00030.safetensors", "model.layers.6.self_attn.q_proj.weight": "model-00022-of-00030.safetensors", "model.layers.6.self_attn.v_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.input_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.60.mlp.down_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.mlp.gate_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.mlp.up_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.post_attention_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.60.self_attn.k_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.self_attn.o_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.self_attn.q_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.self_attn.v_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.input_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.61.mlp.down_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.mlp.gate_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.mlp.up_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.post_attention_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.61.self_attn.k_proj.weight": 
"model-00022-of-00030.safetensors", "model.layers.61.self_attn.o_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.self_attn.q_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.self_attn.v_proj.weight": "model-00022-of-00030.safetensors", "model.layers.62.input_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.62.mlp.down_proj.weight": "model-00023-of-00030.safetensors", "model.layers.62.mlp.gate_proj.weight": "model-00023-of-00030.safetensors", "model.layers.62.mlp.up_proj.weight": "model-00023-of-00030.safetensors", "model.layers.62.post_attention_layernorm.weight": "model-00023-of-00030.safetensors", "model.layers.62.self_attn.k_proj.weight": "model-00023-of-00030.safetensors", "model.layers.62.self_attn.o_proj.weight": "model-00023-of-00030.safetensors", "model.layers.62.self_attn.q_proj.weight": "model-00023-of-00030.safetensors", "model.layers.62.self_attn.v_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.input_layernorm.weight": "model-00023-of-00030.safetensors", "model.layers.63.mlp.down_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.mlp.gate_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.mlp.up_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.post_attention_layernorm.weight": "model-00023-of-00030.safetensors", "model.layers.63.self_attn.k_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.self_attn.o_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.self_attn.q_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.self_attn.v_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.input_layernorm.weight": "model-00023-of-00030.safetensors", "model.layers.64.mlp.down_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.mlp.gate_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.mlp.up_proj.weight": "model-00023-of-00030.safetensors", 
"model.layers.64.post_attention_layernorm.weight": "model-00023-of-00030.safetensors", "model.layers.64.self_attn.k_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.self_attn.o_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.self_attn.q_proj.weight": "model-00024-of-00030.safetensors", "model.layers.64.self_attn.v_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.input_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.65.mlp.down_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.mlp.gate_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.mlp.up_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.post_attention_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.65.self_attn.k_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.self_attn.o_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.self_attn.q_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.self_attn.v_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.input_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.66.mlp.down_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.mlp.gate_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.mlp.up_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.post_attention_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.66.self_attn.k_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.self_attn.o_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.self_attn.q_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.self_attn.v_proj.weight": "model-00024-of-00030.safetensors", "model.layers.67.input_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.67.mlp.down_proj.weight": "model-00024-of-00030.safetensors", 
"model.layers.67.mlp.gate_proj.weight": "model-00024-of-00030.safetensors", "model.layers.67.mlp.up_proj.weight": "model-00024-of-00030.safetensors", "model.layers.67.post_attention_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.67.self_attn.k_proj.weight": "model-00024-of-00030.safetensors", "model.layers.67.self_attn.o_proj.weight": "model-00025-of-00030.safetensors", "model.layers.67.self_attn.q_proj.weight": "model-00025-of-00030.safetensors", "model.layers.67.self_attn.v_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.input_layernorm.weight": "model-00025-of-00030.safetensors", "model.layers.68.mlp.down_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.mlp.gate_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.mlp.up_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.post_attention_layernorm.weight": "model-00025-of-00030.safetensors", "model.layers.68.self_attn.k_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.self_attn.o_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.self_attn.q_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.self_attn.v_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.input_layernorm.weight": "model-00025-of-00030.safetensors", "model.layers.69.mlp.down_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.mlp.gate_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.mlp.up_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.post_attention_layernorm.weight": "model-00025-of-00030.safetensors", "model.layers.69.self_attn.k_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.self_attn.o_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.self_attn.q_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.self_attn.v_proj.weight": "model-00025-of-00030.safetensors", 
"model.layers.7.input_layernorm.weight": "model-00025-of-00030.safetensors", "model.layers.7.mlp.down_proj.weight": "model-00025-of-00030.safetensors", "model.layers.7.mlp.gate_proj.weight": "model-00025-of-00030.safetensors", "model.layers.7.mlp.up_proj.weight": "model-00026-of-00030.safetensors", "model.layers.7.post_attention_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.7.self_attn.k_proj.weight": "model-00026-of-00030.safetensors", "model.layers.7.self_attn.o_proj.weight": "model-00026-of-00030.safetensors", "model.layers.7.self_attn.q_proj.weight": "model-00026-of-00030.safetensors", "model.layers.7.self_attn.v_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.input_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.70.mlp.down_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.mlp.gate_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.mlp.up_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.post_attention_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.70.self_attn.k_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.self_attn.o_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.self_attn.q_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.self_attn.v_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.input_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.71.mlp.down_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.mlp.gate_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.mlp.up_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.post_attention_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.71.self_attn.k_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.self_attn.o_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.self_attn.q_proj.weight": 
"model-00026-of-00030.safetensors", "model.layers.71.self_attn.v_proj.weight": "model-00026-of-00030.safetensors", "model.layers.72.input_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.72.mlp.down_proj.weight": "model-00026-of-00030.safetensors", "model.layers.72.mlp.gate_proj.weight": "model-00027-of-00030.safetensors", "model.layers.72.mlp.up_proj.weight": "model-00027-of-00030.safetensors", "model.layers.72.post_attention_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.72.self_attn.k_proj.weight": "model-00027-of-00030.safetensors", "model.layers.72.self_attn.o_proj.weight": "model-00027-of-00030.safetensors", "model.layers.72.self_attn.q_proj.weight": "model-00027-of-00030.safetensors", "model.layers.72.self_attn.v_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.input_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.73.mlp.down_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.mlp.gate_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.mlp.up_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.post_attention_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.73.self_attn.k_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.self_attn.o_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.self_attn.q_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.self_attn.v_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.input_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.74.mlp.down_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.mlp.gate_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.mlp.up_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.post_attention_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.74.self_attn.k_proj.weight": "model-00027-of-00030.safetensors", 
"model.layers.74.self_attn.o_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.self_attn.q_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.self_attn.v_proj.weight": "model-00027-of-00030.safetensors", "model.layers.75.input_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.75.mlp.down_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.mlp.gate_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.mlp.up_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.post_attention_layernorm.weight": "model-00028-of-00030.safetensors", "model.layers.75.self_attn.k_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.self_attn.o_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.self_attn.q_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.self_attn.v_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.input_layernorm.weight": "model-00028-of-00030.safetensors", "model.layers.76.mlp.down_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.mlp.gate_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.mlp.up_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.post_attention_layernorm.weight": "model-00028-of-00030.safetensors", "model.layers.76.self_attn.k_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.self_attn.o_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.self_attn.q_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.self_attn.v_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.input_layernorm.weight": "model-00028-of-00030.safetensors", "model.layers.77.mlp.down_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.mlp.gate_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.mlp.up_proj.weight": "model-00028-of-00030.safetensors", 
"model.layers.77.post_attention_layernorm.weight": "model-00028-of-00030.safetensors", "model.layers.77.self_attn.k_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.self_attn.o_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.self_attn.q_proj.weight": "model-00029-of-00030.safetensors", "model.layers.77.self_attn.v_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.input_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.78.mlp.down_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.mlp.gate_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.mlp.up_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.post_attention_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.78.self_attn.k_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.self_attn.o_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.self_attn.q_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.self_attn.v_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.input_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.79.mlp.down_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.mlp.gate_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.mlp.up_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.post_attention_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.79.self_attn.k_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.self_attn.o_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.self_attn.q_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.self_attn.v_proj.weight": "model-00029-of-00030.safetensors", "model.layers.8.input_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.8.mlp.down_proj.weight": "model-00029-of-00030.safetensors", 
"model.layers.8.mlp.gate_proj.weight": "model-00029-of-00030.safetensors", "model.layers.8.mlp.up_proj.weight": "model-00029-of-00030.safetensors", "model.layers.8.post_attention_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.8.self_attn.k_proj.weight": "model-00029-of-00030.safetensors", "model.layers.8.self_attn.o_proj.weight": "model-00030-of-00030.safetensors", "model.layers.8.self_attn.q_proj.weight": "model-00030-of-00030.safetensors", "model.layers.8.self_attn.v_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.input_layernorm.weight": "model-00030-of-00030.safetensors", "model.layers.9.mlp.down_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.mlp.gate_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.mlp.up_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.post_attention_layernorm.weight": "model-00030-of-00030.safetensors", "model.layers.9.self_attn.k_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.self_attn.o_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.self_attn.q_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.self_attn.v_proj.weight": "model-00030-of-00030.safetensors", "model.norm.weight": "model-00030-of-00030.safetensors"}}
output-00001-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8e00c22a9fae89a9cf7b4c4a11a16d16d1714cdd507cbc98b59030a0e1e6d067
3
+ size 8518396950
output-00002-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:238c4b67552267601e6b64fc39e09203a995e05fadccaaba926b0b896acba52c
3
+ size 8539826336
output-00003-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a94694002ccf057d721bc761bef947dd3247d611891d3aeb0dc60f998ba2e326
3
+ size 8491897644
output-00004-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d3a5b2f6ea3e0e6f4d843124e3bac579392e530d322371ea5291c7a331035646
3
+ size 8524240440
output-00005-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:67c75ea50568ddc294d90e39371beee9d723ff6c181dc47360954b45a39fb310
3
+ size 7579306816
special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token": {
3
+ "content": "<|begin_of_text|>",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "eos_token": {
10
+ "content": "<|eot_id|>",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "pad_token": {
17
+ "content": "<|finetune_right_pad_id|>",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ }
23
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
3
+ size 17209920
tokenizer_config.json ADDED
@@ -0,0 +1,2063 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "added_tokens_decoder": {
3
+ "128000": {
4
+ "content": "<|begin_of_text|>",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "128001": {
12
+ "content": "<|end_of_text|>",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "128002": {
20
+ "content": "<|reserved_special_token_0|>",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "128003": {
28
+ "content": "<|reserved_special_token_1|>",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "128004": {
36
+ "content": "<|finetune_right_pad_id|>",
37
+ "lstrip": false,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": true
42
+ },
43
+ "128005": {
44
+ "content": "<|reserved_special_token_2|>",
45
+ "lstrip": false,
46
+ "normalized": false,
47
+ "rstrip": false,
48
+ "single_word": false,
49
+ "special": true
50
+ },
51
+ "128006": {
52
+ "content": "<|start_header_id|>",
53
+ "lstrip": false,
54
+ "normalized": false,
55
+ "rstrip": false,
56
+ "single_word": false,
57
+ "special": true
58
+ },
59
+ "128007": {
60
+ "content": "<|end_header_id|>",
61
+ "lstrip": false,
62
+ "normalized": false,
63
+ "rstrip": false,
64
+ "single_word": false,
65
+ "special": true
66
+ },
67
+ "128008": {
68
+ "content": "<|eom_id|>",
69
+ "lstrip": false,
70
+ "normalized": false,
71
+ "rstrip": false,
72
+ "single_word": false,
73
+ "special": true
74
+ },
75
+ "128009": {
76
+ "content": "<|eot_id|>",
77
+ "lstrip": false,
78
+ "normalized": false,
79
+ "rstrip": false,
80
+ "single_word": false,
81
+ "special": true
82
+ },
83
+ "128010": {
84
+ "content": "<|python_tag|>",
85
+ "lstrip": false,
86
+ "normalized": false,
87
+ "rstrip": false,
88
+ "single_word": false,
89
+ "special": true
90
+ },
91
+ "128011": {
92
+ "content": "<|reserved_special_token_3|>",
93
+ "lstrip": false,
94
+ "normalized": false,
95
+ "rstrip": false,
96
+ "single_word": false,
97
+ "special": true
98
+ },
99
+ "128012": {
100
+ "content": "<|reserved_special_token_4|>",
101
+ "lstrip": false,
102
+ "normalized": false,
103
+ "rstrip": false,
104
+ "single_word": false,
105
+ "special": true
106
+ },
107
+ "128013": {
108
+ "content": "<|reserved_special_token_5|>",
109
+ "lstrip": false,
110
+ "normalized": false,
111
+ "rstrip": false,
112
+ "single_word": false,
113
+ "special": true
114
+ },
115
+ "128014": {
116
+ "content": "<|reserved_special_token_6|>",
117
+ "lstrip": false,
118
+ "normalized": false,
119
+ "rstrip": false,
120
+ "single_word": false,
121
+ "special": true
122
+ },
123
+ "128015": {
124
+ "content": "<|reserved_special_token_7|>",
125
+ "lstrip": false,
126
+ "normalized": false,
127
+ "rstrip": false,
128
+ "single_word": false,
129
+ "special": true
130
+ },
131
+ "128016": {
132
+ "content": "<|reserved_special_token_8|>",
133
+ "lstrip": false,
134
+ "normalized": false,
135
+ "rstrip": false,
136
+ "single_word": false,
137
+ "special": true
138
+ },
139
+ "128017": {
140
+ "content": "<|reserved_special_token_9|>",
141
+ "lstrip": false,
142
+ "normalized": false,
143
+ "rstrip": false,
144
+ "single_word": false,
145
+ "special": true
146
+ },
147
+ "128018": {
148
+ "content": "<|reserved_special_token_10|>",
149
+ "lstrip": false,
150
+ "normalized": false,
151
+ "rstrip": false,
152
+ "single_word": false,
153
+ "special": true
154
+ },
155
+ "128019": {
156
+ "content": "<|reserved_special_token_11|>",
157
+ "lstrip": false,
158
+ "normalized": false,
159
+ "rstrip": false,
160
+ "single_word": false,
161
+ "special": true
162
+ },
163
+ "128020": {
164
+ "content": "<|reserved_special_token_12|>",
165
+ "lstrip": false,
166
+ "normalized": false,
167
+ "rstrip": false,
168
+ "single_word": false,
169
+ "special": true
170
+ },
171
+ "128021": {
172
+ "content": "<|reserved_special_token_13|>",
173
+ "lstrip": false,
174
+ "normalized": false,
175
+ "rstrip": false,
176
+ "single_word": false,
177
+ "special": true
178
+ },
179
+ "128022": {
180
+ "content": "<|reserved_special_token_14|>",
181
+ "lstrip": false,
182
+ "normalized": false,
183
+ "rstrip": false,
184
+ "single_word": false,
185
+ "special": true
186
+ },
187
+ "128023": {
188
+ "content": "<|reserved_special_token_15|>",
189
+ "lstrip": false,
190
+ "normalized": false,
191
+ "rstrip": false,
192
+ "single_word": false,
193
+ "special": true
194
+ },
195
+ "128024": {
196
+ "content": "<|reserved_special_token_16|>",
197
+ "lstrip": false,
198
+ "normalized": false,
199
+ "rstrip": false,
200
+ "single_word": false,
201
+ "special": true
202
+ },
203
+ "128025": {
204
+ "content": "<|reserved_special_token_17|>",
205
+ "lstrip": false,
206
+ "normalized": false,
207
+ "rstrip": false,
208
+ "single_word": false,
209
+ "special": true
210
+ },
211
+ "128026": {
212
+ "content": "<|reserved_special_token_18|>",
213
+ "lstrip": false,
214
+ "normalized": false,
215
+ "rstrip": false,
216
+ "single_word": false,
217
+ "special": true
218
+ },
219
+ "128027": {
220
+ "content": "<|reserved_special_token_19|>",
221
+ "lstrip": false,
222
+ "normalized": false,
223
+ "rstrip": false,
224
+ "single_word": false,
225
+ "special": true
226
+ },
227
+ "128028": {
228
+ "content": "<|reserved_special_token_20|>",
229
+ "lstrip": false,
230
+ "normalized": false,
231
+ "rstrip": false,
232
+ "single_word": false,
233
+ "special": true
234
+ },
235
+ "128029": {
236
+ "content": "<|reserved_special_token_21|>",
237
+ "lstrip": false,
238
+ "normalized": false,
239
+ "rstrip": false,
240
+ "single_word": false,
241
+ "special": true
242
+ },
243
+ "128030": {
244
+ "content": "<|reserved_special_token_22|>",
245
+ "lstrip": false,
246
+ "normalized": false,
247
+ "rstrip": false,
248
+ "single_word": false,
249
+ "special": true
250
+ },
251
+ "128031": {
252
+ "content": "<|reserved_special_token_23|>",
253
+ "lstrip": false,
254
+ "normalized": false,
255
+ "rstrip": false,
256
+ "single_word": false,
257
+ "special": true
258
+ },
259
+ "128032": {
260
+ "content": "<|reserved_special_token_24|>",
261
+ "lstrip": false,
262
+ "normalized": false,
263
+ "rstrip": false,
264
+ "single_word": false,
265
+ "special": true
266
+ },
267
+ "128033": {
268
+ "content": "<|reserved_special_token_25|>",
269
+ "lstrip": false,
270
+ "normalized": false,
271
+ "rstrip": false,
272
+ "single_word": false,
273
+ "special": true
274
+ },
275
+ "128034": {
276
+ "content": "<|reserved_special_token_26|>",
277
+ "lstrip": false,
278
+ "normalized": false,
279
+ "rstrip": false,
280
+ "single_word": false,
281
+ "special": true
282
+ },
283
+ "128035": {
284
+ "content": "<|reserved_special_token_27|>",
285
+ "lstrip": false,
286
+ "normalized": false,
287
+ "rstrip": false,
288
+ "single_word": false,
289
+ "special": true
290
+ },
291
+ "128036": {
292
+ "content": "<|reserved_special_token_28|>",
293
+ "lstrip": false,
294
+ "normalized": false,
295
+ "rstrip": false,
296
+ "single_word": false,
297
+ "special": true
298
+ },
299
+ "128037": {
300
+ "content": "<|reserved_special_token_29|>",
301
+ "lstrip": false,
302
+ "normalized": false,
303
+ "rstrip": false,
304
+ "single_word": false,
305
+ "special": true
306
+ },
307
+ "128038": {
308
+ "content": "<|reserved_special_token_30|>",
309
+ "lstrip": false,
310
+ "normalized": false,
311
+ "rstrip": false,
312
+ "single_word": false,
313
+ "special": true
314
+ },
315
+ "128039": {
316
+ "content": "<|reserved_special_token_31|>",
317
+ "lstrip": false,
318
+ "normalized": false,
319
+ "rstrip": false,
320
+ "single_word": false,
321
+ "special": true
322
+ },
323
+ "128040": {
324
+ "content": "<|reserved_special_token_32|>",
325
+ "lstrip": false,
326
+ "normalized": false,
327
+ "rstrip": false,
328
+ "single_word": false,
329
+ "special": true
330
+ },
331
+ "128041": {
332
+ "content": "<|reserved_special_token_33|>",
333
+ "lstrip": false,
334
+ "normalized": false,
335
+ "rstrip": false,
336
+ "single_word": false,
337
+ "special": true
338
+ },
339
+ "128042": {
340
+ "content": "<|reserved_special_token_34|>",
341
+ "lstrip": false,
342
+ "normalized": false,
343
+ "rstrip": false,
344
+ "single_word": false,
345
+ "special": true
346
+ },
347
+ "128043": {
348
+ "content": "<|reserved_special_token_35|>",
349
+ "lstrip": false,
350
+ "normalized": false,
351
+ "rstrip": false,
352
+ "single_word": false,
353
+ "special": true
354
+ },
355
+ "128044": {
356
+ "content": "<|reserved_special_token_36|>",
357
+ "lstrip": false,
358
+ "normalized": false,
359
+ "rstrip": false,
360
+ "single_word": false,
361
+ "special": true
362
+ },
363
+ "128045": {
364
+ "content": "<|reserved_special_token_37|>",
365
+ "lstrip": false,
366
+ "normalized": false,
367
+ "rstrip": false,
368
+ "single_word": false,
369
+ "special": true
370
+ },
371
+ "128046": {
372
+ "content": "<|reserved_special_token_38|>",
373
+ "lstrip": false,
374
+ "normalized": false,
375
+ "rstrip": false,
376
+ "single_word": false,
377
+ "special": true
378
+ },
379
+ "128047": {
380
+ "content": "<|reserved_special_token_39|>",
381
+ "lstrip": false,
382
+ "normalized": false,
383
+ "rstrip": false,
384
+ "single_word": false,
385
+ "special": true
386
+ },
387
+ "128048": {
388
+ "content": "<|reserved_special_token_40|>",
389
+ "lstrip": false,
390
+ "normalized": false,
391
+ "rstrip": false,
392
+ "single_word": false,
393
+ "special": true
394
+ },
395
+ "128049": {
396
+ "content": "<|reserved_special_token_41|>",
397
+ "lstrip": false,
398
+ "normalized": false,
399
+ "rstrip": false,
400
+ "single_word": false,
401
+ "special": true
402
+ },
403
+ "128050": {
404
+ "content": "<|reserved_special_token_42|>",
405
+ "lstrip": false,
406
+ "normalized": false,
407
+ "rstrip": false,
408
+ "single_word": false,
409
+ "special": true
410
+ },
411
+ "128051": {
412
+ "content": "<|reserved_special_token_43|>",
413
+ "lstrip": false,
414
+ "normalized": false,
415
+ "rstrip": false,
416
+ "single_word": false,
417
+ "special": true
418
+ },
419
+ "128052": {
420
+ "content": "<|reserved_special_token_44|>",
421
+ "lstrip": false,
422
+ "normalized": false,
423
+ "rstrip": false,
424
+ "single_word": false,
425
+ "special": true
426
+ },
427
+ "128053": {
428
+ "content": "<|reserved_special_token_45|>",
429
+ "lstrip": false,
430
+ "normalized": false,
431
+ "rstrip": false,
432
+ "single_word": false,
433
+ "special": true
434
+ },
435
+ "128054": {
436
+ "content": "<|reserved_special_token_46|>",
437
+ "lstrip": false,
438
+ "normalized": false,
439
+ "rstrip": false,
440
+ "single_word": false,
441
+ "special": true
442
+ },
443
+ "128055": {
444
+ "content": "<|reserved_special_token_47|>",
445
+ "lstrip": false,
446
+ "normalized": false,
447
+ "rstrip": false,
448
+ "single_word": false,
449
+ "special": true
450
+ },
451
+ "128056": {
452
+ "content": "<|reserved_special_token_48|>",
453
+ "lstrip": false,
454
+ "normalized": false,
455
+ "rstrip": false,
456
+ "single_word": false,
457
+ "special": true
458
+ },
459
+ "128057": {
460
+ "content": "<|reserved_special_token_49|>",
461
+ "lstrip": false,
462
+ "normalized": false,
463
+ "rstrip": false,
464
+ "single_word": false,
465
+ "special": true
466
+ },
467
+ "128058": {
468
+ "content": "<|reserved_special_token_50|>",
469
+ "lstrip": false,
470
+ "normalized": false,
471
+ "rstrip": false,
472
+ "single_word": false,
473
+ "special": true
474
+ },
475
+ "128059": {
476
+ "content": "<|reserved_special_token_51|>",
477
+ "lstrip": false,
478
+ "normalized": false,
479
+ "rstrip": false,
480
+ "single_word": false,
481
+ "special": true
482
+ },
483
+ "128060": {
484
+ "content": "<|reserved_special_token_52|>",
485
+ "lstrip": false,
486
+ "normalized": false,
487
+ "rstrip": false,
488
+ "single_word": false,
489
+ "special": true
490
+ },
491
+ "128061": {
492
+ "content": "<|reserved_special_token_53|>",
493
+ "lstrip": false,
494
+ "normalized": false,
495
+ "rstrip": false,
496
+ "single_word": false,
497
+ "special": true
498
+ },
499
+ "128062": {
500
+ "content": "<|reserved_special_token_54|>",
501
+ "lstrip": false,
502
+ "normalized": false,
503
+ "rstrip": false,
504
+ "single_word": false,
505
+ "special": true
506
+ },
507
+ "128063": {
508
+ "content": "<|reserved_special_token_55|>",
509
+ "lstrip": false,
510
+ "normalized": false,
511
+ "rstrip": false,
512
+ "single_word": false,
513
+ "special": true
514
+ },
515
+ "128064": {
516
+ "content": "<|reserved_special_token_56|>",
517
+ "lstrip": false,
518
+ "normalized": false,
519
+ "rstrip": false,
520
+ "single_word": false,
521
+ "special": true
522
+ },
523
+ "128065": {
524
+ "content": "<|reserved_special_token_57|>",
525
+ "lstrip": false,
526
+ "normalized": false,
527
+ "rstrip": false,
528
+ "single_word": false,
529
+ "special": true
530
+ },
531
+ "128066": {
532
+ "content": "<|reserved_special_token_58|>",
533
+ "lstrip": false,
534
+ "normalized": false,
535
+ "rstrip": false,
536
+ "single_word": false,
537
+ "special": true
538
+ },
539
+ "128067": {
540
+ "content": "<|reserved_special_token_59|>",
541
+ "lstrip": false,
542
+ "normalized": false,
543
+ "rstrip": false,
544
+ "single_word": false,
545
+ "special": true
546
+ },
547
+ "128068": {
548
+ "content": "<|reserved_special_token_60|>",
549
+ "lstrip": false,
550
+ "normalized": false,
551
+ "rstrip": false,
552
+ "single_word": false,
553
+ "special": true
554
+ },
555
+ "128069": {
556
+ "content": "<|reserved_special_token_61|>",
557
+ "lstrip": false,
558
+ "normalized": false,
559
+ "rstrip": false,
560
+ "single_word": false,
561
+ "special": true
562
+ },
563
+ "128070": {
564
+ "content": "<|reserved_special_token_62|>",
565
+ "lstrip": false,
566
+ "normalized": false,
567
+ "rstrip": false,
568
+ "single_word": false,
569
+ "special": true
570
+ },
571
+ "128071": {
572
+ "content": "<|reserved_special_token_63|>",
573
+ "lstrip": false,
574
+ "normalized": false,
575
+ "rstrip": false,
576
+ "single_word": false,
577
+ "special": true
578
+ },
579
+ "128072": {
580
+ "content": "<|reserved_special_token_64|>",
581
+ "lstrip": false,
582
+ "normalized": false,
583
+ "rstrip": false,
584
+ "single_word": false,
585
+ "special": true
586
+ },
587
+ "128073": {
588
+ "content": "<|reserved_special_token_65|>",
589
+ "lstrip": false,
590
+ "normalized": false,
591
+ "rstrip": false,
592
+ "single_word": false,
593
+ "special": true
594
+ },
595
+ "128074": {
596
+ "content": "<|reserved_special_token_66|>",
597
+ "lstrip": false,
598
+ "normalized": false,
599
+ "rstrip": false,
600
+ "single_word": false,
601
+ "special": true
602
+ },
603
+ "128075": {
604
+ "content": "<|reserved_special_token_67|>",
605
+ "lstrip": false,
606
+ "normalized": false,
607
+ "rstrip": false,
608
+ "single_word": false,
609
+ "special": true
610
+ },
611
+ "128076": {
612
+ "content": "<|reserved_special_token_68|>",
613
+ "lstrip": false,
614
+ "normalized": false,
615
+ "rstrip": false,
616
+ "single_word": false,
617
+ "special": true
618
+ },
619
+ "128077": {
620
+ "content": "<|reserved_special_token_69|>",
621
+ "lstrip": false,
622
+ "normalized": false,
623
+ "rstrip": false,
624
+ "single_word": false,
625
+ "special": true
626
+ },
627
+ "128078": {
628
+ "content": "<|reserved_special_token_70|>",
629
+ "lstrip": false,
630
+ "normalized": false,
631
+ "rstrip": false,
632
+ "single_word": false,
633
+ "special": true
634
+ },
635
+ "128079": {
636
+ "content": "<|reserved_special_token_71|>",
637
+ "lstrip": false,
638
+ "normalized": false,
639
+ "rstrip": false,
640
+ "single_word": false,
641
+ "special": true
642
+ },
643
+ "128080": {
644
+ "content": "<|reserved_special_token_72|>",
645
+ "lstrip": false,
646
+ "normalized": false,
647
+ "rstrip": false,
648
+ "single_word": false,
649
+ "special": true
650
+ },
651
+ "128081": {
652
+ "content": "<|reserved_special_token_73|>",
653
+ "lstrip": false,
654
+ "normalized": false,
655
+ "rstrip": false,
656
+ "single_word": false,
657
+ "special": true
658
+ },
659
+ "128082": {
660
+ "content": "<|reserved_special_token_74|>",
661
+ "lstrip": false,
662
+ "normalized": false,
663
+ "rstrip": false,
664
+ "single_word": false,
665
+ "special": true
666
+ },
667
+ "128083": {
668
+ "content": "<|reserved_special_token_75|>",
669
+ "lstrip": false,
670
+ "normalized": false,
671
+ "rstrip": false,
672
+ "single_word": false,
673
+ "special": true
674
+ },
675
+ "128084": {
676
+ "content": "<|reserved_special_token_76|>",
677
+ "lstrip": false,
678
+ "normalized": false,
679
+ "rstrip": false,
680
+ "single_word": false,
681
+ "special": true
682
+ },
683
+ "128085": {
684
+ "content": "<|reserved_special_token_77|>",
685
+ "lstrip": false,
686
+ "normalized": false,
687
+ "rstrip": false,
688
+ "single_word": false,
689
+ "special": true
690
+ },
691
+ "128086": {
692
+ "content": "<|reserved_special_token_78|>",
693
+ "lstrip": false,
694
+ "normalized": false,
695
+ "rstrip": false,
696
+ "single_word": false,
697
+ "special": true
698
+ },
699
+ "128087": {
700
+ "content": "<|reserved_special_token_79|>",
701
+ "lstrip": false,
702
+ "normalized": false,
703
+ "rstrip": false,
704
+ "single_word": false,
705
+ "special": true
706
+ },
707
+ "128088": {
708
+ "content": "<|reserved_special_token_80|>",
709
+ "lstrip": false,
710
+ "normalized": false,
711
+ "rstrip": false,
712
+ "single_word": false,
713
+ "special": true
714
+ },
715
+ "128089": {
716
+ "content": "<|reserved_special_token_81|>",
717
+ "lstrip": false,
718
+ "normalized": false,
719
+ "rstrip": false,
720
+ "single_word": false,
721
+ "special": true
722
+ },
723
+ "128090": {
724
+ "content": "<|reserved_special_token_82|>",
725
+ "lstrip": false,
726
+ "normalized": false,
727
+ "rstrip": false,
728
+ "single_word": false,
729
+ "special": true
730
+ },
731
+ "128091": {
732
+ "content": "<|reserved_special_token_83|>",
733
+ "lstrip": false,
734
+ "normalized": false,
735
+ "rstrip": false,
736
+ "single_word": false,
737
+ "special": true
738
+ },
739
+ "128092": {
740
+ "content": "<|reserved_special_token_84|>",
741
+ "lstrip": false,
742
+ "normalized": false,
743
+ "rstrip": false,
744
+ "single_word": false,
745
+ "special": true
746
+ },
747
+ "128093": {
748
+ "content": "<|reserved_special_token_85|>",
749
+ "lstrip": false,
750
+ "normalized": false,
751
+ "rstrip": false,
752
+ "single_word": false,
753
+ "special": true
754
+ },
755
+ "128094": {
756
+ "content": "<|reserved_special_token_86|>",
757
+ "lstrip": false,
758
+ "normalized": false,
759
+ "rstrip": false,
760
+ "single_word": false,
761
+ "special": true
762
+ },
763
+ "128095": {
764
+ "content": "<|reserved_special_token_87|>",
765
+ "lstrip": false,
766
+ "normalized": false,
767
+ "rstrip": false,
768
+ "single_word": false,
769
+ "special": true
770
+ },
771
+ "128096": {
772
+ "content": "<|reserved_special_token_88|>",
773
+ "lstrip": false,
774
+ "normalized": false,
775
+ "rstrip": false,
776
+ "single_word": false,
777
+ "special": true
778
+ },
779
+ "128097": {
780
+ "content": "<|reserved_special_token_89|>",
781
+ "lstrip": false,
782
+ "normalized": false,
783
+ "rstrip": false,
784
+ "single_word": false,
785
+ "special": true
786
+ },
787
+ "128098": {
788
+ "content": "<|reserved_special_token_90|>",
789
+ "lstrip": false,
790
+ "normalized": false,
791
+ "rstrip": false,
792
+ "single_word": false,
793
+ "special": true
794
+ },
795
+ "128099": {
796
+ "content": "<|reserved_special_token_91|>",
797
+ "lstrip": false,
798
+ "normalized": false,
799
+ "rstrip": false,
800
+ "single_word": false,
801
+ "special": true
802
+ },
803
+ "128100": {
804
+ "content": "<|reserved_special_token_92|>",
805
+ "lstrip": false,
806
+ "normalized": false,
807
+ "rstrip": false,
808
+ "single_word": false,
809
+ "special": true
810
+ },
811
+ "128101": {
812
+ "content": "<|reserved_special_token_93|>",
813
+ "lstrip": false,
814
+ "normalized": false,
815
+ "rstrip": false,
816
+ "single_word": false,
817
+ "special": true
818
+ },
819
+ "128102": {
820
+ "content": "<|reserved_special_token_94|>",
821
+ "lstrip": false,
822
+ "normalized": false,
823
+ "rstrip": false,
824
+ "single_word": false,
825
+ "special": true
826
+ },
827
+ "128103": {
828
+ "content": "<|reserved_special_token_95|>",
829
+ "lstrip": false,
830
+ "normalized": false,
831
+ "rstrip": false,
832
+ "single_word": false,
833
+ "special": true
834
+ },
835
+ "128104": {
836
+ "content": "<|reserved_special_token_96|>",
837
+ "lstrip": false,
838
+ "normalized": false,
839
+ "rstrip": false,
840
+ "single_word": false,
841
+ "special": true
842
+ },
843
+ "128105": {
844
+ "content": "<|reserved_special_token_97|>",
845
+ "lstrip": false,
846
+ "normalized": false,
847
+ "rstrip": false,
848
+ "single_word": false,
849
+ "special": true
850
+ },
851
+ "128106": {
852
+ "content": "<|reserved_special_token_98|>",
853
+ "lstrip": false,
854
+ "normalized": false,
855
+ "rstrip": false,
856
+ "single_word": false,
857
+ "special": true
858
+ },
859
+ "128107": {
860
+ "content": "<|reserved_special_token_99|>",
861
+ "lstrip": false,
862
+ "normalized": false,
863
+ "rstrip": false,
864
+ "single_word": false,
865
+ "special": true
866
+ },
867
+ "128108": {
868
+ "content": "<|reserved_special_token_100|>",
869
+ "lstrip": false,
870
+ "normalized": false,
871
+ "rstrip": false,
872
+ "single_word": false,
873
+ "special": true
874
+ },
875
+ "128109": {
876
+ "content": "<|reserved_special_token_101|>",
877
+ "lstrip": false,
878
+ "normalized": false,
879
+ "rstrip": false,
880
+ "single_word": false,
881
+ "special": true
882
+ },
883
+ "128110": {
884
+ "content": "<|reserved_special_token_102|>",
885
+ "lstrip": false,
886
+ "normalized": false,
887
+ "rstrip": false,
888
+ "single_word": false,
889
+ "special": true
890
+ },
891
+ "128111": {
892
+ "content": "<|reserved_special_token_103|>",
893
+ "lstrip": false,
894
+ "normalized": false,
895
+ "rstrip": false,
896
+ "single_word": false,
897
+ "special": true
898
+ },
899
+ "128112": {
900
+ "content": "<|reserved_special_token_104|>",
901
+ "lstrip": false,
902
+ "normalized": false,
903
+ "rstrip": false,
904
+ "single_word": false,
905
+ "special": true
906
+ },
907
+ "128113": {
908
+ "content": "<|reserved_special_token_105|>",
909
+ "lstrip": false,
910
+ "normalized": false,
911
+ "rstrip": false,
912
+ "single_word": false,
913
+ "special": true
914
+ },
915
+ "128114": {
916
+ "content": "<|reserved_special_token_106|>",
917
+ "lstrip": false,
918
+ "normalized": false,
919
+ "rstrip": false,
920
+ "single_word": false,
921
+ "special": true
922
+ },
923
+ "128115": {
924
+ "content": "<|reserved_special_token_107|>",
925
+ "lstrip": false,
926
+ "normalized": false,
927
+ "rstrip": false,
928
+ "single_word": false,
929
+ "special": true
930
+ },
931
+ "128116": {
932
+ "content": "<|reserved_special_token_108|>",
933
+ "lstrip": false,
934
+ "normalized": false,
935
+ "rstrip": false,
936
+ "single_word": false,
937
+ "special": true
938
+ },
939
+ "128117": {
940
+ "content": "<|reserved_special_token_109|>",
941
+ "lstrip": false,
942
+ "normalized": false,
943
+ "rstrip": false,
944
+ "single_word": false,
945
+ "special": true
946
+ },
947
+ "128118": {
948
+ "content": "<|reserved_special_token_110|>",
949
+ "lstrip": false,
950
+ "normalized": false,
951
+ "rstrip": false,
952
+ "single_word": false,
953
+ "special": true
954
+ },
955
+ "128119": {
956
+ "content": "<|reserved_special_token_111|>",
957
+ "lstrip": false,
958
+ "normalized": false,
959
+ "rstrip": false,
960
+ "single_word": false,
961
+ "special": true
962
+ },
963
+ "128120": {
964
+ "content": "<|reserved_special_token_112|>",
965
+ "lstrip": false,
966
+ "normalized": false,
967
+ "rstrip": false,
968
+ "single_word": false,
969
+ "special": true
970
+ },
971
+ "128121": {
972
+ "content": "<|reserved_special_token_113|>",
973
+ "lstrip": false,
974
+ "normalized": false,
975
+ "rstrip": false,
976
+ "single_word": false,
977
+ "special": true
978
+ },
979
+ "128122": {
980
+ "content": "<|reserved_special_token_114|>",
981
+ "lstrip": false,
982
+ "normalized": false,
983
+ "rstrip": false,
984
+ "single_word": false,
985
+ "special": true
986
+ },
987
+ "128123": {
988
+ "content": "<|reserved_special_token_115|>",
989
+ "lstrip": false,
990
+ "normalized": false,
991
+ "rstrip": false,
992
+ "single_word": false,
993
+ "special": true
994
+ },
995
+ "128124": {
996
+ "content": "<|reserved_special_token_116|>",
997
+ "lstrip": false,
998
+ "normalized": false,
999
+ "rstrip": false,
1000
+ "single_word": false,
1001
+ "special": true
1002
+ },
1003
+ "128125": {
1004
+ "content": "<|reserved_special_token_117|>",
1005
+ "lstrip": false,
1006
+ "normalized": false,
1007
+ "rstrip": false,
1008
+ "single_word": false,
1009
+ "special": true
1010
+ },
1011
+ "128126": {
1012
+ "content": "<|reserved_special_token_118|>",
1013
+ "lstrip": false,
1014
+ "normalized": false,
1015
+ "rstrip": false,
1016
+ "single_word": false,
1017
+ "special": true
1018
+ },
1019
+ "128127": {
1020
+ "content": "<|reserved_special_token_119|>",
1021
+ "lstrip": false,
1022
+ "normalized": false,
1023
+ "rstrip": false,
1024
+ "single_word": false,
1025
+ "special": true
1026
+ },
1027
+ "128128": {
1028
+ "content": "<|reserved_special_token_120|>",
1029
+ "lstrip": false,
1030
+ "normalized": false,
1031
+ "rstrip": false,
1032
+ "single_word": false,
1033
+ "special": true
1034
+ },
1035
+ "128129": {
1036
+ "content": "<|reserved_special_token_121|>",
1037
+ "lstrip": false,
1038
+ "normalized": false,
1039
+ "rstrip": false,
1040
+ "single_word": false,
1041
+ "special": true
1042
+ },
1043
+ "128130": {
1044
+ "content": "<|reserved_special_token_122|>",
1045
+ "lstrip": false,
1046
+ "normalized": false,
1047
+ "rstrip": false,
1048
+ "single_word": false,
1049
+ "special": true
1050
+ },
1051
+ "128131": {
1052
+ "content": "<|reserved_special_token_123|>",
1053
+ "lstrip": false,
1054
+ "normalized": false,
1055
+ "rstrip": false,
1056
+ "single_word": false,
1057
+ "special": true
1058
+ },
1059
+ "128132": {
1060
+ "content": "<|reserved_special_token_124|>",
1061
+ "lstrip": false,
1062
+ "normalized": false,
1063
+ "rstrip": false,
1064
+ "single_word": false,
1065
+ "special": true
1066
+ },
1067
+ "128133": {
1068
+ "content": "<|reserved_special_token_125|>",
1069
+ "lstrip": false,
1070
+ "normalized": false,
1071
+ "rstrip": false,
1072
+ "single_word": false,
1073
+ "special": true
1074
+ },
1075
+ "128134": {
1076
+ "content": "<|reserved_special_token_126|>",
1077
+ "lstrip": false,
1078
+ "normalized": false,
1079
+ "rstrip": false,
1080
+ "single_word": false,
1081
+ "special": true
1082
+ },
1083
+ "128135": {
1084
+ "content": "<|reserved_special_token_127|>",
1085
+ "lstrip": false,
1086
+ "normalized": false,
1087
+ "rstrip": false,
1088
+ "single_word": false,
1089
+ "special": true
1090
+ },
1091
+ "128136": {
1092
+ "content": "<|reserved_special_token_128|>",
1093
+ "lstrip": false,
1094
+ "normalized": false,
1095
+ "rstrip": false,
1096
+ "single_word": false,
1097
+ "special": true
1098
+ },
1099
+ "128137": {
1100
+ "content": "<|reserved_special_token_129|>",
1101
+ "lstrip": false,
1102
+ "normalized": false,
1103
+ "rstrip": false,
1104
+ "single_word": false,
1105
+ "special": true
1106
+ },
1107
+ "128138": {
1108
+ "content": "<|reserved_special_token_130|>",
1109
+ "lstrip": false,
1110
+ "normalized": false,
1111
+ "rstrip": false,
1112
+ "single_word": false,
1113
+ "special": true
1114
+ },
1115
+ "128139": {
1116
+ "content": "<|reserved_special_token_131|>",
1117
+ "lstrip": false,
1118
+ "normalized": false,
1119
+ "rstrip": false,
1120
+ "single_word": false,
1121
+ "special": true
1122
+ },
1123
+ "128140": {
1124
+ "content": "<|reserved_special_token_132|>",
1125
+ "lstrip": false,
1126
+ "normalized": false,
1127
+ "rstrip": false,
1128
+ "single_word": false,
1129
+ "special": true
1130
+ },
1131
+ "128141": {
1132
+ "content": "<|reserved_special_token_133|>",
1133
+ "lstrip": false,
1134
+ "normalized": false,
1135
+ "rstrip": false,
1136
+ "single_word": false,
1137
+ "special": true
1138
+ },
1139
+ "128142": {
1140
+ "content": "<|reserved_special_token_134|>",
1141
+ "lstrip": false,
1142
+ "normalized": false,
1143
+ "rstrip": false,
1144
+ "single_word": false,
1145
+ "special": true
1146
+ },
1147
+ "128143": {
1148
+ "content": "<|reserved_special_token_135|>",
1149
+ "lstrip": false,
1150
+ "normalized": false,
1151
+ "rstrip": false,
1152
+ "single_word": false,
1153
+ "special": true
1154
+ },
1155
+ "128144": {
1156
+ "content": "<|reserved_special_token_136|>",
1157
+ "lstrip": false,
1158
+ "normalized": false,
1159
+ "rstrip": false,
1160
+ "single_word": false,
1161
+ "special": true
1162
+ },
1163
+ "128145": {
1164
+ "content": "<|reserved_special_token_137|>",
1165
+ "lstrip": false,
1166
+ "normalized": false,
1167
+ "rstrip": false,
1168
+ "single_word": false,
1169
+ "special": true
1170
+ },
1171
+ "128146": {
1172
+ "content": "<|reserved_special_token_138|>",
1173
+ "lstrip": false,
1174
+ "normalized": false,
1175
+ "rstrip": false,
1176
+ "single_word": false,
1177
+ "special": true
1178
+ },
1179
+ "128147": {
1180
+ "content": "<|reserved_special_token_139|>",
1181
+ "lstrip": false,
1182
+ "normalized": false,
1183
+ "rstrip": false,
1184
+ "single_word": false,
1185
+ "special": true
1186
+ },
1187
+ "128148": {
1188
+ "content": "<|reserved_special_token_140|>",
1189
+ "lstrip": false,
1190
+ "normalized": false,
1191
+ "rstrip": false,
1192
+ "single_word": false,
1193
+ "special": true
1194
+ },
1195
+ "128149": {
1196
+ "content": "<|reserved_special_token_141|>",
1197
+ "lstrip": false,
1198
+ "normalized": false,
1199
+ "rstrip": false,
1200
+ "single_word": false,
1201
+ "special": true
1202
+ },
1203
+ "128150": {
1204
+ "content": "<|reserved_special_token_142|>",
1205
+ "lstrip": false,
1206
+ "normalized": false,
1207
+ "rstrip": false,
1208
+ "single_word": false,
1209
+ "special": true
1210
+ },
1211
+ "128151": {
1212
+ "content": "<|reserved_special_token_143|>",
1213
+ "lstrip": false,
1214
+ "normalized": false,
1215
+ "rstrip": false,
1216
+ "single_word": false,
1217
+ "special": true
1218
+ },
1219
+ "128152": {
1220
+ "content": "<|reserved_special_token_144|>",
1221
+ "lstrip": false,
1222
+ "normalized": false,
1223
+ "rstrip": false,
1224
+ "single_word": false,
1225
+ "special": true
1226
+ },
1227
+ "128153": {
1228
+ "content": "<|reserved_special_token_145|>",
1229
+ "lstrip": false,
1230
+ "normalized": false,
1231
+ "rstrip": false,
1232
+ "single_word": false,
1233
+ "special": true
1234
+ },
1235
+ "128154": {
1236
+ "content": "<|reserved_special_token_146|>",
1237
+ "lstrip": false,
1238
+ "normalized": false,
1239
+ "rstrip": false,
1240
+ "single_word": false,
1241
+ "special": true
1242
+ },
1243
+ "128155": {
1244
+ "content": "<|reserved_special_token_147|>",
1245
+ "lstrip": false,
1246
+ "normalized": false,
1247
+ "rstrip": false,
1248
+ "single_word": false,
1249
+ "special": true
1250
+ },
1251
+ "128156": {
1252
+ "content": "<|reserved_special_token_148|>",
1253
+ "lstrip": false,
1254
+ "normalized": false,
1255
+ "rstrip": false,
1256
+ "single_word": false,
1257
+ "special": true
1258
+ },
1259
+ "128157": {
1260
+ "content": "<|reserved_special_token_149|>",
1261
+ "lstrip": false,
1262
+ "normalized": false,
1263
+ "rstrip": false,
1264
+ "single_word": false,
1265
+ "special": true
1266
+ },
1267
+ "128158": {
1268
+ "content": "<|reserved_special_token_150|>",
1269
+ "lstrip": false,
1270
+ "normalized": false,
1271
+ "rstrip": false,
1272
+ "single_word": false,
1273
+ "special": true
1274
+ },
1275
+ "128159": {
1276
+ "content": "<|reserved_special_token_151|>",
1277
+ "lstrip": false,
1278
+ "normalized": false,
1279
+ "rstrip": false,
1280
+ "single_word": false,
1281
+ "special": true
1282
+ },
1283
+ "128160": {
1284
+ "content": "<|reserved_special_token_152|>",
1285
+ "lstrip": false,
1286
+ "normalized": false,
1287
+ "rstrip": false,
1288
+ "single_word": false,
1289
+ "special": true
1290
+ },
1291
+ "128161": {
1292
+ "content": "<|reserved_special_token_153|>",
1293
+ "lstrip": false,
1294
+ "normalized": false,
1295
+ "rstrip": false,
1296
+ "single_word": false,
1297
+ "special": true
1298
+ },
1299
+ "128162": {
1300
+ "content": "<|reserved_special_token_154|>",
1301
+ "lstrip": false,
1302
+ "normalized": false,
1303
+ "rstrip": false,
1304
+ "single_word": false,
1305
+ "special": true
1306
+ },
1307
+ "128163": {
1308
+ "content": "<|reserved_special_token_155|>",
1309
+ "lstrip": false,
1310
+ "normalized": false,
1311
+ "rstrip": false,
1312
+ "single_word": false,
1313
+ "special": true
1314
+ },
1315
+ "128164": {
1316
+ "content": "<|reserved_special_token_156|>",
1317
+ "lstrip": false,
1318
+ "normalized": false,
1319
+ "rstrip": false,
1320
+ "single_word": false,
1321
+ "special": true
1322
+ },
1323
+ "128165": {
1324
+ "content": "<|reserved_special_token_157|>",
1325
+ "lstrip": false,
1326
+ "normalized": false,
1327
+ "rstrip": false,
1328
+ "single_word": false,
1329
+ "special": true
1330
+ },
1331
+ "128166": {
1332
+ "content": "<|reserved_special_token_158|>",
1333
+ "lstrip": false,
1334
+ "normalized": false,
1335
+ "rstrip": false,
1336
+ "single_word": false,
1337
+ "special": true
1338
+ },
1339
+ "128167": {
1340
+ "content": "<|reserved_special_token_159|>",
1341
+ "lstrip": false,
1342
+ "normalized": false,
1343
+ "rstrip": false,
1344
+ "single_word": false,
1345
+ "special": true
1346
+ },
1347
+ "128168": {
1348
+ "content": "<|reserved_special_token_160|>",
1349
+ "lstrip": false,
1350
+ "normalized": false,
1351
+ "rstrip": false,
1352
+ "single_word": false,
1353
+ "special": true
1354
+ },
1355
+ "128169": {
1356
+ "content": "<|reserved_special_token_161|>",
1357
+ "lstrip": false,
1358
+ "normalized": false,
1359
+ "rstrip": false,
1360
+ "single_word": false,
1361
+ "special": true
1362
+ },
1363
+ "128170": {
1364
+ "content": "<|reserved_special_token_162|>",
1365
+ "lstrip": false,
1366
+ "normalized": false,
1367
+ "rstrip": false,
1368
+ "single_word": false,
1369
+ "special": true
1370
+ },
1371
+ "128171": {
1372
+ "content": "<|reserved_special_token_163|>",
1373
+ "lstrip": false,
1374
+ "normalized": false,
1375
+ "rstrip": false,
1376
+ "single_word": false,
1377
+ "special": true
1378
+ },
1379
+ "128172": {
1380
+ "content": "<|reserved_special_token_164|>",
1381
+ "lstrip": false,
1382
+ "normalized": false,
1383
+ "rstrip": false,
1384
+ "single_word": false,
1385
+ "special": true
1386
+ },
1387
+ "128173": {
1388
+ "content": "<|reserved_special_token_165|>",
1389
+ "lstrip": false,
1390
+ "normalized": false,
1391
+ "rstrip": false,
1392
+ "single_word": false,
1393
+ "special": true
1394
+ },
1395
+ "128174": {
1396
+ "content": "<|reserved_special_token_166|>",
1397
+ "lstrip": false,
1398
+ "normalized": false,
1399
+ "rstrip": false,
1400
+ "single_word": false,
1401
+ "special": true
1402
+ },
1403
+ "128175": {
1404
+ "content": "<|reserved_special_token_167|>",
1405
+ "lstrip": false,
1406
+ "normalized": false,
1407
+ "rstrip": false,
1408
+ "single_word": false,
1409
+ "special": true
1410
+ },
1411
+ "128176": {
1412
+ "content": "<|reserved_special_token_168|>",
1413
+ "lstrip": false,
1414
+ "normalized": false,
1415
+ "rstrip": false,
1416
+ "single_word": false,
1417
+ "special": true
1418
+ },
1419
+ "128177": {
1420
+ "content": "<|reserved_special_token_169|>",
1421
+ "lstrip": false,
1422
+ "normalized": false,
1423
+ "rstrip": false,
1424
+ "single_word": false,
1425
+ "special": true
1426
+ },
1427
+ "128178": {
1428
+ "content": "<|reserved_special_token_170|>",
1429
+ "lstrip": false,
1430
+ "normalized": false,
1431
+ "rstrip": false,
1432
+ "single_word": false,
1433
+ "special": true
1434
+ },
1435
+ "128179": {
1436
+ "content": "<|reserved_special_token_171|>",
1437
+ "lstrip": false,
1438
+ "normalized": false,
1439
+ "rstrip": false,
1440
+ "single_word": false,
1441
+ "special": true
1442
+ },
1443
+ "128180": {
1444
+ "content": "<|reserved_special_token_172|>",
1445
+ "lstrip": false,
1446
+ "normalized": false,
1447
+ "rstrip": false,
1448
+ "single_word": false,
1449
+ "special": true
1450
+ },
1451
+ "128181": {
1452
+ "content": "<|reserved_special_token_173|>",
1453
+ "lstrip": false,
1454
+ "normalized": false,
1455
+ "rstrip": false,
1456
+ "single_word": false,
1457
+ "special": true
1458
+ },
1459
+ "128182": {
1460
+ "content": "<|reserved_special_token_174|>",
1461
+ "lstrip": false,
1462
+ "normalized": false,
1463
+ "rstrip": false,
1464
+ "single_word": false,
1465
+ "special": true
1466
+ },
1467
+ "128183": {
1468
+ "content": "<|reserved_special_token_175|>",
1469
+ "lstrip": false,
1470
+ "normalized": false,
1471
+ "rstrip": false,
1472
+ "single_word": false,
1473
+ "special": true
1474
+ },
1475
+ "128184": {
1476
+ "content": "<|reserved_special_token_176|>",
1477
+ "lstrip": false,
1478
+ "normalized": false,
1479
+ "rstrip": false,
1480
+ "single_word": false,
1481
+ "special": true
1482
+ },
1483
+ "128185": {
1484
+ "content": "<|reserved_special_token_177|>",
1485
+ "lstrip": false,
1486
+ "normalized": false,
1487
+ "rstrip": false,
1488
+ "single_word": false,
1489
+ "special": true
1490
+ },
1491
+ "128186": {
1492
+ "content": "<|reserved_special_token_178|>",
1493
+ "lstrip": false,
1494
+ "normalized": false,
1495
+ "rstrip": false,
1496
+ "single_word": false,
1497
+ "special": true
1498
+ },
1499
+ "128187": {
1500
+ "content": "<|reserved_special_token_179|>",
1501
+ "lstrip": false,
1502
+ "normalized": false,
1503
+ "rstrip": false,
1504
+ "single_word": false,
1505
+ "special": true
1506
+ },
1507
+ "128188": {
1508
+ "content": "<|reserved_special_token_180|>",
1509
+ "lstrip": false,
1510
+ "normalized": false,
1511
+ "rstrip": false,
1512
+ "single_word": false,
1513
+ "special": true
1514
+ },
1515
+ "128189": {
1516
+ "content": "<|reserved_special_token_181|>",
1517
+ "lstrip": false,
1518
+ "normalized": false,
1519
+ "rstrip": false,
1520
+ "single_word": false,
1521
+ "special": true
1522
+ },
1523
+ "128190": {
1524
+ "content": "<|reserved_special_token_182|>",
1525
+ "lstrip": false,
1526
+ "normalized": false,
1527
+ "rstrip": false,
1528
+ "single_word": false,
1529
+ "special": true
1530
+ },
1531
+ "128191": {
1532
+ "content": "<|reserved_special_token_183|>",
1533
+ "lstrip": false,
1534
+ "normalized": false,
1535
+ "rstrip": false,
1536
+ "single_word": false,
1537
+ "special": true
1538
+ },
1539
+ "128192": {
1540
+ "content": "<|reserved_special_token_184|>",
1541
+ "lstrip": false,
1542
+ "normalized": false,
1543
+ "rstrip": false,
1544
+ "single_word": false,
1545
+ "special": true
1546
+ },
1547
+ "128193": {
1548
+ "content": "<|reserved_special_token_185|>",
1549
+ "lstrip": false,
1550
+ "normalized": false,
1551
+ "rstrip": false,
1552
+ "single_word": false,
1553
+ "special": true
1554
+ },
1555
+ "128194": {
1556
+ "content": "<|reserved_special_token_186|>",
1557
+ "lstrip": false,
1558
+ "normalized": false,
1559
+ "rstrip": false,
1560
+ "single_word": false,
1561
+ "special": true
1562
+ },
1563
+ "128195": {
1564
+ "content": "<|reserved_special_token_187|>",
1565
+ "lstrip": false,
1566
+ "normalized": false,
1567
+ "rstrip": false,
1568
+ "single_word": false,
1569
+ "special": true
1570
+ },
1571
+ "128196": {
1572
+ "content": "<|reserved_special_token_188|>",
1573
+ "lstrip": false,
1574
+ "normalized": false,
1575
+ "rstrip": false,
1576
+ "single_word": false,
1577
+ "special": true
1578
+ },
1579
+ "128197": {
1580
+ "content": "<|reserved_special_token_189|>",
1581
+ "lstrip": false,
1582
+ "normalized": false,
1583
+ "rstrip": false,
1584
+ "single_word": false,
1585
+ "special": true
1586
+ },
1587
+ "128198": {
1588
+ "content": "<|reserved_special_token_190|>",
1589
+ "lstrip": false,
1590
+ "normalized": false,
1591
+ "rstrip": false,
1592
+ "single_word": false,
1593
+ "special": true
1594
+ },
1595
+ "128199": {
1596
+ "content": "<|reserved_special_token_191|>",
1597
+ "lstrip": false,
1598
+ "normalized": false,
1599
+ "rstrip": false,
1600
+ "single_word": false,
1601
+ "special": true
1602
+ },
1603
+ "128200": {
1604
+ "content": "<|reserved_special_token_192|>",
1605
+ "lstrip": false,
1606
+ "normalized": false,
1607
+ "rstrip": false,
1608
+ "single_word": false,
1609
+ "special": true
1610
+ },
1611
+ "128201": {
1612
+ "content": "<|reserved_special_token_193|>",
1613
+ "lstrip": false,
1614
+ "normalized": false,
1615
+ "rstrip": false,
1616
+ "single_word": false,
1617
+ "special": true
1618
+ },
1619
+ "128202": {
1620
+ "content": "<|reserved_special_token_194|>",
1621
+ "lstrip": false,
1622
+ "normalized": false,
1623
+ "rstrip": false,
1624
+ "single_word": false,
1625
+ "special": true
1626
+ },
1627
+ "128203": {
1628
+ "content": "<|reserved_special_token_195|>",
1629
+ "lstrip": false,
1630
+ "normalized": false,
1631
+ "rstrip": false,
1632
+ "single_word": false,
1633
+ "special": true
1634
+ },
1635
+ "128204": {
1636
+ "content": "<|reserved_special_token_196|>",
1637
+ "lstrip": false,
1638
+ "normalized": false,
1639
+ "rstrip": false,
1640
+ "single_word": false,
1641
+ "special": true
1642
+ },
1643
+ "128205": {
1644
+ "content": "<|reserved_special_token_197|>",
1645
+ "lstrip": false,
1646
+ "normalized": false,
1647
+ "rstrip": false,
1648
+ "single_word": false,
1649
+ "special": true
1650
+ },
1651
+ "128206": {
1652
+ "content": "<|reserved_special_token_198|>",
1653
+ "lstrip": false,
1654
+ "normalized": false,
1655
+ "rstrip": false,
1656
+ "single_word": false,
1657
+ "special": true
1658
+ },
1659
+ "128207": {
1660
+ "content": "<|reserved_special_token_199|>",
1661
+ "lstrip": false,
1662
+ "normalized": false,
1663
+ "rstrip": false,
1664
+ "single_word": false,
1665
+ "special": true
1666
+ },
1667
+ "128208": {
1668
+ "content": "<|reserved_special_token_200|>",
1669
+ "lstrip": false,
1670
+ "normalized": false,
1671
+ "rstrip": false,
1672
+ "single_word": false,
1673
+ "special": true
1674
+ },
1675
+ "128209": {
1676
+ "content": "<|reserved_special_token_201|>",
1677
+ "lstrip": false,
1678
+ "normalized": false,
1679
+ "rstrip": false,
1680
+ "single_word": false,
1681
+ "special": true
1682
+ },
1683
+ "128210": {
1684
+ "content": "<|reserved_special_token_202|>",
1685
+ "lstrip": false,
1686
+ "normalized": false,
1687
+ "rstrip": false,
1688
+ "single_word": false,
1689
+ "special": true
1690
+ },
1691
+ "128211": {
1692
+ "content": "<|reserved_special_token_203|>",
1693
+ "lstrip": false,
1694
+ "normalized": false,
1695
+ "rstrip": false,
1696
+ "single_word": false,
1697
+ "special": true
1698
+ },
1699
+ "128212": {
1700
+ "content": "<|reserved_special_token_204|>",
1701
+ "lstrip": false,
1702
+ "normalized": false,
1703
+ "rstrip": false,
1704
+ "single_word": false,
1705
+ "special": true
1706
+ },
1707
+ "128213": {
1708
+ "content": "<|reserved_special_token_205|>",
1709
+ "lstrip": false,
1710
+ "normalized": false,
1711
+ "rstrip": false,
1712
+ "single_word": false,
1713
+ "special": true
1714
+ },
1715
+ "128214": {
1716
+ "content": "<|reserved_special_token_206|>",
1717
+ "lstrip": false,
1718
+ "normalized": false,
1719
+ "rstrip": false,
1720
+ "single_word": false,
1721
+ "special": true
1722
+ },
1723
+ "128215": {
1724
+ "content": "<|reserved_special_token_207|>",
1725
+ "lstrip": false,
1726
+ "normalized": false,
1727
+ "rstrip": false,
1728
+ "single_word": false,
1729
+ "special": true
1730
+ },
1731
+ "128216": {
1732
+ "content": "<|reserved_special_token_208|>",
1733
+ "lstrip": false,
1734
+ "normalized": false,
1735
+ "rstrip": false,
1736
+ "single_word": false,
1737
+ "special": true
1738
+ },
1739
+ "128217": {
1740
+ "content": "<|reserved_special_token_209|>",
1741
+ "lstrip": false,
1742
+ "normalized": false,
1743
+ "rstrip": false,
1744
+ "single_word": false,
1745
+ "special": true
1746
+ },
1747
+ "128218": {
1748
+ "content": "<|reserved_special_token_210|>",
1749
+ "lstrip": false,
1750
+ "normalized": false,
1751
+ "rstrip": false,
1752
+ "single_word": false,
1753
+ "special": true
1754
+ },
1755
+ "128219": {
1756
+ "content": "<|reserved_special_token_211|>",
1757
+ "lstrip": false,
1758
+ "normalized": false,
1759
+ "rstrip": false,
1760
+ "single_word": false,
1761
+ "special": true
1762
+ },
1763
+ "128220": {
1764
+ "content": "<|reserved_special_token_212|>",
1765
+ "lstrip": false,
1766
+ "normalized": false,
1767
+ "rstrip": false,
1768
+ "single_word": false,
1769
+ "special": true
1770
+ },
1771
+ "128221": {
1772
+ "content": "<|reserved_special_token_213|>",
1773
+ "lstrip": false,
1774
+ "normalized": false,
1775
+ "rstrip": false,
1776
+ "single_word": false,
1777
+ "special": true
1778
+ },
1779
+ "128222": {
1780
+ "content": "<|reserved_special_token_214|>",
1781
+ "lstrip": false,
1782
+ "normalized": false,
1783
+ "rstrip": false,
1784
+ "single_word": false,
1785
+ "special": true
1786
+ },
1787
+ "128223": {
1788
+ "content": "<|reserved_special_token_215|>",
1789
+ "lstrip": false,
1790
+ "normalized": false,
1791
+ "rstrip": false,
1792
+ "single_word": false,
1793
+ "special": true
1794
+ },
1795
+ "128224": {
1796
+ "content": "<|reserved_special_token_216|>",
1797
+ "lstrip": false,
1798
+ "normalized": false,
1799
+ "rstrip": false,
1800
+ "single_word": false,
1801
+ "special": true
1802
+ },
1803
+ "128225": {
1804
+ "content": "<|reserved_special_token_217|>",
1805
+ "lstrip": false,
1806
+ "normalized": false,
1807
+ "rstrip": false,
1808
+ "single_word": false,
1809
+ "special": true
1810
+ },
1811
+ "128226": {
1812
+ "content": "<|reserved_special_token_218|>",
1813
+ "lstrip": false,
1814
+ "normalized": false,
1815
+ "rstrip": false,
1816
+ "single_word": false,
1817
+ "special": true
1818
+ },
1819
+ "128227": {
1820
+ "content": "<|reserved_special_token_219|>",
1821
+ "lstrip": false,
1822
+ "normalized": false,
1823
+ "rstrip": false,
1824
+ "single_word": false,
1825
+ "special": true
1826
+ },
1827
+ "128228": {
1828
+ "content": "<|reserved_special_token_220|>",
1829
+ "lstrip": false,
1830
+ "normalized": false,
1831
+ "rstrip": false,
1832
+ "single_word": false,
1833
+ "special": true
1834
+ },
1835
+ "128229": {
1836
+ "content": "<|reserved_special_token_221|>",
1837
+ "lstrip": false,
1838
+ "normalized": false,
1839
+ "rstrip": false,
1840
+ "single_word": false,
1841
+ "special": true
1842
+ },
1843
+ "128230": {
1844
+ "content": "<|reserved_special_token_222|>",
1845
+ "lstrip": false,
1846
+ "normalized": false,
1847
+ "rstrip": false,
1848
+ "single_word": false,
1849
+ "special": true
1850
+ },
1851
+ "128231": {
1852
+ "content": "<|reserved_special_token_223|>",
1853
+ "lstrip": false,
1854
+ "normalized": false,
1855
+ "rstrip": false,
1856
+ "single_word": false,
1857
+ "special": true
1858
+ },
1859
+ "128232": {
1860
+ "content": "<|reserved_special_token_224|>",
1861
+ "lstrip": false,
1862
+ "normalized": false,
1863
+ "rstrip": false,
1864
+ "single_word": false,
1865
+ "special": true
1866
+ },
1867
+ "128233": {
1868
+ "content": "<|reserved_special_token_225|>",
1869
+ "lstrip": false,
1870
+ "normalized": false,
1871
+ "rstrip": false,
1872
+ "single_word": false,
1873
+ "special": true
1874
+ },
1875
+ "128234": {
1876
+ "content": "<|reserved_special_token_226|>",
1877
+ "lstrip": false,
1878
+ "normalized": false,
1879
+ "rstrip": false,
1880
+ "single_word": false,
1881
+ "special": true
1882
+ },
1883
+ "128235": {
1884
+ "content": "<|reserved_special_token_227|>",
1885
+ "lstrip": false,
1886
+ "normalized": false,
1887
+ "rstrip": false,
1888
+ "single_word": false,
1889
+ "special": true
1890
+ },
1891
+ "128236": {
1892
+ "content": "<|reserved_special_token_228|>",
1893
+ "lstrip": false,
1894
+ "normalized": false,
1895
+ "rstrip": false,
1896
+ "single_word": false,
1897
+ "special": true
1898
+ },
1899
+ "128237": {
1900
+ "content": "<|reserved_special_token_229|>",
1901
+ "lstrip": false,
1902
+ "normalized": false,
1903
+ "rstrip": false,
1904
+ "single_word": false,
1905
+ "special": true
1906
+ },
1907
+ "128238": {
1908
+ "content": "<|reserved_special_token_230|>",
1909
+ "lstrip": false,
1910
+ "normalized": false,
1911
+ "rstrip": false,
1912
+ "single_word": false,
1913
+ "special": true
1914
+ },
1915
+ "128239": {
1916
+ "content": "<|reserved_special_token_231|>",
1917
+ "lstrip": false,
1918
+ "normalized": false,
1919
+ "rstrip": false,
1920
+ "single_word": false,
1921
+ "special": true
1922
+ },
1923
+ "128240": {
1924
+ "content": "<|reserved_special_token_232|>",
1925
+ "lstrip": false,
1926
+ "normalized": false,
1927
+ "rstrip": false,
1928
+ "single_word": false,
1929
+ "special": true
1930
+ },
1931
+ "128241": {
1932
+ "content": "<|reserved_special_token_233|>",
1933
+ "lstrip": false,
1934
+ "normalized": false,
1935
+ "rstrip": false,
1936
+ "single_word": false,
1937
+ "special": true
1938
+ },
1939
+ "128242": {
1940
+ "content": "<|reserved_special_token_234|>",
1941
+ "lstrip": false,
1942
+ "normalized": false,
1943
+ "rstrip": false,
1944
+ "single_word": false,
1945
+ "special": true
1946
+ },
1947
+ "128243": {
1948
+ "content": "<|reserved_special_token_235|>",
1949
+ "lstrip": false,
1950
+ "normalized": false,
1951
+ "rstrip": false,
1952
+ "single_word": false,
1953
+ "special": true
1954
+ },
1955
+ "128244": {
1956
+ "content": "<|reserved_special_token_236|>",
1957
+ "lstrip": false,
1958
+ "normalized": false,
1959
+ "rstrip": false,
1960
+ "single_word": false,
1961
+ "special": true
1962
+ },
1963
+ "128245": {
1964
+ "content": "<|reserved_special_token_237|>",
1965
+ "lstrip": false,
1966
+ "normalized": false,
1967
+ "rstrip": false,
1968
+ "single_word": false,
1969
+ "special": true
1970
+ },
1971
+ "128246": {
1972
+ "content": "<|reserved_special_token_238|>",
1973
+ "lstrip": false,
1974
+ "normalized": false,
1975
+ "rstrip": false,
1976
+ "single_word": false,
1977
+ "special": true
1978
+ },
1979
+ "128247": {
1980
+ "content": "<|reserved_special_token_239|>",
1981
+ "lstrip": false,
1982
+ "normalized": false,
1983
+ "rstrip": false,
1984
+ "single_word": false,
1985
+ "special": true
1986
+ },
1987
+ "128248": {
1988
+ "content": "<|reserved_special_token_240|>",
1989
+ "lstrip": false,
1990
+ "normalized": false,
1991
+ "rstrip": false,
1992
+ "single_word": false,
1993
+ "special": true
1994
+ },
1995
+ "128249": {
1996
+ "content": "<|reserved_special_token_241|>",
1997
+ "lstrip": false,
1998
+ "normalized": false,
1999
+ "rstrip": false,
2000
+ "single_word": false,
2001
+ "special": true
2002
+ },
2003
+ "128250": {
2004
+ "content": "<|reserved_special_token_242|>",
2005
+ "lstrip": false,
2006
+ "normalized": false,
2007
+ "rstrip": false,
2008
+ "single_word": false,
2009
+ "special": true
2010
+ },
2011
+ "128251": {
2012
+ "content": "<|reserved_special_token_243|>",
2013
+ "lstrip": false,
2014
+ "normalized": false,
2015
+ "rstrip": false,
2016
+ "single_word": false,
2017
+ "special": true
2018
+ },
2019
+ "128252": {
2020
+ "content": "<|reserved_special_token_244|>",
2021
+ "lstrip": false,
2022
+ "normalized": false,
2023
+ "rstrip": false,
2024
+ "single_word": false,
2025
+ "special": true
2026
+ },
2027
+ "128253": {
2028
+ "content": "<|reserved_special_token_245|>",
2029
+ "lstrip": false,
2030
+ "normalized": false,
2031
+ "rstrip": false,
2032
+ "single_word": false,
2033
+ "special": true
2034
+ },
2035
+ "128254": {
2036
+ "content": "<|reserved_special_token_246|>",
2037
+ "lstrip": false,
2038
+ "normalized": false,
2039
+ "rstrip": false,
2040
+ "single_word": false,
2041
+ "special": true
2042
+ },
2043
+ "128255": {
2044
+ "content": "<|reserved_special_token_247|>",
2045
+ "lstrip": false,
2046
+ "normalized": false,
2047
+ "rstrip": false,
2048
+ "single_word": false,
2049
+ "special": true
2050
+ }
2051
+ },
2052
+ "bos_token": "<|begin_of_text|>",
2053
+ "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- set date_string = \"26 Jul 2024\" %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- set system_message = \"\" %}\n{%- endif %}\n\n{#- System message + builtin tools #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if builtin_tools is defined or tools is not none %}\n {{- \"Environment: ipython\\n\" }}\n{%- endif %}\n{%- if builtin_tools is defined %}\n {{- \"Tools: \" + builtin_tools | reject('equalto', 'code_interpreter') | join(\", \") + \"\\n\\n\"}}\n{%- endif %}\n\n{%- if tools is not none and not tools_in_user_message %}\n {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- system_message }}\n{{- \"<|eot_id|>\" }}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n {{- '<|start_header_id|>user<|end_header_id|>\\n\\n' -}}\n {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {{- first_user_message + \"<|eot_id|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {%- if not message.tool_calls|length == 1 %}\n {{- raise_exception(\"This model only supports single tool-calls at once!\") }}\n {%- endif %}\n {%- set tool_call = message.tool_calls[0].function %}\n {%- if builtin_tools is defined and tool_call.name in builtin_tools %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- \"<|python_tag|>\" + tool_call.name + \".call(\" }}\n {%- for arg_name, arg_val in tool_call.arguments | items %}\n {{- arg_name + '=\"' + arg_val + '\"' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- else %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"parameters\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- endif %}\n {%- if builtin_tools is defined %}\n {#- This means we're in ipython mode #}\n {{- \"<|eom_id|>\" }}\n {%- else %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>ipython<|end_header_id|>\\n\\n\" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}\n",
2054
+ "clean_up_tokenization_spaces": true,
2055
+ "eos_token": "<|eot_id|>",
2056
+ "extra_special_tokens": {},
2057
+ "model_input_names": [
2058
+ "input_ids",
2059
+ "attention_mask"
2060
+ ],
2061
+ "model_max_length": 131072,
2062
+ "tokenizer_class": "PreTrainedTokenizerFast"
2063
+ }