mrdayl committed on
Commit a7c81f6 · verified · 1 Parent(s): 829b5c4

(Trained with Unsloth)

Files changed (2)
  1. config.json +15 -7
  2. tokenizer_config.json +1 -0
config.json CHANGED
@@ -7,13 +7,14 @@
   "attention_dropout": 0.0,
   "attn_logit_softcapping": null,
   "bos_token_id": 2,
+  "cache_implementation": "hybrid",
   "eos_token_id": 106,
   "final_logit_softcapping": null,
   "head_dim": 256,
   "hidden_activation": "gelu_pytorch_tanh",
-  "hidden_size": 640,
+  "hidden_size": 1152,
   "initializer_range": 0.02,
-  "intermediate_size": 2048,
+  "intermediate_size": 6912,
   "layer_types": [
     "sliding_attention",
     "sliding_attention",
@@ -32,25 +33,32 @@
     "sliding_attention",
     "sliding_attention",
     "sliding_attention",
-    "full_attention"
+    "full_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "sliding_attention",
+    "full_attention",
+    "sliding_attention",
+    "sliding_attention"
   ],
   "max_position_embeddings": 32768,
   "model_type": "gemma3_text",
   "num_attention_heads": 4,
-  "num_hidden_layers": 18,
+  "num_hidden_layers": 26,
   "num_key_value_heads": 1,
   "pad_token_id": 0,
   "query_pre_attn_scalar": 256,
   "rms_norm_eps": 1e-06,
-  "rope_local_base_freq": 10000.0,
+  "rope_local_base_freq": 10000,
   "rope_scaling": null,
-  "rope_theta": 1000000.0,
+  "rope_theta": 1000000,
   "sliding_window": 512,
   "torch_dtype": "bfloat16",
   "transformers_version": "4.55.4",
   "unsloth_fixed": true,
   "unsloth_version": "2025.8.10",
-  "use_bidirectional_attention": false,
   "use_cache": true,
   "vocab_size": 262144
 }
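For context, a minimal sketch of how one might sanity-check the values this commit changes after loading the config with transformers; the repo id below is a placeholder for this repository, not a value taken from the commit.

# Minimal sanity check for the config fields touched by this commit (hypothetical repo id).
from transformers import AutoConfig

config = AutoConfig.from_pretrained("mrdayl/<model-repo>")  # placeholder, not from the commit

assert config.model_type == "gemma3_text"
# num_hidden_layers goes from 18 to 26, and layer_types must list one entry per layer.
assert config.num_hidden_layers == 26
assert len(config.layer_types) == config.num_hidden_layers
# hidden_size 640 -> 1152 and intermediate_size 2048 -> 6912 (a 6x MLP expansion).
assert config.hidden_size == 1152 and config.intermediate_size == 6912
assert config.cache_implementation == "hybrid"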
tokenizer_config.json CHANGED
@@ -51337,6 +51337,7 @@
   "model_max_length": 32768,
   "pad_token": "<pad>",
   "padding_side": "right",
+  "processor_class": "Gemma3Processor",
   "sp_model_kwargs": null,
   "spaces_between_special_tokens": false,
   "tokenizer_class": "GemmaTokenizer",