Update config.json
config.json CHANGED (+4 -4)
@@ -4,9 +4,9 @@
   ],
   "attention_dropout": 0.0,
   "auto_map": {
-    "AutoConfig": "
-    "AutoModel": "
-    "AutoModelForCausalLM": "
+    "AutoConfig": "configuration_aquif_moe.AquifMoeConfig",
+    "AutoModel": "modeling_aquif_moe.AquifMoeModel",
+    "AutoModelForCausalLM": "modeling_aquif_moe.AquifMoeForCausalLM"
   },
   "embedding_dropout": 0.0,
   "eos_token_id": 126081,
@@ -18,7 +18,7 @@
   "intermediate_size": 1408,
   "max_position_embeddings": 32768,
   "max_window_layers": 28,
-  "model_type": "
+  "model_type": "aquif_moe",
   "moe_intermediate_size": 1408,
   "norm_head": false,
   "norm_softmax": false,
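The updated auto_map wires the Hugging Face Auto classes to the custom classes shipped in the repo's own Python files (configuration_aquif_moe.py and modeling_aquif_moe.py). A minimal loading sketch follows, assuming a placeholder repo id that is not taken from this commit:

from transformers import AutoConfig, AutoModelForCausalLM

# Placeholder repo id for illustration only; substitute the actual model repo.
repo_id = "org/aquif-moe"

# trust_remote_code=True lets transformers import the classes named in
# auto_map (AquifMoeConfig, AquifMoeModel, AquifMoeForCausalLM) from the
# repo's Python files instead of the built-in model registry.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

Setting "model_type": "aquif_moe" complements this: for custom code, the string in config.json is typically expected to match the model_type attribute declared on the custom config class, so the config stays consistent when saved and reloaded.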