Upload folder using huggingface_hub
- config.json +1 -1
- generation_config.json +2 -1
config.json
CHANGED
@@ -83,7 +83,7 @@
   "router_aux_loss_coef": 0.0,
   "shared_intermediate_size": 1536,
   "tie_word_embeddings": true,
-  "transformers_version": "4.
+  "transformers_version": "4.57.0",
   "unsloth_fixed": true,
   "use_cache": true,
   "vocab_size": 100352
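For reference, `transformers_version` is metadata recording which library version wrote the file, and the other fields shown are read as model configuration at load time. A minimal sketch of inspecting the updated file with the standard `transformers` API; the local path is a placeholder, not part of this commit:

```python
# Minimal sketch: load the updated config.json with transformers.
# "path/to/model" is a placeholder for a local checkout of this repo.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("path/to/model")
print(config.transformers_version)  # "4.57.0" after this commit
print(config.vocab_size)            # 100352, per the unchanged field above
print(config.tie_word_embeddings)   # True
```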
generation_config.json
CHANGED
@@ -2,6 +2,7 @@
   "_from_model_config": true,
   "bos_token_id": 100257,
   "eos_token_id": 100257,
+  "max_length": 131072,
   "pad_token_id": 100256,
-  "transformers_version": "4.
+  "transformers_version": "4.57.0"
 }
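The newly added `max_length: 131072` becomes a default ceiling for generation when no explicit limit is passed to `generate()`. A minimal sketch of reading these defaults with the real `GenerationConfig` API; the path is again a placeholder:

```python
# Minimal sketch: inspect the updated generation defaults.
# "path/to/model" is a placeholder for a local checkout of this repo.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("path/to/model")
print(gen_config.max_length)    # 131072, added by this commit
print(gen_config.pad_token_id)  # 100256
print(gen_config.eos_token_id)  # 100257
```

Note that `max_length` counts prompt tokens plus generated tokens, so callers who want to bound only the new tokens would pass `max_new_tokens` to `generate()` instead.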