Fixed the bug with flash attention
config.json  +2 -1
@@ -30,5 +30,6 @@
   "torch_dtype": "bfloat16",
   "transformers_version": "4.46.0.dev0",
   "use_cache": true,
-  "vocab_size": 256000
+  "vocab_size": 256000,
+  "_attn_implementation": "eager"
 }
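For context, setting "_attn_implementation": "eager" in config.json makes transformers default to the standard (eager) attention code path for this checkpoint instead of flash attention. A minimal sketch of the equivalent override at load time, assuming a recent transformers release; the repo id below is a placeholder, not the actual checkpoint:

import torch
from transformers import AutoModelForCausalLM

# Hypothetical repo id for illustration; substitute the real checkpoint.
model = AutoModelForCausalLM.from_pretrained(
    "org/model-name",
    attn_implementation="eager",  # same effect as "_attn_implementation" in config.json
    torch_dtype=torch.bfloat16,   # matches "torch_dtype": "bfloat16" in the config
)

Baking the setting into config.json means every consumer of the checkpoint gets the fix without having to pass the keyword argument themselves.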