Update 1_Pooling/config.json
Browse files

Context: Embedding models with a causal attention mask only have the full attention context on the last token. Other pooling methods do not make sense!
- 1_Pooling/config.json +2 -2
1_Pooling/config.json
CHANGED
|
@@ -1,10 +1,10 @@
 {
   "word_embedding_dimension": 896,
   "pooling_mode_cls_token": false,
-  "pooling_mode_mean_tokens": true,
+  "pooling_mode_mean_tokens": false,
   "pooling_mode_max_tokens": false,
   "pooling_mode_mean_sqrt_len_tokens": false,
   "pooling_mode_weightedmean_tokens": false,
-  "pooling_mode_lasttoken": false,
+  "pooling_mode_lasttoken": true,
   "include_prompt": true
 }