Datasets: mteb

Samoed committed (unverified)
Commit d4cd8f3 · 1 Parent(s): 0af4146

Added encodechka results (#182)
Files changed (24)
  1. results/cointegrated__LaBSE-en-ru/cf0714e606d4af551e14ad69a7929cd6b0da7f7e/InappropriatenessClassificationv2.json +95 -0
  2. results/cointegrated__LaBSE-en-ru/cf0714e606d4af551e14ad69a7929cd6b0da7f7e/RUParaPhraserSTS.json +13 -19
  3. results/cointegrated__LaBSE-en-ru/cf0714e606d4af551e14ad69a7929cd6b0da7f7e/RuNLUIntentClassification.json +136 -0
  4. results/cointegrated__LaBSE-en-ru/cf0714e606d4af551e14ad69a7929cd6b0da7f7e/RuSTSBenchmarkSTS.json +13 -19
  5. results/cointegrated__LaBSE-en-ru/cf0714e606d4af551e14ad69a7929cd6b0da7f7e/RuToxicOKMLCUPClassification.json +95 -0
  6. results/cointegrated__LaBSE-en-ru/cf0714e606d4af551e14ad69a7929cd6b0da7f7e/SentiRuEval2016.json +73 -0
  7. results/cointegrated__LaBSE-en-ru/cf0714e606d4af551e14ad69a7929cd6b0da7f7e/XNLI.json +46 -93
  8. results/cointegrated__LaBSE-en-ru/cf0714e606d4af551e14ad69a7929cd6b0da7f7e/model_meta.json +1 -1
  9. results/deepvk__USER-bge-m3/0cc6cfe48e260fb0474c753087a69369e88709ae/InappropriatenessClassificationv2.json +95 -0
  10. results/deepvk__USER-bge-m3/0cc6cfe48e260fb0474c753087a69369e88709ae/RUParaPhraserSTS.json +13 -19
  11. results/deepvk__USER-bge-m3/0cc6cfe48e260fb0474c753087a69369e88709ae/RuNLUIntentClassification.json +136 -0
  12. results/deepvk__USER-bge-m3/0cc6cfe48e260fb0474c753087a69369e88709ae/RuSTSBenchmarkSTS.json +13 -19
  13. results/deepvk__USER-bge-m3/0cc6cfe48e260fb0474c753087a69369e88709ae/RuToxicOKMLCUPClassification.json +95 -0
  14. results/deepvk__USER-bge-m3/0cc6cfe48e260fb0474c753087a69369e88709ae/SentiRuEval2016.json +73 -0
  15. results/deepvk__USER-bge-m3/0cc6cfe48e260fb0474c753087a69369e88709ae/XNLI.json +46 -93
  16. results/deepvk__USER-bge-m3/0cc6cfe48e260fb0474c753087a69369e88709ae/model_meta.json +1 -1
  17. results/sergeyzh__rubert-tiny-turbo/8ce0cf757446ce9bb2d5f5a4ac8103c7a1049054/InappropriatenessClassificationv2.json +95 -0
  18. results/sergeyzh__rubert-tiny-turbo/8ce0cf757446ce9bb2d5f5a4ac8103c7a1049054/RUParaPhraserSTS.json +13 -19
  19. results/sergeyzh__rubert-tiny-turbo/8ce0cf757446ce9bb2d5f5a4ac8103c7a1049054/RuNLUIntentClassification.json +136 -0
  20. results/sergeyzh__rubert-tiny-turbo/8ce0cf757446ce9bb2d5f5a4ac8103c7a1049054/RuSTSBenchmarkSTS.json +13 -19
  21. results/sergeyzh__rubert-tiny-turbo/8ce0cf757446ce9bb2d5f5a4ac8103c7a1049054/RuToxicOKMLCUPClassification.json +95 -0
  22. results/sergeyzh__rubert-tiny-turbo/8ce0cf757446ce9bb2d5f5a4ac8103c7a1049054/SentiRuEval2016.json +73 -0
  23. results/sergeyzh__rubert-tiny-turbo/8ce0cf757446ce9bb2d5f5a4ac8103c7a1049054/XNLI.json +46 -93
  24. results/sergeyzh__rubert-tiny-turbo/8ce0cf757446ce9bb2d5f5a4ac8103c7a1049054/model_meta.json +1 -1
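Each per-task result file below follows the MTEB result schema: dataset_revision, task_name, mteb_version, a scores map keyed by split, evaluation_time, and kg_co2_emissions. A minimal sketch of how one of these files could be read, assuming a local checkout of this repository; the path is taken from the file list above and nothing beyond the standard library is used:

```python
import json

# A minimal sketch, assuming a local checkout of this results repository.
# Path copied from entry 1 of the file list above.
path = ("results/cointegrated__LaBSE-en-ru/"
        "cf0714e606d4af551e14ad69a7929cd6b0da7f7e/"
        "InappropriatenessClassificationv2.json")

with open(path, encoding="utf-8") as f:
    result = json.load(f)

# Each split maps to a list of per-subset score dicts; "main_score" is the
# headline metric and "scores_per_experiment" holds the individual runs.
for entry in result["scores"]["test"]:
    print(result["task_name"], entry["hf_subset"], entry["main_score"])
```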
results/cointegrated__LaBSE-en-ru/cf0714e606d4af551e14ad69a7929cd6b0da7f7e/InappropriatenessClassificationv2.json ADDED
@@ -0,0 +1,95 @@
+ {
+ "dataset_revision": "698cb161a90150ec46618f714cdd8606cf21a9eb",
+ "task_name": "InappropriatenessClassificationv2",
+ "mteb_version": "1.38.0",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.572433,
+ "f1": 0.537324,
+ "f1_weighted": 0.586527,
+ "ap": 0.325883,
+ "ap_weighted": 0.325883,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.525667,
+ "f1": 0.519159,
+ "f1_weighted": 0.541907,
+ "ap": 0.33254,
+ "ap_weighted": 0.33254
+ },
+ {
+ "accuracy": 0.583667,
+ "f1": 0.531975,
+ "f1_weighted": 0.595228,
+ "ap": 0.314482,
+ "ap_weighted": 0.314482
+ },
+ {
+ "accuracy": 0.545333,
+ "f1": 0.497453,
+ "f1_weighted": 0.560535,
+ "ap": 0.297929,
+ "ap_weighted": 0.297929
+ },
+ {
+ "accuracy": 0.615333,
+ "f1": 0.550413,
+ "f1_weighted": 0.619889,
+ "ap": 0.32334,
+ "ap_weighted": 0.32334
+ },
+ {
+ "accuracy": 0.609667,
+ "f1": 0.549164,
+ "f1_weighted": 0.616328,
+ "ap": 0.323062,
+ "ap_weighted": 0.323062
+ },
+ {
+ "accuracy": 0.604,
+ "f1": 0.571687,
+ "f1_weighted": 0.619529,
+ "ap": 0.345218,
+ "ap_weighted": 0.345218
+ },
+ {
+ "accuracy": 0.609,
+ "f1": 0.5708,
+ "f1_weighted": 0.622872,
+ "ap": 0.342153,
+ "ap_weighted": 0.342153
+ },
+ {
+ "accuracy": 0.525333,
+ "f1": 0.51339,
+ "f1_weighted": 0.544392,
+ "ap": 0.321849,
+ "ap_weighted": 0.321849
+ },
+ {
+ "accuracy": 0.556333,
+ "f1": 0.53497,
+ "f1_weighted": 0.575503,
+ "ap": 0.327231,
+ "ap_weighted": 0.327231
+ },
+ {
+ "accuracy": 0.55,
+ "f1": 0.534231,
+ "f1_weighted": 0.569083,
+ "ap": 0.33103,
+ "ap_weighted": 0.33103
+ }
+ ],
+ "main_score": 0.572433,
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 205.99100065231323,
+ "kg_co2_emissions": null
+ }
results/cointegrated__LaBSE-en-ru/cf0714e606d4af551e14ad69a7929cd6b0da7f7e/RUParaPhraserSTS.json CHANGED
@@ -1,32 +1,26 @@
  {
  "dataset_revision": "43265056790b8f7c59e0139acb4be0a8dad2c8f4",
- "evaluation_time": 2.0018677711486816,
- "kg_co2_emissions": null,
- "mteb_version": "1.12.49",
+ "task_name": "RUParaPhraserSTS",
+ "mteb_version": "1.38.0",
  "scores": {
  "test": [
  {
- "cosine_pearson": 0.6027608066628394,
- "cosine_spearman": 0.6587159398945475,
- "euclidean_pearson": 0.6404063252847751,
- "euclidean_spearman": 0.6587159393396224,
+ "pearson": 0.602761,
+ "spearman": 0.658715,
+ "cosine_pearson": 0.602761,
+ "cosine_spearman": 0.658715,
+ "manhattan_pearson": 0.636752,
+ "manhattan_spearman": 0.655757,
+ "euclidean_pearson": 0.640406,
+ "euclidean_spearman": 0.658715,
+ "main_score": 0.658715,
  "hf_subset": "default",
  "languages": [
  "rus-Cyrl"
- ],
- "main_score": 0.6587159398945475,
- "manhattan_pearson": 0.6367522798557337,
- "manhattan_spearman": 0.6557562563964406,
- "pearson": [
- 0.6027608213252988,
- 1.1151912002652535e-190
- ],
- "spearman": [
- 0.658715939617085,
- 9.255608761140866e-240
  ]
  }
  ]
  },
- "task_name": "RUParaPhraserSTS"
+ "evaluation_time": 120.49252843856812,
+ "kg_co2_emissions": null
  }
results/cointegrated__LaBSE-en-ru/cf0714e606d4af551e14ad69a7929cd6b0da7f7e/RuNLUIntentClassification.json ADDED
@@ -0,0 +1,136 @@
+ {
+ "dataset_revision": "424d0f767aaa5c411e3a529eec04658e5726a39e",
+ "task_name": "RuNLUIntentClassification",
+ "mteb_version": "1.38.0",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.61164,
+ "f1": 0.549913,
+ "f1_weighted": 0.598506,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.6076,
+ "f1": 0.54767,
+ "f1_weighted": 0.597187
+ },
+ {
+ "accuracy": 0.5968,
+ "f1": 0.541504,
+ "f1_weighted": 0.581928
+ },
+ {
+ "accuracy": 0.618,
+ "f1": 0.553091,
+ "f1_weighted": 0.606849
+ },
+ {
+ "accuracy": 0.5926,
+ "f1": 0.528608,
+ "f1_weighted": 0.57762
+ },
+ {
+ "accuracy": 0.6118,
+ "f1": 0.553993,
+ "f1_weighted": 0.597585
+ },
+ {
+ "accuracy": 0.6184,
+ "f1": 0.546021,
+ "f1_weighted": 0.606671
+ },
+ {
+ "accuracy": 0.6448,
+ "f1": 0.572056,
+ "f1_weighted": 0.632213
+ },
+ {
+ "accuracy": 0.619,
+ "f1": 0.554663,
+ "f1_weighted": 0.606681
+ },
+ {
+ "accuracy": 0.6048,
+ "f1": 0.553439,
+ "f1_weighted": 0.592555
+ },
+ {
+ "accuracy": 0.6026,
+ "f1": 0.548083,
+ "f1_weighted": 0.585772
+ }
+ ],
+ "main_score": 0.61164,
+ "hf_subset": "rus-eng",
+ "languages": [
+ "rus-Cyrl",
+ "rus-Latn"
+ ]
+ },
+ {
+ "accuracy": 0.6027,
+ "f1": 0.536807,
+ "f1_weighted": 0.587707,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.5848,
+ "f1": 0.526639,
+ "f1_weighted": 0.571317
+ },
+ {
+ "accuracy": 0.593,
+ "f1": 0.525897,
+ "f1_weighted": 0.576289
+ },
+ {
+ "accuracy": 0.5912,
+ "f1": 0.523767,
+ "f1_weighted": 0.575685
+ },
+ {
+ "accuracy": 0.5926,
+ "f1": 0.523214,
+ "f1_weighted": 0.581286
+ },
+ {
+ "accuracy": 0.5944,
+ "f1": 0.533915,
+ "f1_weighted": 0.577058
+ },
+ {
+ "accuracy": 0.6124,
+ "f1": 0.538406,
+ "f1_weighted": 0.59521
+ },
+ {
+ "accuracy": 0.6438,
+ "f1": 0.565737,
+ "f1_weighted": 0.632562
+ },
+ {
+ "accuracy": 0.623,
+ "f1": 0.558596,
+ "f1_weighted": 0.60873
+ },
+ {
+ "accuracy": 0.5888,
+ "f1": 0.535232,
+ "f1_weighted": 0.574616
+ },
+ {
+ "accuracy": 0.603,
+ "f1": 0.536667,
+ "f1_weighted": 0.584312
+ }
+ ],
+ "main_score": 0.6027,
+ "hf_subset": "rus",
+ "languages": [
+ "rus-Cyrl"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 476.48728609085083,
+ "kg_co2_emissions": null
+ }
results/cointegrated__LaBSE-en-ru/cf0714e606d4af551e14ad69a7929cd6b0da7f7e/RuSTSBenchmarkSTS.json CHANGED
@@ -1,32 +1,26 @@
  {
  "dataset_revision": "7cf24f325c6da6195df55bef3d86b5e0616f3018",
- "evaluation_time": 1.2986884117126465,
- "kg_co2_emissions": null,
- "mteb_version": "1.12.49",
+ "task_name": "RuSTSBenchmarkSTS",
+ "mteb_version": "1.38.0",
  "scores": {
  "test": [
  {
- "cosine_pearson": 0.7353138950187652,
- "cosine_spearman": 0.7331781990120141,
- "euclidean_pearson": 0.727284512674197,
- "euclidean_spearman": 0.7331940284383294,
+ "pearson": 0.735314,
+ "spearman": 0.733167,
+ "cosine_pearson": 0.735314,
+ "cosine_spearman": 0.733178,
+ "manhattan_pearson": 0.724596,
+ "manhattan_spearman": 0.72697,
+ "euclidean_pearson": 0.727285,
+ "euclidean_spearman": 0.733175,
+ "main_score": 0.733178,
  "hf_subset": "default",
  "languages": [
  "rus-Cyrl"
- ],
- "main_score": 0.7331781990120141,
- "manhattan_pearson": 0.7245956300093939,
- "manhattan_spearman": 0.7269895261332752,
- "pearson": [
- 0.7353138944245623,
- 1.8856363326711897e-215
- ],
- "spearman": [
- 0.7331700299020929,
- 1.4081228352843363e-213
  ]
  }
  ]
  },
- "task_name": "RuSTSBenchmarkSTS"
+ "evaluation_time": 79.25192046165466,
+ "kg_co2_emissions": null
  }
results/cointegrated__LaBSE-en-ru/cf0714e606d4af551e14ad69a7929cd6b0da7f7e/RuToxicOKMLCUPClassification.json ADDED
@@ -0,0 +1,95 @@
+ {
+ "dataset_revision": "13722b7320ef4b6a471f9e8b379f3f49167d0517",
+ "task_name": "RuToxicOKMLCUPClassification",
+ "mteb_version": "1.38.0",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.74645,
+ "f1": 0.744859,
+ "f1_weighted": 0.744859,
+ "ap": 0.684413,
+ "ap_weighted": 0.684413,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.771,
+ "f1": 0.770948,
+ "f1_weighted": 0.770948,
+ "ap": 0.706802,
+ "ap_weighted": 0.706802
+ },
+ {
+ "accuracy": 0.755,
+ "f1": 0.753217,
+ "f1_weighted": 0.753217,
+ "ap": 0.705843,
+ "ap_weighted": 0.705843
+ },
+ {
+ "accuracy": 0.7385,
+ "f1": 0.73445,
+ "f1_weighted": 0.73445,
+ "ap": 0.694791,
+ "ap_weighted": 0.694791
+ },
+ {
+ "accuracy": 0.73,
+ "f1": 0.724283,
+ "f1_weighted": 0.724283,
+ "ap": 0.656071,
+ "ap_weighted": 0.656071
+ },
+ {
+ "accuracy": 0.7345,
+ "f1": 0.732768,
+ "f1_weighted": 0.732768,
+ "ap": 0.664615,
+ "ap_weighted": 0.664615
+ },
+ {
+ "accuracy": 0.7425,
+ "f1": 0.741139,
+ "f1_weighted": 0.741139,
+ "ap": 0.672609,
+ "ap_weighted": 0.672609
+ },
+ {
+ "accuracy": 0.689,
+ "f1": 0.688999,
+ "f1_weighted": 0.688999,
+ "ap": 0.630364,
+ "ap_weighted": 0.630364
+ },
+ {
+ "accuracy": 0.768,
+ "f1": 0.767762,
+ "f1_weighted": 0.767762,
+ "ap": 0.710735,
+ "ap_weighted": 0.710735
+ },
+ {
+ "accuracy": 0.763,
+ "f1": 0.762991,
+ "f1_weighted": 0.762991,
+ "ap": 0.699849,
+ "ap_weighted": 0.699849
+ },
+ {
+ "accuracy": 0.773,
+ "f1": 0.772037,
+ "f1_weighted": 0.772037,
+ "ap": 0.702455,
+ "ap_weighted": 0.702455
+ }
+ ],
+ "main_score": 0.74645,
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 121.51295757293701,
+ "kg_co2_emissions": null
+ }
results/cointegrated__LaBSE-en-ru/cf0714e606d4af551e14ad69a7929cd6b0da7f7e/SentiRuEval2016.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "dataset_revision": "8507eab0deef37f040a750afbcb4dba7a7de9c16",
+ "task_name": "SentiRuEval2016",
+ "mteb_version": "1.38.0",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.5599,
+ "f1": 0.560088,
+ "f1_weighted": 0.560088,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.560333,
+ "f1": 0.561009,
+ "f1_weighted": 0.561009
+ },
+ {
+ "accuracy": 0.549333,
+ "f1": 0.548607,
+ "f1_weighted": 0.548607
+ },
+ {
+ "accuracy": 0.573667,
+ "f1": 0.573481,
+ "f1_weighted": 0.573481
+ },
+ {
+ "accuracy": 0.579667,
+ "f1": 0.581317,
+ "f1_weighted": 0.581317
+ },
+ {
+ "accuracy": 0.573667,
+ "f1": 0.578309,
+ "f1_weighted": 0.578309
+ },
+ {
+ "accuracy": 0.593667,
+ "f1": 0.586869,
+ "f1_weighted": 0.586869
+ },
+ {
+ "accuracy": 0.533667,
+ "f1": 0.534339,
+ "f1_weighted": 0.534339
+ },
+ {
+ "accuracy": 0.582,
+ "f1": 0.583939,
+ "f1_weighted": 0.583939
+ },
+ {
+ "accuracy": 0.535,
+ "f1": 0.535093,
+ "f1_weighted": 0.535093
+ },
+ {
+ "accuracy": 0.518,
+ "f1": 0.517913,
+ "f1_weighted": 0.517913
+ }
+ ],
+ "main_score": 0.5599,
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 210.0210063457489,
+ "kg_co2_emissions": null
+ }
results/cointegrated__LaBSE-en-ru/cf0714e606d4af551e14ad69a7929cd6b0da7f7e/XNLI.json CHANGED
@@ -1,105 +1,58 @@
  {
  "dataset_revision": "09698e0180d87dc247ca447d3a1248b931ac0cdb",
- "evaluation_time": 4.641160011291504,
- "kg_co2_emissions": null,
- "mteb_version": "1.12.85",
+ "task_name": "XNLI",
+ "mteb_version": "1.38.0",
  "scores": {
  "test": [
  {
- "cosine_accuracy": 0.6344322344322344,
- "cosine_accuracy_threshold": 0.5973379611968994,
- "cosine_ap": 0.6694155193073197,
- "cosine_f1": 0.6871165644171778,
- "cosine_f1_threshold": 0.47236740589141846,
- "cosine_precision": 0.5544554455445545,
- "cosine_recall": 0.9032258064516129,
- "dot_accuracy": 0.6344322344322344,
- "dot_accuracy_threshold": 0.5973379611968994,
- "dot_ap": 0.6694155193073197,
- "dot_f1": 0.6871165644171778,
- "dot_f1_threshold": 0.47236737608909607,
- "dot_precision": 0.5544554455445545,
- "dot_recall": 0.9032258064516129,
- "euclidean_accuracy": 0.6344322344322344,
- "euclidean_accuracy_threshold": 0.8973979949951172,
- "euclidean_ap": 0.6694155193073197,
- "euclidean_f1": 0.6871165644171778,
- "euclidean_f1_threshold": 1.0272610187530518,
- "euclidean_precision": 0.5544554455445545,
- "euclidean_recall": 0.9032258064516129,
+ "similarity_accuracy": 0.634432,
+ "similarity_accuracy_threshold": 0.597338,
+ "similarity_f1": 0.687117,
+ "similarity_f1_threshold": 0.472367,
+ "similarity_precision": 0.554455,
+ "similarity_recall": 0.903226,
+ "similarity_ap": 0.669416,
+ "cosine_accuracy": 0.634432,
+ "cosine_accuracy_threshold": 0.597338,
+ "cosine_f1": 0.687117,
+ "cosine_f1_threshold": 0.472367,
+ "cosine_precision": 0.554455,
+ "cosine_recall": 0.903226,
+ "cosine_ap": 0.669416,
+ "manhattan_accuracy": 0.629304,
+ "manhattan_accuracy_threshold": 18.583729,
+ "manhattan_f1": 0.684564,
+ "manhattan_f1_threshold": 22.675791,
+ "manhattan_precision": 0.553345,
+ "manhattan_recall": 0.897361,
+ "manhattan_ap": 0.6675,
+ "euclidean_accuracy": 0.634432,
+ "euclidean_accuracy_threshold": 0.897398,
+ "euclidean_f1": 0.687117,
+ "euclidean_f1_threshold": 1.027261,
+ "euclidean_precision": 0.554455,
+ "euclidean_recall": 0.903226,
+ "euclidean_ap": 0.669416,
+ "dot_accuracy": 0.634432,
+ "dot_accuracy_threshold": 0.597338,
+ "dot_f1": 0.687117,
+ "dot_f1_threshold": 0.472367,
+ "dot_precision": 0.554455,
+ "dot_recall": 0.903226,
+ "dot_ap": 0.669416,
+ "max_accuracy": 0.634432,
+ "max_f1": 0.687117,
+ "max_precision": 0.554455,
+ "max_recall": 0.903226,
+ "max_ap": 0.669416,
+ "main_score": 0.669416,
  "hf_subset": "ru",
  "languages": [
  "rus-Cyrl"
- ],
- "main_score": 0.6694155193073197,
- "manhattan_accuracy": 0.6293040293040293,
- "manhattan_accuracy_threshold": 18.583728790283203,
- "manhattan_ap": 0.6675004297025118,
- "manhattan_f1": 0.6845637583892618,
- "manhattan_f1_threshold": 22.675790786743164,
- "manhattan_precision": 0.5533453887884268,
- "manhattan_recall": 0.8973607038123167,
- "max_ap": 0.6694155193073197,
- "max_f1": 0.6871165644171778,
- "max_precision": 0.5544554455445545,
- "max_recall": 0.9032258064516129,
- "similarity_accuracy": 0.6344322344322344,
- "similarity_accuracy_threshold": 0.5973379611968994,
- "similarity_ap": 0.6694155193073197,
- "similarity_f1": 0.6871165644171778,
- "similarity_f1_threshold": 0.47236740589141846,
- "similarity_precision": 0.5544554455445545,
- "similarity_recall": 0.9032258064516129
- }
- ],
- "validation": [
- {
- "cosine_accuracy": 0.6461538461538462,
- "cosine_accuracy_threshold": 0.624803900718689,
- "cosine_ap": 0.6932765196531081,
- "cosine_f1": 0.6757641921397379,
- "cosine_f1_threshold": 0.4452674388885498,
- "cosine_precision": 0.5382608695652173,
- "cosine_recall": 0.907624633431085,
- "dot_accuracy": 0.6461538461538462,
- "dot_accuracy_threshold": 0.6248039603233337,
- "dot_ap": 0.6932765196531081,
- "dot_f1": 0.6757641921397379,
- "dot_f1_threshold": 0.4452674090862274,
- "dot_precision": 0.5382608695652173,
- "dot_recall": 0.907624633431085,
- "euclidean_accuracy": 0.6461538461538462,
- "euclidean_accuracy_threshold": 0.8662517070770264,
- "euclidean_ap": 0.6932765196531081,
- "euclidean_f1": 0.6757641921397379,
- "euclidean_f1_threshold": 1.0533115863800049,
- "euclidean_precision": 0.5382608695652173,
- "euclidean_recall": 0.907624633431085,
- "hf_subset": "ru",
- "languages": [
- "rus-Cyrl"
- ],
- "main_score": 0.6932765196531081,
- "manhattan_accuracy": 0.6417582417582418,
- "manhattan_accuracy_threshold": 18.69073486328125,
- "manhattan_ap": 0.6896549576521831,
- "manhattan_f1": 0.6727169597277368,
- "manhattan_f1_threshold": 22.60824966430664,
- "manhattan_precision": 0.5485661424606846,
- "manhattan_recall": 0.8695014662756598,
- "max_ap": 0.6932765196531081,
- "max_f1": 0.6757641921397379,
- "max_precision": 0.5485661424606846,
- "max_recall": 0.907624633431085,
- "similarity_accuracy": 0.6461538461538462,
- "similarity_accuracy_threshold": 0.6248039603233337,
- "similarity_ap": 0.6932765196531081,
- "similarity_f1": 0.6757641921397379,
- "similarity_f1_threshold": 0.4452674090862274,
- "similarity_precision": 0.5382608695652173,
- "similarity_recall": 0.907624633431085
+ ]
  }
  ]
  },
- "task_name": "XNLI"
+ "evaluation_time": 103.16753029823303,
+ "kg_co2_emissions": null
  }
results/cointegrated__LaBSE-en-ru/cf0714e606d4af551e14ad69a7929cd6b0da7f7e/model_meta.json CHANGED
@@ -1 +1 @@
- {"name": "cointegrated/LaBSE-en-ru", "revision": "cf0714e606d4af551e14ad69a7929cd6b0da7f7e", "release_date": "2021-06-10", "languages": ["rus_Cyrl"], "n_parameters": 129000000, "memory_usage_mb": 492.0, "max_tokens": 512.0, "embed_dim": 768, "license": "Not specified", "open_weights": true, "public_training_code": "https://colab.research.google.com/drive/1dnPRn0-ugj3vZgSpyCC9sgslM2SuSfHy?usp=sharing", "public_training_data": null, "framework": ["Sentence Transformers", "PyTorch"], "reference": "https://huggingface.co/cointegrated/LaBSE-en-ru", "similarity_fn_name": "cosine", "use_instructions": false, "training_datasets": null, "adapted_from": "sentence-transformers/LaBSE", "superseded_by": null, "modalities": ["text"], "loader": null}
+ {"name": "cointegrated/LaBSE-en-ru", "revision": "cf0714e606d4af551e14ad69a7929cd6b0da7f7e", "release_date": "2021-06-10", "languages": ["rus-Cyrl"], "n_parameters": 129000000, "memory_usage_mb": 492.0, "max_tokens": 512.0, "embed_dim": 768, "license": "not specified", "open_weights": true, "public_training_code": "https://colab.research.google.com/drive/1dnPRn0-ugj3vZgSpyCC9sgslM2SuSfHy?usp=sharing", "public_training_data": null, "framework": ["Sentence Transformers", "PyTorch"], "reference": "https://huggingface.co/cointegrated/LaBSE-en-ru", "similarity_fn_name": "cosine", "use_instructions": false, "training_datasets": {}, "adapted_from": "sentence-transformers/LaBSE", "superseded_by": null, "is_cross_encoder": null, "modalities": ["text"], "loader": null}
results/deepvk__USER-bge-m3/0cc6cfe48e260fb0474c753087a69369e88709ae/InappropriatenessClassificationv2.json ADDED
@@ -0,0 +1,95 @@
+ {
+ "dataset_revision": "698cb161a90150ec46618f714cdd8606cf21a9eb",
+ "task_name": "InappropriatenessClassificationv2",
+ "mteb_version": "1.38.0",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.583967,
+ "f1": 0.551146,
+ "f1_weighted": 0.597345,
+ "ap": 0.337004,
+ "ap_weighted": 0.337004,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.58,
+ "f1": 0.555817,
+ "f1_weighted": 0.597965,
+ "ap": 0.338809,
+ "ap_weighted": 0.338809
+ },
+ {
+ "accuracy": 0.597333,
+ "f1": 0.564992,
+ "f1_weighted": 0.613227,
+ "ap": 0.340589,
+ "ap_weighted": 0.340589
+ },
+ {
+ "accuracy": 0.583,
+ "f1": 0.53005,
+ "f1_weighted": 0.5942,
+ "ap": 0.313211,
+ "ap_weighted": 0.313211
+ },
+ {
+ "accuracy": 0.643667,
+ "f1": 0.600611,
+ "f1_weighted": 0.653939,
+ "ap": 0.36245,
+ "ap_weighted": 0.36245
+ },
+ {
+ "accuracy": 0.627667,
+ "f1": 0.546527,
+ "f1_weighted": 0.624534,
+ "ap": 0.320252,
+ "ap_weighted": 0.320252
+ },
+ {
+ "accuracy": 0.568667,
+ "f1": 0.551435,
+ "f1_weighted": 0.587188,
+ "ap": 0.341018,
+ "ap_weighted": 0.341018
+ },
+ {
+ "accuracy": 0.607333,
+ "f1": 0.590629,
+ "f1_weighted": 0.624258,
+ "ap": 0.370165,
+ "ap_weighted": 0.370165
+ },
+ {
+ "accuracy": 0.480333,
+ "f1": 0.47835,
+ "f1_weighted": 0.491431,
+ "ap": 0.31753,
+ "ap_weighted": 0.31753
+ },
+ {
+ "accuracy": 0.621333,
+ "f1": 0.592188,
+ "f1_weighted": 0.636524,
+ "ap": 0.361876,
+ "ap_weighted": 0.361876
+ },
+ {
+ "accuracy": 0.530333,
+ "f1": 0.50086,
+ "f1_weighted": 0.550185,
+ "ap": 0.304137,
+ "ap_weighted": 0.304137
+ }
+ ],
+ "main_score": 0.583967,
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 729.2099950313568,
+ "kg_co2_emissions": null
+ }
results/deepvk__USER-bge-m3/0cc6cfe48e260fb0474c753087a69369e88709ae/RUParaPhraserSTS.json CHANGED
@@ -1,32 +1,26 @@
  {
  "dataset_revision": "43265056790b8f7c59e0139acb4be0a8dad2c8f4",
- "evaluation_time": 7.859699964523315,
- "kg_co2_emissions": null,
- "mteb_version": "1.12.89",
+ "task_name": "RUParaPhraserSTS",
+ "mteb_version": "1.38.0",
  "scores": {
  "test": [
  {
- "cosine_pearson": 0.7041190853920707,
- "cosine_spearman": 0.7636316983639839,
- "euclidean_pearson": 0.739314163032501,
- "euclidean_spearman": 0.7636316983639839,
+ "pearson": 0.704119,
+ "spearman": 0.763632,
+ "cosine_pearson": 0.704119,
+ "cosine_spearman": 0.763632,
+ "manhattan_pearson": 0.738667,
+ "manhattan_spearman": 0.762936,
+ "euclidean_pearson": 0.739314,
+ "euclidean_spearman": 0.763632,
+ "main_score": 0.763632,
  "hf_subset": "default",
  "languages": [
  "rus-Cyrl"
- ],
- "main_score": 0.7636316983639839,
- "manhattan_pearson": 0.7386674297478613,
- "manhattan_spearman": 0.7629341039032052,
- "pearson": [
- 0.7041190975887106,
- 4.235954969321406e-288
- ],
- "spearman": [
- 0.7636316983639839,
- 0.0
  ]
  }
  ]
  },
- "task_name": "RUParaPhraserSTS"
+ "evaluation_time": 442.6190392971039,
+ "kg_co2_emissions": null
  }
results/deepvk__USER-bge-m3/0cc6cfe48e260fb0474c753087a69369e88709ae/RuNLUIntentClassification.json ADDED
@@ -0,0 +1,136 @@
+ {
+ "dataset_revision": "424d0f767aaa5c411e3a529eec04658e5726a39e",
+ "task_name": "RuNLUIntentClassification",
+ "mteb_version": "1.38.0",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.67526,
+ "f1": 0.622438,
+ "f1_weighted": 0.661377,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.6668,
+ "f1": 0.61536,
+ "f1_weighted": 0.654751
+ },
+ {
+ "accuracy": 0.6674,
+ "f1": 0.612932,
+ "f1_weighted": 0.654284
+ },
+ {
+ "accuracy": 0.6704,
+ "f1": 0.622815,
+ "f1_weighted": 0.656435
+ },
+ {
+ "accuracy": 0.6688,
+ "f1": 0.617273,
+ "f1_weighted": 0.654483
+ },
+ {
+ "accuracy": 0.6614,
+ "f1": 0.612942,
+ "f1_weighted": 0.644385
+ },
+ {
+ "accuracy": 0.6862,
+ "f1": 0.632888,
+ "f1_weighted": 0.676347
+ },
+ {
+ "accuracy": 0.7052,
+ "f1": 0.642786,
+ "f1_weighted": 0.692953
+ },
+ {
+ "accuracy": 0.6932,
+ "f1": 0.633756,
+ "f1_weighted": 0.679369
+ },
+ {
+ "accuracy": 0.6652,
+ "f1": 0.61899,
+ "f1_weighted": 0.65066
+ },
+ {
+ "accuracy": 0.668,
+ "f1": 0.614636,
+ "f1_weighted": 0.650105
+ }
+ ],
+ "main_score": 0.67526,
+ "hf_subset": "rus-eng",
+ "languages": [
+ "rus-Cyrl",
+ "rus-Latn"
+ ]
+ },
+ {
+ "accuracy": 0.66502,
+ "f1": 0.614131,
+ "f1_weighted": 0.649639,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.6586,
+ "f1": 0.609867,
+ "f1_weighted": 0.645076
+ },
+ {
+ "accuracy": 0.651,
+ "f1": 0.602708,
+ "f1_weighted": 0.633525
+ },
+ {
+ "accuracy": 0.6524,
+ "f1": 0.606122,
+ "f1_weighted": 0.634037
+ },
+ {
+ "accuracy": 0.6678,
+ "f1": 0.610612,
+ "f1_weighted": 0.65469
+ },
+ {
+ "accuracy": 0.6546,
+ "f1": 0.609156,
+ "f1_weighted": 0.63422
+ },
+ {
+ "accuracy": 0.6688,
+ "f1": 0.616936,
+ "f1_weighted": 0.657347
+ },
+ {
+ "accuracy": 0.6986,
+ "f1": 0.631111,
+ "f1_weighted": 0.688726
+ },
+ {
+ "accuracy": 0.6876,
+ "f1": 0.625106,
+ "f1_weighted": 0.675906
+ },
+ {
+ "accuracy": 0.6494,
+ "f1": 0.612241,
+ "f1_weighted": 0.630208
+ },
+ {
+ "accuracy": 0.6614,
+ "f1": 0.617452,
+ "f1_weighted": 0.642658
+ }
+ ],
+ "main_score": 0.66502,
+ "hf_subset": "rus",
+ "languages": [
+ "rus-Cyrl"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 1643.1368434429169,
+ "kg_co2_emissions": null
+ }
results/deepvk__USER-bge-m3/0cc6cfe48e260fb0474c753087a69369e88709ae/RuSTSBenchmarkSTS.json CHANGED
@@ -1,32 +1,26 @@
  {
  "dataset_revision": "7cf24f325c6da6195df55bef3d86b5e0616f3018",
- "evaluation_time": 5.600545883178711,
- "kg_co2_emissions": null,
- "mteb_version": "1.12.89",
+ "task_name": "RuSTSBenchmarkSTS",
+ "mteb_version": "1.38.0",
  "scores": {
  "test": [
  {
- "cosine_pearson": 0.828842342855342,
- "cosine_spearman": 0.8335322116369309,
- "euclidean_pearson": 0.8177980000904476,
- "euclidean_spearman": 0.8335366906064878,
+ "pearson": 0.828842,
+ "spearman": 0.833536,
+ "cosine_pearson": 0.828842,
+ "cosine_spearman": 0.833532,
+ "manhattan_pearson": 0.817466,
+ "manhattan_spearman": 0.833313,
+ "euclidean_pearson": 0.817798,
+ "euclidean_spearman": 0.833533,
+ "main_score": 0.833532,
  "hf_subset": "default",
  "languages": [
  "rus-Cyrl"
- ],
- "main_score": 0.8335322116369309,
- "manhattan_pearson": 0.8174659748970756,
- "manhattan_spearman": 0.8333153016100148,
- "pearson": [
- 0.8288423554170269,
- 1.3775e-320
- ],
- "spearman": [
- 0.8335403744396689,
- 0.0
  ]
  }
  ]
  },
- "task_name": "RuSTSBenchmarkSTS"
+ "evaluation_time": 286.582839012146,
+ "kg_co2_emissions": null
  }
results/deepvk__USER-bge-m3/0cc6cfe48e260fb0474c753087a69369e88709ae/RuToxicOKMLCUPClassification.json ADDED
@@ -0,0 +1,95 @@
+ {
+ "dataset_revision": "13722b7320ef4b6a471f9e8b379f3f49167d0517",
+ "task_name": "RuToxicOKMLCUPClassification",
+ "mteb_version": "1.38.0",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.8271,
+ "f1": 0.826442,
+ "f1_weighted": 0.826442,
+ "ap": 0.766108,
+ "ap_weighted": 0.766108,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.8345,
+ "f1": 0.833697,
+ "f1_weighted": 0.833697,
+ "ap": 0.765486,
+ "ap_weighted": 0.765486
+ },
+ {
+ "accuracy": 0.8575,
+ "f1": 0.857498,
+ "f1_weighted": 0.857498,
+ "ap": 0.807457,
+ "ap_weighted": 0.807457
+ },
+ {
+ "accuracy": 0.817,
+ "f1": 0.816749,
+ "f1_weighted": 0.816749,
+ "ap": 0.767019,
+ "ap_weighted": 0.767019
+ },
+ {
+ "accuracy": 0.8175,
+ "f1": 0.815925,
+ "f1_weighted": 0.815925,
+ "ap": 0.743819,
+ "ap_weighted": 0.743819
+ },
+ {
+ "accuracy": 0.816,
+ "f1": 0.814395,
+ "f1_weighted": 0.814395,
+ "ap": 0.742196,
+ "ap_weighted": 0.742196
+ },
+ {
+ "accuracy": 0.8145,
+ "f1": 0.813749,
+ "f1_weighted": 0.813749,
+ "ap": 0.745014,
+ "ap_weighted": 0.745014
+ },
+ {
+ "accuracy": 0.8075,
+ "f1": 0.806861,
+ "f1_weighted": 0.806861,
+ "ap": 0.760593,
+ "ap_weighted": 0.760593
+ },
+ {
+ "accuracy": 0.829,
+ "f1": 0.828901,
+ "f1_weighted": 0.828901,
+ "ap": 0.778199,
+ "ap_weighted": 0.778199
+ },
+ {
+ "accuracy": 0.84,
+ "f1": 0.839977,
+ "f1_weighted": 0.839977,
+ "ap": 0.782891,
+ "ap_weighted": 0.782891
+ },
+ {
+ "accuracy": 0.8375,
+ "f1": 0.836665,
+ "f1_weighted": 0.836665,
+ "ap": 0.768406,
+ "ap_weighted": 0.768406
+ }
+ ],
+ "main_score": 0.8271,
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 415.5862648487091,
+ "kg_co2_emissions": null
+ }
results/deepvk__USER-bge-m3/0cc6cfe48e260fb0474c753087a69369e88709ae/SentiRuEval2016.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "dataset_revision": "8507eab0deef37f040a750afbcb4dba7a7de9c16",
+ "task_name": "SentiRuEval2016",
+ "mteb_version": "1.38.0",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.700367,
+ "f1": 0.698701,
+ "f1_weighted": 0.698701,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.649667,
+ "f1": 0.653762,
+ "f1_weighted": 0.653762
+ },
+ {
+ "accuracy": 0.693333,
+ "f1": 0.694562,
+ "f1_weighted": 0.694562
+ },
+ {
+ "accuracy": 0.714667,
+ "f1": 0.708116,
+ "f1_weighted": 0.708116
+ },
+ {
+ "accuracy": 0.732667,
+ "f1": 0.728508,
+ "f1_weighted": 0.728508
+ },
+ {
+ "accuracy": 0.751,
+ "f1": 0.746931,
+ "f1_weighted": 0.746931
+ },
+ {
+ "accuracy": 0.706667,
+ "f1": 0.702989,
+ "f1_weighted": 0.702989
+ },
+ {
+ "accuracy": 0.688,
+ "f1": 0.689836,
+ "f1_weighted": 0.689836
+ },
+ {
+ "accuracy": 0.675667,
+ "f1": 0.67818,
+ "f1_weighted": 0.67818
+ },
+ {
+ "accuracy": 0.680667,
+ "f1": 0.674608,
+ "f1_weighted": 0.674608
+ },
+ {
+ "accuracy": 0.711333,
+ "f1": 0.709519,
+ "f1_weighted": 0.709519
+ }
+ ],
+ "main_score": 0.700367,
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 685.6424822807312,
+ "kg_co2_emissions": null
+ }
results/deepvk__USER-bge-m3/0cc6cfe48e260fb0474c753087a69369e88709ae/XNLI.json CHANGED
@@ -1,105 +1,58 @@
  {
  "dataset_revision": "09698e0180d87dc247ca447d3a1248b931ac0cdb",
- "evaluation_time": 11.323561191558838,
- "kg_co2_emissions": null,
- "mteb_version": "1.12.89",
+ "task_name": "XNLI",
+ "mteb_version": "1.38.0",
  "scores": {
  "test": [
  {
- "cosine_accuracy": 0.8183150183150183,
- "cosine_accuracy_threshold": 0.6379859447479248,
- "cosine_ap": 0.881087728421007,
- "cosine_f1": 0.8187290969899664,
- "cosine_f1_threshold": 0.5825437307357788,
- "cosine_precision": 0.7527675276752768,
- "cosine_recall": 0.8973607038123167,
- "dot_accuracy": 0.8183150183150183,
- "dot_accuracy_threshold": 0.6379859447479248,
- "dot_ap": 0.8810877284210071,
- "dot_f1": 0.8187290969899664,
- "dot_f1_threshold": 0.5825437307357788,
- "dot_precision": 0.7527675276752768,
- "dot_recall": 0.8973607038123167,
- "euclidean_accuracy": 0.8183150183150183,
- "euclidean_accuracy_threshold": 0.8508976697921753,
- "euclidean_ap": 0.8810877284210071,
- "euclidean_f1": 0.8187290969899664,
- "euclidean_f1_threshold": 0.9137352705001831,
- "euclidean_precision": 0.7527675276752768,
- "euclidean_recall": 0.8973607038123167,
+ "similarity_accuracy": 0.818315,
+ "similarity_accuracy_threshold": 0.637986,
+ "similarity_f1": 0.818729,
+ "similarity_f1_threshold": 0.582544,
+ "similarity_precision": 0.752768,
+ "similarity_recall": 0.897361,
+ "similarity_ap": 0.881088,
+ "cosine_accuracy": 0.818315,
+ "cosine_accuracy_threshold": 0.637986,
+ "cosine_f1": 0.818729,
+ "cosine_f1_threshold": 0.582544,
+ "cosine_precision": 0.752768,
+ "cosine_recall": 0.897361,
+ "cosine_ap": 0.881088,
+ "manhattan_accuracy": 0.816117,
+ "manhattan_accuracy_threshold": 21.465694,
+ "manhattan_f1": 0.813245,
+ "manhattan_f1_threshold": 23.237514,
+ "manhattan_precision": 0.741546,
+ "manhattan_recall": 0.900293,
+ "manhattan_ap": 0.87898,
+ "euclidean_accuracy": 0.818315,
+ "euclidean_accuracy_threshold": 0.850898,
+ "euclidean_f1": 0.818729,
+ "euclidean_f1_threshold": 0.913736,
+ "euclidean_precision": 0.752768,
+ "euclidean_recall": 0.897361,
+ "euclidean_ap": 0.881088,
+ "dot_accuracy": 0.818315,
+ "dot_accuracy_threshold": 0.637986,
+ "dot_f1": 0.818729,
+ "dot_f1_threshold": 0.582544,
+ "dot_precision": 0.752768,
+ "dot_recall": 0.897361,
+ "dot_ap": 0.881088,
+ "max_accuracy": 0.818315,
+ "max_f1": 0.818729,
+ "max_precision": 0.752768,
+ "max_recall": 0.900293,
+ "max_ap": 0.881088,
+ "main_score": 0.881088,
  "hf_subset": "ru",
  "languages": [
  "rus-Cyrl"
- ],
- "main_score": 0.8810886544496116,
- "manhattan_accuracy": 0.8161172161172161,
- "manhattan_accuracy_threshold": 21.465694427490234,
- "manhattan_ap": 0.8789801519259974,
- "manhattan_f1": 0.8132450331125829,
- "manhattan_f1_threshold": 23.237516403198242,
- "manhattan_precision": 0.7415458937198067,
- "manhattan_recall": 0.9002932551319648,
- "max_ap": 0.8810886544496116,
- "max_f1": 0.8187290969899664,
- "max_precision": 0.7527675276752768,
- "max_recall": 0.9002932551319648,
- "similarity_accuracy": 0.8183150183150183,
- "similarity_accuracy_threshold": 0.6379860043525696,
- "similarity_ap": 0.8810886544496116,
- "similarity_f1": 0.8187290969899664,
- "similarity_f1_threshold": 0.5825437903404236,
- "similarity_precision": 0.7527675276752768,
- "similarity_recall": 0.8973607038123167
- }
- ],
- "validation": [
- {
- "cosine_accuracy": 0.8183150183150183,
- "cosine_accuracy_threshold": 0.5942411422729492,
- "cosine_ap": 0.8899707625260412,
- "cosine_f1": 0.8277777777777778,
- "cosine_f1_threshold": 0.5928568840026855,
- "cosine_precision": 0.7862796833773087,
- "cosine_recall": 0.873900293255132,
- "dot_accuracy": 0.8183150183150183,
- "dot_accuracy_threshold": 0.5942410826683044,
- "dot_ap": 0.8899707625260412,
- "dot_f1": 0.8277777777777778,
- "dot_f1_threshold": 0.5928568840026855,
- "dot_precision": 0.7862796833773087,
- "dot_recall": 0.873900293255132,
- "euclidean_accuracy": 0.8183150183150183,
- "euclidean_accuracy_threshold": 0.9008427262306213,
- "euclidean_ap": 0.8899707625260412,
- "euclidean_f1": 0.8277777777777778,
- "euclidean_f1_threshold": 0.9023779630661011,
- "euclidean_precision": 0.7862796833773087,
- "euclidean_recall": 0.873900293255132,
- "hf_subset": "ru",
- "languages": [
- "rus-Cyrl"
- ],
- "main_score": 0.8899707625260412,
- "manhattan_accuracy": 0.8153846153846154,
- "manhattan_accuracy_threshold": 22.59930419921875,
- "manhattan_ap": 0.8891830849017338,
- "manhattan_f1": 0.8236914600550964,
- "manhattan_f1_threshold": 22.93381118774414,
- "manhattan_precision": 0.7766233766233767,
- "manhattan_recall": 0.8768328445747801,
- "max_ap": 0.8899707625260412,
- "max_f1": 0.8277777777777778,
- "max_precision": 0.7862796833773087,
- "max_recall": 0.8768328445747801,
- "similarity_accuracy": 0.8183150183150183,
- "similarity_accuracy_threshold": 0.594241201877594,
- "similarity_ap": 0.8899707625260412,
- "similarity_f1": 0.8277777777777778,
- "similarity_f1_threshold": 0.5928570032119751,
- "similarity_precision": 0.7862796833773087,
- "similarity_recall": 0.873900293255132
+ ]
  }
  ]
  },
- "task_name": "XNLI"
+ "evaluation_time": 358.2322008609772,
+ "kg_co2_emissions": null
  }
results/deepvk__USER-bge-m3/0cc6cfe48e260fb0474c753087a69369e88709ae/model_meta.json CHANGED
@@ -1 +1 @@
- {"name": "deepvk/USER-bge-m3", "revision": "0cc6cfe48e260fb0474c753087a69369e88709ae", "release_date": "2024-07-05", "languages": ["rus_Cyrl"], "n_parameters": 359026688, "max_tokens": 8194.0, "embed_dim": 1024, "license": "apache-2.0", "open_weights": true, "public_training_code": null, "framework": ["PyTorch", "Sentence Transformers"], "reference": "https://huggingface.co/deepvk/USER-bge-m3", "similarity_fn_name": "cosine", "use_instructions": null, "training_datasets": {"MIRACLRetrieval": ["train"], "MIRACLRetrievalHardNegatives": ["train"], "MIRACLReranking": ["train"], "LeCaRDv2": ["train"], "CMedQAv1-reranking": ["train"], "CMedQAv2-reranking": ["train"], "MrTidyRetrieval": ["train"], "T2Reranking": ["train"], "MSMARCO": ["train"], "MSMARCOHardNegatives": ["train"], "NanoMSMARCORetrieval": ["train"], "MSMARCO-PL": ["train"], "NQ": ["train"], "NQHardNegatives": ["train"], "NanoNQRetrieval": ["train"], "NQ-PL": ["train"], "HotpotQA": ["train"], "HotpotQA-PL": ["train"], "HotpotQAHardNegatives": ["train"]}, "adapted_from": "USER-bge-m3", "superseded_by": null, "loader": null}
+ {"name": "deepvk/USER-bge-m3", "revision": "0cc6cfe48e260fb0474c753087a69369e88709ae", "release_date": "2024-07-05", "languages": ["rus-Cyrl"], "n_parameters": 359026688, "memory_usage_mb": 1370.0, "max_tokens": 8194.0, "embed_dim": 1024, "license": "apache-2.0", "open_weights": true, "public_training_code": null, "public_training_data": null, "framework": ["Sentence Transformers", "PyTorch"], "reference": "https://huggingface.co/deepvk/USER-base", "similarity_fn_name": "cosine", "use_instructions": false, "training_datasets": {"BibleNLPBitextMining": ["train"], "MLSUMClusteringP2P": ["train"], "MLSUMClusteringP2P.v2": ["train"], "MLSUMClusteringS2S": ["train"], "MLSUMClusteringS2S.v2": ["train"]}, "adapted_from": "BAAI/bge-m3", "superseded_by": null, "is_cross_encoder": null, "modalities": ["text"], "loader": "sentence_transformers_loader"}
results/sergeyzh__rubert-tiny-turbo/8ce0cf757446ce9bb2d5f5a4ac8103c7a1049054/InappropriatenessClassificationv2.json ADDED
@@ -0,0 +1,95 @@
+ {
+ "dataset_revision": "698cb161a90150ec46618f714cdd8606cf21a9eb",
+ "task_name": "InappropriatenessClassificationv2",
+ "mteb_version": "1.38.0",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.5511,
+ "f1": 0.527286,
+ "f1_weighted": 0.565115,
+ "ap": 0.327773,
+ "ap_weighted": 0.327773,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.554667,
+ "f1": 0.533433,
+ "f1_weighted": 0.57391,
+ "ap": 0.32637,
+ "ap_weighted": 0.32637
+ },
+ {
+ "accuracy": 0.578333,
+ "f1": 0.533396,
+ "f1_weighted": 0.592283,
+ "ap": 0.316676,
+ "ap_weighted": 0.316676
+ },
+ {
+ "accuracy": 0.541667,
+ "f1": 0.505145,
+ "f1_weighted": 0.559815,
+ "ap": 0.303982,
+ "ap_weighted": 0.303982
+ },
+ {
+ "accuracy": 0.596667,
+ "f1": 0.567585,
+ "f1_weighted": 0.613189,
+ "ap": 0.343977,
+ "ap_weighted": 0.343977
+ },
+ {
+ "accuracy": 0.609333,
+ "f1": 0.553121,
+ "f1_weighted": 0.617575,
+ "ap": 0.326082,
+ "ap_weighted": 0.326082
+ },
+ {
+ "accuracy": 0.522667,
+ "f1": 0.51871,
+ "f1_weighted": 0.536456,
+ "ap": 0.33736,
+ "ap_weighted": 0.33736
+ },
+ {
+ "accuracy": 0.604667,
+ "f1": 0.58107,
+ "f1_weighted": 0.621503,
+ "ap": 0.357009,
+ "ap_weighted": 0.357009
+ },
+ {
+ "accuracy": 0.437667,
+ "f1": 0.437634,
+ "f1_weighted": 0.43588,
+ "ap": 0.309056,
+ "ap_weighted": 0.309056
+ },
+ {
+ "accuracy": 0.520333,
+ "f1": 0.514494,
+ "f1_weighted": 0.536147,
+ "ap": 0.330724,
+ "ap_weighted": 0.330724
+ },
+ {
+ "accuracy": 0.545,
+ "f1": 0.528268,
+ "f1_weighted": 0.564397,
+ "ap": 0.326489,
+ "ap_weighted": 0.326489
+ }
+ ],
+ "main_score": 0.5511,
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 9.736698389053345,
+ "kg_co2_emissions": null
+ }
results/sergeyzh__rubert-tiny-turbo/8ce0cf757446ce9bb2d5f5a4ac8103c7a1049054/RUParaPhraserSTS.json CHANGED
@@ -1,32 +1,26 @@
  {
  "dataset_revision": "43265056790b8f7c59e0139acb4be0a8dad2c8f4",
- "evaluation_time": 0.4717836380004883,
- "kg_co2_emissions": null,
- "mteb_version": "1.12.49",
+ "task_name": "RUParaPhraserSTS",
+ "mteb_version": "1.38.0",
  "scores": {
  "test": [
  {
- "cosine_pearson": 0.6517293362215222,
- "cosine_spearman": 0.7214872507255558,
- "euclidean_pearson": 0.6939028550512482,
- "euclidean_spearman": 0.7214872507255558,
+ "pearson": 0.651729,
+ "spearman": 0.721487,
+ "cosine_pearson": 0.651729,
+ "cosine_spearman": 0.721487,
+ "manhattan_pearson": 0.693093,
+ "manhattan_spearman": 0.720493,
+ "euclidean_pearson": 0.693903,
+ "euclidean_spearman": 0.721488,
+ "main_score": 0.721487,
  "hf_subset": "default",
  "languages": [
  "rus-Cyrl"
- ],
- "main_score": 0.7214872507255558,
- "manhattan_pearson": 0.6930934614737492,
- "manhattan_spearman": 0.7204933049290007,
- "pearson": [
- 0.6517293500848672,
- 4.645388381232623e-233
- ],
- "spearman": [
- 0.7214872507255558,
- 3.99259112703708e-309
  ]
  }
  ]
  },
- "task_name": "RUParaPhraserSTS"
+ "evaluation_time": 4.93528151512146,
+ "kg_co2_emissions": null
  }
results/sergeyzh__rubert-tiny-turbo/8ce0cf757446ce9bb2d5f5a4ac8103c7a1049054/RuNLUIntentClassification.json ADDED
@@ -0,0 +1,136 @@
+ {
+ "dataset_revision": "424d0f767aaa5c411e3a529eec04658e5726a39e",
+ "task_name": "RuNLUIntentClassification",
+ "mteb_version": "1.38.0",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.4724,
+ "f1": 0.392756,
+ "f1_weighted": 0.455779,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.4464,
+ "f1": 0.368174,
+ "f1_weighted": 0.434366
+ },
+ {
+ "accuracy": 0.4618,
+ "f1": 0.388816,
+ "f1_weighted": 0.449327
+ },
+ {
+ "accuracy": 0.4648,
+ "f1": 0.377578,
+ "f1_weighted": 0.448245
+ },
+ {
+ "accuracy": 0.4958,
+ "f1": 0.419202,
+ "f1_weighted": 0.486174
+ },
+ {
+ "accuracy": 0.4756,
+ "f1": 0.404469,
+ "f1_weighted": 0.452901
+ },
+ {
+ "accuracy": 0.4906,
+ "f1": 0.398144,
+ "f1_weighted": 0.481159
+ },
+ {
+ "accuracy": 0.5142,
+ "f1": 0.419423,
+ "f1_weighted": 0.50309
+ },
+ {
+ "accuracy": 0.4686,
+ "f1": 0.393683,
+ "f1_weighted": 0.444535
+ },
+ {
+ "accuracy": 0.4524,
+ "f1": 0.391621,
+ "f1_weighted": 0.432915
+ },
+ {
+ "accuracy": 0.4538,
+ "f1": 0.366447,
+ "f1_weighted": 0.425076
+ }
+ ],
+ "main_score": 0.4724,
+ "hf_subset": "rus-eng",
+ "languages": [
+ "rus-Cyrl",
+ "rus-Latn"
+ ]
+ },
+ {
+ "accuracy": 0.56762,
+ "f1": 0.500122,
+ "f1_weighted": 0.545324,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.556,
+ "f1": 0.487812,
+ "f1_weighted": 0.533283
+ },
+ {
+ "accuracy": 0.5606,
+ "f1": 0.489978,
+ "f1_weighted": 0.533402
+ },
+ {
+ "accuracy": 0.5554,
+ "f1": 0.496122,
+ "f1_weighted": 0.530499
+ },
+ {
+ "accuracy": 0.5734,
+ "f1": 0.491798,
+ "f1_weighted": 0.552881
+ },
+ {
+ "accuracy": 0.5622,
+ "f1": 0.50382,
+ "f1_weighted": 0.539404
+ },
+ {
+ "accuracy": 0.574,
+ "f1": 0.499504,
+ "f1_weighted": 0.556207
+ },
+ {
+ "accuracy": 0.6064,
+ "f1": 0.527689,
+ "f1_weighted": 0.593617
+ },
+ {
+ "accuracy": 0.5708,
+ "f1": 0.508466,
+ "f1_weighted": 0.548251
+ },
+ {
+ "accuracy": 0.5522,
+ "f1": 0.493897,
+ "f1_weighted": 0.521955
+ },
+ {
+ "accuracy": 0.5652,
+ "f1": 0.502135,
+ "f1_weighted": 0.543745
+ }
+ ],
+ "main_score": 0.56762,
+ "hf_subset": "rus",
+ "languages": [
+ "rus-Cyrl"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 28.75963258743286,
+ "kg_co2_emissions": null
+ }
results/sergeyzh__rubert-tiny-turbo/8ce0cf757446ce9bb2d5f5a4ac8103c7a1049054/RuSTSBenchmarkSTS.json CHANGED
@@ -1,32 +1,26 @@
  {
  "dataset_revision": "7cf24f325c6da6195df55bef3d86b5e0616f3018",
- "evaluation_time": 0.3050382137298584,
- "kg_co2_emissions": null,
- "mteb_version": "1.12.49",
+ "task_name": "RuSTSBenchmarkSTS",
+ "mteb_version": "1.38.0",
  "scores": {
  "test": [
  {
- "cosine_pearson": 0.7867181755069355,
- "cosine_spearman": 0.7848157070388886,
- "euclidean_pearson": 0.7816400243944963,
- "euclidean_spearman": 0.7848124817526004,
+ "pearson": 0.786718,
+ "spearman": 0.784816,
+ "cosine_pearson": 0.786718,
+ "cosine_spearman": 0.784816,
+ "manhattan_pearson": 0.780444,
+ "manhattan_spearman": 0.783429,
+ "euclidean_pearson": 0.78164,
+ "euclidean_spearman": 0.784813,
+ "main_score": 0.784816,
  "hf_subset": "default",
  "languages": [
  "rus-Cyrl"
- ],
- "main_score": 0.7848157070388886,
- "manhattan_pearson": 0.7804437263885238,
- "manhattan_spearman": 0.7834292373482942,
- "pearson": [
- 0.7867181753606889,
- 1.1817960468203117e-266
- ],
- "spearman": [
- 0.7848127666181108,
- 1.6539709393469749e-264
  ]
  }
  ]
  },
- "task_name": "RuSTSBenchmarkSTS"
+ "evaluation_time": 3.4048428535461426,
+ "kg_co2_emissions": null
  }
results/sergeyzh__rubert-tiny-turbo/8ce0cf757446ce9bb2d5f5a4ac8103c7a1049054/RuToxicOKMLCUPClassification.json ADDED
@@ -0,0 +1,95 @@
+ {
+ "dataset_revision": "13722b7320ef4b6a471f9e8b379f3f49167d0517",
+ "task_name": "RuToxicOKMLCUPClassification",
+ "mteb_version": "1.38.0",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.7484,
+ "f1": 0.746853,
+ "f1_weighted": 0.746853,
+ "ap": 0.682316,
+ "ap_weighted": 0.682316,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.7665,
+ "f1": 0.763972,
+ "f1_weighted": 0.763972,
+ "ap": 0.692092,
+ "ap_weighted": 0.692092
+ },
+ {
+ "accuracy": 0.796,
+ "f1": 0.795604,
+ "f1_weighted": 0.795604,
+ "ap": 0.74407,
+ "ap_weighted": 0.74407
+ },
+ {
+ "accuracy": 0.7545,
+ "f1": 0.754425,
+ "f1_weighted": 0.754425,
+ "ap": 0.694369,
+ "ap_weighted": 0.694369
+ },
+ {
+ "accuracy": 0.759,
+ "f1": 0.756156,
+ "f1_weighted": 0.756156,
+ "ap": 0.684665,
+ "ap_weighted": 0.684665
+ },
+ {
+ "accuracy": 0.7415,
+ "f1": 0.739978,
+ "f1_weighted": 0.739978,
+ "ap": 0.671333,
+ "ap_weighted": 0.671333
+ },
+ {
+ "accuracy": 0.7195,
+ "f1": 0.716282,
+ "f1_weighted": 0.716282,
+ "ap": 0.64947,
+ "ap_weighted": 0.64947
+ },
+ {
+ "accuracy": 0.704,
+ "f1": 0.70101,
+ "f1_weighted": 0.70101,
+ "ap": 0.63668,
+ "ap_weighted": 0.63668
+ },
+ {
+ "accuracy": 0.709,
+ "f1": 0.708951,
+ "f1_weighted": 0.708951,
+ "ap": 0.649347,
+ "ap_weighted": 0.649347
+ },
+ {
+ "accuracy": 0.7675,
+ "f1": 0.767451,
+ "f1_weighted": 0.767451,
+ "ap": 0.707443,
+ "ap_weighted": 0.707443
+ },
+ {
+ "accuracy": 0.7665,
+ "f1": 0.764698,
+ "f1_weighted": 0.764698,
+ "ap": 0.693694,
+ "ap_weighted": 0.693694
+ }
+ ],
+ "main_score": 0.7484,
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 5.789905786514282,
+ "kg_co2_emissions": null
+ }
results/sergeyzh__rubert-tiny-turbo/8ce0cf757446ce9bb2d5f5a4ac8103c7a1049054/SentiRuEval2016.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "dataset_revision": "8507eab0deef37f040a750afbcb4dba7a7de9c16",
+ "task_name": "SentiRuEval2016",
+ "mteb_version": "1.38.0",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.597567,
+ "f1": 0.597931,
+ "f1_weighted": 0.597931,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.576333,
+ "f1": 0.578412,
+ "f1_weighted": 0.578412
+ },
+ {
+ "accuracy": 0.578333,
+ "f1": 0.580362,
+ "f1_weighted": 0.580362
+ },
+ {
+ "accuracy": 0.595333,
+ "f1": 0.593486,
+ "f1_weighted": 0.593486
+ },
+ {
+ "accuracy": 0.608333,
+ "f1": 0.609387,
+ "f1_weighted": 0.609387
+ },
+ {
+ "accuracy": 0.617,
+ "f1": 0.617019,
+ "f1_weighted": 0.617019
+ },
+ {
+ "accuracy": 0.638667,
+ "f1": 0.639016,
+ "f1_weighted": 0.639016
+ },
+ {
+ "accuracy": 0.568333,
+ "f1": 0.567216,
+ "f1_weighted": 0.567216
+ },
+ {
+ "accuracy": 0.581333,
+ "f1": 0.584385,
+ "f1_weighted": 0.584385
+ },
+ {
+ "accuracy": 0.609667,
+ "f1": 0.605779,
+ "f1_weighted": 0.605779
+ },
+ {
+ "accuracy": 0.602333,
+ "f1": 0.604244,
+ "f1_weighted": 0.604244
+ }
+ ],
+ "main_score": 0.597567,
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 12.903911352157593,
+ "kg_co2_emissions": null
+ }
results/sergeyzh__rubert-tiny-turbo/8ce0cf757446ce9bb2d5f5a4ac8103c7a1049054/XNLI.json CHANGED
@@ -1,105 +1,58 @@
  {
  "dataset_revision": "09698e0180d87dc247ca447d3a1248b931ac0cdb",
- "evaluation_time": 2.1733453273773193,
- "kg_co2_emissions": null,
- "mteb_version": "1.12.85",
+ "task_name": "XNLI",
+ "mteb_version": "1.38.0",
  "scores": {
  "test": [
  {
- "cosine_accuracy": 0.7098901098901099,
- "cosine_accuracy_threshold": 0.8146822452545166,
- "cosine_ap": 0.7846836789486711,
- "cosine_f1": 0.7344051446945338,
- "cosine_f1_threshold": 0.795316219329834,
- "cosine_precision": 0.6540664375715922,
- "cosine_recall": 0.8372434017595308,
- "dot_accuracy": 0.7098901098901099,
- "dot_accuracy_threshold": 0.814682126045227,
- "dot_ap": 0.7846836789486711,
- "dot_f1": 0.7344051446945338,
- "dot_f1_threshold": 0.7953161597251892,
- "dot_precision": 0.6540664375715922,
- "dot_recall": 0.8372434017595308,
- "euclidean_accuracy": 0.7098901098901099,
- "euclidean_accuracy_threshold": 0.6087983846664429,
- "euclidean_ap": 0.7846836789486711,
- "euclidean_f1": 0.7344051446945338,
- "euclidean_f1_threshold": 0.6398184299468994,
- "euclidean_precision": 0.6540664375715922,
- "euclidean_recall": 0.8372434017595308,
+ "similarity_accuracy": 0.70989,
+ "similarity_accuracy_threshold": 0.814682,
+ "similarity_f1": 0.734405,
+ "similarity_f1_threshold": 0.795316,
+ "similarity_precision": 0.654066,
+ "similarity_recall": 0.837243,
+ "similarity_ap": 0.784684,
+ "cosine_accuracy": 0.70989,
+ "cosine_accuracy_threshold": 0.814682,
+ "cosine_f1": 0.734405,
+ "cosine_f1_threshold": 0.795316,
+ "cosine_precision": 0.654066,
+ "cosine_recall": 0.837243,
+ "cosine_ap": 0.784684,
+ "manhattan_accuracy": 0.710623,
+ "manhattan_accuracy_threshold": 8.387863,
+ "manhattan_f1": 0.735108,
+ "manhattan_f1_threshold": 9.101425,
+ "manhattan_precision": 0.647321,
+ "manhattan_recall": 0.85044,
+ "manhattan_ap": 0.782421,
+ "euclidean_accuracy": 0.70989,
+ "euclidean_accuracy_threshold": 0.608798,
+ "euclidean_f1": 0.734405,
+ "euclidean_f1_threshold": 0.639818,
+ "euclidean_precision": 0.654066,
+ "euclidean_recall": 0.837243,
+ "euclidean_ap": 0.784684,
+ "dot_accuracy": 0.70989,
+ "dot_accuracy_threshold": 0.814682,
+ "dot_f1": 0.734405,
+ "dot_f1_threshold": 0.795316,
+ "dot_precision": 0.654066,
+ "dot_recall": 0.837243,
+ "dot_ap": 0.784684,
+ "max_accuracy": 0.710623,
+ "max_f1": 0.735108,
+ "max_precision": 0.654066,
+ "max_recall": 0.85044,
+ "max_ap": 0.784684,
+ "main_score": 0.784684,
  "hf_subset": "ru",
  "languages": [
  "rus-Cyrl"
- ],
- "main_score": 0.7846836789486711,
- "manhattan_accuracy": 0.7106227106227107,
- "manhattan_accuracy_threshold": 8.387863159179688,
- "manhattan_ap": 0.7824209982582098,
- "manhattan_f1": 0.73510773130545,
- "manhattan_f1_threshold": 9.101426124572754,
- "manhattan_precision": 0.6473214285714286,
- "manhattan_recall": 0.8504398826979472,
- "max_ap": 0.7846836789486711,
- "max_f1": 0.73510773130545,
- "max_precision": 0.6540664375715922,
- "max_recall": 0.8504398826979472,
- "similarity_accuracy": 0.7098901098901099,
- "similarity_accuracy_threshold": 0.8146823644638062,
- "similarity_ap": 0.7846836789486711,
- "similarity_f1": 0.7344051446945338,
- "similarity_f1_threshold": 0.795316219329834,
- "similarity_precision": 0.6540664375715922,
- "similarity_recall": 0.8372434017595308
- }
- ],
- "validation": [
- {
- "cosine_accuracy": 0.717948717948718,
- "cosine_accuracy_threshold": 0.8193197250366211,
- "cosine_ap": 0.7827761660048811,
- "cosine_f1": 0.7352380952380952,
- "cosine_f1_threshold": 0.7867473363876343,
- "cosine_precision": 0.6483762597984323,
- "cosine_recall": 0.8489736070381232,
- "dot_accuracy": 0.717948717948718,
- "dot_accuracy_threshold": 0.8193197250366211,
- "dot_ap": 0.7827747952600053,
- "dot_f1": 0.7352380952380952,
- "dot_f1_threshold": 0.7867473363876343,
- "dot_precision": 0.6483762597984323,
- "dot_recall": 0.8489736070381232,
- "euclidean_accuracy": 0.717948717948718,
- "euclidean_accuracy_threshold": 0.6011327505111694,
- "euclidean_ap": 0.7827767588794166,
- "euclidean_f1": 0.7352380952380952,
- "euclidean_f1_threshold": 0.653073787689209,
- "euclidean_precision": 0.6483762597984323,
- "euclidean_recall": 0.8489736070381232,
- "hf_subset": "ru",
- "languages": [
- "rus-Cyrl"
- ],
- "main_score": 0.7827767588794166,
- "manhattan_accuracy": 0.7172161172161172,
- "manhattan_accuracy_threshold": 8.609776496887207,
- "manhattan_ap": 0.7821101951229235,
- "manhattan_f1": 0.7385409941897999,
- "manhattan_f1_threshold": 9.108965873718262,
- "manhattan_precision": 0.6597462514417531,
- "manhattan_recall": 0.8387096774193549,
- "max_ap": 0.7827767588794166,
- "max_f1": 0.7385409941897999,
- "max_precision": 0.6597462514417531,
- "max_recall": 0.8489736070381232,
- "similarity_accuracy": 0.717948717948718,
- "similarity_accuracy_threshold": 0.8193197846412659,
- "similarity_ap": 0.7827761660048811,
- "similarity_f1": 0.7352380952380952,
- "similarity_f1_threshold": 0.786747395992279,
- "similarity_precision": 0.6483762597984323,
- "similarity_recall": 0.8489736070381232
+ ]
  }
  ]
  },
- "task_name": "XNLI"
+ "evaluation_time": 4.327247142791748,
+ "kg_co2_emissions": null
  }
results/sergeyzh__rubert-tiny-turbo/8ce0cf757446ce9bb2d5f5a4ac8103c7a1049054/model_meta.json CHANGED
@@ -1 +1 @@
- {"name": "sergeyzh/rubert-tiny-turbo", "revision": "8ce0cf757446ce9bb2d5f5a4ac8103c7a1049054", "release_date": "2024-06-21", "languages": ["rus_Cyrl"], "n_parameters": 29200000, "memory_usage_mb": 111.0, "max_tokens": 2048.0, "embed_dim": 312, "license": "mit", "open_weights": true, "public_training_code": null, "public_training_data": null, "framework": ["Sentence Transformers", "PyTorch"], "reference": "https://huggingface.co/sergeyzh/rubert-tiny-turbo", "similarity_fn_name": "cosine", "use_instructions": false, "training_datasets": null, "adapted_from": "cointegrated/rubert-tiny2", "superseded_by": null, "modalities": ["text"], "loader": null}
+ {"name": "sergeyzh/rubert-tiny-turbo", "revision": "8ce0cf757446ce9bb2d5f5a4ac8103c7a1049054", "release_date": "2024-06-21", "languages": ["rus-Cyrl"], "n_parameters": 29200000, "memory_usage_mb": 111.0, "max_tokens": 2048.0, "embed_dim": 312, "license": "mit", "open_weights": true, "public_training_code": null, "public_training_data": null, "framework": ["Sentence Transformers", "PyTorch"], "reference": "https://huggingface.co/sergeyzh/rubert-tiny-turbo", "similarity_fn_name": "cosine", "use_instructions": false, "training_datasets": {}, "adapted_from": "cointegrated/rubert-tiny2", "superseded_by": null, "is_cross_encoder": null, "modalities": ["text"], "loader": null}
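The model_meta.json diffs in this commit are single-line JSON records, which makes the changed keys hard to spot by eye. A minimal sketch of one way to surface them, assuming the old and new blobs are pasted in as strings; the two inputs below are abbreviated, hypothetical excerpts rather than the full records:

```python
import json

# Hypothetical abbreviated old/new model_meta blobs; in practice these would
# be the full one-line JSON records from a model_meta.json diff above.
old = json.loads('{"languages": ["rus_Cyrl"], "training_datasets": null}')
new = json.loads(
    '{"languages": ["rus-Cyrl"], "training_datasets": {},'
    ' "is_cross_encoder": null}'
)

# Report added, removed, and changed keys between the two metadata records.
for key in sorted(old.keys() | new.keys()):
    if key not in old:
        print(f"added:   {key} = {new[key]!r}")
    elif key not in new:
        print(f"removed: {key}")
    elif old[key] != new[key]:
        print(f"changed: {key}: {old[key]!r} -> {new[key]!r}")
```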