Sailesh97 committed
Commit b8e8809 · unverified · 1 Parent(s): ec3c9cb

Added Hinvec results (#226)

* Adding Hinvec Results.

* Adding results of Sailesh97/Hinvec

Files changed (24)
  1. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/BelebeleRetrieval.json +0 -0
  2. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/BengaliSentimentAnalysis.json +95 -0
  3. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/GujaratiNewsClassification.json +73 -0
  4. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/HindiDiscourseClassification.json +73 -0
  5. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/IN22ConvBitextMining.json +0 -0
  6. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/IN22GenBitextMining.json +0 -0
  7. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/IndicCrosslingualSTS.json +203 -0
  8. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/IndicGenBenchFloresBitextMining.json +61 -0
  9. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/IndicLangClassification.json +96 -0
  10. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/LinceMTBitextMining.json +23 -0
  11. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/MTOPIntentClassification.json +137 -0
  12. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/MalayalamNewsClassification.json +73 -0
  13. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/MultiHateClassification.json +95 -0
  14. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/NepaliNewsClassification.json +73 -0
  15. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/PunjabiNewsClassification.json +95 -0
  16. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/SIB200ClusteringS2S.json +494 -0
  17. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/SanskritShlokasClassification.json +137 -0
  18. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/SentimentAnalysisHindi.json +73 -0
  19. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/TweetSentimentClassification.json +73 -0
  20. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/UrduRomanSentimentClassification.json +73 -0
  21. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/WikipediaRerankingMultilingual.json +41 -0
  22. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/XNLI.json +107 -0
  23. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/XQuADRetrieval.json +158 -0
  24. results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/model_meta.json +1 -0
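
For context, result files in this layout are what the `mteb` evaluation harness writes out. The following is a minimal sketch of how a run like this one is typically produced; the task list and the SentenceTransformer loading path are illustrative assumptions, not taken from this commit:

```python
# Minimal sketch of producing mteb result JSONs like the ones in this commit.
# Assumptions: Hinvec loads as a SentenceTransformer-compatible encoder and
# the task list below is illustrative; the exact run configuration is not
# recorded in this diff.
import mteb
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("Sailesh97/Hinvec")

tasks = mteb.get_tasks(tasks=["BengaliSentimentAnalysis", "XNLI", "XQuADRetrieval"])
evaluation = mteb.MTEB(tasks=tasks)

# Writes one JSON per task under
# results/<model name with "/" replaced by "__">/<model revision>/<TaskName>.json,
# matching the paths listed above.
evaluation.run(model, output_folder="results")
```
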
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/BelebeleRetrieval.json ADDED
The diff for this file is too large to render. See raw diff
 
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/BengaliSentimentAnalysis.json ADDED
@@ -0,0 +1,95 @@
+ {
+ "dataset_revision": "a4b3685b1854cc26c554dda4c7cb918a36a6fb6c",
+ "task_name": "BengaliSentimentAnalysis",
+ "mteb_version": "1.38.30",
+ "scores": {
+ "train": [
+ {
+ "accuracy": 0.5396,
+ "f1": 0.500113,
+ "f1_weighted": 0.558233,
+ "ap": 0.72935,
+ "ap_weighted": 0.72935,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.474121,
+ "f1": 0.468261,
+ "f1_weighted": 0.492792,
+ "ap": 0.732543,
+ "ap_weighted": 0.732543
+ },
+ {
+ "accuracy": 0.492676,
+ "f1": 0.4828,
+ "f1_weighted": 0.514207,
+ "ap": 0.734656,
+ "ap_weighted": 0.734656
+ },
+ {
+ "accuracy": 0.547363,
+ "f1": 0.515005,
+ "f1_weighted": 0.570057,
+ "ap": 0.735529,
+ "ap_weighted": 0.735529
+ },
+ {
+ "accuracy": 0.602539,
+ "f1": 0.527534,
+ "f1_weighted": 0.61026,
+ "ap": 0.732023,
+ "ap_weighted": 0.732023
+ },
+ {
+ "accuracy": 0.581543,
+ "f1": 0.517328,
+ "f1_weighted": 0.594695,
+ "ap": 0.728804,
+ "ap_weighted": 0.728804
+ },
+ {
+ "accuracy": 0.560547,
+ "f1": 0.50204,
+ "f1_weighted": 0.577049,
+ "ap": 0.722987,
+ "ap_weighted": 0.722987
+ },
+ {
+ "accuracy": 0.515625,
+ "f1": 0.49581,
+ "f1_weighted": 0.539734,
+ "ap": 0.732716,
+ "ap_weighted": 0.732716
+ },
+ {
+ "accuracy": 0.585449,
+ "f1": 0.523392,
+ "f1_weighted": 0.598969,
+ "ap": 0.731742,
+ "ap_weighted": 0.731742
+ },
+ {
+ "accuracy": 0.503418,
+ "f1": 0.466756,
+ "f1_weighted": 0.528201,
+ "ap": 0.71225,
+ "ap_weighted": 0.71225
+ },
+ {
+ "accuracy": 0.532715,
+ "f1": 0.502208,
+ "f1_weighted": 0.556363,
+ "ap": 0.730245,
+ "ap_weighted": 0.730245
+ }
+ ],
+ "main_score": 0.500113,
+ "hf_subset": "default",
+ "languages": [
+ "ben-Beng"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 119.4375410079956,
+ "kg_co2_emissions": null
+ }
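
The aggregate classification metrics in files like the one above are the means over the ten entries in `scores_per_experiment` (this relationship is inferred from, and verifiable against, the numbers in the file itself). A quick sanity check, with the file path as added in this commit:

```python
# Sanity check: the top-level "f1" (and main_score, which is f1 for this task)
# equals the mean of the per-experiment f1 values. Path assumes a checkout of
# the results repository.
import json

with open(
    "results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/"
    "BengaliSentimentAnalysis.json"
) as fh:
    result = json.load(fh)

split = result["scores"]["train"][0]
per_experiment_f1 = [run["f1"] for run in split["scores_per_experiment"]]
mean_f1 = sum(per_experiment_f1) / len(per_experiment_f1)

print(round(mean_f1, 6))    # 0.500113
print(split["main_score"])  # 0.500113
```
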
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/GujaratiNewsClassification.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "dataset_revision": "1a5f2fa2914bfeff4fcdc6fff4194fa8ec8fa19e",
+ "task_name": "GujaratiNewsClassification",
+ "mteb_version": "1.38.30",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.463354,
+ "f1": 0.457946,
+ "f1_weighted": 0.467136,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.484067,
+ "f1": 0.484632,
+ "f1_weighted": 0.50609
+ },
+ {
+ "accuracy": 0.433991,
+ "f1": 0.429658,
+ "f1_weighted": 0.438257
+ },
+ {
+ "accuracy": 0.449165,
+ "f1": 0.443331,
+ "f1_weighted": 0.457906
+ },
+ {
+ "accuracy": 0.440061,
+ "f1": 0.432535,
+ "f1_weighted": 0.442558
+ },
+ {
+ "accuracy": 0.447648,
+ "f1": 0.454185,
+ "f1_weighted": 0.443672
+ },
+ {
+ "accuracy": 0.519727,
+ "f1": 0.521608,
+ "f1_weighted": 0.521961
+ },
+ {
+ "accuracy": 0.462822,
+ "f1": 0.453687,
+ "f1_weighted": 0.479159
+ },
+ {
+ "accuracy": 0.538695,
+ "f1": 0.517286,
+ "f1_weighted": 0.536211
+ },
+ {
+ "accuracy": 0.435508,
+ "f1": 0.418152,
+ "f1_weighted": 0.422411
+ },
+ {
+ "accuracy": 0.421851,
+ "f1": 0.424385,
+ "f1_weighted": 0.423139
+ }
+ ],
+ "main_score": 0.463354,
+ "hf_subset": "default",
+ "languages": [
+ "guj-Gujr"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 58.837788820266724,
+ "kg_co2_emissions": null
+ }
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/HindiDiscourseClassification.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "dataset_revision": "218ce687943a0da435d6d62751a4ab216be6cd40",
+ "task_name": "HindiDiscourseClassification",
+ "mteb_version": "1.38.30",
+ "scores": {
+ "train": [
+ {
+ "accuracy": 0.506885,
+ "f1": 0.380896,
+ "f1_weighted": 0.542776,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.494141,
+ "f1": 0.358637,
+ "f1_weighted": 0.541825
+ },
+ {
+ "accuracy": 0.513672,
+ "f1": 0.393973,
+ "f1_weighted": 0.540734
+ },
+ {
+ "accuracy": 0.543457,
+ "f1": 0.395137,
+ "f1_weighted": 0.589294
+ },
+ {
+ "accuracy": 0.449219,
+ "f1": 0.337105,
+ "f1_weighted": 0.480569
+ },
+ {
+ "accuracy": 0.460938,
+ "f1": 0.353526,
+ "f1_weighted": 0.491463
+ },
+ {
+ "accuracy": 0.512695,
+ "f1": 0.401698,
+ "f1_weighted": 0.536773
+ },
+ {
+ "accuracy": 0.567871,
+ "f1": 0.418374,
+ "f1_weighted": 0.603057
+ },
+ {
+ "accuracy": 0.489258,
+ "f1": 0.375362,
+ "f1_weighted": 0.516744
+ },
+ {
+ "accuracy": 0.464355,
+ "f1": 0.34455,
+ "f1_weighted": 0.510851
+ },
+ {
+ "accuracy": 0.573242,
+ "f1": 0.4306,
+ "f1_weighted": 0.616451
+ }
+ ],
+ "main_score": 0.506885,
+ "hf_subset": "default",
+ "languages": [
+ "hin-Deva"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 42.62646460533142,
+ "kg_co2_emissions": null
+ }
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/IN22ConvBitextMining.json ADDED
The diff for this file is too large to render. See raw diff
 
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/IN22GenBitextMining.json ADDED
The diff for this file is too large to render. See raw diff
 
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/IndicCrosslingualSTS.json ADDED
@@ -0,0 +1,203 @@
+ {
+ "dataset_revision": "0ca7b87dda68ef4ebb2f50a20a62b9dbebcac3e4",
+ "task_name": "IndicCrosslingualSTS",
+ "mteb_version": "1.38.30",
+ "scores": {
+ "test": [
+ {
+ "pearson": 0.438388,
+ "spearman": 0.401516,
+ "cosine_pearson": 0.438388,
+ "cosine_spearman": 0.401516,
+ "manhattan_pearson": 0.487275,
+ "manhattan_spearman": 0.44142,
+ "euclidean_pearson": 0.490759,
+ "euclidean_spearman": 0.444066,
+ "main_score": 0.401516,
+ "hf_subset": "en-hi",
+ "languages": [
+ "eng-Latn",
+ "hin-Deva"
+ ]
+ },
+ {
+ "pearson": -0.191138,
+ "spearman": -0.190454,
+ "cosine_pearson": -0.191138,
+ "cosine_spearman": -0.190454,
+ "manhattan_pearson": -0.106502,
+ "manhattan_spearman": -0.102748,
+ "euclidean_pearson": -0.100152,
+ "euclidean_spearman": -0.093731,
+ "main_score": -0.190454,
+ "hf_subset": "en-as",
+ "languages": [
+ "eng-Latn",
+ "asm-Beng"
+ ]
+ },
+ {
+ "pearson": 0.012804,
+ "spearman": 0.06013,
+ "cosine_pearson": 0.012804,
+ "cosine_spearman": 0.06013,
+ "manhattan_pearson": -0.155865,
+ "manhattan_spearman": -0.171934,
+ "euclidean_pearson": -0.15638,
+ "euclidean_spearman": -0.164257,
+ "main_score": 0.06013,
+ "hf_subset": "en-bn",
+ "languages": [
+ "eng-Latn",
+ "ben-Beng"
+ ]
+ },
+ {
+ "pearson": 0.09463,
+ "spearman": 0.121711,
+ "cosine_pearson": 0.09463,
+ "cosine_spearman": 0.121711,
+ "manhattan_pearson": 0.084547,
+ "manhattan_spearman": 0.068734,
+ "euclidean_pearson": 0.083184,
+ "euclidean_spearman": 0.062745,
+ "main_score": 0.121711,
+ "hf_subset": "en-gu",
+ "languages": [
+ "eng-Latn",
+ "guj-Gujr"
+ ]
+ },
+ {
+ "pearson": 0.021017,
+ "spearman": 0.000765,
+ "cosine_pearson": 0.021017,
+ "cosine_spearman": 0.000765,
+ "manhattan_pearson": 0.012251,
+ "manhattan_spearman": -0.018142,
+ "euclidean_pearson": 0.012784,
+ "euclidean_spearman": -0.015407,
+ "main_score": 0.000765,
+ "hf_subset": "en-kn",
+ "languages": [
+ "eng-Latn",
+ "kan-Knda"
+ ]
+ },
+ {
+ "pearson": 0.128407,
+ "spearman": 0.16236,
+ "cosine_pearson": 0.128407,
+ "cosine_spearman": 0.16236,
+ "manhattan_pearson": 0.056132,
+ "manhattan_spearman": 0.036987,
+ "euclidean_pearson": 0.062097,
+ "euclidean_spearman": 0.040787,
+ "main_score": 0.16236,
+ "hf_subset": "en-ml",
+ "languages": [
+ "eng-Latn",
+ "mal-Mlym"
+ ]
+ },
+ {
+ "pearson": 0.145786,
+ "spearman": 0.150029,
+ "cosine_pearson": 0.145786,
+ "cosine_spearman": 0.150029,
+ "manhattan_pearson": 0.086932,
+ "manhattan_spearman": 0.064635,
+ "euclidean_pearson": 0.088672,
+ "euclidean_spearman": 0.068092,
+ "main_score": 0.150029,
+ "hf_subset": "en-mr",
+ "languages": [
+ "eng-Latn",
+ "mar-Deva"
+ ]
+ },
+ {
+ "pearson": 0.088671,
+ "spearman": 0.029551,
+ "cosine_pearson": 0.088671,
+ "cosine_spearman": 0.029551,
+ "manhattan_pearson": 0.099202,
+ "manhattan_spearman": 0.028613,
+ "euclidean_pearson": 0.104516,
+ "euclidean_spearman": 0.035474,
+ "main_score": 0.029551,
+ "hf_subset": "en-or",
+ "languages": [
+ "eng-Latn",
+ "ory-Orya"
+ ]
+ },
+ {
+ "pearson": 0.128925,
+ "spearman": 0.096049,
+ "cosine_pearson": 0.128924,
+ "cosine_spearman": 0.096049,
+ "manhattan_pearson": 0.064831,
+ "manhattan_spearman": 0.044536,
+ "euclidean_pearson": 0.059524,
+ "euclidean_spearman": 0.043537,
+ "main_score": 0.096049,
+ "hf_subset": "en-pa",
+ "languages": [
+ "eng-Latn",
+ "pan-Guru"
+ ]
+ },
+ {
+ "pearson": -0.123216,
+ "spearman": -0.081303,
+ "cosine_pearson": -0.123216,
+ "cosine_spearman": -0.081303,
+ "manhattan_pearson": -0.156227,
+ "manhattan_spearman": -0.131783,
+ "euclidean_pearson": -0.151907,
+ "euclidean_spearman": -0.122488,
+ "main_score": -0.081303,
+ "hf_subset": "en-ta",
+ "languages": [
+ "eng-Latn",
+ "tam-Taml"
+ ]
+ },
+ {
+ "pearson": 0.071218,
+ "spearman": 0.103845,
+ "cosine_pearson": 0.071218,
+ "cosine_spearman": 0.103845,
+ "manhattan_pearson": 0.016148,
+ "manhattan_spearman": -0.005168,
+ "euclidean_pearson": 0.031578,
+ "euclidean_spearman": 0.005655,
+ "main_score": 0.103845,
+ "hf_subset": "en-te",
+ "languages": [
+ "eng-Latn",
+ "tel-Telu"
+ ]
+ },
+ {
+ "pearson": -0.061433,
+ "spearman": -0.016477,
+ "cosine_pearson": -0.061433,
+ "cosine_spearman": -0.016477,
+ "manhattan_pearson": -0.01825,
+ "manhattan_spearman": -0.011916,
+ "euclidean_pearson": -0.028747,
+ "euclidean_spearman": -0.017251,
+ "main_score": -0.016477,
+ "hf_subset": "en-ur",
+ "languages": [
+ "eng-Latn",
+ "urd-Arab"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 233.77654337882996,
+ "kg_co2_emissions": null
+ }
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/IndicGenBenchFloresBitextMining.json ADDED
@@ -0,0 +1,61 @@
+ {
+ "dataset_revision": "f8650438298df086750ff4973661bb58a201a5ee",
+ "task_name": "IndicGenBenchFloresBitextMining",
+ "mteb_version": "1.38.30",
+ "scores": {
+ "validation": [
+ {
+ "precision": 0.985289,
+ "recall": 0.98997,
+ "f1": 0.986794,
+ "accuracy": 0.98997,
+ "main_score": 0.986794,
+ "hf_subset": "hin-eng",
+ "languages": [
+ "hin-Deva",
+ "eng-Latn"
+ ]
+ },
+ {
+ "precision": 0.989468,
+ "recall": 0.992979,
+ "f1": 0.990639,
+ "accuracy": 0.992979,
+ "main_score": 0.990639,
+ "hf_subset": "eng-hin",
+ "languages": [
+ "eng-Latn",
+ "hin-Deva"
+ ]
+ }
+ ],
+ "test": [
+ {
+ "precision": 0.982213,
+ "recall": 0.988142,
+ "f1": 0.98419,
+ "accuracy": 0.988142,
+ "main_score": 0.98419,
+ "hf_subset": "hin-eng",
+ "languages": [
+ "hin-Deva",
+ "eng-Latn"
+ ]
+ },
+ {
+ "precision": 0.989954,
+ "recall": 0.993083,
+ "f1": 0.990942,
+ "accuracy": 0.993083,
+ "main_score": 0.990942,
+ "hf_subset": "eng-hin",
+ "languages": [
+ "eng-Latn",
+ "hin-Deva"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 139.14086055755615,
+ "kg_co2_emissions": null
+ }
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/IndicLangClassification.json ADDED
@@ -0,0 +1,96 @@
+ {
+ "dataset_revision": "c54a95d9b9d62c891a03bd5da60715df7176b097",
+ "task_name": "IndicLangClassification",
+ "mteb_version": "1.38.30",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.55165,
+ "f1": 0.562261,
+ "f1_weighted": 0.55707,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.559471,
+ "f1": 0.567203,
+ "f1_weighted": 0.565362
+ },
+ {
+ "accuracy": 0.545828,
+ "f1": 0.555609,
+ "f1_weighted": 0.549485
+ },
+ {
+ "accuracy": 0.537905,
+ "f1": 0.550774,
+ "f1_weighted": 0.543201
+ },
+ {
+ "accuracy": 0.539483,
+ "f1": 0.548573,
+ "f1_weighted": 0.540289
+ },
+ {
+ "accuracy": 0.579197,
+ "f1": 0.587894,
+ "f1_weighted": 0.586099
+ },
+ {
+ "accuracy": 0.555493,
+ "f1": 0.560808,
+ "f1_weighted": 0.55933
+ },
+ {
+ "accuracy": 0.536163,
+ "f1": 0.554534,
+ "f1_weighted": 0.544409
+ },
+ {
+ "accuracy": 0.552502,
+ "f1": 0.562609,
+ "f1_weighted": 0.559625
+ },
+ {
+ "accuracy": 0.555165,
+ "f1": 0.569065,
+ "f1_weighted": 0.561937
+ },
+ {
+ "accuracy": 0.555296,
+ "f1": 0.565541,
+ "f1_weighted": 0.560966
+ }
+ ],
+ "main_score": 0.55165,
+ "hf_subset": "default",
+ "languages": [
+ "asm-Beng",
+ "brx-Deva",
+ "ben-Beng",
+ "doi-Deva",
+ "gom-Deva",
+ "guj-Gujr",
+ "hin-Deva",
+ "kan-Knda",
+ "kas-Arab",
+ "kas-Deva",
+ "mai-Deva",
+ "mal-Mlym",
+ "mar-Deva",
+ "mni-Beng",
+ "mni-Mtei",
+ "npi-Deva",
+ "ory-Orya",
+ "pan-Guru",
+ "san-Deva",
+ "sat-Olck",
+ "snd-Arab",
+ "tam-Taml",
+ "tel-Telu",
+ "urd-Arab"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 1037.6013538837433,
+ "kg_co2_emissions": null
+ }
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/LinceMTBitextMining.json ADDED
@@ -0,0 +1,23 @@
+ {
+ "dataset_revision": "483f2494f76fdc04acbbdbbac129de1925b34215",
+ "task_name": "LinceMTBitextMining",
+ "mteb_version": "1.38.30",
+ "scores": {
+ "train": [
+ {
+ "precision": 0.236663,
+ "recall": 0.340117,
+ "f1": 0.262673,
+ "accuracy": 0.340117,
+ "main_score": 0.262673,
+ "hf_subset": "eng-eng_hin",
+ "languages": [
+ "eng-Latn",
+ "hin-Latn"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 242.40082168579102,
+ "kg_co2_emissions": null
+ }
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/MTOPIntentClassification.json ADDED
@@ -0,0 +1,137 @@
+ {
+ "dataset_revision": "ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba",
+ "task_name": "MTOPIntentClassification",
+ "mteb_version": "1.38.30",
+ "scores": {
+ "validation": [
+ {
+ "accuracy": 0.69334,
+ "f1": 0.429968,
+ "f1_weighted": 0.730282,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.685388,
+ "f1": 0.433016,
+ "f1_weighted": 0.71967
+ },
+ {
+ "accuracy": 0.705765,
+ "f1": 0.431352,
+ "f1_weighted": 0.741774
+ },
+ {
+ "accuracy": 0.679423,
+ "f1": 0.430065,
+ "f1_weighted": 0.720771
+ },
+ {
+ "accuracy": 0.699304,
+ "f1": 0.432123,
+ "f1_weighted": 0.734553
+ },
+ {
+ "accuracy": 0.692843,
+ "f1": 0.439862,
+ "f1_weighted": 0.733449
+ },
+ {
+ "accuracy": 0.709245,
+ "f1": 0.433505,
+ "f1_weighted": 0.741758
+ },
+ {
+ "accuracy": 0.687376,
+ "f1": 0.423519,
+ "f1_weighted": 0.724933
+ },
+ {
+ "accuracy": 0.711233,
+ "f1": 0.431096,
+ "f1_weighted": 0.750194
+ },
+ {
+ "accuracy": 0.670974,
+ "f1": 0.404929,
+ "f1_weighted": 0.705261
+ },
+ {
+ "accuracy": 0.691849,
+ "f1": 0.440214,
+ "f1_weighted": 0.730457
+ }
+ ],
+ "main_score": 0.69334,
+ "hf_subset": "hi",
+ "languages": [
+ "hin-Deva"
+ ]
+ }
+ ],
+ "test": [
+ {
+ "accuracy": 0.68849,
+ "f1": 0.45109,
+ "f1_weighted": 0.722504,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.685909,
+ "f1": 0.462259,
+ "f1_weighted": 0.719985
+ },
+ {
+ "accuracy": 0.683041,
+ "f1": 0.451782,
+ "f1_weighted": 0.718594
+ },
+ {
+ "accuracy": 0.681606,
+ "f1": 0.447911,
+ "f1_weighted": 0.718963
+ },
+ {
+ "accuracy": 0.706705,
+ "f1": 0.449041,
+ "f1_weighted": 0.742514
+ },
+ {
+ "accuracy": 0.669416,
+ "f1": 0.458844,
+ "f1_weighted": 0.706249
+ },
+ {
+ "accuracy": 0.713159,
+ "f1": 0.45964,
+ "f1_weighted": 0.742509
+ },
+ {
+ "accuracy": 0.668698,
+ "f1": 0.435953,
+ "f1_weighted": 0.706381
+ },
+ {
+ "accuracy": 0.709573,
+ "f1": 0.467978,
+ "f1_weighted": 0.742277
+ },
+ {
+ "accuracy": 0.691287,
+ "f1": 0.445883,
+ "f1_weighted": 0.721344
+ },
+ {
+ "accuracy": 0.675511,
+ "f1": 0.431606,
+ "f1_weighted": 0.70622
+ }
+ ],
+ "main_score": 0.68849,
+ "hf_subset": "hi",
+ "languages": [
+ "hin-Deva"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 280.5332052707672,
+ "kg_co2_emissions": null
+ }
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/MalayalamNewsClassification.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "dataset_revision": "666f63bba2387456d8f846ea4d0565181bd47b81",
+ "task_name": "MalayalamNewsClassification",
+ "mteb_version": "1.38.30",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.405714,
+ "f1": 0.40239,
+ "f1_weighted": 0.402022,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.461905,
+ "f1": 0.46116,
+ "f1_weighted": 0.459947
+ },
+ {
+ "accuracy": 0.407143,
+ "f1": 0.403114,
+ "f1_weighted": 0.401852
+ },
+ {
+ "accuracy": 0.410317,
+ "f1": 0.40315,
+ "f1_weighted": 0.401155
+ },
+ {
+ "accuracy": 0.338889,
+ "f1": 0.327493,
+ "f1_weighted": 0.332671
+ },
+ {
+ "accuracy": 0.455556,
+ "f1": 0.449851,
+ "f1_weighted": 0.451768
+ },
+ {
+ "accuracy": 0.411905,
+ "f1": 0.408367,
+ "f1_weighted": 0.407041
+ },
+ {
+ "accuracy": 0.392857,
+ "f1": 0.392551,
+ "f1_weighted": 0.392421
+ },
+ {
+ "accuracy": 0.39127,
+ "f1": 0.390352,
+ "f1_weighted": 0.388692
+ },
+ {
+ "accuracy": 0.416667,
+ "f1": 0.416736,
+ "f1_weighted": 0.416273
+ },
+ {
+ "accuracy": 0.370635,
+ "f1": 0.371128,
+ "f1_weighted": 0.368397
+ }
+ ],
+ "main_score": 0.405714,
+ "hf_subset": "default",
+ "languages": [
+ "mal-Mlym"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 85.51849627494812,
+ "kg_co2_emissions": null
+ }
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/MultiHateClassification.json ADDED
@@ -0,0 +1,95 @@
+ {
+ "dataset_revision": "8f95949846bb9e33c6aaf730ccfdb8fe6bcfb7a9",
+ "task_name": "MultiHateClassification",
+ "mteb_version": "1.38.30",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.5738,
+ "f1": 0.538517,
+ "f1_weighted": 0.585447,
+ "ap": 0.33195,
+ "ap_weighted": 0.33195,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.621,
+ "f1": 0.574997,
+ "f1_weighted": 0.630368,
+ "ap": 0.347281,
+ "ap_weighted": 0.347281
+ },
+ {
+ "accuracy": 0.563,
+ "f1": 0.533617,
+ "f1_weighted": 0.579974,
+ "ap": 0.326275,
+ "ap_weighted": 0.326275
+ },
+ {
+ "accuracy": 0.649,
+ "f1": 0.571625,
+ "f1_weighted": 0.64372,
+ "ap": 0.342148,
+ "ap_weighted": 0.342148
+ },
+ {
+ "accuracy": 0.553,
+ "f1": 0.523957,
+ "f1_weighted": 0.57052,
+ "ap": 0.32087,
+ "ap_weighted": 0.32087
+ },
+ {
+ "accuracy": 0.539,
+ "f1": 0.529376,
+ "f1_weighted": 0.556027,
+ "ap": 0.3379,
+ "ap_weighted": 0.3379
+ },
+ {
+ "accuracy": 0.582,
+ "f1": 0.566685,
+ "f1_weighted": 0.598944,
+ "ap": 0.356974,
+ "ap_weighted": 0.356974
+ },
+ {
+ "accuracy": 0.61,
+ "f1": 0.564886,
+ "f1_weighted": 0.620368,
+ "ap": 0.340612,
+ "ap_weighted": 0.340612
+ },
+ {
+ "accuracy": 0.556,
+ "f1": 0.518812,
+ "f1_weighted": 0.571785,
+ "ap": 0.315326,
+ "ap_weighted": 0.315326
+ },
+ {
+ "accuracy": 0.597,
+ "f1": 0.535838,
+ "f1_weighted": 0.60256,
+ "ap": 0.320307,
+ "ap_weighted": 0.320307
+ },
+ {
+ "accuracy": 0.468,
+ "f1": 0.46538,
+ "f1_weighted": 0.4802,
+ "ap": 0.311804,
+ "ap_weighted": 0.311804
+ }
+ ],
+ "main_score": 0.5738,
+ "hf_subset": "hin",
+ "languages": [
+ "hin-Deva"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 17.595994234085083,
+ "kg_co2_emissions": null
+ }
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/NepaliNewsClassification.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "dataset_revision": "79125f20d858a08f71ec4923169a6545221725c4",
+ "task_name": "NepaliNewsClassification",
+ "mteb_version": "1.38.30",
+ "scores": {
+ "train": [
+ {
+ "accuracy": 0.924561,
+ "f1": 0.924133,
+ "f1_weighted": 0.924472,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.936523,
+ "f1": 0.935931,
+ "f1_weighted": 0.936544
+ },
+ {
+ "accuracy": 0.881836,
+ "f1": 0.880923,
+ "f1_weighted": 0.880423
+ },
+ {
+ "accuracy": 0.944824,
+ "f1": 0.944878,
+ "f1_weighted": 0.944882
+ },
+ {
+ "accuracy": 0.918457,
+ "f1": 0.917218,
+ "f1_weighted": 0.91802
+ },
+ {
+ "accuracy": 0.93457,
+ "f1": 0.934546,
+ "f1_weighted": 0.934775
+ },
+ {
+ "accuracy": 0.935059,
+ "f1": 0.934757,
+ "f1_weighted": 0.935123
+ },
+ {
+ "accuracy": 0.906738,
+ "f1": 0.907454,
+ "f1_weighted": 0.907254
+ },
+ {
+ "accuracy": 0.928223,
+ "f1": 0.927593,
+ "f1_weighted": 0.928184
+ },
+ {
+ "accuracy": 0.924316,
+ "f1": 0.923739,
+ "f1_weighted": 0.924565
+ },
+ {
+ "accuracy": 0.935059,
+ "f1": 0.93429,
+ "f1_weighted": 0.934954
+ }
+ ],
+ "main_score": 0.924561,
+ "hf_subset": "default",
+ "languages": [
+ "nep-Deva"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 110.68125009536743,
+ "kg_co2_emissions": null
+ }
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/PunjabiNewsClassification.json ADDED
@@ -0,0 +1,95 @@
+ {
+ "dataset_revision": "cec3923e16519efe51d535497e711932b8f1dc44",
+ "task_name": "PunjabiNewsClassification",
+ "mteb_version": "1.38.30",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.607006,
+ "f1": 0.52947,
+ "f1_weighted": 0.655174,
+ "ap": 0.213219,
+ "ap_weighted": 0.213219,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.687898,
+ "f1": 0.570823,
+ "f1_weighted": 0.720736,
+ "ap": 0.215456,
+ "ap_weighted": 0.215456
+ },
+ {
+ "accuracy": 0.611465,
+ "f1": 0.554827,
+ "f1_weighted": 0.661023,
+ "ap": 0.243345,
+ "ap_weighted": 0.243345
+ },
+ {
+ "accuracy": 0.656051,
+ "f1": 0.579464,
+ "f1_weighted": 0.699488,
+ "ap": 0.245667,
+ "ap_weighted": 0.245667
+ },
+ {
+ "accuracy": 0.426752,
+ "f1": 0.380263,
+ "f1_weighted": 0.493781,
+ "ap": 0.152109,
+ "ap_weighted": 0.152109
+ },
+ {
+ "accuracy": 0.681529,
+ "f1": 0.593685,
+ "f1_weighted": 0.720035,
+ "ap": 0.248969,
+ "ap_weighted": 0.248969
+ },
+ {
+ "accuracy": 0.605096,
+ "f1": 0.510559,
+ "f1_weighted": 0.654419,
+ "ap": 0.187641,
+ "ap_weighted": 0.187641
+ },
+ {
+ "accuracy": 0.56051,
+ "f1": 0.496444,
+ "f1_weighted": 0.616567,
+ "ap": 0.194976,
+ "ap_weighted": 0.194976
+ },
+ {
+ "accuracy": 0.649682,
+ "f1": 0.549627,
+ "f1_weighted": 0.691596,
+ "ap": 0.208687,
+ "ap_weighted": 0.208687
+ },
+ {
+ "accuracy": 0.592357,
+ "f1": 0.51412,
+ "f1_weighted": 0.644515,
+ "ap": 0.197326,
+ "ap_weighted": 0.197326
+ },
+ {
+ "accuracy": 0.598726,
+ "f1": 0.544886,
+ "f1_weighted": 0.649575,
+ "ap": 0.238017,
+ "ap_weighted": 0.238017
+ }
+ ],
+ "main_score": 0.607006,
+ "hf_subset": "default",
+ "languages": [
+ "pan-Guru"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 328.4837553501129,
+ "kg_co2_emissions": null
+ }
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/SIB200ClusteringS2S.json ADDED
@@ -0,0 +1,494 @@
+ {
+ "dataset_revision": "a74d7350ea12af010cfb1c21e34f1f81fd2e615b",
+ "task_name": "SIB200ClusteringS2S",
+ "mteb_version": "1.38.30",
+ "scores": {
+ "test": [
+ {
+ "v_measures": {
+ "Level 0": [
+ 0.490771,
+ 0.414164,
+ 0.475788,
+ 0.455051,
+ 0.426136,
+ 0.514653,
+ 0.479276,
+ 0.453802,
+ 0.454876,
+ 0.511396
+ ]
+ },
+ "v_measure": 0.467591,
+ "v_measure_std": 0.031554,
+ "main_score": 0.467591,
+ "hf_subset": "hin_Deva",
+ "languages": [
+ "hin-Deva"
+ ]
+ },
+ {
+ "v_measures": {
+ "Level 0": [
+ 0.026521,
+ 0.021341,
+ 0.031533,
+ 0.031263,
+ 0.027136,
+ 0.027984,
+ 0.029328,
+ 0.022703,
+ 0.029197,
+ 0.027353
+ ]
+ },
+ "v_measure": 0.027436,
+ "v_measure_std": 0.003145,
+ "main_score": 0.027436,
+ "hf_subset": "asm_Beng",
+ "languages": [
+ "asm-Beng"
+ ]
+ },
+ {
+ "v_measures": {
+ "Level 0": [
+ 0.323417,
+ 0.536556,
+ 0.42354,
+ 0.313102,
+ 0.522444,
+ 0.389338,
+ 0.381625,
+ 0.332254,
+ 0.386568,
+ 0.41446
+ ]
+ },
+ "v_measure": 0.40233,
+ "v_measure_std": 0.072792,
+ "main_score": 0.40233,
+ "hf_subset": "awa_Deva",
+ "languages": [
+ "awa-Deva"
+ ]
+ },
+ {
+ "v_measures": {
+ "Level 0": [
+ 0.02918,
+ 0.02814,
+ 0.029377,
+ 0.031069,
+ 0.027772,
+ 0.039999,
+ 0.030476,
+ 0.034526,
+ 0.032607,
+ 0.029403
+ ]
+ },
+ "v_measure": 0.031255,
+ "v_measure_std": 0.003503,
+ "main_score": 0.031255,
+ "hf_subset": "ben_Beng",
+ "languages": [
+ "ben-Beng"
+ ]
+ },
+ {
+ "v_measures": {
+ "Level 0": [
+ 0.340212,
+ 0.403437,
+ 0.352381,
+ 0.310511,
+ 0.398602,
+ 0.475459,
+ 0.366212,
+ 0.36109,
+ 0.386068,
+ 0.305149
+ ]
+ },
+ "v_measure": 0.369912,
+ "v_measure_std": 0.047278,
+ "main_score": 0.369912,
+ "hf_subset": "bho_Deva",
+ "languages": [
+ "bho-Deva"
+ ]
+ },
+ {
+ "v_measures": {
+ "Level 0": [
+ 0.050988,
+ 0.044545,
+ 0.037526,
+ 0.038442,
+ 0.042796,
+ 0.04405,
+ 0.035288,
+ 0.049462,
+ 0.039057,
+ 0.045301
+ ]
+ },
+ "v_measure": 0.042746,
+ "v_measure_std": 0.004894,
+ "main_score": 0.042746,
+ "hf_subset": "guj_Gujr",
+ "languages": [
+ "guj-Gujr"
+ ]
+ },
+ {
+ "v_measures": {
+ "Level 0": [
+ 0.393021,
+ 0.329622,
+ 0.34936,
+ 0.386729,
+ 0.320137,
+ 0.394497,
+ 0.365073,
+ 0.380257,
+ 0.389527,
+ 0.395999
+ ]
+ },
+ "v_measure": 0.370422,
+ "v_measure_std": 0.026716,
+ "main_score": 0.370422,
+ "hf_subset": "hne_Deva",
+ "languages": [
+ "hne-Deva"
+ ]
+ },
+ {
+ "v_measures": {
+ "Level 0": [
+ 0.033309,
+ 0.038233,
+ 0.042127,
+ 0.0353,
+ 0.047552,
+ 0.044716,
+ 0.037167,
+ 0.039806,
+ 0.045006,
+ 0.035292
+ ]
+ },
+ "v_measure": 0.039851,
+ "v_measure_std": 0.00457,
+ "main_score": 0.039851,
+ "hf_subset": "kan_Knda",
+ "languages": [
+ "kan-Knda"
+ ]
+ },
+ {
+ "v_measures": {
+ "Level 0": [
+ 0.142907,
+ 0.10126,
+ 0.079093,
+ 0.071165,
+ 0.126392,
+ 0.107448,
+ 0.106658,
+ 0.137598,
+ 0.092086,
+ 0.165249
+ ]
+ },
+ "v_measure": 0.112986,
+ "v_measure_std": 0.028205,
+ "main_score": 0.112986,
+ "hf_subset": "kas_Deva",
+ "languages": [
+ "kas-Deva"
+ ]
+ },
+ {
+ "v_measures": {
+ "Level 0": [
+ 0.429795,
+ 0.368769,
+ 0.402701,
+ 0.365502,
+ 0.420914,
+ 0.386177,
+ 0.434047,
+ 0.397424,
+ 0.380972,
+ 0.443705
+ ]
+ },
+ "v_measure": 0.403001,
+ "v_measure_std": 0.026527,
+ "main_score": 0.403001,
+ "hf_subset": "mai_Deva",
+ "languages": [
+ "mai-Deva"
+ ]
+ },
+ {
+ "v_measures": {
+ "Level 0": [
+ 0.035746,
+ 0.041301,
+ 0.033529,
+ 0.032239,
+ 0.033829,
+ 0.041073,
+ 0.032026,
+ 0.035397,
+ 0.031232,
+ 0.041161
+ ]
+ },
+ "v_measure": 0.035753,
+ "v_measure_std": 0.003792,
+ "main_score": 0.035753,
+ "hf_subset": "mal_Mlym",
+ "languages": [
+ "mal-Mlym"
+ ]
+ },
+ {
+ "v_measures": {
+ "Level 0": [
+ 0.236511,
+ 0.249115,
+ 0.229444,
+ 0.188847,
+ 0.201051,
+ 0.222164,
+ 0.173428,
+ 0.224413,
+ 0.19048,
+ 0.246052
+ ]
+ },
+ "v_measure": 0.216151,
+ "v_measure_std": 0.024767,
+ "main_score": 0.216151,
+ "hf_subset": "mar_Deva",
+ "languages": [
+ "mar-Deva"
+ ]
+ },
+ {
+ "v_measures": {
+ "Level 0": [
+ 0.056688,
+ 0.060144,
+ 0.056672,
+ 0.057948,
+ 0.044304,
+ 0.047973,
+ 0.045953,
+ 0.056628,
+ 0.070091,
+ 0.052356
+ ]
+ },
+ "v_measure": 0.054876,
+ "v_measure_std": 0.007236,
+ "main_score": 0.054876,
+ "hf_subset": "mni_Beng",
+ "languages": [
+ "mni-Beng"
+ ]
+ },
+ {
+ "v_measures": {
+ "Level 0": [
+ 0.282943,
+ 0.345287,
+ 0.331704,
+ 0.301754,
+ 0.271444,
+ 0.343171,
+ 0.31082,
+ 0.283996,
+ 0.232197,
+ 0.284734
+ ]
+ },
+ "v_measure": 0.298805,
+ "v_measure_std": 0.033494,
+ "main_score": 0.298805,
+ "hf_subset": "npi_Deva",
+ "languages": [
+ "npi-Deva"
+ ]
+ },
+ {
+ "v_measures": {
+ "Level 0": [
+ 0.033016,
+ 0.045057,
+ 0.031903,
+ 0.034467,
+ 0.039413,
+ 0.03938,
+ 0.047601,
+ 0.045989,
+ 0.031913,
+ 0.045794
+ ]
+ },
+ "v_measure": 0.039453,
+ "v_measure_std": 0.006007,
+ "main_score": 0.039453,
+ "hf_subset": "ory_Orya",
+ "languages": [
+ "ory-Orya"
+ ]
+ },
+ {
+ "v_measures": {
+ "Level 0": [
+ 0.047493,
+ 0.050154,
+ 0.043398,
+ 0.049491,
+ 0.040115,
+ 0.043358,
+ 0.042431,
+ 0.037815,
+ 0.0519,
+ 0.045573
+ ]
+ },
+ "v_measure": 0.045173,
+ "v_measure_std": 0.004335,
+ "main_score": 0.045173,
+ "hf_subset": "pan_Guru",
+ "languages": [
+ "pan-Guru"
+ ]
+ },
+ {
+ "v_measures": {
+ "Level 0": [
+ 0.187505,
+ 0.190909,
+ 0.174973,
+ 0.168179,
+ 0.177296,
+ 0.197338,
+ 0.153418,
+ 0.17804,
+ 0.195879,
+ 0.183026
+ ]
+ },
+ "v_measure": 0.180656,
+ "v_measure_std": 0.012711,
+ "main_score": 0.180656,
+ "hf_subset": "san_Deva",
+ "languages": [
+ "san-Deva"
+ ]
+ },
+ {
+ "v_measures": {
+ "Level 0": [
+ 0.038249,
+ 0.046718,
+ 0.033,
+ 0.043277,
+ 0.041489,
+ 0.045315,
+ 0.040831,
+ 0.040931,
+ 0.039929,
+ 0.044411
+ ]
+ },
+ "v_measure": 0.041415,
+ "v_measure_std": 0.003734,
+ "main_score": 0.041415,
+ "hf_subset": "snd_Arab",
+ "languages": [
+ "snd-Arab"
+ ]
+ },
+ {
+ "v_measures": {
+ "Level 0": [
+ 0.041074,
+ 0.047903,
+ 0.044805,
+ 0.04647,
+ 0.036797,
+ 0.044198,
+ 0.04056,
+ 0.044546,
+ 0.050842,
+ 0.045937
+ ]
+ },
+ "v_measure": 0.044313,
+ "v_measure_std": 0.003797,
+ "main_score": 0.044313,
+ "hf_subset": "tam_Taml",
+ "languages": [
+ "tam-Taml"
+ ]
+ },
+ {
+ "v_measures": {
+ "Level 0": [
+ 0.046961,
+ 0.047149,
+ 0.041577,
+ 0.033162,
+ 0.04062,
+ 0.038913,
+ 0.05148,
+ 0.048707,
+ 0.043264,
+ 0.04446
+ ]
+ },
+ "v_measure": 0.043629,
+ "v_measure_std": 0.005068,
+ "main_score": 0.043629,
+ "hf_subset": "tel_Telu",
+ "languages": [
+ "tel-Telu"
+ ]
+ },
+ {
+ "v_measures": {
+ "Level 0": [
+ 0.043624,
+ 0.041582,
+ 0.037776,
+ 0.03272,
+ 0.038606,
+ 0.045682,
+ 0.040204,
+ 0.044896,
+ 0.038754,
+ 0.043533
+ ]
+ },
+ "v_measure": 0.040738,
+ "v_measure_std": 0.003747,
+ "main_score": 0.040738,
+ "hf_subset": "urd_Arab",
+ "languages": [
+ "urd-Arab"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 1390.8170924186707,
+ "kg_co2_emissions": null
+ }
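
Analogously for the clustering file above, the subset-level `v_measure` and `v_measure_std` appear to be the mean and population standard deviation of the ten "Level 0" runs (an inferred relationship, checkable against the values listed above). A sketch verifying this for the hin_Deva subset:

```python
# Sanity check for the clustering aggregates: mean and population standard
# deviation of the ten V-measure runs reproduce v_measure and v_measure_std.
import statistics

hin_deva_runs = [
    0.490771, 0.414164, 0.475788, 0.455051, 0.426136,
    0.514653, 0.479276, 0.453802, 0.454876, 0.511396,
]

print(round(statistics.mean(hin_deva_runs), 6))    # 0.467591
print(round(statistics.pstdev(hin_deva_runs), 6))  # 0.031554
```
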
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/SanskritShlokasClassification.json ADDED
@@ -0,0 +1,137 @@
+ {
+ "dataset_revision": "5a79d6472db143690c7ce6e974995d3610eee7f0",
+ "task_name": "SanskritShlokasClassification",
+ "mteb_version": "1.38.30",
+ "scores": {
+ "train": [
+ {
+ "accuracy": 0.806527,
+ "f1": 0.805588,
+ "f1_weighted": 0.806036,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.804178,
+ "f1": 0.804779,
+ "f1_weighted": 0.803759
+ },
+ {
+ "accuracy": 0.81201,
+ "f1": 0.811362,
+ "f1_weighted": 0.811872
+ },
+ {
+ "accuracy": 0.73107,
+ "f1": 0.731275,
+ "f1_weighted": 0.731477
+ },
+ {
+ "accuracy": 0.793734,
+ "f1": 0.79336,
+ "f1_weighted": 0.793895
+ },
+ {
+ "accuracy": 0.825065,
+ "f1": 0.8249,
+ "f1_weighted": 0.823347
+ },
+ {
+ "accuracy": 0.830287,
+ "f1": 0.831028,
+ "f1_weighted": 0.830776
+ },
+ {
+ "accuracy": 0.840731,
+ "f1": 0.840266,
+ "f1_weighted": 0.840778
+ },
+ {
+ "accuracy": 0.798956,
+ "f1": 0.797466,
+ "f1_weighted": 0.79882
+ },
+ {
+ "accuracy": 0.825065,
+ "f1": 0.824052,
+ "f1_weighted": 0.825784
+ },
+ {
+ "accuracy": 0.804178,
+ "f1": 0.797398,
+ "f1_weighted": 0.799851
+ }
+ ],
+ "main_score": 0.806527,
+ "hf_subset": "default",
+ "languages": [
+ "san-Deva"
+ ]
+ }
+ ],
+ "validation": [
+ {
+ "accuracy": 0.797917,
+ "f1": 0.803869,
+ "f1_weighted": 0.798732,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.791667,
+ "f1": 0.801585,
+ "f1_weighted": 0.793711
+ },
+ {
+ "accuracy": 0.791667,
+ "f1": 0.798799,
+ "f1_weighted": 0.79298
+ },
+ {
+ "accuracy": 0.739583,
+ "f1": 0.745576,
+ "f1_weighted": 0.740177
+ },
+ {
+ "accuracy": 0.729167,
+ "f1": 0.738438,
+ "f1_weighted": 0.730875
+ },
+ {
+ "accuracy": 0.833333,
+ "f1": 0.841094,
+ "f1_weighted": 0.835121
+ },
+ {
+ "accuracy": 0.885417,
+ "f1": 0.889662,
+ "f1_weighted": 0.886189
+ },
+ {
+ "accuracy": 0.8125,
+ "f1": 0.819892,
+ "f1_weighted": 0.81376
+ },
+ {
+ "accuracy": 0.802083,
+ "f1": 0.808211,
+ "f1_weighted": 0.803157
+ },
+ {
+ "accuracy": 0.8125,
+ "f1": 0.80608,
+ "f1_weighted": 0.80863
+ },
+ {
+ "accuracy": 0.78125,
+ "f1": 0.789355,
+ "f1_weighted": 0.782724
+ }
+ ],
+ "main_score": 0.797917,
+ "hf_subset": "default",
+ "languages": [
+ "san-Deva"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 52.777884006500244,
+ "kg_co2_emissions": null
+ }
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/SentimentAnalysisHindi.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "dataset_revision": "1beac1b941da76a9c51e3e5b39d230fde9a80983",
+ "task_name": "SentimentAnalysisHindi",
+ "mteb_version": "1.38.30",
+ "scores": {
+ "train": [
+ {
+ "accuracy": 0.62041,
+ "f1": 0.605507,
+ "f1_weighted": 0.62203,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.649902,
+ "f1": 0.629402,
+ "f1_weighted": 0.655058
+ },
+ {
+ "accuracy": 0.645508,
+ "f1": 0.629092,
+ "f1_weighted": 0.655024
+ },
+ {
+ "accuracy": 0.643066,
+ "f1": 0.62644,
+ "f1_weighted": 0.648214
+ },
+ {
+ "accuracy": 0.580078,
+ "f1": 0.568407,
+ "f1_weighted": 0.586486
+ },
+ {
+ "accuracy": 0.613281,
+ "f1": 0.596796,
+ "f1_weighted": 0.612399
+ },
+ {
+ "accuracy": 0.55127,
+ "f1": 0.526134,
+ "f1_weighted": 0.52781
+ },
+ {
+ "accuracy": 0.663086,
+ "f1": 0.649698,
+ "f1_weighted": 0.668184
+ },
+ {
+ "accuracy": 0.644531,
+ "f1": 0.636392,
+ "f1_weighted": 0.649319
+ },
+ {
+ "accuracy": 0.580078,
+ "f1": 0.571286,
+ "f1_weighted": 0.58402
+ },
+ {
+ "accuracy": 0.633301,
+ "f1": 0.621427,
+ "f1_weighted": 0.633787
+ }
+ ],
+ "main_score": 0.605507,
+ "hf_subset": "default",
+ "languages": [
+ "hin-Deva"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 38.996551513671875,
+ "kg_co2_emissions": null
+ }
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/TweetSentimentClassification.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "dataset_revision": "d522bb117c32f5e0207344f69f7075fc9941168b",
+ "task_name": "TweetSentimentClassification",
+ "mteb_version": "1.38.30",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.369141,
+ "f1": 0.363107,
+ "f1_weighted": 0.363236,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.304688,
+ "f1": 0.304534,
+ "f1_weighted": 0.304564
+ },
+ {
+ "accuracy": 0.390625,
+ "f1": 0.388111,
+ "f1_weighted": 0.388235
+ },
+ {
+ "accuracy": 0.320312,
+ "f1": 0.313247,
+ "f1_weighted": 0.313541
+ },
+ {
+ "accuracy": 0.4375,
+ "f1": 0.433267,
+ "f1_weighted": 0.433204
+ },
+ {
+ "accuracy": 0.394531,
+ "f1": 0.376671,
+ "f1_weighted": 0.377005
+ },
+ {
+ "accuracy": 0.3125,
+ "f1": 0.308521,
+ "f1_weighted": 0.308576
+ },
+ {
+ "accuracy": 0.378906,
+ "f1": 0.373636,
+ "f1_weighted": 0.373612
+ },
+ {
+ "accuracy": 0.3125,
+ "f1": 0.309998,
+ "f1_weighted": 0.310147
+ },
+ {
+ "accuracy": 0.417969,
+ "f1": 0.409904,
+ "f1_weighted": 0.410191
+ },
+ {
+ "accuracy": 0.421875,
+ "f1": 0.413183,
+ "f1_weighted": 0.413283
+ }
+ ],
+ "main_score": 0.369141,
+ "hf_subset": "hindi",
+ "languages": [
+ "hin-Deva"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 18.04403829574585,
+ "kg_co2_emissions": null
+ }
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/UrduRomanSentimentClassification.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "dataset_revision": "905c1121c002c4b9adc4ebc5faaf4d6f50d1b1ee",
+ "task_name": "UrduRomanSentimentClassification",
+ "mteb_version": "1.38.30",
+ "scores": {
+ "train": [
+ {
+ "accuracy": 0.412451,
+ "f1": 0.403225,
+ "f1_weighted": 0.41328,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.453125,
+ "f1": 0.437027,
+ "f1_weighted": 0.453606
+ },
+ {
+ "accuracy": 0.429199,
+ "f1": 0.415439,
+ "f1_weighted": 0.424916
+ },
+ {
+ "accuracy": 0.384277,
+ "f1": 0.377522,
+ "f1_weighted": 0.391027
+ },
+ {
+ "accuracy": 0.444824,
+ "f1": 0.442262,
+ "f1_weighted": 0.447926
+ },
+ {
+ "accuracy": 0.399902,
+ "f1": 0.378851,
+ "f1_weighted": 0.397383
+ },
+ {
+ "accuracy": 0.43457,
+ "f1": 0.42486,
+ "f1_weighted": 0.439183
+ },
+ {
+ "accuracy": 0.388184,
+ "f1": 0.388619,
+ "f1_weighted": 0.384174
+ },
+ {
+ "accuracy": 0.399414,
+ "f1": 0.38684,
+ "f1_weighted": 0.400376
+ },
+ {
+ "accuracy": 0.393066,
+ "f1": 0.388769,
+ "f1_weighted": 0.392817
+ },
+ {
+ "accuracy": 0.397949,
+ "f1": 0.392055,
+ "f1_weighted": 0.401393
+ }
+ ],
+ "main_score": 0.403225,
+ "hf_subset": "default",
+ "languages": [
+ "urd-Latn"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 56.51226830482483,
+ "kg_co2_emissions": null
+ }
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/WikipediaRerankingMultilingual.json ADDED
@@ -0,0 +1,41 @@
+ {
+ "dataset_revision": "6268b37d6f975f2a134791ba2f250a91d0bdfb4f",
+ "task_name": "WikipediaRerankingMultilingual",
+ "mteb_version": "1.38.30",
+ "scores": {
+ "test": [
+ {
+ "map": 0.720752,
+ "mrr": 0.721508,
+ "nAUC_map_max": 0.302833,
+ "nAUC_map_std": 0.317203,
+ "nAUC_map_diff1": 0.554237,
+ "nAUC_mrr_max": 0.305184,
+ "nAUC_mrr_std": 0.321101,
+ "nAUC_mrr_diff1": 0.551788,
+ "main_score": 0.720752,
+ "hf_subset": "hi",
+ "languages": [
+ "hin-Deva"
+ ]
+ },
+ {
+ "map": 0.346355,
+ "mrr": 0.346355,
+ "nAUC_map_max": 0.081376,
+ "nAUC_map_std": 0.062626,
+ "nAUC_map_diff1": 0.036116,
+ "nAUC_mrr_max": 0.081376,
+ "nAUC_mrr_std": 0.062626,
+ "nAUC_mrr_diff1": 0.036116,
+ "main_score": 0.346355,
+ "hf_subset": "bn",
+ "languages": [
+ "ben-Beng"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 3309.075793027878,
+ "kg_co2_emissions": null
+ }
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/XNLI.json ADDED
@@ -0,0 +1,107 @@
+ {
+ "dataset_revision": "09698e0180d87dc247ca447d3a1248b931ac0cdb",
+ "task_name": "XNLI",
+ "mteb_version": "1.38.30",
+ "scores": {
+ "test": [
+ {
+ "similarity_accuracy": 0.610989,
+ "similarity_accuracy_threshold": 0.618919,
+ "similarity_f1": 0.670124,
+ "similarity_f1_threshold": 0.363081,
+ "similarity_precision": 0.518459,
+ "similarity_recall": 0.947214,
+ "similarity_ap": 0.638881,
+ "cosine_accuracy": 0.610989,
+ "cosine_accuracy_threshold": 0.618919,
+ "cosine_f1": 0.670124,
+ "cosine_f1_threshold": 0.363081,
+ "cosine_precision": 0.518459,
+ "cosine_recall": 0.947214,
+ "cosine_ap": 0.638881,
+ "manhattan_accuracy": 0.613919,
+ "manhattan_accuracy_threshold": 820.554321,
+ "manhattan_f1": 0.668367,
+ "manhattan_f1_threshold": 1077.517822,
+ "manhattan_precision": 0.51252,
+ "manhattan_recall": 0.960411,
+ "manhattan_ap": 0.638252,
+ "euclidean_accuracy": 0.613187,
+ "euclidean_accuracy_threshold": 22.749931,
+ "euclidean_f1": 0.668421,
+ "euclidean_f1_threshold": 28.699625,
+ "euclidean_precision": 0.521346,
+ "euclidean_recall": 0.931085,
+ "euclidean_ap": 0.638549,
+ "dot_accuracy": 0.602198,
+ "dot_accuracy_threshold": 418.685181,
+ "dot_f1": 0.668721,
+ "dot_f1_threshold": 221.012344,
+ "dot_precision": 0.514625,
+ "dot_recall": 0.954545,
+ "dot_ap": 0.625792,
+ "max_accuracy": 0.613919,
+ "max_f1": 0.670124,
+ "max_precision": 0.521346,
+ "max_recall": 0.960411,
+ "max_ap": 0.638881,
+ "main_score": 0.638881,
+ "hf_subset": "hi",
+ "languages": [
+ "hin-Deva"
+ ]
+ }
+ ],
+ "validation": [
+ {
+ "similarity_accuracy": 0.617582,
+ "similarity_accuracy_threshold": 0.640458,
+ "similarity_f1": 0.666341,
+ "similarity_f1_threshold": 0.121911,
+ "similarity_precision": 0.5,
+ "similarity_recall": 0.998534,
+ "similarity_ap": 0.650756,
+ "cosine_accuracy": 0.617582,
+ "cosine_accuracy_threshold": 0.640458,
+ "cosine_f1": 0.666341,
+ "cosine_f1_threshold": 0.121911,
+ "cosine_precision": 0.5,
+ "cosine_recall": 0.998534,
+ "cosine_ap": 0.650756,
+ "manhattan_accuracy": 0.615385,
+ "manhattan_accuracy_threshold": 787.284607,
+ "manhattan_f1": 0.666994,
+ "manhattan_f1_threshold": 1191.316406,
+ "manhattan_precision": 0.501477,
+ "manhattan_recall": 0.995601,
+ "manhattan_ap": 0.647826,
+ "euclidean_accuracy": 0.614652,
+ "euclidean_accuracy_threshold": 21.930321,
+ "euclidean_f1": 0.666994,
+ "euclidean_f1_threshold": 32.825119,
+ "euclidean_precision": 0.501477,
+ "euclidean_recall": 0.995601,
+ "euclidean_ap": 0.647908,
+ "dot_accuracy": 0.616117,
+ "dot_accuracy_threshold": 397.997833,
+ "dot_f1": 0.665689,
+ "dot_f1_threshold": 50.910923,
+ "dot_precision": 0.499267,
+ "dot_recall": 0.998534,
+ "dot_ap": 0.645428,
+ "max_accuracy": 0.617582,
+ "max_f1": 0.666994,
+ "max_precision": 0.501477,
+ "max_recall": 0.998534,
+ "max_ap": 0.650756,
+ "main_score": 0.650756,
+ "hf_subset": "hi",
+ "languages": [
+ "hin-Deva"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 67.37477612495422,
+ "kg_co2_emissions": null
+ }
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/XQuADRetrieval.json ADDED
@@ -0,0 +1,158 @@
+ {
+ "dataset_revision": "51adfef1c1287aab1d2d91b5bead9bcfb9c68583",
+ "task_name": "XQuADRetrieval",
+ "mteb_version": "1.38.30",
+ "scores": {
+ "validation": [
+ {
+ "ndcg_at_1": 0.68639,
+ "ndcg_at_3": 0.78173,
+ "ndcg_at_5": 0.80675,
+ "ndcg_at_10": 0.81917,
+ "ndcg_at_20": 0.8271,
+ "ndcg_at_100": 0.83125,
+ "ndcg_at_1000": 0.83136,
+ "map_at_1": 0.68639,
+ "map_at_3": 0.75881,
+ "map_at_5": 0.77267,
+ "map_at_10": 0.77786,
+ "map_at_20": 0.78006,
+ "map_at_100": 0.78071,
+ "map_at_1000": 0.78072,
+ "recall_at_1": 0.68639,
+ "recall_at_3": 0.84784,
+ "recall_at_5": 0.90871,
+ "recall_at_10": 0.94675,
+ "recall_at_20": 0.97802,
+ "recall_at_100": 0.99915,
+ "recall_at_1000": 1.0,
+ "precision_at_1": 0.68639,
+ "precision_at_3": 0.28261,
+ "precision_at_5": 0.18174,
+ "precision_at_10": 0.09467,
+ "precision_at_20": 0.0489,
+ "precision_at_100": 0.00999,
+ "precision_at_1000": 0.001,
+ "mrr_at_1": 0.686391,
+ "mrr_at_3": 0.758805,
+ "mrr_at_5": 0.772668,
+ "mrr_at_10": 0.777864,
+ "mrr_at_20": 0.780055,
+ "mrr_at_100": 0.780712,
+ "mrr_at_1000": 0.780717,
+ "nauc_ndcg_at_1_max": 0.507077,
+ "nauc_ndcg_at_1_std": 0.037766,
+ "nauc_ndcg_at_1_diff1": 0.744468,
+ "nauc_ndcg_at_3_max": 0.586458,
+ "nauc_ndcg_at_3_std": 0.117106,
+ "nauc_ndcg_at_3_diff1": 0.710042,
+ "nauc_ndcg_at_5_max": 0.589233,
+ "nauc_ndcg_at_5_std": 0.140464,
+ "nauc_ndcg_at_5_diff1": 0.706994,
+ "nauc_ndcg_at_10_max": 0.592023,
+ "nauc_ndcg_at_10_std": 0.148668,
+ "nauc_ndcg_at_10_diff1": 0.709179,
+ "nauc_ndcg_at_20_max": 0.579272,
+ "nauc_ndcg_at_20_std": 0.129263,
+ "nauc_ndcg_at_20_diff1": 0.713265,
+ "nauc_ndcg_at_100_max": 0.56979,
+ "nauc_ndcg_at_100_std": 0.112209,
+ "nauc_ndcg_at_100_diff1": 0.718055,
+ "nauc_ndcg_at_1000_max": 0.569455,
+ "nauc_ndcg_at_1000_std": 0.111735,
+ "nauc_ndcg_at_1000_diff1": 0.717836,
+ "nauc_map_at_1_max": 0.507077,
+ "nauc_map_at_1_std": 0.037766,
+ "nauc_map_at_1_diff1": 0.744468,
+ "nauc_map_at_3_max": 0.56441,
+ "nauc_map_at_3_std": 0.094475,
+ "nauc_map_at_3_diff1": 0.719387,
+ "nauc_map_at_5_max": 0.564391,
+ "nauc_map_at_5_std": 0.104574,
+ "nauc_map_at_5_diff1": 0.718333,
+ "nauc_map_at_10_max": 0.565007,
+ "nauc_map_at_10_std": 0.106354,
+ "nauc_map_at_10_diff1": 0.719306,
+ "nauc_map_at_20_max": 0.562049,
+ "nauc_map_at_20_std": 0.101816,
+ "nauc_map_at_20_diff1": 0.720339,
+ "nauc_map_at_100_max": 0.56085,
+ "nauc_map_at_100_std": 0.09962,
+ "nauc_map_at_100_diff1": 0.721051,
+ "nauc_map_at_1000_max": 0.560838,
+ "nauc_map_at_1000_std": 0.099602,
+ "nauc_map_at_1000_diff1": 0.721043,
+ "nauc_recall_at_1_max": 0.507077,
+ "nauc_recall_at_1_std": 0.037766,
+ "nauc_recall_at_1_diff1": 0.744468,
+ "nauc_recall_at_3_max": 0.678659,
+ "nauc_recall_at_3_std": 0.212137,
+ "nauc_recall_at_3_diff1": 0.671105,
+ "nauc_recall_at_5_max": 0.746044,
+ "nauc_recall_at_5_std": 0.373595,
+ "nauc_recall_at_5_diff1": 0.635481,
+ "nauc_recall_at_10_max": 0.878038,
+ "nauc_recall_at_10_std": 0.618254,
+ "nauc_recall_at_10_diff1": 0.606185,
+ "nauc_recall_at_20_max": 0.925665,
+ "nauc_recall_at_20_std": 0.741256,
+ "nauc_recall_at_20_diff1": 0.570075,
+ "nauc_recall_at_100_max": 1.0,
+ "nauc_recall_at_100_std": 0.722197,
+ "nauc_recall_at_100_diff1": 1.0,
+ "nauc_recall_at_1000_max": NaN,
+ "nauc_recall_at_1000_std": NaN,
+ "nauc_recall_at_1000_diff1": NaN,
+ "nauc_precision_at_1_max": 0.507077,
+ "nauc_precision_at_1_std": 0.037766,
+ "nauc_precision_at_1_diff1": 0.744468,
+ "nauc_precision_at_3_max": 0.678659,
+ "nauc_precision_at_3_std": 0.212137,
+ "nauc_precision_at_3_diff1": 0.671105,
+ "nauc_precision_at_5_max": 0.746044,
+ "nauc_precision_at_5_std": 0.373595,
+ "nauc_precision_at_5_diff1": 0.635481,
+ "nauc_precision_at_10_max": 0.878038,
+ "nauc_precision_at_10_std": 0.618254,
+ "nauc_precision_at_10_diff1": 0.606185,
+ "nauc_precision_at_20_max": 0.925665,
+ "nauc_precision_at_20_std": 0.741256,
+ "nauc_precision_at_20_diff1": 0.570075,
+ "nauc_precision_at_100_max": 1.0,
+ "nauc_precision_at_100_std": 0.722197,
+ "nauc_precision_at_100_diff1": 1.0,
+ "nauc_precision_at_1000_max": 1.0,
+ "nauc_precision_at_1000_std": 1.0,
+ "nauc_precision_at_1000_diff1": 1.0,
+ "nauc_mrr_at_1_max": 0.507077,
+ "nauc_mrr_at_1_std": 0.037766,
+ "nauc_mrr_at_1_diff1": 0.744468,
+ "nauc_mrr_at_3_max": 0.56441,
+ "nauc_mrr_at_3_std": 0.094475,
+ "nauc_mrr_at_3_diff1": 0.719387,
+ "nauc_mrr_at_5_max": 0.564391,
+ "nauc_mrr_at_5_std": 0.104574,
+ "nauc_mrr_at_5_diff1": 0.718333,
+ "nauc_mrr_at_10_max": 0.565007,
+ "nauc_mrr_at_10_std": 0.106354,
+ "nauc_mrr_at_10_diff1": 0.719306,
+ "nauc_mrr_at_20_max": 0.562049,
+ "nauc_mrr_at_20_std": 0.101816,
+ "nauc_mrr_at_20_diff1": 0.720339,
+ "nauc_mrr_at_100_max": 0.56085,
+ "nauc_mrr_at_100_std": 0.09962,
+ "nauc_mrr_at_100_diff1": 0.721051,
+ "nauc_mrr_at_1000_max": 0.560838,
+ "nauc_mrr_at_1000_std": 0.099602,
+ "nauc_mrr_at_1000_diff1": 0.721043,
+ "main_score": 0.81917,
+ "hf_subset": "hi",
+ "languages": [
+ "hin-Deva"
+ ]
+ }
+ ]
+ },
+ "evaluation_time": 43.8482825756073,
+ "kg_co2_emissions": null
+ }
results/Sailesh97__Hinvec/d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b/model_meta.json ADDED
@@ -0,0 +1 @@
+ {"name": "Sailesh97/Hinvec", "revision": "d4fc678720cc1b8c5d18599ce2d9a4d6090c8b6b", "release_date": null, "languages": null, "n_parameters": null, "memory_usage_mb": null, "max_tokens": null, "embed_dim": null, "license": "apache-2.0", "open_weights": true, "public_training_code": null, "public_training_data": null, "framework": ["PyTorch"], "reference": null, "similarity_fn_name": null, "use_instructions": null, "training_datasets": null, "adapted_from": null, "superseded_by": null, "is_cross_encoder": null, "modalities": ["text"], "loader": null}