eduagarcia committed
Commit 4617d97 · verified · 1 Parent(s): c696f01

Updating model anthracite-org/magnum-v2.5-12b-kto

anthracite-org/magnum-v2.5-12b-kto/results_2025-05-07T19-17-25.777956.json ADDED
@@ -0,0 +1,208 @@
+ {
+   "config_general": {
+     "start_date": "2025-05-07T19-17-25.777956",
+     "start_time": 1746645446.8992753,
+     "end_time": 1746646622.3891776,
+     "total_evaluation_time_seconds": 1175.4899022579193,
+     "n_gpus": 1,
+     "model_dtype": "float16",
+     "model_is_loaded_in_4bit": null,
+     "model_is_loaded_in_8bit": null,
+     "model_is_quantized": false,
+     "model_quantization": null,
+     "model_sha": "aee0374e5a43e950c9977b0004dede1c57be2999",
+     "batch_size": "auto",
+     "max_length": 4098,
+     "max_gen_toks": 2048,
+     "until": null,
+     "gen_kwargs": {},
+     "effective_batch_size": 7075.0,
+     "model_name": "anthracite-org/magnum-v2.5-12b-kto",
+     "job_id": 1633,
+     "model_id": "anthracite-org/magnum-v2.5-12b-kto_eval_request_False_float16_Original",
+     "model_base_model": "",
+     "model_weight_type": "Original",
+     "model_revision": "main",
+     "model_private": false,
+     "model_type": "💬 : chat (RLHF, DPO, IFT, ...)",
+     "model_architectures": "MistralForCausalLM",
+     "submitted_time": "2024-08-22T18:39:50Z",
+     "lm_eval_model_type": "vllm",
+     "eval_version": "1.1.0"
+   },
+   "results": {
+     "all_grouped_average": 0.6514789809876652,
+     "all_grouped_npm": 0.47674524279200825,
+     "all_grouped": {
+       "enem_challenge": 0.6431070678796361,
+       "bluex": 0.5173852573018081,
+       "oab_exams": 0.4419134396355353,
+       "assin2_rte": 0.8674910385642457,
+       "assin2_sts": 0.6823897405705615,
+       "faquad_nli": 0.6125554125554126,
+       "hatebr_offensive": 0.7709540048143357,
+       "portuguese_hate_speech": 0.6691879250295485,
+       "tweetsentbr": 0.6583269425379031
+     },
+     "all": {
+       "harness|enem_challenge|enem_challenge|None|3": 0.6431070678796361,
+       "harness|bluex|bluex|None|3": 0.5173852573018081,
+       "harness|oab_exams|oab_exams|None|3": 0.4419134396355353,
+       "harness|assin2_rte|assin2_rte|None|15": 0.8674910385642457,
+       "harness|assin2_sts|assin2_sts|None|15": 0.6823897405705615,
+       "harness|faquad_nli|faquad_nli|None|15": 0.6125554125554126,
+       "harness|hatebr_offensive|hatebr_offensive|None|25": 0.7709540048143357,
+       "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": 0.6691879250295485,
+       "harness|tweetsentbr|tweetsentbr|None|25": 0.6583269425379031
+     },
+     "harness|enem_challenge|enem_challenge|None|3": {
+       "acc,all": 0.6431070678796361,
+       "acc,exam_id__2010": 0.6495726495726496,
+       "acc,exam_id__2015": 0.6386554621848739,
+       "acc,exam_id__2012": 0.6637931034482759,
+       "acc,exam_id__2009": 0.6434782608695652,
+       "acc,exam_id__2016_2": 0.6747967479674797,
+       "acc,exam_id__2016": 0.6446280991735537,
+       "acc,exam_id__2017": 0.6120689655172413,
+       "acc,exam_id__2014": 0.5963302752293578,
+       "acc,exam_id__2022": 0.6165413533834586,
+       "acc,exam_id__2023": 0.6296296296296297,
+       "acc,exam_id__2011": 0.7008547008547008,
+       "acc,exam_id__2013": 0.6481481481481481,
+       "main_score": 0.6431070678796361
+     },
+     "harness|bluex|bluex|None|3": {
+       "acc,all": 0.5173852573018081,
+       "acc,exam_id__UNICAMP_2021_2": 0.6862745098039216,
+       "acc,exam_id__UNICAMP_2018": 0.46296296296296297,
+       "acc,exam_id__USP_2024": 0.6585365853658537,
+       "acc,exam_id__UNICAMP_2021_1": 0.41304347826086957,
+       "acc,exam_id__UNICAMP_2023": 0.5116279069767442,
+       "acc,exam_id__USP_2020": 0.44642857142857145,
+       "acc,exam_id__UNICAMP_2022": 0.6153846153846154,
+       "acc,exam_id__UNICAMP_2024": 0.4444444444444444,
+       "acc,exam_id__USP_2021": 0.5576923076923077,
+       "acc,exam_id__USP_2018": 0.37037037037037035,
+       "acc,exam_id__UNICAMP_2020": 0.509090909090909,
+       "acc,exam_id__USP_2022": 0.46938775510204084,
+       "acc,exam_id__USP_2023": 0.6363636363636364,
+       "acc,exam_id__USP_2019": 0.45,
+       "acc,exam_id__UNICAMP_2019": 0.58,
+       "main_score": 0.5173852573018081
+     },
+     "harness|oab_exams|oab_exams|None|3": {
+       "acc,all": 0.4419134396355353,
+       "acc,exam_id__2012-08": 0.375,
+       "acc,exam_id__2015-16": 0.4125,
+       "acc,exam_id__2014-13": 0.375,
+       "acc,exam_id__2011-03": 0.42424242424242425,
+       "acc,exam_id__2016-19": 0.5256410256410257,
+       "acc,exam_id__2012-07": 0.45,
+       "acc,exam_id__2011-05": 0.45,
+       "acc,exam_id__2013-12": 0.425,
+       "acc,exam_id__2017-23": 0.4,
+       "acc,exam_id__2016-21": 0.45,
+       "acc,exam_id__2016-20": 0.475,
+       "acc,exam_id__2013-10": 0.4875,
+       "acc,exam_id__2010-01": 0.43529411764705883,
+       "acc,exam_id__2011-04": 0.325,
+       "acc,exam_id__2015-17": 0.5769230769230769,
+       "acc,exam_id__2017-22": 0.5375,
+       "acc,exam_id__2016-20a": 0.3625,
+       "acc,exam_id__2017-24": 0.375,
+       "acc,exam_id__2015-18": 0.4125,
+       "acc,exam_id__2014-14": 0.4625,
+       "acc,exam_id__2013-11": 0.525,
+       "acc,exam_id__2012-06a": 0.45,
+       "acc,exam_id__2010-02": 0.5,
+       "acc,exam_id__2012-06": 0.3875,
+       "acc,exam_id__2014-15": 0.46153846153846156,
+       "acc,exam_id__2018-25": 0.475,
+       "acc,exam_id__2012-09": 0.38961038961038963,
+       "main_score": 0.4419134396355353
+     },
+     "harness|assin2_rte|assin2_rte|None|15": {
+       "f1_macro,all": 0.8674910385642457,
+       "acc,all": 0.8676470588235294,
+       "main_score": 0.8674910385642457
+     },
+     "harness|assin2_sts|assin2_sts|None|15": {
+       "pearson,all": 0.6823897405705615,
+       "mse,all": 0.6388841405942445,
+       "main_score": 0.6823897405705615
+     },
+     "harness|faquad_nli|faquad_nli|None|15": {
+       "f1_macro,all": 0.6125554125554126,
+       "acc,all": 0.8061538461538461,
+       "main_score": 0.6125554125554126
+     },
+     "harness|hatebr_offensive|hatebr_offensive|None|25": {
+       "f1_macro,all": 0.7709540048143357,
+       "acc,all": 0.7807142857142857,
+       "main_score": 0.7709540048143357
+     },
+     "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": {
+       "f1_macro,all": 0.6691879250295485,
+       "acc,all": 0.7097532314923619,
+       "main_score": 0.6691879250295485
+     },
+     "harness|tweetsentbr|tweetsentbr|None|25": {
+       "f1_macro,all": 0.6583269425379031,
+       "acc,all": 0.6776119402985075,
+       "main_score": 0.6583269425379031
+     }
+   },
+   "config_tasks": {
+     "harness|enem_challenge|enem_challenge": "LM Harness task",
+     "harness|bluex|bluex": "LM Harness task",
+     "harness|oab_exams|oab_exams": "LM Harness task",
+     "harness|assin2_rte|assin2_rte": "LM Harness task",
+     "harness|assin2_sts|assin2_sts": "LM Harness task",
+     "harness|faquad_nli|faquad_nli": "LM Harness task",
+     "harness|hatebr_offensive|hatebr_offensive": "LM Harness task",
+     "harness|portuguese_hate_speech|portuguese_hate_speech": "LM Harness task",
+     "harness|tweetsentbr|tweetsentbr": "LM Harness task"
+   },
+   "versions": {
+     "all": 0,
+     "harness|enem_challenge|enem_challenge": 1.1,
+     "harness|bluex|bluex": 1.1,
+     "harness|oab_exams|oab_exams": 1.5,
+     "harness|assin2_rte|assin2_rte": 1.1,
+     "harness|assin2_sts|assin2_sts": 1.1,
+     "harness|faquad_nli|faquad_nli": 1.1,
+     "harness|hatebr_offensive|hatebr_offensive": 1.0,
+     "harness|portuguese_hate_speech|portuguese_hate_speech": 1.0,
+     "harness|tweetsentbr|tweetsentbr": 1.0
+   },
+   "summary_tasks": {
+     "harness|enem_challenge|enem_challenge|None|3": {
+       "sample_size": 1429
+     },
+     "harness|bluex|bluex|None|3": {
+       "sample_size": 719
+     },
+     "harness|oab_exams|oab_exams|None|3": {
+       "sample_size": 2195
+     },
+     "harness|assin2_rte|assin2_rte|None|15": {
+       "sample_size": 2448
+     },
+     "harness|assin2_sts|assin2_sts|None|15": {
+       "sample_size": 2448
+     },
+     "harness|faquad_nli|faquad_nli|None|15": {
+       "sample_size": 650
+     },
+     "harness|hatebr_offensive|hatebr_offensive|None|25": {
+       "sample_size": 1400
+     },
+     "harness|portuguese_hate_speech|portuguese_hate_speech|None|25": {
+       "sample_size": 851
+     },
+     "harness|tweetsentbr|tweetsentbr|None|25": {
+       "sample_size": 2010
+     }
+   },
+   "summary_general": {}
+ }
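
As a minimal sketch of how this results file can be read back (assuming the repository is checked out locally so the JSON path above exists), the headline "all_grouped_average" is simply the unweighted mean of the nine per-task main scores listed under "all_grouped":

```python
import json
from statistics import mean

# Path of the file added in this commit (assumed available locally).
path = "anthracite-org/magnum-v2.5-12b-kto/results_2025-05-07T19-17-25.777956.json"

with open(path, encoding="utf-8") as f:
    data = json.load(f)

results = data["results"]

# Recompute the grouped average from the nine per-task scores.
recomputed = mean(results["all_grouped"].values())

print(recomputed)                       # ~0.6514789809876652
print(results["all_grouped_average"])   # 0.6514789809876652 (reported value)
```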