lm1-misc-pile/421m32b32b/421m32b32bpile/evaluation/rankeval/lm1-421m-32b-results_lm-eval_global_step60336_2023-01-24-17-05-33_5shots.json
{
"results": {
"anli_r1": {
"acc": 0.325,
"acc_stderr": 0.014818724459095526
},
"anli_r2": {
"acc": 0.322,
"acc_stderr": 0.014782913600996662
},
"anli_r3": {
"acc": 0.3383333333333333,
"acc_stderr": 0.013664144006618275
},
"cb": {
"acc": 0.5,
"acc_stderr": 0.06741998624632421,
"f1": 0.3176319176319176
},
"copa": {
"acc": 0.63,
"acc_stderr": 0.04852365870939099
},
"hellaswag": {
"acc": 0.29954192391953793,
"acc_stderr": 0.004571212360565283,
"acc_norm": 0.3345947022505477,
"acc_norm_stderr": 0.0047088426001774385
},
"rte": {
"acc": 0.5270758122743683,
"acc_stderr": 0.030052303463143706
},
"winogrande": {
"acc": 0.5217048145224941,
"acc_stderr": 0.01403923921648463
},
"storycloze_2016": {
"acc": 0.5889898450026724,
"acc_stderr": 0.011377828319387507
},
"boolq": {
"acc": 0.5382262996941896,
"acc_stderr": 0.00871946009810685
},
"arc_easy": {
"acc": 0.48569023569023567,
"acc_stderr": 0.010255580881603624,
"acc_norm": 0.4642255892255892,
"acc_norm_stderr": 0.010233488709726556
},
"arc_challenge": {
"acc": 0.20051194539249148,
"acc_stderr": 0.011700318050499358,
"acc_norm": 0.2363481228668942,
"acc_norm_stderr": 0.01241496052430183
},
"sciq": {
"acc": 0.844,
"acc_stderr": 0.01148023500612236,
"acc_norm": 0.826,
"acc_norm_stderr": 0.01199449323097343
},
"piqa": {
"acc": 0.6398258977149075,
"acc_stderr": 0.011200375176667486,
"acc_norm": 0.6436343852013058,
"acc_norm_stderr": 0.011174109865864729
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}