{
"results": {
"anli_r1": {
"acc": 0.321,
"acc_stderr": 0.014770821817934644
},
"anli_r2": {
"acc": 0.361,
"acc_stderr": 0.015195720118175118
},
"anli_r3": {
"acc": 0.3333333333333333,
"acc_stderr": 0.013613950010225603
},
"cb": {
"acc": 0.39285714285714285,
"acc_stderr": 0.0658538889806635,
"f1": 0.27449576321756775
},
"copa": {
"acc": 0.64,
"acc_stderr": 0.04824181513244218
},
"hellaswag": {
"acc": 0.3007369049990042,
"acc_stderr": 0.0045764127139515,
"acc_norm": 0.3345947022505477,
"acc_norm_stderr": 0.004708842600177446
},
"rte": {
"acc": 0.49458483754512633,
"acc_stderr": 0.030094698123239966
},
"winogrande": {
"acc": 0.5153906866614049,
"acc_stderr": 0.014045826789783665
},
"storycloze_2016": {
"acc": 0.5905932656333511,
"acc_stderr": 0.01137105952719707
},
"boolq": {
"acc": 0.544954128440367,
"acc_stderr": 0.008709637955263421
},
"arc_easy": {
"acc": 0.4861111111111111,
"acc_stderr": 0.010255824507190342,
"acc_norm": 0.4659090909090909,
"acc_norm_stderr": 0.010235908103438688
},
"arc_challenge": {
"acc": 0.20051194539249148,
"acc_stderr": 0.011700318050499361,
"acc_norm": 0.24146757679180889,
"acc_norm_stderr": 0.012506564839739432
},
"sciq": {
"acc": 0.817,
"acc_stderr": 0.012233587399477825,
"acc_norm": 0.81,
"acc_norm_stderr": 0.012411851354816329
},
"piqa": {
"acc": 0.6425462459194777,
"acc_stderr": 0.011181692590867659,
"acc_norm": 0.6512513601741022,
"acc_norm_stderr": 0.011119263056159599
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}