| model (string, 4-89 chars) | revision (string, 1 class) | model_sha (string, 0-40 chars) | results (dict) | commit (string, 40 chars) | date (timestamp[ns]) | score (float64, 21.8-83, may be null) |
|---|---|---|---|---|---|---|
| togethercomputer/RedPajama-INCITE-Instruct-7B-v0.1 | main | 95667a602ff2646bf67fe3a57c4eb9a1edec87fe | {"arc:challenge": 44.1, "hellaswag": 72, "hendrycksTest": 37.6, "truthfulqa:mc": 34} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 46.9 |
| togethercomputer/RedPajama-INCITE-7B-Instruct | main | 95667a602ff2646bf67fe3a57c4eb9a1edec87fe | {"arc:challenge": 44.1, "hellaswag": 72, "hendrycksTest": 37.6, "truthfulqa:mc": 34} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 46.9 |
| togethercomputer/Llama-2-7B-32K-Instruct | main | 35696b9a7ab330dcbe240ff76fb44ab1eccf45bf | {"arc:challenge": 51.1, "hellaswag": 78.5, "hendrycksTest": 46.1, "truthfulqa:mc": 44.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 55.2 |
| togethercomputer/LLaMA-2-7B-32K | main | aef6d8946ae1015bdb65c478a2dd73b58daaef47 | {"arc:challenge": 48, "hellaswag": 77.5, "hendrycksTest": 45.4, "truthfulqa:mc": 38.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 52.4 |
| togethercomputer/RedPajama-INCITE-Base-3B-v1 | main | 094fbdd0c911feb485ce55de1952ab2e75277e1e | {"arc:challenge": 40.2, "hellaswag": 64.8, "hendrycksTest": 27, "truthfulqa:mc": 33.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 41.3 |
| togethercomputer/RedPajama-INCITE-7B-Base | main | 78f7e482443971f4873ba3239f0ac810a367833b | {"arc:challenge": 46.2, "hellaswag": 71.6, "hendrycksTest": 27.7, "truthfulqa:mc": 33} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 44.6 |
| togethercomputer/RedPajama-INCITE-Instruct-3B-v1 | main | 0c66778ee09a036886741707733620b91057909a | {"arc:challenge": 41.6, "hellaswag": 65.5, "hendrycksTest": 25, "truthfulqa:mc": 36.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 42.1 |
| togethercomputer/Pythia-Chat-Base-7B | main | 97aa918c383820e1a69f042801091d7deb996c20 | {"arc:challenge": 40, "hellaswag": 68.7, "hendrycksTest": 27.4, "truthfulqa:mc": 34.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 42.7 |
| togethercomputer/GPT-NeoXT-Chat-Base-20B | main | d386708e84d862a65f7d2b4989f64750cb657227 | {"arc:challenge": 45.6, "hellaswag": 74, "hendrycksTest": 29.9, "truthfulqa:mc": 34.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 46 |
| togethercomputer/GPT-JT-6B-v1 | main | f34aa35f906895602c1f86f5685e598afdea8051 | {"arc:challenge": 40.9, "hellaswag": 67.1, "hendrycksTest": 47.2, "truthfulqa:mc": 37.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 48.1 |
| togethercomputer/GPT-JT-Moderation-6B | main | 1297870783f6091294769014afddf94499966a78 | {"arc:challenge": 40.5, "hellaswag": 67.7, "hendrycksTest": 41.6, "truthfulqa:mc": 37.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 46.8 |
| togethercomputer/RedPajama-INCITE-Base-7B-v0.1 | main | 78f7e482443971f4873ba3239f0ac810a367833b | {"arc:challenge": 46.2, "hellaswag": 71.6, "hendrycksTest": 27.7, "truthfulqa:mc": 33} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 44.6 |
| togethercomputer/RedPajama-INCITE-Chat-3B-v1 | main | f0e0995eba801096ed04cb87931d96a8316871af | {"arc:challenge": 42.8, "hellaswag": 67.6, "hendrycksTest": 26.2, "truthfulqa:mc": 34.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 42.8 |
| Aspik101/trurl-2-13b-pl-instruct_unload | main | 17f57642165e30a4025d6817bd47dcd80d0c5c4d | {"arc:challenge": 59.9, "hellaswag": 80, "hendrycksTest": 78.7, "truthfulqa:mc": 45.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 66.1 |
| Aspik101/trurl-2-7b-pl-instruct_unload | main | 768d800e4dbe3fc95334f30ca7cd02113d3e3fd3 | {"arc:challenge": 53.2, "hellaswag": 74.6, "hendrycksTest": 49.9, "truthfulqa:mc": 45.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 55.8 |
| Aspik101/Vicuzard-30B-Uncensored-instruct-PL-lora_unload | main | 652f03ac67b4293198d98b618e64285fb32a28e9 | {"arc:challenge": 62.5, "hellaswag": 83.7, "hendrycksTest": 57.8, "truthfulqa:mc": 50.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 63.7 |
| Aspik101/Nous-Hermes-13b-pl-lora_unload | main | d0ef3991a11c4dc2ea2f832d4082c89c3c5e810c | {"arc:challenge": 57.1, "hellaswag": 81.5, "hendrycksTest": 49.2, "truthfulqa:mc": 48.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 59 |
| Aspik101/vicuna-13b-v1.5-PL-lora_unload | main | 5c8aeb722e11d1c7258abd45f9f2840f57976c28 | {"arc:challenge": 56.9, "hellaswag": 81.2, "hendrycksTest": 56.1, "truthfulqa:mc": 49.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 61 |
| Aspik101/Redmond-Puffin-13B-instruct-PL-lora_unload | main | b933009635299bca32c694336aa2007d756a2dda | {"arc:challenge": 60.9, "hellaswag": 82.4, "hendrycksTest": 55.6, "truthfulqa:mc": 44.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 60.8 |
| Aspik101/tulu-7b-instruct-pl-lora_unload | main | 962d4e5d8da5a4ec0ec047b6f8f08f1bb9e509fe | {"arc:challenge": 28.7, "hellaswag": 26.1, "hendrycksTest": 23.1, "truthfulqa:mc": 48.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 31.6 |
| Aspik101/llama-30b-2048-instruct-PL-lora_unload | main | b15f4310ea37fef99e4f16372a4b1f2342e27613 | {"arc:challenge": 63.8, "hellaswag": 84.7, "hendrycksTest": 61.5, "truthfulqa:mc": 52.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 65.6 |
| Aspik101/30B-Lazarus-instruct-PL-lora_unload | main | eeb29b35ceb6dd5c532f1e4e1235f1cdd3f51f23 | {"arc:challenge": 62.8, "hellaswag": 84.1, "hendrycksTest": 56.9, "truthfulqa:mc": 55.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 64.8 |
| Aspik101/vicuna-7b-v1.3-instruct-pl-lora_unload | main | e4b19d9d6168b32402da4ab2b5ec7ff27cf40d9b | {"arc:challenge": 48, "hellaswag": 76.3, "hendrycksTest": 47.4, "truthfulqa:mc": 44.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 54 |
| Aspik101/StableBeluga-13B-instruct-PL-lora_unload | main | 6e1a6e1f91f6ac97b643be1bd24be6096e2e7dd3 | {"arc:challenge": 60.9, "hellaswag": 82.1, "hendrycksTest": 57, "truthfulqa:mc": 48.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 62.2 |
| Aspik101/llama-30b-instruct-2048-PL-lora | main | 1a076bce564f03bd47951eecab628c541fb1a6ad | {"arc:challenge": 63.3, "hellaswag": 84.7, "hendrycksTest": 61.7, "truthfulqa:mc": 53.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 65.8 |
| Aspik101/WizardVicuna-Uncensored-3B-instruct-PL-lora_unload | main | e471ec778771f29992293d1660cc108f29c9c69e | {"arc:challenge": 42, "hellaswag": 66.8, "hendrycksTest": 25.7, "truthfulqa:mc": 39.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 43.6 |
| Aspik101/Llama-2-7b-hf-instruct-pl-lora_unload | main | 3dfef350be9c8ce92c2d314dbe96a002bd6ca97d | {"arc:challenge": 53.8, "hellaswag": 78.3, "hendrycksTest": 46.8, "truthfulqa:mc": 42.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 55.3 |
| NewstaR/Starlight-13B | main | cb9fced568b1abd881133c642c427aaa488f00cc | {"arc:challenge": 59.3, "hellaswag": 82.2, "hendrycksTest": 55.7, "truthfulqa:mc": 37.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 58.6 |
| NewstaR/Starlight-7B | main | 1f7436c458ebc3d8d31b91091c1a7a48e942cd3b | {"arc:challenge": 53.1, "hellaswag": 78.6, "hendrycksTest": 46.8, "truthfulqa:mc": 38.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 54.3 |
| NewstaR/Morningstar-13b-hf | main | 2605b5b3b0ecba906ac26d39aab40f33c2ec81c9 | {"arc:challenge": 59, "hellaswag": 81.9, "hendrycksTest": 54.6, "truthfulqa:mc": 44.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 59.9 |
| AtomEchoAI/AtomGPT_56k | main | f69ecfd630ec89afffa4ca7bd8a5eda0daf57643 | {"arc:challenge": 53.2, "hellaswag": 76.7, "hendrycksTest": 45.3, "truthfulqa:mc": 40.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 53.9 |
| MayaPH/GodziLLa-30B-plus | main | a66b1860d11ebf8aed07237cf636fdd2b3a07f06 | {"arc:challenge": 28.9, "hellaswag": 26.4, "hendrycksTest": 24.6, "truthfulqa:mc": 48.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 32.2 |
| MayaPH/GodziLLa-30B-instruct | main | 642bf3683801e20e4b7cf28d94374d5e6054c007 | {"arc:challenge": 29, "hellaswag": 26.5, "hendrycksTest": 24.9, "truthfulqa:mc": 48.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 32.3 |
| MayaPH/FinOPT-Washington | main | cdd8a6cde7902de39757cf31d73af1f51df0d8e8 | {"arc:challenge": 25.2, "hellaswag": 26.2, "hendrycksTest": 24.8, "truthfulqa:mc": 45.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 30.5 |
| MayaPH/GodziLLa2-70B | main | 7b78087db07eec97f7b461d10758ece76d685543 | {"arc:challenge": 71.4, "hellaswag": 87.5, "hendrycksTest": 69.9, "truthfulqa:mc": 61.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 72.6 |
| MayaPH/GodziLLa-30B | main | aa9912a2ac60abeac28b4566731cd903dcc582ac | {"arc:challenge": 61.5, "hellaswag": 82.1, "hendrycksTest": 54.2, "truthfulqa:mc": 55.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 63.4 |
| MayaPH/FinOPT-Franklin | main | 1b13331834190bfe49a176f1661ba4d8309a5051 | {"arc:challenge": 27.7, "hellaswag": 24.9, "hendrycksTest": 23.1, "truthfulqa:mc": 52.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 32 |
| MayaPH/FinOPT-Lincoln | main | 7ddc381fa3968df22f72acb6cf03b75d3ac49661 | {"arc:challenge": 26.7, "hellaswag": 25.6, "hendrycksTest": 23, "truthfulqa:mc": 50.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 31.5 |
| MayaPH/opt-flan-iml-6.7b | main | cbe8d60db6f3c52e653ca73e23a1c34c08127d02 | {"arc:challenge": 30.1, "hellaswag": 58.8, "hendrycksTest": 25.1, "truthfulqa:mc": 36.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 37.7 |
| Mikivis/gpt2-large-lora-sft1 | main | 8e26a8d2dc1661d87a8652c75f00b805d63e7330 | {"arc:challenge": 24.7, "hellaswag": 42.7, "hendrycksTest": 24.9, "truthfulqa:mc": 39.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 32.9 |
| Mikivis/gpt2-large-lora-sft | main | 1c0c5a686f3c83692e033416197155557e4d3a0d | {"arc:challenge": 26.8, "hellaswag": 44.2, "hendrycksTest": 25.8, "truthfulqa:mc": 39.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 34 |
| Mikivis/gpt2-large-lora-sft2 | main | 1244efb5d20765beb54f6b4a4e1426cf6d5daf44 | {"arc:challenge": 26.6, "hellaswag": 42.7, "hendrycksTest": 24.7, "truthfulqa:mc": 40.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 33.6 |
| Mikivis/gpt2-large-lora-stf4 | main | 82eff3a62116fd589ad7319c9d75ff6b12f42f72 | {"arc:challenge": 26.9, "hellaswag": 42.2, "hendrycksTest": 25.5, "truthfulqa:mc": 40.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 33.8 |
| matsuo-lab/weblab-10b | main | d6fc432983b1633a4c1568d121c60de6b8c3e511 | {"arc:challenge": 39.5, "hellaswag": 65.8, "hendrycksTest": 26.3, "truthfulqa:mc": 36} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 41.9 |
| matsuo-lab/weblab-10b-instruction-sft | main | 112a5ad9f556078ab14a5cd93511b9db4a0d4413 | {"arc:challenge": 40.1, "hellaswag": 65.3, "hendrycksTest": 26.7, "truthfulqa:mc": 36.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 42.2 |
| Norquinal/llama-2-7b-claude-chat-rp | main | 4309eedebe8ba5709e0cc7cf186cb783f3bc8060 | {"arc:challenge": 54.9, "hellaswag": 80.1, "hendrycksTest": 47, "truthfulqa:mc": 43.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 56.4 |
| anton-l/gpt-j-tiny-random | main | feea91564dac0081f73aeb6744979c6cfe553fff | {"arc:challenge": 26.4, "hellaswag": 25.8, "hendrycksTest": 24.5, "truthfulqa:mc": 47.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 31 |
| GeorgiaTechResearchInstitute/starcoder-gpteacher-code-instruct | main | d866b68daa719239dc44979dbf39a608ed6f7bce | {"arc:challenge": 32.7, "hellaswag": 47.6, "hendrycksTest": 28.6, "truthfulqa:mc": 40.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 37.3 |
| GeorgiaTechResearchInstitute/galactica-6.7b-evol-instruct-70k | main | 14fa470051d0bc38fd871643186a9edfd3a8a9aa | {"arc:challenge": 42.6, "hellaswag": 49.3, "hendrycksTest": 33, "truthfulqa:mc": 42.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 41.8 |
| GeorgiaTechResearchInstitute/galpaca-30b | main | a1f0c4bedd65b485a0d4d3a3bd60d7a4599f1eaf | {"arc:challenge": 49.6, "hellaswag": 58.2, "hendrycksTest": 43.8, "truthfulqa:mc": 41.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 48.2 |
| xzuyn/MedicWizard-7B | main | 0b3ef975fb5e8ac1eae775160ab54c98221889df | {"arc:challenge": 53.5, "hellaswag": 78.4, "hendrycksTest": 44.6, "truthfulqa:mc": 41.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 54.4 |
| xzuyn/Alpacino-SuperCOT-13B | main | 3a82b04684fe99d59556421c3f96a187049a3cec | {"arc:challenge": 58.4, "hellaswag": 81.7, "hendrycksTest": 47.9, "truthfulqa:mc": 45.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 58.4 |
| tianyil1/denas-llama2 | main | b8aebc9157c0e427536aeac9132021fd66615702 | {"arc:challenge": 53.9, "hellaswag": 77.8, "hendrycksTest": 45.5, "truthfulqa:mc": 45.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 55.6 |
| NekoPunchBBB/Llama-2-13b-hf_Open-Platypus-QLoRA-multigpu | main | f65029ea8f030731ace568e40bab33a7097a13de | {"arc:challenge": 57.5, "hellaswag": 82.5, "hendrycksTest": 54.8, "truthfulqa:mc": 43.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 59.6 |
| NekoPunchBBB/Llama-2-13b-hf_Open-Platypus-8bit-att | main | 83a8e51d0a72dcfbe5de13dc7ee10dc20e91602e | {"arc:challenge": 57.5, "hellaswag": 82.1, "hendrycksTest": 54.6, "truthfulqa:mc": 42.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 59.1 |
| NekoPunchBBB/Llama-2-13b-hf_Open-Platypus | main | c318a24121bd69509f395e17a9636093213ece21 | {"arc:challenge": 58.9, "hellaswag": 82.1, "hendrycksTest": 55, "truthfulqa:mc": 42.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 59.7 |
| FlagAlpha/Llama2-Chinese-13b-Chat | main | cb69cda10a72bc9736b1c10181ac41f28b69ff9b | {"arc:challenge": 56, "hellaswag": 82, "hendrycksTest": 54.7, "truthfulqa:mc": 48.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 60.4 |
| jphme/orca_mini_v2_ger_7b | main | 175965f50907c6a8cd40f1a4b10d28342969c066 | {"arc:challenge": 49.8, "hellaswag": 75.5, "hendrycksTest": 39.1, "truthfulqa:mc": 45.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 52.5 |
| jaspercatapang/Echidna-30B | main | 20b13b6676d54b555ae2b9b2b4b6fc8a0c7c2e89 | {"arc:challenge": 28.5, "hellaswag": 25.5, "hendrycksTest": 24.9, "truthfulqa:mc": 48.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 31.8 |
| timdettmers/guanaco-33b-merged | main | b2e78a916582935b6616d184b22ea5e9e1eb4c34 | {"arc:challenge": 62.5, "hellaswag": 84.5, "hendrycksTest": 53.8, "truthfulqa:mc": 51.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 63 |
| timdettmers/guanaco-65b-merged | main | 98c803bb6e70efe9f2aefb12cba36a96f2959d4d | {"arc:challenge": 27.5, "hellaswag": 26.6, "hendrycksTest": 25.2, "truthfulqa:mc": 48.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 31.9 |
| xhyi/PT_GPTNEO350_ATG | main | 56ab08aaa6802d0f830d42c352d5d536be72811d | {"arc:challenge": 25.4, "hellaswag": 37.6, "hendrycksTest": 24.8, "truthfulqa:mc": 43} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 32.7 |
| Andron00e/YetAnother_Open-Llama-3B-LoRA-OpenOrca | main | | {"arc:challenge": 24.8, "hellaswag": 26.3, "hendrycksTest": 25.2, "truthfulqa:mc": null} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | null |
| Andron00e/YetAnother_Open-Llama-3B-LoRA | main | 52c5cb0178831908ed0571f1750fcb0f0fb125f9 | {"arc:challenge": 25.9, "hellaswag": 25.8, "hendrycksTest": 24.7, "truthfulqa:mc": null} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | null |
| WizardLM/WizardLM-30B-V1.0 | main | 815e2dd7daabe446c429f3c9f70ef01582528f81 | {"arc:challenge": 27.4, "hellaswag": 25.9, "hendrycksTest": 23.1, "truthfulqa:mc": 48.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 31.2 |
| WizardLM/WizardMath-13B-V1.0 | main | 209316bea6eab73d8b18fca2a730b1dff3dcf999 | {"arc:challenge": 60.1, "hellaswag": 82, "hendrycksTest": 54.8, "truthfulqa:mc": 42.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 59.9 |
| WizardLM/WizardMath-70B-V1.0 | main | e85b43e53c5379e35393b970c66d76c2d1060381 | {"arc:challenge": 68.2, "hellaswag": 86.5, "hendrycksTest": 68.9, "truthfulqa:mc": 52.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 69.1 |
| WizardLM/WizardLM-70B-V1.0 | main | 6dae38060d70b82dcfe787a612d04aaf0adf0738 | {"arc:challenge": 65.4, "hellaswag": 84.4, "hendrycksTest": 64, "truthfulqa:mc": 54.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 67.2 |
| WizardLM/WizardCoder-15B-V1.0 | main | 926ca1b215c4631bc5f8c3e47173381452c23e5c | {"arc:challenge": 32.3, "hellaswag": 47.2, "hendrycksTest": 29.4, "truthfulqa:mc": 41.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 37.6 |
| WizardLM/WizardCoder-Python-34B-V1.0 | main | 5cdc34e4a81d202f1d4a3b5d60e028aab895dfeb | {"arc:challenge": 52.1, "hellaswag": 74.8, "hendrycksTest": 49.1, "truthfulqa:mc": 48.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 56.2 |
| WizardLM/WizardLM-13B-V1.1 | main | badd80f8a6f46fb15310fedf6d4db54959854897 | {"arc:challenge": 60.2, "hellaswag": 81.4, "hendrycksTest": 50.9, "truthfulqa:mc": 54.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 61.8 |
| WizardLM/WizardMath-7B-V1.0 | main | 06dbd3e0da08255c575e585cb82e0554c1d2707a | {"arc:challenge": 54.1, "hellaswag": 79.5, "hendrycksTest": 46, "truthfulqa:mc": 43.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 55.8 |
| WizardLM/WizardLM-13B-V1.2 | main | 6760d0c07ffdc2405295ed7a29437cf4dc414bac | {"arc:challenge": 59, "hellaswag": 82.2, "hendrycksTest": 54.6, "truthfulqa:mc": 47.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 60.8 |
| sartmis1/starcoder-finetune-openapi | main | fed87393fd749e46c0c82da09d433deb9b7cf9ee | {"arc:challenge": 30.6, "hellaswag": 48.1, "hendrycksTest": 30.4, "truthfulqa:mc": 41.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 37.7 |
| sartmis1/starcoder-finetune-selfinstruct | main | b21bd307ea7417185e7dc59557c399a3e4e0092b | {"arc:challenge": 31.2, "hellaswag": 47.7, "hendrycksTest": 29.5, "truthfulqa:mc": 41.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 37.5 |
| PY007/TinyLlama-1.1B-step-50K-105b | main | c1f1ef67c12e4bb85fe0bdf1747c645a202cc118 | {"arc:challenge": 25.9, "hellaswag": 44.1, "hendrycksTest": 26.8, "truthfulqa:mc": 39.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 34.1 |
| PY007/TinyLlama-1.1B-intermediate-step-240k-503b | main | 213ebf60d7fdd3258fa5574840b06c97a7e8cf5d | {"arc:challenge": 29.3, "hellaswag": 49.7, "hendrycksTest": 26.3, "truthfulqa:mc": 40.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 36.4 |
| HanningZhang/Robin-v2 | main | | {"arc:challenge": 48.8, "hellaswag": 74.5, "hendrycksTest": 39.3, "truthfulqa:mc": 42.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 51.2 |
| nicholasKluge/Aira-2-774M | main | f43044cfe7bf0827a176f0d319c63251c2b29373 | {"arc:challenge": 28.8, "hellaswag": 40.8, "hendrycksTest": 25.1, "truthfulqa:mc": 41.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 34 |
| nicholasKluge/Aira-2-355M | main | 2479f5b1bb62251ec88e60182ba81390a4c19cf9 | {"arc:challenge": 27.6, "hellaswag": 38.9, "hendrycksTest": 27.3, "truthfulqa:mc": 38.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 33.1 |
| nicholasKluge/Aira-124M | main | 0c0d509ec9ce057e7b506e15c868eecf79cc8ae5 | {"arc:challenge": 24.6, "hellaswag": 31.3, "hendrycksTest": 25.3, "truthfulqa:mc": 41} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 30.6 |
| DanielSc4/RedPajama-INCITE-Chat-3B-v1-RL-LoRA-8bit-test1 | main | a2ee88a9fa1c9ad41e0a8c15217a4b1230ec33c8 | {"arc:challenge": 41.3, "hellaswag": 66.8, "hendrycksTest": 26.1, "truthfulqa:mc": 35} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 42.3 |
| DanielSc4/RedPajama-INCITE-Chat-3B-v1-FT-LoRA-8bit-test1 | main | f477d24b00e05fe4c5f8d5f933080994cfd90e4e | {"arc:challenge": 38.7, "hellaswag": 63.5, "hendrycksTest": 25.2, "truthfulqa:mc": 36.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 40.9 |
| IGeniusDev/llama13B-quant8-testv1-openorca-customdataset | main | f364d000bedac80e72aa103c08b77aee1b61b7da | {"arc:challenge": 60.2, "hellaswag": 83, "hendrycksTest": 54.3, "truthfulqa:mc": 37.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 58.7 |
| NousResearch/Nous-Hermes-Llama2-13b | main | 8f95aa9cd207db7b24179fc779c2b8973e71bee2 | {"arc:challenge": 61.3, "hellaswag": 83.3, "hendrycksTest": 55, "truthfulqa:mc": 50.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 62.5 |
| NousResearch/CodeLlama-13b-hf | main | b7cfbbce945b966607d15ae275704922a6d04afc | {"arc:challenge": 40.9, "hellaswag": 63.4, "hendrycksTest": 32.8, "truthfulqa:mc": 43.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 45.2 |
| NousResearch/Nous-Puffin-70B | main | 129e0af93d04b1b9cc85ea48bbb300f1ccb44210 | {"arc:challenge": 67.4, "hellaswag": 87.4, "hendrycksTest": 69.8, "truthfulqa:mc": 46.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 67.9 |
| NousResearch/Nous-Hermes-13b | main | 24e8c03148ffd1f3e469744dfc24ad2ad82848f8 | {"arc:challenge": 56.6, "hellaswag": 82.1, "hendrycksTest": 50.4, "truthfulqa:mc": 51.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 60.2 |
| NousResearch/Nous-Hermes-llama-2-7b | main | 60e58acecdc1552e1b1752a38d1d91d942d1c3f0 | {"arc:challenge": 55.1, "hellaswag": 78.9, "hendrycksTest": 48.3, "truthfulqa:mc": 49} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 57.8 |
| NousResearch/CodeLlama-34b-hf | main | 4e61ec70eb258047f5bc689fa6a66f7753da52b8 | {"arc:challenge": 37.5, "hellaswag": 31.8, "hendrycksTest": 37.2, "truthfulqa:mc": 38.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 36.4 |
| NousResearch/Nous-Hermes-Llama2-70b | main | 13a0b4da159ad95c93e72a002d893c48ed0f257a | {"arc:challenge": 67.6, "hellaswag": 86.8, "hendrycksTest": 69.7, "truthfulqa:mc": 55} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 69.8 |
| NousResearch/Redmond-Puffin-13B | main | 12af25fa7ea02c4fc636952ea8b9dc9cf48e35be | {"arc:challenge": 60.5, "hellaswag": 83.2, "hendrycksTest": 55, "truthfulqa:mc": 42.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 60.2 |
| ddobokki/Llama-2-70b-orca-200k | main | 1ab69d47a467f15d8168b119ad24c1842d3ff54e | {"arc:challenge": 64.8, "hellaswag": 85.3, "hendrycksTest": 66.9, "truthfulqa:mc": 56.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 68.3 |
| eachadea/vicuna-13b-1.1 | main | bfcc6ca66694310be6c85ba0638597f4256c4143 | {"arc:challenge": 52.7, "hellaswag": 80.1, "hendrycksTest": 51.9, "truthfulqa:mc": 52.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 59.2 |
| eachadea/vicuna-13b | main | ac4218770a58baaaaf25201076fe082abb6ffd13 | {"arc:challenge": 51.7, "hellaswag": 79.9, "hendrycksTest": 50.8, "truthfulqa:mc": 52.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 58.8 |
| eachadea/vicuna-7b-1.1 | main | 9d8eea215e00b388a22e8f050768ea8911d41f1d | {"arc:challenge": 53.7, "hellaswag": 77.5, "hendrycksTest": 45.6, "truthfulqa:mc": 48.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 56.4 |
| lvkaokao/llama2-7b-hf-chat-lora-v2 | main | 0b8e61d3325cddbad207cbf885c2b5db6a83a059 | {"arc:challenge": 55, "hellaswag": 78.8, "hendrycksTest": 51.3, "truthfulqa:mc": 44} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 57.3 |
| lvkaokao/llama2-7b-hf-instruction-lora | main | f660a40323b29040e78097acca320517ed242512 | {"arc:challenge": 55.4, "hellaswag": 78.6, "hendrycksTest": 49.4, "truthfulqa:mc": 41.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 56.3 |
| lvkaokao/llama2-7b-hf-chat-lora-v3 | main | 79047f667253c878ad3143b016e3dcb3df707572 | {"arc:challenge": 57.3, "hellaswag": 78.6, "hendrycksTest": 50.6, "truthfulqa:mc": 50.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 59.3 |
| OpenBuddy/openbuddy-openllama-13b-v7-fp16 | main | 8690c065bccd3e897ccbf3d8aa24b0216a6f5dba | {"arc:challenge": 47.6, "hellaswag": 72.2, "hendrycksTest": 47.7, "truthfulqa:mc": 48.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 54 |
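
Across the rows above, the `score` column is consistent with the unweighted mean of the four benchmark values in the `results` cell, rounded to the one decimal place used in the table (for example, (44.1 + 72 + 37.6 + 34) / 4 = 46.925, reported as 46.9). This is an observation from the listed values, not a documented schema; the short Python sketch below simply re-checks it for one row copied verbatim from the table.

```python
import json

# One row copied verbatim from the table above. The `results` cell holds the
# per-benchmark scores as a JSON dict; `score` is the aggregate in the last column.
row = {
    "model": "togethercomputer/RedPajama-INCITE-Instruct-7B-v0.1",
    "results": '{"arc:challenge": 44.1, "hellaswag": 72, "hendrycksTest": 37.6, "truthfulqa:mc": 34}',
    "score": 46.9,
}

benchmarks = json.loads(row["results"])
mean = sum(benchmarks.values()) / len(benchmarks)

# (44.1 + 72 + 37.6 + 34) / 4 = 46.925, which agrees with the reported 46.9
# at the table's one-decimal precision.
print(f"{row['model']}: mean={mean:.3f}, reported score={row['score']}")
```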