Column types: model (string, 4–89 chars), revision (string, single value), model_sha (string, 0–40 chars), results (dict), commit (string, 40 chars), date (timestamp[ns]), score (float64, range 21.8–83, ⌀ = missing).

model | revision | model_sha | results | commit | date | score |
---|---|---|---|---|---|---|
maywell/Synatra-11B-Testbench | main | 9399ea6c2a1d955e31d6b4d68b2b86115aea0e59 | {"arc:challenge": 57.3, "hellaswag": 78.7, "hendrycksTest": 55.6, "truthfulqa:mc": 52} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 60.9 |
maywell/koOpenChat-sft | main | 47472b36e181694422564b130ee075ffa596537d | {"arc:challenge": 59.8, "hellaswag": 78.7, "hendrycksTest": 61.3, "truthfulqa:mc": 51.2} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 62.8 |
TaylorAI/Flash-Llama-30M-20001 | main | 6ff84442217565875450bd7a0457121dcedf6b0b | {"arc:challenge": 23.9, "hellaswag": 25.8, "hendrycksTest": 24.1, "truthfulqa:mc": 51.3} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 31.3 |
TaylorAI/Flash-Llama-3B | main | b4c7bb49171ff6955cfc1f7e33143383c57f7606 | {"arc:challenge": 40.1, "hellaswag": 71.6, "hendrycksTest": 26.9, "truthfulqa:mc": 34.7} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 43.3 |
TaylorAI/Flash-Llama-7B | main | 27c84ef23d850582453e1cc2dcea13de48da090f | {"arc:challenge": 53.1, "hellaswag": 78.6, "hendrycksTest": 46.8, "truthfulqa:mc": 38.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 54.3 |
TaylorAI/FLAN-Llama-7B-2_Llama2-7B-Flash_868_full_model | main | 819f3f384e37f8906a62a8048556c9e58e495c02 | {"arc:challenge": 52.5, "hellaswag": 79.1, "hendrycksTest": 47.6, "truthfulqa:mc": 37.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 54.1 |
starmpcc/Asclepius-Llama2-7B | main | 2f15bd8250d7825307e59cc2c785074ebbec3395 | {"arc:challenge": 50.9, "hellaswag": 76.5, "hendrycksTest": 43.6, "truthfulqa:mc": 43.3} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 53.6 |
starmpcc/Asclepius-Llama2-13B | main | 579271bebb894d89369205060d151120a217ce81 | {"arc:challenge": 55.9, "hellaswag": 79.7, "hendrycksTest": 52.4, "truthfulqa:mc": 40.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 57.2 |
revolutionarybukhari/Llama-2-7b-chat-finetune-AUTOMATE | main | 55862462a23ab43fb73d4c784f1518ab4645764c | {"arc:challenge": 53.1, "hellaswag": 75.6, "hendrycksTest": 48.8, "truthfulqa:mc": 44.7} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 55.6 |
perlthoughts/Chupacabra-7B-v2 | main | 0c7f7c85359f15d3e6c361e8192738bdfb14ea6c | {"arc:challenge": 66.5, "hellaswag": 85.2, "hendrycksTest": 64.5, "truthfulqa:mc": 57.6} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 68.4 |
perlthoughts/Chupacabra-7B | main | ae20703e16d89ba4a4301d12195cede64bd2ebdd | {"arc:challenge": 66.8, "hellaswag": 83.5, "hendrycksTest": 62.7, "truthfulqa:mc": 52.3} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 66.3 |
hpcai-tech/Colossal-LLaMA-2-7b-base | main | 1f30e4f2037e1e30122667639b8ef37138e85057 | {"arc:challenge": 53.5, "hellaswag": 70.5, "hendrycksTest": 54.4, "truthfulqa:mc": 50.2} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 57.2 |
adept/persimmon-8b-chat | main | 7f1c23bce0eb2a41a5c7417f10ef15405819286e | {"arc:challenge": 45, "hellaswag": 73.3, "hendrycksTest": 45, "truthfulqa:mc": 35.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 49.8 |
adept/persimmon-8b-base | main | 94dc4e0bb7eeb26ec521eb3f78c36c91f6fe866b | {"arc:challenge": 42.7, "hellaswag": 71.1, "hendrycksTest": 43.6, "truthfulqa:mc": 37.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 48.8 |
TheTravellingEngineer/bloom-560m-RLHF-v2 | main | 7128cbfcdaf67f1eff27e45d875c35e7b47618db | {"arc:challenge": 26.5, "hellaswag": 37.7, "hendrycksTest": 23.9, "truthfulqa:mc": 43.5} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 32.9 |
TheTravellingEngineer/llama2-7b-chat-hf-guanaco | main | 5d33696ee324899d52fc43794b46009fea08a9af | {"arc:challenge": 50.5, "hellaswag": 76.7, "hendrycksTest": 48, "truthfulqa:mc": 43.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 54.6 |
TheTravellingEngineer/bloom-560m-RLHF | main | b1769e92f325d8a28e7db1c21f133e6c85b84e78 | {"arc:challenge": 24.4, "hellaswag": 37, "hendrycksTest": 23.6, "truthfulqa:mc": 40.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 31.4 |
TheTravellingEngineer/llama2-7b-hf-guanaco | main | 6c1fc95e67b11f1011a3b2fc1aa05c7b83251e40 | {"arc:challenge": 52.5, "hellaswag": 78.7, "hendrycksTest": 45.3, "truthfulqa:mc": 43.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 55.1 |
TheTravellingEngineer/bloom-1b1-RLHF-v2 | main | 05f7f0fd82fb3a5798d4bb284b6c10dd9d380f22 | {"arc:challenge": 22.7, "hellaswag": 25, "hendrycksTest": 23.1, "truthfulqa:mc": null} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | null |
TheTravellingEngineer/llama2-7b-chat-hf-v4 | main | 405c54ec7aea0735996ef5ff6ede6c35ab930381 | {"arc:challenge": 53.1, "hellaswag": 78.6, "hendrycksTest": 46.8, "truthfulqa:mc": 38.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 54.3 |
FabbriSimo01/Facebook_opt_1.3b_Quantized | main | 7ef72ccee9d91d06967809e4e63ffbef62a9ad4a | {"arc:challenge": 22.7, "hellaswag": 25, "hendrycksTest": 23.1, "truthfulqa:mc": null} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | null |
Secbone/llama-2-13B-instructed | main | e676fbd9015beacfba5d71426beace7605200477 | {"arc:challenge": 59.4, "hellaswag": 83.9, "hendrycksTest": 55.6, "truthfulqa:mc": 46.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 61.4 |
Secbone/llama-33B-instructed | main | 7c40caaea4fe3264fd469dac428b0f9450e574a6 | {"arc:challenge": 64.6, "hellaswag": 86.2, "hendrycksTest": 60.5, "truthfulqa:mc": 44.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 63.8 |
xDAN-AI/xDAN-L1-Thinking | main | 6f3383932b5003e05beda95e31c0a4c7c92ba700 | {"arc:challenge": 63.7, "hellaswag": 84.5, "hendrycksTest": 62.9, "truthfulqa:mc": 52.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 65.8 |
xDAN-AI/xDAN_13b_l2_lora | main | a8db938daa42016324291e38c4b45e34536ecbf4 | {"arc:challenge": 61, "hellaswag": 82.6, "hendrycksTest": 56, "truthfulqa:mc": 44.7} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 61.1 |
speechlessai/speechless-llama2-dolphin-orca-platypus-13b | main | fd23b7d052eb7c18ecd2acc1be77c66b7b8d6dad | {"arc:challenge": 59.6, "hellaswag": 82.7, "hendrycksTest": 57.9, "truthfulqa:mc": 43.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 60.9 |
speechlessai/speechless-codellama-dolphin-orca-platypus-13b | main | 25e1c346c2a01588a728307d5c35fbeecd58b51b | {"arc:challenge": 45.8, "hellaswag": 67.7, "hendrycksTest": 45.9, "truthfulqa:mc": 44.7} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 51 |
speechlessai/speechless-codellama-airoboros-orca-platypus-13b | main | f01d3ab70cc23e31dcf5d6418406b08dc2003153 | {"arc:challenge": 44.9, "hellaswag": 67.7, "hendrycksTest": 43.2, "truthfulqa:mc": 40.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 49.2 |
speechlessai/speechless-codellama-34b-v1.0 | main | 1d64d871cd56da3031e19bc267ef8bd0b85b9936 | {"arc:challenge": 52.5, "hellaswag": 74.1, "hendrycksTest": 53.5, "truthfulqa:mc": 47.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 56.8 |
itsliupeng/llama2_7b_zh | main | 410711781d2e24226c0d62959e4990d1de851c3c | {"arc:challenge": 52, "hellaswag": 74.9, "hendrycksTest": 60.7, "truthfulqa:mc": 42.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 57.6 |
itsliupeng/llama2_7b_mmlu | main | 553178f8d5d69eb1dfa5b9503d2ce0c1e481e5b1 | {"arc:challenge": 56.1, "hellaswag": 79.1, "hendrycksTest": 60, "truthfulqa:mc": 41} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 59 |
itsliupeng/llama2_7b_code | main | 0e6d1edd87c8753b55d280179c8fb0e65ebf5fa2 | {"arc:challenge": 52.1, "hellaswag": 75.7, "hendrycksTest": 48.1, "truthfulqa:mc": 38.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 53.7 |
lu-vae/llama2-13b-sharegpt4-test | main | 2be36a2dab4ed0f97727a1508367f53d59950818 | {"arc:challenge": 58, "hellaswag": 82.7, "hendrycksTest": 56, "truthfulqa:mc": 48.3} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 61.2 |
meta-llama/Llama-2-7b-chat-hf | main | b7701a9e825e79a5ab18b5801be113c2160cc627 | {"arc:challenge": 52.9, "hellaswag": 78.6, "hendrycksTest": 48.3, "truthfulqa:mc": 45.6} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 56.4 |
meta-llama/Llama-2-70b-chat-hf | main | 7f54101c0fbb67a8143ca23eb8bd09b71f269c74 | {"arc:challenge": 64.6, "hellaswag": 85.9, "hendrycksTest": 63.9, "truthfulqa:mc": 52.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 66.8 |
Yhyu13/chimera-inst-chat-13b-hf | main | a6943d2d30d0af904b3321559157d589e60f9e0f | {"arc:challenge": 55.4, "hellaswag": 78.9, "hendrycksTest": 50.6, "truthfulqa:mc": 50.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 58.8 |
chansung/gpt4-alpaca-lora-13b-decapoda-1024 | main | 7aedafea409de07a997d70a84e30242c7b86877c | {"arc:challenge": 59.4, "hellaswag": 81.9, "hendrycksTest": 47.8, "truthfulqa:mc": 52.6} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 60.4 |
HuggingFaceH4/zephyr-7b-beta | main | 8af01af3d4f9dc9b962447180d6d0f8c5315da86 | {"arc:challenge": 62, "hellaswag": 84.5, "hendrycksTest": 61.1, "truthfulqa:mc": 57.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 66.2 |
HuggingFaceH4/zephyr-7b-alpha | main | 2cd2cd16a6ab22585d643cf264fac73b18e7852a | {"arc:challenge": 61, "hellaswag": 84, "hendrycksTest": 61.4, "truthfulqa:mc": 57.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 66.1 |
Rallio67/7B-redpajama-conditional-alpha | main | 9a3f69a1eba3618930f222d4e013d534102a2af5 | {"arc:challenge": 42.6, "hellaswag": 69.9, "hendrycksTest": 26.5, "truthfulqa:mc": 36.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 43.8 |
open-llm-leaderboard/bloomz-1b7-4bit-alpaca-auto-eval-adapter-applied | main | 7c46d9e7aa05a8f711a93603199f9476742fe9d7 | {"arc:challenge": 29.1, "hellaswag": 47.4, "hendrycksTest": 31.8, "truthfulqa:mc": 41.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 37.4 |
TFLai/Stable-Platypus2-13B-QLoRA-0.80-epoch | main | 0c15b8540335b3e21a976a5fc5c33b47927fea6c | {"arc:challenge": 62.3, "hellaswag": 82.5, "hendrycksTest": 57.1, "truthfulqa:mc": 51.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 63.3 |
TFLai/PuddleJumper-Platypus2-13B-QLoRA-0.80-epoch | main | 4b5aabc51907e4cba49f373c6dc09a2634f2fb8a | {"arc:challenge": 54.5, "hellaswag": 79.4, "hendrycksTest": 55.1, "truthfulqa:mc": 54.3} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 60.8 |
TFLai/Nova-13B | main | ae1145f9fa846ab8d39d8b7da888287ef917efb5 | {"arc:challenge": 62.7, "hellaswag": 82.6, "hendrycksTest": 58, "truthfulqa:mc": 51.3} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 63.6 |
open-llm-leaderboard/bloom-560m-4bit-alpaca-auto-eval-adapter-applied | main | 61e0b861d59319a96bba5af8c246e69d82e8e6e6 | {"arc:challenge": 24, "hellaswag": 29.2, "hendrycksTest": 25.2, "truthfulqa:mc": 45.2} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 30.9 |
TFLai/SpeechlessV1-Nova-13B | main | fbe6f0e32b5ecf9d75510d0b11a286466f46d79e | {"arc:challenge": 61.8, "hellaswag": 82.7, "hendrycksTest": 57.7, "truthfulqa:mc": 51.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 63.4 |
TFLai/gpt2-turkish-uncased | main | 4807e7df1dfb9d60c6d98e3cfeff62cb6b9a1579 | {"arc:challenge": 24.5, "hellaswag": 25.1, "hendrycksTest": 26.6, "truthfulqa:mc": 52.3} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 32.1 |
TFLai/pythia-2.8b-4bit-alpaca | main | 40e84b6d38aac92a0302c2a682498794ef0fd901 | {"arc:challenge": 34.7, "hellaswag": 59, "hendrycksTest": 25.5, "truthfulqa:mc": 39.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 39.6 |
TFLai/OrcaMini-Platypus2-13B-QLoRA-0.80-epoch | main | 1f81c0439f60d848e3cbc7f06fcd58b5161a8557 | {"arc:challenge": 60.8, "hellaswag": 82.6, "hendrycksTest": 56.4, "truthfulqa:mc": 53.3} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 63.3 |
TFLai/gpt-neo-1.3B-4bit-alpaca | main | 137d483d1dc757c81c59bd190016f7c5df01f978 | {"arc:challenge": 28.2, "hellaswag": 46.3, "hendrycksTest": 25.2, "truthfulqa:mc": 39.3} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 34.8 |
TFLai/llama-2-13b-4bit-alpaca-gpt4 | main | ccf1ad19b07196fa3fab67261b7a0f9bcf28638f | {"arc:challenge": 57.7, "hellaswag": 81, "hendrycksTest": 51.8, "truthfulqa:mc": 45.7} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 59 |
TFLai/Orca-Nova-13B | main | 5a6c3686749ecb76971a915403da8c07a98078a6 | {"arc:challenge": 62.4, "hellaswag": 82.5, "hendrycksTest": 57.4, "truthfulqa:mc": 46} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 62.1 |
LLMs/WizardLM-13B-V1.0 | main | f802ea7c01e2da27b0f7091c70d3ecfd8fc042b9 | {"arc:challenge": 57.3, "hellaswag": 80.9, "hendrycksTest": 52.9, "truthfulqa:mc": 50.6} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 60.4 |
LLMs/Stable-Vicuna-13B | main | 51f3d9eaa71de287c96195abd0ff954839857b19 | {"arc:challenge": 53.4, "hellaswag": 78.6, "hendrycksTest": 50.4, "truthfulqa:mc": 48.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 57.7 |
LLMs/AlpacaGPT4-7B-elina | main | bbece5e3f8ee9be09c8defc536a95c6ef780c681 | {"arc:challenge": 55, "hellaswag": 78.8, "hendrycksTest": 37.5, "truthfulqa:mc": 41.5} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 53.2 |
Tincando/fiction_story_generator | main | 377b080cf96e10d50289aa3e1fd79c330265f45a | {"arc:challenge": 23.3, "hellaswag": 28.7, "hendrycksTest": 26.7, "truthfulqa:mc": 43.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 30.6 |
TinyPixel/lima-test | main | 4d6a006c6341f29b11c02f19bf9535f51b4da1b5 | {"arc:challenge": 53.1, "hellaswag": 78.9, "hendrycksTest": 46.4, "truthfulqa:mc": 39.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 54.4 |
TinyPixel/testmodel-3 | main | a1fbc4d8a2c1a3d211325bdff9e7f0539fa7a2b1 | {"arc:challenge": 53.2, "hellaswag": 78.7, "hendrycksTest": 46.6, "truthfulqa:mc": 38.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 54.3 |
TinyPixel/llama2-7b-oa | main | f346cbe795a2dadb6da0b40d70afd4976bcae90e | {"arc:challenge": 53.4, "hellaswag": 78.7, "hendrycksTest": 46.7, "truthfulqa:mc": 41.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 55 |
TinyPixel/llama2-7b-instruct | main | 4c0aa1032cbebeef1aad2becb5dcb613b8a1cc97 | {"arc:challenge": 53.6, "hellaswag": 78.8, "hendrycksTest": 46.1, "truthfulqa:mc": 39.5} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 54.5 |
vihangd/shearedplats-2.7b-v2 | main | 2837296f28d6aa0fb6c1fe382f553e65c8e1e5f3 | {"arc:challenge": 42.4, "hellaswag": 72.6, "hendrycksTest": 27.5, "truthfulqa:mc": 39.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 45.6 |
vihangd/smartyplats-3b-v1 | main | 89272b9edb323f5ace09e097a6449554c0dcd4e7 | {"arc:challenge": 40.5, "hellaswag": 70.9, "hendrycksTest": 25.3, "truthfulqa:mc": 36.5} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 43.3 |
vihangd/shearedplats-1.3b-v1 | main | 7ac93152e1807ec1d732500255a747e27922fb1a | {"arc:challenge": 35.4, "hellaswag": 62.7, "hendrycksTest": 24.7, "truthfulqa:mc": 33.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 39.2 |
formulae/Dorflan | main | 5d8e7e5764ace89e6ccd1deece33b0e8a4b4587b | {"arc:challenge": 54.4, "hellaswag": 75.8, "hendrycksTest": 51.4, "truthfulqa:mc": 51.2} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 58.2 |
uberkie/metharme-1.3b-finetuned | main | 7335669475711806eb04f8850e4eef91a9d2677d | {"arc:challenge": 20.6, "hellaswag": 28, "hendrycksTest": 25.3, "truthfulqa:mc": 44.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 29.7 |
kfkas/Llama-2-ko-7b-Chat | main | 3293b98cd8204371988f898dafa9b5a297555cbe | {"arc:challenge": 40.4, "hellaswag": 67.1, "hendrycksTest": 30.2, "truthfulqa:mc": 35.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 43.3 |
Locutusque/TinyMistral-248m | main | 8f03f72bca0542aa164c29ba41f02cba6f9d7748 | {"arc:challenge": 20.8, "hellaswag": 27, "hendrycksTest": 23.1, "truthfulqa:mc": 46.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 29.4 |
rombodawg/LosslessMegaCoder-llama2-7b-mini | main | 186b105d61054611d0b921a55c220d41c6aefe43 | {"arc:challenge": 53.5, "hellaswag": 77.4, "hendrycksTest": 49.7, "truthfulqa:mc": 45.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 56.6 |
rombodawg/LosslessMegaCoder-llama2-13b-mini | main | 1f5609ffd40bc3af2dcbc5c88e9312d47a73c4b4 | {"arc:challenge": 60.6, "hellaswag": 81.3, "hendrycksTest": 57.9, "truthfulqa:mc": 48.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 62.2 |
sequelbox/StellarBright | main | 43efad8bfdb47139934e810906c1e59c25b5e269 | {"arc:challenge": 73, "hellaswag": 87.8, "hendrycksTest": 71.2, "truthfulqa:mc": 64.5} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 74.1 |
sequelbox/DaringFortitude | main | 0c463888cd83b7acebd7b6fb961562e11402e47d | {"arc:challenge": 63.5, "hellaswag": 83.6, "hendrycksTest": 59.8, "truthfulqa:mc": 56} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 65.7 |
sequelbox/SharpBalance | main | a87cb1756d7b7389cc5a6d4647cf53377e962aea | {"arc:challenge": 69.3, "hellaswag": 87.6, "hendrycksTest": 69.5, "truthfulqa:mc": 59} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 71.4 |
LeoLM/leo-hessianai-7b-chat | main | 7c343a501f5cd3b768d2f78d9941b760fd66815d | {"arc:challenge": 52.6, "hellaswag": 77.6, "hendrycksTest": 45.6, "truthfulqa:mc": 44.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 55.2 |
golaxy/goims | main | 9ef1045ca31f670d9cbf820af904b33a097cd787 | {"arc:challenge": 49.5, "hellaswag": 72.7, "hendrycksTest": 43.9, "truthfulqa:mc": 44.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 52.7 |
golaxy/gogpt-3b-bloom | main | fe942d5d0faca8156eaf456ecdf569993eab8062 | {"arc:challenge": 31.9, "hellaswag": 50.3, "hendrycksTest": 25.2, "truthfulqa:mc": 41.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 37.3 |
golaxy/gogpt-7b-bloom | main | 8f9996f852db583b982efbd671465d18ad13ffae | {"arc:challenge": 44.6, "hellaswag": 62.6, "hendrycksTest": 33.8, "truthfulqa:mc": 40.6} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 45.4 |
golaxy/gogpt-560m | main | 82bd8b88b95068eee614a35b790388c5d2415705 | {"arc:challenge": 26.4, "hellaswag": 31.9, "hendrycksTest": 25.3, "truthfulqa:mc": 43.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 31.7 |
golaxy/gogpt2-13b-chat | main | 6750491b8c720f2cc6f7ec53bbd61fb6efca6c04 | {"arc:challenge": 48.4, "hellaswag": 71.8, "hendrycksTest": 44.5, "truthfulqa:mc": 44.7} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 52.4 |
golaxy/gogpt-7b | main | 7eb70c0e330b7d3ff490047ddbb153bb96294882 | {"arc:challenge": 48.8, "hellaswag": 73.8, "hendrycksTest": 43, "truthfulqa:mc": 41} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 51.6 |
upstage/llama-65b-instruct | main | f70a9865cb0a1ac1157ad928b3b428dd85d52946 | {"arc:challenge": 68.9, "hellaswag": 86.4, "hendrycksTest": 64.8, "truthfulqa:mc": 59.7} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 70 |
upstage/Llama-2-70b-instruct | main | 8469429924dc2e1a9394b8095753985668a4052e | {"arc:challenge": 70.9, "hellaswag": 87.5, "hendrycksTest": 69.8, "truthfulqa:mc": 61} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 72.3 |
NousResearch/Nous-Hermes-llama-2-7b | main | 60e58acecdc1552e1b1752a38d1d91d942d1c3f0 | {"arc:challenge": 55.1, "hellaswag": 78.9, "hendrycksTest": 48.3, "truthfulqa:mc": 49} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 57.8 |
NousResearch/Nous-Hermes-Llama2-70b | main | 13a0b4da159ad95c93e72a002d893c48ed0f257a | {"arc:challenge": 67.6, "hellaswag": 86.8, "hendrycksTest": 69.7, "truthfulqa:mc": 55} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 69.8 |
NousResearch/Nous-Hermes-Llama2-13b | main | 8f95aa9cd207db7b24179fc779c2b8973e71bee2 | {"arc:challenge": 61.5, "hellaswag": 83.3, "hendrycksTest": 55.1, "truthfulqa:mc": 50.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 62.6 |
NousResearch/CodeLlama-7b-hf | main | 855c92912ea4a8eb5f0be1db4bf776ffd0815dac | {"arc:challenge": 39.8, "hellaswag": 59.6, "hendrycksTest": 30.5, "truthfulqa:mc": 38.6} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 42.1 |
NousResearch/Nous-Capybara-7B | main | 42dfc6f7d735670e2f3e30b0919708a81f9a0df9 | {"arc:challenge": 55.3, "hellaswag": 80.7, "hendrycksTest": 48.7, "truthfulqa:mc": 51.1} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 59 |
PeanutJar/LLaMa-2-PeanutButter_v18_B-7B | main | bc8c239cacf1e3211f05e27be67a74d84c12aea9 | {"arc:challenge": 54.6, "hellaswag": 81, "hendrycksTest": 47.1, "truthfulqa:mc": 41.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 56.2 |
PeanutJar/LLaMa-2-PeanutButter_v14-7B | main | 18b55a04e4537ca77b69311e4144984388ae965c | {"arc:challenge": 54.2, "hellaswag": 80.4, "hendrycksTest": 46, "truthfulqa:mc": 44.7} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 56.3 |
PeanutJar/LLaMa-2-PeanutButter_v37_SFT-R1-DPO-R2-7B | main | 7dbaa1eea3964e3218ed1788fc04a30e058d3daf | {"arc:challenge": 54.1, "hellaswag": 79.1, "hendrycksTest": 47.3, "truthfulqa:mc": 42} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 55.6 |
PeanutJar/LLaMa-2-PeanutButter_v19_R8-7B | main | b290ba1cfd60c5bb7b8bf4e9c08da1d3adb2d7b5 | {"arc:challenge": 53.3, "hellaswag": 78.7, "hendrycksTest": 46.5, "truthfulqa:mc": 39.6} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 54.5 |
PeanutJar/Mistral-v0.1-PeanutButter-v0.0.5-DPO-7B-QLoRA | main | 56e805fbebffaf25e61df5a3d68b75cb604a6e1c | {"arc:challenge": 61.3, "hellaswag": 84.5, "hendrycksTest": 63.6, "truthfulqa:mc": 45.8} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 63.8 |
PeanutJar/Mistral-v0.1-PeanutButter-v0.0.5-SFT-7B-QLoRA | main | 2609363766acf308877a71aba352e60d7c044b49 | {"arc:challenge": 60.8, "hellaswag": 84.2, "hendrycksTest": 63.7, "truthfulqa:mc": 44.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 63.4 |
PeanutJar/LLaMa-2-PeanutButter_v18_A-7B | main | 15b2fa81418792841014f589e61d1d9e30457040 | {"arc:challenge": 53.2, "hellaswag": 78.1, "hendrycksTest": 45.5, "truthfulqa:mc": 40.4} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 54.3 |
PeanutJar/LLaMa-2-PeanutButter_v4-7B | main | 51aef62f2a8baf37156d13f9ca5a29154d694f57 | {"arc:challenge": 54.9, "hellaswag": 80.8, "hendrycksTest": 47.2, "truthfulqa:mc": 42.3} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 56.3 |
PeanutJar/Mistral-v0.1-PeanutButter-v0.0.2-7B | main | f4d471d7a9447d0969a58d5b3146d50cfa3005b3 | {"arc:challenge": 61.8, "hellaswag": 84.1, "hendrycksTest": 64.4, "truthfulqa:mc": 45.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 64 |
team-lucid/mptk-1b | main | aea467410ae0cead4fded6b98a3575e92b22862f | {"arc:challenge": 22.7, "hellaswag": 25.5, "hendrycksTest": 27.1, "truthfulqa:mc": null} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | null |
hoskinson-center/proofGPT-v0.1-6.7B | main | 02f405f08ca0e5b1aaa90a7c3b11303b5f245102 | {"arc:challenge": 23.3, "hellaswag": 28.5, "hendrycksTest": 24.6, "truthfulqa:mc": 50.9} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 31.8 |
Danielbrdz/Barcenas-3b | main | 2b6b8bfd3946c02fa4a5182ed008df8ad324a406 | {"arc:challenge": 43.2, "hellaswag": 67.8, "hendrycksTest": 29.2, "truthfulqa:mc": 41.6} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 45.4 |
Danielbrdz/Barcenas-7b | main | 770fa73981a599e935c21a95b1817a553c726694 | {"arc:challenge": 55.1, "hellaswag": 77.4, "hendrycksTest": 49.3, "truthfulqa:mc": 43.6} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 56.4 |
Danielbrdz/CodeBarcenas-7b | main | fe7a232baac5394e821f349cb7ef31dbd4ca2078 | {"arc:challenge": 42.3, "hellaswag": 63.4, "hendrycksTest": 33.4, "truthfulqa:mc": 38.5} | 9ba100d35ce48d3d4c132947464c93c861932caa | 2023-11-23T17:28:23 | 44.4 |
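
For reference, the aggregate `score` column in the rows above appears to be the simple mean of the four benchmark values in `results`, rounded to one decimal place, and is null whenever any benchmark value is null. The Python sketch below is a hypothetical helper (not part of the dataset) that reproduces this relationship for the first row of the table:

```python
# Minimal sketch, assuming "score" is the mean of the four benchmark values in
# "results" (verified against several rows above). The function name and this
# example row are illustrative only.

def aggregate_score(results: dict) -> float | None:
    """Mean of the four benchmark scores, or None if any benchmark is missing."""
    benchmarks = ["arc:challenge", "hellaswag", "hendrycksTest", "truthfulqa:mc"]
    values = [results.get(name) for name in benchmarks]
    if any(v is None for v in values):
        return None  # rows with a null benchmark also report a null score
    return round(sum(values) / len(values), 1)

# First row of the table: maywell/Synatra-11B-Testbench
row = {"arc:challenge": 57.3, "hellaswag": 78.7, "hendrycksTest": 55.6, "truthfulqa:mc": 52}
print(aggregate_score(row))  # 60.9, matching the reported score
```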