Dataset Viewer

Each row records one evaluation run. Columns:

- `model` (string, 4–89 characters): Hugging Face model ID of the evaluated model
- `revision` (string, single value): model revision that was evaluated; `main` for every row in this preview
- `model_sha` (string, 0–40 characters): commit SHA of the evaluated model revision
- `results` (dict): per-benchmark scores for `arc:challenge`, `hellaswag`, `hendrycksTest`, and `truthfulqa:mc`
- `commit` (string, 40 characters): commit SHA associated with the evaluation results; identical for every row in this preview
- `date` (timestamp[ns]): evaluation timestamp
- `score` (float64, range 21.8–83, nullable): overall score; null (⌀) when a benchmark result is missing
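To work with these rows programmatically, the sketch below shows one way such a split could be loaded and inspected with the Hugging Face `datasets` library. The repository ID and split name are placeholders, since this preview does not state them. The preview rows follow the snippet.

```python
from datasets import load_dataset

# Placeholder repository ID and split: this preview does not name the dataset,
# so substitute the actual <org>/<dataset> path and split before running.
ds = load_dataset("some-org/leaderboard-results", split="train")

row = ds[0]
print(row["model"], row["score"])  # model ID and overall score
print(row["results"])              # per-benchmark dict, e.g. {"arc:challenge": ..., ...}
```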
| model | revision | model_sha | results | commit | date | score |
|---|---|---|---|---|---|---|
| TaylorAI/Flash-Llama-3B | main | b4c7bb49171ff6955cfc1f7e33143383c57f7606 | {"arc:challenge": 40.1, "hellaswag": 71.6, "hendrycksTest": 26.9, "truthfulqa:mc": 34.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 43.3 |
| TaylorAI/FLAN-Llama-7B-2_Llama2-7B-Flash_868_full_model | main | 819f3f384e37f8906a62a8048556c9e58e495c02 | {"arc:challenge": 52.5, "hellaswag": 79.1, "hendrycksTest": 47.6, "truthfulqa:mc": 37.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 54.1 |
| TaylorAI/Flash-Llama-13B | main | 81b40096471a8980e3e1a8998f358bd363033783 | {"arc:challenge": 59.3, "hellaswag": 82.2, "hendrycksTest": 55.7, "truthfulqa:mc": 37.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 58.6 |
| TaylorAI/Flash-Llama-7B | main | 27c84ef23d850582453e1cc2dcea13de48da090f | {"arc:challenge": 53.1, "hellaswag": 78.6, "hendrycksTest": 46.8, "truthfulqa:mc": 38.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 54.3 |
| facebook/xglm-1.7B | main | d23a5e8e2164af31a84a26756b9b17f925143050 | {"arc:challenge": 25.9, "hellaswag": 45.7, "hendrycksTest": 25.1, "truthfulqa:mc": 37.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 33.5 |
| facebook/xglm-564M | main | f3059f01b98ccc877c673149e0178c0e957660f9 | {"arc:challenge": 24.6, "hellaswag": 34.6, "hendrycksTest": 25.2, "truthfulqa:mc": 40.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 31.2 |
| facebook/opt-iml-max-30b | main | 291753b04817a31a742631053ee361874d6db8a4 | {"arc:challenge": 43.9, "hellaswag": 72.4, "hendrycksTest": 41.1, "truthfulqa:mc": 38.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 48.9 |
| facebook/xglm-4.5B | main | dc6a67fac06c8bca7860b84656a0cb736293a7a8 | {"arc:challenge": 31.5, "hellaswag": 57.9, "hendrycksTest": 25.4, "truthfulqa:mc": 35.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 37.7 |
| facebook/opt-13b | main | e515202d1e7750da62d245fbccb2723b9c1790f5 | {"arc:challenge": 39.9, "hellaswag": 71.2, "hendrycksTest": 24.9, "truthfulqa:mc": 34.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 42.5 |
| facebook/galactica-30b | main | 80bd55898b06c7c363c467dec877b8b32702a2c4 | {"arc:challenge": 47.4, "hellaswag": 61.2, "hendrycksTest": 47.6, "truthfulqa:mc": 38} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 48.6 |
| facebook/opt-66b | main | 7259969061237fe940036d22bea0fd349e4485e9 | {"arc:challenge": 46.3, "hellaswag": 76.2, "hendrycksTest": 27, "truthfulqa:mc": 35.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 46.2 |
| facebook/galactica-1.3b | main | f711c69357d598defb703ddce93c5d7f7bc6e6da | {"arc:challenge": 34.1, "hellaswag": 40.9, "hendrycksTest": 27.1, "truthfulqa:mc": 41.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 35.9 |
| facebook/opt-1.3b | main | 8c7b10754972749675d22364c25c428b29face51 | {"arc:challenge": 29.5, "hellaswag": 54.5, "hendrycksTest": 25, "truthfulqa:mc": 38.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 36.9 |
| facebook/xglm-7.5B | main | 732d59308a844004bd9a4def972cc7c3896a38e0 | {"arc:challenge": 34.1, "hellaswag": 60.8, "hendrycksTest": 27.8, "truthfulqa:mc": 36.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 39.8 |
| facebook/opt-6.7b | main | a45aa65bbeb77c1558bc99bedc6779195462dab0 | {"arc:challenge": 39.2, "hellaswag": 68.7, "hendrycksTest": 24.6, "truthfulqa:mc": 35.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 41.9 |
| facebook/opt-iml-max-1.3b | main | d60fa58f50def19751da2075791da359ca19d273 | {"arc:challenge": 30.7, "hellaswag": 53.8, "hendrycksTest": 27.6, "truthfulqa:mc": 38.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 37.6 |
| facebook/opt-125m | main | 3d2b5f275bdf882b8775f902e1bfdb790e2cfc32 | {"arc:challenge": 22.9, "hellaswag": 31.5, "hendrycksTest": 26, "truthfulqa:mc": 42.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 30.8 |
| facebook/opt-2.7b | main | 397f71a473a150c00f0fe3fc4a2f78ff3ccaf82d | {"arc:challenge": 34, "hellaswag": 61.4, "hendrycksTest": 25.4, "truthfulqa:mc": 37.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 39.6 |
| facebook/opt-30b | main | ceea0a90ac0f6fae7c2c34bcb40477438c152546 | {"arc:challenge": 43.2, "hellaswag": 74.1, "hendrycksTest": 26.6, "truthfulqa:mc": 35.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 44.8 |
| facebook/opt-350m | main | cb32f77e905cccbca1d970436fb0f5e6b58ee3c5 | {"arc:challenge": 23.5, "hellaswag": 36.7, "hendrycksTest": 26, "truthfulqa:mc": 40.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 31.8 |
| gpt2-medium | main | f65d4965d1221eff2bcf34f53a2ba12120e18f24 | {"arc:challenge": 27, "hellaswag": 40.2, "hendrycksTest": 26.6, "truthfulqa:mc": 40.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 33.7 |
| ahnyeonchan/OpenOrca-AYT-13B | main | 1357abceda30e8389007a023907824cc3a11e397 | {"arc:challenge": 27.2, "hellaswag": 26, "hendrycksTest": 25.1, "truthfulqa:mc": null} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | null |
| Taekyoon/llama2-ko-7b-test | main | 1d9b52cc5832ae0ea37514330d38193b737e1d07 | {"arc:challenge": 37.8, "hellaswag": 63, "hendrycksTest": 29.6, "truthfulqa:mc": 36} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 41.6 |
| Mikael110/llama-2-13b-guanaco-fp16 | main | feb7ef47ceca6aec9548264a39622b63fdcb853c | {"arc:challenge": 60.9, "hellaswag": 83.2, "hendrycksTest": 54.6, "truthfulqa:mc": 44} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 60.7 |
| Mikael110/llama-2-7b-guanaco-fp16 | main | f769fed10874af73ad12115efd044cb4a64506b0 | {"arc:challenge": 54.9, "hellaswag": 79.6, "hendrycksTest": 46.4, "truthfulqa:mc": 43.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 56.2 |
| ehartford/dolphin-llama2-7b | main | 85aa4f67191fd016ab7ea8c389fddb5d9e5a9a52 | {"arc:challenge": 46.6, "hellaswag": 67.5, "hendrycksTest": 48.4, "truthfulqa:mc": 49.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 53 |
| ehartford/Wizard-Vicuna-30B-Uncensored | main | 6374baef4cedd41f85c111b8eec3eb38ee24c4b9 | {"arc:challenge": 62.1, "hellaswag": 83.4, "hendrycksTest": 58.2, "truthfulqa:mc": 50.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 63.6 |
| ehartford/CodeLlama-34b-Python-hf | main | 45f38e53a579a2b39298cc57ab04078722bebec0 | {"arc:challenge": 38.1, "hellaswag": 34.8, "hendrycksTest": 33, "truthfulqa:mc": 43.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 37.4 |
| ehartford/Samantha-1.11-7b | main | 730cbd8f3077f3d24001aab714def991f1e4e7e8 | {"arc:challenge": 55, "hellaswag": 79.1, "hendrycksTest": 40.5, "truthfulqa:mc": 50.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 56.2 |
| ehartford/WizardLM-1.0-Uncensored-Llama2-13b | main | 134cea14627fd875f6f277cad92f988024855478 | {"arc:challenge": 55.7, "hellaswag": 80.3, "hendrycksTest": 55.4, "truthfulqa:mc": 51.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 60.7 |
| ehartford/Samantha-1.11-70b | main | 49e5b5ee0bed2864f0b38ba8bf9e01ccc5e0ba5f | {"arc:challenge": 70.1, "hellaswag": 87.6, "hendrycksTest": 67.8, "truthfulqa:mc": 65} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 72.6 |
| ehartford/Wizard-Vicuna-13B-Uncensored | main | 95bfd1640a54e76b3e857c2462fd3a77eca0b275 | {"arc:challenge": 59, "hellaswag": 81.9, "hendrycksTest": 47.9, "truthfulqa:mc": 51.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 60.1 |
| ehartford/WizardLM-13B-Uncensored | main | 9025c5f96fef9525da9238369ad082961b0e9494 | {"arc:challenge": 50.9, "hellaswag": 76.6, "hendrycksTest": 44, "truthfulqa:mc": 46.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 54.6 |
| ehartford/Wizard-Vicuna-7B-Uncensored | main | 1097285acd9c48a1d09bc0a9844d365384732111 | {"arc:challenge": 53.4, "hellaswag": 78.8, "hendrycksTest": 37.1, "truthfulqa:mc": 43.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 53.2 |
| ehartford/WizardLM-1.0-Uncensored-CodeLlama-34b | main | 3e8df2cf4a4ee1c0b2d079cb7be70024d425ea8c | {"arc:challenge": 56.4, "hellaswag": 75.5, "hendrycksTest": 54.5, "truthfulqa:mc": 43.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 57.4 |
| ehartford/WizardLM-33B-V1.0-Uncensored | main | 3eca9fdee0ce28d6a4a635a6f19d9a413caee3e7 | {"arc:challenge": 63.7, "hellaswag": 83.8, "hendrycksTest": 59.4, "truthfulqa:mc": 56.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 65.9 |
| ehartford/WizardLM-30B-Uncensored | main | 761783745fcb97831ad8035d3cbd5de484aca3ce | {"arc:challenge": 60.2, "hellaswag": 82.9, "hendrycksTest": 56.8, "truthfulqa:mc": 51.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 62.9 |
| ehartford/CodeLlama-34b-Instruct-hf | main | 50ac374da09ab585b9cf7625a2ea3554ef97f18a | {"arc:challenge": 40.8, "hellaswag": 35.7, "hendrycksTest": 39.7, "truthfulqa:mc": 44.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 40.1 |
| ehartford/dolphin-llama-13b | main | b6d16c3e1cffef5e914863f41fd96152dafddd6f | {"arc:challenge": 55.5, "hellaswag": 77.1, "hendrycksTest": 52.2, "truthfulqa:mc": 52.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 59.2 |
| ehartford/minotaur-llama2-13b-qlora | main | 22c83f7d68e547fb0b59acfa01c60b108c59fe55 | {"arc:challenge": 60.1, "hellaswag": 82.4, "hendrycksTest": 55.9, "truthfulqa:mc": 45.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 61 |
| ehartford/Samantha-1.11-13b | main | e355ead3a939f471fe2586201156fb972fad0f4b | {"arc:challenge": 60.8, "hellaswag": 83, "hendrycksTest": 56, "truthfulqa:mc": 47.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 61.9 |
| ehartford/Samantha-1.1-70b | main | a3819d186f5b4d52ced7ddeb7fa16bf66e8a2ea7 | {"arc:challenge": 68.8, "hellaswag": 87.5, "hendrycksTest": 68.6, "truthfulqa:mc": 64.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 72.4 |
| ehartford/WizardLM-7B-Uncensored | main | 14c23f9fa775ab5ce49010418f00df06d92b0b13 | {"arc:challenge": 47.9, "hellaswag": 73.1, "hendrycksTest": 35.4, "truthfulqa:mc": 41.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 49.5 |
| ehartford/based-30b | main | 5818a6344f48dc5a324589b57cb288a9d54c0b79 | {"arc:challenge": 63.9, "hellaswag": 85.7, "hendrycksTest": 58.3, "truthfulqa:mc": 35.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 60.9 |
| ehartford/Samantha-1.11-CodeLlama-34b | main | 3fd110de9282e52f56f999bf1da1a76425f00e29 | {"arc:challenge": 56.6, "hellaswag": 75.5, "hendrycksTest": 53.5, "truthfulqa:mc": 50.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 59 |
| junelee/wizard-vicuna-13b | main | 419dc5acc391de54a60d0b041e94e767d1ef2032 | {"arc:challenge": 54.7, "hellaswag": 79.2, "hendrycksTest": 48.9, "truthfulqa:mc": 49.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 58.1 |
| wenge-research/yayi-70b-llama2 | main | 2799b262292f78f7c3965a1410d0ad6211438603 | {"arc:challenge": 60.7, "hellaswag": 83.9, "hendrycksTest": 64.4, "truthfulqa:mc": 47.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 64.2 |
| wenge-research/yayi-7b | main | 00be6c9e41a8367a855c6f18ebfa08f5ecdb2cc4 | {"arc:challenge": 46.3, "hellaswag": 61.7, "hendrycksTest": 36.3, "truthfulqa:mc": 43.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 47 |
| wenge-research/yayi-7b-llama2 | main | 18a4ed38285c732efc583a4bd883b3a681f8d005 | {"arc:challenge": 54.8, "hellaswag": 77.9, "hendrycksTest": 41.4, "truthfulqa:mc": 44} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 54.5 |
| wenge-research/yayi-13b-llama2 | main | 9fc1bc4409b9e71f54213245a91c2742fbf7b3d0 | {"arc:challenge": 48.5, "hellaswag": 74.8, "hendrycksTest": 38.7, "truthfulqa:mc": 42.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 51 |
| budecosystem/genz-13b-v2 | main | 98e0e2086df11b9f80e1571110540a657e52c2e8 | {"arc:challenge": 56, "hellaswag": 80, "hendrycksTest": 54.3, "truthfulqa:mc": 48.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 59.6 |
| budecosystem/genz-70b | main | 32110b4f33e5e80073ca1f47638482fdc0e19297 | {"arc:challenge": 71.4, "hellaswag": 88, "hendrycksTest": 70.8, "truthfulqa:mc": 62.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 73.2 |
| Kiddyz/testlm-1 | main | e00d8c50a007eb1da3fbfb4d5f5a73c1af3aa104 | {"arc:challenge": 53.5, "hellaswag": 75.8, "hendrycksTest": 51.2, "truthfulqa:mc": 48.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 57.2 |
| Kiddyz/testlm | main | e00d8c50a007eb1da3fbfb4d5f5a73c1af3aa104 | {"arc:challenge": 53.5, "hellaswag": 75.8, "hendrycksTest": 51.2, "truthfulqa:mc": 48.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 57.2 |
| Kiddyz/testlm2 | main | 9bffd9acfb12b5da1a1dd09825a633f804126dfa | {"arc:challenge": 53, "hellaswag": 75.6, "hendrycksTest": 51.5, "truthfulqa:mc": 48.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 57.2 |
| Kiddyz/testlm-1-1 | main | e00d8c50a007eb1da3fbfb4d5f5a73c1af3aa104 | {"arc:challenge": 53.5, "hellaswag": 75.8, "hendrycksTest": 51.2, "truthfulqa:mc": 48.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 57.2 |
| Kiddyz/testlm-3 | main | 6ba288ac39fc4145144e360a8f2641d6f5a6a33a | {"arc:challenge": 53.6, "hellaswag": 78.5, "hendrycksTest": 51.8, "truthfulqa:mc": 46.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 57.6 |
| baichuan-inc/Baichuan-7B | main | 8baef65be8363f3b5670adfe9a0b9c0311962d90 | {"arc:challenge": 40.7, "hellaswag": 69, "hendrycksTest": 43.6, "truthfulqa:mc": 36.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 47.4 |
| RobbeD/Orca-Platypus-3B | main | 243f51d75ed6d425addde839740f6fd5bcc4630f | {"arc:challenge": 43.1, "hellaswag": 65.3, "hendrycksTest": 26.8, "truthfulqa:mc": 41.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 44.3 |
| RobbeD/OpenLlama-Platypus-3B | main | d3a0bf8e1181be02cc9c4c4cdfedaedacaefbfac | {"arc:challenge": 41.2, "hellaswag": 71.7, "hendrycksTest": 29.9, "truthfulqa:mc": 36.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 44.8 |
| behnamsh/gpt2_platypus-camel_physics | main | cd4d700d13b3bc9371bf45616ef74ac20d165c3d | {"arc:challenge": 22.8, "hellaswag": 31.2, "hendrycksTest": 25.9, "truthfulqa:mc": 39} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 29.7 |
| AlekseyKorshuk/pygmalion-6b-vicuna-chatml | main | ee3ada91a69a194cedfabbfeab98f1499b75cb44 | {"arc:challenge": 40.6, "hellaswag": 67.7, "hendrycksTest": 33.9, "truthfulqa:mc": 42.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 46.2 |
| TheBloke/WizardLM-30B-fp16 | main | 465f87a243969963f25ae6cf8f8d2de6c0898bbe | {"arc:challenge": 62.5, "hellaswag": 83.3, "hendrycksTest": 59, "truthfulqa:mc": 52.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 64.3 |
| TheBloke/CodeLlama-13B-Instruct-fp16 | main | 521c208c7251ccd3e44ccd9500b6bed419bca565 | {"arc:challenge": 44.6, "hellaswag": 64.9, "hendrycksTest": 38.8, "truthfulqa:mc": 45.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 48.6 |
| TheBloke/Planner-7B-fp16 | main | afb4604a06c8541960fb51240259777764c4ce7e | {"arc:challenge": 51, "hellaswag": 77.8, "hendrycksTest": 35.7, "truthfulqa:mc": 34.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 49.7 |
| TheBloke/orca_mini_13B-GPTQ | main | 8ec18e5c597da86fa123c08b6e6bef7da6ec7440 | {"arc:challenge": 27.3, "hellaswag": 25.9, "hendrycksTest": 25.3, "truthfulqa:mc": 48.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 31.6 |
| TheBloke/WizardLM-30B-GPTQ | main | e2e97475a9775d2fe7afba098aee37e694b9220f | {"arc:challenge": 28.8, "hellaswag": 26.1, "hendrycksTest": 24.6, "truthfulqa:mc": 49.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 32.2 |
| TheBloke/Project-Baize-v2-13B-GPTQ | main | 8dee7c7129aaad1ded245fce712ff5dbb2845258 | {"arc:challenge": 27.6, "hellaswag": 26.4, "hendrycksTest": 25.9, "truthfulqa:mc": 48.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 32 |
| TheBloke/vicuna-13b-v1.3.0-GPTQ | main | 6ef1f8d8638ea2d6681a8e3da73be57c501d847b | {"arc:challenge": 54.4, "hellaswag": 79.5, "hendrycksTest": 52, "truthfulqa:mc": 50.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 59.2 |
| TheBloke/Platypus-30B-SuperHOT-8K-fp16 | main | e8ac508308911475125252dcf2677fe355dd3700 | {"arc:challenge": 25.7, "hellaswag": 30.8, "hendrycksTest": 23.6, "truthfulqa:mc": 47.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 31.8 |
| TheBloke/chronos-wizardlm-uc-scot-st-13B-GPTQ | main | c4246e4b8d3fc77b9fe4ebb1ead61cda4b83575b | {"arc:challenge": 28, "hellaswag": 26.1, "hendrycksTest": 25.7, "truthfulqa:mc": 49.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 32.4 |
| TheBloke/Wizard-Vicuna-13B-Uncensored-GPTQ | main | d9b00ec47ae3546398432f0693fe2d5d92bf143b | {"arc:challenge": 29.6, "hellaswag": 25.5, "hendrycksTest": 25.3, "truthfulqa:mc": 50.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 32.7 |
| TheBloke/WizardLM-13B-V1-1-SuperHOT-8K-fp16 | main | 83905656ca3e63877b8d9f3a74118da0c9bc6939 | {"arc:challenge": 58.6, "hellaswag": 81.1, "hendrycksTest": 48.3, "truthfulqa:mc": 54.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 60.6 |
| TheBloke/wizard-mega-13B-GPTQ | main | 848bf2514f804799dd28c188e5428d497dc983fb | {"arc:challenge": 27.7, "hellaswag": 26, "hendrycksTest": 25, "truthfulqa:mc": 48.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 31.8 |
| TheBloke/Llama-2-70B-fp16 | main | b25061ef1b440e970d15d4ac99bc42937cd442a2 | {"arc:challenge": 67.3, "hellaswag": 87.3, "hendrycksTest": 69.8, "truthfulqa:mc": 44.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 67.3 |
| TheBloke/WizardLM-7B-uncensored-GPTQ | main | cc30c031fd795ee3d3a50312ab4549415bfbdb46 | {"arc:challenge": 28.5, "hellaswag": 25.4, "hendrycksTest": 24.9, "truthfulqa:mc": 50.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 32.4 |
| TheBloke/OpenAssistant-SFT-7-Llama-30B-HF | main | a7a2306b9a63de2c545f35b24735f4540baf5903 | {"arc:challenge": 60.6, "hellaswag": 82.2, "hendrycksTest": 57.9, "truthfulqa:mc": 46.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 61.9 |
| TheBloke/openchat_v2_openorca_preview-GPTQ | main | 5a4c2ea612b71d7c00118f796db7189bc1a0c930 | {"arc:challenge": 28, "hellaswag": 26.1, "hendrycksTest": 24.2, "truthfulqa:mc": 50.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 32.1 |
| TheBloke/Llama-2-70B-chat-GPTQ | main | 054fbf6f65e7ab7691ec07ec9ad366acf2dd90bf | {"arc:challenge": 62.6, "hellaswag": 84.8, "hendrycksTest": 62.7, "truthfulqa:mc": 51} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 65.3 |
| TheBloke/VicUnlocked-30B-LoRA-HF | main | 3259cb3c2a10cfb429fb51c4a76fffa049f4c44d | {"arc:challenge": 59.7, "hellaswag": 84, "hendrycksTest": 57.8, "truthfulqa:mc": 48.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 62.5 |
| TheBloke/h2ogpt-oasst1-512-30B-HF | main | 3dc93836e4b08b7b2ee43e69c1e590a36fd24687 | {"arc:challenge": 57.3, "hellaswag": 81.4, "hendrycksTest": 48.1, "truthfulqa:mc": 45.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 58.1 |
| TheBloke/gpt4-alpaca-lora-13B-HF | main | 49678a2dd15fb4e1f1b99616ccc1ffd269912833 | {"arc:challenge": 59.6, "hellaswag": 82.1, "hendrycksTest": 47.5, "truthfulqa:mc": 49} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 59.6 |
| TheBloke/alpaca-lora-65B-HF | main | 113b61b37a2862b950ada68620e57acafbcefe13 | {"arc:challenge": 64.8, "hellaswag": 85.6, "hendrycksTest": 63.1, "truthfulqa:mc": 45.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 64.6 |
| TheBloke/WizardLM-30B-Uncensored-GPTQ | main | 43c701ddbe0bceac26c860307e06763cc5203500 | {"arc:challenge": 29.4, "hellaswag": 26.5, "hendrycksTest": 24.4, "truthfulqa:mc": 49.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 32.4 |
| TheBloke/WizardLM-Uncensored-SuperCOT-StoryTelling-30B-GPTQ | main | cd07cc7c55b46524f61214012653c25226d24c0d | {"arc:challenge": 28.4, "hellaswag": 26.1, "hendrycksTest": 24.7, "truthfulqa:mc": 49.5} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 32.2 |
| TheBloke/robin-65b-v2-fp16 | main | 40edb31ba93045d673735361bc98f56125bbc77b | {"arc:challenge": 61.9, "hellaswag": 84.6, "hendrycksTest": 62.5, "truthfulqa:mc": 52.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 65.3 |
| TheBloke/guanaco-13B-HF | main | bd59c700815124df616a17f5b49a0bc51590b231 | {"arc:challenge": 57.8, "hellaswag": 83.8, "hendrycksTest": 48.3, "truthfulqa:mc": 46.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 59.1 |
| TheBloke/LongChat-13B-GPTQ | main | 8ec25a29033b7be5daeafa26f08e1ea7cf232b98 | {"arc:challenge": 28.3, "hellaswag": 26.1, "hendrycksTest": 25.6, "truthfulqa:mc": 48.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 32.1 |
| TheBloke/GPlatty-30B-SuperHOT-8K-fp16 | main | e2103a424c1700756df1c0c0b334195f37efe17b | {"arc:challenge": 28.3, "hellaswag": 33.5, "hendrycksTest": 24.9, "truthfulqa:mc": 46.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 33.2 |
| TheBloke/llama-30b-supercot-SuperHOT-8K-fp16 | main | 7efdff78a90132c1c66e1d27518ad7cbadffa139 | {"arc:challenge": 25.9, "hellaswag": 30.5, "hendrycksTest": 23.5, "truthfulqa:mc": 47} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 31.7 |
| TheBloke/UltraLM-13B-fp16 | main | 734f5641f6c548474517d1536c46024517f120e0 | {"arc:challenge": 57.6, "hellaswag": 80.2, "hendrycksTest": 51.9, "truthfulqa:mc": 51.6} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 60.3 |
| TheBloke/gpt4-alpaca-lora_mlp-65B-HF | main | 664ff8e3e1d446971a16a6c9018ab24de7664684 | {"arc:challenge": 65, "hellaswag": 86.1, "hendrycksTest": 62.7, "truthfulqa:mc": 59.2} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 68.2 |
| TheBloke/Wizard-Vicuna-30B-Uncensored-fp16 | main | c7b7cecb5a314fc66deebabcb67c230a3fbe84f7 | {"arc:challenge": 62.1, "hellaswag": 83.4, "hendrycksTest": 58.2, "truthfulqa:mc": 50.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 63.6 |
| TheBloke/Llama-2-13B-fp16 | main | b2e65e8ad4bb35e5abaee0170ebd5fc2134a50bb | {"arc:challenge": 59.3, "hellaswag": 82.2, "hendrycksTest": 55.7, "truthfulqa:mc": 37.4} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 58.6 |
| TheBloke/robin-33B-v2-fp16 | main | c0ed7d40c3e52379780638dac3bd1f69597b8e18 | {"arc:challenge": 62.4, "hellaswag": 83.6, "hendrycksTest": 54.7, "truthfulqa:mc": 53.9} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 63.6 |
| TheBloke/guanaco-65B-HF | main | 7f83ae526f8b83705ca8434535da8fd8c692f9d0 | {"arc:challenge": 65.4, "hellaswag": 86.5, "hendrycksTest": 62.9, "truthfulqa:mc": 52.8} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 66.9 |
| TheBloke/CAMEL-33B-Combined-Data-SuperHOT-8K-fp16 | main | 14744d11eab7028c5c845f89db2edc9c6fe2becb | {"arc:challenge": 25.9, "hellaswag": 31.6, "hendrycksTest": 23.7, "truthfulqa:mc": 48.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 32.3 |
| TheBloke/airoboros-7b-gpt4-fp16 | main | 14aa50fba9f6418c0d5e2d24087eb802931040ef | {"arc:challenge": 53.1, "hellaswag": 78.7, "hendrycksTest": 38.9, "truthfulqa:mc": 40.7} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 52.9 |
| TheBloke/fiction.live-Kimiko-V2-70B-fp16 | main | 6b0c2cb654133cad2d4920e7da2e3f6cb1c4f7fd | {"arc:challenge": 67.7, "hellaswag": 87.7, "hendrycksTest": 69.8, "truthfulqa:mc": 49.3} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 68.6 |
| TheBloke/Vicuna-13B-CoT-fp16 | main | fe74a0ece9089828b301bd0f067ae5f257516179 | {"arc:challenge": 52.7, "hellaswag": 80.1, "hendrycksTest": 51.9, "truthfulqa:mc": 52.1} | 10ee91ffb0d887b2c4d3f1c5a32f9c4752237da3 | 2023-09-20T10:22:33 | 59.2 |
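In this preview, `score` agrees with the arithmetic mean of the four benchmark results up to rounding of the displayed values (for TaylorAI/Flash-Llama-3B: (40.1 + 71.6 + 26.9 + 34.7) / 4 ≈ 43.3), and it is null whenever one of the results is null, as for ahnyeonchan/OpenOrca-AYT-13B. A minimal sketch of recomputing it from a `results` dict, assuming that convention holds for the full dataset:

```python
from statistics import mean

BENCHMARKS = ("arc:challenge", "hellaswag", "hendrycksTest", "truthfulqa:mc")

def overall_score(results: dict) -> float | None:
    """Mean of the four benchmark results, or None if any of them is missing."""
    values = [results.get(k) for k in BENCHMARKS]
    if any(v is None for v in values):
        return None  # mirrors the null score on rows with a null benchmark result
    return round(mean(values), 1)

# First preview row (TaylorAI/Flash-Llama-3B) -> 43.3
print(overall_score({"arc:challenge": 40.1, "hellaswag": 71.6,
                     "hendrycksTest": 26.9, "truthfulqa:mc": 34.7}))
```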