Schema (16 columns):

Column                   Type          Stats
run_id                   large_string  length 64..64
timestamp_utc            int64         1,736B .. 1,737B
timestamp_day_hour_utc   int64         1,736B .. 1,737B
model_name_or_path       large_string  5 distinct values
unitxt_card              large_string  76 distinct values
unitxt_recipe            large_string  length 330..400
quantization_type        large_string  1 distinct value
quantization_bit_count   large_string  1 distinct value
inference_runtime_s      float64       1.2 .. 295
generation_args          large_string  1 distinct value
model_args               large_string  5 distinct values
inference_engine         large_string  1 distinct value
packages_versions        large_string  1 distinct value
scores                   large_string  length 174..242
num_gpu                  int64         1 .. 1
device                   large_string  1 distinct value
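The structured columns above are stored as strings: generation_args, model_args, and packages_versions are JSON, while scores uses Python dict syntax (single quotes), so it needs ast.literal_eval rather than json.loads. A minimal loading sketch, assuming the rows are available as a Parquet export; the file name is a placeholder, not a path from the source:

    import ast
    import json

    import pandas as pd

    # Placeholder file name: substitute the actual export of this table.
    df = pd.read_parquet("eval_runs.parquet")

    # These three columns hold JSON objects.
    for col in ("generation_args", "model_args", "packages_versions"):
        df[col] = df[col].map(json.loads)

    # scores is a Python-literal dict (single-quoted), not valid JSON.
    df["scores"] = df["scores"].map(ast.literal_eval)

    # Both timestamp columns are epoch milliseconds.
    for col in ("timestamp_utc", "timestamp_day_hour_utc"):
        df[col] = pd.to_datetime(df[col], unit="ms")

    print(df[["unitxt_card", "inference_runtime_s"]].head())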
Rows (49). The following ten fields are identical in every row of this section and are listed once here instead of being repeated per row:

model_name_or_path      mistralai_Mistral-7B-Instruct-v0.3
timestamp_day_hour_utc  1,736,449,200,000
quantization_type       None
quantization_bit_count  half
inference_engine        VLLM
num_gpu                 1
device                  a100_80gb
generation_args         {"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
model_args              {"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
packages_versions       {"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}

In every scores value, score and score_ci_low/score_ci_high duplicate accuracy and accuracy_ci_low/accuracy_ci_high, score_name is 'accuracy', and num_of_instances is 100, so scores are rendered below as accuracy [ci_low, ci_high].

run_id:              ce24848861df34630b26769712d0dea801135fa1fe79a885accb9ba7d7c0d7d9
timestamp_utc:       1,736,452,787,928
unitxt_card:         cards.mmlu.logical_fallacies
unitxt_recipe:       card=cards.mmlu.logical_fallacies,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateHere.enumerator_numbers_choicesSeparator_semicolon_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
inference_runtime_s: 4.520163
accuracy:            0.53 [0.43, 0.62]
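The unitxt_recipe field packs one run's whole evaluation configuration into a comma-separated key=value string. A minimal parsing sketch, assuming (as holds for every recipe in this section) that no value contains a comma; parse_recipe is a hypothetical helper, not part of unitxt:

    def parse_recipe(recipe: str) -> dict[str, str]:
        """Split 'k1=v1,k2=v2,...' into a dict, tolerating '=' inside values."""
        return dict(item.split("=", 1) for item in recipe.split(","))

    config = parse_recipe(
        "card=cards.mmlu.logical_fallacies,demos_pool_size=100,num_demos=0,"
        "format=formats.chat_api,system_prompt=system_prompts.empty,"
        "demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100"
    )
    print(config["card"], config["num_demos"])  # cards.mmlu.logical_fallacies 0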
run_id:              c333960582a1ec3a816f58e414d4a07819a2e7fb7887e637e4cc3be031684adb
timestamp_utc:       1,736,452,787,346
unitxt_card:         cards.mmlu.high_school_geography
unitxt_recipe:       card=cards.mmlu.high_school_geography,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHelmFixed.enumerator_keyboard_choicesSeparator_pipe_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
inference_runtime_s: 3.941566
accuracy:            0.57 [0.47, 0.66]
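The shared model_args and generation_args line up with vLLM's LLM constructor and SamplingParams. A sketch of re-instantiating the engine from one parsed row; this is an assumption about how the logged fields were consumed, not code recovered from the logging pipeline (row is a hypothetical parsed record, and the null temperature/top_p are dropped because SamplingParams expects floats):

    import json

    from vllm import LLM, SamplingParams

    model_args = json.loads(row["model_args"])            # row: one parsed record
    generation_args = json.loads(row["generation_args"])

    # model, seed, device, max_num_batched_tokens, gpu_memory_utilization,
    # max_model_len, and tensor_parallel_size are all accepted by LLM() directly.
    llm = LLM(**model_args)

    # Drop nulls so vLLM's defaults apply instead of invalid None values.
    sampling = SamplingParams(
        **{k: v for k, v in generation_args.items() if v is not None}
    )

    outputs = llm.generate(["<prompt rendered from the recipe>"], sampling)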
6289a5b34575209afe5d37487bad3d3d69355e36203103767f26ce5e78487d9f
1,736,452,791,631
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.clinical_knowledge
card=cards.mmlu.clinical_knowledge,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelowPlease.enumerator_roman_choicesSeparator_newline_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.741848
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.63, 'accuracy_ci_low': 0.54, 'accuracy_ci_high': 0.72, 'score_name': 'accuracy', 'score': 0.63, 'score_ci_high': 0.72, 'score_ci_low': 0.54, 'num_of_instances': 100}
1
a100_80gb
207fad51ece9f11bb0c00832a1fdfa55de187c4da3dddddf1f0d74deedfefb15
1,736,452,767,247
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_geography
card=cards.mmlu.high_school_geography,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelowPlease.enumerator_keyboard_choicesSeparator_semicolon_shuffleChoices_lengthSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
7.595378
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.67, 'accuracy_ci_low': 0.57, 'accuracy_ci_high': 0.75, 'score_name': 'accuracy', 'score': 0.67, 'score_ci_high': 0.75, 'score_ci_low': 0.57, 'num_of_instances': 100}
1
a100_80gb
0be12cc96fc8b1772d9e62f7140d30a2c043648d47f72378044b566363fbb910
1,736,452,782,609
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu_pro.law
card=cards.mmlu_pro.law,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesStructuredWithTopic.enumerator_keyboard_choicesSeparator_comma_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
13.832747
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.13, 'accuracy_ci_low': 0.07, 'accuracy_ci_high': 0.2, 'score_name': 'accuracy', 'score': 0.13, 'score_ci_high': 0.2, 'score_ci_low': 0.07, 'num_of_instances': 100}
1
a100_80gb
b7bd2ae0db98127af64d05cefd995389c42dcc26933939b48c8f163cfa8b6200
1,736,452,790,672
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.miscellaneous
card=cards.mmlu.miscellaneous,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicFixed.enumerator_capitals_choicesSeparator_space_shuffleChoices_lengthSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
5.382192
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.79, 'accuracy_ci_low': 0.71, 'accuracy_ci_high': 0.86, 'score_name': 'accuracy', 'score': 0.79, 'score_ci_high': 0.86, 'score_ci_low': 0.71, 'num_of_instances': 100}
1
a100_80gb
93f8b1dfe30317fcd38249f4e11aa0a7ae6322173bb2f427c0a2fc5397125bae
1,736,452,790,194
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.business_ethics
card=cards.mmlu.business_ethics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateHere.enumerator_keyboard_choicesSeparator_OrCapital_shuffleChoices_lengthSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
4.90809
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.51, 'accuracy_ci_low': 0.41, 'accuracy_ci_high': 0.61, 'score_name': 'accuracy', 'score': 0.51, 'score_ci_high': 0.61, 'score_ci_low': 0.41, 'num_of_instances': 100}
1
a100_80gb
340c5f9389198f7ed1650349efdb6f957a369a81068ac673fa50135ba4a40f86
1,736,452,797,923
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_geography
card=cards.mmlu.high_school_geography,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelowPlease.enumerator_roman_choicesSeparator_OrCapital_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
7.162937
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.72, 'accuracy_ci_low': 0.6252387061106242, 'accuracy_ci_high': 0.8, 'score_name': 'accuracy', 'score': 0.72, 'score_ci_high': 0.8, 'score_ci_low': 0.6252387061106242, 'num_of_instances': 100}
1
a100_80gb
9879d339ac713577c2901c2ca1a5eea58427dd8b9fb87381bcc8447681dfbf77
1,736,452,772,775
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_chemistry
card=cards.mmlu.high_school_chemistry,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelow.enumerator_greek_choicesSeparator_pipe_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
9.416428
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.4, 'accuracy_ci_low': 0.3, 'accuracy_ci_high': 0.5, 'score_name': 'accuracy', 'score': 0.4, 'score_ci_high': 0.5, 'score_ci_low': 0.3, 'num_of_instances': 100}
1
a100_80gb
3fe8a5d3b350bbbc90a3286e7755250701391fabd4be37209738c8c933a55382
1,736,452,778,833
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.world_religions
card=cards.mmlu.world_religions,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHelmFixed.enumerator_keyboard_choicesSeparator_space_shuffleChoices_lengthSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
5.142202
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.76, 'accuracy_ci_low': 0.67, 'accuracy_ci_high': 0.83, 'score_name': 'accuracy', 'score': 0.76, 'score_ci_high': 0.83, 'score_ci_low': 0.67, 'num_of_instances': 100}
1
a100_80gb
46d6feae33ca02d3495d9379dbad5e266ac8fe85e58f4fe0ada042c525bf24ba
1,736,452,784,732
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.ai2_arc.arc_challenge
card=cards.ai2_arc.arc_challenge,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.AI2_ARC.MultipleChoiceTemplatesInstructionsWithoutTopicHelmFixed.enumerator_greek_choicesSeparator_space_shuffleChoices_lengthSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
4.472641
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.59, 'accuracy_ci_low': 0.49, 'accuracy_ci_high': 0.68, 'score_name': 'accuracy', 'score': 0.59, 'score_ci_high': 0.68, 'score_ci_low': 0.49, 'num_of_instances': 100}
1
a100_80gb
8a764531c0d38ba55cf47dfb2acb46e7c9cd1f5e5e4fcf87535c3ff33649af05
1,736,452,789,192
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.college_biology
card=cards.mmlu.college_biology,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHelmFixed.enumerator_greek_choicesSeparator_newline_shuffleChoices_lengthSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.896434
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.59, 'accuracy_ci_low': 0.49, 'accuracy_ci_high': 0.68, 'score_name': 'accuracy', 'score': 0.59, 'score_ci_high': 0.68, 'score_ci_low': 0.49, 'num_of_instances': 100}
1
a100_80gb
29fee5e5a241d02aa74ea6c429cae8c4d5c0254e4a743bbf2062e2b405741111
1,736,452,796,593
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.anatomy
card=cards.mmlu.anatomy,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelowPlease.enumerator_greek_choicesSeparator_semicolon_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
6.822857
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.73, 'accuracy_ci_low': 0.64, 'accuracy_ci_high': 0.81, 'score_name': 'accuracy', 'score': 0.73, 'score_ci_high': 0.81, 'score_ci_low': 0.64, 'num_of_instances': 100}
1
a100_80gb
e5c06ded3dd90188803aca4c65879196e52e33ea658560cdc107c3b788c2ed86
1,736,452,791,714
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu_pro.chemistry
card=cards.mmlu_pro.chemistry,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesStructuredWithoutTopic.enumerator_greek_choicesSeparator_newline_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
14.019857
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.48, 'accuracy_ci_low': 0.38, 'accuracy_ci_high': 0.57, 'score_name': 'accuracy', 'score': 0.48, 'score_ci_high': 0.57, 'score_ci_low': 0.38, 'num_of_instances': 100}
1
a100_80gb
6c2fe1071e306f53038a6155160d0fb068c37c0d3e99af8d574a750ebdab8bc3
1,736,452,779,339
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.prehistory
card=cards.mmlu.prehistory,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelow.enumerator_keyboard_choicesSeparator_newline_shuffleChoices_lengthSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
4.268818
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.54, 'accuracy_ci_low': 0.45, 'accuracy_ci_high': 0.64, 'score_name': 'accuracy', 'score': 0.54, 'score_ci_high': 0.64, 'score_ci_low': 0.45, 'num_of_instances': 100}
1
a100_80gb
197103a1e1a29e553b4cc9cd3fc4a929c7aa42405a4ea6bc217679c85f482b99
1,736,452,783,187
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.abstract_algebra
card=cards.mmlu.abstract_algebra,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithoutTopic.enumerator_keyboard_choicesSeparator_space_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.259971
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.28, 'accuracy_ci_low': 0.2, 'accuracy_ci_high': 0.38, 'score_name': 'accuracy', 'score': 0.28, 'score_ci_high': 0.38, 'score_ci_low': 0.2, 'num_of_instances': 100}
1
a100_80gb
38296a3b544ee3878f508f121a7316b048cced960f2cfc1b44b59d9e3add1baa
1,736,452,793,336
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.astronomy
card=cards.mmlu.astronomy,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithTopicHelm.enumerator_lowercase_choicesSeparator_orLower_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
9.627021
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.56, 'accuracy_ci_low': 0.46, 'accuracy_ci_high': 0.65, 'score_name': 'accuracy', 'score': 0.56, 'score_ci_high': 0.65, 'score_ci_low': 0.46, 'num_of_instances': 100}
1
a100_80gb
21919f240f268e09ac3c6953a09594bb045e5e9add243469e0558eddccd30a1a
1,736,452,797,411
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.electrical_engineering
card=cards.mmlu.electrical_engineering,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHarness.enumerator_lowercase_choicesSeparator_comma_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.073529
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.33, 'accuracy_ci_low': 0.24, 'accuracy_ci_high': 0.43, 'score_name': 'accuracy', 'score': 0.33, 'score_ci_high': 0.43, 'score_ci_low': 0.24, 'num_of_instances': 100}
1
a100_80gb
5894091c2c714c5a27ef1f3356d39d69310a47ace93d4eccf53a803997400fce
1,736,452,790,941
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.elementary_mathematics
card=cards.mmlu.elementary_mathematics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithTopic.enumerator_lowercase_choicesSeparator_pipe_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.687253
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.4, 'accuracy_ci_low': 0.31, 'accuracy_ci_high': 0.5, 'score_name': 'accuracy', 'score': 0.4, 'score_ci_high': 0.5, 'score_ci_low': 0.31, 'num_of_instances': 100}
1
a100_80gb
d1eb7acab88c98f9c64bc7b5a325d0e2f147022424c54b7b12e2a5d2609c1f20
1,736,452,795,510
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.ai2_arc.arc_challenge
card=cards.ai2_arc.arc_challenge,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.AI2_ARC.MultipleChoiceTemplatesInstructionsProSACould.enumerator_numbers_choicesSeparator_OrCapital_shuffleChoices_lengthSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
4.047032
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.41, 'accuracy_ci_low': 0.32, 'accuracy_ci_high': 0.5, 'score_name': 'accuracy', 'score': 0.41, 'score_ci_high': 0.5, 'score_ci_low': 0.32, 'num_of_instances': 100}
1
a100_80gb
1b47d6283e38cac814ab3a04b07c2f1aefc088c6387a8dd7b0eaf2e3972b3395
1,736,452,757,949
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.world_religions
card=cards.mmlu.world_religions,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateHere.enumerator_numbers_choicesSeparator_orLower_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
6.513685
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.81, 'accuracy_ci_low': 0.73, 'accuracy_ci_high': 0.88, 'score_name': 'accuracy', 'score': 0.81, 'score_ci_high': 0.88, 'score_ci_low': 0.73, 'num_of_instances': 100}
1
a100_80gb
40402b3765b4beb6eafb4cb0ad6fb99b13ae0976240c5b6425ca42eed05ef923
1,736,452,763,162
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.professional_accounting
card=cards.mmlu.professional_accounting,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicFixed.enumerator_lowercase_choicesSeparator_space_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.826794
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.27, 'accuracy_ci_low': 0.19, 'accuracy_ci_high': 0.36, 'score_name': 'accuracy', 'score': 0.27, 'score_ci_high': 0.36, 'score_ci_low': 0.19, 'num_of_instances': 100}
1
a100_80gb
beede28244f8382cf57da8938a6177dc4497dcb9c8ee1ef5c1877a086aa995c9
1,736,452,787,455
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_us_history
card=cards.mmlu.high_school_us_history,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithTopicHelm.enumerator_capitals_choicesSeparator_semicolon_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
23.680922
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.79, 'accuracy_ci_low': 0.71, 'accuracy_ci_high': 0.86, 'score_name': 'accuracy', 'score': 0.79, 'score_ci_high': 0.86, 'score_ci_low': 0.71, 'num_of_instances': 100}
1
a100_80gb
aea24e195d73c81fc56b96b94631b6d7d0022ff74d984accf1963020a9dbfb4b
1,736,452,793,947
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.college_mathematics
card=cards.mmlu.college_mathematics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSAAddress.enumerator_keyboard_choicesSeparator_OrCapital_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.373761
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.22, 'accuracy_ci_low': 0.14600848459589522, 'accuracy_ci_high': 0.31, 'score_name': 'accuracy', 'score': 0.22, 'score_ci_high': 0.31, 'score_ci_low': 0.14600848459589522, 'num_of_instances': 100}
1
a100_80gb
b9b5826a40a12a180454385f28d80d09ee314779658aec319bfa631db4b0ca3e
1,736,452,797,797
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.moral_disputes
card=cards.mmlu.moral_disputes,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHarness.enumerator_capitals_choicesSeparator_space_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.30528
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.43, 'accuracy_ci_low': 0.33, 'accuracy_ci_high': 0.53, 'score_name': 'accuracy', 'score': 0.43, 'score_ci_high': 0.53, 'score_ci_low': 0.33, 'num_of_instances': 100}
1
a100_80gb
4f947bcd016cb2bad6be644065020985f0a1eb606499aa3f21a75fc7deab9295
1,736,452,753,016
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.professional_psychology
card=cards.mmlu.professional_psychology,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelowPlease.enumerator_keyboard_choicesSeparator_space_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
4.335289
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.48, 'accuracy_ci_low': 0.38785933507609593, 'accuracy_ci_high': 0.58, 'score_name': 'accuracy', 'score': 0.48, 'score_ci_high': 0.58, 'score_ci_low': 0.38785933507609593, 'num_of_instances': 100}
1
a100_80gb
cbb9b71e65031a29c33ed5a915da54db653c4f3e4276278dc5e1b54eb9bd1221
1,736,452,766,912
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu_pro.computer_science
card=cards.mmlu_pro.computer_science,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesInstructionsStateHere.enumerator_lowercase_choicesSeparator_space_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
13.235115
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.28, 'accuracy_ci_low': 0.2, 'accuracy_ci_high': 0.38, 'score_name': 'accuracy', 'score': 0.28, 'score_ci_high': 0.38, 'score_ci_low': 0.2, 'num_of_instances': 100}
1
a100_80gb
ec366f7692c5dda074152025ae7d07deb25013884bb4b668307a5b2eb81091e9
1,736,452,775,643
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.clinical_knowledge
card=cards.mmlu.clinical_knowledge,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithoutTopic.enumerator_greek_choicesSeparator_semicolon_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
7.474358
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.59, 'accuracy_ci_low': 0.49858594536359513, 'accuracy_ci_high': 0.68, 'score_name': 'accuracy', 'score': 0.59, 'score_ci_high': 0.68, 'score_ci_low': 0.49858594536359513, 'num_of_instances': 100}
1
a100_80gb
5ab8b36c9d106467c951617cbd1ca2edc6e3d011ceece390a5199ebc4bcd1f05
1,736,452,783,243
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_world_history
card=cards.mmlu.high_school_world_history,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelow.enumerator_greek_choicesSeparator_semicolon_shuffleChoices_lengthSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
5.96327
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.72, 'accuracy_ci_low': 0.63, 'accuracy_ci_high': 0.8, 'score_name': 'accuracy', 'score': 0.72, 'score_ci_high': 0.8, 'score_ci_low': 0.63, 'num_of_instances': 100}
1
a100_80gb
12802e1c7b805a61c30d70d85e629e49498696cbdef92b34fd153ab59db19cbd
1,736,452,797,412
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu_pro.law
card=cards.mmlu_pro.law,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesInstructionsWithTopicHelm.enumerator_roman_choicesSeparator_pipe_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
13.338299
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.18, 'accuracy_ci_low': 0.11, 'accuracy_ci_high': 0.26, 'score_name': 'accuracy', 'score': 0.18, 'score_ci_high': 0.26, 'score_ci_low': 0.11, 'num_of_instances': 100}
1
a100_80gb
e9becd60a9d4ded4b6f970bf37eb87b0345479792bdc9d9c78245130cb82ce02
1,736,452,762,988
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_physics
card=cards.mmlu.high_school_physics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithTopic.enumerator_keyboard_choicesSeparator_orLower_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
4.750441
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.33, 'accuracy_ci_low': 0.24, 'accuracy_ci_high': 0.42, 'score_name': 'accuracy', 'score': 0.33, 'score_ci_high': 0.42, 'score_ci_low': 0.24, 'num_of_instances': 100}
1
a100_80gb
8832361c2e1d5004ed59683f17017a2862db05fb81bc61f3561bf3e223a81929
1,736,452,767,246
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_mathematics
card=cards.mmlu.high_school_mathematics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHarness.enumerator_lowercase_choicesSeparator_semicolon_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.55793
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.2, 'accuracy_ci_low': 0.13, 'accuracy_ci_high': 0.29, 'score_name': 'accuracy', 'score': 0.2, 'score_ci_high': 0.29, 'score_ci_low': 0.13, 'num_of_instances': 100}
1
a100_80gb
61c6d2c340d6fc03a7193be347985b55e65631b6d0d7f3cff7a1f29a78099818
1,736,452,777,119
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_psychology
card=cards.mmlu.high_school_psychology,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelow.enumerator_greek_choicesSeparator_OrCapital_shuffleChoices_placeCorrectChoiceFourth,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
8.427416
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.77, 'accuracy_ci_low': 0.68, 'accuracy_ci_high': 0.85, 'score_name': 'accuracy', 'score': 0.77, 'score_ci_high': 0.85, 'score_ci_low': 0.68, 'num_of_instances': 100}
1
a100_80gb
28dba89c8c34cd4814da7e34b530bea94844b4f6c6761ece1c2f83b583b1b74e
1,736,452,784,690
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.jurisprudence
card=cards.mmlu.jurisprudence,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHelmFixed.enumerator_roman_choicesSeparator_comma_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
6.588209
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.68, 'accuracy_ci_low': 0.58, 'accuracy_ci_high': 0.77, 'score_name': 'accuracy', 'score': 0.68, 'score_ci_high': 0.77, 'score_ci_low': 0.58, 'num_of_instances': 100}
1
a100_80gb
b902c96e542a64b5f6681ec768bcf4535dccc9e07b0d41116ba67c5d4bed2b85
1,736,452,789,702
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.philosophy
card=cards.mmlu.philosophy,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHarness.enumerator_numbers_choicesSeparator_newline_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.193004
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.58, 'accuracy_ci_low': 0.48, 'accuracy_ci_high': 0.68, 'score_name': 'accuracy', 'score': 0.58, 'score_ci_high': 0.68, 'score_ci_low': 0.48, 'num_of_instances': 100}
1
a100_80gb
736249a25288628c37aed9fb5ca4190dc17e580168358633c4338607d0490c67
1,736,452,797,040
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.virology
card=cards.mmlu.virology,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithTopic.enumerator_numbers_choicesSeparator_newline_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
6.772098
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.45, 'accuracy_ci_low': 0.35, 'accuracy_ci_high': 0.55, 'score_name': 'accuracy', 'score': 0.45, 'score_ci_high': 0.55, 'score_ci_low': 0.35, 'num_of_instances': 100}
1
a100_80gb
5ab7803b00f102e8a947782fd84bc48c170b176ec737f35abb5fce718606d070
1,736,452,739,881
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.professional_law
card=cards.mmlu.professional_law,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHelmFixed.enumerator_lowercase_choicesSeparator_space_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
5.802082
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.38, 'accuracy_ci_low': 0.29, 'accuracy_ci_high': 0.47, 'score_name': 'accuracy', 'score': 0.38, 'score_ci_high': 0.47, 'score_ci_low': 0.29, 'num_of_instances': 100}
1
a100_80gb
898d7198e778b35cbd5d31f526a8673a9b353cb6da0ea9c3ee945b1df5c8bcb5
1,736,452,764,088
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_us_history
card=cards.mmlu.high_school_us_history,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSAAddress.enumerator_keyboard_choicesSeparator_pipe_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
23.372029
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.74, 'accuracy_ci_low': 0.65, 'accuracy_ci_high': 0.82, 'score_name': 'accuracy', 'score': 0.74, 'score_ci_high': 0.82, 'score_ci_low': 0.65, 'num_of_instances': 100}
1
a100_80gb
1a0d25c7a1b99ae29a48c8ec7719325984764211c6f67f4377b266cdda180201
1,736,452,771,369
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.computer_security
card=cards.mmlu.computer_security,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicFixed.enumerator_numbers_choicesSeparator_semicolon_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
4.288399
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.54, 'accuracy_ci_low': 0.44, 'accuracy_ci_high': 0.64, 'score_name': 'accuracy', 'score': 0.54, 'score_ci_high': 0.64, 'score_ci_low': 0.44, 'num_of_instances': 100}
1
a100_80gb
72b6bc5f11732131ab793d678816da425adb77c6338de738867a7f995dbae5f8
1,736,452,775,678
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.college_computer_science
card=cards.mmlu.college_computer_science,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelow.enumerator_greek_choicesSeparator_pipe_shuffleChoices_lengthSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.732747
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.42, 'accuracy_ci_low': 0.33, 'accuracy_ci_high': 0.52, 'score_name': 'accuracy', 'score': 0.42, 'score_ci_high': 0.52, 'score_ci_low': 0.33, 'num_of_instances': 100}
1
a100_80gb
9553352b814f486cb18e2f7f104deaa5f204927a82a06e8d8021027aa7929f82
1,736,452,779,380
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.human_aging
card=cards.mmlu.human_aging,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSAAddress.enumerator_roman_choicesSeparator_semicolon_shuffleChoices_placeCorrectChoiceFourth,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.108571
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.4, 'accuracy_ci_low': 0.31, 'accuracy_ci_high': 0.5, 'score_name': 'accuracy', 'score': 0.4, 'score_ci_high': 0.5, 'score_ci_low': 0.31, 'num_of_instances': 100}
1
a100_80gb
91562b2460cfb2afe05c03de71ab9a67c0feb3fed4330b2c6fbbab6da16134bb
1,736,452,783,644
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_statistics
card=cards.mmlu.high_school_statistics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHelmFixed.enumerator_numbers_choicesSeparator_semicolon_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.729398
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.39, 'accuracy_ci_low': 0.3, 'accuracy_ci_high': 0.5, 'score_name': 'accuracy', 'score': 0.39, 'score_ci_high': 0.5, 'score_ci_low': 0.3, 'num_of_instances': 100}
1
a100_80gb
c8605d7f1721b263a310bc42e3b8c49d9fbe9f54dfe4973477a90187c30e4580
1,736,452,794,012
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu_pro.philosophy
card=cards.mmlu_pro.philosophy,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesInstructionsStateBelow.enumerator_capitals_choicesSeparator_orLower_shuffleChoices_lengthSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
9.725076
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.23, 'accuracy_ci_low': 0.16, 'accuracy_ci_high': 0.32, 'score_name': 'accuracy', 'score': 0.23, 'score_ci_high': 0.32, 'score_ci_low': 0.16, 'num_of_instances': 100}
1
a100_80gb
55fb3f69a83dbd50d8264bda338c7bd05666ae7a6a8da7ec11fee1afacd0e031
1,736,452,767,028
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.econometrics
card=cards.mmlu.econometrics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHelmFixed.enumerator_lowercase_choicesSeparator_OrCapital_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
5.986616
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.34, 'accuracy_ci_low': 0.26, 'accuracy_ci_high': 0.44, 'score_name': 'accuracy', 'score': 0.34, 'score_ci_high': 0.44, 'score_ci_low': 0.26, 'num_of_instances': 100}
1
a100_80gb
b4ce4981d5b787d4dfa8e75341af65e6c0e10dc46375865bc3dbfbdc3c5bf4e0
1,736,452,773,305
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.human_aging
card=cards.mmlu.human_aging,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicFixed.enumerator_keyboard_choicesSeparator_orLower_shuffleChoices_placeCorrectChoiceFourth,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
5.676598
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.86, 'accuracy_ci_low': 0.77, 'accuracy_ci_high': 0.91, 'score_name': 'accuracy', 'score': 0.86, 'score_ci_high': 0.91, 'score_ci_low': 0.77, 'num_of_instances': 100}
1
a100_80gb
0dac7c01f79079fd35bf7fb2e7cfc2d8e4e4b55a98a8b6197e500358f303e673
1,736,452,778,630
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.professional_medicine
card=cards.mmlu.professional_medicine,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithTopic.enumerator_lowercase_choicesSeparator_pipe_shuffleChoices_placeCorrectChoiceFourth,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
4.66101
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.34, 'accuracy_ci_low': 0.25, 'accuracy_ci_high': 0.43, 'score_name': 'accuracy', 'score': 0.34, 'score_ci_high': 0.43, 'score_ci_low': 0.25, 'num_of_instances': 100}
1
a100_80gb
e979c5f6607f6dec20062e312ae27b04e0248c5a9676231767b8aa20b67a24eb
1,736,452,789,351
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu_pro.other
card=cards.mmlu_pro.other,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesInstructionsStateHere.enumerator_roman_choicesSeparator_newline_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
9.644912
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.32, 'accuracy_ci_low': 0.23, 'accuracy_ci_high': 0.4166454078812933, 'score_name': 'accuracy', 'score': 0.32, 'score_ci_high': 0.4166454078812933, 'score_ci_low': 0.23, 'num_of_instances': 100}
1
a100_80gb
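
Note on the interval fields in the scores rows: accuracy_ci_low and accuracy_ci_high bound the accuracy over num_of_instances items. As an illustration only, and assuming a percentile bootstrap (the harness's actual CI method is not shown in these rows), a minimal Python sketch that produces an interval of this shape for the mmlu_pro.other row above (32 correct out of 100):

import random

def bootstrap_ci(outcomes, n_resamples=1000, alpha=0.05, seed=0):
    # Percentile bootstrap: resample the 0/1 outcomes with replacement,
    # take the mean of each resample, and read off the alpha/2 and
    # 1 - alpha/2 quantiles of the sorted means.
    rng = random.Random(seed)
    n = len(outcomes)
    means = sorted(
        sum(rng.choices(outcomes, k=n)) / n for _ in range(n_resamples)
    )
    lo = means[int((alpha / 2) * n_resamples)]
    hi = means[int((1 - alpha / 2) * n_resamples) - 1]
    return lo, hi

# 32 correct out of 100, as in the row above; the result should land
# near the reported (0.23, 0.4166...) interval, though the exact digits
# depend on the resampling seed and method.
outcomes = [1] * 32 + [0] * 68
print(bootstrap_ci(outcomes))
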
bf645133b29dfb87245c4147827306c6d409db56444e7fb268a48dfb39dd6b5e
1,736,452,794,305
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_microeconomics
card=cards.mmlu.high_school_microeconomics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithoutTopic.enumerator_roman_choicesSeparator_comma_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.394944
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.5, 'accuracy_ci_low': 0.4, 'accuracy_ci_high': 0.6027654047832152, 'score_name': 'accuracy', 'score': 0.5, 'score_ci_high': 0.6027654047832152, 'score_ci_low': 0.4, 'num_of_instances': 100}
1
a100_80gb
b83e3f294074b70e87b6873068e095d8408ce0afa3c0d530ad280e5869c2f075
1,736,452,761,929
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.moral_disputes
card=cards.mmlu.moral_disputes,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateHere.enumerator_numbers_choicesSeparator_pipe_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
9.300261
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.55, 'accuracy_ci_low': 0.45, 'accuracy_ci_high': 0.65, 'score_name': 'accuracy', 'score': 0.55, 'score_ci_high': 0.65, 'score_ci_low': 0.45, 'num_of_instances': 100}
1
a100_80gb
f88d24317d6e65594ee5d49ef5ce6895838bfbda27598618103bb2cf340f75f1
1,736,452,771,182
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_microeconomics
card=cards.mmlu.high_school_microeconomics,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelow.enumerator_capitals_choicesSeparator_pipe_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
8.284904
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.58, 'accuracy_ci_low': 0.49, 'accuracy_ci_high': 0.68, 'score_name': 'accuracy', 'score': 0.58, 'score_ci_high': 0.68, 'score_ci_low': 0.49, 'num_of_instances': 100}
1
a100_80gb
6f82df57a9cd694c14fa5a3c01b2c8c2d03069749ef8be5d45e4a888bb18f6d7
1,736,452,779,244
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_psychology
card=cards.mmlu.high_school_psychology,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHarness.enumerator_numbers_choicesSeparator_comma_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
7.228401
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.87, 'accuracy_ci_low': 0.8, 'accuracy_ci_high': 0.93, 'score_name': 'accuracy', 'score': 0.87, 'score_ci_high': 0.93, 'score_ci_low': 0.8, 'num_of_instances': 100}
1
a100_80gb
be1fc98906ffc685079837a77f6ba12944c4f57494655eed97d9a0b0784c9362
1,736,452,784,404
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_government_and_politics
card=cards.mmlu.high_school_government_and_politics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithTopic.enumerator_numbers_choicesSeparator_OrCapital_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.534724
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.69, 'accuracy_ci_low': 0.6, 'accuracy_ci_high': 0.77, 'score_name': 'accuracy', 'score': 0.69, 'score_ci_high': 0.77, 'score_ci_low': 0.6, 'num_of_instances': 100}
1
a100_80gb
5808105afebf61e5efbf80d30ac5a82752fe6abea3c13b10bf79f42884ec3e40
1,736,452,788,186
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.global_facts
card=cards.mmlu.global_facts,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelow.enumerator_capitals_choicesSeparator_newline_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.172409
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.34, 'accuracy_ci_low': 0.24, 'accuracy_ci_high': 0.44, 'score_name': 'accuracy', 'score': 0.34, 'score_ci_high': 0.44, 'score_ci_low': 0.24, 'num_of_instances': 100}
1
a100_80gb
7317b0b6e51b15cd289c7da4454141256f5402d1b4e85879cdeae1ead87781be
1,736,452,795,224
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.medical_genetics
card=cards.mmlu.medical_genetics,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicFixed.enumerator_keyboard_choicesSeparator_OrCapital_shuffleChoices_lengthSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
6.546308
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.62, 'accuracy_ci_low': 0.52, 'accuracy_ci_high': 0.71, 'score_name': 'accuracy', 'score': 0.62, 'score_ci_high': 0.71, 'score_ci_low': 0.52, 'num_of_instances': 100}
1
a100_80gb
c67d0eb301cd950fbba6f31002e5670d334c12821322cbfcd875bca6f0816247
1,736,452,758,920
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.openbook_qa
card=cards.openbook_qa,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.OpenBookQA.MultipleChoiceTemplatesInstructionsWithoutTopicHelmFixed.enumerator_capitals_choicesSeparator_orLower_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
6.800406
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.76, 'accuracy_ci_low': 0.6685824908592428, 'accuracy_ci_high': 0.84, 'score_name': 'accuracy', 'score': 0.76, 'score_ci_high': 0.84, 'score_ci_low': 0.6685824908592428, 'num_of_instances': 100}
1
a100_80gb
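
A small reading note: unlike the JSON-encoded columns around it (generation_args, model_args, packages_versions), the scores column is a Python-repr dict with single quotes, so json.loads will reject it. A minimal sketch, assuming the rows are read back as raw strings, using ast.literal_eval on the openbook_qa row above:

import ast

scores = ast.literal_eval(
    "{'accuracy': 0.76, 'accuracy_ci_low': 0.6685824908592428, "
    "'accuracy_ci_high': 0.84, 'score_name': 'accuracy', 'score': 0.76, "
    "'score_ci_high': 0.84, 'score_ci_low': 0.6685824908592428, "
    "'num_of_instances': 100}"
)
print(scores["accuracy"], scores["num_of_instances"])  # 0.76 100
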
9eb43f5e8968ac71afcc56b8d3f02affbfbc03c0c85da4dab0c5a80ad0dd2d9f
1,736,452,765,648
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.social_iqa
card=cards.social_iqa,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.Social_IQa.MultipleChoiceTemplatesInstructionsWithoutTopicHarness.enumerator_roman_choicesSeparator_space_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
5.211868
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.34, 'accuracy_ci_low': 0.25, 'accuracy_ci_high': 0.44, 'score_name': 'accuracy', 'score': 0.34, 'score_ci_high': 0.44, 'score_ci_low': 0.25, 'num_of_instances': 100}
1
a100_80gb
3274d118062960bf5c5fe5b8c19076005471f5cb02410fd1b17e4c0f44130750
1,736,452,769,979
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_chemistry
card=cards.mmlu.high_school_chemistry,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSAAddress.enumerator_lowercase_choicesSeparator_semicolon_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.754184
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.41, 'accuracy_ci_low': 0.31, 'accuracy_ci_high': 0.51, 'score_name': 'accuracy', 'score': 0.41, 'score_ci_high': 0.51, 'score_ci_low': 0.31, 'num_of_instances': 100}
1
a100_80gb
3f65727348358d52c044fefbe0b4924030726b61539964151e9d3b4506fdd8f9
1,736,452,774,273
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.econometrics
card=cards.mmlu.econometrics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHarness.enumerator_roman_choicesSeparator_comma_shuffleChoices_placeCorrectChoiceFourth,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.641729
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.38, 'accuracy_ci_low': 0.29, 'accuracy_ci_high': 0.48, 'score_name': 'accuracy', 'score': 0.38, 'score_ci_high': 0.48, 'score_ci_low': 0.29, 'num_of_instances': 100}
1
a100_80gb
99c55e048a4121dfb27053b01dc8c36cf9da6e039432e24f99863fecf95f2e56
1,736,452,784,523
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.business_ethics
card=cards.mmlu.business_ethics,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelow.enumerator_keyboard_choicesSeparator_newline_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
9.581589
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.63, 'accuracy_ci_low': 0.53, 'accuracy_ci_high': 0.72, 'score_name': 'accuracy', 'score': 0.63, 'score_ci_high': 0.72, 'score_ci_low': 0.53, 'num_of_instances': 100}
1
a100_80gb
d5d56db3ac99da9023e36fa94d3eae64c834e8501b2d7591c463e1744739795d
1,736,452,792,230
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_microeconomics
card=cards.mmlu.high_school_microeconomics,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithTopic.enumerator_numbers_choicesSeparator_space_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
6.7295
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.6, 'accuracy_ci_low': 0.5, 'accuracy_ci_high': 0.7, 'score_name': 'accuracy', 'score': 0.6, 'score_ci_high': 0.7, 'score_ci_low': 0.5, 'num_of_instances': 100}
1
a100_80gb
0261ce53d15bbf50e22bc73f28516824e544150480cc2dc52b27d7db0de0e511
1,736,452,797,538
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.college_mathematics
card=cards.mmlu.college_mathematics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelowPlease.enumerator_lowercase_choicesSeparator_OrCapital_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
4.481496
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.3, 'accuracy_ci_low': 0.21, 'accuracy_ci_high': 0.39381466799613807, 'score_name': 'accuracy', 'score': 0.3, 'score_ci_high': 0.39381466799613807, 'score_ci_low': 0.21, 'num_of_instances': 100}
1
a100_80gb
c216af4c60326752b0478dc2018131a7d6c01e56179b3e013a03cdbf93725cd5
1,736,452,747,472
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_psychology
card=cards.mmlu.high_school_psychology,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHarness.enumerator_lowercase_choicesSeparator_pipe_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
7.975397
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.82, 'accuracy_ci_low': 0.74, 'accuracy_ci_high': 0.89, 'score_name': 'accuracy', 'score': 0.82, 'score_ci_high': 0.89, 'score_ci_low': 0.74, 'num_of_instances': 100}
1
a100_80gb
481b1c32ab877fae6ab7fb6451fd992a5081832c3e424e188fdf3c4aa1ee3952
1,736,452,760,667
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu_pro.math
card=cards.mmlu_pro.math,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesInstructionsWithTopicHelm.enumerator_keyboard_choicesSeparator_OrCapital_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
11.541103
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.16, 'accuracy_ci_low': 0.09, 'accuracy_ci_high': 0.24, 'score_name': 'accuracy', 'score': 0.16, 'score_ci_high': 0.24, 'score_ci_low': 0.09, 'num_of_instances': 100}
1
a100_80gb
6291e9088b1754f06b02985e3efb614f5ebfc2018f4af1a1d5c05a4e50e018f6
1,736,452,767,484
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu_pro.biology
card=cards.mmlu_pro.biology,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesInstructionsProSASimple.enumerator_roman_choicesSeparator_semicolon_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
4.714688
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.27, 'accuracy_ci_low': 0.18, 'accuracy_ci_high': 0.37, 'score_name': 'accuracy', 'score': 0.27, 'score_ci_high': 0.37, 'score_ci_low': 0.18, 'num_of_instances': 100}
1
a100_80gb
f4043b101e0e5c642c19385240a7c132674e0bcd653d3052e435cd1fe5a6f119
1,736,452,774,887
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.abstract_algebra
card=cards.mmlu.abstract_algebra,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithTopic.enumerator_capitals_choicesSeparator_newline_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
6.558856
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.34, 'accuracy_ci_low': 0.26, 'accuracy_ci_high': 0.44, 'score_name': 'accuracy', 'score': 0.34, 'score_ci_high': 0.44, 'score_ci_low': 0.26, 'num_of_instances': 100}
1
a100_80gb
d9d9ff5e7a131ac3a3601160ccd5332ff4660300e00baacd4dc0709b3bf7e260
1,736,452,782,663
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.logical_fallacies
card=cards.mmlu.logical_fallacies,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSAAddress.enumerator_keyboard_choicesSeparator_OrCapital_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
7.053165
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.67, 'accuracy_ci_low': 0.58, 'accuracy_ci_high': 0.75, 'score_name': 'accuracy', 'score': 0.67, 'score_ci_high': 0.75, 'score_ci_low': 0.58, 'num_of_instances': 100}
1
a100_80gb
d28cbf66a883ecd18aa7a07c4cd3ff225702242f964017f4d7f44a5578dc1ecf
1,736,452,788,148
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_government_and_politics
card=cards.mmlu.high_school_government_and_politics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithTopic.enumerator_keyboard_choicesSeparator_space_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.619267
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.69, 'accuracy_ci_low': 0.6, 'accuracy_ci_high': 0.7745212663815108, 'score_name': 'accuracy', 'score': 0.69, 'score_ci_high': 0.7745212663815108, 'score_ci_low': 0.6, 'num_of_instances': 100}
1
a100_80gb
d6d9e36a78d11d5fcc6fb100953463b6a985344252469f0ad7e3ff47a8fb6819
1,736,452,795,995
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_mathematics
card=cards.mmlu.high_school_mathematics,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHelmFixed.enumerator_lowercase_choicesSeparator_OrCapital_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
7.250901
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.28, 'accuracy_ci_low': 0.2, 'accuracy_ci_high': 0.37, 'score_name': 'accuracy', 'score': 0.28, 'score_ci_high': 0.37, 'score_ci_low': 0.2, 'num_of_instances': 100}
1
a100_80gb
f89eee45fe9c343a073bc25da590f8610a6dbe17d3f3e32cd46620da6889f7f0
1,736,452,756,749
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.philosophy
card=cards.mmlu.philosophy,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelow.enumerator_lowercase_choicesSeparator_semicolon_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.738193
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.65, 'accuracy_ci_low': 0.55, 'accuracy_ci_high': 0.74, 'score_name': 'accuracy', 'score': 0.65, 'score_ci_high': 0.74, 'score_ci_low': 0.55, 'num_of_instances': 100}
1
a100_80gb
041c4d58d38fc62924c23e089578a267644e7b4dfd6bb1b445ac73b47c4074af
1,736,452,764,866
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_physics
card=cards.mmlu.high_school_physics,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSAAddress.enumerator_roman_choicesSeparator_comma_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
7.540319
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.34, 'accuracy_ci_low': 0.24, 'accuracy_ci_high': 0.43, 'score_name': 'accuracy', 'score': 0.34, 'score_ci_high': 0.43, 'score_ci_low': 0.24, 'num_of_instances': 100}
1
a100_80gb
1a836972630e88ef56fed3f7bd2f1120ac5a7b3382549868c885674df279bb3b
1,736,452,769,673
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.human_sexuality
card=cards.mmlu.human_sexuality,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHelmFixed.enumerator_capitals_choicesSeparator_newline_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.148738
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.55, 'accuracy_ci_low': 0.45, 'accuracy_ci_high': 0.65, 'score_name': 'accuracy', 'score': 0.55, 'score_ci_high': 0.65, 'score_ci_low': 0.45, 'num_of_instances': 100}
1
a100_80gb
ad1240ea06812dc3c4d05dfe11c9f358b15e10e6bfcdb9646c7bb17f7802efac
1,736,452,786,519
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.human_sexuality
card=cards.mmlu.human_sexuality,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithTopicHelm.enumerator_roman_choicesSeparator_pipe_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
6.929633
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.71, 'accuracy_ci_low': 0.62, 'accuracy_ci_high': 0.8, 'score_name': 'accuracy', 'score': 0.71, 'score_ci_high': 0.8, 'score_ci_low': 0.62, 'num_of_instances': 100}
1
a100_80gb
7ed6c6c5388245589bddf1dd9b5ef72653d4919d153fb27e3fc39f22b19a3c93
1,736,452,774,381
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.security_studies
card=cards.mmlu.security_studies,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithoutTopic.enumerator_greek_choicesSeparator_OrCapital_shuffleChoices_lengthSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
4.187674
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.42, 'accuracy_ci_low': 0.32297523195074235, 'accuracy_ci_high': 0.52, 'score_name': 'accuracy', 'score': 0.42, 'score_ci_high': 0.52, 'score_ci_low': 0.32297523195074235, 'num_of_instances': 100}
1
a100_80gb
d27c6319cd13d2b8e29064f2f738d79aa1d0f09dc94d00155d09eea5af2902de
1,736,452,778,956
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_statistics
card=cards.mmlu.high_school_statistics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSAAddress.enumerator_numbers_choicesSeparator_orLower_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.788402
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.24, 'accuracy_ci_low': 0.16, 'accuracy_ci_high': 0.33, 'score_name': 'accuracy', 'score': 0.24, 'score_ci_high': 0.33, 'score_ci_low': 0.16, 'num_of_instances': 100}
1
a100_80gb
1ad5a233b073a15159fe06f3cc6ed2aca4aebfae6123db44e34020a461de7ce4
1,736,452,793,978
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.logical_fallacies
card=cards.mmlu.logical_fallacies,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicFixed.enumerator_keyboard_choicesSeparator_OrCapital_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
6.72652
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.72, 'accuracy_ci_low': 0.62, 'accuracy_ci_high': 0.8, 'score_name': 'accuracy', 'score': 0.72, 'score_ci_high': 0.8, 'score_ci_low': 0.62, 'num_of_instances': 100}
1
a100_80gb
ef358370f3dc850d48a5cb96934738fb774a5556f91e6b567da76fb17413e82b
1,736,452,745,782
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.college_physics
card=cards.mmlu.college_physics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithTopicHelm.enumerator_keyboard_choicesSeparator_pipe_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.987248
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.32, 'accuracy_ci_low': 0.24, 'accuracy_ci_high': 0.4229857388197172, 'score_name': 'accuracy', 'score': 0.32, 'score_ci_high': 0.4229857388197172, 'score_ci_low': 0.24, 'num_of_instances': 100}
1
a100_80gb
49f418bee926e500f1ad4f038d39379b555e788ef348252b3c7898cf34632cd6
1,736,452,755,449
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.college_medicine
card=cards.mmlu.college_medicine,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithTopic.enumerator_roman_choicesSeparator_pipe_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
9.072916
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.58, 'accuracy_ci_low': 0.48, 'accuracy_ci_high': 0.68, 'score_name': 'accuracy', 'score': 0.58, 'score_ci_high': 0.68, 'score_ci_low': 0.48, 'num_of_instances': 100}
1
a100_80gb
117768c33171a3261f5c021664938168766a4e63eca9c87f452e134f16dc487b
1,736,452,762,134
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu_pro.law
card=cards.mmlu_pro.law,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesInstructionsWithoutTopicHarness.enumerator_greek_choicesSeparator_newline_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
5.719879
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.16, 'accuracy_ci_low': 0.09662384711519124, 'accuracy_ci_high': 0.24, 'score_name': 'accuracy', 'score': 0.16, 'score_ci_high': 0.24, 'score_ci_low': 0.09662384711519124, 'num_of_instances': 100}
1
a100_80gb
0406eaee43f610b991ccbf64dadb4767e0e016b3f336061e5fb9c17e5b2925f4
1,736,452,770,825
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.us_foreign_policy
card=cards.mmlu.us_foreign_policy,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithTopic.enumerator_roman_choicesSeparator_OrCapital_shuffleChoices_lengthSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
7.630924
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.69, 'accuracy_ci_low': 0.59, 'accuracy_ci_high': 0.78, 'score_name': 'accuracy', 'score': 0.69, 'score_ci_high': 0.78, 'score_ci_low': 0.59, 'num_of_instances': 100}
1
a100_80gb
57c4a67ff28e61ef0d5ba1ce5f903fc48fb3c703f5167a67719d66f90dfcd830
1,736,452,775,134
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_chemistry
card=cards.mmlu.high_school_chemistry,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateHere.enumerator_roman_choicesSeparator_semicolon_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.474153
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.42, 'accuracy_ci_low': 0.32, 'accuracy_ci_high': 0.52, 'score_name': 'accuracy', 'score': 0.42, 'score_ci_high': 0.52, 'score_ci_low': 0.32, 'num_of_instances': 100}
1
a100_80gb
0df421bd9768453b7171d2c6d0c121f9226073fd2e0a08b119fc12734798882b
1,736,452,781,169
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.world_religions
card=cards.mmlu.world_religions,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelow.enumerator_lowercase_choicesSeparator_orLower_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
5.457812
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.75, 'accuracy_ci_low': 0.66, 'accuracy_ci_high': 0.84, 'score_name': 'accuracy', 'score': 0.75, 'score_ci_high': 0.84, 'score_ci_low': 0.66, 'num_of_instances': 100}
1
a100_80gb
89c5fb7b8bb71fd1a33ff629795f3481c8be998b969dfa703299ef98d228f253
1,736,452,789,043
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.openbook_qa
card=cards.openbook_qa,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.OpenBookQA.MultipleChoiceTemplatesStructuredWithoutTopic.enumerator_roman_choicesSeparator_semicolon_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
6.456141
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.81, 'accuracy_ci_low': 0.72, 'accuracy_ci_high': 0.88, 'score_name': 'accuracy', 'score': 0.81, 'score_ci_high': 0.88, 'score_ci_low': 0.72, 'num_of_instances': 100}
1
a100_80gb
0f0f074198c1dcacb43aaa85ab0ed2ea6a7157cbf83f08fcad27b90869eb4ff4
1,736,452,792,889
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.human_aging
card=cards.mmlu.human_aging,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateHere.enumerator_capitals_choicesSeparator_OrCapital_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.176036
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.58, 'accuracy_ci_low': 0.48, 'accuracy_ci_high': 0.67, 'score_name': 'accuracy', 'score': 0.58, 'score_ci_high': 0.67, 'score_ci_low': 0.48, 'num_of_instances': 100}
1
a100_80gb
998b2e841ed3cc9f7e18a9155983f11d0c71dc24389d5339f23d187b2a5964b1
1,736,452,796,672
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_macroeconomics
card=cards.mmlu.high_school_macroeconomics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithTopicHelm.enumerator_keyboard_choicesSeparator_space_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.261045
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.52, 'accuracy_ci_low': 0.43, 'accuracy_ci_high': 0.62, 'score_name': 'accuracy', 'score': 0.52, 'score_ci_high': 0.62, 'score_ci_low': 0.43, 'num_of_instances': 100}
1
a100_80gb
be9492cf074ccdc9963977bbc49ce4fea3326e7828b328ab5ed44f7efe5432e7
1,736,452,735,151
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.conceptual_physics
card=cards.mmlu.conceptual_physics,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSACould.enumerator_numbers_choicesSeparator_semicolon_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
6.05414
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.55, 'accuracy_ci_low': 0.45, 'accuracy_ci_high': 0.65, 'score_name': 'accuracy', 'score': 0.55, 'score_ci_high': 0.65, 'score_ci_low': 0.45, 'num_of_instances': 100}
1
a100_80gb
9e632aed231e564d52331058e36b8b20250c529b8bda9988c7784843b9324f19
1,736,452,748,780
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu_pro.physics
card=cards.mmlu_pro.physics,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesInstructionsStateBelow.enumerator_roman_choicesSeparator_orLower_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
12.165345
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.15, 'accuracy_ci_low': 0.09, 'accuracy_ci_high': 0.23, 'score_name': 'accuracy', 'score': 0.15, 'score_ci_high': 0.23, 'score_ci_low': 0.09, 'num_of_instances': 100}
1
a100_80gb
979f02cc4d21ca747bfe8c057354cef6606c5d557c604f103533b76d4c565376
1,736,452,753,935
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.world_religions
card=cards.mmlu.world_religions,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSACould.enumerator_lowercase_choicesSeparator_comma_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.227467
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.57, 'accuracy_ci_low': 0.47, 'accuracy_ci_high': 0.66, 'score_name': 'accuracy', 'score': 0.57, 'score_ci_high': 0.66, 'score_ci_low': 0.47, 'num_of_instances': 100}
1
a100_80gb
1198ade6018d7137f1054d25dedc7a7c7a99e7c0172a45e03754c06adadbcdd0
1,736,452,773,114
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.world_religions
card=cards.mmlu.world_religions,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelowPlease.enumerator_numbers_choicesSeparator_OrCapital_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
7.042838
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.77, 'accuracy_ci_low': 0.68, 'accuracy_ci_high': 0.85, 'score_name': 'accuracy', 'score': 0.77, 'score_ci_high': 0.85, 'score_ci_low': 0.68, 'num_of_instances': 100}
1
a100_80gb
be97e7565f24343b1e21f73bc88aa0531b3313aafffd705b0b9e9fc96aef240e
1,736,452,757,828
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_government_and_politics
card=cards.mmlu.high_school_government_and_politics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSACould.enumerator_numbers_choicesSeparator_newline_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.36456
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.49, 'accuracy_ci_low': 0.39, 'accuracy_ci_high': 0.58, 'score_name': 'accuracy', 'score': 0.49, 'score_ci_high': 0.58, 'score_ci_low': 0.39, 'num_of_instances': 100}
1
a100_80gb
6a43bc12075241ee032fde97a30c4450f674fd5de356e6220555d9afa795c02b
1,736,452,761,796
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_macroeconomics
card=cards.mmlu.high_school_macroeconomics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHarness.enumerator_greek_choicesSeparator_comma_shuffleChoices_placeCorrectChoiceFourth,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.356077
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.57, 'accuracy_ci_low': 0.47, 'accuracy_ci_high': 0.66, 'score_name': 'accuracy', 'score': 0.57, 'score_ci_high': 0.66, 'score_ci_low': 0.47, 'num_of_instances': 100}
1
a100_80gb
b41086d6ab1084815d4e6f04b57a5be4dfbc99ed70e759260b08549147cf260a
1,736,452,765,526
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.virology
card=cards.mmlu.virology,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSACould.enumerator_capitals_choicesSeparator_semicolon_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.149271
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.31, 'accuracy_ci_low': 0.22, 'accuracy_ci_high': 0.4, 'score_name': 'accuracy', 'score': 0.31, 'score_ci_high': 0.4, 'score_ci_low': 0.22, 'num_of_instances': 100}
1
a100_80gb
11206c84d327e6e1113d49548cbf5ae109f919ad2000b5ee3e3e4a2012d272fa
1,736,452,777,553
1,736,449,200,000
mistralai_Mistral-7B-Instruct-v0.3
cards.mmlu.high_school_computer_science
card=cards.mmlu.high_school_computer_science,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithoutTopic.enumerator_greek_choicesSeparator_comma_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
None
half
3.736256
{"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
{"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
VLLM
{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
{'accuracy': 0.53, 'accuracy_ci_low': 0.43, 'accuracy_ci_high': 0.63, 'score_name': 'accuracy', 'score': 0.53, 'score_ci_high': 0.63, 'score_ci_low': 0.43, 'num_of_instances': 100}
1
a100_80gb
run_id: 0622de98ebc4cf7552b3f06264204b045e64aa2b818acf87591637a8baa569a8
timestamp_utc: 1736452786424
timestamp_day_hour_utc: 1736449200000
model_name_or_path: mistralai_Mistral-7B-Instruct-v0.3
unitxt_card: cards.mmlu.college_medicine
unitxt_recipe: card=cards.mmlu.college_medicine,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithTopicHelm.enumerator_lowercase_choicesSeparator_orLower_shuffleChoices_placeCorrectChoiceFirst,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
quantization_type: None
quantization_bit_count: half
inference_runtime_s: 8.249482
generation_args: {"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
model_args: {"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
inference_engine: VLLM
packages_versions: {"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
scores: {'accuracy': 0.58, 'accuracy_ci_low': 0.49, 'accuracy_ci_high': 0.67, 'score_name': 'accuracy', 'score': 0.58, 'score_ci_high': 0.67, 'score_ci_low': 0.49, 'num_of_instances': 100}
num_gpu: 1
device: a100_80gb

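timestamp_utc is in milliseconds since the Unix epoch, while timestamp_day_hour_utc is the same instant truncated to the hour (useful as a grouping key for runs launched in the same batch). A small conversion check using the college_medicine record above:

```python
from datetime import datetime, timezone

ts_ms = 1736452786424    # timestamp_utc of the college_medicine record
hour_ms = 1736449200000  # its timestamp_day_hour_utc

print(datetime.fromtimestamp(ts_ms / 1000, tz=timezone.utc))
# 2025-01-09 19:59:46.424000+00:00
print(datetime.fromtimestamp(hour_ms / 1000, tz=timezone.utc))
# 2025-01-09 19:00:00+00:00

# Truncating the full timestamp to the hour recovers the grouping key.
assert ts_ms // 3_600_000 * 3_600_000 == hour_ms
```
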
run_id: c8ce097e480318371ecc0c815cff2e6c348085a6a1ac525fad7207579d74f5fd
timestamp_utc: 1736452799596
timestamp_day_hour_utc: 1736449200000
model_name_or_path: mistralai_Mistral-7B-Instruct-v0.3
unitxt_card: cards.mmlu_pro.computer_science
unitxt_recipe: card=cards.mmlu_pro.computer_science,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesInstructionsWithoutTopicHelmFixed.enumerator_keyboard_choicesSeparator_space_shuffleChoices_False,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
quantization_type: None
quantization_bit_count: half
inference_runtime_s: 11.269517
generation_args: {"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
model_args: {"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
inference_engine: VLLM
packages_versions: {"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
scores: {'accuracy': 0.26, 'accuracy_ci_low': 0.17, 'accuracy_ci_high': 0.36, 'score_name': 'accuracy', 'score': 0.26, 'score_ci_high': 0.36, 'score_ci_low': 0.17, 'num_of_instances': 100}
num_gpu: 1
device: a100_80gb

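The unitxt_recipe strings are flat key=value lists, so run configurations can be diffed and grouped programmatically. A small parser, safe here because none of the values in this dump contain commas:

```python
def parse_recipe(recipe: str) -> dict[str, str]:
    # Each comma-separated fragment is "key=value"; split on the first "=".
    return dict(kv.split("=", 1) for kv in recipe.split(","))

cfg = parse_recipe(
    "card=cards.mmlu_pro.computer_science,demos_pool_size=100,num_demos=5,"
    "format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO."
    "MultipleChoiceTemplatesInstructionsWithoutTopicHelmFixed."
    "enumerator_keyboard_choicesSeparator_space_shuffleChoices_False,"
    "system_prompt=system_prompts.empty,demos_taken_from=train,"
    "demos_removed_from_data=True,max_test_instances=100"
)
print(cfg["card"], cfg["num_demos"])  # cards.mmlu_pro.computer_science 5
```
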
run_id: 56f864afcd7568117fca0c431a8d89306e684e5f7b415940895d56bf32818ff2
timestamp_utc: 1736452747098
timestamp_day_hour_utc: 1736449200000
model_name_or_path: mistralai_Mistral-7B-Instruct-v0.3
unitxt_card: cards.mmlu.philosophy
unitxt_recipe: card=cards.mmlu.philosophy,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHelmFixed.enumerator_capitals_choicesSeparator_orLower_shuffleChoices_lengthSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
quantization_type: None
quantization_bit_count: half
inference_runtime_s: 6.528967
generation_args: {"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
model_args: {"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
inference_engine: VLLM
packages_versions: {"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
scores: {'accuracy': 0.67, 'accuracy_ci_low': 0.58, 'accuracy_ci_high': 0.76, 'score_name': 'accuracy', 'score': 0.67, 'score_ci_high': 0.76, 'score_ci_low': 0.58, 'num_of_instances': 100}
num_gpu: 1
device: a100_80gb

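Because the dump mixes 0-shot and 5-shot runs of the same cards (high_school_computer_science appears at 0.53 zero-shot and 0.61 five-shot), a tabular comparison is the natural next step. A sketch, assuming the records have been exported to a hypothetical runs.parquet with the column names used in this dump:

```python
import ast

import pandas as pd

df = pd.read_parquet("runs.parquet")  # hypothetical export of this dump

# Pull the shot count out of the recipe string and accuracy out of the scores dict
# (the scores field is a Python dict literal, hence ast.literal_eval).
df["num_demos"] = df["unitxt_recipe"].str.extract(r"num_demos=(\d+)", expand=False).astype(int)
df["accuracy"] = df["scores"].map(lambda s: ast.literal_eval(s)["accuracy"])

# Mean accuracy per card and shot count.
print(df.pivot_table(index="unitxt_card", columns="num_demos", values="accuracy"))
```
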
run_id: 8ab771492e88a66e8a5244d57c93768934021b5a92a49aa2bd59c64dc7b2c19c
timestamp_utc: 1736452751380
timestamp_day_hour_utc: 1736449200000
model_name_or_path: mistralai_Mistral-7B-Instruct-v0.3
unitxt_card: cards.mmlu.college_mathematics
unitxt_recipe: card=cards.mmlu.college_mathematics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsProSASimple.enumerator_numbers_choicesSeparator_OrCapital_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
quantization_type: None
quantization_bit_count: half
inference_runtime_s: 3.530155
generation_args: {"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
model_args: {"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
inference_engine: VLLM
packages_versions: {"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
scores: {'accuracy': 0.22, 'accuracy_ci_low': 0.15, 'accuracy_ci_high': 0.31, 'score_name': 'accuracy', 'score': 0.22, 'score_ci_high': 0.31, 'score_ci_low': 0.15, 'num_of_instances': 100}
num_gpu: 1
device: a100_80gb

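For 4-choice MMLU tasks, random guessing sits at 0.25, so a run is only clearly above chance when its entire confidence interval clears that line; the college_mathematics record above (0.22, CI [0.15, 0.31]) straddles it. A tiny helper to flag such cases, using ci_low values copied from records in this dump:

```python
CHANCE = 0.25  # 4-choice MMLU baseline

def above_chance(ci_low: float) -> bool:
    """True when the whole 95% CI lies above random guessing."""
    return ci_low > CHANCE

runs = {
    "college_mathematics (0-shot)": 0.15,  # accuracy_ci_low from the record above
    "philosophy (5-shot)": 0.58,
    "college_physics (0-shot)": 0.16,
}
for name, ci_low in runs.items():
    print(f"{name}: {'above' if above_chance(ci_low) else 'not above'} chance")
```
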
run_id: cc2358ae7495012669398d6b2e4b386e0be24bdb6c653d72eaba467d4425dfdc
timestamp_utc: 1736452758910
timestamp_day_hour_utc: 1736449200000
model_name_or_path: mistralai_Mistral-7B-Instruct-v0.3
unitxt_card: cards.mmlu.anatomy
unitxt_recipe: card=cards.mmlu.anatomy,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsWithoutTopicHelmFixed.enumerator_capitals_choicesSeparator_comma_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
quantization_type: None
quantization_bit_count: half
inference_runtime_s: 6.981461
generation_args: {"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
model_args: {"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
inference_engine: VLLM
packages_versions: {"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
scores: {'accuracy': 0.48, 'accuracy_ci_low': 0.38, 'accuracy_ci_high': 0.58, 'score_name': 'accuracy', 'score': 0.48, 'score_ci_high': 0.58, 'score_ci_low': 0.38, 'num_of_instances': 100}
num_gpu: 1
device: a100_80gb

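packages_versions pins the full software stack, so any record can be turned into a reproducible environment. A small helper that emits pip requirement pins from the record's JSON:

```python
import json

packages_versions = json.loads(
    '{"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0",'
    ' "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}'
)

# One "name==version" line per package, suitable for a requirements.txt.
print("\n".join(f"{name}=={version}" for name, version in packages_versions.items()))
```
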
run_id: 474497860e8d000e456486ac2c8b1bad4f2bde30d2b64997c558ad6d045b23e4
timestamp_utc: 1736452764374
timestamp_day_hour_utc: 1736449200000
model_name_or_path: mistralai_Mistral-7B-Instruct-v0.3
unitxt_card: cards.mmlu_pro.biology
unitxt_recipe: card=cards.mmlu_pro.biology,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU_PRO.MultipleChoiceTemplatesInstructionsProSAAddress.enumerator_greek_choicesSeparator_comma_shuffleChoices_alphabeticalSort,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
quantization_type: None
quantization_bit_count: half
inference_runtime_s: 4.733042
generation_args: {"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
model_args: {"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
inference_engine: VLLM
packages_versions: {"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
scores: {'accuracy': 0.26, 'accuracy_ci_low': 0.18, 'accuracy_ci_high': 0.36, 'score_name': 'accuracy', 'score': 0.26, 'score_ci_high': 0.36, 'score_ci_low': 0.18, 'num_of_instances': 100}
num_gpu: 1
device: a100_80gb

run_id: f67d50b331177761d8cc75613e4612f086945f5c5d43da19e013e5cff67a5c1e
timestamp_utc: 1736452778452
timestamp_day_hour_utc: 1736449200000
model_name_or_path: mistralai_Mistral-7B-Instruct-v0.3
unitxt_card: cards.mmlu.high_school_computer_science
unitxt_recipe: card=cards.mmlu.high_school_computer_science,demos_pool_size=100,num_demos=5,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesInstructionsStateBelowPlease.enumerator_numbers_choicesSeparator_OrCapital_shuffleChoices_alphabeticalSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
quantization_type: None
quantization_bit_count: half
inference_runtime_s: 13.225769
generation_args: {"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
model_args: {"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
inference_engine: VLLM
packages_versions: {"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
scores: {'accuracy': 0.61, 'accuracy_ci_low': 0.51, 'accuracy_ci_high': 0.7, 'score_name': 'accuracy', 'score': 0.61, 'score_ci_high': 0.7, 'score_ci_low': 0.51, 'num_of_instances': 100}
num_gpu: 1
device: a100_80gb

run_id: 2c2343cedfe3531333fbc28f73b2569723922e76327c7ed5f7c6a64893767717
timestamp_utc: 1736452783370
timestamp_day_hour_utc: 1736449200000
model_name_or_path: mistralai_Mistral-7B-Instruct-v0.3
unitxt_card: cards.mmlu.college_physics
unitxt_recipe: card=cards.mmlu.college_physics,demos_pool_size=100,num_demos=0,format=formats.chat_api,template=templates.huji_workshop.MMLU.MultipleChoiceTemplatesStructuredWithTopic.enumerator_numbers_choicesSeparator_comma_shuffleChoices_lengthSortReverse,system_prompt=system_prompts.empty,demos_taken_from=train,demos_removed_from_data=True,max_test_instances=100
quantization_type: None
quantization_bit_count: half
inference_runtime_s: 3.652971
generation_args: {"n": 1, "skip_special_tokens": false, "max_tokens": 64, "seed": 42, "top_p": null, "top_k": -1, "temperature": null, "logprobs": 5, "prompt_logprobs": 1}
model_args: {"model": "mistralai/Mistral-7B-Instruct-v0.3", "seed": 0, "device": "auto", "max_num_batched_tokens": 4096, "gpu_memory_utilization": 0.7, "max_model_len": 4096, "tensor_parallel_size": 1}
inference_engine: VLLM
packages_versions: {"torch": "2.5.1", "transformers": "4.46.3", "evaluate": "0.4.0", "datasets": "2.21.0", "vllm": "0.6.4.post1", "unitxt": "1.16.4"}
scores: {'accuracy': 0.23, 'accuracy_ci_low': 0.16, 'accuracy_ci_high': 0.33, 'score_name': 'accuracy', 'score': 0.23, 'score_ci_high': 0.33, 'score_ci_low': 0.16, 'num_of_instances': 100}
num_gpu: 1
device: a100_80gb