results: dict
groups: dict
group_subtasks: dict
configs: dict
versions: dict
n-shot: dict
higher_is_better: dict
n-samples: dict
config: dict
git_hash: string
date: float64
pretty_env_info: string
transformers_version: string
lm_eval_version: string
upper_git_hash: null
tokenizer_pad_token: sequence
tokenizer_eos_token: sequence
tokenizer_bos_token: sequence
eot_token_id: int64
max_length: int64
task_hashes: dict
model_source: string
model_name: string
model_name_sanitized: string
system_instruction: null
system_instruction_sha: null
fewshot_as_multiturn: bool
chat_template: string
chat_template_sha: string
start_time: float64
end_time: float64
total_evaluation_time_seconds: string
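The field list above mirrors the top-level keys of an lm-evaluation-harness output record. A minimal sketch of reading one with plain Python, assuming the record was saved as results.json (an illustrative filename, not one given by this card):

```python
import json

# Load one lm-evaluation-harness output record.
# "results.json" is an assumed filename for illustration.
with open("results.json") as f:
    record = json.load(f)

# Top-level keys correspond to the fields listed above.
print(sorted(record))

print(record["model_name"])                     # local path of the evaluated model
print(record["lm_eval_version"])                # harness version
print(record["total_evaluation_time_seconds"])  # stored as a string, per the schema
```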
{ "ifeval": { "alias": "ifeval", "prompt_level_strict_acc,none": 0.75, "prompt_level_strict_acc_stderr,none": 0.11180339887498948, "inst_level_strict_acc,none": 0.7916666666666666, "inst_level_strict_acc_stderr,none": "N/A", "prompt_level_loose_acc,none": 0.8125, "prompt_level_loose_acc_stderr,none": 0.10077822185373188, "inst_level_loose_acc,none": 0.8333333333333334, "inst_level_loose_acc_stderr,none": "N/A" }, "mmlu": { "acc,none": 0.71875, "acc_stderr,none": 0.011540537619239823, "alias": "mmlu" }, "mmlu_humanities": { "acc,none": 0.7836538461538461, "acc_stderr,none": 0.025711042225232572, "alias": " - humanities" }, "mmlu_formal_logic": { "alias": " - formal_logic", "acc,none": 0.375, "acc_stderr,none": 0.125 }, "mmlu_high_school_european_history": { "alias": " - high_school_european_history", "acc,none": 0.875, "acc_stderr,none": 0.08539125638299665 }, "mmlu_high_school_us_history": { "alias": " - high_school_us_history", "acc,none": 0.9375, "acc_stderr,none": 0.0625 }, "mmlu_high_school_world_history": { "alias": " - high_school_world_history", "acc,none": 0.9375, "acc_stderr,none": 0.0625 }, "mmlu_international_law": { "alias": " - international_law", "acc,none": 1, "acc_stderr,none": 0 }, "mmlu_jurisprudence": { "alias": " - jurisprudence", "acc,none": 0.75, "acc_stderr,none": 0.11180339887498948 }, "mmlu_logical_fallacies": { "alias": " - logical_fallacies", "acc,none": 0.875, "acc_stderr,none": 0.08539125638299665 }, "mmlu_moral_disputes": { "alias": " - moral_disputes", "acc,none": 0.625, "acc_stderr,none": 0.125 }, "mmlu_moral_scenarios": { "alias": " - moral_scenarios", "acc,none": 0.375, "acc_stderr,none": 0.125 }, "mmlu_philosophy": { "alias": " - philosophy", "acc,none": 1, "acc_stderr,none": 0 }, "mmlu_prehistory": { "alias": " - prehistory", "acc,none": 0.8125, "acc_stderr,none": 0.10077822185373188 }, "mmlu_professional_law": { "alias": " - professional_law", "acc,none": 0.75, "acc_stderr,none": 0.11180339887498948 }, "mmlu_world_religions": { "alias": " - world_religions", "acc,none": 0.875, "acc_stderr,none": 0.08539125638299665 }, "mmlu_other": { "acc,none": 0.7980769230769231, "acc_stderr,none": 0.026739299505915654, "alias": " - other" }, "mmlu_business_ethics": { "alias": " - business_ethics", "acc,none": 0.9375, "acc_stderr,none": 0.0625 }, "mmlu_clinical_knowledge": { "alias": " - clinical_knowledge", "acc,none": 0.875, "acc_stderr,none": 0.08539125638299665 }, "mmlu_college_medicine": { "alias": " - college_medicine", "acc,none": 0.8125, "acc_stderr,none": 0.10077822185373188 }, "mmlu_global_facts": { "alias": " - global_facts", "acc,none": 0.5625, "acc_stderr,none": 0.128086884574495 }, "mmlu_human_aging": { "alias": " - human_aging", "acc,none": 0.875, "acc_stderr,none": 0.08539125638299665 }, "mmlu_management": { "alias": " - management", "acc,none": 0.8125, "acc_stderr,none": 0.10077822185373188 }, "mmlu_marketing": { "alias": " - marketing", "acc,none": 0.9375, "acc_stderr,none": 0.0625 }, "mmlu_medical_genetics": { "alias": " - medical_genetics", "acc,none": 0.875, "acc_stderr,none": 0.08539125638299665 }, "mmlu_miscellaneous": { "alias": " - miscellaneous", "acc,none": 0.8125, "acc_stderr,none": 0.10077822185373188 }, "mmlu_nutrition": { "alias": " - nutrition", "acc,none": 0.875, "acc_stderr,none": 0.08539125638299665 }, "mmlu_professional_accounting": { "alias": " - professional_accounting", "acc,none": 0.5625, "acc_stderr,none": 0.128086884574495 }, "mmlu_professional_medicine": { "alias": " - professional_medicine", "acc,none": 0.9375, 
"acc_stderr,none": 0.0625 }, "mmlu_virology": { "alias": " - virology", "acc,none": 0.5, "acc_stderr,none": 0.12909944487358055 }, "mmlu_social_sciences": { "acc,none": 0.8697916666666666, "acc_stderr,none": 0.024018643896557307, "alias": " - social sciences" }, "mmlu_econometrics": { "alias": " - econometrics", "acc,none": 0.6875, "acc_stderr,none": 0.11967838846954226 }, "mmlu_high_school_geography": { "alias": " - high_school_geography", "acc,none": 0.8125, "acc_stderr,none": 0.10077822185373188 }, "mmlu_high_school_government_and_politics": { "alias": " - high_school_government_and_politics", "acc,none": 1, "acc_stderr,none": 0 }, "mmlu_high_school_macroeconomics": { "alias": " - high_school_macroeconomics", "acc,none": 0.8125, "acc_stderr,none": 0.10077822185373188 }, "mmlu_high_school_microeconomics": { "alias": " - high_school_microeconomics", "acc,none": 0.875, "acc_stderr,none": 0.08539125638299665 }, "mmlu_high_school_psychology": { "alias": " - high_school_psychology", "acc,none": 1, "acc_stderr,none": 0 }, "mmlu_human_sexuality": { "alias": " - human_sexuality", "acc,none": 0.9375, "acc_stderr,none": 0.0625 }, "mmlu_professional_psychology": { "alias": " - professional_psychology", "acc,none": 1, "acc_stderr,none": 0 }, "mmlu_public_relations": { "alias": " - public_relations", "acc,none": 0.75, "acc_stderr,none": 0.11180339887498948 }, "mmlu_security_studies": { "alias": " - security_studies", "acc,none": 0.8125, "acc_stderr,none": 0.10077822185373188 }, "mmlu_sociology": { "alias": " - sociology", "acc,none": 0.875, "acc_stderr,none": 0.08539125638299665 }, "mmlu_us_foreign_policy": { "alias": " - us_foreign_policy", "acc,none": 0.875, "acc_stderr,none": 0.08539125638299665 }, "mmlu_stem": { "acc,none": 0.6217105263157895, "acc_stderr,none": 0.017724535659877357, "alias": "stem" }, "mmlu_abstract_algebra": { "alias": " - abstract_algebra", "acc,none": 0.4375, "acc_stderr,none": 0.128086884574495 }, "mmlu_anatomy": { "alias": " - anatomy", "acc,none": 0.75, "acc_stderr,none": 0.11180339887498948 }, "mmlu_astronomy": { "alias": " - astronomy", "acc,none": 0.875, "acc_stderr,none": 0.08539125638299665 }, "mmlu_college_biology": { "alias": " - college_biology", "acc,none": 0.9375, "acc_stderr,none": 0.0625 }, "mmlu_college_chemistry": { "alias": " - college_chemistry", "acc,none": 0.5625, "acc_stderr,none": 0.128086884574495 }, "mmlu_college_computer_science": { "alias": " - college_computer_science", "acc,none": 0.5, "acc_stderr,none": 0.12909944487358055 }, "mmlu_college_mathematics": { "alias": " - college_mathematics", "acc,none": 0.25, "acc_stderr,none": 0.11180339887498948 }, "mmlu_college_physics": { "alias": " - college_physics", "acc,none": 0.75, "acc_stderr,none": 0.11180339887498948 }, "mmlu_computer_security": { "alias": " - computer_security", "acc,none": 0.75, "acc_stderr,none": 0.11180339887498948 }, "mmlu_conceptual_physics": { "alias": " - conceptual_physics", "acc,none": 0.875, "acc_stderr,none": 0.08539125638299665 }, "mmlu_electrical_engineering": { "alias": " - electrical_engineering", "acc,none": 0.5625, "acc_stderr,none": 0.128086884574495 }, "mmlu_elementary_mathematics": { "alias": " - elementary_mathematics", "acc,none": 0.3125, "acc_stderr,none": 0.11967838846954226 }, "mmlu_high_school_biology": { "alias": " - high_school_biology", "acc,none": 1, "acc_stderr,none": 0 }, "mmlu_high_school_chemistry": { "alias": " - high_school_chemistry", "acc,none": 0.6875, "acc_stderr,none": 0.11967838846954226 }, "mmlu_high_school_computer_science": { "alias": " - 
high_school_computer_science", "acc,none": 0.875, "acc_stderr,none": 0.08539125638299665 }, "mmlu_high_school_mathematics": { "alias": " - high_school_mathematics", "acc,none": 0.1875, "acc_stderr,none": 0.10077822185373188 }, "mmlu_high_school_physics": { "alias": " - high_school_physics", "acc,none": 0.3125, "acc_stderr,none": 0.11967838846954226 }, "mmlu_high_school_statistics": { "alias": " - high_school_statistics", "acc,none": 0.625, "acc_stderr,none": 0.125 }, "mmlu_machine_learning": { "alias": " - machine_learning", "acc,none": 0.5625, "acc_stderr,none": 0.128086884574495 } }
{ "mmlu": { "acc,none": 0.71875, "acc_stderr,none": 0.011540537619239823, "alias": "mmlu" }, "mmlu_humanities": { "acc,none": 0.7836538461538461, "acc_stderr,none": 0.025711042225232572, "alias": " - humanities" }, "mmlu_other": { "acc,none": 0.7980769230769231, "acc_stderr,none": 0.026739299505915654, "alias": " - other" }, "mmlu_social_sciences": { "acc,none": 0.8697916666666666, "acc_stderr,none": 0.024018643896557307, "alias": " - social sciences" }, "mmlu_stem": { "acc,none": 0.6217105263157895, "acc_stderr,none": 0.017724535659877357, "alias": "stem" } }
{ "ifeval": [], "mmlu_humanities": [ "mmlu_philosophy", "mmlu_moral_scenarios", "mmlu_international_law", "mmlu_world_religions", "mmlu_logical_fallacies", "mmlu_high_school_european_history", "mmlu_jurisprudence", "mmlu_professional_law", "mmlu_high_school_us_history", "mmlu_formal_logic", "mmlu_moral_disputes", "mmlu_prehistory", "mmlu_high_school_world_history" ], "mmlu_social_sciences": [ "mmlu_human_sexuality", "mmlu_high_school_government_and_politics", "mmlu_professional_psychology", "mmlu_sociology", "mmlu_high_school_psychology", "mmlu_us_foreign_policy", "mmlu_security_studies", "mmlu_high_school_microeconomics", "mmlu_high_school_macroeconomics", "mmlu_high_school_geography", "mmlu_public_relations", "mmlu_econometrics" ], "mmlu_other": [ "mmlu_global_facts", "mmlu_college_medicine", "mmlu_virology", "mmlu_marketing", "mmlu_miscellaneous", "mmlu_clinical_knowledge", "mmlu_professional_accounting", "mmlu_nutrition", "mmlu_human_aging", "mmlu_business_ethics", "mmlu_professional_medicine", "mmlu_medical_genetics", "mmlu_management" ], "mmlu": [ "mmlu_stem", "mmlu_other", "mmlu_social_sciences", "mmlu_humanities" ], "mmlu_stem": [ "mmlu_high_school_chemistry", "mmlu_college_computer_science", "mmlu_high_school_computer_science", "mmlu_high_school_physics", "mmlu_high_school_biology", "mmlu_college_physics", "mmlu_conceptual_physics", "mmlu_electrical_engineering", "mmlu_elementary_mathematics", "mmlu_college_mathematics", "mmlu_machine_learning", "mmlu_college_biology", "mmlu_anatomy", "mmlu_high_school_statistics", "mmlu_astronomy", "mmlu_college_chemistry", "mmlu_high_school_mathematics", "mmlu_abstract_algebra", "mmlu_computer_security" ] }
{ "ifeval": { "task": "ifeval", "dataset_path": "google/IFEval", "test_split": "train", "doc_to_text": "prompt", "doc_to_target": 0, "unsafe_code": false, "process_results": "def process_results(doc, results):\n inp = InputExample(\n key=doc[\"key\"],\n instruction_id_list=doc[\"instruction_id_list\"],\n prompt=doc[\"prompt\"],\n kwargs=doc[\"kwargs\"],\n )\n response = results[0]\n\n out_strict = test_instruction_following_strict(inp, response)\n out_loose = test_instruction_following_loose(inp, response)\n\n return {\n \"prompt_level_strict_acc\": out_strict.follow_all_instructions,\n \"inst_level_strict_acc\": out_strict.follow_instruction_list,\n \"prompt_level_loose_acc\": out_loose.follow_all_instructions,\n \"inst_level_loose_acc\": out_loose.follow_instruction_list,\n }\n", "description": "", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "num_fewshot": 0, "metric_list": [ { "metric": "prompt_level_strict_acc", "aggregation": "mean", "higher_is_better": true }, { "metric": "inst_level_strict_acc", "aggregation": "def agg_inst_level_acc(items):\n flat_items = [item for sublist in items for item in sublist]\n inst_level_acc = sum(flat_items) / len(flat_items)\n return inst_level_acc\n", "higher_is_better": true }, { "metric": "prompt_level_loose_acc", "aggregation": "mean", "higher_is_better": true }, { "metric": "inst_level_loose_acc", "aggregation": "def agg_inst_level_acc(items):\n flat_items = [item for sublist in items for item in sublist]\n inst_level_acc = sum(flat_items) / len(flat_items)\n return inst_level_acc\n", "higher_is_better": true } ], "output_type": "generate_until", "generation_kwargs": { "until": [], "do_sample": false, "temperature": 0, "max_gen_toks": 1280 }, "repeats": 1, "should_decontaminate": false, "metadata": { "version": 4, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_abstract_algebra": { "task": "mmlu_abstract_algebra", "task_alias": "abstract_algebra", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "abstract_algebra", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about abstract algebra.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_anatomy": { "task": "mmlu_anatomy", "task_alias": "anatomy", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "anatomy", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about anatomy.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_astronomy": { "task": "mmlu_astronomy", "task_alias": "astronomy", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "astronomy", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about astronomy.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_business_ethics": { "task": "mmlu_business_ethics", "task_alias": "business_ethics", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "business_ethics", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. 
{{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about business ethics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_clinical_knowledge": { "task": "mmlu_clinical_knowledge", "task_alias": "clinical_knowledge", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "clinical_knowledge", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about clinical knowledge.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_college_biology": { "task": "mmlu_college_biology", "task_alias": "college_biology", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "college_biology", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about college biology.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_college_chemistry": { "task": "mmlu_college_chemistry", "task_alias": "college_chemistry", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "college_chemistry", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about college chemistry.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_college_computer_science": { "task": "mmlu_college_computer_science", "task_alias": "college_computer_science", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "college_computer_science", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about college computer science.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_college_mathematics": { "task": "mmlu_college_mathematics", "task_alias": "college_mathematics", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "college_mathematics", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about college mathematics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_college_medicine": { "task": "mmlu_college_medicine", "task_alias": "college_medicine", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "college_medicine", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about college medicine.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_college_physics": { "task": "mmlu_college_physics", "task_alias": "college_physics", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "college_physics", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about college physics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_computer_security": { "task": "mmlu_computer_security", "task_alias": "computer_security", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "computer_security", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about computer security.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_conceptual_physics": { "task": "mmlu_conceptual_physics", "task_alias": "conceptual_physics", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "conceptual_physics", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about conceptual physics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_econometrics": { "task": "mmlu_econometrics", "task_alias": "econometrics", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "econometrics", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about econometrics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_electrical_engineering": { "task": "mmlu_electrical_engineering", "task_alias": "electrical_engineering", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "electrical_engineering", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about electrical engineering.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_elementary_mathematics": { "task": "mmlu_elementary_mathematics", "task_alias": "elementary_mathematics", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "elementary_mathematics", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about elementary mathematics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_formal_logic": { "task": "mmlu_formal_logic", "task_alias": "formal_logic", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "formal_logic", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about formal logic.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_global_facts": { "task": "mmlu_global_facts", "task_alias": "global_facts", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "global_facts", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about global facts.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_high_school_biology": { "task": "mmlu_high_school_biology", "task_alias": "high_school_biology", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_biology", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school biology.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_high_school_chemistry": { "task": "mmlu_high_school_chemistry", "task_alias": "high_school_chemistry", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_chemistry", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school chemistry.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_high_school_computer_science": { "task": "mmlu_high_school_computer_science", "task_alias": "high_school_computer_science", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_computer_science", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school computer science.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_high_school_european_history": { "task": "mmlu_high_school_european_history", "task_alias": "high_school_european_history", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_european_history", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school european history.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_high_school_geography": { "task": "mmlu_high_school_geography", "task_alias": "high_school_geography", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_geography", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school geography.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_high_school_government_and_politics": { "task": "mmlu_high_school_government_and_politics", "task_alias": "high_school_government_and_politics", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_government_and_politics", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school government and politics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_high_school_macroeconomics": { "task": "mmlu_high_school_macroeconomics", "task_alias": "high_school_macroeconomics", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_macroeconomics", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school macroeconomics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_high_school_mathematics": { "task": "mmlu_high_school_mathematics", "task_alias": "high_school_mathematics", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_mathematics", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school mathematics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_high_school_microeconomics": { "task": "mmlu_high_school_microeconomics", "task_alias": "high_school_microeconomics", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_microeconomics", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school microeconomics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_high_school_physics": { "task": "mmlu_high_school_physics", "task_alias": "high_school_physics", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_physics", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school physics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_high_school_psychology": { "task": "mmlu_high_school_psychology", "task_alias": "high_school_psychology", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_psychology", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school psychology.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_high_school_statistics": { "task": "mmlu_high_school_statistics", "task_alias": "high_school_statistics", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_statistics", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school statistics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_high_school_us_history": { "task": "mmlu_high_school_us_history", "task_alias": "high_school_us_history", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_us_history", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school us history.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_high_school_world_history": { "task": "mmlu_high_school_world_history", "task_alias": "high_school_world_history", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_world_history", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school world history.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_human_aging": { "task": "mmlu_human_aging", "task_alias": "human_aging", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "human_aging", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about human aging.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_human_sexuality": { "task": "mmlu_human_sexuality", "task_alias": "human_sexuality", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "human_sexuality", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about human sexuality.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_international_law": { "task": "mmlu_international_law", "task_alias": "international_law", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "international_law", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about international law.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_jurisprudence": { "task": "mmlu_jurisprudence", "task_alias": "jurisprudence", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "jurisprudence", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about jurisprudence.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_logical_fallacies": { "task": "mmlu_logical_fallacies", "task_alias": "logical_fallacies", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "logical_fallacies", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about logical fallacies.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_machine_learning": { "task": "mmlu_machine_learning", "task_alias": "machine_learning", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "machine_learning", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about machine learning.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_management": { "task": "mmlu_management", "task_alias": "management", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "management", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about management.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_marketing": { "task": "mmlu_marketing", "task_alias": "marketing", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "marketing", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about marketing.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_medical_genetics": { "task": "mmlu_medical_genetics", "task_alias": "medical_genetics", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "medical_genetics", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. 
{{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about medical genetics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_miscellaneous": { "task": "mmlu_miscellaneous", "task_alias": "miscellaneous", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "miscellaneous", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about miscellaneous.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_moral_disputes": { "task": "mmlu_moral_disputes", "task_alias": "moral_disputes", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "moral_disputes", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about moral disputes.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_moral_scenarios": { "task": "mmlu_moral_scenarios", "task_alias": "moral_scenarios", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "moral_scenarios", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about moral scenarios.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_nutrition": { "task": "mmlu_nutrition", "task_alias": "nutrition", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "nutrition", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about nutrition.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_philosophy": { "task": "mmlu_philosophy", "task_alias": "philosophy", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "philosophy", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. 
{{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about philosophy.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_prehistory": { "task": "mmlu_prehistory", "task_alias": "prehistory", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "prehistory", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about prehistory.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_professional_accounting": { "task": "mmlu_professional_accounting", "task_alias": "professional_accounting", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "professional_accounting", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about professional accounting.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_professional_law": { "task": "mmlu_professional_law", "task_alias": "professional_law", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "professional_law", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about professional law.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_professional_medicine": { "task": "mmlu_professional_medicine", "task_alias": "professional_medicine", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "professional_medicine", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about professional medicine.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_professional_psychology": { "task": "mmlu_professional_psychology", "task_alias": "professional_psychology", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "professional_psychology", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about professional psychology.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_public_relations": { "task": "mmlu_public_relations", "task_alias": "public_relations", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "public_relations", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about public relations.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_security_studies": { "task": "mmlu_security_studies", "task_alias": "security_studies", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "security_studies", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about security studies.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_sociology": { "task": "mmlu_sociology", "task_alias": "sociology", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "sociology", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about sociology.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_us_foreign_policy": { "task": "mmlu_us_foreign_policy", "task_alias": "us_foreign_policy", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "us_foreign_policy", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about us foreign policy.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_virology": { "task": "mmlu_virology", "task_alias": "virology", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "virology", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about virology.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } }, "mmlu_world_religions": { "task": "mmlu_world_religions", "task_alias": "world_religions", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "world_religions", "dataset_kwargs": { "trust_remote_code": true }, "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about world religions.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n" }, "num_fewshot": 0, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1, "pretrained": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "tokenizer": "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b", "max_gen_toks": 4096, "max_model_len": 8192, "enable_prefix_caching": true, "enable_chunked_prefill": true, "tensor_parallel_size": 8 } } }
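All of the MMLU subtask configs above share one "doc_to_text" Jinja template and the same "doc_to_choice" letters. As a minimal sketch of what that template does (the sample question below is invented for illustration; lm-evaluation-harness performs this rendering internally):

from jinja2 import Template

# The shared prompt template recorded in every "doc_to_text" field above.
doc_to_text = Template(
    "{{question.strip()}}\n"
    "A. {{choices[0]}}\n"
    "B. {{choices[1]}}\n"
    "C. {{choices[2]}}\n"
    "D. {{choices[3]}}\n"
    "Answer:"
)

# Hypothetical dataset row, shaped like a cais/mmlu example.
doc = {
    "question": "Which gas makes up most of Earth's atmosphere?",
    "choices": ["Oxygen", "Carbon dioxide", "Nitrogen", "Argon"],
    "answer": 2,  # index into doc_to_choice ["A", "B", "C", "D"], so the target letter is "C"
}

print(doc_to_text.render(**doc))

The per-task "description" string is prepended to the prompt, and "target_delimiter" (a single space) separates the rendered prompt from the gold letter when each choice is scored.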
{ "ifeval": 4, "mmlu": 2, "mmlu_abstract_algebra": 1, "mmlu_anatomy": 1, "mmlu_astronomy": 1, "mmlu_business_ethics": 1, "mmlu_clinical_knowledge": 1, "mmlu_college_biology": 1, "mmlu_college_chemistry": 1, "mmlu_college_computer_science": 1, "mmlu_college_mathematics": 1, "mmlu_college_medicine": 1, "mmlu_college_physics": 1, "mmlu_computer_security": 1, "mmlu_conceptual_physics": 1, "mmlu_econometrics": 1, "mmlu_electrical_engineering": 1, "mmlu_elementary_mathematics": 1, "mmlu_formal_logic": 1, "mmlu_global_facts": 1, "mmlu_high_school_biology": 1, "mmlu_high_school_chemistry": 1, "mmlu_high_school_computer_science": 1, "mmlu_high_school_european_history": 1, "mmlu_high_school_geography": 1, "mmlu_high_school_government_and_politics": 1, "mmlu_high_school_macroeconomics": 1, "mmlu_high_school_mathematics": 1, "mmlu_high_school_microeconomics": 1, "mmlu_high_school_physics": 1, "mmlu_high_school_psychology": 1, "mmlu_high_school_statistics": 1, "mmlu_high_school_us_history": 1, "mmlu_high_school_world_history": 1, "mmlu_human_aging": 1, "mmlu_human_sexuality": 1, "mmlu_humanities": 2, "mmlu_international_law": 1, "mmlu_jurisprudence": 1, "mmlu_logical_fallacies": 1, "mmlu_machine_learning": 1, "mmlu_management": 1, "mmlu_marketing": 1, "mmlu_medical_genetics": 1, "mmlu_miscellaneous": 1, "mmlu_moral_disputes": 1, "mmlu_moral_scenarios": 1, "mmlu_nutrition": 1, "mmlu_other": 2, "mmlu_philosophy": 1, "mmlu_prehistory": 1, "mmlu_professional_accounting": 1, "mmlu_professional_law": 1, "mmlu_professional_medicine": 1, "mmlu_professional_psychology": 1, "mmlu_public_relations": 1, "mmlu_security_studies": 1, "mmlu_social_sciences": 2, "mmlu_sociology": 1, "mmlu_stem": 2, "mmlu_us_foreign_policy": 1, "mmlu_virology": 1, "mmlu_world_religions": 1 }
{ "ifeval": 0, "mmlu_abstract_algebra": 0, "mmlu_anatomy": 0, "mmlu_astronomy": 0, "mmlu_business_ethics": 0, "mmlu_clinical_knowledge": 0, "mmlu_college_biology": 0, "mmlu_college_chemistry": 0, "mmlu_college_computer_science": 0, "mmlu_college_mathematics": 0, "mmlu_college_medicine": 0, "mmlu_college_physics": 0, "mmlu_computer_security": 0, "mmlu_conceptual_physics": 0, "mmlu_econometrics": 0, "mmlu_electrical_engineering": 0, "mmlu_elementary_mathematics": 0, "mmlu_formal_logic": 0, "mmlu_global_facts": 0, "mmlu_high_school_biology": 0, "mmlu_high_school_chemistry": 0, "mmlu_high_school_computer_science": 0, "mmlu_high_school_european_history": 0, "mmlu_high_school_geography": 0, "mmlu_high_school_government_and_politics": 0, "mmlu_high_school_macroeconomics": 0, "mmlu_high_school_mathematics": 0, "mmlu_high_school_microeconomics": 0, "mmlu_high_school_physics": 0, "mmlu_high_school_psychology": 0, "mmlu_high_school_statistics": 0, "mmlu_high_school_us_history": 0, "mmlu_high_school_world_history": 0, "mmlu_human_aging": 0, "mmlu_human_sexuality": 0, "mmlu_international_law": 0, "mmlu_jurisprudence": 0, "mmlu_logical_fallacies": 0, "mmlu_machine_learning": 0, "mmlu_management": 0, "mmlu_marketing": 0, "mmlu_medical_genetics": 0, "mmlu_miscellaneous": 0, "mmlu_moral_disputes": 0, "mmlu_moral_scenarios": 0, "mmlu_nutrition": 0, "mmlu_philosophy": 0, "mmlu_prehistory": 0, "mmlu_professional_accounting": 0, "mmlu_professional_law": 0, "mmlu_professional_medicine": 0, "mmlu_professional_psychology": 0, "mmlu_public_relations": 0, "mmlu_security_studies": 0, "mmlu_sociology": 0, "mmlu_us_foreign_policy": 0, "mmlu_virology": 0, "mmlu_world_religions": 0 }
{ "ifeval": { "prompt_level_strict_acc": true, "inst_level_strict_acc": true, "prompt_level_loose_acc": true, "inst_level_loose_acc": true }, "mmlu": { "acc": true }, "mmlu_abstract_algebra": { "acc": true }, "mmlu_anatomy": { "acc": true }, "mmlu_astronomy": { "acc": true }, "mmlu_business_ethics": { "acc": true }, "mmlu_clinical_knowledge": { "acc": true }, "mmlu_college_biology": { "acc": true }, "mmlu_college_chemistry": { "acc": true }, "mmlu_college_computer_science": { "acc": true }, "mmlu_college_mathematics": { "acc": true }, "mmlu_college_medicine": { "acc": true }, "mmlu_college_physics": { "acc": true }, "mmlu_computer_security": { "acc": true }, "mmlu_conceptual_physics": { "acc": true }, "mmlu_econometrics": { "acc": true }, "mmlu_electrical_engineering": { "acc": true }, "mmlu_elementary_mathematics": { "acc": true }, "mmlu_formal_logic": { "acc": true }, "mmlu_global_facts": { "acc": true }, "mmlu_high_school_biology": { "acc": true }, "mmlu_high_school_chemistry": { "acc": true }, "mmlu_high_school_computer_science": { "acc": true }, "mmlu_high_school_european_history": { "acc": true }, "mmlu_high_school_geography": { "acc": true }, "mmlu_high_school_government_and_politics": { "acc": true }, "mmlu_high_school_macroeconomics": { "acc": true }, "mmlu_high_school_mathematics": { "acc": true }, "mmlu_high_school_microeconomics": { "acc": true }, "mmlu_high_school_physics": { "acc": true }, "mmlu_high_school_psychology": { "acc": true }, "mmlu_high_school_statistics": { "acc": true }, "mmlu_high_school_us_history": { "acc": true }, "mmlu_high_school_world_history": { "acc": true }, "mmlu_human_aging": { "acc": true }, "mmlu_human_sexuality": { "acc": true }, "mmlu_humanities": { "acc": true }, "mmlu_international_law": { "acc": true }, "mmlu_jurisprudence": { "acc": true }, "mmlu_logical_fallacies": { "acc": true }, "mmlu_machine_learning": { "acc": true }, "mmlu_management": { "acc": true }, "mmlu_marketing": { "acc": true }, "mmlu_medical_genetics": { "acc": true }, "mmlu_miscellaneous": { "acc": true }, "mmlu_moral_disputes": { "acc": true }, "mmlu_moral_scenarios": { "acc": true }, "mmlu_nutrition": { "acc": true }, "mmlu_other": { "acc": true }, "mmlu_philosophy": { "acc": true }, "mmlu_prehistory": { "acc": true }, "mmlu_professional_accounting": { "acc": true }, "mmlu_professional_law": { "acc": true }, "mmlu_professional_medicine": { "acc": true }, "mmlu_professional_psychology": { "acc": true }, "mmlu_public_relations": { "acc": true }, "mmlu_security_studies": { "acc": true }, "mmlu_social_sciences": { "acc": true }, "mmlu_sociology": { "acc": true }, "mmlu_stem": { "acc": true }, "mmlu_us_foreign_policy": { "acc": true }, "mmlu_virology": { "acc": true }, "mmlu_world_religions": { "acc": true } }
{ "mmlu_high_school_chemistry": { "original": 203, "effective": 16 }, "mmlu_college_computer_science": { "original": 100, "effective": 16 }, "mmlu_high_school_computer_science": { "original": 100, "effective": 16 }, "mmlu_high_school_physics": { "original": 151, "effective": 16 }, "mmlu_high_school_biology": { "original": 310, "effective": 16 }, "mmlu_college_physics": { "original": 102, "effective": 16 }, "mmlu_conceptual_physics": { "original": 235, "effective": 16 }, "mmlu_electrical_engineering": { "original": 145, "effective": 16 }, "mmlu_elementary_mathematics": { "original": 378, "effective": 16 }, "mmlu_college_mathematics": { "original": 100, "effective": 16 }, "mmlu_machine_learning": { "original": 112, "effective": 16 }, "mmlu_college_biology": { "original": 144, "effective": 16 }, "mmlu_anatomy": { "original": 135, "effective": 16 }, "mmlu_high_school_statistics": { "original": 216, "effective": 16 }, "mmlu_astronomy": { "original": 152, "effective": 16 }, "mmlu_college_chemistry": { "original": 100, "effective": 16 }, "mmlu_high_school_mathematics": { "original": 270, "effective": 16 }, "mmlu_abstract_algebra": { "original": 100, "effective": 16 }, "mmlu_computer_security": { "original": 100, "effective": 16 }, "mmlu_global_facts": { "original": 100, "effective": 16 }, "mmlu_college_medicine": { "original": 173, "effective": 16 }, "mmlu_virology": { "original": 166, "effective": 16 }, "mmlu_marketing": { "original": 234, "effective": 16 }, "mmlu_miscellaneous": { "original": 783, "effective": 16 }, "mmlu_clinical_knowledge": { "original": 265, "effective": 16 }, "mmlu_professional_accounting": { "original": 282, "effective": 16 }, "mmlu_nutrition": { "original": 306, "effective": 16 }, "mmlu_human_aging": { "original": 223, "effective": 16 }, "mmlu_business_ethics": { "original": 100, "effective": 16 }, "mmlu_professional_medicine": { "original": 272, "effective": 16 }, "mmlu_medical_genetics": { "original": 100, "effective": 16 }, "mmlu_management": { "original": 103, "effective": 16 }, "mmlu_human_sexuality": { "original": 131, "effective": 16 }, "mmlu_high_school_government_and_politics": { "original": 193, "effective": 16 }, "mmlu_professional_psychology": { "original": 612, "effective": 16 }, "mmlu_sociology": { "original": 201, "effective": 16 }, "mmlu_high_school_psychology": { "original": 545, "effective": 16 }, "mmlu_us_foreign_policy": { "original": 100, "effective": 16 }, "mmlu_security_studies": { "original": 245, "effective": 16 }, "mmlu_high_school_microeconomics": { "original": 238, "effective": 16 }, "mmlu_high_school_macroeconomics": { "original": 390, "effective": 16 }, "mmlu_high_school_geography": { "original": 198, "effective": 16 }, "mmlu_public_relations": { "original": 110, "effective": 16 }, "mmlu_econometrics": { "original": 114, "effective": 16 }, "mmlu_philosophy": { "original": 311, "effective": 16 }, "mmlu_moral_scenarios": { "original": 895, "effective": 16 }, "mmlu_international_law": { "original": 121, "effective": 16 }, "mmlu_world_religions": { "original": 171, "effective": 16 }, "mmlu_logical_fallacies": { "original": 163, "effective": 16 }, "mmlu_high_school_european_history": { "original": 165, "effective": 16 }, "mmlu_jurisprudence": { "original": 108, "effective": 16 }, "mmlu_professional_law": { "original": 1534, "effective": 16 }, "mmlu_high_school_us_history": { "original": 204, "effective": 16 }, "mmlu_formal_logic": { "original": 126, "effective": 16 }, "mmlu_moral_disputes": { "original": 346, "effective": 16 }, "mmlu_prehistory": 
{ "original": 324, "effective": 16 }, "mmlu_high_school_world_history": { "original": 237, "effective": 16 }, "ifeval": { "original": 541, "effective": 16 } }
{ "model": "vllm", "model_args": "pretrained=/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b,tokenizer=/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b,max_gen_toks=4096,max_model_len=8192,enable_prefix_caching=True,enable_chunked_prefill=True,tensor_parallel_size=8", "batch_size": "auto", "batch_sizes": [], "device": null, "use_cache": null, "limit": 16, "bootstrap_iters": 100000, "gen_kwargs": null, "random_seed": 0, "numpy_seed": 1234, "torch_seed": 1234, "fewshot_seed": 1234 }
3f792954
1749007490.84404
PyTorch version: 2.7.0+cu126
Is debug build: False
CUDA used to build PyTorch: 12.6
ROCM used to build PyTorch: N/A

OS: Ubuntu 24.04.2 LTS (x86_64)
GCC version: (Ubuntu 13.3.0-6ubuntu2~24.04) 13.3.0
Clang version: Could not collect
CMake version: Could not collect
Libc version: glibc-2.39

Python version: 3.12.3 (main, Feb 4 2025, 14:48:35) [GCC 13.3.0] (64-bit runtime)
Python platform: Linux-6.8.0-60-generic-x86_64-with-glibc2.39
Is CUDA available: True
CUDA runtime version: Could not collect
CUDA_MODULE_LOADING set to: LAZY
GPU models and configuration:
GPU 0: NVIDIA GeForce RTX 3090
GPU 1: NVIDIA GeForce RTX 3090
GPU 2: NVIDIA GeForce RTX 3090
GPU 3: NVIDIA GeForce RTX 3090
GPU 4: NVIDIA GeForce RTX 3090
GPU 5: NVIDIA GeForce RTX 3090
GPU 6: NVIDIA GeForce RTX 3090
GPU 7: NVIDIA GeForce RTX 3090
Nvidia driver version: 575.51.03
cuDNN version: Could not collect
HIP runtime version: N/A
MIOpen runtime version: N/A
Is XNNPACK available: True

CPU:
Architecture: x86_64
CPU op-mode(s): 32-bit, 64-bit
Address sizes: 43 bits physical, 48 bits virtual
Byte Order: Little Endian
CPU(s): 64
On-line CPU(s) list: 0-63
Vendor ID: AuthenticAMD
Model name: AMD EPYC 7702 64-Core Processor
CPU family: 23
Model: 49
Thread(s) per core: 1
Core(s) per socket: 64
Socket(s): 1
Stepping: 0
Frequency boost: enabled
CPU(s) scaling MHz: 80%
CPU max MHz: 2183.5930
CPU min MHz: 1500.0000
BogoMIPS: 3999.61
Flags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl nonstop_tsc cpuid extd_apicid aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw ibs skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb cat_l3 cdp_l3 hw_pstate ssbd mba ibrs ibpb stibp vmmcall fsgsbase bmi1 avx2 smep bmi2 cqm rdt_a rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 xsaves cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local clzero irperf xsaveerptr rdpru wbnoinvd amd_ppin arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif v_spec_ctrl umip rdpid overflow_recov succor smca sev sev_es
Virtualization: AMD-V
L1d cache: 2 MiB (64 instances)
L1i cache: 2 MiB (64 instances)
L2 cache: 32 MiB (64 instances)
L3 cache: 256 MiB (16 instances)
NUMA node(s): 1
NUMA node0 CPU(s): 0-63
Vulnerability Gather data sampling: Not affected
Vulnerability Itlb multihit: Not affected
Vulnerability L1tf: Not affected
Vulnerability Mds: Not affected
Vulnerability Meltdown: Not affected
Vulnerability Mmio stale data: Not affected
Vulnerability Reg file data sampling: Not affected
Vulnerability Retbleed: Mitigation; untrained return thunk; SMT disabled
Vulnerability Spec rstack overflow: Mitigation; SMT disabled
Vulnerability Spec store bypass: Mitigation; Speculative Store Bypass disabled via prctl
Vulnerability Spectre v1: Mitigation; usercopy/swapgs barriers and __user pointer sanitization
Vulnerability Spectre v2: Mitigation; Retpolines; IBPB conditional; STIBP disabled; RSB filling; PBRSB-eIBRS Not affected; BHI Not affected
Vulnerability Srbds: Not affected
Vulnerability Tsx async abort: Not affected

Versions of relevant libraries:
[pip3] numpy==2.2.6
[pip3] nvidia-cublas-cu12==12.6.4.1
[pip3] nvidia-cuda-cupti-cu12==12.6.80
[pip3] nvidia-cuda-nvrtc-cu12==12.6.77
[pip3] nvidia-cuda-runtime-cu12==12.6.77
[pip3] nvidia-cudnn-cu12==9.5.1.17
[pip3] nvidia-cufft-cu12==11.3.0.4
[pip3] nvidia-curand-cu12==10.3.7.77
[pip3] nvidia-cusolver-cu12==11.7.1.2
[pip3] nvidia-cusparse-cu12==12.5.4.2
[pip3] nvidia-cusparselt-cu12==0.6.3
[pip3] nvidia-nccl-cu12==2.26.2
[pip3] nvidia-nvjitlink-cu12==12.6.85
[pip3] nvidia-nvtx-cu12==12.6.77
[pip3] torch==2.7.0
[pip3] torchaudio==2.7.0
[pip3] torchvision==0.22.0
[pip3] triton==3.3.0
[conda] Could not collect
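The environment report above is the standard output of PyTorch's environment collector and can be regenerated on the same machine:

# Prints the same "PyTorch version / CUDA / GPU / CPU / library versions" report.
from torch.utils.collect_env import get_pretty_env_info

print(get_pretty_env_info())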
4.52.4
0.4.8
null
[ "<pad>", "11" ]
[ "<|im_end|>", "2" ]
[ "<s>", "1" ]
2
8192
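The three tokenizer fields above are [token, id] pairs, eot_token_id matches the EOS id, and max_length mirrors max_model_len. A sketch of reading the same values straight off the tokenizer (the model path is local to the machine that ran the eval):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(
    "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b"
)
print(tok.pad_token, tok.pad_token_id)  # expected: <pad> 11
print(tok.eos_token, tok.eos_token_id)  # expected: <|im_end|> 2 (the eot_token_id above)
print(tok.bos_token, tok.bos_token_id)  # expected: <s> 1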
{ "mmlu_high_school_chemistry": "04b8485697f03f4b11ff38151e36940bd9dbf5ca082ac4064e997761b889578d", "mmlu_college_computer_science": "839d136fd6235ec4cb39c625a5609e2b2105a0f8f2cb27c4e3a69c4a4aacf4e0", "mmlu_high_school_computer_science": "c531a331b60206696b995c66971a0e33d169c4179b3d7311ccd860398d60494c", "mmlu_high_school_physics": "db7860431cd6131f26be549576d7c502a47c14c9e1a606e75e39f39291a86837", "mmlu_high_school_biology": "bfbb1b6bb0a518b372876b91b3fdad027c640ad3ecbbb40f75992e975830bee4", "mmlu_college_physics": "9733cbd505a479d0f0b7c6b6881061ec1caaeb19091f44a98bfe29f6640b7359", "mmlu_conceptual_physics": "a7fc4042f4356c68edc58c93159ad9c7d8905e06e5af9c5d975933e14988d6f6", "mmlu_electrical_engineering": "1dfacd3fb011e4b5db00d40be7344f478b40fc533cbb355733c0cb22fc944e52", "mmlu_elementary_mathematics": "d80e36718c2d8880c7aa567e1a487ea3f9c901908c1b9541b90b0d24af5f0797", "mmlu_college_mathematics": "65626a40ef8b820decd2b3d95ce57104fc9e9992850ba217a140950f340c7c9b", "mmlu_machine_learning": "400b2df05cbe0558af48b3bb89b8ccacf394977845e3f78e71082adfca996867", "mmlu_college_biology": "bca92f70e5181bb068ff602b4e4fa8edff58d3fb6bb8738aa8bfd21f7c5d8042", "mmlu_anatomy": "21ff43a9cfcf09472865aee208a78046e95a170939ccee9e839a216688a1e6e5", "mmlu_high_school_statistics": "44ac08961dc3b47e233cb135090518b985eb7f7874fb0414c57d4959321760ed", "mmlu_astronomy": "33202f5de4adc7a2fcc5d0aa43a189ec569822e734e7f0616e40990d8ec5e99e", "mmlu_college_chemistry": "b1e29d6d52c92887fd850e6acf70bcb8288087c3b02acc435da7b3456d16f13c", "mmlu_high_school_mathematics": "5c2886769a6a9d87db472ae2967f5c3b220f9dcd54666e70cfcbc4a07c6103b3", "mmlu_abstract_algebra": "306dad68e247792c7b6d8c938938ce158d9ca1c9b1d20e31cdcf3ca93f623459", "mmlu_computer_security": "2984c9720f48d13ff1c9e1979903f75ec48d4d931ad9a34e292d7dfa554b10ff", "mmlu_global_facts": "a00d829a1975a3963a76be607d75efd4e1b7e2dbf50d53b76785a3cff3981d8e", "mmlu_college_medicine": "2c2ee7aeb456f696d5b2357c9862fbc085dc247c6e2df5cfb43c42187f317029", "mmlu_virology": "96ee91c7545c91976f28b4855850b94a5ab8884f1e7a5e885ea8e5df002dc6ae", "mmlu_marketing": "49b3d44ff3a5f772e7167c5fc1bebadc99e26fac3c3b4294a7c49364bd1d3cfa", "mmlu_miscellaneous": "ed50282373369554165afd01968c91bddc2cf8f8080c293311c8b17669659893", "mmlu_clinical_knowledge": "066d8513877a9e0d70f8c770120dff4de2e415177f8c6e17c66aac808dc3b1d2", "mmlu_professional_accounting": "47f05bdb034dadc2620cf53a54a0cd5b0fa4d1820498c6837583423d6eb526b5", "mmlu_nutrition": "e0beda3b6e049d673cc0bb64bb363a90c80b501422e16e24f90bd60c67b1110e", "mmlu_human_aging": "9d9dec33a218846fa7d4fd665d8c8aca05c4351c1f99495533a44db671bc970d", "mmlu_business_ethics": "2b3dbab994f96f5d559256023d90ab11a6ba29af256b5c89ee9f701b355497e4", "mmlu_professional_medicine": "08b6cd402cd4b86216cafa18c3d9b9fd9f012be70d62db835ca0371fde8195f4", "mmlu_medical_genetics": "79400878f3a16f33a1ce466b5f2c70578bf065430fcefe20f5fa7edb9fbaf2eb", "mmlu_management": "164bce28224f6161b676a96cdd46a0fce09fc422a56a00c5d71f6cdec8aa6b50", "mmlu_human_sexuality": "bf3ca8c82250a2b329dce29a4f69a44c8976c5fbe2d2a28e3c618f05ec0ef1b1", "mmlu_high_school_government_and_politics": "3a1145c3a0649a1221db1cc3836fff6e5742280b5fabe1309ac7e0386abb45ab", "mmlu_professional_psychology": "ed47f7209b780f4497c9f1f6db90e14ab17bded96f78ad39e98925de8d02f5b5", "mmlu_sociology": "83e3f94cf89b744b4a79a48ab860d4c0f9638ff1b6cd8222d4cf586b3a9428d5", "mmlu_high_school_psychology": "8c117997d78eaeaa5b158d0cda8286fa3c5d47f9f9f6adb4a1115782847f1099", "mmlu_us_foreign_policy": 
"a7e6ca41efb45d1a5ecb7501bd52b5b0bcd512eec5b841d8a731c8e853a6e17e", "mmlu_security_studies": "f2e45eb4a1cd1e52bae35705401857a943d06ac08e527805b01345ccb2ef130e", "mmlu_high_school_microeconomics": "a3fb188b99424ab9f3b80e241a8bfc0b02d22495c250eece1ad06f5ac15a859f", "mmlu_high_school_macroeconomics": "87080803352e610038354ae18c07e07d70e643dc81b5e659774cf38e0f42bce8", "mmlu_high_school_geography": "55b1af364b60255c1b134a613cba08ab2848790c9a4852b1a95158c601c1429c", "mmlu_public_relations": "41d8427538878b3bfb9965c70799bbb543d012531ddd6fccfdb7c53a664d68ec", "mmlu_econometrics": "ed1aa19429e751c05873116a91bc9eb8b98741255f07daee817abc3603ed62d6", "mmlu_philosophy": "d5294760472bb25a2b99f1d08d8f05e25c7525a0dbc9b70f9ad7f014e1b61496", "mmlu_moral_scenarios": "f95fa942f7bfedb10f82610183883a66335e36a8d37b1c4daa181386e801b762", "mmlu_international_law": "3acac0c014f3291a3a2311d06b48f39768cf2e66e2c8d48fc13b82e350e8949c", "mmlu_world_religions": "4fba9c400cf816bd8c6ad676ac5b5fd759e69c95effb5f4bd97cd1aef5cddfed", "mmlu_logical_fallacies": "535b8a3413c603a46202d7b8a11216feeec3ae23404b662ca16958d072d1d9ff", "mmlu_high_school_european_history": "5b543b4386c9e53ec77950345ef22fb5796156a35af077f6d1b96151e014d118", "mmlu_jurisprudence": "185ebca8d7574ba10be8cf85378322d60a6a6c2ab8ac1a87a2391ec48dcc15f3", "mmlu_professional_law": "a14fbc179eb8ebf0e987525955c3420bd27c0efd60faff85dadb8b7f8a89f568", "mmlu_high_school_us_history": "61ce55ee84021252cf3fedf6dc70dd1af047bcade3560727fe3842f20c05ab4c", "mmlu_formal_logic": "978cea32140627c9ab7867190ea2ef18211210b179116fa1899f52ffc488a0e6", "mmlu_moral_disputes": "9c8b23b0b46cd38765b597cc8468e62919c241af7120cf2b21891ad79a764110", "mmlu_prehistory": "d9ce273d481a02ba1a66144cb0d974491f9098500c4326e5e4f4d28890258dff", "mmlu_high_school_world_history": "37ac314ee41259b6354e1795086dc73727c6b0c70b1c028ff3f8e138f5f70442", "ifeval": "3b702ca57b63cb129c78cdd813bdff55b9c801e8b3e5e746aa875c1ebe821ef8" }
vllm
/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b
__home__quixi__Mango__models__PocketDoc_Dans-PersonalityEngine-V1.2.0-24b
null
null
true
{{ bos_token }}
{%- set loop_messages = messages %}
{%- for message in loop_messages %}
    {%- set content = '<|im_start|>' + message['role'] + '\n' + message['content'] | trim %}
    {%- if loop.index0 == 0 %}
        {%- set content = content %}
    {%- endif %}
    {%- if not (loop.last and message['role'] == 'assistant') %}
        {%- set content = content + '<|im_end|>\n' %}
    {%- endif %}
    {{- content }}
{%- endfor %}
{%- if messages[-1]['role'] != 'assistant' %}
    {{- '<|im_start|>assistant\n' }}
{%- endif %}
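This is a ChatML-style template with a BOS prefix; every {%- ... %} tag strips the whitespace before it, so the reflowed layout renders identically to the flattened original. A sketch of applying it (the message is invented for illustration):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(
    "/home/quixi/Mango/models/PocketDoc_Dans-PersonalityEngine-V1.2.0-24b"
)
messages = [{"role": "user", "content": "Hello!"}]
print(tok.apply_chat_template(messages, tokenize=False))
# Expected: <s><|im_start|>user\nHello!<|im_end|>\n<|im_start|>assistant\n
# The template appends the assistant header itself whenever the last message
# is not from the assistant, so no add_generation_prompt flag is needed.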
fb4aa2c12fdd53c3a2bb6a85ecdfc0082988bc68212e14f3a7c2f8055b0d84ac
87247.277149
87894.242174
646.965024264995
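The three timing fields are mutually consistent: the total is simply end_time minus start_time (the stored endpoints are rounded to microseconds, which accounts for the tiny residual):

# Sanity check on the recorded timings.
start_time = 87247.277149
end_time = 87894.242174
print(end_time - start_time)  # 646.965025, matching total_evaluation_time_seconds 646.965024264995 to ~1e-6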