ALLaM-7B-Instruct-preview/evaluations/en/jais-adapted-70b-chat/mmlu_0_shot.json
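
A minimal sketch of how the headline numbers in this lm-evaluation-harness output can be read back out with the Python standard library. The key names ("results", "groups", "acc,none", "acc_stderr,none") match the fields in the JSON below; the local path is an assumption taken from the filename above, so adjust it to wherever the file actually lives.

import json

# Hypothetical local path -- matches the repo path of this file.
path = "ALLaM-7B-Instruct-preview/evaluations/en/jais-adapted-70b-chat/mmlu_0_shot.json"

with open(path) as f:
    data = json.load(f)

# Overall MMLU accuracy and its standard error, as reported in "results".
overall = data["results"]["mmlu"]
print(f'mmlu acc = {overall["acc,none"]:.4f} +/- {overall["acc_stderr,none"]:.4f}')

# Per-subtask accuracies: "results" holds both the group aggregates
# (mmlu, mmlu_humanities, mmlu_other, mmlu_social_sciences, mmlu_stem)
# and the 57 individual subtasks, so filter out the names listed in "groups".
subtasks = {
    name: scores["acc,none"]
    for name, scores in data["results"].items()
    if "acc,none" in scores and name not in data["groups"]
}
for name, acc in sorted(subtasks.items(), key=lambda kv: kv[1]):
    print(f"{name}: {acc:.4f}")

The trailing "configs" section records, for each subtask, exactly how these numbers were produced: the prompt template ("doc_to_text"), the A/B/C/D answer mapping ("doc_to_choice"), the zero-shot setting ("num_fewshot": 0), and the source dataset ("hails/mmlu_no_train").
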

{
"results": {
"mmlu": {
"acc,none": 0.6522575131747614,
"acc_stderr,none": 0.00375442237713615,
"alias": "mmlu"
},
"mmlu_humanities": {
"acc,none": 0.5995749202975558,
"acc_stderr,none": 0.006560646191394197,
"alias": " - humanities"
},
"mmlu_formal_logic": {
"alias": " - formal_logic",
"acc,none": 0.4523809523809524,
"acc_stderr,none": 0.044518079590553275
},
"mmlu_high_school_european_history": {
"alias": " - high_school_european_history",
"acc,none": 0.8424242424242424,
"acc_stderr,none": 0.028450388805284332
},
"mmlu_high_school_us_history": {
"alias": " - high_school_us_history",
"acc,none": 0.8774509803921569,
"acc_stderr,none": 0.023015389732458258
},
"mmlu_high_school_world_history": {
"alias": " - high_school_world_history",
"acc,none": 0.8649789029535865,
"acc_stderr,none": 0.022245776632003694
},
"mmlu_international_law": {
"alias": " - international_law",
"acc,none": 0.7768595041322314,
"acc_stderr,none": 0.03800754475228733
},
"mmlu_jurisprudence": {
"alias": " - jurisprudence",
"acc,none": 0.7777777777777778,
"acc_stderr,none": 0.0401910747255735
},
"mmlu_logical_fallacies": {
"alias": " - logical_fallacies",
"acc,none": 0.7791411042944786,
"acc_stderr,none": 0.032591773927421776
},
"mmlu_moral_disputes": {
"alias": " - moral_disputes",
"acc,none": 0.7283236994219653,
"acc_stderr,none": 0.023948512905468348
},
"mmlu_moral_scenarios": {
"alias": " - moral_scenarios",
"acc,none": 0.29608938547486036,
"acc_stderr,none": 0.015268677317602281
},
"mmlu_philosophy": {
"alias": " - philosophy",
"acc,none": 0.752411575562701,
"acc_stderr,none": 0.024513879973621967
},
"mmlu_prehistory": {
"alias": " - prehistory",
"acc,none": 0.7592592592592593,
"acc_stderr,none": 0.023788583551658537
},
"mmlu_professional_law": {
"alias": " - professional_law",
"acc,none": 0.5202086049543677,
"acc_stderr,none": 0.012759801427767559
},
"mmlu_world_religions": {
"alias": " - world_religions",
"acc,none": 0.8245614035087719,
"acc_stderr,none": 0.02917088550072767
},
"mmlu_other": {
"acc,none": 0.7100096556163502,
"acc_stderr,none": 0.007844213155132828,
"alias": " - other"
},
"mmlu_business_ethics": {
"alias": " - business_ethics",
"acc,none": 0.71,
"acc_stderr,none": 0.045604802157206845
},
"mmlu_clinical_knowledge": {
"alias": " - clinical_knowledge",
"acc,none": 0.7169811320754716,
"acc_stderr,none": 0.027724236492700918
},
"mmlu_college_medicine": {
"alias": " - college_medicine",
"acc,none": 0.630057803468208,
"acc_stderr,none": 0.0368122963339432
},
"mmlu_global_facts": {
"alias": " - global_facts",
"acc,none": 0.44,
"acc_stderr,none": 0.04988876515698589
},
"mmlu_human_aging": {
"alias": " - human_aging",
"acc,none": 0.7623318385650224,
"acc_stderr,none": 0.02856807946471428
},
"mmlu_management": {
"alias": " - management",
"acc,none": 0.8058252427184466,
"acc_stderr,none": 0.03916667762822582
},
"mmlu_marketing": {
"alias": " - marketing",
"acc,none": 0.8846153846153846,
"acc_stderr,none": 0.02093019318517933
},
"mmlu_medical_genetics": {
"alias": " - medical_genetics",
"acc,none": 0.72,
"acc_stderr,none": 0.04512608598542127
},
"mmlu_miscellaneous": {
"alias": " - miscellaneous",
"acc,none": 0.8109833971902938,
"acc_stderr,none": 0.014000791294406999
},
"mmlu_nutrition": {
"alias": " - nutrition",
"acc,none": 0.7483660130718954,
"acc_stderr,none": 0.024848018263875192
},
"mmlu_professional_accounting": {
"alias": " - professional_accounting",
"acc,none": 0.4787234042553192,
"acc_stderr,none": 0.029800481645628693
},
"mmlu_professional_medicine": {
"alias": " - professional_medicine",
"acc,none": 0.6470588235294118,
"acc_stderr,none": 0.029029422815681407
},
"mmlu_virology": {
"alias": " - virology",
"acc,none": 0.5120481927710844,
"acc_stderr,none": 0.03891364495835817
},
"mmlu_social_sciences": {
"acc,none": 0.7676308092297692,
"acc_stderr,none": 0.00740933282907595,
"alias": " - social sciences"
},
"mmlu_econometrics": {
"alias": " - econometrics",
"acc,none": 0.43859649122807015,
"acc_stderr,none": 0.04668000738510455
},
"mmlu_high_school_geography": {
"alias": " - high_school_geography",
"acc,none": 0.8434343434343434,
"acc_stderr,none": 0.025890520358141454
},
"mmlu_high_school_government_and_politics": {
"alias": " - high_school_government_and_politics",
"acc,none": 0.917098445595855,
"acc_stderr,none": 0.01989934131572178
},
"mmlu_high_school_macroeconomics": {
"alias": " - high_school_macroeconomics",
"acc,none": 0.6666666666666666,
"acc_stderr,none": 0.023901157979402534
},
"mmlu_high_school_microeconomics": {
"alias": " - high_school_microeconomics",
"acc,none": 0.7563025210084033,
"acc_stderr,none": 0.02788682807838058
},
"mmlu_high_school_psychology": {
"alias": " - high_school_psychology",
"acc,none": 0.8550458715596331,
"acc_stderr,none": 0.015094215699700462
},
"mmlu_human_sexuality": {
"alias": " - human_sexuality",
"acc,none": 0.7786259541984732,
"acc_stderr,none": 0.036412970813137276
},
"mmlu_professional_psychology": {
"alias": " - professional_psychology",
"acc,none": 0.7140522875816994,
"acc_stderr,none": 0.018280485072954683
},
"mmlu_public_relations": {
"alias": " - public_relations",
"acc,none": 0.7545454545454545,
"acc_stderr,none": 0.041220665028782855
},
"mmlu_security_studies": {
"alias": " - security_studies",
"acc,none": 0.7428571428571429,
"acc_stderr,none": 0.027979823538744546
},
"mmlu_sociology": {
"alias": " - sociology",
"acc,none": 0.8208955223880597,
"acc_stderr,none": 0.027113286753111837
},
"mmlu_us_foreign_policy": {
"alias": " - us_foreign_policy",
"acc,none": 0.93,
"acc_stderr,none": 0.025643239997624294
},
"mmlu_stem": {
"acc,none": 0.5613701236917221,
"acc_stderr,none": 0.008468341117645424,
"alias": " - stem"
},
"mmlu_abstract_algebra": {
"alias": " - abstract_algebra",
"acc,none": 0.32,
"acc_stderr,none": 0.046882617226215034
},
"mmlu_anatomy": {
"alias": " - anatomy",
"acc,none": 0.5555555555555556,
"acc_stderr,none": 0.04292596718256981
},
"mmlu_astronomy": {
"alias": " - astronomy",
"acc,none": 0.75,
"acc_stderr,none": 0.03523807393012047
},
"mmlu_college_biology": {
"alias": " - college_biology",
"acc,none": 0.75,
"acc_stderr,none": 0.03621034121889507
},
"mmlu_college_chemistry": {
"alias": " - college_chemistry",
"acc,none": 0.5,
"acc_stderr,none": 0.050251890762960605
},
"mmlu_college_computer_science": {
"alias": " - college_computer_science",
"acc,none": 0.57,
"acc_stderr,none": 0.049756985195624284
},
"mmlu_college_mathematics": {
"alias": " - college_mathematics",
"acc,none": 0.36,
"acc_stderr,none": 0.048241815132442176
},
"mmlu_college_physics": {
"alias": " - college_physics",
"acc,none": 0.46078431372549017,
"acc_stderr,none": 0.04959859966384181
},
"mmlu_computer_security": {
"alias": " - computer_security",
"acc,none": 0.75,
"acc_stderr,none": 0.04351941398892446
},
"mmlu_conceptual_physics": {
"alias": " - conceptual_physics",
"acc,none": 0.5617021276595745,
"acc_stderr,none": 0.03243618636108102
},
"mmlu_electrical_engineering": {
"alias": " - electrical_engineering",
"acc,none": 0.6413793103448275,
"acc_stderr,none": 0.03996629574876719
},
"mmlu_elementary_mathematics": {
"alias": " - elementary_mathematics",
"acc,none": 0.4603174603174603,
"acc_stderr,none": 0.025670080636909193
},
"mmlu_high_school_biology": {
"alias": " - high_school_biology",
"acc,none": 0.8129032258064516,
"acc_stderr,none": 0.02218571009225225
},
"mmlu_high_school_chemistry": {
"alias": " - high_school_chemistry",
"acc,none": 0.4876847290640394,
"acc_stderr,none": 0.035169204442208966
},
"mmlu_high_school_computer_science": {
"alias": " - high_school_computer_science",
"acc,none": 0.65,
"acc_stderr,none": 0.0479372485441102
},
"mmlu_high_school_mathematics": {
"alias": " - high_school_mathematics",
"acc,none": 0.37037037037037035,
"acc_stderr,none": 0.02944316932303154
},
"mmlu_high_school_physics": {
"alias": " - high_school_physics",
"acc,none": 0.3841059602649007,
"acc_stderr,none": 0.03971301814719198
},
"mmlu_high_school_statistics": {
"alias": " - high_school_statistics",
"acc,none": 0.6435185185185185,
"acc_stderr,none": 0.032664783315272714
},
"mmlu_machine_learning": {
"alias": " - machine_learning",
"acc,none": 0.5714285714285714,
"acc_stderr,none": 0.04697113923010212
}
},
"groups": {
"mmlu": {
"acc,none": 0.6522575131747614,
"acc_stderr,none": 0.00375442237713615,
"alias": "mmlu"
},
"mmlu_humanities": {
"acc,none": 0.5995749202975558,
"acc_stderr,none": 0.006560646191394197,
"alias": " - humanities"
},
"mmlu_other": {
"acc,none": 0.7100096556163502,
"acc_stderr,none": 0.007844213155132828,
"alias": " - other"
},
"mmlu_social_sciences": {
"acc,none": 0.7676308092297692,
"acc_stderr,none": 0.00740933282907595,
"alias": " - social sciences"
},
"mmlu_stem": {
"acc,none": 0.5613701236917221,
"acc_stderr,none": 0.008468341117645424,
"alias": " - stem"
}
},
"group_subtasks": {
"mmlu_humanities": [
"mmlu_high_school_world_history",
"mmlu_high_school_european_history",
"mmlu_high_school_us_history",
"mmlu_logical_fallacies",
"mmlu_moral_scenarios",
"mmlu_formal_logic",
"mmlu_moral_disputes",
"mmlu_prehistory",
"mmlu_world_religions",
"mmlu_philosophy",
"mmlu_jurisprudence",
"mmlu_international_law",
"mmlu_professional_law"
],
"mmlu_social_sciences": [
"mmlu_high_school_government_and_politics",
"mmlu_human_sexuality",
"mmlu_high_school_psychology",
"mmlu_sociology",
"mmlu_high_school_macroeconomics",
"mmlu_us_foreign_policy",
"mmlu_high_school_geography",
"mmlu_public_relations",
"mmlu_professional_psychology",
"mmlu_high_school_microeconomics",
"mmlu_security_studies",
"mmlu_econometrics"
],
"mmlu_other": [
"mmlu_human_aging",
"mmlu_professional_medicine",
"mmlu_clinical_knowledge",
"mmlu_nutrition",
"mmlu_marketing",
"mmlu_business_ethics",
"mmlu_global_facts",
"mmlu_miscellaneous",
"mmlu_management",
"mmlu_college_medicine",
"mmlu_medical_genetics",
"mmlu_professional_accounting",
"mmlu_virology"
],
"mmlu_stem": [
"mmlu_high_school_statistics",
"mmlu_astronomy",
"mmlu_college_chemistry",
"mmlu_college_physics",
"mmlu_college_biology",
"mmlu_high_school_mathematics",
"mmlu_machine_learning",
"mmlu_abstract_algebra",
"mmlu_anatomy",
"mmlu_elementary_mathematics",
"mmlu_college_computer_science",
"mmlu_high_school_chemistry",
"mmlu_high_school_biology",
"mmlu_computer_security",
"mmlu_college_mathematics",
"mmlu_high_school_computer_science",
"mmlu_electrical_engineering",
"mmlu_conceptual_physics",
"mmlu_high_school_physics"
],
"mmlu": [
"mmlu_stem",
"mmlu_other",
"mmlu_social_sciences",
"mmlu_humanities"
]
},
"configs": {
"mmlu_abstract_algebra": {
"task": "mmlu_abstract_algebra",
"task_alias": "abstract_algebra",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "abstract_algebra",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about abstract algebra.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_anatomy": {
"task": "mmlu_anatomy",
"task_alias": "anatomy",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "anatomy",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about anatomy.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_astronomy": {
"task": "mmlu_astronomy",
"task_alias": "astronomy",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "astronomy",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about astronomy.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_business_ethics": {
"task": "mmlu_business_ethics",
"task_alias": "business_ethics",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "business_ethics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about business ethics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_clinical_knowledge": {
"task": "mmlu_clinical_knowledge",
"task_alias": "clinical_knowledge",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "clinical_knowledge",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about clinical knowledge.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_college_biology": {
"task": "mmlu_college_biology",
"task_alias": "college_biology",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "college_biology",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about college biology.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_college_chemistry": {
"task": "mmlu_college_chemistry",
"task_alias": "college_chemistry",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "college_chemistry",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about college chemistry.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_college_computer_science": {
"task": "mmlu_college_computer_science",
"task_alias": "college_computer_science",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "college_computer_science",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about college computer science.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_college_mathematics": {
"task": "mmlu_college_mathematics",
"task_alias": "college_mathematics",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "college_mathematics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about college mathematics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_college_medicine": {
"task": "mmlu_college_medicine",
"task_alias": "college_medicine",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "college_medicine",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about college medicine.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_college_physics": {
"task": "mmlu_college_physics",
"task_alias": "college_physics",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "college_physics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about college physics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_computer_security": {
"task": "mmlu_computer_security",
"task_alias": "computer_security",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "computer_security",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about computer security.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_conceptual_physics": {
"task": "mmlu_conceptual_physics",
"task_alias": "conceptual_physics",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "conceptual_physics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about conceptual physics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_econometrics": {
"task": "mmlu_econometrics",
"task_alias": "econometrics",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "econometrics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about econometrics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_electrical_engineering": {
"task": "mmlu_electrical_engineering",
"task_alias": "electrical_engineering",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "electrical_engineering",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about electrical engineering.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_elementary_mathematics": {
"task": "mmlu_elementary_mathematics",
"task_alias": "elementary_mathematics",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "elementary_mathematics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about elementary mathematics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_formal_logic": {
"task": "mmlu_formal_logic",
"task_alias": "formal_logic",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "formal_logic",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about formal logic.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_global_facts": {
"task": "mmlu_global_facts",
"task_alias": "global_facts",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "global_facts",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about global facts.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_biology": {
"task": "mmlu_high_school_biology",
"task_alias": "high_school_biology",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_biology",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school biology.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_chemistry": {
"task": "mmlu_high_school_chemistry",
"task_alias": "high_school_chemistry",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_chemistry",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school chemistry.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_computer_science": {
"task": "mmlu_high_school_computer_science",
"task_alias": "high_school_computer_science",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_computer_science",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school computer science.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_european_history": {
"task": "mmlu_high_school_european_history",
"task_alias": "high_school_european_history",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_european_history",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school european history.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_geography": {
"task": "mmlu_high_school_geography",
"task_alias": "high_school_geography",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_geography",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school geography.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_government_and_politics": {
"task": "mmlu_high_school_government_and_politics",
"task_alias": "high_school_government_and_politics",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_government_and_politics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school government and politics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_macroeconomics": {
"task": "mmlu_high_school_macroeconomics",
"task_alias": "high_school_macroeconomics",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_macroeconomics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school macroeconomics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_mathematics": {
"task": "mmlu_high_school_mathematics",
"task_alias": "high_school_mathematics",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_mathematics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school mathematics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_microeconomics": {
"task": "mmlu_high_school_microeconomics",
"task_alias": "high_school_microeconomics",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_microeconomics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school microeconomics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_physics": {
"task": "mmlu_high_school_physics",
"task_alias": "high_school_physics",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_physics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school physics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_psychology": {
"task": "mmlu_high_school_psychology",
"task_alias": "high_school_psychology",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_psychology",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school psychology.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_statistics": {
"task": "mmlu_high_school_statistics",
"task_alias": "high_school_statistics",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_statistics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school statistics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_us_history": {
"task": "mmlu_high_school_us_history",
"task_alias": "high_school_us_history",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_us_history",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school us history.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_high_school_world_history": {
"task": "mmlu_high_school_world_history",
"task_alias": "high_school_world_history",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "high_school_world_history",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about high school world history.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_human_aging": {
"task": "mmlu_human_aging",
"task_alias": "human_aging",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "human_aging",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about human aging.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_human_sexuality": {
"task": "mmlu_human_sexuality",
"task_alias": "human_sexuality",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "human_sexuality",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about human sexuality.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_international_law": {
"task": "mmlu_international_law",
"task_alias": "international_law",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "international_law",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about international law.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_jurisprudence": {
"task": "mmlu_jurisprudence",
"task_alias": "jurisprudence",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "jurisprudence",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about jurisprudence.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_logical_fallacies": {
"task": "mmlu_logical_fallacies",
"task_alias": "logical_fallacies",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "logical_fallacies",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about logical fallacies.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_machine_learning": {
"task": "mmlu_machine_learning",
"task_alias": "machine_learning",
"tag": "mmlu_stem_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "machine_learning",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about machine learning.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_management": {
"task": "mmlu_management",
"task_alias": "management",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "management",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about management.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_marketing": {
"task": "mmlu_marketing",
"task_alias": "marketing",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "marketing",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about marketing.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_medical_genetics": {
"task": "mmlu_medical_genetics",
"task_alias": "medical_genetics",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "medical_genetics",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about medical genetics.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_miscellaneous": {
"task": "mmlu_miscellaneous",
"task_alias": "miscellaneous",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "miscellaneous",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about miscellaneous.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_moral_disputes": {
"task": "mmlu_moral_disputes",
"task_alias": "moral_disputes",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "moral_disputes",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about moral disputes.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_moral_scenarios": {
"task": "mmlu_moral_scenarios",
"task_alias": "moral_scenarios",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "moral_scenarios",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about moral scenarios.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_nutrition": {
"task": "mmlu_nutrition",
"task_alias": "nutrition",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "nutrition",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about nutrition.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_philosophy": {
"task": "mmlu_philosophy",
"task_alias": "philosophy",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "philosophy",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about philosophy.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_prehistory": {
"task": "mmlu_prehistory",
"task_alias": "prehistory",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "prehistory",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about prehistory.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_professional_accounting": {
"task": "mmlu_professional_accounting",
"task_alias": "professional_accounting",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "professional_accounting",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about professional accounting.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_professional_law": {
"task": "mmlu_professional_law",
"task_alias": "professional_law",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "professional_law",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about professional law.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_professional_medicine": {
"task": "mmlu_professional_medicine",
"task_alias": "professional_medicine",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "professional_medicine",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about professional medicine.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_professional_psychology": {
"task": "mmlu_professional_psychology",
"task_alias": "professional_psychology",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "professional_psychology",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about professional psychology.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_public_relations": {
"task": "mmlu_public_relations",
"task_alias": "public_relations",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "public_relations",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about public relations.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_security_studies": {
"task": "mmlu_security_studies",
"task_alias": "security_studies",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "security_studies",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about security studies.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_sociology": {
"task": "mmlu_sociology",
"task_alias": "sociology",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "sociology",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about sociology.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_us_foreign_policy": {
"task": "mmlu_us_foreign_policy",
"task_alias": "us_foreign_policy",
"tag": "mmlu_social_sciences_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "us_foreign_policy",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about us foreign policy.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_virology": {
"task": "mmlu_virology",
"task_alias": "virology",
"tag": "mmlu_other_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "virology",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about virology.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
},
"mmlu_world_religions": {
"task": "mmlu_world_religions",
"task_alias": "world_religions",
"tag": "mmlu_humanities_tasks",
"dataset_path": "hails/mmlu_no_train",
"dataset_name": "world_religions",
"dataset_kwargs": {
"trust_remote_code": true
},
"test_split": "test",
"fewshot_split": "dev",
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
"doc_to_target": "answer",
"doc_to_choice": [
"A",
"B",
"C",
"D"
],
"description": "The following are multiple choice questions (with answers) about world religions.\n\n",
"target_delimiter": " ",
"fewshot_delimiter": "\n\n",
"fewshot_config": {
"sampler": "first_n"
},
"num_fewshot": 0,
"metric_list": [
{
"metric": "acc",
"aggregation": "mean",
"higher_is_better": true
}
],
"output_type": "multiple_choice",
"repeats": 1,
"should_decontaminate": false,
"metadata": {
"version": 1.0
}
}
},
"versions": {
"mmlu": 2,
"mmlu_abstract_algebra": 1.0,
"mmlu_anatomy": 1.0,
"mmlu_astronomy": 1.0,
"mmlu_business_ethics": 1.0,
"mmlu_clinical_knowledge": 1.0,
"mmlu_college_biology": 1.0,
"mmlu_college_chemistry": 1.0,
"mmlu_college_computer_science": 1.0,
"mmlu_college_mathematics": 1.0,
"mmlu_college_medicine": 1.0,
"mmlu_college_physics": 1.0,
"mmlu_computer_security": 1.0,
"mmlu_conceptual_physics": 1.0,
"mmlu_econometrics": 1.0,
"mmlu_electrical_engineering": 1.0,
"mmlu_elementary_mathematics": 1.0,
"mmlu_formal_logic": 1.0,
"mmlu_global_facts": 1.0,
"mmlu_high_school_biology": 1.0,
"mmlu_high_school_chemistry": 1.0,
"mmlu_high_school_computer_science": 1.0,
"mmlu_high_school_european_history": 1.0,
"mmlu_high_school_geography": 1.0,
"mmlu_high_school_government_and_politics": 1.0,
"mmlu_high_school_macroeconomics": 1.0,
"mmlu_high_school_mathematics": 1.0,
"mmlu_high_school_microeconomics": 1.0,
"mmlu_high_school_physics": 1.0,
"mmlu_high_school_psychology": 1.0,
"mmlu_high_school_statistics": 1.0,
"mmlu_high_school_us_history": 1.0,
"mmlu_high_school_world_history": 1.0,
"mmlu_human_aging": 1.0,
"mmlu_human_sexuality": 1.0,
"mmlu_humanities": 2,
"mmlu_international_law": 1.0,
"mmlu_jurisprudence": 1.0,
"mmlu_logical_fallacies": 1.0,
"mmlu_machine_learning": 1.0,
"mmlu_management": 1.0,
"mmlu_marketing": 1.0,
"mmlu_medical_genetics": 1.0,
"mmlu_miscellaneous": 1.0,
"mmlu_moral_disputes": 1.0,
"mmlu_moral_scenarios": 1.0,
"mmlu_nutrition": 1.0,
"mmlu_other": 2,
"mmlu_philosophy": 1.0,
"mmlu_prehistory": 1.0,
"mmlu_professional_accounting": 1.0,
"mmlu_professional_law": 1.0,
"mmlu_professional_medicine": 1.0,
"mmlu_professional_psychology": 1.0,
"mmlu_public_relations": 1.0,
"mmlu_security_studies": 1.0,
"mmlu_social_sciences": 2,
"mmlu_sociology": 1.0,
"mmlu_stem": 2,
"mmlu_us_foreign_policy": 1.0,
"mmlu_virology": 1.0,
"mmlu_world_religions": 1.0
},
"n-shot": {
"mmlu_abstract_algebra": 0,
"mmlu_anatomy": 0,
"mmlu_astronomy": 0,
"mmlu_business_ethics": 0,
"mmlu_clinical_knowledge": 0,
"mmlu_college_biology": 0,
"mmlu_college_chemistry": 0,
"mmlu_college_computer_science": 0,
"mmlu_college_mathematics": 0,
"mmlu_college_medicine": 0,
"mmlu_college_physics": 0,
"mmlu_computer_security": 0,
"mmlu_conceptual_physics": 0,
"mmlu_econometrics": 0,
"mmlu_electrical_engineering": 0,
"mmlu_elementary_mathematics": 0,
"mmlu_formal_logic": 0,
"mmlu_global_facts": 0,
"mmlu_high_school_biology": 0,
"mmlu_high_school_chemistry": 0,
"mmlu_high_school_computer_science": 0,
"mmlu_high_school_european_history": 0,
"mmlu_high_school_geography": 0,
"mmlu_high_school_government_and_politics": 0,
"mmlu_high_school_macroeconomics": 0,
"mmlu_high_school_mathematics": 0,
"mmlu_high_school_microeconomics": 0,
"mmlu_high_school_physics": 0,
"mmlu_high_school_psychology": 0,
"mmlu_high_school_statistics": 0,
"mmlu_high_school_us_history": 0,
"mmlu_high_school_world_history": 0,
"mmlu_human_aging": 0,
"mmlu_human_sexuality": 0,
"mmlu_international_law": 0,
"mmlu_jurisprudence": 0,
"mmlu_logical_fallacies": 0,
"mmlu_machine_learning": 0,
"mmlu_management": 0,
"mmlu_marketing": 0,
"mmlu_medical_genetics": 0,
"mmlu_miscellaneous": 0,
"mmlu_moral_disputes": 0,
"mmlu_moral_scenarios": 0,
"mmlu_nutrition": 0,
"mmlu_philosophy": 0,
"mmlu_prehistory": 0,
"mmlu_professional_accounting": 0,
"mmlu_professional_law": 0,
"mmlu_professional_medicine": 0,
"mmlu_professional_psychology": 0,
"mmlu_public_relations": 0,
"mmlu_security_studies": 0,
"mmlu_sociology": 0,
"mmlu_us_foreign_policy": 0,
"mmlu_virology": 0,
"mmlu_world_religions": 0
},
"higher_is_better": {
"mmlu": {
"acc": true
},
"mmlu_abstract_algebra": {
"acc": true
},
"mmlu_anatomy": {
"acc": true
},
"mmlu_astronomy": {
"acc": true
},
"mmlu_business_ethics": {
"acc": true
},
"mmlu_clinical_knowledge": {
"acc": true
},
"mmlu_college_biology": {
"acc": true
},
"mmlu_college_chemistry": {
"acc": true
},
"mmlu_college_computer_science": {
"acc": true
},
"mmlu_college_mathematics": {
"acc": true
},
"mmlu_college_medicine": {
"acc": true
},
"mmlu_college_physics": {
"acc": true
},
"mmlu_computer_security": {
"acc": true
},
"mmlu_conceptual_physics": {
"acc": true
},
"mmlu_econometrics": {
"acc": true
},
"mmlu_electrical_engineering": {
"acc": true
},
"mmlu_elementary_mathematics": {
"acc": true
},
"mmlu_formal_logic": {
"acc": true
},
"mmlu_global_facts": {
"acc": true
},
"mmlu_high_school_biology": {
"acc": true
},
"mmlu_high_school_chemistry": {
"acc": true
},
"mmlu_high_school_computer_science": {
"acc": true
},
"mmlu_high_school_european_history": {
"acc": true
},
"mmlu_high_school_geography": {
"acc": true
},
"mmlu_high_school_government_and_politics": {
"acc": true
},
"mmlu_high_school_macroeconomics": {
"acc": true
},
"mmlu_high_school_mathematics": {
"acc": true
},
"mmlu_high_school_microeconomics": {
"acc": true
},
"mmlu_high_school_physics": {
"acc": true
},
"mmlu_high_school_psychology": {
"acc": true
},
"mmlu_high_school_statistics": {
"acc": true
},
"mmlu_high_school_us_history": {
"acc": true
},
"mmlu_high_school_world_history": {
"acc": true
},
"mmlu_human_aging": {
"acc": true
},
"mmlu_human_sexuality": {
"acc": true
},
"mmlu_humanities": {
"acc": true
},
"mmlu_international_law": {
"acc": true
},
"mmlu_jurisprudence": {
"acc": true
},
"mmlu_logical_fallacies": {
"acc": true
},
"mmlu_machine_learning": {
"acc": true
},
"mmlu_management": {
"acc": true
},
"mmlu_marketing": {
"acc": true
},
"mmlu_medical_genetics": {
"acc": true
},
"mmlu_miscellaneous": {
"acc": true
},
"mmlu_moral_disputes": {
"acc": true
},
"mmlu_moral_scenarios": {
"acc": true
},
"mmlu_nutrition": {
"acc": true
},
"mmlu_other": {
"acc": true
},
"mmlu_philosophy": {
"acc": true
},
"mmlu_prehistory": {
"acc": true
},
"mmlu_professional_accounting": {
"acc": true
},
"mmlu_professional_law": {
"acc": true
},
"mmlu_professional_medicine": {
"acc": true
},
"mmlu_professional_psychology": {
"acc": true
},
"mmlu_public_relations": {
"acc": true
},
"mmlu_security_studies": {
"acc": true
},
"mmlu_social_sciences": {
"acc": true
},
"mmlu_sociology": {
"acc": true
},
"mmlu_stem": {
"acc": true
},
"mmlu_us_foreign_policy": {
"acc": true
},
"mmlu_virology": {
"acc": true
},
"mmlu_world_religions": {
"acc": true
}
},
"n-samples": {
"mmlu_high_school_statistics": {
"original": 216,
"effective": 216
},
"mmlu_astronomy": {
"original": 152,
"effective": 152
},
"mmlu_college_chemistry": {
"original": 100,
"effective": 100
},
"mmlu_college_physics": {
"original": 102,
"effective": 102
},
"mmlu_college_biology": {
"original": 144,
"effective": 144
},
"mmlu_high_school_mathematics": {
"original": 270,
"effective": 270
},
"mmlu_machine_learning": {
"original": 112,
"effective": 112
},
"mmlu_abstract_algebra": {
"original": 100,
"effective": 100
},
"mmlu_anatomy": {
"original": 135,
"effective": 135
},
"mmlu_elementary_mathematics": {
"original": 378,
"effective": 378
},
"mmlu_college_computer_science": {
"original": 100,
"effective": 100
},
"mmlu_high_school_chemistry": {
"original": 203,
"effective": 203
},
"mmlu_high_school_biology": {
"original": 310,
"effective": 310
},
"mmlu_computer_security": {
"original": 100,
"effective": 100
},
"mmlu_college_mathematics": {
"original": 100,
"effective": 100
},
"mmlu_high_school_computer_science": {
"original": 100,
"effective": 100
},
"mmlu_electrical_engineering": {
"original": 145,
"effective": 145
},
"mmlu_conceptual_physics": {
"original": 235,
"effective": 235
},
"mmlu_high_school_physics": {
"original": 151,
"effective": 151
},
"mmlu_human_aging": {
"original": 223,
"effective": 223
},
"mmlu_professional_medicine": {
"original": 272,
"effective": 272
},
"mmlu_clinical_knowledge": {
"original": 265,
"effective": 265
},
"mmlu_nutrition": {
"original": 306,
"effective": 306
},
"mmlu_marketing": {
"original": 234,
"effective": 234
},
"mmlu_business_ethics": {
"original": 100,
"effective": 100
},
"mmlu_global_facts": {
"original": 100,
"effective": 100
},
"mmlu_miscellaneous": {
"original": 783,
"effective": 783
},
"mmlu_management": {
"original": 103,
"effective": 103
},
"mmlu_college_medicine": {
"original": 173,
"effective": 173
},
"mmlu_medical_genetics": {
"original": 100,
"effective": 100
},
"mmlu_professional_accounting": {
"original": 282,
"effective": 282
},
"mmlu_virology": {
"original": 166,
"effective": 166
},
"mmlu_high_school_government_and_politics": {
"original": 193,
"effective": 193
},
"mmlu_human_sexuality": {
"original": 131,
"effective": 131
},
"mmlu_high_school_psychology": {
"original": 545,
"effective": 545
},
"mmlu_sociology": {
"original": 201,
"effective": 201
},
"mmlu_high_school_macroeconomics": {
"original": 390,
"effective": 390
},
"mmlu_us_foreign_policy": {
"original": 100,
"effective": 100
},
"mmlu_high_school_geography": {
"original": 198,
"effective": 198
},
"mmlu_public_relations": {
"original": 110,
"effective": 110
},
"mmlu_professional_psychology": {
"original": 612,
"effective": 612
},
"mmlu_high_school_microeconomics": {
"original": 238,
"effective": 238
},
"mmlu_security_studies": {
"original": 245,
"effective": 245
},
"mmlu_econometrics": {
"original": 114,
"effective": 114
},
"mmlu_high_school_world_history": {
"original": 237,
"effective": 237
},
"mmlu_high_school_european_history": {
"original": 165,
"effective": 165
},
"mmlu_high_school_us_history": {
"original": 204,
"effective": 204
},
"mmlu_logical_fallacies": {
"original": 163,
"effective": 163
},
"mmlu_moral_scenarios": {
"original": 895,
"effective": 895
},
"mmlu_formal_logic": {
"original": 126,
"effective": 126
},
"mmlu_moral_disputes": {
"original": 346,
"effective": 346
},
"mmlu_prehistory": {
"original": 324,
"effective": 324
},
"mmlu_world_religions": {
"original": 171,
"effective": 171
},
"mmlu_philosophy": {
"original": 311,
"effective": 311
},
"mmlu_jurisprudence": {
"original": 108,
"effective": 108
},
"mmlu_international_law": {
"original": 121,
"effective": 121
},
"mmlu_professional_law": {
"original": 1534,
"effective": 1534
}
},
"config": {
"model": "hf",
"model_args": "pretrained=inceptionai/jais-adapted-70b-chat,trust_remote_code=True,cache_dir=/tmp,parallelize=True",
"model_num_parameters": 69500936192,
"model_dtype": "torch.float32",
"model_revision": "main",
"model_sha": "07c93d6799cba82e240633e5fc9bb4cceea6feb2",
"batch_size": "auto",
"batch_sizes": [
32
],
"device": null,
"use_cache": null,
"limit": null,
"bootstrap_iters": 100000,
"gen_kwargs": null,
"random_seed": 0,
"numpy_seed": 1234,
"torch_seed": 1234,
"fewshot_seed": 1234
},
"git_hash": "150ae04f",
"date": 1737632572.1049643,
"pretty_env_info": "PyTorch version: 2.4.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: Ubuntu 22.04.3 LTS (x86_64)\nGCC version: (Ubuntu 11.4.0-1ubuntu1~22.04) 11.4.0\nClang version: Could not collect\nCMake version: version 3.27.1\nLibc version: glibc-2.35\n\nPython version: 3.10.12 (main, Jun 11 2023, 05:26:28) [GCC 11.4.0] (64-bit runtime)\nPython platform: Linux-5.15.0-1064-azure-x86_64-with-glibc2.35\nIs CUDA available: True\nCUDA runtime version: 12.2.128\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: \nGPU 0: NVIDIA A100-SXM4-80GB\nGPU 1: NVIDIA A100-SXM4-80GB\nGPU 2: NVIDIA A100-SXM4-80GB\nGPU 3: NVIDIA A100-SXM4-80GB\nGPU 4: NVIDIA A100-SXM4-80GB\nGPU 5: NVIDIA A100-SXM4-80GB\nGPU 6: NVIDIA A100-SXM4-80GB\nGPU 7: NVIDIA A100-SXM4-80GB\n\nNvidia driver version: 535.161.08\ncuDNN version: Probably one of the following:\n/usr/lib/x86_64-linux-gnu/libcudnn.so.8.9.4\n/usr/lib/x86_64-linux-gnu/libcudnn_adv_infer.so.8.9.4\n/usr/lib/x86_64-linux-gnu/libcudnn_adv_train.so.8.9.4\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn_infer.so.8.9.4\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn_train.so.8.9.4\n/usr/lib/x86_64-linux-gnu/libcudnn_ops_infer.so.8.9.4\n/usr/lib/x86_64-linux-gnu/libcudnn_ops_train.so.8.9.4\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nAddress sizes: 48 bits physical, 48 bits virtual\nByte Order: Little Endian\nCPU(s): 96\nOn-line CPU(s) list: 0-95\nVendor ID: AuthenticAMD\nModel name: AMD EPYC 7V12 64-Core Processor\nCPU family: 23\nModel: 49\nThread(s) per core: 1\nCore(s) per socket: 48\nSocket(s): 2\nStepping: 0\nBogoMIPS: 4890.88\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl tsc_reliable nonstop_tsc cpuid extd_apicid aperfmperf pni pclmulqdq ssse3 fma cx16 sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw topoext perfctr_core ssbd vmmcall fsgsbase bmi1 avx2 smep bmi2 rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 clzero xsaveerptr rdpru arat umip rdpid\nHypervisor vendor: Microsoft\nVirtualization type: full\nL1d cache: 3 MiB (96 instances)\nL1i cache: 3 MiB (96 instances)\nL2 cache: 48 MiB (96 instances)\nL3 cache: 384 MiB (24 instances)\nNUMA node(s): 4\nNUMA node0 CPU(s): 0-23\nNUMA node1 CPU(s): 24-47\nNUMA node2 CPU(s): 48-71\nNUMA node3 CPU(s): 72-95\nVulnerability Gather data sampling: Not affected\nVulnerability Itlb multihit: Not affected\nVulnerability L1tf: Not affected\nVulnerability Mds: Not affected\nVulnerability Meltdown: Not affected\nVulnerability Mmio stale data: Not affected\nVulnerability Retbleed: Mitigation; untrained return thunk; SMT disabled\nVulnerability Spec rstack overflow: Mitigation; safe RET, no microcode\nVulnerability Spec store bypass: Mitigation; Speculative Store Bypass disabled via prctl and seccomp\nVulnerability Spectre v1: Mitigation; usercopy/swapgs barriers and __user pointer sanitization\nVulnerability Spectre v2: Mitigation; Retpolines; STIBP disabled; RSB filling; PBRSB-eIBRS Not affected; BHI Not affected\nVulnerability Srbds: Not affected\nVulnerability Tsx async abort:
"transformers_version": "4.48.1",
"upper_git_hash": null,
"tokenizer_pad_token": [
"<unk>",
"0"
],
"tokenizer_eos_token": [
"</s>",
"2"
],
"tokenizer_bos_token": [
"<s>",
"1"
],
"eot_token_id": 2,
"max_length": 4096,
"task_hashes": {
"mmlu_high_school_statistics": "d46af02553938b20e9bce032a6ad424a0d56ae6e7784d0a351a96185695653f0",
"mmlu_astronomy": "c9eca6773bb6f58214e51f833bfd88e5eafcaa0c05d0a4c2ee3e9bbed1272002",
"mmlu_college_chemistry": "9d6d9332909abd7956faabfd895b7bd46a1085f65f31678e8f3535fee315a29a",
"mmlu_college_physics": "3f3da5b2a15744fd5445d372a816c3b07433b0ea50cb9e7fc8e08a8b2b2b962b",
"mmlu_college_biology": "d983837a4ac4327e74ff7f131eda1f0c23f6c9f2a1088e3a5162c6ede31605d5",
"mmlu_high_school_mathematics": "fcb250f2c0a888667054bdaa209b5c2b677ecc9c1ac81fa8b8dce87a05dbc3d7",
"mmlu_machine_learning": "4aa26a0049db413da3860533cc38acdf747bafd4849f6f6fc9f58028bb8b4cc6",
"mmlu_abstract_algebra": "019c53bb7725c435b6977919f6e4a0043f6045691070942190fe4e0257b6e1e4",
"mmlu_anatomy": "8a394ba6aa4d3366637e72da67c7d4c0286d47cb371a4f4a9814259be8bbe3ad",
"mmlu_elementary_mathematics": "5f96932b45fc8d0ea0e09c979e7a0290505fb53fdb647624ad00ca162a2a7c50",
"mmlu_college_computer_science": "44cc706099add4f2fa3d3903e33447378c38401a9b22e738008aef4db99ca7ae",
"mmlu_high_school_chemistry": "1c7e3e5bffefd467481de9fb6425ec50eca053f9fce3b25af745ff886195176b",
"mmlu_high_school_biology": "18ee3f74ce477d1ee3492951cfae846b1903dfcc4d107227ffaa6e305681a20f",
"mmlu_computer_security": "9d94057a3894877d08645c17c769a104d28a2ed4249de8865d23d46953b15545",
"mmlu_college_mathematics": "f1a2766207148367dedfa3e2961fc69de59078cfbc9631210b38068c0df8bbd7",
"mmlu_high_school_computer_science": "34a7f3d2bbe6a0dc39d03973d87f9053076ccfbd7ea7ab40dca7073b68640db7",
"mmlu_electrical_engineering": "fd6ef46bf380068043ad0568d1987c5485397a06d582f3f546bf2bea6cc02f3a",
"mmlu_conceptual_physics": "ab3e1ecbb255ddc5c9ce70494d102bda7e259eb20596633235a42ca3d635239b",
"mmlu_high_school_physics": "59513856cfc584e2815f43814216c8143f9c8866599ed8aaf7d53eec6ce308e9",
"mmlu_human_aging": "2127e79731bae760ca6ff04ca6f2217d030a612d04a868032f4f6d8b42293550",
"mmlu_professional_medicine": "b1c4eea40bd1d93e49c50cadd35db8bbb96392c40d208ae1ffd6e72c306d757a",
"mmlu_clinical_knowledge": "839bf7b05724190f7277a957e8b2183a7b4dc74ab9ca72063d10872092a1ea7a",
"mmlu_nutrition": "5a8f9ce8f1f4e9179460896281757c2f3e0c127c150608a5801d41101f6e8df1",
"mmlu_marketing": "5ae8fb39ae90c5cd69adbffe8a62ebd10813d8da0d61fcd05cf143c65cee0303",
"mmlu_business_ethics": "0115853241ce686fdf365cd34614a8b07067e96c385e2820e77c6820f1e1ea0b",
"mmlu_global_facts": "217258f063f285ebf53d6ade8753260d4feb2932345188e50f65c798db1e8bb4",
"mmlu_miscellaneous": "50d1ec8566cca1585a54310882df59a1a36d12921a2c54eb50f5d8cd43671470",
"mmlu_management": "21dc8d1b1528148e3e5eab8e5b2e9e1cd69513c82a87509bb777c44fbcf06684",
"mmlu_college_medicine": "14529d73333850b8be0fc1d4c102c4500b76434c8c761611be6899af27608455",
"mmlu_medical_genetics": "9b736fa6d447dd8f017f7e2dc81e7487f3412a8551075ca312e48db9c4c5e108",
"mmlu_professional_accounting": "e37d42330a5af8d569f0a9713de9c729bf3acad5b941d1a94d99367454bf1f5e",
"mmlu_virology": "ddac9a6463dfa4d91ade252fcca4b74d91d72a4d7b26dae24bd9e3fd69cc6ab1",
"mmlu_high_school_government_and_politics": "83f0261792e1d7045e66cbff5c00e9c3a515d509b5289edc8b86afd55bf5c040",
"mmlu_human_sexuality": "7604529311a8c33437ec37d29eee91d421a9d9076978761eff23632ad7e01e2d",
"mmlu_high_school_psychology": "c31c14be9ba52af0c00b299cd1a23e9c2bc6b58ad9bd1add9f0e7cd8c4b8f26e",
"mmlu_sociology": "dba3af859d4a1892e17fa154a7e28c8443a38df517518fe41ad5f477c59aafb5",
"mmlu_high_school_macroeconomics": "1347c24ea6e4de5497b8f15c93253c347014ac11e2673eab6bebee69ee3cd60b",
"mmlu_us_foreign_policy": "e9f167f26afe88fb4ed49f9220279bf0488b7f91635b9852fb57b78acea6830d",
"mmlu_high_school_geography": "5324a0d02e70d093d0205e24c6e9fdd08e70bae33d2bb8f7de23ad11a98de706",
"mmlu_public_relations": "42cede91b1bc0c4814d1489f3ee115fb4a4553e71e9bec3c786ffdf481016605",
"mmlu_professional_psychology": "b4d03640e1e416075995ad4e405b94f803abde50471e95a1b76af13d43423138",
"mmlu_high_school_microeconomics": "8c4f05dcc2d4cb5cb12d795a01721ac214435e2727c079828a1e181f9520c4e2",
"mmlu_security_studies": "67977d134979b89d013f2219feabde20d42a53c8b011e19883b82ff1adc53a53",
"mmlu_econometrics": "62edd95ee828a143df05736ad152a13aeb06e5ad72f806a26b82f2bc23b7b96e",
"mmlu_high_school_world_history": "ed0f7014f54490189a3314ece657db77d28c1d80d182d061d53e7dd5038bfa17",
"mmlu_high_school_european_history": "6d2776b2a93371215b91173033622c3ac6eecd62b344806259cc88e6a87af105",
"mmlu_high_school_us_history": "3f974bbd34dd5fd88eca6d39b3adcfba9a397892f8a361ab421550554bceced0",
"mmlu_logical_fallacies": "a1f7d58d172d3a3fe8725432d03bcc7e20beb3cad8d53b671298777d13a989b8",
"mmlu_moral_scenarios": "cc0ebef61f42135e2a01adfbda1487c34d90050f053e65392546e7dfdab4da70",
"mmlu_formal_logic": "d3d2b48bf6e87059cd113f7cbad53dc846191b9c7f46658f2fa83a772a8943f4",
"mmlu_moral_disputes": "47393c3796d5c0ca3c6cb26967667b5e2b8fdf16e82af39e15de44ad510af169",
"mmlu_prehistory": "5a23a5a7ca9bb1eba10d3efe09f5f9cf973c19344bce299a944288ea1ba257a4",
"mmlu_world_religions": "71ce37f2bfc410129589c84784ff6307ff34cb28fbc7f3472322166d71def5bf",
"mmlu_philosophy": "dcde538e417b322195cb862c260c735ae6908adaef15bfb03e23e9ca407797fe",
"mmlu_jurisprudence": "18267944042c67ccbc3951e9caf555e7fc470edb55380aea8267e6ec0932e56c",
"mmlu_international_law": "0cb13702f8813cd46e74859a47a1f380fa344240d4e7fd16811171f08f41ce08",
"mmlu_professional_law": "f43120983c735793b59ddf88207e1e0009f26e198b1efa8315c0f39138e2f7e4"
},
"model_source": "hf",
"model_name": "inceptionai/jais-adapted-70b-chat",
"model_name_sanitized": "inceptionai__jais-adapted-70b-chat",
"system_instruction": null,
"system_instruction_sha": null,
"fewshot_as_multiturn": false,
"chat_template": null,
"chat_template_sha": null,
"start_time": 361151.154868588,
"end_time": 364064.686803542,
"total_evaluation_time_seconds": "2913.531934953993"
}