{
  "results": {
    "mmlu": {
      "acc,none": 0.6462042444096282,
      "acc_stderr,none": 0.0038063070482910162,
      "alias": "mmlu"
    },
    "mmlu_humanities": {
      "acc,none": 0.5976620616365569,
      "acc_stderr,none": 0.006774327437175231,
      "alias": " - humanities"
    },
    "mmlu_formal_logic": {
      "alias": " - formal_logic",
      "acc,none": 0.5079365079365079,
      "acc_stderr,none": 0.044715725362943486
    },
    "mmlu_high_school_european_history": {
      "alias": " - high_school_european_history",
      "acc,none": 0.7696969696969697,
      "acc_stderr,none": 0.0328766675860349
    },
    "mmlu_high_school_us_history": {
      "alias": " - high_school_us_history",
      "acc,none": 0.8186274509803921,
      "acc_stderr,none": 0.02704462171947407
    },
    "mmlu_high_school_world_history": {
      "alias": " - high_school_world_history",
      "acc,none": 0.8354430379746836,
      "acc_stderr,none": 0.024135736240566946
    },
    "mmlu_international_law": {
      "alias": " - international_law",
      "acc,none": 0.7851239669421488,
      "acc_stderr,none": 0.03749492448709699
    },
    "mmlu_jurisprudence": {
      "alias": " - jurisprudence",
      "acc,none": 0.7592592592592593,
      "acc_stderr,none": 0.041331194402438376
    },
    "mmlu_logical_fallacies": {
      "alias": " - logical_fallacies",
      "acc,none": 0.7852760736196319,
      "acc_stderr,none": 0.03226219377286774
    },
    "mmlu_moral_disputes": {
      "alias": " - moral_disputes",
      "acc,none": 0.7225433526011561,
      "acc_stderr,none": 0.024105712607754307
    },
    "mmlu_moral_scenarios": {
      "alias": " - moral_scenarios",
      "acc,none": 0.4134078212290503,
      "acc_stderr,none": 0.016469814928406164
    },
    "mmlu_philosophy": {
      "alias": " - philosophy",
      "acc,none": 0.7041800643086816,
      "acc_stderr,none": 0.025922371788818788
    },
    "mmlu_prehistory": {
      "alias": " - prehistory",
      "acc,none": 0.7345679012345679,
      "acc_stderr,none": 0.02456922360046085
    },
    "mmlu_professional_law": {
      "alias": " - professional_law",
      "acc,none": 0.47783572359843546,
      "acc_stderr,none": 0.012757683047716177
    },
    "mmlu_world_religions": {
      "alias": " - world_religions",
      "acc,none": 0.8245614035087719,
      "acc_stderr,none": 0.029170885500727654
    },
    "mmlu_other": {
      "acc,none": 0.7129063405214033,
      "acc_stderr,none": 0.007791731325474898,
      "alias": " - other"
    },
    "mmlu_business_ethics": {
      "alias": " - business_ethics",
      "acc,none": 0.68,
      "acc_stderr,none": 0.04688261722621505
    },
    "mmlu_clinical_knowledge": {
      "alias": " - clinical_knowledge",
      "acc,none": 0.7433962264150943,
      "acc_stderr,none": 0.026880647889051968
    },
    "mmlu_college_medicine": {
      "alias": " - college_medicine",
      "acc,none": 0.6242774566473989,
      "acc_stderr,none": 0.036928207672648664
    },
    "mmlu_global_facts": {
      "alias": " - global_facts",
      "acc,none": 0.36,
      "acc_stderr,none": 0.048241815132442176
    },
    "mmlu_human_aging": {
      "alias": " - human_aging",
      "acc,none": 0.6995515695067265,
      "acc_stderr,none": 0.03076935200822914
    },
    "mmlu_management": {
      "alias": " - management",
      "acc,none": 0.8058252427184466,
      "acc_stderr,none": 0.03916667762822583
    },
    "mmlu_marketing": {
      "alias": " - marketing",
      "acc,none": 0.9145299145299145,
      "acc_stderr,none": 0.018315891685625828
    },
    "mmlu_medical_genetics": {
      "alias": " - medical_genetics",
      "acc,none": 0.75,
      "acc_stderr,none": 0.04351941398892446
    },
    "mmlu_miscellaneous": {
      "alias": " - miscellaneous",
      "acc,none": 0.8263090676883781,
      "acc_stderr,none": 0.013547415658662259
    },
    "mmlu_nutrition": {
      "alias": " - nutrition",
      "acc,none": 0.7156862745098039,
      "acc_stderr,none": 0.025829163272757468
    },
    "mmlu_professional_accounting": {
      "alias": " - professional_accounting",
      "acc,none": 0.5212765957446809,
      "acc_stderr,none": 0.029800481645628693
    },
    "mmlu_professional_medicine": {
      "alias": " - professional_medicine",
      "acc,none": 0.6580882352941176,
      "acc_stderr,none": 0.028814722422254174
    },
    "mmlu_virology": {
      "alias": " - virology",
      "acc,none": 0.5180722891566265,
      "acc_stderr,none": 0.038899512528272166
    },
    "mmlu_social_sciences": {
      "acc,none": 0.7595060123496913,
      "acc_stderr,none": 0.007537668422916037,
      "alias": " - social sciences"
    },
    "mmlu_econometrics": {
      "alias": " - econometrics",
      "acc,none": 0.5,
      "acc_stderr,none": 0.047036043419179864
    },
    "mmlu_high_school_geography": {
      "alias": " - high_school_geography",
      "acc,none": 0.803030303030303,
      "acc_stderr,none": 0.02833560973246336
    },
    "mmlu_high_school_government_and_politics": {
      "alias": " - high_school_government_and_politics",
      "acc,none": 0.8911917098445595,
      "acc_stderr,none": 0.02247325333276876
    },
    "mmlu_high_school_macroeconomics": {
      "alias": " - high_school_macroeconomics",
      "acc,none": 0.6487179487179487,
      "acc_stderr,none": 0.024203665177902803
    },
    "mmlu_high_school_microeconomics": {
      "alias": " - high_school_microeconomics",
      "acc,none": 0.7436974789915967,
      "acc_stderr,none": 0.02835962087053395
    },
    "mmlu_high_school_psychology": {
      "alias": " - high_school_psychology",
      "acc,none": 0.8440366972477065,
      "acc_stderr,none": 0.015555802713590144
    },
    "mmlu_human_sexuality": {
      "alias": " - human_sexuality",
      "acc,none": 0.7938931297709924,
      "acc_stderr,none": 0.03547771004159463
    },
    "mmlu_professional_psychology": {
      "alias": " - professional_psychology",
      "acc,none": 0.7026143790849673,
      "acc_stderr,none": 0.018492596536396955
    },
    "mmlu_public_relations": {
      "alias": " - public_relations",
      "acc,none": 0.7181818181818181,
      "acc_stderr,none": 0.04309118709946458
    },
    "mmlu_security_studies": {
      "alias": " - security_studies",
      "acc,none": 0.7510204081632653,
      "acc_stderr,none": 0.02768297952296023
    },
    "mmlu_sociology": {
      "alias": " - sociology",
      "acc,none": 0.8656716417910447,
      "acc_stderr,none": 0.024112678240900822
    },
    "mmlu_us_foreign_policy": {
      "alias": " - us_foreign_policy",
      "acc,none": 0.88,
      "acc_stderr,none": 0.03265986323710906
    },
    "mmlu_stem": {
      "acc,none": 0.5423406279733587,
      "acc_stderr,none": 0.008491791160159868,
      "alias": " - stem"
    },
    "mmlu_abstract_algebra": {
      "alias": " - abstract_algebra",
      "acc,none": 0.34,
      "acc_stderr,none": 0.04760952285695235
    },
    "mmlu_anatomy": {
      "alias": " - anatomy",
      "acc,none": 0.6444444444444445,
      "acc_stderr,none": 0.04135176749720385
    },
    "mmlu_astronomy": {
      "alias": " - astronomy",
      "acc,none": 0.7236842105263158,
      "acc_stderr,none": 0.03639057569952929
    },
    "mmlu_college_biology": {
      "alias": " - college_biology",
      "acc,none": 0.7708333333333334,
      "acc_stderr,none": 0.035146974678623884
    },
    "mmlu_college_chemistry": {
      "alias": " - college_chemistry",
      "acc,none": 0.48,
      "acc_stderr,none": 0.050211673156867795
    },
    "mmlu_college_computer_science": {
      "alias": " - college_computer_science",
      "acc,none": 0.5,
      "acc_stderr,none": 0.050251890762960605
    },
    "mmlu_college_mathematics": {
      "alias": " - college_mathematics",
      "acc,none": 0.37,
      "acc_stderr,none": 0.04852365870939099
    },
    "mmlu_college_physics": {
      "alias": " - college_physics",
      "acc,none": 0.4117647058823529,
      "acc_stderr,none": 0.048971049527263666
    },
    "mmlu_computer_security": {
      "alias": " - computer_security",
      "acc,none": 0.77,
      "acc_stderr,none": 0.042295258468165065
    },
    "mmlu_conceptual_physics": {
      "alias": " - conceptual_physics",
      "acc,none": 0.5702127659574469,
      "acc_stderr,none": 0.03236214467715564
    },
    "mmlu_electrical_engineering": {
      "alias": " - electrical_engineering",
      "acc,none": 0.6,
      "acc_stderr,none": 0.040824829046386284
    },
    "mmlu_elementary_mathematics": {
      "alias": " - elementary_mathematics",
      "acc,none": 0.455026455026455,
      "acc_stderr,none": 0.025646928361049398
    },
    "mmlu_high_school_biology": {
      "alias": " - high_school_biology",
      "acc,none": 0.7838709677419354,
      "acc_stderr,none": 0.023415293433568518
    },
    "mmlu_high_school_chemistry": {
      "alias": " - high_school_chemistry",
      "acc,none": 0.5221674876847291,
      "acc_stderr,none": 0.035145285621750094
    },
    "mmlu_high_school_computer_science": {
      "alias": " - high_school_computer_science",
      "acc,none": 0.68,
      "acc_stderr,none": 0.04688261722621505
    },
    "mmlu_high_school_mathematics": {
      "alias": " - high_school_mathematics",
      "acc,none": 0.32222222222222224,
      "acc_stderr,none": 0.028493465091028593
    },
    "mmlu_high_school_physics": {
      "alias": " - high_school_physics",
      "acc,none": 0.4105960264900662,
      "acc_stderr,none": 0.04016689594849928
    },
    "mmlu_high_school_statistics": {
      "alias": " - high_school_statistics",
      "acc,none": 0.5046296296296297,
      "acc_stderr,none": 0.03409825519163572
    },
    "mmlu_machine_learning": {
      "alias": " - machine_learning",
      "acc,none": 0.4107142857142857,
      "acc_stderr,none": 0.04669510663875191
    }
  },
"groups": {
|
||
|
|
"mmlu": {
|
||
|
|
"acc,none": 0.6462042444096282,
|
||
|
|
"acc_stderr,none": 0.0038063070482910162,
|
||
|
|
"alias": "mmlu"
|
||
|
|
},
|
||
|
|
"mmlu_humanities": {
|
||
|
|
"acc,none": 0.5976620616365569,
|
||
|
|
"acc_stderr,none": 0.006774327437175231,
|
||
|
|
"alias": " - humanities"
|
||
|
|
},
|
||
|
|
"mmlu_other": {
|
||
|
|
"acc,none": 0.7129063405214033,
|
||
|
|
"acc_stderr,none": 0.007791731325474898,
|
||
|
|
"alias": " - other"
|
||
|
|
},
|
||
|
|
"mmlu_social_sciences": {
|
||
|
|
"acc,none": 0.7595060123496913,
|
||
|
|
"acc_stderr,none": 0.007537668422916037,
|
||
|
|
"alias": " - social sciences"
|
||
|
|
},
|
||
|
|
"mmlu_stem": {
|
||
|
|
"acc,none": 0.5423406279733587,
|
||
|
|
"acc_stderr,none": 0.008491791160159868,
|
||
|
|
"alias": " - stem"
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"group_subtasks": {
|
||
|
|
"mmlu_humanities": [
|
||
|
|
"mmlu_logical_fallacies",
|
||
|
|
"mmlu_prehistory",
|
||
|
|
"mmlu_moral_disputes",
|
||
|
|
"mmlu_jurisprudence",
|
||
|
|
"mmlu_international_law",
|
||
|
|
"mmlu_world_religions",
|
||
|
|
"mmlu_formal_logic",
|
||
|
|
"mmlu_philosophy",
|
||
|
|
"mmlu_moral_scenarios",
|
||
|
|
"mmlu_high_school_world_history",
|
||
|
|
"mmlu_high_school_us_history",
|
||
|
|
"mmlu_professional_law",
|
||
|
|
"mmlu_high_school_european_history"
|
||
|
|
],
|
||
|
|
"mmlu_social_sciences": [
|
||
|
|
"mmlu_high_school_microeconomics",
|
||
|
|
"mmlu_human_sexuality",
|
||
|
|
"mmlu_professional_psychology",
|
||
|
|
"mmlu_sociology",
|
||
|
|
"mmlu_high_school_government_and_politics",
|
||
|
|
"mmlu_security_studies",
|
||
|
|
"mmlu_econometrics",
|
||
|
|
"mmlu_high_school_psychology",
|
||
|
|
"mmlu_high_school_geography",
|
||
|
|
"mmlu_public_relations",
|
||
|
|
"mmlu_us_foreign_policy",
|
||
|
|
"mmlu_high_school_macroeconomics"
|
||
|
|
],
|
||
|
|
"mmlu_other": [
|
||
|
|
"mmlu_clinical_knowledge",
|
||
|
|
"mmlu_medical_genetics",
|
||
|
|
"mmlu_professional_medicine",
|
||
|
|
"mmlu_miscellaneous",
|
||
|
|
"mmlu_management",
|
||
|
|
"mmlu_marketing",
|
||
|
|
"mmlu_business_ethics",
|
||
|
|
"mmlu_virology",
|
||
|
|
"mmlu_nutrition",
|
||
|
|
"mmlu_college_medicine",
|
||
|
|
"mmlu_professional_accounting",
|
||
|
|
"mmlu_human_aging",
|
||
|
|
"mmlu_global_facts"
|
||
|
|
],
|
||
|
|
"mmlu_stem": [
|
||
|
|
"mmlu_abstract_algebra",
|
||
|
|
"mmlu_college_biology",
|
||
|
|
"mmlu_high_school_biology",
|
||
|
|
"mmlu_electrical_engineering",
|
||
|
|
"mmlu_college_mathematics",
|
||
|
|
"mmlu_conceptual_physics",
|
||
|
|
"mmlu_high_school_physics",
|
||
|
|
"mmlu_anatomy",
|
||
|
|
"mmlu_high_school_mathematics",
|
||
|
|
"mmlu_high_school_chemistry",
|
||
|
|
"mmlu_computer_security",
|
||
|
|
"mmlu_college_computer_science",
|
||
|
|
"mmlu_astronomy",
|
||
|
|
"mmlu_elementary_mathematics",
|
||
|
|
"mmlu_high_school_statistics",
|
||
|
|
"mmlu_college_physics",
|
||
|
|
"mmlu_high_school_computer_science",
|
||
|
|
"mmlu_college_chemistry",
|
||
|
|
"mmlu_machine_learning"
|
||
|
|
],
|
||
|
|
"mmlu": [
|
||
|
|
"mmlu_stem",
|
||
|
|
"mmlu_other",
|
||
|
|
"mmlu_social_sciences",
|
||
|
|
"mmlu_humanities"
|
||
|
|
]
|
||
|
|
},
|
||
|
|
"configs": {
|
||
|
|
"mmlu_abstract_algebra": {
|
||
|
|
"task": "mmlu_abstract_algebra",
|
||
|
|
"task_alias": "abstract_algebra",
|
||
|
|
"tag": "mmlu_stem_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "abstract_algebra",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about abstract algebra.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_anatomy": {
|
||
|
|
"task": "mmlu_anatomy",
|
||
|
|
"task_alias": "anatomy",
|
||
|
|
"tag": "mmlu_stem_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "anatomy",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about anatomy.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_astronomy": {
|
||
|
|
"task": "mmlu_astronomy",
|
||
|
|
"task_alias": "astronomy",
|
||
|
|
"tag": "mmlu_stem_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "astronomy",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about astronomy.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_business_ethics": {
|
||
|
|
"task": "mmlu_business_ethics",
|
||
|
|
"task_alias": "business_ethics",
|
||
|
|
"tag": "mmlu_other_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "business_ethics",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about business ethics.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_clinical_knowledge": {
|
||
|
|
"task": "mmlu_clinical_knowledge",
|
||
|
|
"task_alias": "clinical_knowledge",
|
||
|
|
"tag": "mmlu_other_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "clinical_knowledge",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about clinical knowledge.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_college_biology": {
|
||
|
|
"task": "mmlu_college_biology",
|
||
|
|
"task_alias": "college_biology",
|
||
|
|
"tag": "mmlu_stem_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "college_biology",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about college biology.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_college_chemistry": {
|
||
|
|
"task": "mmlu_college_chemistry",
|
||
|
|
"task_alias": "college_chemistry",
|
||
|
|
"tag": "mmlu_stem_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "college_chemistry",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about college chemistry.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_college_computer_science": {
|
||
|
|
"task": "mmlu_college_computer_science",
|
||
|
|
"task_alias": "college_computer_science",
|
||
|
|
"tag": "mmlu_stem_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "college_computer_science",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about college computer science.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_college_mathematics": {
|
||
|
|
"task": "mmlu_college_mathematics",
|
||
|
|
"task_alias": "college_mathematics",
|
||
|
|
"tag": "mmlu_stem_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "college_mathematics",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about college mathematics.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_college_medicine": {
|
||
|
|
"task": "mmlu_college_medicine",
|
||
|
|
"task_alias": "college_medicine",
|
||
|
|
"tag": "mmlu_other_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "college_medicine",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about college medicine.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_college_physics": {
|
||
|
|
"task": "mmlu_college_physics",
|
||
|
|
"task_alias": "college_physics",
|
||
|
|
"tag": "mmlu_stem_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "college_physics",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about college physics.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_computer_security": {
|
||
|
|
"task": "mmlu_computer_security",
|
||
|
|
"task_alias": "computer_security",
|
||
|
|
"tag": "mmlu_stem_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "computer_security",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about computer security.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_conceptual_physics": {
|
||
|
|
"task": "mmlu_conceptual_physics",
|
||
|
|
"task_alias": "conceptual_physics",
|
||
|
|
"tag": "mmlu_stem_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "conceptual_physics",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about conceptual physics.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_econometrics": {
|
||
|
|
"task": "mmlu_econometrics",
|
||
|
|
"task_alias": "econometrics",
|
||
|
|
"tag": "mmlu_social_sciences_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "econometrics",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about econometrics.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_electrical_engineering": {
|
||
|
|
"task": "mmlu_electrical_engineering",
|
||
|
|
"task_alias": "electrical_engineering",
|
||
|
|
"tag": "mmlu_stem_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "electrical_engineering",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about electrical engineering.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_elementary_mathematics": {
|
||
|
|
"task": "mmlu_elementary_mathematics",
|
||
|
|
"task_alias": "elementary_mathematics",
|
||
|
|
"tag": "mmlu_stem_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "elementary_mathematics",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about elementary mathematics.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_formal_logic": {
|
||
|
|
"task": "mmlu_formal_logic",
|
||
|
|
"task_alias": "formal_logic",
|
||
|
|
"tag": "mmlu_humanities_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "formal_logic",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about formal logic.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_global_facts": {
|
||
|
|
"task": "mmlu_global_facts",
|
||
|
|
"task_alias": "global_facts",
|
||
|
|
"tag": "mmlu_other_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "global_facts",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about global facts.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_high_school_biology": {
|
||
|
|
"task": "mmlu_high_school_biology",
|
||
|
|
"task_alias": "high_school_biology",
|
||
|
|
"tag": "mmlu_stem_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "high_school_biology",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about high school biology.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_high_school_chemistry": {
|
||
|
|
"task": "mmlu_high_school_chemistry",
|
||
|
|
"task_alias": "high_school_chemistry",
|
||
|
|
"tag": "mmlu_stem_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "high_school_chemistry",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about high school chemistry.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_high_school_computer_science": {
|
||
|
|
"task": "mmlu_high_school_computer_science",
|
||
|
|
"task_alias": "high_school_computer_science",
|
||
|
|
"tag": "mmlu_stem_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "high_school_computer_science",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about high school computer science.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_high_school_european_history": {
|
||
|
|
"task": "mmlu_high_school_european_history",
|
||
|
|
"task_alias": "high_school_european_history",
|
||
|
|
"tag": "mmlu_humanities_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "high_school_european_history",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about high school european history.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_high_school_geography": {
|
||
|
|
"task": "mmlu_high_school_geography",
|
||
|
|
"task_alias": "high_school_geography",
|
||
|
|
"tag": "mmlu_social_sciences_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "high_school_geography",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about high school geography.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_high_school_government_and_politics": {
|
||
|
|
"task": "mmlu_high_school_government_and_politics",
|
||
|
|
"task_alias": "high_school_government_and_politics",
|
||
|
|
"tag": "mmlu_social_sciences_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "high_school_government_and_politics",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about high school government and politics.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_high_school_macroeconomics": {
|
||
|
|
"task": "mmlu_high_school_macroeconomics",
|
||
|
|
"task_alias": "high_school_macroeconomics",
|
||
|
|
"tag": "mmlu_social_sciences_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "high_school_macroeconomics",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about high school macroeconomics.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_high_school_mathematics": {
|
||
|
|
"task": "mmlu_high_school_mathematics",
|
||
|
|
"task_alias": "high_school_mathematics",
|
||
|
|
"tag": "mmlu_stem_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "high_school_mathematics",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about high school mathematics.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_high_school_microeconomics": {
|
||
|
|
"task": "mmlu_high_school_microeconomics",
|
||
|
|
"task_alias": "high_school_microeconomics",
|
||
|
|
"tag": "mmlu_social_sciences_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "high_school_microeconomics",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about high school microeconomics.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_high_school_physics": {
|
||
|
|
"task": "mmlu_high_school_physics",
|
||
|
|
"task_alias": "high_school_physics",
|
||
|
|
"tag": "mmlu_stem_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "high_school_physics",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about high school physics.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_high_school_psychology": {
|
||
|
|
"task": "mmlu_high_school_psychology",
|
||
|
|
"task_alias": "high_school_psychology",
|
||
|
|
"tag": "mmlu_social_sciences_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "high_school_psychology",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about high school psychology.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_high_school_statistics": {
|
||
|
|
"task": "mmlu_high_school_statistics",
|
||
|
|
"task_alias": "high_school_statistics",
|
||
|
|
"tag": "mmlu_stem_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "high_school_statistics",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
|
||
|
|
"doc_to_choice": [
|
||
|
|
"A",
|
||
|
|
"B",
|
||
|
|
"C",
|
||
|
|
"D"
|
||
|
|
],
|
||
|
|
"description": "The following are multiple choice questions (with answers) about high school statistics.\n\n",
|
||
|
|
"target_delimiter": " ",
|
||
|
|
"fewshot_delimiter": "\n\n",
|
||
|
|
"fewshot_config": {
|
||
|
|
"sampler": "first_n"
|
||
|
|
},
|
||
|
|
"num_fewshot": 0,
|
||
|
|
"metric_list": [
|
||
|
|
{
|
||
|
|
"metric": "acc",
|
||
|
|
"aggregation": "mean",
|
||
|
|
"higher_is_better": true
|
||
|
|
}
|
||
|
|
],
|
||
|
|
"output_type": "multiple_choice",
|
||
|
|
"repeats": 1,
|
||
|
|
"should_decontaminate": false,
|
||
|
|
"metadata": {
|
||
|
|
"version": 1.0
|
||
|
|
}
|
||
|
|
},
|
||
|
|
"mmlu_high_school_us_history": {
|
||
|
|
"task": "mmlu_high_school_us_history",
|
||
|
|
"task_alias": "high_school_us_history",
|
||
|
|
"tag": "mmlu_humanities_tasks",
|
||
|
|
"dataset_path": "hails/mmlu_no_train",
|
||
|
|
"dataset_name": "high_school_us_history",
|
||
|
|
"dataset_kwargs": {
|
||
|
|
"trust_remote_code": true
|
||
|
|
},
|
||
|
|
"test_split": "test",
|
||
|
|
"fewshot_split": "dev",
|
||
|
|
"doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
|
||
|
|
"doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about high school us history.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_high_school_world_history": {
            "task": "mmlu_high_school_world_history",
            "task_alias": "high_school_world_history",
            "tag": "mmlu_humanities_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "high_school_world_history",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about high school world history.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_human_aging": {
            "task": "mmlu_human_aging",
            "task_alias": "human_aging",
            "tag": "mmlu_other_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "human_aging",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about human aging.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_human_sexuality": {
            "task": "mmlu_human_sexuality",
            "task_alias": "human_sexuality",
            "tag": "mmlu_social_sciences_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "human_sexuality",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about human sexuality.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_international_law": {
            "task": "mmlu_international_law",
            "task_alias": "international_law",
            "tag": "mmlu_humanities_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "international_law",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about international law.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_jurisprudence": {
            "task": "mmlu_jurisprudence",
            "task_alias": "jurisprudence",
            "tag": "mmlu_humanities_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "jurisprudence",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about jurisprudence.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_logical_fallacies": {
            "task": "mmlu_logical_fallacies",
            "task_alias": "logical_fallacies",
            "tag": "mmlu_humanities_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "logical_fallacies",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about logical fallacies.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_machine_learning": {
            "task": "mmlu_machine_learning",
            "task_alias": "machine_learning",
            "tag": "mmlu_stem_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "machine_learning",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about machine learning.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_management": {
            "task": "mmlu_management",
            "task_alias": "management",
            "tag": "mmlu_other_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "management",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about management.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_marketing": {
            "task": "mmlu_marketing",
            "task_alias": "marketing",
            "tag": "mmlu_other_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "marketing",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about marketing.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_medical_genetics": {
            "task": "mmlu_medical_genetics",
            "task_alias": "medical_genetics",
            "tag": "mmlu_other_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "medical_genetics",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about medical genetics.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_miscellaneous": {
            "task": "mmlu_miscellaneous",
            "task_alias": "miscellaneous",
            "tag": "mmlu_other_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "miscellaneous",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about miscellaneous.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_moral_disputes": {
            "task": "mmlu_moral_disputes",
            "task_alias": "moral_disputes",
            "tag": "mmlu_humanities_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "moral_disputes",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about moral disputes.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_moral_scenarios": {
            "task": "mmlu_moral_scenarios",
            "task_alias": "moral_scenarios",
            "tag": "mmlu_humanities_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "moral_scenarios",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about moral scenarios.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_nutrition": {
            "task": "mmlu_nutrition",
            "task_alias": "nutrition",
            "tag": "mmlu_other_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "nutrition",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about nutrition.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_philosophy": {
            "task": "mmlu_philosophy",
            "task_alias": "philosophy",
            "tag": "mmlu_humanities_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "philosophy",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about philosophy.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_prehistory": {
            "task": "mmlu_prehistory",
            "task_alias": "prehistory",
            "tag": "mmlu_humanities_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "prehistory",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about prehistory.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_professional_accounting": {
            "task": "mmlu_professional_accounting",
            "task_alias": "professional_accounting",
            "tag": "mmlu_other_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "professional_accounting",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about professional accounting.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_professional_law": {
            "task": "mmlu_professional_law",
            "task_alias": "professional_law",
            "tag": "mmlu_humanities_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "professional_law",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about professional law.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_professional_medicine": {
            "task": "mmlu_professional_medicine",
            "task_alias": "professional_medicine",
            "tag": "mmlu_other_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "professional_medicine",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about professional medicine.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_professional_psychology": {
            "task": "mmlu_professional_psychology",
            "task_alias": "professional_psychology",
            "tag": "mmlu_social_sciences_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "professional_psychology",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about professional psychology.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_public_relations": {
            "task": "mmlu_public_relations",
            "task_alias": "public_relations",
            "tag": "mmlu_social_sciences_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "public_relations",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about public relations.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_security_studies": {
            "task": "mmlu_security_studies",
            "task_alias": "security_studies",
            "tag": "mmlu_social_sciences_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "security_studies",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about security studies.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_sociology": {
            "task": "mmlu_sociology",
            "task_alias": "sociology",
            "tag": "mmlu_social_sciences_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "sociology",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about sociology.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_us_foreign_policy": {
            "task": "mmlu_us_foreign_policy",
            "task_alias": "us_foreign_policy",
            "tag": "mmlu_social_sciences_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "us_foreign_policy",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about us foreign policy.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_virology": {
            "task": "mmlu_virology",
            "task_alias": "virology",
            "tag": "mmlu_other_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "virology",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about virology.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        },
        "mmlu_world_religions": {
            "task": "mmlu_world_religions",
            "task_alias": "world_religions",
            "tag": "mmlu_humanities_tasks",
            "dataset_path": "hails/mmlu_no_train",
            "dataset_name": "world_religions",
            "dataset_kwargs": { "trust_remote_code": true },
            "test_split": "test",
            "fewshot_split": "dev",
            "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
            "doc_to_target": "answer",
            "doc_to_choice": ["A", "B", "C", "D"],
            "description": "The following are multiple choice questions (with answers) about world religions.\n\n",
            "target_delimiter": " ",
            "fewshot_delimiter": "\n\n",
            "fewshot_config": { "sampler": "first_n" },
            "num_fewshot": 0,
            "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ],
            "output_type": "multiple_choice",
            "repeats": 1,
            "should_decontaminate": false,
            "metadata": { "version": 1.0 }
        }
    },
    "versions": {
        "mmlu": 2,
        "mmlu_abstract_algebra": 1.0,
        "mmlu_anatomy": 1.0,
        "mmlu_astronomy": 1.0,
        "mmlu_business_ethics": 1.0,
        "mmlu_clinical_knowledge": 1.0,
        "mmlu_college_biology": 1.0,
        "mmlu_college_chemistry": 1.0,
        "mmlu_college_computer_science": 1.0,
        "mmlu_college_mathematics": 1.0,
        "mmlu_college_medicine": 1.0,
        "mmlu_college_physics": 1.0,
        "mmlu_computer_security": 1.0,
        "mmlu_conceptual_physics": 1.0,
        "mmlu_econometrics": 1.0,
        "mmlu_electrical_engineering": 1.0,
        "mmlu_elementary_mathematics": 1.0,
        "mmlu_formal_logic": 1.0,
        "mmlu_global_facts": 1.0,
        "mmlu_high_school_biology": 1.0,
        "mmlu_high_school_chemistry": 1.0,
        "mmlu_high_school_computer_science": 1.0,
        "mmlu_high_school_european_history": 1.0,
        "mmlu_high_school_geography": 1.0,
        "mmlu_high_school_government_and_politics": 1.0,
        "mmlu_high_school_macroeconomics": 1.0,
        "mmlu_high_school_mathematics": 1.0,
        "mmlu_high_school_microeconomics": 1.0,
        "mmlu_high_school_physics": 1.0,
        "mmlu_high_school_psychology": 1.0,
        "mmlu_high_school_statistics": 1.0,
        "mmlu_high_school_us_history": 1.0,
        "mmlu_high_school_world_history": 1.0,
        "mmlu_human_aging": 1.0,
        "mmlu_human_sexuality": 1.0,
        "mmlu_humanities": 2,
        "mmlu_international_law": 1.0,
        "mmlu_jurisprudence": 1.0,
        "mmlu_logical_fallacies": 1.0,
        "mmlu_machine_learning": 1.0,
        "mmlu_management": 1.0,
        "mmlu_marketing": 1.0,
        "mmlu_medical_genetics": 1.0,
        "mmlu_miscellaneous": 1.0,
        "mmlu_moral_disputes": 1.0,
        "mmlu_moral_scenarios": 1.0,
        "mmlu_nutrition": 1.0,
        "mmlu_other": 2,
        "mmlu_philosophy": 1.0,
        "mmlu_prehistory": 1.0,
        "mmlu_professional_accounting": 1.0,
        "mmlu_professional_law": 1.0,
        "mmlu_professional_medicine": 1.0,
        "mmlu_professional_psychology": 1.0,
        "mmlu_public_relations": 1.0,
        "mmlu_security_studies": 1.0,
        "mmlu_social_sciences": 2,
        "mmlu_sociology": 1.0,
        "mmlu_stem": 2,
        "mmlu_us_foreign_policy": 1.0,
        "mmlu_virology": 1.0,
        "mmlu_world_religions": 1.0
    },
    "n-shot": {
        "mmlu_abstract_algebra": 0,
        "mmlu_anatomy": 0,
        "mmlu_astronomy": 0,
        "mmlu_business_ethics": 0,
        "mmlu_clinical_knowledge": 0,
        "mmlu_college_biology": 0,
        "mmlu_college_chemistry": 0,
        "mmlu_college_computer_science": 0,
        "mmlu_college_mathematics": 0,
        "mmlu_college_medicine": 0,
        "mmlu_college_physics": 0,
        "mmlu_computer_security": 0,
        "mmlu_conceptual_physics": 0,
        "mmlu_econometrics": 0,
        "mmlu_electrical_engineering": 0,
        "mmlu_elementary_mathematics": 0,
        "mmlu_formal_logic": 0,
        "mmlu_global_facts": 0,
        "mmlu_high_school_biology": 0,
        "mmlu_high_school_chemistry": 0,
        "mmlu_high_school_computer_science": 0,
        "mmlu_high_school_european_history": 0,
        "mmlu_high_school_geography": 0,
        "mmlu_high_school_government_and_politics": 0,
        "mmlu_high_school_macroeconomics": 0,
        "mmlu_high_school_mathematics": 0,
        "mmlu_high_school_microeconomics": 0,
        "mmlu_high_school_physics": 0,
        "mmlu_high_school_psychology": 0,
        "mmlu_high_school_statistics": 0,
        "mmlu_high_school_us_history": 0,
        "mmlu_high_school_world_history": 0,
        "mmlu_human_aging": 0,
        "mmlu_human_sexuality": 0,
        "mmlu_international_law": 0,
        "mmlu_jurisprudence": 0,
        "mmlu_logical_fallacies": 0,
        "mmlu_machine_learning": 0,
        "mmlu_management": 0,
        "mmlu_marketing": 0,
        "mmlu_medical_genetics": 0,
        "mmlu_miscellaneous": 0,
        "mmlu_moral_disputes": 0,
        "mmlu_moral_scenarios": 0,
        "mmlu_nutrition": 0,
        "mmlu_philosophy": 0,
        "mmlu_prehistory": 0,
        "mmlu_professional_accounting": 0,
        "mmlu_professional_law": 0,
        "mmlu_professional_medicine": 0,
        "mmlu_professional_psychology": 0,
        "mmlu_public_relations": 0,
        "mmlu_security_studies": 0,
        "mmlu_sociology": 0,
        "mmlu_us_foreign_policy": 0,
        "mmlu_virology": 0,
        "mmlu_world_religions": 0
    },
    "higher_is_better": {
        "mmlu": { "acc": true },
        "mmlu_abstract_algebra": { "acc": true },
        "mmlu_anatomy": { "acc": true },
        "mmlu_astronomy": { "acc": true },
        "mmlu_business_ethics": { "acc": true },
        "mmlu_clinical_knowledge": { "acc": true },
        "mmlu_college_biology": { "acc": true },
        "mmlu_college_chemistry": { "acc": true },
        "mmlu_college_computer_science": { "acc": true },
        "mmlu_college_mathematics": { "acc": true },
        "mmlu_college_medicine": { "acc": true },
        "mmlu_college_physics": { "acc": true },
        "mmlu_computer_security": { "acc": true },
        "mmlu_conceptual_physics": { "acc": true },
        "mmlu_econometrics": { "acc": true },
        "mmlu_electrical_engineering": { "acc": true },
        "mmlu_elementary_mathematics": { "acc": true },
        "mmlu_formal_logic": { "acc": true },
        "mmlu_global_facts": { "acc": true },
        "mmlu_high_school_biology": { "acc": true },
        "mmlu_high_school_chemistry": { "acc": true },
        "mmlu_high_school_computer_science": { "acc": true },
        "mmlu_high_school_european_history": { "acc": true },
        "mmlu_high_school_geography": { "acc": true },
        "mmlu_high_school_government_and_politics": { "acc": true },
        "mmlu_high_school_macroeconomics": { "acc": true },
        "mmlu_high_school_mathematics": { "acc": true },
        "mmlu_high_school_microeconomics": { "acc": true },
        "mmlu_high_school_physics": { "acc": true },
        "mmlu_high_school_psychology": { "acc": true },
        "mmlu_high_school_statistics": { "acc": true },
        "mmlu_high_school_us_history": { "acc": true },
        "mmlu_high_school_world_history": { "acc": true },
        "mmlu_human_aging": { "acc": true },
        "mmlu_human_sexuality": { "acc": true },
        "mmlu_humanities": { "acc": true },
        "mmlu_international_law": { "acc": true },
        "mmlu_jurisprudence": { "acc": true },
        "mmlu_logical_fallacies": { "acc": true },
        "mmlu_machine_learning": { "acc": true },
        "mmlu_management": { "acc": true },
        "mmlu_marketing": { "acc": true },
        "mmlu_medical_genetics": { "acc": true },
        "mmlu_miscellaneous": { "acc": true },
        "mmlu_moral_disputes": { "acc": true },
        "mmlu_moral_scenarios": { "acc": true },
        "mmlu_nutrition": { "acc": true },
        "mmlu_other": { "acc": true },
        "mmlu_philosophy": { "acc": true },
        "mmlu_prehistory": { "acc": true },
        "mmlu_professional_accounting": { "acc": true },
        "mmlu_professional_law": { "acc": true },
        "mmlu_professional_medicine": { "acc": true },
        "mmlu_professional_psychology": { "acc": true },
        "mmlu_public_relations": { "acc": true },
        "mmlu_security_studies": { "acc": true },
        "mmlu_social_sciences": { "acc": true },
        "mmlu_sociology": { "acc": true },
        "mmlu_stem": { "acc": true },
        "mmlu_us_foreign_policy": { "acc": true },
        "mmlu_virology": { "acc": true },
        "mmlu_world_religions": { "acc": true }
    },
    "n-samples": {
        "mmlu_abstract_algebra": { "original": 100, "effective": 100 },
        "mmlu_college_biology": { "original": 144, "effective": 144 },
        "mmlu_high_school_biology": { "original": 310, "effective": 310 },
        "mmlu_electrical_engineering": { "original": 145, "effective": 145 },
        "mmlu_college_mathematics": { "original": 100, "effective": 100 },
        "mmlu_conceptual_physics": { "original": 235, "effective": 235 },
        "mmlu_high_school_physics": { "original": 151, "effective": 151 },
        "mmlu_anatomy": { "original": 135, "effective": 135 },
        "mmlu_high_school_mathematics": { "original": 270, "effective": 270 },
        "mmlu_high_school_chemistry": { "original": 203, "effective": 203 },
        "mmlu_computer_security": { "original": 100, "effective": 100 },
        "mmlu_college_computer_science": { "original": 100, "effective": 100 },
        "mmlu_astronomy": { "original": 152, "effective": 152 },
        "mmlu_elementary_mathematics": { "original": 378, "effective": 378 },
        "mmlu_high_school_statistics": { "original": 216, "effective": 216 },
        "mmlu_college_physics": { "original": 102, "effective": 102 },
        "mmlu_high_school_computer_science": { "original": 100, "effective": 100 },
        "mmlu_college_chemistry": { "original": 100, "effective": 100 },
        "mmlu_machine_learning": { "original": 112, "effective": 112 },
        "mmlu_clinical_knowledge": { "original": 265, "effective": 265 },
        "mmlu_medical_genetics": { "original": 100, "effective": 100 },
        "mmlu_professional_medicine": { "original": 272, "effective": 272 },
        "mmlu_miscellaneous": { "original": 783, "effective": 783 },
        "mmlu_management": { "original": 103, "effective": 103 },
        "mmlu_marketing": { "original": 234, "effective": 234 },
        "mmlu_business_ethics": { "original": 100, "effective": 100 },
        "mmlu_virology": { "original": 166, "effective": 166 },
        "mmlu_nutrition": { "original": 306, "effective": 306 },
        "mmlu_college_medicine": { "original": 173, "effective": 173 },
        "mmlu_professional_accounting": { "original": 282, "effective": 282 },
        "mmlu_human_aging": { "original": 223, "effective": 223 },
        "mmlu_global_facts": { "original": 100, "effective": 100 },
        "mmlu_high_school_microeconomics": { "original": 238, "effective": 238 },
        "mmlu_human_sexuality": { "original": 131, "effective": 131 },
        "mmlu_professional_psychology": { "original": 612, "effective": 612 },
        "mmlu_sociology": { "original": 201, "effective": 201 },
        "mmlu_high_school_government_and_politics": { "original": 193, "effective": 193 },
        "mmlu_security_studies": { "original": 245, "effective": 245 },
        "mmlu_econometrics": { "original": 114, "effective": 114 },
        "mmlu_high_school_psychology": { "original": 545, "effective": 545 },
        "mmlu_high_school_geography": { "original": 198, "effective": 198 },
        "mmlu_public_relations": { "original": 110, "effective": 110 },
        "mmlu_us_foreign_policy": { "original": 100, "effective": 100 },
        "mmlu_high_school_macroeconomics": { "original": 390, "effective": 390 },
        "mmlu_logical_fallacies": { "original": 163, "effective": 163 },
        "mmlu_prehistory": { "original": 324, "effective": 324 },
        "mmlu_moral_disputes": { "original": 346, "effective": 346 },
        "mmlu_jurisprudence": { "original": 108, "effective": 108 },
        "mmlu_international_law": { "original": 121, "effective": 121 },
        "mmlu_world_religions": { "original": 171, "effective": 171 },
        "mmlu_formal_logic": { "original": 126, "effective": 126 },
        "mmlu_philosophy": { "original": 311, "effective": 311 },
        "mmlu_moral_scenarios": { "original": 895, "effective": 895 },
        "mmlu_high_school_world_history": { "original": 237, "effective": 237 },
        "mmlu_high_school_us_history": { "original": 204, "effective": 204 },
        "mmlu_professional_law": { "original": 1534, "effective": 1534 },
        "mmlu_high_school_european_history": { "original": 165, "effective": 165 }
    },
    "config": {
        "model": "vllm",
        "model_args": "pretrained=FreedomIntelligence/AceGPT-v2-8B-Chat,tensor_parallel_size=1,data_parallel_size=2,gpu_memory_utilization=0.4,download_dir=/tmp",
        "batch_size": 1,
        "batch_sizes": [],
        "device": null,
        "use_cache": null,
        "limit": null,
        "bootstrap_iters": 100000,
        "gen_kwargs": null,
        "random_seed": 0,
        "numpy_seed": 1234,
        "torch_seed": 1234,
        "fewshot_seed": 1234
    },
    "git_hash": "8e1bd48d",
    "date": 1735753135.2200181,
"pretty_env_info": "PyTorch version: 2.4.0+cu121\nIs debug build: False\nCUDA used to build PyTorch: 12.1\nROCM used to build PyTorch: N/A\n\nOS: Ubuntu 22.04.3 LTS (x86_64)\nGCC version: (Ubuntu 11.4.0-1ubuntu1~22.04) 11.4.0\nClang version: Could not collect\nCMake version: version 3.27.1\nLibc version: glibc-2.35\n\nPython version: 3.10.12 (main, Jun 11 2023, 05:26:28) [GCC 11.4.0] (64-bit runtime)\nPython platform: Linux-5.15.0-1064-azure-x86_64-with-glibc2.35\nIs CUDA available: True\nCUDA runtime version: 12.2.128\nCUDA_MODULE_LOADING set to: LAZY\nGPU models and configuration: \nGPU 0: NVIDIA A100 80GB PCIe\nGPU 1: NVIDIA A100 80GB PCIe\n\nNvidia driver version: 535.161.08\ncuDNN version: Probably one of the following:\n/usr/lib/x86_64-linux-gnu/libcudnn.so.8.9.4\n/usr/lib/x86_64-linux-gnu/libcudnn_adv_infer.so.8.9.4\n/usr/lib/x86_64-linux-gnu/libcudnn_adv_train.so.8.9.4\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn_infer.so.8.9.4\n/usr/lib/x86_64-linux-gnu/libcudnn_cnn_train.so.8.9.4\n/usr/lib/x86_64-linux-gnu/libcudnn_ops_infer.so.8.9.4\n/usr/lib/x86_64-linux-gnu/libcudnn_ops_train.so.8.9.4\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nAddress sizes: 48 bits physical, 48 bits virtual\nByte Order: Little Endian\nCPU(s): 48\nOn-line CPU(s) list: 0-47\nVendor ID: AuthenticAMD\nModel name: AMD EPYC 7V13 64-Core Processor\nCPU family: 25\nModel: 1\nThread(s) per core: 1\nCore(s) per socket: 48\nSocket(s): 1\nStepping: 1\nBogoMIPS: 4890.89\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl tsc_reliable nonstop_tsc cpuid extd_apicid aperfmperf pni pclmulqdq ssse3 fma cx16 pcid sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw topoext perfctr_core invpcid_single vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid rdseed adx smap clflushopt clwb sha_ni xsaveopt xsavec xgetbv1 xsaves clzero xsaveerptr rdpru arat umip vaes vpclmulqdq rdpid fsrm\nHypervisor vendor: Microsoft\nVirtualization type: full\nL1d cache: 1.5 MiB (48 instances)\nL1i cache: 1.5 MiB (48 instances)\nL2 cache: 24 MiB (48 instances)\nL3 cache: 192 MiB (6 instances)\nNUMA node(s): 2\nNUMA node0 CPU(s): 0-23\nNUMA node1 CPU(s): 24-47\nVulnerability Gather data sampling: Not affected\nVulnerability Itlb multihit: Not affected\nVulnerability L1tf: Not affected\nVulnerability Mds: Not affected\nVulnerability Meltdown: Not affected\nVulnerability Mmio stale data: Not affected\nVulnerability Retbleed: Not affected\nVulnerability Spec rstack overflow: Mitigation; safe RET, no microcode\nVulnerability Spec store bypass: Vulnerable\nVulnerability Spectre v1: Mitigation; usercopy/swapgs barriers and __user pointer sanitization\nVulnerability Spectre v2: Mitigation; Retpolines; STIBP disabled; RSB filling; PBRSB-eIBRS Not affected; BHI Not affected\nVulnerability Srbds: Not affected\nVulnerability Tsx async abort: Not affected\n\nVersions of relevant libraries:\n[pip3] numpy==1.26.4\n[pip3] onnx==1.14.0\n[pip3] pytorch-lightning==2.0.7\n[pip3] pytorch-quantization==2.1.2\n[pip3] torch==2.4.0\n[pip3] torch-tensorrt==2.0.0.dev0\n[pip3] torchaudio==2.1.0\n[pip3] torchdata==0.7.0a0\n[pip3] torchmetrics==1.2.
    "transformers_version": "4.47.1",
    "upper_git_hash": "f64fe2f2a86055aaecced603b56097fd79201711",
    "tokenizer_pad_token": ["<|end_of_text|>", "128001"],
    "tokenizer_eos_token": ["<|end_of_text|>", "128001"],
    "tokenizer_bos_token": ["<|begin_of_text|>", "128000"],
    "eot_token_id": 128001,
    "max_length": 8192,
    "task_hashes": {},
    "model_source": "vllm",
    "model_name": "FreedomIntelligence/AceGPT-v2-8B-Chat",
    "model_name_sanitized": "FreedomIntelligence__AceGPT-v2-8B-Chat",
    "system_instruction": null,
    "system_instruction_sha": null,
    "fewshot_as_multiturn": false,
    "chat_template": null,
    "chat_template_sha": null,
    "start_time": 13420.581787327,
    "end_time": 13936.337741695,
    "total_evaluation_time_seconds": "515.755954368"
}