Initialize the project; model provided by the ModelHub XC community
Model: BEE-spoke-data/smol_llama-220M-GQA Source: Original Platform
3996
evals/LOGS-smol_llama-220M-GQA.md
Normal file
File diff suppressed because one or more lines are too long
56
evals/json_object_1.json
Normal file
@@ -0,0 +1,56 @@
{
  "results": {
    "arc_easy": {
      "acc": 0.43813131313131315,
      "acc_stderr": 0.010180937100600052,
      "acc_norm": 0.4019360269360269,
      "acc_norm_stderr": 0.010060521220920566
    },
    "boolq": {
      "acc": 0.617737003058104,
      "acc_stderr": 0.00849914969044927
    },
    "lambada_openai": {
      "ppl": 64.94966274873535,
      "ppl_stderr": 2.5466406639926897,
      "acc": 0.26470017465554047,
      "acc_stderr": 0.006146408462993569
    },
    "openbookqa": {
      "acc": 0.166,
      "acc_stderr": 0.016656616876531142,
      "acc_norm": 0.28,
      "acc_norm_stderr": 0.020099950647503237
    },
    "piqa": {
      "acc": 0.5973884657236126,
      "acc_stderr": 0.011442395233488702,
      "acc_norm": 0.6088139281828074,
      "acc_norm_stderr": 0.0113862156067287
    },
    "winogrande": {
      "acc": 0.5098658247829518,
      "acc_stderr": 0.014049749833367589
    }
  },
  "versions": {
    "arc_easy": 0,
    "boolq": 1,
    "lambada_openai": 0,
    "openbookqa": 0,
    "piqa": 0,
    "winogrande": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=BEE-spoke-data/smol_llama-220M-GQA,revision=main,trust_remote_code=True,dtype='bfloat16'",
    "num_fewshot": 0,
    "batch_size": "8",
    "batch_sizes": [],
    "device": "cuda",
    "no_cache": false,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
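All of the eval files in this commit share the same lm-evaluation-harness output schema: a "results" map from task name to metrics, a "versions" map from task name to task version, and the "config" the run was launched with. As a minimal sketch (assuming it is run from the repository root so the committed relative path resolves), the following loads the zero-shot file above and prints each task's accuracy with its standard error:

import json

# Load one of the committed result files (the zero-shot run above).
with open("evals/json_object_1.json") as f:
    run = json.load(f)

# Every task entry carries "acc"/"acc_stderr"; some also report "acc_norm" or "ppl".
for task, metrics in run["results"].items():
    if "acc" in metrics:
        print(f'{task:20s} acc={metrics["acc"]:.4f} +/- {metrics["acc_stderr"]:.4f}')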
25
evals/json_object_2.json
Normal file
@@ -0,0 +1,25 @@
{
  "results": {
    "arc_challenge": {
      "acc": 0.20392491467576793,
      "acc_stderr": 0.01177426247870226,
      "acc_norm": 0.25,
      "acc_norm_stderr": 0.012653835621466646
    }
  },
  "versions": {
    "arc_challenge": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=BEE-spoke-data/smol_llama-220M-GQA,revision=main,trust_remote_code=True,dtype='bfloat16'",
    "num_fewshot": 25,
    "batch_size": "8",
    "batch_sizes": [],
    "device": "cuda",
    "no_cache": false,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
25
evals/json_object_3.json
Normal file
@@ -0,0 +1,25 @@
{
  "results": {
    "hellaswag": {
      "acc": 0.2752988047808765,
      "acc_stderr": 0.008917257773359156,
      "acc_norm": 0.2968127490039841,
      "acc_norm_stderr": 0.009120663626901691
    }
  },
  "versions": {
    "hellaswag": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=BEE-spoke-data/smol_llama-220M-GQA,revision=main,trust_remote_code=True,dtype='bfloat16'",
    "num_fewshot": 10,
    "batch_size": "8",
    "batch_sizes": [],
    "device": "cuda",
    "no_cache": false,
    "limit": 0.25,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
25
evals/json_object_4.json
Normal file
@@ -0,0 +1,25 @@
{
  "results": {
    "truthfulqa_mc": {
      "mc1": 0.23745410036719705,
      "mc1_stderr": 0.014896277441041836,
      "mc2": 0.4402813457518687,
      "mc2_stderr": 0.015339681556915718
    }
  },
  "versions": {
    "truthfulqa_mc": 1
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=BEE-spoke-data/smol_llama-220M-GQA,revision=main,trust_remote_code=True,dtype='bfloat16'",
    "num_fewshot": 0,
    "batch_size": "8",
    "batch_sizes": [],
    "device": "cuda",
    "no_cache": false,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
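Because each run was saved to its own file, a combined view has to be stitched together from the separate JSON objects. A minimal sketch, assuming the files keep the evals/json_object_N.json names used in this commit and that no task name appears in more than one file:

import glob
import json

# Merge the per-run result files committed under evals/ into a single task -> metrics map,
# tagging each task with the few-shot setting of the run it came from.
combined = {}
for path in sorted(glob.glob("evals/json_object_*.json")):
    with open(path) as f:
        run = json.load(f)
    for task, metrics in run["results"].items():
        combined[task] = {"num_fewshot": run["config"]["num_fewshot"], **metrics}

for task in sorted(combined):
    print(task, combined[task])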
417
evals/json_object_5.json
Normal file
@@ -0,0 +1,417 @@
{
  "results": {
    "hendrycksTest-abstract_algebra": {
      "acc": 0.2,
      "acc_stderr": 0.08164965809277261,
      "acc_norm": 0.2,
      "acc_norm_stderr": 0.08164965809277261
    },
    "hendrycksTest-anatomy": {
      "acc": 0.4,
      "acc_stderr": 0.1,
      "acc_norm": 0.4,
      "acc_norm_stderr": 0.1
    },
    "hendrycksTest-astronomy": {
      "acc": 0.32,
      "acc_stderr": 0.09521904571390466,
      "acc_norm": 0.32,
      "acc_norm_stderr": 0.09521904571390466
    },
    "hendrycksTest-business_ethics": {
      "acc": 0.2,
      "acc_stderr": 0.08164965809277261,
      "acc_norm": 0.2,
      "acc_norm_stderr": 0.08164965809277261
    },
    "hendrycksTest-clinical_knowledge": {
      "acc": 0.24,
      "acc_stderr": 0.08717797887081345,
      "acc_norm": 0.24,
      "acc_norm_stderr": 0.08717797887081345
    },
    "hendrycksTest-college_biology": {
      "acc": 0.2,
      "acc_stderr": 0.08164965809277262,
      "acc_norm": 0.2,
      "acc_norm_stderr": 0.08164965809277262
    },
    "hendrycksTest-college_chemistry": {
      "acc": 0.16,
      "acc_stderr": 0.0748331477354788,
      "acc_norm": 0.16,
      "acc_norm_stderr": 0.0748331477354788
    },
    "hendrycksTest-college_computer_science": {
      "acc": 0.32,
      "acc_stderr": 0.09521904571390466,
      "acc_norm": 0.32,
      "acc_norm_stderr": 0.09521904571390466
    },
    "hendrycksTest-college_mathematics": {
      "acc": 0.4,
      "acc_stderr": 0.1,
      "acc_norm": 0.4,
      "acc_norm_stderr": 0.1
    },
    "hendrycksTest-college_medicine": {
      "acc": 0.2,
      "acc_stderr": 0.08164965809277261,
      "acc_norm": 0.2,
      "acc_norm_stderr": 0.08164965809277261
    },
    "hendrycksTest-college_physics": {
      "acc": 0.16,
      "acc_stderr": 0.0748331477354788,
      "acc_norm": 0.16,
      "acc_norm_stderr": 0.0748331477354788
    },
    "hendrycksTest-computer_security": {
      "acc": 0.52,
      "acc_stderr": 0.10198039027185572,
      "acc_norm": 0.52,
      "acc_norm_stderr": 0.10198039027185572
    },
    "hendrycksTest-conceptual_physics": {
      "acc": 0.24,
      "acc_stderr": 0.08717797887081347,
      "acc_norm": 0.24,
      "acc_norm_stderr": 0.08717797887081347
    },
    "hendrycksTest-econometrics": {
      "acc": 0.2,
      "acc_stderr": 0.08164965809277261,
      "acc_norm": 0.2,
      "acc_norm_stderr": 0.08164965809277261
    },
    "hendrycksTest-electrical_engineering": {
      "acc": 0.16,
      "acc_stderr": 0.07483314773547882,
      "acc_norm": 0.16,
      "acc_norm_stderr": 0.07483314773547882
    },
    "hendrycksTest-elementary_mathematics": {
      "acc": 0.16,
      "acc_stderr": 0.0748331477354788,
      "acc_norm": 0.16,
      "acc_norm_stderr": 0.0748331477354788
    },
    "hendrycksTest-formal_logic": {
      "acc": 0.24,
      "acc_stderr": 0.08717797887081345,
      "acc_norm": 0.24,
      "acc_norm_stderr": 0.08717797887081345
    },
    "hendrycksTest-global_facts": {
      "acc": 0.4,
      "acc_stderr": 0.1,
      "acc_norm": 0.4,
      "acc_norm_stderr": 0.1
    },
    "hendrycksTest-high_school_biology": {
      "acc": 0.32,
      "acc_stderr": 0.09521904571390465,
      "acc_norm": 0.32,
      "acc_norm_stderr": 0.09521904571390465
    },
    "hendrycksTest-high_school_chemistry": {
      "acc": 0.2,
      "acc_stderr": 0.08164965809277261,
      "acc_norm": 0.2,
      "acc_norm_stderr": 0.08164965809277261
    },
    "hendrycksTest-high_school_computer_science": {
      "acc": 0.2,
      "acc_stderr": 0.08164965809277261,
      "acc_norm": 0.2,
      "acc_norm_stderr": 0.08164965809277261
    },
    "hendrycksTest-high_school_european_history": {
      "acc": 0.16,
      "acc_stderr": 0.0748331477354788,
      "acc_norm": 0.16,
      "acc_norm_stderr": 0.0748331477354788
    },
    "hendrycksTest-high_school_geography": {
      "acc": 0.32,
      "acc_stderr": 0.09521904571390466,
      "acc_norm": 0.32,
      "acc_norm_stderr": 0.09521904571390466
    },
    "hendrycksTest-high_school_government_and_politics": {
      "acc": 0.28,
      "acc_stderr": 0.09165151389911677,
      "acc_norm": 0.28,
      "acc_norm_stderr": 0.09165151389911677
    },
    "hendrycksTest-high_school_macroeconomics": {
      "acc": 0.28,
      "acc_stderr": 0.09165151389911681,
      "acc_norm": 0.28,
      "acc_norm_stderr": 0.09165151389911681
    },
    "hendrycksTest-high_school_mathematics": {
      "acc": 0.32,
      "acc_stderr": 0.09521904571390467,
      "acc_norm": 0.32,
      "acc_norm_stderr": 0.09521904571390467
    },
    "hendrycksTest-high_school_microeconomics": {
      "acc": 0.28,
      "acc_stderr": 0.09165151389911678,
      "acc_norm": 0.28,
      "acc_norm_stderr": 0.09165151389911678
    },
    "hendrycksTest-high_school_physics": {
      "acc": 0.16,
      "acc_stderr": 0.0748331477354788,
      "acc_norm": 0.16,
      "acc_norm_stderr": 0.0748331477354788
    },
    "hendrycksTest-high_school_psychology": {
      "acc": 0.28,
      "acc_stderr": 0.09165151389911678,
      "acc_norm": 0.28,
      "acc_norm_stderr": 0.09165151389911678
    },
    "hendrycksTest-high_school_statistics": {
      "acc": 0.4,
      "acc_stderr": 0.10000000000000002,
      "acc_norm": 0.4,
      "acc_norm_stderr": 0.10000000000000002
    },
    "hendrycksTest-high_school_us_history": {
      "acc": 0.28,
      "acc_stderr": 0.0916515138991168,
      "acc_norm": 0.28,
      "acc_norm_stderr": 0.0916515138991168
    },
    "hendrycksTest-high_school_world_history": {
      "acc": 0.28,
      "acc_stderr": 0.0916515138991168,
      "acc_norm": 0.28,
      "acc_norm_stderr": 0.0916515138991168
    },
    "hendrycksTest-human_aging": {
      "acc": 0.2,
      "acc_stderr": 0.08164965809277261,
      "acc_norm": 0.2,
      "acc_norm_stderr": 0.08164965809277261
    },
    "hendrycksTest-human_sexuality": {
      "acc": 0.44,
      "acc_stderr": 0.10132456102380442,
      "acc_norm": 0.44,
      "acc_norm_stderr": 0.10132456102380442
    },
    "hendrycksTest-international_law": {
      "acc": 0.32,
      "acc_stderr": 0.09521904571390466,
      "acc_norm": 0.32,
      "acc_norm_stderr": 0.09521904571390466
    },
    "hendrycksTest-jurisprudence": {
      "acc": 0.36,
      "acc_stderr": 0.09797958971132713,
      "acc_norm": 0.36,
      "acc_norm_stderr": 0.09797958971132713
    },
    "hendrycksTest-logical_fallacies": {
      "acc": 0.24,
      "acc_stderr": 0.08717797887081345,
      "acc_norm": 0.24,
      "acc_norm_stderr": 0.08717797887081345
    },
    "hendrycksTest-machine_learning": {
      "acc": 0.16,
      "acc_stderr": 0.0748331477354788,
      "acc_norm": 0.16,
      "acc_norm_stderr": 0.0748331477354788
    },
    "hendrycksTest-management": {
      "acc": 0.32,
      "acc_stderr": 0.09521904571390465,
      "acc_norm": 0.32,
      "acc_norm_stderr": 0.09521904571390465
    },
    "hendrycksTest-marketing": {
      "acc": 0.08,
      "acc_stderr": 0.05537749241945382,
      "acc_norm": 0.08,
      "acc_norm_stderr": 0.05537749241945382
    },
    "hendrycksTest-medical_genetics": {
      "acc": 0.28,
      "acc_stderr": 0.09165151389911678,
      "acc_norm": 0.28,
      "acc_norm_stderr": 0.09165151389911678
    },
    "hendrycksTest-miscellaneous": {
      "acc": 0.08,
      "acc_stderr": 0.05537749241945382,
      "acc_norm": 0.08,
      "acc_norm_stderr": 0.05537749241945382
    },
    "hendrycksTest-moral_disputes": {
      "acc": 0.12,
      "acc_stderr": 0.066332495807108,
      "acc_norm": 0.12,
      "acc_norm_stderr": 0.066332495807108
    },
    "hendrycksTest-moral_scenarios": {
      "acc": 0.2,
      "acc_stderr": 0.08164965809277261,
      "acc_norm": 0.2,
      "acc_norm_stderr": 0.08164965809277261
    },
    "hendrycksTest-nutrition": {
      "acc": 0.52,
      "acc_stderr": 0.10198039027185572,
      "acc_norm": 0.52,
      "acc_norm_stderr": 0.10198039027185572
    },
    "hendrycksTest-philosophy": {
      "acc": 0.2,
      "acc_stderr": 0.08164965809277261,
      "acc_norm": 0.2,
      "acc_norm_stderr": 0.08164965809277261
    },
    "hendrycksTest-prehistory": {
      "acc": 0.16,
      "acc_stderr": 0.07483314773547882,
      "acc_norm": 0.16,
      "acc_norm_stderr": 0.07483314773547882
    },
    "hendrycksTest-professional_accounting": {
      "acc": 0.32,
      "acc_stderr": 0.09521904571390465,
      "acc_norm": 0.32,
      "acc_norm_stderr": 0.09521904571390465
    },
    "hendrycksTest-professional_law": {
      "acc": 0.16,
      "acc_stderr": 0.07483314773547882,
      "acc_norm": 0.16,
      "acc_norm_stderr": 0.07483314773547882
    },
    "hendrycksTest-professional_medicine": {
      "acc": 0.4,
      "acc_stderr": 0.10000000000000002,
      "acc_norm": 0.4,
      "acc_norm_stderr": 0.10000000000000002
    },
    "hendrycksTest-professional_psychology": {
      "acc": 0.24,
      "acc_stderr": 0.08717797887081345,
      "acc_norm": 0.24,
      "acc_norm_stderr": 0.08717797887081345
    },
    "hendrycksTest-public_relations": {
      "acc": 0.2,
      "acc_stderr": 0.08164965809277261,
      "acc_norm": 0.2,
      "acc_norm_stderr": 0.08164965809277261
    },
    "hendrycksTest-security_studies": {
      "acc": 0.32,
      "acc_stderr": 0.09521904571390466,
      "acc_norm": 0.32,
      "acc_norm_stderr": 0.09521904571390466
    },
    "hendrycksTest-sociology": {
      "acc": 0.28,
      "acc_stderr": 0.09165151389911678,
      "acc_norm": 0.28,
      "acc_norm_stderr": 0.09165151389911678
    },
    "hendrycksTest-us_foreign_policy": {
      "acc": 0.24,
      "acc_stderr": 0.08717797887081345,
      "acc_norm": 0.24,
      "acc_norm_stderr": 0.08717797887081345
    },
    "hendrycksTest-virology": {
      "acc": 0.12,
      "acc_stderr": 0.06633249580710801,
      "acc_norm": 0.12,
      "acc_norm_stderr": 0.06633249580710801
    },
    "hendrycksTest-world_religions": {
      "acc": 0.28,
      "acc_stderr": 0.09165151389911678,
      "acc_norm": 0.28,
      "acc_norm_stderr": 0.09165151389911678
    }
  },
  "versions": {
    "hendrycksTest-abstract_algebra": 1,
    "hendrycksTest-anatomy": 1,
    "hendrycksTest-astronomy": 1,
    "hendrycksTest-business_ethics": 1,
    "hendrycksTest-clinical_knowledge": 1,
    "hendrycksTest-college_biology": 1,
    "hendrycksTest-college_chemistry": 1,
    "hendrycksTest-college_computer_science": 1,
    "hendrycksTest-college_mathematics": 1,
    "hendrycksTest-college_medicine": 1,
    "hendrycksTest-college_physics": 1,
    "hendrycksTest-computer_security": 1,
    "hendrycksTest-conceptual_physics": 1,
    "hendrycksTest-econometrics": 1,
    "hendrycksTest-electrical_engineering": 1,
    "hendrycksTest-elementary_mathematics": 1,
    "hendrycksTest-formal_logic": 1,
    "hendrycksTest-global_facts": 1,
    "hendrycksTest-high_school_biology": 1,
    "hendrycksTest-high_school_chemistry": 1,
    "hendrycksTest-high_school_computer_science": 1,
    "hendrycksTest-high_school_european_history": 1,
    "hendrycksTest-high_school_geography": 1,
    "hendrycksTest-high_school_government_and_politics": 1,
    "hendrycksTest-high_school_macroeconomics": 1,
    "hendrycksTest-high_school_mathematics": 1,
    "hendrycksTest-high_school_microeconomics": 1,
    "hendrycksTest-high_school_physics": 1,
    "hendrycksTest-high_school_psychology": 1,
    "hendrycksTest-high_school_statistics": 1,
    "hendrycksTest-high_school_us_history": 1,
    "hendrycksTest-high_school_world_history": 1,
    "hendrycksTest-human_aging": 1,
    "hendrycksTest-human_sexuality": 1,
    "hendrycksTest-international_law": 1,
    "hendrycksTest-jurisprudence": 1,
    "hendrycksTest-logical_fallacies": 1,
    "hendrycksTest-machine_learning": 1,
    "hendrycksTest-management": 1,
    "hendrycksTest-marketing": 1,
    "hendrycksTest-medical_genetics": 1,
    "hendrycksTest-miscellaneous": 1,
    "hendrycksTest-moral_disputes": 1,
    "hendrycksTest-moral_scenarios": 1,
    "hendrycksTest-nutrition": 1,
    "hendrycksTest-philosophy": 1,
    "hendrycksTest-prehistory": 1,
    "hendrycksTest-professional_accounting": 1,
    "hendrycksTest-professional_law": 1,
    "hendrycksTest-professional_medicine": 1,
    "hendrycksTest-professional_psychology": 1,
    "hendrycksTest-public_relations": 1,
    "hendrycksTest-security_studies": 1,
    "hendrycksTest-sociology": 1,
    "hendrycksTest-us_foreign_policy": 1,
    "hendrycksTest-virology": 1,
    "hendrycksTest-world_religions": 1
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=BEE-spoke-data/smol_llama-220M-GQA,revision=main,trust_remote_code=True,dtype='bfloat16'",
    "num_fewshot": 5,
    "batch_size": "8",
    "batch_sizes": [],
    "device": "cuda",
    "no_cache": false,
    "limit": 0.25,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
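The 5-shot MMLU run above reports 57 separate hendrycksTest-* subjects and was executed with "limit": 0.25, i.e. only a quarter of each subject's examples, which is why the per-subject standard errors are wide. One common way to summarize it is an unweighted mean over subjects; a minimal sketch, assuming the file keeps its committed path:

import json
from statistics import mean

# Reduce the 57 hendrycksTest-* subject scores from the 5-shot run above
# to a single unweighted macro-average, as is commonly reported for MMLU.
with open("evals/json_object_5.json") as f:
    run = json.load(f)

accs = [m["acc_norm"] for task, m in run["results"].items() if task.startswith("hendrycksTest-")]
print(f"MMLU macro-average acc_norm over {len(accs)} subjects: {mean(accs):.4f}")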