{
  "results": {
    "hellaswag": {
      "acc": 0.2752988047808765,
      "acc_stderr": 0.008917257773359156,
      "acc_norm": 0.2968127490039841,
      "acc_norm_stderr": 0.009120663626901691
    }
  },
  "versions": {
    "hellaswag": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=BEE-spoke-data/smol_llama-220M-GQA,revision=main,trust_remote_code=True,dtype='bfloat16'",
    "num_fewshot": 10,
    "batch_size": "8",
    "batch_sizes": [],
    "device": "cuda",
    "no_cache": false,
    "limit": 0.25,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}