{
"test": {
"cos_sim": {
"accuracy": 0.8409131549144663,
"accuracy_threshold": 0.740990400314331,
"ap": 0.6686677647503386,
"f1": 0.6294631710362049,
"f1_threshold": 0.6761115789413452,
"precision": 0.5973933649289099,
"recall": 0.6651715039577837
},
"dot": {
"accuracy": 0.8027656911247542,
"accuracy_threshold": 1326.768310546875,
"ap": 0.5429172039861209,
"f1": 0.5477150537634409,
"f1_threshold": 947.6712646484375,
"precision": 0.4758660957571039,
"recall": 0.6451187335092349
},
"euclidean": {
"accuracy": 0.8276211480002384,
"accuracy_threshold": 27.691078186035156,
"ap": 0.6243039769075329,
"f1": 0.5919159053935678,
"f1_threshold": 30.392478942871094,
"precision": 0.5629611997143538,
"recall": 0.6240105540897097
},
"evaluation_time": 236.37,
"manhattan": {
"accuracy": 0.827561542588067,
"accuracy_threshold": 1430.9112548828125,
"ap": 0.6241882051995578,
"f1": 0.5932101002778785,
"f1_threshold": 1566.4383544921875,
"precision": 0.5471361711611321,
"recall": 0.6477572559366754
},
"max": {
"accuracy": 0.8409131549144663,
"ap": 0.6686677647503386,
"f1": 0.6294631710362049
}
},
"dataset_version": null,
"mteb_version": "0.0.2"
}