SGPT-1.3B-weightedmean-msma…/evaluation/mteb/TwitterSemEval2015.json

{
    "dataset_version": null,
    "mteb_version": "0.0.2",
    "test": {
        "cos_sim": {
            "accuracy": 0.8276807534124099,
            "accuracy_threshold": 0.7320027351379395,
            "ap": 0.6237052608803734,
            "f1": 0.5907741493491665,
            "f1_threshold": 0.6374467015266418,
            "precision": 0.5207326892109501,
            "recall": 0.6825857519788918
        },
        "dot": {
            "accuracy": 0.8056267509089825,
            "accuracy_threshold": 2350.834716796875,
            "ap": 0.5475349561321037,
            "f1": 0.5475483794372552,
            "f1_threshold": 1947.31787109375,
            "precision": 0.4977336499028707,
            "recall": 0.60844327176781
        },
        "euclidean": {
            "accuracy": 0.82476008821601,
            "accuracy_threshold": 40.10894775390625,
            "ap": 0.6117417554210511,
            "f1": 0.5780318696022382,
            "f1_threshold": 45.30638122558594,
            "precision": 0.5362220717670955,
            "recall": 0.6269129287598945
        },
        "evaluation_time": 56.65,
        "manhattan": {
            "accuracy": 0.8248792990403528,
            "accuracy_threshold": 1387.340576171875,
            "ap": 0.6104481629296654,
            "f1": 0.5803033951360462,
            "f1_threshold": 1590.137451171875,
            "precision": 0.5336581045172719,
            "recall": 0.6358839050131926
        },
        "max": {
            "accuracy": 0.8276807534124099,
            "ap": 0.6237052608803734,
            "f1": 0.5907741493491665
        }
    }
}
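
For context, a minimal sketch of how a result file like this is typically produced with the mteb package. The checkpoint name below is a placeholder (the actual model path above is truncated), and the exact API may differ slightly across mteb versions; any model exposing an encode() method works.

    # Sketch: run the TwitterSemEval2015 task and write a JSON result file
    # like the one above. "my-sgpt-checkpoint" is a placeholder, not the
    # real (truncated) model path shown in the header.
    from mteb import MTEB
    from sentence_transformers import SentenceTransformer

    model = SentenceTransformer("my-sgpt-checkpoint")  # placeholder name
    evaluation = MTEB(tasks=["TwitterSemEval2015"])
    # Writes evaluation/mteb/TwitterSemEval2015.json with per-metric scores
    # (cos_sim, dot, euclidean, manhattan, max) as in the file above.
    evaluation.run(model, output_folder="evaluation/mteb")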