{
"dataset_version": null,
"mteb_version": "0.0.2",
"test": {
"cos_sim": {
"accuracy": 0.8856871191834517,
"accuracy_threshold": 0.6621477603912354,
"ap": 0.8480240716354543,
"f1": 0.7707765285922384,
"f1_threshold": 0.634358286857605,
"precision": 0.7484947406601379,
"recall": 0.7944256236526024
},
"dot": {
"accuracy": 0.8600923662048356,
"accuracy_threshold": 2335.04736328125,
"ap": 0.7865564590120729,
"f1": 0.7275837491090521,
"f1_threshold": 2066.458251953125,
"precision": 0.6772823779193206,
"recall": 0.7859562673236834
},
"euclidean": {
"accuracy": 0.8784103698529127,
"accuracy_threshold": 46.53624725341797,
"ap": 0.8350424424952835,
"f1": 0.7574496544549306,
"f1_threshold": 49.904998779296875,
"precision": 0.7319402556369381,
"recall": 0.7848013550970127
},
"evaluation_time": 437.33,
"manhattan": {
"accuracy": 0.879225365777933,
"accuracy_threshold": 1849.88671875,
"ap": 0.8349479248597825,
"f1": 0.7567748162447101,
"f1_threshold": 1964.5179443359375,
"precision": 0.7306810035842294,
"recall": 0.7848013550970127
},
"max": {
"accuracy": 0.8856871191834517,
"ap": 0.8480240716354543,
"f1": 0.7707765285922384
}
}
}