| Model | Benchmark | Score | Date |
|---|---|---:|---|
| ST5-XXL | MTEB: Massive Text Embedding Benchmark | 73.42 | 2022-10-13 |
| ST5-XL | MTEB: Massive Text Embedding Benchmark | 72.84 | 2022-10-13 |
| ST5-Large | MTEB: Massive Text Embedding Benchmark | 72.31 | 2022-10-13 |
| Ada Similarity | MTEB: Massive Text Embedding Benchmark | 70.44 | 2022-10-13 |
| SGPT-5.8B-nli | MTEB: Massive Text Embedding Benchmark | 70.14 | 2022-10-13 |
| ST5-Base | MTEB: Massive Text Embedding Benchmark | 69.81 | 2022-10-13 |
| SGPT-5.8B-msmarco | MTEB: Massive Text Embedding Benchmark | 68.13 | 2022-10-13 |
| MPNet-multilingual | MTEB: Massive Text Embedding Benchmark | 67.91 | 2022-10-13 |
| GTR-XXL | MTEB: Massive Text Embedding Benchmark | 67.41 | 2022-10-13 |
| SimCSE-BERT-sup | MTEB: Massive Text Embedding Benchmark | 67.32 | 2022-10-13 |
| GTR-Large | MTEB: Massive Text Embedding Benchmark | 67.14 | 2022-10-13 |
| SGPT-2.7B-msmarco | MTEB: Massive Text Embedding Benchmark | 67.13 | 2022-10-13 |
| GTR-XL | MTEB: Massive Text Embedding Benchmark | 67.11 | 2022-10-13 |
| Contriever | MTEB: Massive Text Embedding Benchmark | 66.68 | 2022-10-13 |
| SGPT-1.3B-msmarco | MTEB: Massive Text Embedding Benchmark | 66.52 | 2022-10-13 |
| SGPT-BLOOM-7.1B-msmarco | MTEB: Massive Text Embedding Benchmark | 66.19 | 2022-10-13 |
| GTR-Base | MTEB: Massive Text Embedding Benchmark | 65.25 | 2022-10-13 |
| MPNet | MTEB: Massive Text Embedding Benchmark | 65.07 | 2022-10-13 |
| coCondenser-msmarco | MTEB: Massive Text Embedding Benchmark | 64.71 | 2022-10-13 |
| MiniLM-L12-multilingual | MTEB: Massive Text Embedding Benchmark | 64.30 | 2022-10-13 |
| MiniLM-L12 | MTEB: Massive Text Embedding Benchmark | 63.21 | 2022-10-13 |
| MiniLM-L6 | MTEB: Massive Text Embedding Benchmark | 63.06 | 2022-10-13 |
| LaBSE | MTEB: Massive Text Embedding Benchmark | 62.71 | 2022-10-13 |
| SimCSE-BERT-unsup | MTEB: Massive Text Embedding Benchmark | 62.50 | 2022-10-13 |
| BERT | MTEB: Massive Text Embedding Benchmark | 61.66 | 2022-10-13 |
| SGPT-125M-nli | MTEB: Massive Text Embedding Benchmark | 61.46 | 2022-10-13 |
| SGPT-125M-msmarco | MTEB: Massive Text Embedding Benchmark | 60.72 | 2022-10-13 |
| Komninos | MTEB: Massive Text Embedding Benchmark | 57.65 | 2022-10-13 |
| Glove | MTEB: Massive Text Embedding Benchmark | 57.29 | 2022-10-13 |
| LASER2 | MTEB: Massive Text Embedding Benchmark | 53.65 | 2022-10-13 |
| SPECTER | MTEB: Massive Text Embedding Benchmark | 52.37 | 2022-10-13 |
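Scores like the ones above are typically produced by running a model through the official `mteb` evaluation harness together with `sentence-transformers`. The sketch below is an illustration only: it assumes the `mteb` and `sentence-transformers` packages are installed, uses the `all-MiniLM-L6-v2` checkpoint as a stand-in for the MiniLM-L6 entry, and evaluates a small subset of tasks rather than the full benchmark, so its numbers will not match the table.

```python
# Minimal sketch: evaluating a sentence-embedding model with the MTEB harness.
# Assumes `pip install mteb sentence-transformers`; the task list is an
# illustrative subset, not the full set used for the leaderboard scores above.
from mteb import MTEB
from sentence_transformers import SentenceTransformer

# "MiniLM-L6" in the table commonly refers to this Sentence-Transformers checkpoint.
model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")

# Pick a couple of English tasks; the full benchmark spans several task types
# (classification, clustering, pair classification, reranking, retrieval,
# STS, summarization, bitext mining) over dozens of datasets.
evaluation = MTEB(tasks=["Banking77Classification", "STSBenchmark"])

# Per-task results are returned and also written as JSON to the output folder.
results = evaluation.run(model, output_folder="results/all-MiniLM-L6-v2")
print(results)
```

Leaderboard averages are then computed by aggregating the per-task JSON outputs across the chosen task set, which is why a partial run like this one is not directly comparable to the table.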