# Source: baseline-bm25s / .eval_results / MIRACLRetrievalHardNegatives.yaml
# Author: Samoed
# Commit: "Add evaluation results for model mteb/baseline-bm25s revision 0_1_10" (48e2952, verified)
---
- dataset:
id: mteb/MIRACLRetrievalHardNegatives
task_id: MIRACLRetrievalHardNegatives_ar_dev
revision: d7d94fa4b946cec4a27c84653aa0cf6b33f74a3c
value: 38.751
notes: Obtained using MTEB v1.38.49
source:
url: https://github.com/embeddings-benchmark/mteb/
name: Obtained using MTEB v1.38.49
user: mteb
- dataset:
id: mteb/MIRACLRetrievalHardNegatives
task_id: MIRACLRetrievalHardNegatives_bn_dev
revision: d7d94fa4b946cec4a27c84653aa0cf6b33f74a3c
value: 20.95
notes: Obtained using MTEB v1.38.49
source:
url: https://github.com/embeddings-benchmark/mteb/
name: Obtained using MTEB v1.38.49
user: mteb
- dataset:
id: mteb/MIRACLRetrievalHardNegatives
task_id: MIRACLRetrievalHardNegatives_de_dev
revision: d7d94fa4b946cec4a27c84653aa0cf6b33f74a3c
value: 14.575
notes: Obtained using MTEB v1.38.49
source:
url: https://github.com/embeddings-benchmark/mteb/
name: Obtained using MTEB v1.38.49
user: mteb
- dataset:
id: mteb/MIRACLRetrievalHardNegatives
task_id: MIRACLRetrievalHardNegatives_en_dev
revision: d7d94fa4b946cec4a27c84653aa0cf6b33f74a3c
value: 27.901
notes: Obtained using MTEB v1.38.49
source:
url: https://github.com/embeddings-benchmark/mteb/
name: Obtained using MTEB v1.38.49
user: mteb
- dataset:
id: mteb/MIRACLRetrievalHardNegatives
task_id: MIRACLRetrievalHardNegatives_es_dev
revision: d7d94fa4b946cec4a27c84653aa0cf6b33f74a3c
value: 24.337
notes: Obtained using MTEB v1.38.49
source:
url: https://github.com/embeddings-benchmark/mteb/
name: Obtained using MTEB v1.38.49
user: mteb
- dataset:
id: mteb/MIRACLRetrievalHardNegatives
task_id: MIRACLRetrievalHardNegatives_fa_dev
revision: d7d94fa4b946cec4a27c84653aa0cf6b33f74a3c
value: 29.789
notes: Obtained using MTEB v1.38.49
source:
url: https://github.com/embeddings-benchmark/mteb/
name: Obtained using MTEB v1.38.49
user: mteb
- dataset:
id: mteb/MIRACLRetrievalHardNegatives
task_id: MIRACLRetrievalHardNegatives_fi_dev
revision: d7d94fa4b946cec4a27c84653aa0cf6b33f74a3c
value: 41.906
notes: Obtained using MTEB v1.38.49
source:
url: https://github.com/embeddings-benchmark/mteb/
name: Obtained using MTEB v1.38.49
user: mteb
- dataset:
id: mteb/MIRACLRetrievalHardNegatives
task_id: MIRACLRetrievalHardNegatives_fr_dev
revision: d7d94fa4b946cec4a27c84653aa0cf6b33f74a3c
value: 15.63
notes: Obtained using MTEB v1.38.49
source:
url: https://github.com/embeddings-benchmark/mteb/
name: Obtained using MTEB v1.38.49
user: mteb
- dataset:
id: mteb/MIRACLRetrievalHardNegatives
task_id: MIRACLRetrievalHardNegatives_hi_dev
revision: d7d94fa4b946cec4a27c84653aa0cf6b33f74a3c
value: 16.68
notes: Obtained using MTEB v1.38.49
source:
url: https://github.com/embeddings-benchmark/mteb/
name: Obtained using MTEB v1.38.49
user: mteb
- dataset:
id: mteb/MIRACLRetrievalHardNegatives
task_id: MIRACLRetrievalHardNegatives_id_dev
revision: d7d94fa4b946cec4a27c84653aa0cf6b33f74a3c
value: 33.772
notes: Obtained using MTEB v1.38.49
source:
url: https://github.com/embeddings-benchmark/mteb/
name: Obtained using MTEB v1.38.49
user: mteb
- dataset:
id: mteb/MIRACLRetrievalHardNegatives
task_id: MIRACLRetrievalHardNegatives_ja_dev
revision: d7d94fa4b946cec4a27c84653aa0cf6b33f74a3c
value: 5.488
notes: Obtained using MTEB v1.38.49
source:
url: https://github.com/embeddings-benchmark/mteb/
name: Obtained using MTEB v1.38.49
user: mteb
- dataset:
id: mteb/MIRACLRetrievalHardNegatives
task_id: MIRACLRetrievalHardNegatives_ko_dev
revision: d7d94fa4b946cec4a27c84653aa0cf6b33f74a3c
value: 28.846
notes: Obtained using MTEB v1.38.49
source:
url: https://github.com/embeddings-benchmark/mteb/
name: Obtained using MTEB v1.38.49
user: mteb
- dataset:
id: mteb/MIRACLRetrievalHardNegatives
task_id: MIRACLRetrievalHardNegatives_ru_dev
revision: d7d94fa4b946cec4a27c84653aa0cf6b33f74a3c
value: 19.821
notes: Obtained using MTEB v1.38.49
source:
url: https://github.com/embeddings-benchmark/mteb/
name: Obtained using MTEB v1.38.49
user: mteb
- dataset:
id: mteb/MIRACLRetrievalHardNegatives
task_id: MIRACLRetrievalHardNegatives_sw_dev
revision: d7d94fa4b946cec4a27c84653aa0cf6b33f74a3c
value: 40.954
notes: Obtained using MTEB v1.38.49
source:
url: https://github.com/embeddings-benchmark/mteb/
name: Obtained using MTEB v1.38.49
user: mteb
- dataset:
id: mteb/MIRACLRetrievalHardNegatives
task_id: MIRACLRetrievalHardNegatives_te_dev
revision: d7d94fa4b946cec4a27c84653aa0cf6b33f74a3c
value: 12.991
notes: Obtained using MTEB v1.38.49
source:
url: https://github.com/embeddings-benchmark/mteb/
name: Obtained using MTEB v1.38.49
user: mteb
- dataset:
id: mteb/MIRACLRetrievalHardNegatives
task_id: MIRACLRetrievalHardNegatives_th_dev
revision: d7d94fa4b946cec4a27c84653aa0cf6b33f74a3c
value: 34.631
notes: Obtained using MTEB v1.38.49
source:
url: https://github.com/embeddings-benchmark/mteb/
name: Obtained using MTEB v1.38.49
user: mteb
- dataset:
id: mteb/MIRACLRetrievalHardNegatives
task_id: MIRACLRetrievalHardNegatives_yo_dev
revision: d7d94fa4b946cec4a27c84653aa0cf6b33f74a3c
value: 50.239
notes: Obtained using MTEB v1.38.49
source:
url: https://github.com/embeddings-benchmark/mteb/
name: Obtained using MTEB v1.38.49
user: mteb
- dataset:
id: mteb/MIRACLRetrievalHardNegatives
task_id: MIRACLRetrievalHardNegatives_zh_dev
revision: d7d94fa4b946cec4a27c84653aa0cf6b33f74a3c
value: 0.101
notes: Obtained using MTEB v1.38.49
source:
url: https://github.com/embeddings-benchmark/mteb/
name: Obtained using MTEB v1.38.49
user: mteb
- dataset:
id: mteb/MIRACLRetrievalHardNegatives
task_id: MIRACLRetrievalHardNegatives
revision: d7d94fa4b946cec4a27c84653aa0cf6b33f74a3c
value: 25.409
notes: Obtained using MTEB v1.38.49
source:
url: https://github.com/embeddings-benchmark/mteb/
name: Obtained using MTEB v1.38.49
user: mteb