---
tags:
- sentence-transformers
- sentence-similarity
- feature-extraction
- generated_from_trainer
- dataset_size:124788
- loss:GISTEmbedLoss
base_model: Alibaba-NLP/gte-multilingual-base
widget:
- source_sentence: 其他机械、设备和有形货物租赁服务代表
sentences:
- 其他机械和设备租赁服务工作人员
- 电子和电信设备及零部件物流经理
- 工业主厨
- source_sentence: 公交车司机
sentences:
- 表演灯光设计师
- 乙烯基地板安装工
- 国际巴士司机
- source_sentence: online communication manager
sentences:
- trades union official
- social media manager
- budget manager
- source_sentence: Projektmanagerin
sentences:
- Projektmanager/Projektmanagerin
- Category-Manager
- Infanterist
- source_sentence: Volksvertreter
sentences:
- Parlamentarier
- Oberbürgermeister
- Konsul
pipeline_tag: sentence-similarity
library_name: sentence-transformers
metrics:
- cosine_accuracy@1
- cosine_accuracy@20
- cosine_accuracy@50
- cosine_accuracy@100
- cosine_accuracy@150
- cosine_accuracy@200
- cosine_precision@1
- cosine_precision@20
- cosine_precision@50
- cosine_precision@100
- cosine_precision@150
- cosine_precision@200
- cosine_recall@1
- cosine_recall@20
- cosine_recall@50
- cosine_recall@100
- cosine_recall@150
- cosine_recall@200
- cosine_ndcg@1
- cosine_ndcg@20
- cosine_ndcg@50
- cosine_ndcg@100
- cosine_ndcg@150
- cosine_ndcg@200
- cosine_mrr@1
- cosine_mrr@20
- cosine_mrr@50
- cosine_mrr@100
- cosine_mrr@150
- cosine_mrr@200
- cosine_map@1
- cosine_map@20
- cosine_map@50
- cosine_map@100
- cosine_map@150
- cosine_map@200
- cosine_map@500
model-index:
- name: SentenceTransformer based on Alibaba-NLP/gte-multilingual-base
results:
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: full en
type: full_en
metrics:
- type: cosine_accuracy@1
value: 0.6476190476190476
name: Cosine Accuracy@1
- type: cosine_accuracy@20
value: 0.9904761904761905
name: Cosine Accuracy@20
- type: cosine_accuracy@50
value: 0.9904761904761905
name: Cosine Accuracy@50
- type: cosine_accuracy@100
value: 0.9904761904761905
name: Cosine Accuracy@100
- type: cosine_accuracy@150
value: 0.9904761904761905
name: Cosine Accuracy@150
- type: cosine_accuracy@200
value: 0.9904761904761905
name: Cosine Accuracy@200
- type: cosine_precision@1
value: 0.6476190476190476
name: Cosine Precision@1
- type: cosine_precision@20
value: 0.5133333333333332
name: Cosine Precision@20
- type: cosine_precision@50
value: 0.3165714285714285
name: Cosine Precision@50
- type: cosine_precision@100
value: 0.18857142857142858
name: Cosine Precision@100
- type: cosine_precision@150
value: 0.13396825396825396
name: Cosine Precision@150
- type: cosine_precision@200
value: 0.10433333333333335
name: Cosine Precision@200
- type: cosine_recall@1
value: 0.06742481608756247
name: Cosine Recall@1
- type: cosine_recall@20
value: 0.5411228142559339
name: Cosine Recall@20
- type: cosine_recall@50
value: 0.7397482609380314
name: Cosine Recall@50
- type: cosine_recall@100
value: 0.8429667985290079
name: Cosine Recall@100
- type: cosine_recall@150
value: 0.8856357375498775
name: Cosine Recall@150
- type: cosine_recall@200
value: 0.9091330295382077
name: Cosine Recall@200
- type: cosine_ndcg@1
value: 0.6476190476190476
name: Cosine Ndcg@1
- type: cosine_ndcg@20
value: 0.6917131025478591
name: Cosine Ndcg@20
- type: cosine_ndcg@50
value: 0.71478335831634
name: Cosine Ndcg@50
- type: cosine_ndcg@100
value: 0.7666819432677721
name: Cosine Ndcg@100
- type: cosine_ndcg@150
value: 0.7855970749692088
name: Cosine Ndcg@150
- type: cosine_ndcg@200
value: 0.7960468614602451
name: Cosine Ndcg@200
- type: cosine_mrr@1
value: 0.6476190476190476
name: Cosine Mrr@1
- type: cosine_mrr@20
value: 0.8090476190476191
name: Cosine Mrr@20
- type: cosine_mrr@50
value: 0.8090476190476191
name: Cosine Mrr@50
- type: cosine_mrr@100
value: 0.8090476190476191
name: Cosine Mrr@100
- type: cosine_mrr@150
value: 0.8090476190476191
name: Cosine Mrr@150
- type: cosine_mrr@200
value: 0.8090476190476191
name: Cosine Mrr@200
- type: cosine_map@1
value: 0.6476190476190476
name: Cosine Map@1
- type: cosine_map@20
value: 0.5561135670751935
name: Cosine Map@20
- type: cosine_map@50
value: 0.5477711353289022
name: Cosine Map@50
- type: cosine_map@100
value: 0.5791852239372863
name: Cosine Map@100
- type: cosine_map@150
value: 0.5872469517518495
name: Cosine Map@150
- type: cosine_map@200
value: 0.5908784036739082
name: Cosine Map@200
- type: cosine_map@500
value: 0.5948564356607342
name: Cosine Map@500
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: full es
type: full_es
metrics:
- type: cosine_accuracy@1
value: 0.12972972972972974
name: Cosine Accuracy@1
- type: cosine_accuracy@20
value: 1.0
name: Cosine Accuracy@20
- type: cosine_accuracy@50
value: 1.0
name: Cosine Accuracy@50
- type: cosine_accuracy@100
value: 1.0
name: Cosine Accuracy@100
- type: cosine_accuracy@150
value: 1.0
name: Cosine Accuracy@150
- type: cosine_accuracy@200
value: 1.0
name: Cosine Accuracy@200
- type: cosine_precision@1
value: 0.12972972972972974
name: Cosine Precision@1
- type: cosine_precision@20
value: 0.5705405405405405
name: Cosine Precision@20
- type: cosine_precision@50
value: 0.38962162162162167
name: Cosine Precision@50
- type: cosine_precision@100
value: 0.25140540540540546
name: Cosine Precision@100
- type: cosine_precision@150
value: 0.19012612612612612
name: Cosine Precision@150
- type: cosine_precision@200
value: 0.15154054054054056
name: Cosine Precision@200
- type: cosine_recall@1
value: 0.0037413987812150314
name: Cosine Recall@1
- type: cosine_recall@20
value: 0.38432915927625627
name: Cosine Recall@20
- type: cosine_recall@50
value: 0.5663097940153319
name: Cosine Recall@50
- type: cosine_recall@100
value: 0.6710180189388714
name: Cosine Recall@100
- type: cosine_recall@150
value: 0.7443549924512646
name: Cosine Recall@150
- type: cosine_recall@200
value: 0.7804985217049148
name: Cosine Recall@200
- type: cosine_ndcg@1
value: 0.12972972972972974
name: Cosine Ndcg@1
- type: cosine_ndcg@20
value: 0.6133809590566169
name: Cosine Ndcg@20
- type: cosine_ndcg@50
value: 0.5888378318443163
name: Cosine Ndcg@50
- type: cosine_ndcg@100
value: 0.613553130716134
name: Cosine Ndcg@100
- type: cosine_ndcg@150
value: 0.6492700673561147
name: Cosine Ndcg@150
- type: cosine_ndcg@200
value: 0.6672020616803231
name: Cosine Ndcg@200
- type: cosine_mrr@1
value: 0.12972972972972974
name: Cosine Mrr@1
- type: cosine_mrr@20
value: 0.5608108108108109
name: Cosine Mrr@20
- type: cosine_mrr@50
value: 0.5608108108108109
name: Cosine Mrr@50
- type: cosine_mrr@100
value: 0.5608108108108109
name: Cosine Mrr@100
- type: cosine_mrr@150
value: 0.5608108108108109
name: Cosine Mrr@150
- type: cosine_mrr@200
value: 0.5608108108108109
name: Cosine Mrr@200
- type: cosine_map@1
value: 0.12972972972972974
name: Cosine Map@1
- type: cosine_map@20
value: 0.47928087268629077
name: Cosine Map@20
- type: cosine_map@50
value: 0.4265150109477007
name: Cosine Map@50
- type: cosine_map@100
value: 0.4308614258675324
name: Cosine Map@100
- type: cosine_map@150
value: 0.446315567522346
name: Cosine Map@150
- type: cosine_map@200
value: 0.45361884446786194
name: Cosine Map@200
- type: cosine_map@500
value: 0.46587892353181215
name: Cosine Map@500
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: full de
type: full_de
metrics:
- type: cosine_accuracy@1
value: 0.2955665024630542
name: Cosine Accuracy@1
- type: cosine_accuracy@20
value: 0.9704433497536946
name: Cosine Accuracy@20
- type: cosine_accuracy@50
value: 0.9852216748768473
name: Cosine Accuracy@50
- type: cosine_accuracy@100
value: 0.9852216748768473
name: Cosine Accuracy@100
- type: cosine_accuracy@150
value: 0.9901477832512315
name: Cosine Accuracy@150
- type: cosine_accuracy@200
value: 0.9901477832512315
name: Cosine Accuracy@200
- type: cosine_precision@1
value: 0.2955665024630542
name: Cosine Precision@1
- type: cosine_precision@20
value: 0.5120689655172413
name: Cosine Precision@20
- type: cosine_precision@50
value: 0.3664039408866995
name: Cosine Precision@50
- type: cosine_precision@100
value: 0.2411330049261084
name: Cosine Precision@100
- type: cosine_precision@150
value: 0.180623973727422
name: Cosine Precision@150
- type: cosine_precision@200
value: 0.1453448275862069
name: Cosine Precision@200
- type: cosine_recall@1
value: 0.01108543831680986
name: Cosine Recall@1
- type: cosine_recall@20
value: 0.3229666331805043
name: Cosine Recall@20
- type: cosine_recall@50
value: 0.5039915991834915
name: Cosine Recall@50
- type: cosine_recall@100
value: 0.6239950018657238
name: Cosine Recall@100
- type: cosine_recall@150
value: 0.6837127628220585
name: Cosine Recall@150
- type: cosine_recall@200
value: 0.724182886190782
name: Cosine Recall@200
- type: cosine_ndcg@1
value: 0.2955665024630542
name: Cosine Ndcg@1
- type: cosine_ndcg@20
value: 0.5416271120841382
name: Cosine Ndcg@20
- type: cosine_ndcg@50
value: 0.5273905187096658
name: Cosine Ndcg@50
- type: cosine_ndcg@100
value: 0.5573943264798527
name: Cosine Ndcg@100
- type: cosine_ndcg@150
value: 0.5882759422186796
name: Cosine Ndcg@150
- type: cosine_ndcg@200
value: 0.6082376029646045
name: Cosine Ndcg@200
- type: cosine_mrr@1
value: 0.2955665024630542
name: Cosine Mrr@1
- type: cosine_mrr@20
value: 0.510702296647636
name: Cosine Mrr@20
- type: cosine_mrr@50
value: 0.5111935025343795
name: Cosine Mrr@50
- type: cosine_mrr@100
value: 0.5111935025343795
name: Cosine Mrr@100
- type: cosine_mrr@150
value: 0.5112378818891037
name: Cosine Mrr@150
- type: cosine_mrr@200
value: 0.5112378818891037
name: Cosine Mrr@200
- type: cosine_map@1
value: 0.2955665024630542
name: Cosine Map@1
- type: cosine_map@20
value: 0.4032624181455029
name: Cosine Map@20
- type: cosine_map@50
value: 0.35929856113701575
name: Cosine Map@50
- type: cosine_map@100
value: 0.3633301227599498
name: Cosine Map@100
- type: cosine_map@150
value: 0.3779770424201306
name: Cosine Map@150
- type: cosine_map@200
value: 0.38546911827821406
name: Cosine Map@200
- type: cosine_map@500
value: 0.3983960288142158
name: Cosine Map@500
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: full zh
type: full_zh
metrics:
- type: cosine_accuracy@1
value: 0.6504854368932039
name: Cosine Accuracy@1
- type: cosine_accuracy@20
value: 0.9805825242718447
name: Cosine Accuracy@20
- type: cosine_accuracy@50
value: 0.9902912621359223
name: Cosine Accuracy@50
- type: cosine_accuracy@100
value: 0.9902912621359223
name: Cosine Accuracy@100
- type: cosine_accuracy@150
value: 0.9902912621359223
name: Cosine Accuracy@150
- type: cosine_accuracy@200
value: 0.9902912621359223
name: Cosine Accuracy@200
- type: cosine_precision@1
value: 0.6504854368932039
name: Cosine Precision@1
- type: cosine_precision@20
value: 0.47815533980582525
name: Cosine Precision@20
- type: cosine_precision@50
value: 0.28699029126213593
name: Cosine Precision@50
- type: cosine_precision@100
value: 0.17563106796116504
name: Cosine Precision@100
- type: cosine_precision@150
value: 0.12543689320388354
name: Cosine Precision@150
- type: cosine_precision@200
value: 0.09786407766990295
name: Cosine Precision@200
- type: cosine_recall@1
value: 0.06122803520614593
name: Cosine Recall@1
- type: cosine_recall@20
value: 0.512665335199255
name: Cosine Recall@20
- type: cosine_recall@50
value: 0.6880766978766553
name: Cosine Recall@50
- type: cosine_recall@100
value: 0.8002784995071653
name: Cosine Recall@100
- type: cosine_recall@150
value: 0.8453144636093844
name: Cosine Recall@150
- type: cosine_recall@200
value: 0.8773140543871931
name: Cosine Recall@200
- type: cosine_ndcg@1
value: 0.6504854368932039
name: Cosine Ndcg@1
- type: cosine_ndcg@20
value: 0.6531212612064398
name: Cosine Ndcg@20
- type: cosine_ndcg@50
value: 0.6669362863744952
name: Cosine Ndcg@50
- type: cosine_ndcg@100
value: 0.7218911998936125
name: Cosine Ndcg@100
- type: cosine_ndcg@150
value: 0.7415597018345085
name: Cosine Ndcg@150
- type: cosine_ndcg@200
value: 0.7535751066625261
name: Cosine Ndcg@200
- type: cosine_mrr@1
value: 0.6504854368932039
name: Cosine Mrr@1
- type: cosine_mrr@20
value: 0.7993527508090615
name: Cosine Mrr@20
- type: cosine_mrr@50
value: 0.7997572815533981
name: Cosine Mrr@50
- type: cosine_mrr@100
value: 0.7997572815533981
name: Cosine Mrr@100
- type: cosine_mrr@150
value: 0.7997572815533981
name: Cosine Mrr@150
- type: cosine_mrr@200
value: 0.7997572815533981
name: Cosine Mrr@200
- type: cosine_map@1
value: 0.6504854368932039
name: Cosine Map@1
- type: cosine_map@20
value: 0.5072300500933464
name: Cosine Map@20
- type: cosine_map@50
value: 0.4897274345176646
name: Cosine Map@50
- type: cosine_map@100
value: 0.5196798622563865
name: Cosine Map@100
- type: cosine_map@150
value: 0.5276837053538445
name: Cosine Map@150
- type: cosine_map@200
value: 0.5311205359244624
name: Cosine Map@200
- type: cosine_map@500
value: 0.5365056842045905
name: Cosine Map@500
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: mix es
type: mix_es
metrics:
- type: cosine_accuracy@1
value: 0.7243889755590224
name: Cosine Accuracy@1
- type: cosine_accuracy@20
value: 0.9609984399375975
name: Cosine Accuracy@20
- type: cosine_accuracy@50
value: 0.9797191887675507
name: Cosine Accuracy@50
- type: cosine_accuracy@100
value: 0.9937597503900156
name: Cosine Accuracy@100
- type: cosine_accuracy@150
value: 0.9958398335933437
name: Cosine Accuracy@150
- type: cosine_accuracy@200
value: 0.9973998959958398
name: Cosine Accuracy@200
- type: cosine_precision@1
value: 0.7243889755590224
name: Cosine Precision@1
- type: cosine_precision@20
value: 0.12428497139885596
name: Cosine Precision@20
- type: cosine_precision@50
value: 0.05134685387415497
name: Cosine Precision@50
- type: cosine_precision@100
value: 0.026214248569942804
name: Cosine Precision@100
- type: cosine_precision@150
value: 0.017597503900156002
name: Cosine Precision@150
- type: cosine_precision@200
value: 0.013281331253250133
name: Cosine Precision@200
- type: cosine_recall@1
value: 0.2802961642275215
name: Cosine Recall@1
- type: cosine_recall@20
value: 0.9183394002426764
name: Cosine Recall@20
- type: cosine_recall@50
value: 0.9482665973305597
name: Cosine Recall@50
- type: cosine_recall@100
value: 0.9692234356040907
name: Cosine Recall@100
- type: cosine_recall@150
value: 0.9756023574276305
name: Cosine Recall@150
- type: cosine_recall@200
value: 0.9821892875715027
name: Cosine Recall@200
- type: cosine_ndcg@1
value: 0.7243889755590224
name: Cosine Ndcg@1
- type: cosine_ndcg@20
value: 0.8023352815755668
name: Cosine Ndcg@20
- type: cosine_ndcg@50
value: 0.8104895152869938
name: Cosine Ndcg@50
- type: cosine_ndcg@100
value: 0.8150081000806421
name: Cosine Ndcg@100
- type: cosine_ndcg@150
value: 0.8162651648802736
name: Cosine Ndcg@150
- type: cosine_ndcg@200
value: 0.8174362445077372
name: Cosine Ndcg@200
- type: cosine_mrr@1
value: 0.7243889755590224
name: Cosine Mrr@1
- type: cosine_mrr@20
value: 0.7938466413093047
name: Cosine Mrr@20
- type: cosine_mrr@50
value: 0.7944053350960067
name: Cosine Mrr@50
- type: cosine_mrr@100
value: 0.794613049565821
name: Cosine Mrr@100
- type: cosine_mrr@150
value: 0.7946306448507517
name: Cosine Mrr@150
- type: cosine_mrr@200
value: 0.7946402095756717
name: Cosine Mrr@200
- type: cosine_map@1
value: 0.7243889755590224
name: Cosine Map@1
- type: cosine_map@20
value: 0.7324440771234734
name: Cosine Map@20
- type: cosine_map@50
value: 0.734716178743038
name: Cosine Map@50
- type: cosine_map@100
value: 0.7353155432601859
name: Cosine Map@100
- type: cosine_map@150
value: 0.735429453970343
name: Cosine Map@150
- type: cosine_map@200
value: 0.7355154445871764
name: Cosine Map@200
- type: cosine_map@500
value: 0.7356208832908805
name: Cosine Map@500
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: mix de
type: mix_de
metrics:
- type: cosine_accuracy@1
value: 0.6697867914716589
name: Cosine Accuracy@1
- type: cosine_accuracy@20
value: 0.9505980239209568
name: Cosine Accuracy@20
- type: cosine_accuracy@50
value: 0.9771190847633905
name: Cosine Accuracy@50
- type: cosine_accuracy@100
value: 0.9859594383775351
name: Cosine Accuracy@100
- type: cosine_accuracy@150
value: 0.9921996879875195
name: Cosine Accuracy@150
- type: cosine_accuracy@200
value: 0.9942797711908476
name: Cosine Accuracy@200
- type: cosine_precision@1
value: 0.6697867914716589
name: Cosine Precision@1
- type: cosine_precision@20
value: 0.12470098803952159
name: Cosine Precision@20
- type: cosine_precision@50
value: 0.05225169006760271
name: Cosine Precision@50
- type: cosine_precision@100
value: 0.026708268330733236
name: Cosine Precision@100
- type: cosine_precision@150
value: 0.01798231929277171
name: Cosine Precision@150
- type: cosine_precision@200
value: 0.01353874154966199
name: Cosine Precision@200
- type: cosine_recall@1
value: 0.2517940717628705
name: Cosine Recall@1
- type: cosine_recall@20
value: 0.9059022360894435
name: Cosine Recall@20
- type: cosine_recall@50
value: 0.9474345640492287
name: Cosine Recall@50
- type: cosine_recall@100
value: 0.967932050615358
name: Cosine Recall@100
- type: cosine_recall@150
value: 0.9771190847633905
name: Cosine Recall@150
- type: cosine_recall@200
value: 0.9807592303692148
name: Cosine Recall@200
- type: cosine_ndcg@1
value: 0.6697867914716589
name: Cosine Ndcg@1
- type: cosine_ndcg@20
value: 0.770344092734726
name: Cosine Ndcg@20
- type: cosine_ndcg@50
value: 0.7819450345813985
name: Cosine Ndcg@50
- type: cosine_ndcg@100
value: 0.7865455025019679
name: Cosine Ndcg@100
- type: cosine_ndcg@150
value: 0.7883807621544129
name: Cosine Ndcg@150
- type: cosine_ndcg@200
value: 0.7890604802329748
name: Cosine Ndcg@200
- type: cosine_mrr@1
value: 0.6697867914716589
name: Cosine Mrr@1
- type: cosine_mrr@20
value: 0.7504302722692131
name: Cosine Mrr@20
- type: cosine_mrr@50
value: 0.7513280223222801
name: Cosine Mrr@50
- type: cosine_mrr@100
value: 0.7514573016845009
name: Cosine Mrr@100
- type: cosine_mrr@150
value: 0.7515108675350354
name: Cosine Mrr@150
- type: cosine_mrr@200
value: 0.7515238522218625
name: Cosine Mrr@200
- type: cosine_map@1
value: 0.6697867914716589
name: Cosine Map@1
- type: cosine_map@20
value: 0.6929705838065172
name: Cosine Map@20
- type: cosine_map@50
value: 0.696080766802269
name: Cosine Map@50
- type: cosine_map@100
value: 0.6967651580129317
name: Cosine Map@100
- type: cosine_map@150
value: 0.6969258122016383
name: Cosine Map@150
- type: cosine_map@200
value: 0.6969715581100935
name: Cosine Map@200
- type: cosine_map@500
value: 0.6970655432634698
name: Cosine Map@500
- task:
type: information-retrieval
name: Information Retrieval
dataset:
name: mix zh
type: mix_zh
metrics:
- type: cosine_accuracy@1
value: 0.19760790431617264
name: Cosine Accuracy@1
- type: cosine_accuracy@20
value: 1.0
name: Cosine Accuracy@20
- type: cosine_accuracy@50
value: 1.0
name: Cosine Accuracy@50
- type: cosine_accuracy@100
value: 1.0
name: Cosine Accuracy@100
- type: cosine_accuracy@150
value: 1.0
name: Cosine Accuracy@150
- type: cosine_accuracy@200
value: 1.0
name: Cosine Accuracy@200
- type: cosine_precision@1
value: 0.19760790431617264
name: Cosine Precision@1
- type: cosine_precision@20
value: 0.15439417576703063
name: Cosine Precision@20
- type: cosine_precision@50
value: 0.0617576703068123
name: Cosine Precision@50
- type: cosine_precision@100
value: 0.03087883515340615
name: Cosine Precision@100
- type: cosine_precision@150
value: 0.020585890102270757
name: Cosine Precision@150
- type: cosine_precision@200
value: 0.015439417576703075
name: Cosine Precision@200
- type: cosine_recall@1
value: 0.06371492954956293
name: Cosine Recall@1
- type: cosine_recall@20
value: 1.0
name: Cosine Recall@20
- type: cosine_recall@50
value: 1.0
name: Cosine Recall@50
- type: cosine_recall@100
value: 1.0
name: Cosine Recall@100
- type: cosine_recall@150
value: 1.0
name: Cosine Recall@150
- type: cosine_recall@200
value: 1.0
name: Cosine Recall@200
- type: cosine_ndcg@1
value: 0.19760790431617264
name: Cosine Ndcg@1
- type: cosine_ndcg@20
value: 0.5478938300274205
name: Cosine Ndcg@20
- type: cosine_ndcg@50
value: 0.5478938300274205
name: Cosine Ndcg@50
- type: cosine_ndcg@100
value: 0.5478938300274205
name: Cosine Ndcg@100
- type: cosine_ndcg@150
value: 0.5478938300274205
name: Cosine Ndcg@150
- type: cosine_ndcg@200
value: 0.5478938300274205
name: Cosine Ndcg@200
- type: cosine_mrr@1
value: 0.19760790431617264
name: Cosine Mrr@1
- type: cosine_mrr@20
value: 0.4124442798779788
name: Cosine Mrr@20
- type: cosine_mrr@50
value: 0.4124442798779788
name: Cosine Mrr@50
- type: cosine_mrr@100
value: 0.4124442798779788
name: Cosine Mrr@100
- type: cosine_mrr@150
value: 0.4124442798779788
name: Cosine Mrr@150
- type: cosine_mrr@200
value: 0.4124442798779788
name: Cosine Mrr@200
- type: cosine_map@1
value: 0.19760790431617264
name: Cosine Map@1
- type: cosine_map@20
value: 0.32993583709540925
name: Cosine Map@20
- type: cosine_map@50
value: 0.32993583709540925
name: Cosine Map@50
- type: cosine_map@100
value: 0.32993583709540925
name: Cosine Map@100
- type: cosine_map@150
value: 0.32993583709540925
name: Cosine Map@150
- type: cosine_map@200
value: 0.32993583709540925
name: Cosine Map@200
- type: cosine_map@500
value: 0.32993583709540925
name: Cosine Map@500
---
# SentenceTransformer based on Alibaba-NLP/gte-multilingual-base
This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [Alibaba-NLP/gte-multilingual-base](https://huggingface.co/Alibaba-NLP/gte-multilingual-base) on the full_en, full_de, full_es, full_zh and mix datasets. It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
## Model Details
### Model Description
- **Model Type:** Sentence Transformer
- **Base model:** [Alibaba-NLP/gte-multilingual-base](https://huggingface.co/Alibaba-NLP/gte-multilingual-base)
- **Maximum Sequence Length:** 512 tokens
- **Output Dimensionality:** 768 dimensions
- **Similarity Function:** Cosine Similarity
- **Training Datasets:**
- full_en
- full_de
- full_es
- full_zh
- mix
### Model Sources
- **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
- **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
- **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)
### Full Model Architecture
```
SentenceTransformer(
(0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: NewModel
(1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
(2): Normalize()
)
```
## Usage
### Direct Usage (Sentence Transformers)
First install the Sentence Transformers library:
```bash
pip install -U sentence-transformers
```
Then you can load this model and run inference.
```python
from sentence_transformers import SentenceTransformer
# Download from the 🤗 Hub
model = SentenceTransformer("sentence_transformers_model_id")
# Run inference
sentences = [
'Volksvertreter',
'Parlamentarier',
'Oberbürgermeister',
]
embeddings = model.encode(sentences)
print(embeddings.shape)
# [3, 768]
# Get the similarity scores for the embeddings
similarities = model.similarity(embeddings, embeddings)
print(similarities.shape)
# [3, 3]
```
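As a follow-up, here is a minimal retrieval-style sketch for ranking candidate job titles against a query title. It reuses the placeholder model ID from the snippet above and a few of the widget examples as candidates; adapt the names to your own data.
```python
from sentence_transformers import SentenceTransformer

# Placeholder model ID, same as in the snippet above
model = SentenceTransformer("sentence_transformers_model_id")

query = "online communication manager"
candidates = ["social media manager", "budget manager", "trades union official"]

query_embedding = model.encode([query])          # shape: (1, 768)
candidate_embeddings = model.encode(candidates)  # shape: (3, 768)

# Cosine similarity scores between the query and each candidate, shape (1, 3)
scores = model.similarity(query_embedding, candidate_embeddings)
ranked = sorted(zip(candidates, scores[0].tolist()), key=lambda pair: pair[1], reverse=True)
for title, score in ranked:
    print(f"{score:.4f}  {title}")
```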
## Evaluation
### Metrics
#### Information Retrieval
* Datasets: `full_en`, `full_es`, `full_de`, `full_zh`, `mix_es`, `mix_de` and `mix_zh`
* Evaluated with [InformationRetrievalEvaluator](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.InformationRetrievalEvaluator) (a reproduction sketch follows the table below)
| Metric | full_en | full_es | full_de | full_zh | mix_es | mix_de | mix_zh |
|:---------------------|:----------|:-----------|:-----------|:-----------|:-----------|:-----------|:-----------|
| cosine_accuracy@1 | 0.6476 | 0.1297 | 0.2956 | 0.6505 | 0.7244 | 0.6698 | 0.1976 |
| cosine_accuracy@20 | 0.9905 | 1.0 | 0.9704 | 0.9806 | 0.961 | 0.9506 | 1.0 |
| cosine_accuracy@50 | 0.9905 | 1.0 | 0.9852 | 0.9903 | 0.9797 | 0.9771 | 1.0 |
| cosine_accuracy@100 | 0.9905 | 1.0 | 0.9852 | 0.9903 | 0.9938 | 0.986 | 1.0 |
| cosine_accuracy@150 | 0.9905 | 1.0 | 0.9901 | 0.9903 | 0.9958 | 0.9922 | 1.0 |
| cosine_accuracy@200 | 0.9905 | 1.0 | 0.9901 | 0.9903 | 0.9974 | 0.9943 | 1.0 |
| cosine_precision@1 | 0.6476 | 0.1297 | 0.2956 | 0.6505 | 0.7244 | 0.6698 | 0.1976 |
| cosine_precision@20 | 0.5133 | 0.5705 | 0.5121 | 0.4782 | 0.1243 | 0.1247 | 0.1544 |
| cosine_precision@50 | 0.3166 | 0.3896 | 0.3664 | 0.287 | 0.0513 | 0.0523 | 0.0618 |
| cosine_precision@100 | 0.1886 | 0.2514 | 0.2411 | 0.1756 | 0.0262 | 0.0267 | 0.0309 |
| cosine_precision@150 | 0.134 | 0.1901 | 0.1806 | 0.1254 | 0.0176 | 0.018 | 0.0206 |
| cosine_precision@200 | 0.1043 | 0.1515 | 0.1453 | 0.0979 | 0.0133 | 0.0135 | 0.0154 |
| cosine_recall@1 | 0.0674 | 0.0037 | 0.0111 | 0.0612 | 0.2803 | 0.2518 | 0.0637 |
| cosine_recall@20 | 0.5411 | 0.3843 | 0.323 | 0.5127 | 0.9183 | 0.9059 | 1.0 |
| cosine_recall@50 | 0.7397 | 0.5663 | 0.504 | 0.6881 | 0.9483 | 0.9474 | 1.0 |
| cosine_recall@100 | 0.843 | 0.671 | 0.624 | 0.8003 | 0.9692 | 0.9679 | 1.0 |
| cosine_recall@150 | 0.8856 | 0.7444 | 0.6837 | 0.8453 | 0.9756 | 0.9771 | 1.0 |
| cosine_recall@200 | 0.9091 | 0.7805 | 0.7242 | 0.8773 | 0.9822 | 0.9808 | 1.0 |
| cosine_ndcg@1 | 0.6476 | 0.1297 | 0.2956 | 0.6505 | 0.7244 | 0.6698 | 0.1976 |
| cosine_ndcg@20 | 0.6917 | 0.6134 | 0.5416 | 0.6531 | 0.8023 | 0.7703 | 0.5479 |
| cosine_ndcg@50 | 0.7148 | 0.5888 | 0.5274 | 0.6669 | 0.8105 | 0.7819 | 0.5479 |
| cosine_ndcg@100 | 0.7667 | 0.6136 | 0.5574 | 0.7219 | 0.815 | 0.7865 | 0.5479 |
| cosine_ndcg@150 | 0.7856 | 0.6493 | 0.5883 | 0.7416 | 0.8163 | 0.7884 | 0.5479 |
| **cosine_ndcg@200** | **0.796** | **0.6672** | **0.6082** | **0.7536** | **0.8174** | **0.7891** | **0.5479** |
| cosine_mrr@1 | 0.6476 | 0.1297 | 0.2956 | 0.6505 | 0.7244 | 0.6698 | 0.1976 |
| cosine_mrr@20 | 0.809 | 0.5608 | 0.5107 | 0.7994 | 0.7938 | 0.7504 | 0.4124 |
| cosine_mrr@50 | 0.809 | 0.5608 | 0.5112 | 0.7998 | 0.7944 | 0.7513 | 0.4124 |
| cosine_mrr@100 | 0.809 | 0.5608 | 0.5112 | 0.7998 | 0.7946 | 0.7515 | 0.4124 |
| cosine_mrr@150 | 0.809 | 0.5608 | 0.5112 | 0.7998 | 0.7946 | 0.7515 | 0.4124 |
| cosine_mrr@200 | 0.809 | 0.5608 | 0.5112 | 0.7998 | 0.7946 | 0.7515 | 0.4124 |
| cosine_map@1 | 0.6476 | 0.1297 | 0.2956 | 0.6505 | 0.7244 | 0.6698 | 0.1976 |
| cosine_map@20 | 0.5561 | 0.4793 | 0.4033 | 0.5072 | 0.7324 | 0.693 | 0.3299 |
| cosine_map@50 | 0.5478 | 0.4265 | 0.3593 | 0.4897 | 0.7347 | 0.6961 | 0.3299 |
| cosine_map@100 | 0.5792 | 0.4309 | 0.3633 | 0.5197 | 0.7353 | 0.6968 | 0.3299 |
| cosine_map@150 | 0.5872 | 0.4463 | 0.378 | 0.5277 | 0.7354 | 0.6969 | 0.3299 |
| cosine_map@200 | 0.5909 | 0.4536 | 0.3855 | 0.5311 | 0.7355 | 0.697 | 0.3299 |
| cosine_map@500 | 0.5949 | 0.4659 | 0.3984 | 0.5365 | 0.7356 | 0.6971 | 0.3299 |
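The metrics above were computed with `InformationRetrievalEvaluator`. A minimal sketch of how such an evaluation could be reproduced follows; the queries, corpus, and relevance judgments here are placeholders, since the actual evaluation splits are not included in this card.
```python
from sentence_transformers import SentenceTransformer
from sentence_transformers.evaluation import InformationRetrievalEvaluator

model = SentenceTransformer("sentence_transformers_model_id")

# Placeholder evaluation data: query ID -> query text, corpus ID -> document text,
# and query ID -> set of relevant corpus IDs.
queries = {"q1": "online communication manager"}
corpus = {"d1": "social media manager", "d2": "budget manager"}
relevant_docs = {"q1": {"d1"}}

evaluator = InformationRetrievalEvaluator(
    queries=queries,
    corpus=corpus,
    relevant_docs=relevant_docs,
    name="full_en",
    accuracy_at_k=[1, 20, 50, 100, 150, 200],
    precision_recall_at_k=[1, 20, 50, 100, 150, 200],
    ndcg_at_k=[1, 20, 50, 100, 150, 200],
    mrr_at_k=[1, 20, 50, 100, 150, 200],
    map_at_k=[1, 20, 50, 100, 150, 200, 500],
)
results = evaluator(model)
print(results)  # e.g. {'full_en_cosine_ndcg@200': ..., 'full_en_cosine_map@500': ..., ...}
```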
## Training Details
### Training Datasets
#### full_en
* Dataset: full_en
* Size: 28,880 training samples
* Columns: anchor and positive
* Approximate statistics based on the first 1000 samples:
| | anchor | positive |
|:--------|:---------------------------------------------------------------------------------|:---------------------------------------------------------------------------------|
| type | string | string |
| details | min: 3 tokens, mean: 5.68 tokens, max: 11 tokens | min: 3 tokens, mean: 5.76 tokens, max: 12 tokens |
* Samples:
| anchor | positive |
|:-----------------------------------------|:-----------------------------------------|
| air commodore | flight lieutenant |
| command and control officer | flight officer |
| air commodore | command and control officer |
* Loss: [GISTEmbedLoss](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#gistembedloss) with these parameters:
```json
{'guide': SentenceTransformer(
(0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: BertModel
(1): Pooling({'word_embedding_dimension': 384, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
(2): Normalize()
), 'temperature': 0.01, 'margin_strategy': 'absolute', 'margin': 0.0}
```
#### full_de
* Dataset: full_de
* Size: 23,023 training samples
* Columns: anchor and positive
* Approximate statistics based on the first 1000 samples:
| | anchor | positive |
|:--------|:---------------------------------------------------------------------------------|:---------------------------------------------------------------------------------|
| type | string | string |
| details | min: 3 tokens, mean: 7.99 tokens, max: 30 tokens | min: 3 tokens, mean: 8.19 tokens, max: 30 tokens |
* Samples:
| anchor | positive |
|:----------------------------------|:-----------------------------------------------------|
| Staffelkommandantin | Kommodore |
| Luftwaffenoffizierin | Luftwaffenoffizier/Luftwaffenoffizierin |
| Staffelkommandantin | Luftwaffenoffizierin |
* Loss: [GISTEmbedLoss](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#gistembedloss) with these parameters:
```json
{'guide': SentenceTransformer(
(0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: BertModel
(1): Pooling({'word_embedding_dimension': 384, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
(2): Normalize()
), 'temperature': 0.01, 'margin_strategy': 'absolute', 'margin': 0.0}
```
#### full_es
* Dataset: full_es
* Size: 20,724 training samples
* Columns: anchor and positive
* Approximate statistics based on the first 1000 samples:
| | anchor | positive |
|:--------|:---------------------------------------------------------------------------------|:---------------------------------------------------------------------------------|
| type | string | string |
| details | min: 3 tokens, mean: 9.13 tokens, max: 32 tokens | min: 3 tokens, mean: 8.84 tokens, max: 32 tokens |
* Samples:
| anchor | positive |
|:------------------------------------|:-------------------------------------------|
| jefe de escuadrón | instructor |
| comandante de aeronave | instructor de simulador |
| instructor | oficial del Ejército del Aire |
* Loss: [GISTEmbedLoss](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#gistembedloss) with these parameters:
```json
{'guide': SentenceTransformer(
(0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: BertModel
(1): Pooling({'word_embedding_dimension': 384, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
(2): Normalize()
), 'temperature': 0.01, 'margin_strategy': 'absolute', 'margin': 0.0}
```
#### full_zh
* Dataset: full_zh
* Size: 30,401 training samples
* Columns: anchor and positive
* Approximate statistics based on the first 1000 samples:
| | anchor | positive |
|:--------|:---------------------------------------------------------------------------------|:---------------------------------------------------------------------------------|
| type | string | string |
| details | min: 5 tokens, mean: 7.15 tokens, max: 14 tokens | min: 5 tokens, mean: 7.46 tokens, max: 21 tokens |
* Samples:
| anchor | positive |
|:------------------|:---------------------|
| 技术总监 | 技术和运营总监 |
| 技术总监 | 技术主管 |
| 技术总监 | 技术艺术总监 |
* Loss: [GISTEmbedLoss](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#gistembedloss) with these parameters:
```json
{'guide': SentenceTransformer(
(0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: BertModel
(1): Pooling({'word_embedding_dimension': 384, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
(2): Normalize()
), 'temperature': 0.01, 'margin_strategy': 'absolute', 'margin': 0.0}
```
#### mix
* Dataset: mix
* Size: 21,760 training samples
* Columns: anchor and positive
* Approximate statistics based on the first 1000 samples:
| | anchor | positive |
|:--------|:---------------------------------------------------------------------------------|:---------------------------------------------------------------------------------|
| type | string | string |
| details | min: 2 tokens, mean: 6.71 tokens, max: 19 tokens | min: 2 tokens, mean: 7.69 tokens, max: 19 tokens |
* Samples:
| anchor | positive |
|:------------------------------------------|:----------------------------------------------------------------|
| technical manager | Technischer Direktor für Bühne, Film und Fernsehen |
| head of technical | directora técnica |
| head of technical department | 技术艺术总监 |
* Loss: [GISTEmbedLoss](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#gistembedloss) with these parameters:
```json
{'guide': SentenceTransformer(
(0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: BertModel
(1): Pooling({'word_embedding_dimension': 384, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
(2): Normalize()
), 'temperature': 0.01, 'margin_strategy': 'absolute', 'margin': 0.0}
```
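All five datasets above are trained with GISTEmbedLoss guided by a small encoder. A minimal sketch of how this setup could be assembled follows; the training pairs are placeholders and the guide model ID is an assumption (the card only shows the guide's architecture, a 384-dimensional BERT encoder with mean pooling, not its identifier).
```python
from datasets import Dataset
from sentence_transformers import SentenceTransformer, SentenceTransformerTrainer
from sentence_transformers.losses import GISTEmbedLoss

# Placeholder (anchor, positive) pairs; the real data is the full_en/full_de/full_es/full_zh/mix datasets.
train_dataset = Dataset.from_dict({
    "anchor": ["air commodore", "command and control officer"],
    "positive": ["flight lieutenant", "flight officer"],
})

# The base model uses custom code (NewModel), hence trust_remote_code=True
model = SentenceTransformer("Alibaba-NLP/gte-multilingual-base", trust_remote_code=True)
# Hypothetical guide model; any small sentence encoder with good quality could serve here
guide = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")

# Loss parameters as listed in the configs above
loss = GISTEmbedLoss(model, guide, temperature=0.01, margin_strategy="absolute", margin=0.0)

trainer = SentenceTransformerTrainer(model=model, train_dataset=train_dataset, loss=loss)
trainer.train()
```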
### Training Hyperparameters
#### Non-Default Hyperparameters
- `eval_strategy`: steps
- `per_device_train_batch_size`: 64
- `per_device_eval_batch_size`: 128
- `gradient_accumulation_steps`: 2
- `num_train_epochs`: 5
- `warmup_ratio`: 0.05
- `log_on_each_node`: False
- `fp16`: True
- `dataloader_num_workers`: 4
- `ddp_find_unused_parameters`: True
- `batch_sampler`: no_duplicates
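A minimal sketch of how these non-default values could be expressed as `SentenceTransformerTrainingArguments`; `output_dir` is a placeholder.
```python
from sentence_transformers import SentenceTransformerTrainingArguments
from sentence_transformers.training_args import BatchSamplers

# Mirrors the non-default hyperparameters listed above
args = SentenceTransformerTrainingArguments(
    output_dir="output/gte-multilingual-base-finetuned",  # placeholder
    eval_strategy="steps",
    per_device_train_batch_size=64,
    per_device_eval_batch_size=128,
    gradient_accumulation_steps=2,
    num_train_epochs=5,
    warmup_ratio=0.05,
    log_on_each_node=False,
    fp16=True,
    dataloader_num_workers=4,
    ddp_find_unused_parameters=True,
    batch_sampler=BatchSamplers.NO_DUPLICATES,
)
```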
#### All Hyperparameters
- `overwrite_output_dir`: False
- `do_predict`: False
- `eval_strategy`: steps
- `prediction_loss_only`: True
- `per_device_train_batch_size`: 64
- `per_device_eval_batch_size`: 128
- `per_gpu_train_batch_size`: None
- `per_gpu_eval_batch_size`: None
- `gradient_accumulation_steps`: 2
- `eval_accumulation_steps`: None
- `torch_empty_cache_steps`: None
- `learning_rate`: 5e-05
- `weight_decay`: 0.0
- `adam_beta1`: 0.9
- `adam_beta2`: 0.999
- `adam_epsilon`: 1e-08
- `max_grad_norm`: 1.0
- `num_train_epochs`: 5
- `max_steps`: -1
- `lr_scheduler_type`: linear
- `lr_scheduler_kwargs`: {}
- `warmup_ratio`: 0.05
- `warmup_steps`: 0
- `log_level`: passive
- `log_level_replica`: warning
- `log_on_each_node`: False
- `logging_nan_inf_filter`: True
- `save_safetensors`: True
- `save_on_each_node`: False
- `save_only_model`: False
- `restore_callback_states_from_checkpoint`: False
- `no_cuda`: False
- `use_cpu`: False
- `use_mps_device`: False
- `seed`: 42
- `data_seed`: None
- `jit_mode_eval`: False
- `use_ipex`: False
- `bf16`: False
- `fp16`: True
- `fp16_opt_level`: O1
- `half_precision_backend`: auto
- `bf16_full_eval`: False
- `fp16_full_eval`: False
- `tf32`: None
- `local_rank`: 0
- `ddp_backend`: None
- `tpu_num_cores`: None
- `tpu_metrics_debug`: False
- `debug`: []
- `dataloader_drop_last`: True
- `dataloader_num_workers`: 4
- `dataloader_prefetch_factor`: None
- `past_index`: -1
- `disable_tqdm`: False
- `remove_unused_columns`: True
- `label_names`: None
- `load_best_model_at_end`: False
- `ignore_data_skip`: False
- `fsdp`: []
- `fsdp_min_num_params`: 0
- `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
- `tp_size`: 0
- `fsdp_transformer_layer_cls_to_wrap`: None
- `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
- `deepspeed`: None
- `label_smoothing_factor`: 0.0
- `optim`: adamw_torch
- `optim_args`: None
- `adafactor`: False
- `group_by_length`: False
- `length_column_name`: length
- `ddp_find_unused_parameters`: True
- `ddp_bucket_cap_mb`: None
- `ddp_broadcast_buffers`: False
- `dataloader_pin_memory`: True
- `dataloader_persistent_workers`: False
- `skip_memory_metrics`: True
- `use_legacy_prediction_loop`: False
- `push_to_hub`: False
- `resume_from_checkpoint`: None
- `hub_model_id`: None
- `hub_strategy`: every_save
- `hub_private_repo`: None
- `hub_always_push`: False
- `gradient_checkpointing`: False
- `gradient_checkpointing_kwargs`: None
- `include_inputs_for_metrics`: False
- `include_for_metrics`: []
- `eval_do_concat_batches`: True
- `fp16_backend`: auto
- `push_to_hub_model_id`: None
- `push_to_hub_organization`: None
- `mp_parameters`:
- `auto_find_batch_size`: False
- `full_determinism`: False
- `torchdynamo`: None
- `ray_scope`: last
- `ddp_timeout`: 1800
- `torch_compile`: False
- `torch_compile_backend`: None
- `torch_compile_mode`: None
- `include_tokens_per_second`: False
- `include_num_input_tokens_seen`: False
- `neftune_noise_alpha`: None
- `optim_target_modules`: None
- `batch_eval_metrics`: False
- `eval_on_start`: False
- `use_liger_kernel`: False
- `eval_use_gather_object`: False
- `average_tokens_across_devices`: False
- `prompts`: None
- `batch_sampler`: no_duplicates
- `multi_dataset_batch_sampler`: proportional
### Training Logs
| Epoch | Step | Training Loss | full_en_cosine_ndcg@200 | full_es_cosine_ndcg@200 | full_de_cosine_ndcg@200 | full_zh_cosine_ndcg@200 | mix_es_cosine_ndcg@200 | mix_de_cosine_ndcg@200 | mix_zh_cosine_ndcg@200 |
|:------:|:----:|:-------------:|:-----------------------:|:-----------------------:|:-----------------------:|:-----------------------:|:----------------------:|:----------------------:|:----------------------:|
| -1 | -1 | - | 0.7447 | 0.6125 | 0.5378 | 0.7240 | 0.7029 | 0.6345 | 0.5531 |
| 0.0010 | 1 | 3.4866 | - | - | - | - | - | - | - |
| 0.1027 | 100 | 2.5431 | - | - | - | - | - | - | - |
| 0.2053 | 200 | 1.4536 | 0.7993 | 0.6633 | 0.5974 | 0.7642 | 0.7567 | 0.7011 | 0.5498 |
| 0.3080 | 300 | 1.1018 | - | - | - | - | - | - | - |
| 0.4107 | 400 | 0.9184 | 0.7925 | 0.6586 | 0.6058 | 0.7587 | 0.7749 | 0.7278 | 0.5486 |
| 0.5133 | 500 | 0.8973 | - | - | - | - | - | - | - |
| 0.6160 | 600 | 0.7309 | 0.7951 | 0.6671 | 0.6096 | 0.7708 | 0.7793 | 0.7339 | 0.5525 |
| 0.7187 | 700 | 0.7297 | - | - | - | - | - | - | - |
| 0.8214 | 800 | 0.7281 | 0.7929 | 0.6711 | 0.6088 | 0.7645 | 0.7899 | 0.7444 | 0.5479 |
| 0.9240 | 900 | 0.6607 | - | - | - | - | - | - | - |
| 1.0267 | 1000 | 0.6075 | 0.7915 | 0.6659 | 0.6088 | 0.7665 | 0.7968 | 0.7588 | 0.5482 |
| 1.1294 | 1100 | 0.4553 | - | - | - | - | - | - | - |
| 1.2320 | 1200 | 0.4775 | 0.7979 | 0.6696 | 0.6033 | 0.7669 | 0.7959 | 0.7624 | 0.5484 |
| 1.3347 | 1300 | 0.4838 | - | - | - | - | - | - | - |
| 1.4374 | 1400 | 0.4912 | 0.7973 | 0.6757 | 0.6112 | 0.7656 | 0.7978 | 0.7650 | 0.5487 |
| 1.5400 | 1500 | 0.4732 | - | - | - | - | - | - | - |
| 1.6427 | 1600 | 0.5269 | 0.8031 | 0.6723 | 0.6108 | 0.7654 | 0.8008 | 0.7660 | 0.5492 |
| 1.7454 | 1700 | 0.4822 | - | - | - | - | - | - | - |
| 1.8480 | 1800 | 0.5072 | 0.7962 | 0.6668 | 0.6051 | 0.7592 | 0.8001 | 0.7714 | 0.5486 |
| 1.9507 | 1900 | 0.4709 | - | - | - | - | - | - | - |
| 2.0544 | 2000 | 0.3772 | 0.7940 | 0.6647 | 0.6037 | 0.7579 | 0.8064 | 0.7732 | 0.5479 |
| 2.1571 | 2100 | 0.3982 | - | - | - | - | - | - | - |
| 2.2598 | 2200 | 0.3073 | 0.7969 | 0.6652 | 0.6005 | 0.7625 | 0.8054 | 0.7734 | 0.5493 |
| 2.3624 | 2300 | 0.383 | - | - | - | - | - | - | - |
| 2.4651 | 2400 | 0.3687 | 0.7925 | 0.6690 | 0.5987 | 0.7583 | 0.8081 | 0.7735 | 0.5477 |
| 2.5678 | 2500 | 0.3472 | - | - | - | - | - | - | - |
| 2.6704 | 2600 | 0.3557 | 0.7956 | 0.6758 | 0.6019 | 0.7659 | 0.8082 | 0.7767 | 0.5491 |
| 2.7731 | 2700 | 0.3527 | - | - | - | - | - | - | - |
| 2.8758 | 2800 | 0.3446 | 0.7945 | 0.6719 | 0.6020 | 0.7616 | 0.8124 | 0.7818 | 0.5496 |
| 2.9784 | 2900 | 0.3566 | - | - | - | - | - | - | - |
| 3.0821 | 3000 | 0.3252 | 0.7948 | 0.6682 | 0.6025 | 0.7617 | 0.8152 | 0.7848 | 0.5516 |
| 3.1848 | 3100 | 0.2968 | - | - | - | - | - | - | - |
| 3.2875 | 3200 | 0.2962 | 0.7953 | 0.6717 | 0.6086 | 0.7613 | 0.8110 | 0.7824 | 0.5482 |
| 3.3901 | 3300 | 0.3084 | - | - | - | - | - | - | - |
| 3.4928 | 3400 | 0.2909 | 0.7940 | 0.6634 | 0.6023 | 0.7615 | 0.8138 | 0.7822 | 0.5457 |
| 3.5955 | 3500 | 0.2964 | - | - | - | - | - | - | - |
| 3.6982 | 3600 | 0.3193 | 0.7960 | 0.6635 | 0.6070 | 0.7534 | 0.8164 | 0.7844 | 0.5467 |
| 3.8008 | 3700 | 0.3514 | - | - | - | - | - | - | - |
| 3.9035 | 3800 | 0.3147 | 0.7973 | 0.6696 | 0.6125 | 0.7616 | 0.8176 | 0.7885 | 0.5469 |
| 4.0062 | 3900 | 0.2738 | - | - | - | - | - | - | - |
| 4.1088 | 4000 | 0.2842 | 0.7960 | 0.6672 | 0.6082 | 0.7536 | 0.8174 | 0.7891 | 0.5479 |
### Framework Versions
- Python: 3.11.11
- Sentence Transformers: 4.1.0
- Transformers: 4.51.2
- PyTorch: 2.6.0+cu124
- Accelerate: 1.6.0
- Datasets: 3.5.0
- Tokenizers: 0.21.1
## Citation
### BibTeX
#### Sentence Transformers
```bibtex
@inproceedings{reimers-2019-sentence-bert,
title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
author = "Reimers, Nils and Gurevych, Iryna",
booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
month = "11",
year = "2019",
publisher = "Association for Computational Linguistics",
url = "https://arxiv.org/abs/1908.10084",
}
```
#### GISTEmbedLoss
```bibtex
@misc{solatorio2024gistembed,
title={GISTEmbed: Guided In-sample Selection of Training Negatives for Text Embedding Fine-tuning},
author={Aivin V. Solatorio},
year={2024},
eprint={2402.16829},
archivePrefix={arXiv},
primaryClass={cs.LG}
}
```