{ "dataset_revision": "a76d16fae880597b9c73047b50159220a441cb54", "task_name": "MTOPDomainClassification", "mteb_version": "2.3.5", "scores": { "test": [ { "scores_per_experiment": [ { "accuracy": 0.930689, "f1": 0.925808, "f1_weighted": 0.930751, "precision": 0.925304, "precision_weighted": 0.932642, "recall": 0.928221, "recall_weighted": 0.930689, "ap": null, "ap_weighted": null }, { "accuracy": 0.942772, "f1": 0.940072, "f1_weighted": 0.942489, "precision": 0.939443, "precision_weighted": 0.943039, "recall": 0.941619, "recall_weighted": 0.942772, "ap": null, "ap_weighted": null }, { "accuracy": 0.920429, "f1": 0.91604, "f1_weighted": 0.919121, "precision": 0.918698, "precision_weighted": 0.920319, "recall": 0.91624, "recall_weighted": 0.920429, "ap": null, "ap_weighted": null }, { "accuracy": 0.928409, "f1": 0.92513, "f1_weighted": 0.928298, "precision": 0.923421, "precision_weighted": 0.93486, "recall": 0.933284, "recall_weighted": 0.928409, "ap": null, "ap_weighted": null }, { "accuracy": 0.926585, "f1": 0.922983, "f1_weighted": 0.926812, "precision": 0.921012, "precision_weighted": 0.928905, "recall": 0.926841, "recall_weighted": 0.926585, "ap": null, "ap_weighted": null }, { "accuracy": 0.929321, "f1": 0.927184, "f1_weighted": 0.92875, "precision": 0.924008, "precision_weighted": 0.930512, "recall": 0.932269, "recall_weighted": 0.929321, "ap": null, "ap_weighted": null }, { "accuracy": 0.919973, "f1": 0.917008, "f1_weighted": 0.919161, "precision": 0.915343, "precision_weighted": 0.921603, "recall": 0.921312, "recall_weighted": 0.919973, "ap": null, "ap_weighted": null }, { "accuracy": 0.933653, "f1": 0.929426, "f1_weighted": 0.933734, "precision": 0.926488, "precision_weighted": 0.934976, "recall": 0.933667, "recall_weighted": 0.933653, "ap": null, "ap_weighted": null }, { "accuracy": 0.925901, "f1": 0.924229, "f1_weighted": 0.926117, "precision": 0.921259, "precision_weighted": 0.928978, "recall": 0.929757, "recall_weighted": 0.925901, "ap": null, "ap_weighted": null }, { "accuracy": 0.932513, "f1": 0.927476, "f1_weighted": 0.933101, "precision": 0.92608, "precision_weighted": 0.9348, "recall": 0.93007, "recall_weighted": 0.932513, "ap": null, "ap_weighted": null } ], "accuracy": 0.929024, "f1": 0.925536, "f1_weighted": 0.928834, "precision": 0.924106, "precision_weighted": 0.931063, "recall": 0.929328, "recall_weighted": 0.929024, "ap": NaN, "ap_weighted": NaN, "main_score": 0.929024, "hf_subset": "en", "languages": [ "eng-Latn" ] } ] }, "evaluation_time": 26.848695993423462, "kg_co2_emissions": null }