{ "dataset_revision": "d604517c81ca91fe16a244d1248fc021f9ecee7a", "task_name": "TweetSentimentExtractionClassification", "mteb_version": "2.3.5", "scores": { "test": [ { "scores_per_experiment": [ { "accuracy": 0.60017, "f1": 0.602466, "f1_weighted": 0.593331, "precision": 0.599598, "precision_weighted": 0.598729, "recall": 0.616661, "recall_weighted": 0.60017, "ap": null, "ap_weighted": null }, { "accuracy": 0.589983, "f1": 0.592724, "f1_weighted": 0.582562, "precision": 0.589319, "precision_weighted": 0.588772, "recall": 0.608987, "recall_weighted": 0.589983, "ap": null, "ap_weighted": null }, { "accuracy": 0.608659, "f1": 0.610167, "f1_weighted": 0.601373, "precision": 0.612042, "precision_weighted": 0.615692, "recall": 0.628804, "recall_weighted": 0.608659, "ap": null, "ap_weighted": null }, { "accuracy": 0.588851, "f1": 0.591059, "f1_weighted": 0.579425, "precision": 0.591457, "precision_weighted": 0.591131, "recall": 0.611816, "recall_weighted": 0.588851, "ap": null, "ap_weighted": null }, { "accuracy": 0.60781, "f1": 0.611659, "f1_weighted": 0.603352, "precision": 0.609397, "precision_weighted": 0.609397, "recall": 0.62428, "recall_weighted": 0.60781, "ap": null, "ap_weighted": null }, { "accuracy": 0.605546, "f1": 0.609391, "f1_weighted": 0.598667, "precision": 0.606539, "precision_weighted": 0.605213, "recall": 0.625473, "recall_weighted": 0.605546, "ap": null, "ap_weighted": null }, { "accuracy": 0.578664, "f1": 0.577106, "f1_weighted": 0.565601, "precision": 0.576182, "precision_weighted": 0.574397, "recall": 0.598411, "recall_weighted": 0.578664, "ap": null, "ap_weighted": null }, { "accuracy": 0.621109, "f1": 0.624325, "f1_weighted": 0.615278, "precision": 0.620682, "precision_weighted": 0.620097, "recall": 0.637876, "recall_weighted": 0.621109, "ap": null, "ap_weighted": null }, { "accuracy": 0.54584, "f1": 0.548347, "f1_weighted": 0.537797, "precision": 0.544945, "precision_weighted": 0.543678, "recall": 0.564986, "recall_weighted": 0.54584, "ap": null, "ap_weighted": null }, { "accuracy": 0.568478, "f1": 0.574254, "f1_weighted": 0.566821, "precision": 0.571524, "precision_weighted": 0.56722, "recall": 0.579174, "recall_weighted": 0.568478, "ap": null, "ap_weighted": null } ], "accuracy": 0.591511, "f1": 0.59415, "f1_weighted": 0.584421, "precision": 0.592169, "precision_weighted": 0.591432, "recall": 0.609647, "recall_weighted": 0.591511, "ap": NaN, "ap_weighted": NaN, "main_score": 0.591511, "hf_subset": "default", "languages": [ "eng-Latn" ] } ] }, "evaluation_time": 27.70308542251587, "kg_co2_emissions": null }