{
  "cluster_1": [
    "gem_dart_1_1_0",
    "quarel_choose_between",
    "wiki_hop_original_explain_relation",
    "glue_qnli_2_0_0",
    "adversarial_qa_dbert_generate_question",
    "race_high_Select_the_best_answer_no_instructions_",
    "kilt_tasks_hotpotqa_combining_facts",
    "adversarial_qa_dbidaf_question_context_answer",
    "adversarial_qa_dbidaf_generate_question",
    "glue_cola_2_0_0",
    "imdb_reviews_plain_text_1_0_0",
    "squad_v1_1_3_0_0",
    "race_high_Is_this_the_right_answer",
    "qasc_qa_with_separated_facts_1",
    "glue_sst2_2_0_0",
    "wiqa_what_is_the_missing_first_step",
    "duorc_ParaphraseRC_generate_question_by_answer",
    "wmt16_translate_ro_en_1_0_0",
    "wiki_qa_Jeopardy_style",
    "quartz_answer_question_below",
    "ropes_prompt_beginning",
    "ropes_read_background_situation",
    "wiki_qa_Decide_good_answer",
    "super_glue_cb_1_0_2",
    "qasc_qa_with_separated_facts_4",
    "cot_ecqa_ii"
  ],
  "cluster_2": [
    "cos_e_v1_11_question_description_option_id",
    "social_i_qa_Show_choices_and_generate_index",
    "web_questions_potential_correct_answer",
    "paws_wiki_1_1_0",
    "cos_e_v1_11_generate_explanation_given_text",
    "cosmos_qa_1_0_0",
    "sciq_Direct_Question",
    "super_glue_wsc_fixed_1_0_2",
    "race_middle_Taking_a_test",
    "wmt14_translate_fr_en_1_0_0",
    "duorc_SelfRC_extract_answer",
    "wiki_hop_original_generate_subject",
    "duorc_SelfRC_answer_question",
    "qasc_is_correct_1",
    "cos_e_v1_11_i_think",
    "wiki_qa_exercise",
    "race_middle_Write_a_multi_choice_question_options_given_",
    "quoref_Read_And_Extract_",
    "web_questions_short_general_knowledge_q",
    "web_questions_question_answer",
    "quarel_logic_test",
    "app_reviews_categorize_rating_using_review",
    "cot_strategyqa_ii",
    "glue_mnli_2_0_0",
    "quoref_Answer_Test",
    "super_glue_rte_1_0_2"
  ],
  "cluster_3": [
    "qasc_qa_with_separated_facts_5",
    "wiki_qa_automatic_system",
    "stream_aqua_ii",
    "dbpedia_14_pick_one_category_for_the_following_text",
    "wiki_qa_Topic_Prediction_Answer_Only",
    "wiki_qa_Generate_Question_from_Topic",
    "cot_gsm8k_ii",
    "cos_e_v1_11_question_option_description_id",
    "cos_e_v1_11_question_option_description_text",
    "wiki_hop_original_choose_best_object_interrogative_2",
    "quail_context_question_answer_description_text",
    "race_high_Read_the_article_and_answer_the_question_no_option_",
    "qasc_qa_with_combined_facts_1",
    "adversarial_qa_dbert_answer_the_following_q",
    "social_i_qa_I_was_wondering",
    "stream_aqua",
    "word_segment",
    "ropes_plain_no_background",
    "super_glue_multirc_1_0_2",
    "wiki_hop_original_choose_best_object_affirmative_3",
    "app_reviews_convert_to_rating",
    "anli_r3_0_1_0",
    "app_reviews_convert_to_star_rating",
    "quartz_paragraph_question_plain_concat",
    "kilt_tasks_hotpotqa_complex_question",
    "quartz_use_info_from_paragraph_question"
  ],
  "cluster_4": [
    "quoref_Found_Context_Online",
    "social_i_qa_Show_choices_and_generate_answer",
    "cos_e_v1_11_explain_why_human",
    "cot_sensemaking",
    "quoref_Find_Answer",
    "quail_context_description_question_text",
    "social_i_qa_Generate_the_question_from_the_answer",
    "quartz_having_read_above_passage",
    "stream_qed_ii",
    "wiqa_what_might_be_the_first_step_of_the_process",
    "wiki_bio_who",
    "duorc_SelfRC_generate_question_by_answer",
    "race_high_Select_the_best_answer_generate_span_",
    "lambada_1_0_0",
    "coqa_1_0_0",
    "race_high_Select_the_best_answer",
    "adversarial_qa_droberta_question_context_answer",
    "quail_context_question_answer_description_id",
    "adversarial_qa_dbidaf_tell_what_it_is",
    "adversarial_qa_droberta_tell_what_it_is",
    "cnn_dailymail_3_4_0",
    "quail_no_prompt_id",
    "ag_news_subset_1_0_0",
    "trivia_qa_rc_1_1_0",
    "ropes_prompt_bottom_hint_beginning",
    "super_glue_wic_1_0_2"
  ],
  "cluster_5": [
    "snli_1_1_0",
    "duorc_SelfRC_question_answering",
    "cot_sensemaking_ii",
    "huggingface_xsum",
    "duorc_ParaphraseRC_build_story_around_qa",
    "wiki_qa_Topic_Prediction_Question_and_Answer_Pair",
    "quartz_answer_question_based_on",
    "ropes_plain_background_situation",
    "race_middle_Write_a_multi_choice_question_for_the_following_article",
    "quail_description_context_question_text",
    "web_questions_whats_the_answer",
    "cot_ecqa",
    "true_case",
    "adversarial_qa_dbert_question_context_answer",
    "duorc_SelfRC_title_generation",
    "quail_context_question_description_answer_id",
    "quarel_do_not_use",
    "adversarial_qa_dbidaf_answer_the_following_q",
    "duorc_ParaphraseRC_decide_worth_it",
    "race_middle_Is_this_the_right_answer",
    "wmt16_translate_de_en_1_0_0",
    "wiki_qa_Is_This_True_",
    "race_middle_Select_the_best_answer",
    "aeslc_1_0_0",
    "duorc_ParaphraseRC_question_answering",
    "wiki_hop_original_choose_best_object_affirmative_1"
  ],
  "cluster_6": [
    "multi_news_1_0_0",
    "para_crawl_enes",
    "quoref_Context_Contains_Answer",
    "cos_e_v1_11_description_question_option_id",
    "glue_stsb_2_0_0",
    "quail_context_description_question_answer_text",
    "dream_read_the_following_conversation_and_answer_the_question",
    "cos_e_v1_11_question_description_option_text",
    "cos_e_v1_11_description_question_option_text",
    "race_high_Write_a_multi_choice_question_options_given_",
    "ropes_prompt_bottom_no_hint",
    "quarel_testing_students",
    "wmt16_translate_tr_en_1_0_0",
    "duorc_SelfRC_generate_question",
    "wiqa_what_is_the_final_step_of_the_following_process",
    "quoref_Given_Context_Answer_Question",
    "wiki_hop_original_generate_object",
    "quartz_use_info_from_question_paragraph",
    "duorc_SelfRC_build_story_around_qa",
    "drop_2_0_0",
    "wiqa_effect_with_string_answer",
    "race_high_Taking_a_test",
    "wiki_hop_original_generate_subject_and_object",
    "glue_qqp_2_0_0",
    "wiqa_which_of_the_following_is_the_supposed_perturbation"
  ],
  "cluster_7": [
    "cot_creak_ii",
    "quail_description_context_question_answer_id",
    "kilt_tasks_hotpotqa_final_exam",
    "cot_gsm8k",
    "cos_e_v1_11_aligned_with_common_sense",
    "squad_v2_0_3_0_0",
    "duorc_SelfRC_movie_director",
    "anli_r2_0_1_0",
    "dbpedia_14_given_a_list_of_category_what_does_the_title_belong_to",
    "duorc_ParaphraseRC_answer_question",
    "ropes_background_situation_middle",
    "dream_generate_first_utterance",
    "quail_context_question_description_text",
    "adversarial_qa_droberta_based_on",
    "glue_wnli_2_0_0",
    "super_glue_record_1_0_2",
    "web_questions_get_the_answer",
    "ropes_prompt_mix",
    "app_reviews_generate_review",
    "cos_e_v1_11_rationale",
    "adversarial_qa_droberta_generate_question",
    "yelp_polarity_reviews_0_2_0",
    "ropes_given_background_situation",
    "qasc_qa_with_separated_facts_3",
    "quoref_Guess_Answer"
  ],
  "cluster_8": [
    "wiki_hop_original_choose_best_object_affirmative_2",
    "duorc_ParaphraseRC_movie_director",
    "wiqa_what_might_be_the_last_step_of_the_process",
    "gem_common_gen_1_1_0",
    "quartz_read_passage_below_choose",
    "social_i_qa_Check_if_a_random_answer_is_valid_or_not",
    "cot_creak",
    "ropes_plain_bottom_hint",
    "super_glue_copa_1_0_2",
    "natural_questions_open_1_0_0",
    "trec_1_0_0",
    "gem_web_nlg_en_1_1_0",
    "wiki_bio_key_content",
    "wmt16_translate_fi_en_1_0_0",
    "quoref_Answer_Question_Given_Context",
    "duorc_ParaphraseRC_generate_question",
    "math_dataset_algebra__linear_1d_1_0_0",
    "duorc_ParaphraseRC_title_generation",
    "quail_context_description_question_answer_id",
    "wiki_bio_what_content",
    "adversarial_qa_dbert_tell_what_it_is",
    "sciq_Multiple_Choice_Closed_Book_",
    "duorc_ParaphraseRC_extract_answer",
    "dream_baseline",
    "gem_wiki_lingua_english_en_1_1_0"
  ],
  "cluster_9": [
    "race_high_Write_a_multi_choice_question_for_the_following_article",
    "cot_qasc",
    "definite_pronoun_resolution_1_1_0",
    "wiki_qa_found_on_google",
    "wiki_bio_comprehension",
    "wiki_qa_Topic_Prediction_Question_Only",
    "wiki_bio_guess_person",
    "fix_punct",
    "race_middle_Select_the_best_answer_no_instructions_",
    "quac_1_0_0",
    "wiqa_does_the_supposed_perturbation_have_an_effect",
    "quartz_given_the_fact_answer_the_q",
    "ropes_new_situation_background_answer",
    "social_i_qa_Generate_answer",
    "gigaword_1_2_0",
    "duorc_SelfRC_decide_worth_it",
    "kilt_tasks_hotpotqa_straighforward_qa",
    "quail_no_prompt_text",
    "cot_esnli",
    "quoref_Answer_Friend_Question",
    "race_middle_Select_the_best_answer_generate_span_",
    "unified_qa_science_inst",
    "sciq_Direct_Question_Closed_Book_",
    "dbpedia_14_given_list_what_category_does_the_paragraph_belong_to",
    "cot_strategyqa"
  ],
  "cluster_10": [
    "stream_qed",
    "cot_esnli_ii",
    "quarel_heres_a_story",
    "quoref_Guess_Title_For_Context",
    "qasc_is_correct_2",
    "wiqa_effect_with_label_answer",
    "dream_generate_last_utterance",
    "adversarial_qa_dbert_based_on",
    "dream_answer_to_dialogue",
    "sciq_Multiple_Choice_Question_First",
    "quail_context_question_description_answer_text",
    "wiki_qa_Direct_Answer_to_Question",
    "ropes_background_new_situation_answer",
    "adversarial_qa_droberta_answer_the_following_q",
    "kilt_tasks_hotpotqa_formulate",
    "quoref_What_Is_The_Answer",
    "dbpedia_14_given_a_choice_of_categories_",
    "qasc_qa_with_separated_facts_2",
    "glue_mrpc_2_0_0",
    "gem_e2e_nlg_1_1_0",
    "anli_r1_0_1_0",
    "race_middle_Read_the_article_and_answer_the_question_no_option_",
    "wiki_hop_original_choose_best_object_interrogative_1",
    "quail_description_context_question_answer_text",
    "adversarial_qa_dbidaf_based_on"
  ]
}