Add adapter bert-base-multilingual-cased_wikiann_ner_zh_pfeiffer version 1
- README.md +69 -0
- adapter_config.json +43 -0
- head_config.json +24 -0
- pytorch_adapter.bin +3 -0
- pytorch_model_head.bin +3 -0
    	
README.md ADDED
@@ -0,0 +1,69 @@
---
tags:
- bert
- adapter-transformers
- adapterhub:wikiann/zh
- token-classification
license: "apache-2.0"
---

# Adapter `bert-base-multilingual-cased_wikiann_ner_zh_pfeiffer` for bert-base-multilingual-cased

Task adapter stacked on top of a language adapter, MAD-X 2.0 style: the language adapters in the last layer (layer 11) are deleted.

**This adapter was created for usage with the [Adapters](https://github.com/Adapter-Hub/adapters) library.**

## Usage

First, install `adapters`:

```
pip install -U adapters
```

Now, the adapter can be loaded and activated like this:

```python
from adapters import AutoAdapterModel

model = AutoAdapterModel.from_pretrained("bert-base-multilingual-cased")
adapter_name = model.load_adapter("AdapterHub/bert-base-multilingual-cased_wikiann_ner_zh_pfeiffer")
model.set_active_adapters(adapter_name)
```
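
Because the adapter was trained stacked on a language adapter (MAD-X 2.0), inference typically stacks it on a Chinese language adapter as well. The following is a minimal sketch using the `adapters` composition API; the language adapter identifier `zh/wiki@ukp` and the `leave_out=[11]` argument are assumptions based on the usual MAD-X 2.0 setup and may need to be adjusted to the language adapter actually used during training.

```python
from adapters import AutoAdapterModel
from adapters.composition import Stack

model = AutoAdapterModel.from_pretrained("bert-base-multilingual-cased")

# Assumed identifier for a pre-trained Chinese language adapter; leave_out=[11]
# drops it in the last layer to mirror the MAD-X 2.0 setup described above.
lang_adapter = model.load_adapter("zh/wiki@ukp", leave_out=[11])

# Task adapter from this repository, stacked on top of the language adapter.
ner_adapter = model.load_adapter("AdapterHub/bert-base-multilingual-cased_wikiann_ner_zh_pfeiffer")

model.set_active_adapters(Stack(lang_adapter, ner_adapter))
```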

## Architecture & Training

- Adapter architecture: pfeiffer
- Prediction head: tagging
- Dataset: [Chinese](https://adapterhub.ml/explore/wikiann/zh/) (WikiANN; see the loading sketch below)

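To inspect the training data referenced above, the dataset can be loaded with the `datasets` library; the Hub dataset id `wikiann` with config `zh` is an assumption based on the dataset's usual naming.

```python
from datasets import load_dataset

# WikiANN (PAN-X) Chinese split; id and config name are assumed.
dataset = load_dataset("wikiann", "zh")
example = dataset["train"][0]
print(example["tokens"], example["ner_tags"])  # tokens with integer NER tag ids
```
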
## Author Information

- Author name(s): Jonas Pfeiffer
- Author email: [email protected]
- Author links: [Website](https://pfeiffer.ai), [GitHub](https://github.com/JoPfeiff), [Twitter](https://twitter.com/@PfeiffJo)

## Versions

- `1` **(main)**
- `2`
- `3`
- `4`
- `5`

## Citation

```bibtex
@article{Pfeiffer21UNKs,
  author    = {Jonas Pfeiffer and
               Ivan Vuli\'{c} and
               Iryna Gurevych and
               Sebastian Ruder},
  title     = {{UNKs Everywhere: Adapting Multilingual Language Models to New Scripts}},
  journal   = {arXiv preprint},
  year      = {2021},
  url       = {https://arxiv.org/abs/2012.15562}
}
```

*This adapter has been auto-imported from https://github.com/Adapter-Hub/Hub/blob/master/adapters/ukp/bert-base-multilingual-cased_wikiann_ner_zh_pfeiffer.yaml*.
    	
adapter_config.json ADDED
@@ -0,0 +1,43 @@
{
  "config": {
    "adapter_residual_before_ln": false,
    "cross_adapter": false,
    "dropout": 0.0,
    "factorized_phm_W": true,
    "factorized_phm_rule": false,
    "hypercomplex_nonlinearity": "glorot-uniform",
    "init_weights": "bert",
    "inv_adapter": null,
    "inv_adapter_reduction_factor": null,
    "is_parallel": false,
    "learn_phm": true,
    "leave_out": [
      11
    ],
    "ln_after": false,
    "ln_before": false,
    "mh_adapter": false,
    "non_linearity": "gelu_new",
    "original_ln_after": true,
    "original_ln_before": true,
    "output_adapter": true,
    "phm_bias": true,
    "phm_c_init": "normal",
    "phm_dim": 4,
    "phm_init_range": 0.0001,
    "phm_layer": false,
    "phm_rank": 1,
    "reduction_factor": 16,
    "residual_before_ln": true,
    "scaling": 1.0,
    "shared_W_phm": false,
    "shared_phm_rule": true,
    "use_gating": false
  },
  "hidden_size": 768,
  "model_class": "BertAdapterModel",
  "model_name": "bert-base-multilingual-cased",
  "model_type": "bert",
  "name": "ner",
  "version": "0.2.0"
}
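
For reference, a roughly equivalent adapter configuration can be constructed programmatically. This is a sketch, assuming the current `adapters` library, where the Pfeiffer-style bottleneck configuration is exposed as `SeqBnConfig`; only the key fields of the stored config are set explicitly, the rest are left at their defaults.

```python
from adapters import AutoAdapterModel, SeqBnConfig

model = AutoAdapterModel.from_pretrained("bert-base-multilingual-cased")

# Pfeiffer-style bottleneck adapter matching the key fields of adapter_config.json:
# bottleneck size = hidden_size / reduction_factor, gelu_new non-linearity,
# and no adapter in the last layer (leave_out=[11]).
config = SeqBnConfig(
    reduction_factor=16,
    non_linearity="gelu_new",
    leave_out=[11],
)
model.add_adapter("ner", config=config)
```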
    	
head_config.json ADDED
@@ -0,0 +1,24 @@
{
  "config": {
    "activation_function": null,
    "dropout_prob": null,
    "head_type": "tagging",
    "label2id": {
      "0": 0,
      "B-LOC": 5,
      "B-ORG": 3,
      "B-PER": 1,
      "I-LOC": 6,
      "I-ORG": 4,
      "I-PER": 2
    },
    "layers": 1,
    "num_labels": 7
  },
  "hidden_size": 768,
  "model_class": "BertAdapterModel",
  "model_name": "bert-base-multilingual-cased",
  "model_type": "bert",
  "name": "ner",
  "version": "0.2.0"
}
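
The tagging head configured above predicts seven WikiANN labels via `label2id`. Below is a minimal inference sketch, assuming the loaded tagging head is active so that the model output exposes `.logits`; the example sentence and variable names are illustrative.

```python
import torch
from transformers import AutoTokenizer
from adapters import AutoAdapterModel

tokenizer = AutoTokenizer.from_pretrained("bert-base-multilingual-cased")
model = AutoAdapterModel.from_pretrained("bert-base-multilingual-cased")
adapter_name = model.load_adapter("AdapterHub/bert-base-multilingual-cased_wikiann_ner_zh_pfeiffer")
model.set_active_adapters(adapter_name)
model.eval()

# label2id as stored in head_config.json; id 0 is the outside tag.
label2id = {"0": 0, "B-PER": 1, "I-PER": 2, "B-ORG": 3, "I-ORG": 4, "B-LOC": 5, "I-LOC": 6}
id2label = {v: k for k, v in label2id.items()}

inputs = tokenizer("北京是中国的首都。", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, sequence_length, 7)

pred_ids = logits.argmax(dim=-1)[0].tolist()
tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
# Special tokens ([CLS], [SEP]) also receive predictions and are printed as-is here.
for token, label_id in zip(tokens, pred_ids):
    print(token, id2label[label_id])
```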
    	
pytorch_adapter.bin ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:086ccb7276ecada078eacbf7bb2ee9460df0eb51208b57e32cbf1715e79db9f0
size 3295122
    	
pytorch_model_head.bin ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f55e4d131242f87d395ca42119607de43de86e7a1f30145e46dcd2dfa46389a7
size 23066
