Upload Gemma2ForCausalLM

Files changed:
- README.md (+199, -0)
- config.json (+34, -0)
- generation_config.json (+8, -0)
- model-00001-of-00008.safetensors (+3, -0)
- model-00002-of-00008.safetensors (+3, -0)
- model-00003-of-00008.safetensors (+3, -0)
- model-00004-of-00008.safetensors (+3, -0)
- model-00005-of-00008.safetensors (+3, -0)
- model-00006-of-00008.safetensors (+3, -0)
- model-00007-of-00008.safetensors (+3, -0)
- model-00008-of-00008.safetensors (+3, -0)
- model.safetensors.index.json (+471, -0)
 
    	
README.md ADDED
@@ -0,0 +1,199 @@
+---
+library_name: transformers
+tags: []
+---
+
+# Model Card for Model ID
+
+<!-- Provide a quick summary of what the model is/does. -->
+
+
+
+## Model Details
+
+### Model Description
+
+<!-- Provide a longer summary of what this model is. -->
+
+This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated.
+
+- **Developed by:** [More Information Needed]
+- **Funded by [optional]:** [More Information Needed]
+- **Shared by [optional]:** [More Information Needed]
+- **Model type:** [More Information Needed]
+- **Language(s) (NLP):** [More Information Needed]
+- **License:** [More Information Needed]
+- **Finetuned from model [optional]:** [More Information Needed]
+
+### Model Sources [optional]
+
+<!-- Provide the basic links for the model. -->
+
+- **Repository:** [More Information Needed]
+- **Paper [optional]:** [More Information Needed]
+- **Demo [optional]:** [More Information Needed]
+
+## Uses
+
+<!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->
+
+### Direct Use
+
+<!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->
+
+[More Information Needed]
+
+### Downstream Use [optional]
+
+<!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->
+
+[More Information Needed]
+
+### Out-of-Scope Use
+
+<!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->
+
+[More Information Needed]
+
+## Bias, Risks, and Limitations
+
+<!-- This section is meant to convey both technical and sociotechnical limitations. -->
+
+[More Information Needed]
+
+### Recommendations
+
+<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
+
+Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
+
+## How to Get Started with the Model
+
+Use the code below to get started with the model.
+
+[More Information Needed]
+
+## Training Details
+
+### Training Data
+
+<!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->
+
+[More Information Needed]
+
+### Training Procedure
+
+<!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->
+
+#### Preprocessing [optional]
+
+[More Information Needed]
+
+
+#### Training Hyperparameters
+
+- **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->
+
+#### Speeds, Sizes, Times [optional]
+
+<!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->
+
+[More Information Needed]
+
+## Evaluation
+
+<!-- This section describes the evaluation protocols and provides the results. -->
+
+### Testing Data, Factors & Metrics
+
+#### Testing Data
+
+<!-- This should link to a Dataset Card if possible. -->
+
+[More Information Needed]
+
+#### Factors
+
+<!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->
+
+[More Information Needed]
+
+#### Metrics
+
+<!-- These are the evaluation metrics being used, ideally with a description of why. -->
+
+[More Information Needed]
+
+### Results
+
+[More Information Needed]
+
+#### Summary
+
+
+
+## Model Examination [optional]
+
+<!-- Relevant interpretability work for the model goes here -->
+
+[More Information Needed]
+
+## Environmental Impact
+
+<!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->
+
+Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
+
+- **Hardware Type:** [More Information Needed]
+- **Hours used:** [More Information Needed]
+- **Cloud Provider:** [More Information Needed]
+- **Compute Region:** [More Information Needed]
+- **Carbon Emitted:** [More Information Needed]
+
+## Technical Specifications [optional]
+
+### Model Architecture and Objective
+
+[More Information Needed]
+
+### Compute Infrastructure
+
+[More Information Needed]
+
+#### Hardware
+
+[More Information Needed]
+
+#### Software
+
+[More Information Needed]
+
+## Citation [optional]
+
+<!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->
+
+**BibTeX:**
+
+[More Information Needed]
+
+**APA:**
+
+[More Information Needed]
+
+## Glossary [optional]
+
+<!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->
+
+[More Information Needed]
+
+## More Information [optional]
+
+[More Information Needed]
+
+## Model Card Authors [optional]
+
+[More Information Needed]
+
+## Model Card Contact
+
+[More Information Needed]
    	
config.json ADDED
@@ -0,0 +1,34 @@
+{
+  "_name_or_path": "/data/junlin/SFT/gemma2-9b-it-SFT-UFUC5k_lr_8.0e-6_g_16_e_5",
+  "architectures": [
+    "Gemma2ForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "attn_logit_softcapping": 50.0,
+  "bos_token_id": 2,
+  "cache_implementation": "hybrid",
+  "eos_token_id": 1,
+  "final_logit_softcapping": 30.0,
+  "head_dim": 256,
+  "hidden_act": "gelu_pytorch_tanh",
+  "hidden_activation": "gelu_pytorch_tanh",
+  "hidden_size": 3584,
+  "initializer_range": 0.02,
+  "intermediate_size": 14336,
+  "max_position_embeddings": 8192,
+  "model_type": "gemma2",
+  "num_attention_heads": 16,
+  "num_hidden_layers": 42,
+  "num_key_value_heads": 8,
+  "pad_token_id": 0,
+  "query_pre_attn_scalar": 256,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000.0,
+  "sliding_window": 4096,
+  "sliding_window_size": 4096,
+  "torch_dtype": "float32",
+  "transformers_version": "4.44.2",
+  "use_cache": true,
+  "vocab_size": 256000
+}
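The config describes a Gemma 2 9B-class checkpoint: 42 layers, hidden size 3584, 16 query heads over 8 KV heads (grouped-query attention), a 256,000-token vocabulary, 8k context with 4k sliding-window attention, and Gemma 2's logit soft-capping (50.0 for attention, 30.0 for the final logits). The `_name_or_path` indicates an SFT export of gemma2-9b-it. A minimal, hedged loading sketch: the repo id below is a placeholder for wherever these files live, and since this commit uploads no tokenizer files, the tokenizer is assumed to come from the base model.

```python
# Hedged loading sketch; "your-org/gemma2-9b-it-sft" is a placeholder repo id.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model = AutoModelForCausalLM.from_pretrained(
    "your-org/gemma2-9b-it-sft",   # placeholder: this repo's Hub id or a local clone
    torch_dtype=torch.bfloat16,    # weights are stored as float32 (~37 GB); downcast to fit one GPU
    device_map="auto",
)
# Tokenizer files are not part of this commit; assuming the base model's tokenizer.
tokenizer = AutoTokenizer.from_pretrained("google/gemma-2-9b-it")
```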
    	
generation_config.json ADDED
@@ -0,0 +1,8 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 2,
+  "cache_implementation": "hybrid",
+  "eos_token_id": 1,
+  "pad_token_id": 0,
+  "transformers_version": "4.44.2"
+}
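The generation config only pins the special-token ids (bos=2, eos=1, pad=0) and the hybrid sliding/global KV cache; all sampling parameters are left at transformers defaults. Continuing the loading sketch above with a purely illustrative prompt:

```python
# Continues the hedged loading sketch above; the prompt is illustrative only.
inputs = tokenizer("The capital of France is", return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=32)  # special-token ids come from generation_config.json
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```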
    	
model-00001-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:deea49f0194479b2c9c05aac40cb27bb4976a815be18b7dea246bf6e99be234c
+size 4844480456
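Each `.safetensors` entry is committed as a Git LFS pointer: a spec version, the SHA-256 oid of the real object, and its size in bytes; the ~4.8 GB shard itself lives in LFS storage. A small sketch, assuming the shard has been downloaded next to the script, for checking it against its pointer:

```python
import hashlib

def verify_shard(path: str, expected_oid: str, expected_size: int) -> bool:
    """Compare a downloaded LFS object against its pointer's sha256 oid and byte size."""
    digest = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
            digest.update(chunk)
            size += len(chunk)
    return digest.hexdigest() == expected_oid and size == expected_size

# Values taken from the pointer above; the local path is an assumption.
assert verify_shard(
    "model-00001-of-00008.safetensors",
    "deea49f0194479b2c9c05aac40cb27bb4976a815be18b7dea246bf6e99be234c",
    4844480456,
)
```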
    	
model-00002-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6f84e080cbd0a717afe24d993e703148949be3dd500d4aa83e620a411caa1847
+size 4962213464
    	
model-00003-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e4dc8e57fbc09104eb9aa9b3f5ee14430966815c93d77257a2409d8b1e8d30cd
+size 4962271312
    	
model-00004-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:985f7a2253e0c0f64ad210246c5594a8aacec5372d00445495c726d5e813faaa
+size 4932853744
    	
model-00005-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5b2fe4ef669591291fca28bd6079b1ba2d43514ef9c6935e6c7339a817994616
+size 4962213528
    	
model-00006-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:883205eeb422038b496c26cfbb5c210ce2ffd2ce4276b57cbb85130a13a73e32
+size 4962213528
    	
model-00007-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d72d6fec25c21f67513fd4b99ca37a29d0ad93de8caa227234c5de1e69915430
+size 4962271328
    	
model-00008-of-00008.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5e62df1d2d996777ae2c114dbe827da4576e50b847d5ddaf8b2d1cc95b659aa4
+size 2378360680
    	
model.safetensors.index.json ADDED
@@ -0,0 +1,471 @@
+{
+  "metadata": {
+    "total_size": 36966823936
+  },
+  "weight_map": {
+    "model.embed_tokens.weight": "model-00001-of-00008.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00008.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00008.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00008.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00008.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00008.safetensors",
+    "model.layers.0.post_feedforward_layernorm.weight": "model-00001-of-00008.safetensors",
+    "model.layers.0.pre_feedforward_layernorm.weight": "model-00001-of-00008.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00008.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00008.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00008.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00008.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00002-of-00008.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00002-of-00008.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00008.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00002-of-00008.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00002-of-00008.safetensors",
+    "model.layers.1.post_feedforward_layernorm.weight": "model-00002-of-00008.safetensors",
+    "model.layers.1.pre_feedforward_layernorm.weight": "model-00002-of-00008.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00008.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00008.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00008.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00008.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00003-of-00008.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00003-of-00008.safetensors",
+    "model.layers.10.post_feedforward_layernorm.weight": "model-00003-of-00008.safetensors",
+    "model.layers.10.pre_feedforward_layernorm.weight": "model-00003-of-00008.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00003-of-00008.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00003-of-00008.safetensors",
+    "model.layers.11.post_feedforward_layernorm.weight": "model-00003-of-00008.safetensors",
+    "model.layers.11.pre_feedforward_layernorm.weight": "model-00003-of-00008.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00003-of-00008.safetensors",
+    "model.layers.12.mlp.down_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.12.mlp.gate_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.12.mlp.up_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00003-of-00008.safetensors",
+    "model.layers.12.post_feedforward_layernorm.weight": "model-00003-of-00008.safetensors",
+    "model.layers.12.pre_feedforward_layernorm.weight": "model-00003-of-00008.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00003-of-00008.safetensors",
+    "model.layers.13.mlp.down_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.13.mlp.gate_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.13.mlp.up_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00003-of-00008.safetensors",
+    "model.layers.13.post_feedforward_layernorm.weight": "model-00003-of-00008.safetensors",
+    "model.layers.13.pre_feedforward_layernorm.weight": "model-00003-of-00008.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00003-of-00008.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.14.mlp.down_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.14.mlp.gate_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.14.mlp.up_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.14.post_feedforward_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.14.pre_feedforward_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.15.post_feedforward_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.15.pre_feedforward_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.16.post_feedforward_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.16.pre_feedforward_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.17.mlp.down_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.17.mlp.gate_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.17.mlp.up_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.17.post_feedforward_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.17.pre_feedforward_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.18.mlp.down_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.18.mlp.gate_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.18.mlp.up_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.18.post_feedforward_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.18.pre_feedforward_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.19.mlp.down_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.19.mlp.gate_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.19.mlp.up_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.19.post_feedforward_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.19.pre_feedforward_layernorm.weight": "model-00004-of-00008.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00002-of-00008.safetensors",
+    "model.layers.2.mlp.down_proj.weight": "model-00002-of-00008.safetensors",
+    "model.layers.2.mlp.gate_proj.weight": "model-00002-of-00008.safetensors",
+    "model.layers.2.mlp.up_proj.weight": "model-00002-of-00008.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00002-of-00008.safetensors",
+    "model.layers.2.post_feedforward_layernorm.weight": "model-00002-of-00008.safetensors",
+    "model.layers.2.pre_feedforward_layernorm.weight": "model-00002-of-00008.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00002-of-00008.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00002-of-00008.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00002-of-00008.safetensors",
+    "model.layers.2.self_attn.v_proj.weight": "model-00002-of-00008.safetensors",
+    "model.layers.20.input_layernorm.weight": "model-00005-of-00008.safetensors",
+    "model.layers.20.mlp.down_proj.weight": "model-00005-of-00008.safetensors",
+    "model.layers.20.mlp.gate_proj.weight": "model-00005-of-00008.safetensors",
+    "model.layers.20.mlp.up_proj.weight": "model-00005-of-00008.safetensors",
+    "model.layers.20.post_attention_layernorm.weight": "model-00005-of-00008.safetensors",
+    "model.layers.20.post_feedforward_layernorm.weight": "model-00005-of-00008.safetensors",
+    "model.layers.20.pre_feedforward_layernorm.weight": "model-00005-of-00008.safetensors",
+    "model.layers.20.self_attn.k_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.20.self_attn.o_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.20.self_attn.q_proj.weight": "model-00004-of-00008.safetensors",
+    "model.layers.20.self_attn.v_proj.weight": "model-00004-of-00008.safetensors",
| 161 | 
         
            +
                "model.layers.21.input_layernorm.weight": "model-00005-of-00008.safetensors",
         
     | 
| 162 | 
         
            +
                "model.layers.21.mlp.down_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 163 | 
         
            +
                "model.layers.21.mlp.gate_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 164 | 
         
            +
                "model.layers.21.mlp.up_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 165 | 
         
            +
                "model.layers.21.post_attention_layernorm.weight": "model-00005-of-00008.safetensors",
         
     | 
| 166 | 
         
            +
                "model.layers.21.post_feedforward_layernorm.weight": "model-00005-of-00008.safetensors",
         
     | 
| 167 | 
         
            +
                "model.layers.21.pre_feedforward_layernorm.weight": "model-00005-of-00008.safetensors",
         
     | 
| 168 | 
         
            +
                "model.layers.21.self_attn.k_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 169 | 
         
            +
                "model.layers.21.self_attn.o_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 170 | 
         
            +
                "model.layers.21.self_attn.q_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 171 | 
         
            +
                "model.layers.21.self_attn.v_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 172 | 
         
            +
                "model.layers.22.input_layernorm.weight": "model-00005-of-00008.safetensors",
         
     | 
| 173 | 
         
            +
                "model.layers.22.mlp.down_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 174 | 
         
            +
                "model.layers.22.mlp.gate_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 175 | 
         
            +
                "model.layers.22.mlp.up_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 176 | 
         
            +
                "model.layers.22.post_attention_layernorm.weight": "model-00005-of-00008.safetensors",
         
     | 
| 177 | 
         
            +
                "model.layers.22.post_feedforward_layernorm.weight": "model-00005-of-00008.safetensors",
         
     | 
| 178 | 
         
            +
                "model.layers.22.pre_feedforward_layernorm.weight": "model-00005-of-00008.safetensors",
         
     | 
| 179 | 
         
            +
                "model.layers.22.self_attn.k_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 180 | 
         
            +
                "model.layers.22.self_attn.o_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 181 | 
         
            +
                "model.layers.22.self_attn.q_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 182 | 
         
            +
                "model.layers.22.self_attn.v_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 183 | 
         
            +
                "model.layers.23.input_layernorm.weight": "model-00005-of-00008.safetensors",
         
     | 
| 184 | 
         
            +
                "model.layers.23.mlp.down_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 185 | 
         
            +
                "model.layers.23.mlp.gate_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 186 | 
         
            +
                "model.layers.23.mlp.up_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 187 | 
         
            +
                "model.layers.23.post_attention_layernorm.weight": "model-00005-of-00008.safetensors",
         
     | 
| 188 | 
         
            +
                "model.layers.23.post_feedforward_layernorm.weight": "model-00005-of-00008.safetensors",
         
     | 
| 189 | 
         
            +
                "model.layers.23.pre_feedforward_layernorm.weight": "model-00005-of-00008.safetensors",
         
     | 
| 190 | 
         
            +
                "model.layers.23.self_attn.k_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 191 | 
         
            +
                "model.layers.23.self_attn.o_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 192 | 
         
            +
                "model.layers.23.self_attn.q_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 193 | 
         
            +
                "model.layers.23.self_attn.v_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 194 | 
         
            +
                "model.layers.24.input_layernorm.weight": "model-00005-of-00008.safetensors",
         
     | 
| 195 | 
         
            +
                "model.layers.24.mlp.down_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 196 | 
         
            +
                "model.layers.24.mlp.gate_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 197 | 
         
            +
                "model.layers.24.mlp.up_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 198 | 
         
            +
                "model.layers.24.post_attention_layernorm.weight": "model-00005-of-00008.safetensors",
         
     | 
| 199 | 
         
            +
                "model.layers.24.post_feedforward_layernorm.weight": "model-00005-of-00008.safetensors",
         
     | 
| 200 | 
         
            +
                "model.layers.24.pre_feedforward_layernorm.weight": "model-00005-of-00008.safetensors",
         
     | 
| 201 | 
         
            +
                "model.layers.24.self_attn.k_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 202 | 
         
            +
                "model.layers.24.self_attn.o_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 203 | 
         
            +
                "model.layers.24.self_attn.q_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 204 | 
         
            +
                "model.layers.24.self_attn.v_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 205 | 
         
            +
                "model.layers.25.input_layernorm.weight": "model-00005-of-00008.safetensors",
         
     | 
| 206 | 
         
            +
                "model.layers.25.mlp.down_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 207 | 
         
            +
                "model.layers.25.mlp.gate_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 208 | 
         
            +
                "model.layers.25.mlp.up_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 209 | 
         
            +
                "model.layers.25.post_attention_layernorm.weight": "model-00005-of-00008.safetensors",
         
     | 
| 210 | 
         
            +
                "model.layers.25.post_feedforward_layernorm.weight": "model-00005-of-00008.safetensors",
         
     | 
| 211 | 
         
            +
                "model.layers.25.pre_feedforward_layernorm.weight": "model-00005-of-00008.safetensors",
         
     | 
| 212 | 
         
            +
                "model.layers.25.self_attn.k_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 213 | 
         
            +
                "model.layers.25.self_attn.o_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 214 | 
         
            +
                "model.layers.25.self_attn.q_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 215 | 
         
            +
                "model.layers.25.self_attn.v_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 216 | 
         
            +
                "model.layers.26.input_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 217 | 
         
            +
                "model.layers.26.mlp.down_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 218 | 
         
            +
                "model.layers.26.mlp.gate_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 219 | 
         
            +
                "model.layers.26.mlp.up_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 220 | 
         
            +
                "model.layers.26.post_attention_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 221 | 
         
            +
                "model.layers.26.post_feedforward_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 222 | 
         
            +
                "model.layers.26.pre_feedforward_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 223 | 
         
            +
                "model.layers.26.self_attn.k_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 224 | 
         
            +
                "model.layers.26.self_attn.o_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 225 | 
         
            +
                "model.layers.26.self_attn.q_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 226 | 
         
            +
                "model.layers.26.self_attn.v_proj.weight": "model-00005-of-00008.safetensors",
         
     | 
| 227 | 
         
            +
                "model.layers.27.input_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 228 | 
         
            +
                "model.layers.27.mlp.down_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 229 | 
         
            +
                "model.layers.27.mlp.gate_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 230 | 
         
            +
                "model.layers.27.mlp.up_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 231 | 
         
            +
                "model.layers.27.post_attention_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 232 | 
         
            +
                "model.layers.27.post_feedforward_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 233 | 
         
            +
                "model.layers.27.pre_feedforward_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 234 | 
         
            +
                "model.layers.27.self_attn.k_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 235 | 
         
            +
                "model.layers.27.self_attn.o_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 236 | 
         
            +
                "model.layers.27.self_attn.q_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 237 | 
         
            +
                "model.layers.27.self_attn.v_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 238 | 
         
            +
                "model.layers.28.input_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 239 | 
         
            +
                "model.layers.28.mlp.down_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 240 | 
         
            +
                "model.layers.28.mlp.gate_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 241 | 
         
            +
                "model.layers.28.mlp.up_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 242 | 
         
            +
                "model.layers.28.post_attention_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 243 | 
         
            +
                "model.layers.28.post_feedforward_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 244 | 
         
            +
                "model.layers.28.pre_feedforward_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 245 | 
         
            +
                "model.layers.28.self_attn.k_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 246 | 
         
            +
                "model.layers.28.self_attn.o_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 247 | 
         
            +
                "model.layers.28.self_attn.q_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 248 | 
         
            +
                "model.layers.28.self_attn.v_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 249 | 
         
            +
                "model.layers.29.input_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 250 | 
         
            +
                "model.layers.29.mlp.down_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 251 | 
         
            +
                "model.layers.29.mlp.gate_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 252 | 
         
            +
                "model.layers.29.mlp.up_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 253 | 
         
            +
                "model.layers.29.post_attention_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 254 | 
         
            +
                "model.layers.29.post_feedforward_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 255 | 
         
            +
                "model.layers.29.pre_feedforward_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 256 | 
         
            +
                "model.layers.29.self_attn.k_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 257 | 
         
            +
                "model.layers.29.self_attn.o_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 258 | 
         
            +
                "model.layers.29.self_attn.q_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 259 | 
         
            +
                "model.layers.29.self_attn.v_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 260 | 
         
            +
                "model.layers.3.input_layernorm.weight": "model-00002-of-00008.safetensors",
         
     | 
| 261 | 
         
            +
                "model.layers.3.mlp.down_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 262 | 
         
            +
                "model.layers.3.mlp.gate_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 263 | 
         
            +
                "model.layers.3.mlp.up_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 264 | 
         
            +
                "model.layers.3.post_attention_layernorm.weight": "model-00002-of-00008.safetensors",
         
     | 
| 265 | 
         
            +
                "model.layers.3.post_feedforward_layernorm.weight": "model-00002-of-00008.safetensors",
         
     | 
| 266 | 
         
            +
                "model.layers.3.pre_feedforward_layernorm.weight": "model-00002-of-00008.safetensors",
         
     | 
| 267 | 
         
            +
                "model.layers.3.self_attn.k_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 268 | 
         
            +
                "model.layers.3.self_attn.o_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 269 | 
         
            +
                "model.layers.3.self_attn.q_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 270 | 
         
            +
                "model.layers.3.self_attn.v_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 271 | 
         
            +
                "model.layers.30.input_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 272 | 
         
            +
                "model.layers.30.mlp.down_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 273 | 
         
            +
                "model.layers.30.mlp.gate_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 274 | 
         
            +
                "model.layers.30.mlp.up_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 275 | 
         
            +
                "model.layers.30.post_attention_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 276 | 
         
            +
                "model.layers.30.post_feedforward_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 277 | 
         
            +
                "model.layers.30.pre_feedforward_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 278 | 
         
            +
                "model.layers.30.self_attn.k_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 279 | 
         
            +
                "model.layers.30.self_attn.o_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 280 | 
         
            +
                "model.layers.30.self_attn.q_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 281 | 
         
            +
                "model.layers.30.self_attn.v_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 282 | 
         
            +
                "model.layers.31.input_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 283 | 
         
            +
                "model.layers.31.mlp.down_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 284 | 
         
            +
                "model.layers.31.mlp.gate_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 285 | 
         
            +
                "model.layers.31.mlp.up_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 286 | 
         
            +
                "model.layers.31.post_attention_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 287 | 
         
            +
                "model.layers.31.post_feedforward_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 288 | 
         
            +
                "model.layers.31.pre_feedforward_layernorm.weight": "model-00006-of-00008.safetensors",
         
     | 
| 289 | 
         
            +
                "model.layers.31.self_attn.k_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 290 | 
         
            +
                "model.layers.31.self_attn.o_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 291 | 
         
            +
                "model.layers.31.self_attn.q_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 292 | 
         
            +
                "model.layers.31.self_attn.v_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 293 | 
         
            +
                "model.layers.32.input_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 294 | 
         
            +
                "model.layers.32.mlp.down_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 295 | 
         
            +
                "model.layers.32.mlp.gate_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 296 | 
         
            +
                "model.layers.32.mlp.up_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 297 | 
         
            +
                "model.layers.32.post_attention_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 298 | 
         
            +
                "model.layers.32.post_feedforward_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 299 | 
         
            +
                "model.layers.32.pre_feedforward_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 300 | 
         
            +
                "model.layers.32.self_attn.k_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 301 | 
         
            +
                "model.layers.32.self_attn.o_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 302 | 
         
            +
                "model.layers.32.self_attn.q_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 303 | 
         
            +
                "model.layers.32.self_attn.v_proj.weight": "model-00006-of-00008.safetensors",
         
     | 
| 304 | 
         
            +
                "model.layers.33.input_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 305 | 
         
            +
                "model.layers.33.mlp.down_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 306 | 
         
            +
                "model.layers.33.mlp.gate_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 307 | 
         
            +
                "model.layers.33.mlp.up_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 308 | 
         
            +
                "model.layers.33.post_attention_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 309 | 
         
            +
                "model.layers.33.post_feedforward_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 310 | 
         
            +
                "model.layers.33.pre_feedforward_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 311 | 
         
            +
                "model.layers.33.self_attn.k_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 312 | 
         
            +
                "model.layers.33.self_attn.o_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 313 | 
         
            +
                "model.layers.33.self_attn.q_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 314 | 
         
            +
                "model.layers.33.self_attn.v_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 315 | 
         
            +
                "model.layers.34.input_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 316 | 
         
            +
                "model.layers.34.mlp.down_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 317 | 
         
            +
                "model.layers.34.mlp.gate_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 318 | 
         
            +
                "model.layers.34.mlp.up_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 319 | 
         
            +
                "model.layers.34.post_attention_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 320 | 
         
            +
                "model.layers.34.post_feedforward_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 321 | 
         
            +
                "model.layers.34.pre_feedforward_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 322 | 
         
            +
                "model.layers.34.self_attn.k_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 323 | 
         
            +
                "model.layers.34.self_attn.o_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 324 | 
         
            +
                "model.layers.34.self_attn.q_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 325 | 
         
            +
                "model.layers.34.self_attn.v_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 326 | 
         
            +
                "model.layers.35.input_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 327 | 
         
            +
                "model.layers.35.mlp.down_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 328 | 
         
            +
                "model.layers.35.mlp.gate_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 329 | 
         
            +
                "model.layers.35.mlp.up_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 330 | 
         
            +
                "model.layers.35.post_attention_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 331 | 
         
            +
                "model.layers.35.post_feedforward_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 332 | 
         
            +
                "model.layers.35.pre_feedforward_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 333 | 
         
            +
                "model.layers.35.self_attn.k_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 334 | 
         
            +
                "model.layers.35.self_attn.o_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 335 | 
         
            +
                "model.layers.35.self_attn.q_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 336 | 
         
            +
                "model.layers.35.self_attn.v_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 337 | 
         
            +
                "model.layers.36.input_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 338 | 
         
            +
                "model.layers.36.mlp.down_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 339 | 
         
            +
                "model.layers.36.mlp.gate_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 340 | 
         
            +
                "model.layers.36.mlp.up_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 341 | 
         
            +
                "model.layers.36.post_attention_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 342 | 
         
            +
                "model.layers.36.post_feedforward_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 343 | 
         
            +
                "model.layers.36.pre_feedforward_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 344 | 
         
            +
                "model.layers.36.self_attn.k_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 345 | 
         
            +
                "model.layers.36.self_attn.o_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 346 | 
         
            +
                "model.layers.36.self_attn.q_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 347 | 
         
            +
                "model.layers.36.self_attn.v_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 348 | 
         
            +
                "model.layers.37.input_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 349 | 
         
            +
                "model.layers.37.mlp.down_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 350 | 
         
            +
                "model.layers.37.mlp.gate_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 351 | 
         
            +
                "model.layers.37.mlp.up_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 352 | 
         
            +
                "model.layers.37.post_attention_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 353 | 
         
            +
                "model.layers.37.post_feedforward_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 354 | 
         
            +
                "model.layers.37.pre_feedforward_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 355 | 
         
            +
                "model.layers.37.self_attn.k_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 356 | 
         
            +
                "model.layers.37.self_attn.o_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 357 | 
         
            +
                "model.layers.37.self_attn.q_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 358 | 
         
            +
                "model.layers.37.self_attn.v_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 359 | 
         
            +
                "model.layers.38.input_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 360 | 
         
            +
                "model.layers.38.mlp.down_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 361 | 
         
            +
                "model.layers.38.mlp.gate_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 362 | 
         
            +
                "model.layers.38.mlp.up_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 363 | 
         
            +
                "model.layers.38.post_attention_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 364 | 
         
            +
                "model.layers.38.post_feedforward_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 365 | 
         
            +
                "model.layers.38.pre_feedforward_layernorm.weight": "model-00007-of-00008.safetensors",
         
     | 
| 366 | 
         
            +
                "model.layers.38.self_attn.k_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 367 | 
         
            +
                "model.layers.38.self_attn.o_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 368 | 
         
            +
                "model.layers.38.self_attn.q_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 369 | 
         
            +
                "model.layers.38.self_attn.v_proj.weight": "model-00007-of-00008.safetensors",
         
     | 
| 370 | 
         
            +
                "model.layers.39.input_layernorm.weight": "model-00008-of-00008.safetensors",
         
     | 
| 371 | 
         
            +
                "model.layers.39.mlp.down_proj.weight": "model-00008-of-00008.safetensors",
         
     | 
| 372 | 
         
            +
                "model.layers.39.mlp.gate_proj.weight": "model-00008-of-00008.safetensors",
         
     | 
| 373 | 
         
            +
                "model.layers.39.mlp.up_proj.weight": "model-00008-of-00008.safetensors",
         
     | 
| 374 | 
         
            +
                "model.layers.39.post_attention_layernorm.weight": "model-00008-of-00008.safetensors",
         
     | 
| 375 | 
         
            +
                "model.layers.39.post_feedforward_layernorm.weight": "model-00008-of-00008.safetensors",
         
     | 
| 376 | 
         
            +
                "model.layers.39.pre_feedforward_layernorm.weight": "model-00008-of-00008.safetensors",
         
     | 
| 377 | 
         
            +
                "model.layers.39.self_attn.k_proj.weight": "model-00008-of-00008.safetensors",
         
     | 
| 378 | 
         
            +
                "model.layers.39.self_attn.o_proj.weight": "model-00008-of-00008.safetensors",
         
     | 
| 379 | 
         
            +
                "model.layers.39.self_attn.q_proj.weight": "model-00008-of-00008.safetensors",
         
     | 
| 380 | 
         
            +
                "model.layers.39.self_attn.v_proj.weight": "model-00008-of-00008.safetensors",
         
     | 
| 381 | 
         
            +
                "model.layers.4.input_layernorm.weight": "model-00002-of-00008.safetensors",
         
     | 
| 382 | 
         
            +
                "model.layers.4.mlp.down_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 383 | 
         
            +
                "model.layers.4.mlp.gate_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 384 | 
         
            +
                "model.layers.4.mlp.up_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 385 | 
         
            +
                "model.layers.4.post_attention_layernorm.weight": "model-00002-of-00008.safetensors",
         
     | 
| 386 | 
         
            +
                "model.layers.4.post_feedforward_layernorm.weight": "model-00002-of-00008.safetensors",
         
     | 
| 387 | 
         
            +
                "model.layers.4.pre_feedforward_layernorm.weight": "model-00002-of-00008.safetensors",
         
     | 
| 388 | 
         
            +
                "model.layers.4.self_attn.k_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 389 | 
         
            +
                "model.layers.4.self_attn.o_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 390 | 
         
            +
                "model.layers.4.self_attn.q_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 391 | 
         
            +
                "model.layers.4.self_attn.v_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 392 | 
         
            +
                "model.layers.40.input_layernorm.weight": "model-00008-of-00008.safetensors",
         
     | 
| 393 | 
         
            +
                "model.layers.40.mlp.down_proj.weight": "model-00008-of-00008.safetensors",
         
     | 
| 394 | 
         
            +
                "model.layers.40.mlp.gate_proj.weight": "model-00008-of-00008.safetensors",
         
     | 
| 395 | 
         
            +
                "model.layers.40.mlp.up_proj.weight": "model-00008-of-00008.safetensors",
         
     | 
| 396 | 
         
            +
                "model.layers.40.post_attention_layernorm.weight": "model-00008-of-00008.safetensors",
         
     | 
| 397 | 
         
            +
                "model.layers.40.post_feedforward_layernorm.weight": "model-00008-of-00008.safetensors",
         
     | 
| 398 | 
         
            +
                "model.layers.40.pre_feedforward_layernorm.weight": "model-00008-of-00008.safetensors",
         
     | 
| 399 | 
         
            +
                "model.layers.40.self_attn.k_proj.weight": "model-00008-of-00008.safetensors",
         
     | 
| 400 | 
         
            +
                "model.layers.40.self_attn.o_proj.weight": "model-00008-of-00008.safetensors",
         
     | 
| 401 | 
         
            +
                "model.layers.40.self_attn.q_proj.weight": "model-00008-of-00008.safetensors",
         
     | 
| 402 | 
         
            +
                "model.layers.40.self_attn.v_proj.weight": "model-00008-of-00008.safetensors",
         
     | 
| 403 | 
         
            +
                "model.layers.41.input_layernorm.weight": "model-00008-of-00008.safetensors",
         
     | 
| 404 | 
         
            +
                "model.layers.41.mlp.down_proj.weight": "model-00008-of-00008.safetensors",
         
     | 
| 405 | 
         
            +
                "model.layers.41.mlp.gate_proj.weight": "model-00008-of-00008.safetensors",
         
     | 
| 406 | 
         
            +
                "model.layers.41.mlp.up_proj.weight": "model-00008-of-00008.safetensors",
         
     | 
| 407 | 
         
            +
                "model.layers.41.post_attention_layernorm.weight": "model-00008-of-00008.safetensors",
         
     | 
| 408 | 
         
            +
                "model.layers.41.post_feedforward_layernorm.weight": "model-00008-of-00008.safetensors",
         
     | 
| 409 | 
         
            +
                "model.layers.41.pre_feedforward_layernorm.weight": "model-00008-of-00008.safetensors",
         
     | 
| 410 | 
         
            +
                "model.layers.41.self_attn.k_proj.weight": "model-00008-of-00008.safetensors",
         
     | 
| 411 | 
         
            +
                "model.layers.41.self_attn.o_proj.weight": "model-00008-of-00008.safetensors",
         
     | 
| 412 | 
         
            +
                "model.layers.41.self_attn.q_proj.weight": "model-00008-of-00008.safetensors",
         
     | 
| 413 | 
         
            +
                "model.layers.41.self_attn.v_proj.weight": "model-00008-of-00008.safetensors",
         
     | 
| 414 | 
         
            +
                "model.layers.5.input_layernorm.weight": "model-00002-of-00008.safetensors",
         
     | 
| 415 | 
         
            +
                "model.layers.5.mlp.down_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 416 | 
         
            +
                "model.layers.5.mlp.gate_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 417 | 
         
            +
                "model.layers.5.mlp.up_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 418 | 
         
            +
                "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00008.safetensors",
         
     | 
| 419 | 
         
            +
                "model.layers.5.post_feedforward_layernorm.weight": "model-00002-of-00008.safetensors",
         
     | 
| 420 | 
         
            +
                "model.layers.5.pre_feedforward_layernorm.weight": "model-00002-of-00008.safetensors",
         
     | 
| 421 | 
         
            +
                "model.layers.5.self_attn.k_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 422 | 
         
            +
                "model.layers.5.self_attn.o_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 423 | 
         
            +
                "model.layers.5.self_attn.q_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 424 | 
         
            +
                "model.layers.5.self_attn.v_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 425 | 
         
            +
                "model.layers.6.input_layernorm.weight": "model-00002-of-00008.safetensors",
         
     | 
| 426 | 
         
            +
                "model.layers.6.mlp.down_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 427 | 
         
            +
                "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 428 | 
         
            +
                "model.layers.6.mlp.up_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 429 | 
         
            +
                "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00008.safetensors",
         
     | 
| 430 | 
         
            +
                "model.layers.6.post_feedforward_layernorm.weight": "model-00002-of-00008.safetensors",
         
     | 
| 431 | 
         
            +
                "model.layers.6.pre_feedforward_layernorm.weight": "model-00002-of-00008.safetensors",
         
     | 
| 432 | 
         
            +
                "model.layers.6.self_attn.k_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 433 | 
         
            +
                "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 434 | 
         
            +
                "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 435 | 
         
            +
                "model.layers.6.self_attn.v_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 436 | 
         
            +
                "model.layers.7.input_layernorm.weight": "model-00003-of-00008.safetensors",
         
     | 
| 437 | 
         
            +
                "model.layers.7.mlp.down_proj.weight": "model-00003-of-00008.safetensors",
         
     | 
| 438 | 
         
            +
                "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 439 | 
         
            +
                "model.layers.7.mlp.up_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 440 | 
         
            +
                "model.layers.7.post_attention_layernorm.weight": "model-00003-of-00008.safetensors",
         
     | 
| 441 | 
         
            +
                "model.layers.7.post_feedforward_layernorm.weight": "model-00003-of-00008.safetensors",
         
     | 
| 442 | 
         
            +
                "model.layers.7.pre_feedforward_layernorm.weight": "model-00003-of-00008.safetensors",
         
     | 
| 443 | 
         
            +
                "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 444 | 
         
            +
                "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 445 | 
         
            +
                "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 446 | 
         
            +
                "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00008.safetensors",
         
     | 
| 447 | 
         
            +
                "model.layers.8.input_layernorm.weight": "model-00003-of-00008.safetensors",
         
     | 
| 448 | 
         
            +
                "model.layers.8.mlp.down_proj.weight": "model-00003-of-00008.safetensors",
         
     | 
| 449 | 
         
            +
                "model.layers.8.mlp.gate_proj.weight": "model-00003-of-00008.safetensors",
         
     | 
| 450 | 
         
            +
                "model.layers.8.mlp.up_proj.weight": "model-00003-of-00008.safetensors",
         
     | 
| 451 | 
         
            +
                "model.layers.8.post_attention_layernorm.weight": "model-00003-of-00008.safetensors",
         
     | 
| 452 | 
         
            +
                "model.layers.8.post_feedforward_layernorm.weight": "model-00003-of-00008.safetensors",
         
     | 
| 453 | 
         
            +
                "model.layers.8.pre_feedforward_layernorm.weight": "model-00003-of-00008.safetensors",
         
     | 
| 454 | 
         
            +
                "model.layers.8.self_attn.k_proj.weight": "model-00003-of-00008.safetensors",
         
     | 
| 455 | 
         
            +
                "model.layers.8.self_attn.o_proj.weight": "model-00003-of-00008.safetensors",
         
     | 
| 456 | 
         
            +
                "model.layers.8.self_attn.q_proj.weight": "model-00003-of-00008.safetensors",
         
     | 
| 457 | 
         
            +
                "model.layers.8.self_attn.v_proj.weight": "model-00003-of-00008.safetensors",
         
     | 
| 458 | 
         
            +
                "model.layers.9.input_layernorm.weight": "model-00003-of-00008.safetensors",
         
     | 
| 459 | 
         
            +
                "model.layers.9.mlp.down_proj.weight": "model-00003-of-00008.safetensors",
         
     | 
| 460 | 
         
            +
                "model.layers.9.mlp.gate_proj.weight": "model-00003-of-00008.safetensors",
         
     | 
| 461 | 
         
            +
                "model.layers.9.mlp.up_proj.weight": "model-00003-of-00008.safetensors",
         
     | 
| 462 | 
         
            +
                "model.layers.9.post_attention_layernorm.weight": "model-00003-of-00008.safetensors",
         
     | 
| 463 | 
         
            +
                "model.layers.9.post_feedforward_layernorm.weight": "model-00003-of-00008.safetensors",
         
     | 
| 464 | 
         
            +
                "model.layers.9.pre_feedforward_layernorm.weight": "model-00003-of-00008.safetensors",
         
     | 
| 465 | 
         
            +
                "model.layers.9.self_attn.k_proj.weight": "model-00003-of-00008.safetensors",
         
     | 
| 466 | 
         
            +
                "model.layers.9.self_attn.o_proj.weight": "model-00003-of-00008.safetensors",
         
     | 
| 467 | 
         
            +
                "model.layers.9.self_attn.q_proj.weight": "model-00003-of-00008.safetensors",
         
     | 
| 468 | 
         
            +
                "model.layers.9.self_attn.v_proj.weight": "model-00003-of-00008.safetensors",
         
     | 
| 469 | 
         
            +
                "model.norm.weight": "model-00008-of-00008.safetensors"
         
     | 
| 470 | 
         
            +
              }
         
     | 
| 471 | 
         
            +
            }
         
     |
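For reference, the `weight_map` in this index is what sharded-checkpoint loaders consult to find which `.safetensors` shard holds each tensor. Below is a minimal sketch of that lookup, assuming the index file and the shards listed above have been downloaded into the working directory (paths and the chosen tensor name are illustrative only):

```python
import json
from safetensors import safe_open  # pip install safetensors

# Load the index uploaded in this commit (local copy assumed).
with open("model.safetensors.index.json") as f:
    index = json.load(f)

# Pick one tensor and look up the shard that stores it.
name = "model.layers.17.self_attn.q_proj.weight"
shard = index["weight_map"][name]  # e.g. "model-00004-of-00008.safetensors"

# Read just that tensor from the shard without loading the whole file.
with safe_open(shard, framework="pt", device="cpu") as st:
    tensor = st.get_tensor(name)

print(f"{name}: shape={tuple(tensor.shape)} from {shard}")
```

In practice `transformers.AutoModelForCausalLM.from_pretrained` performs this resolution automatically for every entry in the map; the manual version above is only useful for inspecting or partially loading a sharded checkpoint.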