Upload MllamaForConditionalGeneration
- config.json +4 -1
- generation_config.json +1 -1
config.json CHANGED

@@ -6,6 +6,7 @@
   "image_token_index": 128256,
   "model_type": "mllama",
   "text_config": {
+    "_attn_implementation_autoset": false,
     "_name_or_path": "",
     "add_cross_attention": false,
     "architectures": null,
@@ -103,8 +104,10 @@
     "vocab_size": 128256
   },
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.
+  "transformers_version": "4.46.3",
+  "unsloth_fixed": true,
   "vision_config": {
+    "_attn_implementation_autoset": false,
     "_name_or_path": "",
     "add_cross_attention": false,
     "architectures": null,
    	
generation_config.json CHANGED

@@ -9,5 +9,5 @@
   "pad_token_id": 128004,
   "temperature": 0.6,
   "top_p": 0.9,
-  "transformers_version": "4.
+  "transformers_version": "4.46.3"
 }
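
Similarly, a minimal sketch (again with a placeholder repository id) of reading the sampling defaults from generation_config.json via GenerationConfig:

from transformers import GenerationConfig

# Placeholder repo id (assumption); substitute the repository this commit belongs to.
gen_config = GenerationConfig.from_pretrained("your-org/your-mllama-model")

print(gen_config.pad_token_id)           # 128004
print(gen_config.temperature)            # 0.6
print(gen_config.top_p)                  # 0.9
print(gen_config.transformers_version)   # "4.46.3" after this commit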

