Fix config.json: Remove pool from model_type and add auto_map
config.json  +6 -2
@@ -30,7 +30,7 @@
   "mm_vision_select_feature": "patch",
   "mm_vision_select_layer": -2,
   "mm_vision_tower": "openai/clip-vit-large-patch14-336",
-  "model_type": "
+  "model_type": "safe_llava_llama",
   "num_attention_heads": 40,
   "num_hidden_layers": 40,
   "num_key_value_heads": 40,
@@ -76,5 +76,9 @@
   "use_mm_proj": true,
   "use_total_safety_meta_token": false,
   "use_txt_safety_meta_token": false,
-  "vocab_size": 32000
+  "vocab_size": 32000,
+  "auto_map": {
+    "AutoConfig": "modeling_safellava.SafetyConfig",
+    "AutoModelForCausalLM": "modeling_safellava.SafeLlavaLlamaForCausalLM"
+  }
 }
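The added auto_map block is what lets the generic Auto classes pick up the custom code shipped alongside this config: each value has the form "<module file without .py>.<class name>", so both entries resolve inside modeling_safellava.py in the same repo. A minimal loading sketch under that assumption; the repo id below is a placeholder, not the actual repository name.

from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "your-org/SafeLLaVA"  # placeholder repo id

# With auto_map present and trust_remote_code=True, AutoConfig resolves to
# modeling_safellava.SafetyConfig and AutoModelForCausalLM resolves to
# modeling_safellava.SafeLlavaLlamaForCausalLM.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

Without the auto_map entries, the Auto classes only consult the built-in model registry, and "safe_llava_llama" is not a model_type that ships with transformers, so loading would fail with an unrecognized-architecture error.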
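For context, the two auto_map targets are expected to be defined in modeling_safellava.py, which is not part of this commit. The following is a hypothetical sketch of the relevant declarations, assuming the classes extend the stock Llama implementations the way other LLaVA-style repos do; the real class bodies may differ.

from transformers import LlamaConfig, LlamaForCausalLM

class SafetyConfig(LlamaConfig):
    # Must match the "model_type" value this commit sets in config.json.
    model_type = "safe_llava_llama"

class SafeLlavaLlamaForCausalLM(LlamaForCausalLM):
    # Links the model class to its config class so Auto* resolution works.
    config_class = SafetyConfig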