{
    "module": "keras_hub.src.models.pali_gemma.pali_gemma_backbone",
    "class_name": "PaliGemmaBackbone",
    "config": {
        "name": "pali_gemma_backbone",
        "trainable": true,
        "vocabulary_size": 257152,
        "image_size": 448,
        "num_layers": 42,
        "num_query_heads": 16,
        "num_key_value_heads": 8,
        "hidden_dim": 3584,
        "intermediate_dim": 28672,
        "head_dim": 256,
        "vit_patch_size": 14,
        "vit_num_heads": 16,
        "vit_hidden_dim": 1152,
        "vit_num_layers": 27,
        "vit_intermediate_dim": 4304,
        "vit_pooling": null,
        "vit_classifier_activation": null,
        "vit_name": null,
        "query_head_dim_normalize": true,
        "use_post_ffw_norm": true,
        "use_post_attention_norm": true,
        "final_logit_soft_cap": 30,
        "attention_logit_soft_cap": 50,
        "sliding_window_size": 4096,
        "use_sliding_window_attention": true,
        "layer_norm_epsilon": 1e-06,
        "dropout": 0
    },
    "registered_name": "keras_hub>PaliGemmaBackbone"
}
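
A minimal sketch of how a serialized config like this can be turned back into a model, assuming keras and keras_hub are installed and this JSON is saved locally as "config.json" (the filename is an assumption). Keras resolves the class through "registered_name" and passes the inner "config" dict to the constructor; the resulting model has randomly initialized weights, not pretrained ones, and at this size (42 layers, hidden_dim 3584) will allocate several gigabytes.

import json

import keras
import keras_hub  # noqa: F401  (importing registers "keras_hub>PaliGemmaBackbone")

# Load the serialized spec from disk ("config.json" is an assumed path).
with open("config.json") as f:
    spec = json.load(f)

# deserialize_keras_object looks up the class from "registered_name" and
# effectively calls PaliGemmaBackbone(**spec["config"]).
backbone = keras.saving.deserialize_keras_object(spec)
backbone.summary()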