{
    "init_logit_scale": 2.659260036932778,
    "text_cfg": {
        "context_length": 72,
        "heads": 20,
        "layers": 24,
        "mlp_ratio": 4.0,
        "output_dim": 1280,
        "vocab_size": 49408,
        "width": 1280
    },
    "vision_cfg": {
        "attn_pooler_heads": 8,
        "drop_path": 0.0,
        "heads": 16,
        "image_size": 448,
        "layers": 50,
        "ls_init_value": null,
        "mlp_ratio": 5.833333333333333,
        "output_dim": 1280,
        "patch_size": 14,
        "pool_type": "attn",
        "use_abs_posemb": true,
        "use_cls_token": false,
        "use_ln_post": true,
        "use_ln_pre": true,
        "use_rope2d": true,
        "width": 1536
    }
}
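As a minimal sketch of how this config can be consumed (assuming it is saved as a hypothetical `config.json`; the filename and the derived-quantity printout are illustrative, not part of the original), the snippet below loads it with Python's standard `json` module and reports a few values implied by the fields: the vision tower's patch grid and token count, and the projection dimension shared by the text and vision branches.

```python
import json

# Load the model config. The filename "config.json" is an assumption
# for illustration, not taken from the original document.
with open("config.json") as f:
    cfg = json.load(f)

vision = cfg["vision_cfg"]
text = cfg["text_cfg"]

# Derived vision-tower quantities: a 448x448 input split into 14x14
# patches gives a 32x32 grid, i.e. 1024 patch tokens per image.
grid = vision["image_size"] // vision["patch_size"]
num_patches = grid * grid

print(f"patch grid: {grid}x{grid} ({num_patches} tokens)")
print(f"vision width {vision['width']} -> output_dim {vision['output_dim']}")
print(f"text width {text['width']} -> output_dim {text['output_dim']}")
print(f"text context length: {text['context_length']} tokens")
```

Both towers project into the same 1280-dimensional space (`output_dim`), which is what makes the contrastive image-text comparison possible; the logit scale is initialized to ln(1/0.07) ≈ 2.659, the usual CLIP temperature initialization.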