{
    "module": "keras_hub.src.models.parseq.parseq_backbone",
    "class_name": "PARSeqBackbone",
    "config": {
        "name": "par_seq_backbone",
        "trainable": true,
        "dtype": {
            "module": "keras",
            "class_name": "DTypePolicy",
            "config": {
                "name": "float32"
            },
            "registered_name": null
        },
        "image_encoder": {
            "module": "keras_hub.src.models.vit.vit_backbone",
            "class_name": "ViTBackbone",
            "config": {
                "name": "encoder",
                "trainable": true,
                "dtype": {
                    "module": "keras",
                    "class_name": "DTypePolicy",
                    "config": {
                        "name": "float32"
                    },
                    "registered_name": null
                },
                "image_shape": [
                    32,
                    128,
                    3
                ],
                "patch_size": [
                    4,
                    8
                ],
                "num_layers": 12,
                "num_heads": 6,
                "hidden_dim": 384,
                "mlp_dim": 1536,
                "dropout_rate": 0.0,
                "attention_dropout": 0.0,
                "layer_norm_epsilon": 1e-06,
                "use_mha_bias": true,
                "use_mlp_bias": true,
                "use_class_token": false,
                "use_patch_bias": true
            },
            "registered_name": "keras_hub>ViTBackbone"
        },
        "vocabulary_size": 97,
        "max_label_length": 25,
        "decoder_hidden_dim": 384,
        "num_decoder_layers": 1,
        "num_decoder_heads": 12,
        "decoder_mlp_dim": 1536,
        "dropout_rate": 0.1,
        "attention_dropout": 0.1
    },
    "registered_name": "keras_hub>PARSeqBackbone"
}