Upload folder using huggingface_hub
Browse files- 512/.DS_Store +0 -0
- 512/transformer/.DS_Store +0 -0
- 512/transformer/config.json +25 -0
    	
        512/.DS_Store
    ADDED
    
    | Binary file (6.15 kB). View file | 
|  | 
    	
        512/transformer/.DS_Store
    ADDED
    
    | Binary file (6.15 kB). View file | 
|  | 
    	
        512/transformer/config.json
    ADDED
    
    @@ -0,0 +1,25 @@

    {
      "_class_name": "SymmetricTransformer2DModel",
      "_diffusers_version": "0.31.0",
      "_name_or_path": "/nvfile-heatstorage/chatrl/users/sqy/projects/UnifiedGen/outputs/8gpus_2M_bs_1024_lr_1e-4_text_weight_0.6_res_512/checkpoint-70000",
      "attention_head_dim": 128,
      "axes_dims_rope": [
        16,
        56,
        56
      ],
      "codebook_size": 8192,
      "downsample": true,
      "guidance_embeds": false,
      "in_channels": 64,
      "joint_attention_dim": 1024,
      "num_attention_heads": 8,
      "num_layers": 14,
      "num_single_layers": 28,
      "patch_size": 1,
      "pooled_projection_dim": 1024,
      "t5_dim": null,
      "tokenizer_vocab_size": 49408,
      "upsample": true,
      "vocab_size": 8256
    }

