{
  "_class_name": "PixArtTransformer2DModel",
  "_commit_hash": null,
  "_diffusers_version": "0.28.2",
  "_name_or_path": "/home/ubuntu/.cache/huggingface/hub/models--hf-internal-testing--tiny-pixart-alpha-pipe/snapshots/4bc7fd2ac07bfe6aa361a00dc0ff5bb87250eab1/transformer",
  "activation_fn": "gelu-approximate",
  "attention_bias": true,
  "attention_head_dim": 8,
  "attention_type": "default",
  "caption_channels": 32,
  "cross_attention_dim": 24,
  "dropout": 0.0,
  "in_channels": 4,
  "interpolation_scale": null,
  "neuron": {
    "auto_cast": null,
    "auto_cast_type": null,
    "compiler_type": "neuronx-cc",
    "compiler_version": "2.16.345.0+69131dd3",
    "dynamic_batch_size": false,
    "inline_weights_to_neff": false,
    "input_names": [
      "sample",
      "encoder_hidden_states",
      "timestep",
      "encoder_attention_mask"
    ],
    "model_type": "pixart-transformer-2d",
    "optlevel": "2",
    "output_attentions": false,
    "output_hidden_states": false,
    "output_names": [
      "out_hidden_states"
    ],
    "static_batch_size": 1,
    "static_encoder_hidden_size": 32,
    "static_height": 8,
    "static_num_channels": 4,
    "static_patch_size": 2,
    "static_sequence_length": 16,
    "static_vae_scale_factor": 1,
    "static_width": 8,
    "tensor_parallel_size": 1
  },
  "norm_elementwise_affine": false,
  "norm_eps": 1e-06,
  "norm_num_groups": 32,
  "norm_type": "ada_norm_single",
  "num_attention_heads": 3,
  "num_embeds_ada_norm": 1000,
  "num_layers": 2,
  "out_channels": 8,
  "patch_size": 2,
  "sample_size": 8,
  "task": "semantic-segmentation",
  "transformers_version": null,
  "upcast_attention": false,
  "use_additional_conditions": null
}
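
For illustration only (not part of the cached artifact): a minimal Python sketch that parses this config and prints the compiler version plus the static input shapes the NEFF was compiled with. The local file name "config.json" and the (batch, channels, height, width) / (batch, sequence, hidden) shape interpretations are assumptions, not something stated in the file itself.

import json

# Assumption: the JSON above has been saved locally as "config.json".
with open("config.json") as f:
    config = json.load(f)

neuron = config["neuron"]

# Compiler used to produce the cached NEFF and the traced input names.
print("compiler:", neuron["compiler_type"], neuron["compiler_version"])
print("inputs:", neuron["input_names"])

# Assumed layout: the latent "sample" input is (batch, channels, height, width).
print(
    "sample shape:",
    (
        neuron["static_batch_size"],
        neuron["static_num_channels"],
        neuron["static_height"],
        neuron["static_width"],
    ),
)

# Assumed layout: "encoder_hidden_states" is (batch, sequence, hidden).
print(
    "encoder_hidden_states shape:",
    (
        neuron["static_batch_size"],
        neuron["static_sequence_length"],
        neuron["static_encoder_hidden_size"],
    ),
)

Because "dynamic_batch_size" is false here, inputs fed to the compiled model would have to match these static shapes exactly.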