```json
{
  "architectures": [
    "NemotronHForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "configuration_nemotron_h.NemotronHConfig",
    "AutoModelForCausalLM": "modeling_nemotron_h.NemotronHForCausalLM"
  },
  "bos_token_id": 1,
  "chunk_size": 128,
  "conv_kernel": 4,
  "dtype": "bfloat16",
  "eos_token_id": 12,
  "expand": 2,
  "head_dim": 128,
  "hidden_dropout": 0.0,
  "hidden_size": 4480,
  "hybrid_override_pattern": "M-M-M-MM-M-M-M*-M-M-M*-M-M-M-M*-M-M-M-M*-M-MM-M-M-M-M-M-",
  "initializer_range": 0.02,
  "intermediate_size": 15680,
  "layer_norm_epsilon": 1e-05,
  "mamba_head_dim": 80,
  "mamba_hidden_act": "silu",
  "mamba_num_groups": 8,
  "mamba_num_heads": 128,
  "mamba_proj_bias": false,
  "mamba_state_dim": 128,
  "max_position_embeddings": 131072,
  "mlp_bias": false,
  "mlp_hidden_act": "relu2",
  "model_type": "nemotron_h",
  "n_groups": 8,
  "num_attention_heads": 40,
  "num_hidden_layers": 56,
  "num_key_value_heads": 8,
  "num_logits_to_keep": 1,
  "num_query_groups": 8,
  "pad_token_id": 0,
  "quantization_config": {
    "config_groups": {
      "group_0": {
        "format": "pack-quantized",
        "input_activations": null,
        "output_activations": null,
        "targets": [
          "Linear"
        ],
        "weights": {
          "actorder": "weight",
          "block_structure": null,
          "dynamic": false,
          "group_size": 64,
          "num_bits": 4,
          "observer": "mse",
          "observer_kwargs": {},
          "strategy": "group",
          "symmetric": true,
          "type": "int"
        }
      }
    },
    "format": "pack-quantized",
    "global_compression_ratio": null,
    "ignore": [
      "lm_head"
    ],
    "kv_cache_scheme": null,
    "quant_method": "compressed-tensors",
    "quantization_status": "compressed",
    "sparsity_config": {},
    "transform_config": {},
    "version": "0.12.3.a20251013"
  },
  "rescale_prenorm_residual": true,
  "residual_in_fp32": false,
  "rms_norm_eps": 1e-05,
  "sliding_window": null,
  "ssm_state_size": 128,
  "tie_word_embeddings": false,
  "time_step_floor": 0.0001,
  "time_step_limit": [
    0.0,
    Infinity
  ],
  "time_step_max": 0.1,
  "time_step_min": 0.001,
  "time_step_rank": 256,
  "transformers_version": "4.56.2",
  "use_bias": false,
  "use_cache": false,
  "use_conv_bias": true,
  "use_mamba_kernels": true,
  "vocab_size": 131072
}
```
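For reference, a minimal sketch of loading a checkpoint that ships this config with Transformers. Because `auto_map` routes `AutoConfig`/`AutoModelForCausalLM` to the bundled `configuration_nemotron_h`/`modeling_nemotron_h` modules, `trust_remote_code=True` is required, and the `compressed-tensors` package must be installed so the pack-quantized int4 weights (`quant_method: "compressed-tensors"`) can be decompressed. The repo id below is a hypothetical placeholder, not the actual checkpoint name.

```python
# Minimal loading sketch. Assumptions: "org/nemotron-h-int4" is a
# hypothetical repo id; transformers >= 4.56 and compressed-tensors
# are installed.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "org/nemotron-h-int4"  # hypothetical placeholder

tokenizer = AutoTokenizer.from_pretrained(repo_id)

# trust_remote_code=True lets transformers import the custom
# NemotronH classes referenced in "auto_map".
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    trust_remote_code=True,
    torch_dtype=torch.bfloat16,  # matches "dtype": "bfloat16"
    device_map="auto",
)

inputs = tokenizer("Hello, world!", return_tensors="pt").to(model.device)
out = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(out[0], skip_special_tokens=True))
```

Note that `"use_cache": false` only sets the default; generation backends may still enable their own cache handling for the hybrid Mamba/attention layers described by `hybrid_override_pattern`.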