{ "architectures": [ "Phi3ForCausalLM" ], "attention_bias": false, "attention_dropout": 0.0, "auto_map": { "AutoConfig": "microsoft/Phi-4-mini-instruct--configuration_phi3.Phi3Config", "AutoModelForCausalLM": "microsoft/Phi-4-mini-instruct--modeling_phi3.Phi3ForCausalLM", "AutoTokenizer": "microsoft/Phi-4-mini-instruct--Xenova/gpt-4o" }, "bos_token_id": 199999, "embd_pdrop": 0.0, "eos_token_id": 199999, "full_attn_mod": 1, "hidden_act": "silu", "hidden_size": 3072, "initializer_range": 0.02, "intermediate_size": 8192, "interpolate_factor": 1, "lm_head_bias": false, "max_position_embeddings": 131072, "mlp_bias": false, "model_type": "phi3", "num_attention_heads": 24, "num_hidden_layers": 32, "num_key_value_heads": 8, "original_max_position_embeddings": 4096, "pad_token_id": 199999, "partial_rotary_factor": 0.75, "quantization_config": { "backend": "auto", "batch_size": 1, "bits": 8, "block_name_to_quantize": null, "cache_block_outputs": true, "checkpoint_format": "gptq_v2", "damp_percent": 0.1, "dataset": "c4", "desc_act": false, "exllama_config": { "version": 1 }, "group_size": 128, "max_input_length": null, "meta": { "quantizer": [ "optimum:1.25.3", "gptqmodel:2.2.0" ] }, "model_seqlen": null, "module_name_preceding_first_block": null, "modules_in_block_to_quantize": null, "pad_token_id": null, "quant_method": "gptq", "sym": true, "tokenizer": null, "true_sequential": true, "use_cuda_fp16": false, "use_exllama": true }, "resid_pdrop": 0.0, "rms_norm_eps": 1e-05, "rope_scaling": { "long_factor": [ 1, 1.118320672, 1.250641126, 1.398617824, 1.564103225, 1.74916897, 1.956131817, 2.187582649, 2.446418898, 2.735880826, 3.059592084, 3.421605075, 3.826451687, 4.279200023, 4.785517845, 5.351743533, 5.984965424, 6.693110555, 7.485043894, 8.370679318, 9.36110372, 10.4687158, 11.70738129, 13.09260651, 14.64173252, 16.37415215, 18.31155283, 20.47818807, 22.90118105, 25.61086418, 28.64115884, 32.03, 32.1, 32.13, 32.23, 32.6, 32.61, 32.64, 32.66, 32.7, 32.71, 32.93, 32.97, 33.28, 33.49, 33.5, 44.16, 47.77 ], "short_factor": [ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ], "type": "longrope" }, "rope_theta": 10000.0, "sliding_window": 262144, "tie_word_embeddings": true, "torch_dtype": "float16", "transformers_version": "4.51.3", "use_cache": true, "vocab_size": 200064 }