Upload config.json with huggingface_hub (commit ca32c19, verified)
{
  "activation_checkpoint_impl": "per-iteration",
  "architecture_class_name": "RecurrentGPT",
  "architectures": [
    "RavenForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "raven_config_minimal.RavenConfig",
    "AutoModelForCausalLM": "raven_modeling_minimal.RavenForCausalLM"
  },
  "bias": false,
  "block_class_name": "SandwichBlock",
  "block_size": 1024,
  "bos_token_id": 65504,
  "effective_expected_depth": 56,
  "eos_token_id": 65505,
  "head_dim": 64,
  "init_orthogonal": false,
  "init_strategy": "takase",
  "init_values": {
    "embed_scale": 1.0,
    "embedding": 0.008703882797784892,
    "out_proj": 0.0005356869554443541,
    "std": 0.008703882797784892
  },
  "injection_type": "linear",
  "intermediate_size": 8192,
  "max_position_embeddings": 131072,
  "mean_backprop_depth": 8,
  "mean_recurrence": 8,
  "mlp_class_name": "GatedMLP",
  "model_type": "huginn_raven",
  "n_embd": 2048,
  "n_heads": 32,
  "n_layers": 14,
  "n_layers_in_coda": 4,
  "n_layers_in_prelude": 4,
  "n_layers_in_recurrent_block": 6,
  "nonlin_name": "SiLU",
  "norm_class_name": "RMSNorm_llama",
  "norm_eps": 1e-05,
  "num_key_value_heads": 8,
  "pad_token_id": 65509,
  "padded_vocab_size": 128256,
  "padding_multiple": 4096,
  "qk_bias": false,
  "rope_base": 500000.0,
  "rope_scaling": {
    "factor": 32.0,
    "high_freq_factor": 4.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "sampling_scheme": "poisson-lognormal-filling",
  "state_init": "like-init",
  "test_time_noise": 0,
  "test_time_noise_type": "fixed",
  "tie_embeddings": false,
  "torch_dtype": "float32",
  "transformers_version": "4.53.1",
  "vocab_size": 128256
}
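
Note that the depth fields are consistent with each other: n_layers_in_prelude + n_layers_in_coda + n_layers_in_recurrent_block * mean_recurrence = 4 + 4 + 6 * 8 = 56 = effective_expected_depth.

Since the auto_map entries point to custom modeling code (raven_config_minimal.py and raven_modeling_minimal.py), loading this checkpoint with transformers requires trust_remote_code=True. Below is a minimal sketch of loading the config and model; the checkpoint path is a placeholder, not a value taken from this repo.

# Minimal loading sketch (assumptions: "path/to/checkpoint" is a placeholder
# for the local directory or Hub repo id containing this config.json and the
# custom raven_* modeling files).
import torch
from transformers import AutoConfig, AutoModelForCausalLM

checkpoint = "path/to/checkpoint"  # placeholder

# trust_remote_code=True is needed because auto_map references custom classes.
config = AutoConfig.from_pretrained(checkpoint, trust_remote_code=True)
print(config.model_type)       # "huginn_raven"
print(config.mean_recurrence)  # 8 recurrent iterations on average

model = AutoModelForCausalLM.from_pretrained(
    checkpoint,
    trust_remote_code=True,
    torch_dtype=torch.float32,  # matches "torch_dtype" in the config above
)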