lccurious committed on
Commit
2fa561f
·
verified ·
1 Parent(s): b23c24c

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +2 -2
config.json CHANGED
@@ -26,13 +26,13 @@
26
  "rope_scaling": null,
27
  "tie_word_embeddings": false,
28
  "torch_dtype": "bfloat16",
29
- "transformers_version": "4.36.0",
30
  "use_bias": false,
31
  "use_rmsnorm": true,
32
  "rms_norm_eps": 1e-06,
33
  "head_dim": 128,
34
  "num_shared_experts": 1,
35
- "use_cache": true,
36
  "use_qkv_bias": false,
37
  "embedding_dropout": 0.0,
38
  "norm_softmax": false,
 
26
  "rope_scaling": null,
27
  "tie_word_embeddings": false,
28
  "torch_dtype": "bfloat16",
29
+ "transformers_version": "4.52.3",
30
  "use_bias": false,
31
  "use_rmsnorm": true,
32
  "rms_norm_eps": 1e-06,
33
  "head_dim": 128,
34
  "num_shared_experts": 1,
35
+ "use_cache": false,
36
  "use_qkv_bias": false,
37
  "embedding_dropout": 0.0,
38
  "norm_softmax": false,