Mingyuyang-1 committed · Commit 7a10456 · 1 Parent(s): 8392b0f

Update hybrid_config.json

Files changed (1)
  1. hybrid_config.json +1 -4
hybrid_config.json CHANGED
@@ -40,14 +40,11 @@
   "rope_theta": 500000.0,
   "rope_scaling": {
     "factor": 8.0,
-    "high_freq_factor": 4.0,
-    "low_freq_factor": 1.0,
     "original_max_position_embeddings": 8192,
-    "rope_type": "llama3"
+    "rope_type": "yarn"
   },
   "attention_bias": false,
   "attention_dropout": 0.0,
-  "rope_type": "yarn",
   "d_model": 4096,
   "ssm_cfg": {
     "expand": 1,