asnassar committed
Commit c8f0b3a · verified · 1 parent: cc7f1e5

Upload config.json

Files changed (1)
  1. config.json +5 -4
config.json CHANGED
@@ -41,12 +41,12 @@
   "scale_factor": 4,
   "text_config": {
     "architectures": [
-      "llama"
+      "VLlama3ForCausalLM"
     ],
     "attention_bias": false,
     "attention_dropout": 0.0,
-    "bos_token_id": 100257,
-    "eos_token_id": 100257,
+    "bos_token_id": 100264,
+    "eos_token_id": 100338,
     "head_dim": 64,
     "hidden_act": "silu",
     "hidden_size": 576,
@@ -62,11 +62,12 @@
     "rms_norm_eps": 1e-05,
     "rope_scaling": null,
     "rope_theta": 10000.0,
+    "tie_word_embeddings": true,
     "torch_dtype": "bfloat16",
     "use_cache": true,
     "vocab_size": 100480
   },
-  "tie_word_embeddings": false,
+  "tie_word_embeddings": true,
   "torch_dtype": "bfloat16",
   "transformers_version": "4.53.0.dev0",
   "use_cache": true,