Upload config
Browse files

config.json  CHANGED  +113 -11
@@ -1,10 +1,7 @@
 {
-  "architectures": [
-    "MERaLiONForConditionalGeneration"
-  ],
+  "_attn_implementation_autoset": true,
   "auto_map": {
-    "AutoConfig": "configuration_meralion.MERaLiONConfig",
-    "AutoModelForSpeechSeq2Seq": "modeling_meralion.MERaLiONForConditionalGeneration"
+    "AutoConfig": "configuration_meralion.MERaLiONConfig"
   },
   "head_dim": 256,
   "hidden_size": 3584,
@@ -15,7 +12,8 @@
   "num_key_value_heads": 8,
   "sliding_window": 4096,
   "speech_config": {
-    "
+    "_attn_implementation_autoset": true,
+    "_name_or_path": "openai/whisper-large-v2",
     "apply_spec_augment": true,
     "architectures": [
       "WhisperForConditionalGeneration"
@@ -34,17 +32,122 @@
     "encoder_ffn_dim": 5120,
     "encoder_layers": 32,
     "eos_token_id": 50257,
+    "forced_decoder_ids": [
+      [
+        1,
+        50259
+      ],
+      [
+        2,
+        50359
+      ],
+      [
+        3,
+        50363
+      ]
+    ],
     "mask_time_length": 20,
     "max_length": 448,
     "model_type": "meralion_speech_encoder",
     "num_hidden_layers": 32,
-    "num_mel_bins":
-    "
-    "
+    "num_mel_bins": 80,
+    "pad_token_id": 50257,
+    "suppress_tokens": [
+      1,
+      2,
+      7,
+      8,
+      9,
+      10,
+      14,
+      25,
+      26,
+      27,
+      28,
+      29,
+      31,
+      58,
+      59,
+      60,
+      61,
+      62,
+      63,
+      90,
+      91,
+      92,
+      93,
+      359,
+      503,
+      522,
+      542,
+      873,
+      893,
+      902,
+      918,
+      922,
+      931,
+      1350,
+      1853,
+      1982,
+      2460,
+      2627,
+      3246,
+      3253,
+      3268,
+      3536,
+      3846,
+      3961,
+      4183,
+      4667,
+      6585,
+      6647,
+      7273,
+      9061,
+      9383,
+      10428,
+      10929,
+      11938,
+      12033,
+      12331,
+      12562,
+      13793,
+      14157,
+      14635,
+      15265,
+      15618,
+      16553,
+      16604,
+      18362,
+      18956,
+      20075,
+      21675,
+      22520,
+      26130,
+      26161,
+      26435,
+      28279,
+      29464,
+      31650,
+      32302,
+      32470,
+      36865,
+      42863,
+      47425,
+      49870,
+      50254,
+      50258,
+      50358,
+      50359,
+      50360,
+      50361,
+      50362
+    ],
+    "torch_dtype": "bfloat16"
   },
   "speech_mlp_scale_factor": 15,
   "speech_token_index": 255999,
   "text_config": {
+    "_attn_implementation_autoset": true,
     "_name_or_path": "aisingapore/gemma2-9b-cpt-sea-lionv3-instruct",
     "architectures": [
       "Gemma2ForCausalLM"
@@ -60,6 +163,5 @@
     "sliding_window_size": 4096,
     "torch_dtype": "bfloat16"
   },
-  "
-  "transformers_version": "4.44.2"
+  "transformers_version": "4.46.3"
 }
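The "forced_decoder_ids" block added under "speech_config" follows Whisper's decoding convention: each [position, token_id] pair fixes the decoder output at that position instead of letting it be predicted. A minimal sketch of what the three pairs resolve to, assuming the standard multilingual vocabulary of openai/whisper-large-v2 (the "_name_or_path" recorded in this commit); the ids are copied from the diff above:

    from transformers import WhisperTokenizer

    tokenizer = WhisperTokenizer.from_pretrained("openai/whisper-large-v2")

    # [position, token_id] pairs copied from the new "forced_decoder_ids".
    forced_decoder_ids = [[1, 50259], [2, 50359], [3, 50363]]
    for position, token_id in forced_decoder_ids:
        print(position, tokenizer.convert_ids_to_tokens(token_id))

    # Expected, if the vocabulary matches multilingual Whisper:
    # 1 <|en|>            (language pinned to English)
    # 2 <|transcribe|>    (task pinned to transcription)
    # 3 <|notimestamps|>  (timestamp prediction disabled)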
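The long "suppress_tokens" list works on the output side: during generation these vocabulary ids are masked before sampling so they can never be emitted (Whisper uses this to ban stray punctuation and special tokens). A hand-rolled sketch of the masking step, written out rather than relying on any particular transformers logits-processor signature; the vocabulary size 51865 is whisper-large-v2's, and the sample ids are taken from the diff:

    import torch

    def suppress(logits: torch.Tensor, banned_ids: list[int]) -> torch.Tensor:
        # Force the banned ids to -inf so softmax assigns them zero probability.
        logits = logits.clone()
        logits[..., banned_ids] = float("-inf")
        return logits

    logits = torch.zeros(1, 51865)               # dummy scores over the vocabulary
    masked = suppress(logits, [1, 2, 7, 8, 50362])
    print(masked[0, 7].item())                   # -inf: id 7 can never be sampled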
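Two version-related changes ride along: "_attn_implementation_autoset": true appears at every config level (recent transformers releases write this bookkeeping flag when the attention backend is auto-selected), and "transformers_version" moves from 4.44.2 to 4.46.3. With "auto_map" now exposing only AutoConfig, the custom configuration class still resolves via trust_remote_code. A minimal sketch, assuming an illustrative repo id (not stated in this diff) and assuming MERaLiONConfig nests speech_config as a sub-config, as the JSON layout suggests:

    from transformers import AutoConfig

    config = AutoConfig.from_pretrained(
        "MERaLiON/MERaLiON-AudioLLM-Whisper-SEA-LION",  # assumed repo id
        trust_remote_code=True,  # auto_map points at configuration_meralion.py
    )
    print(type(config).__name__)              # MERaLiONConfig
    print(config.speech_config.num_mel_bins)  # 80, per the diff (assumed attribute)
    print(config.transformers_version)        # "4.46.3" after this commit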