UnstableLlama committed on
Commit
d7000b1
·
verified ·
1 Parent(s): 1707f48

Upload 17 files

Browse files
.gitattributes CHANGED
@@ -34,3 +34,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
 repoGraph.png filter=lfs diff=lfs merge=lfs -text
+ quantization_config.json filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md CHANGED
@@ -1,212 +1,571 @@
 ---
- base_model: zerofata/GLM-4.5-Iceblink-v2-106B-A12B
- base_model_relation: quantized
- quantized_by: UnstableLlama
 license: mit
 datasets:
 - zerofata/Instruct-Anime
 - zerofata/Roleplay-Anime-Characters
 - zerofata/Instruct-Anime-CreativeWriting
 - zerofata/Summaries-Anime-FandomPages
- tags:
- - exl3
 ---
 <style>
- @import url('https://fonts.googleapis.com/css2?family=JetBrains+Mono:wght@400;700&display=swap');
-
- .test-container {
- font-family: 'JetBrains Mono', 'Fira Code', monospace;
- background-color: #0d0d0d;
- color: #00ff9f; /* Neon Green text base */
- padding: 25px;
- border: 1px solid #333;
- border-radius: 4px;
- }
-
- .test-header {
- border-bottom: 2px solid #ff00ff; /* Magenta accent */
- margin-bottom: 30px;
- padding-bottom: 15px;
- }
-
- .test-header h1 {
- color: #ff00ff;
- font-size: 1.8em;
- text-transform: uppercase;
- letter-spacing: 1px;
- margin: 0 0 10px 0;
- text-shadow: 3px 3px 0px rgba(0, 255, 159, 0.2);
- }
-
- .test-meta {
- display: flex;
- gap: 15px;
- font-size: 0.9em;
- opacity: 0.8;
- }
-
- .test-card {
- border: 1px solid #00ff9f;
- margin-bottom: 25px;
- background-color: #111;
- box-shadow: 5px 5px 0px #ff00ff; /* Hard magenta shadow for depth */
- }
-
- .test-card h2 {
- background-color: #00ff9f;
- color: #0d0d0d;
- margin: 0;
- padding: 8px 15px;
- text-transform: uppercase;
- font-size: 1.1em;
- font-weight: 800;
- letter-spacing: 1px;
- }
-
- .test-content {
- padding: 20px;
- }
-
- /* Glitch Table Styling */
- .glitch-table-wrapper {
- width: fit-content; /* Shrinks box to fit content */
- min-width: 400px; /* keeps it from getting too small */
- border: 1px solid rgba(0, 255, 159, 0.4); /* slightly brighter border base */
- border-radius: 6px;
- padding: 15px;
- margin-top: 20px;
- }
- .glitch-table th {
- border-bottom: 2px solid #ff00ff;
- color: #ff00ff;
- padding: 12px;
- font-weight: 700;
- text-transform: uppercase;
- font-size: 0.9em;
- }
- .glitch-table td {
- border-bottom: 1px dashed #333;
- padding: 12px;
- transition: background 0.2s;
- }
- .glitch-table tr:hover td {
- background-color: #1a1a1a;
- color: #fff;
- }
-
- /* Link Styling */
- a.test-link {
- color: #00ff9f;
- text-decoration: none;
- font-weight: 700;
- border-bottom: 1px dotted #00ff9f;
- transition: all 0.2s;
- }
- a.test-link:hover {
- background-color: #ff00ff;
- color: #0d0d0d;
- border-bottom: none;
- box-shadow: 3px 3px 0px #00ff9f;
- }
-
- /* Code Block Styling */
- .cmd-block {
- background: #000;
- border: 1px solid #333;
- border-left: 3px solid #ff00ff;
- color: #e0e0e0;
- padding: 15px;
- font-size: 0.9em;
- overflow-x: auto;
- white-space: pre-wrap;
- }
 </style>
 
- <div class="test-container">
 
- <div class="test-header">
- <h1>>> zerofata's GLM-4.5-Iceblink-v2-106B-A12B-exl3</h1>
- <div class="test-meta">
- <div>[BASE :: GLM-4.5-AIR]</div>
- <div>[TUNE :: ZEROFATA]</div>
- <div>[QUANT :: UNSTABLELLAMA]</div>
 </div>
 </div>
 
- <div class="test-card">
- <h2>// REPO</h2>
- <div class="test-content">
- EXL3 quantization of <b><a class="test-link" href="https://huggingface.co/zerofata/GLM-4.5-Iceblink-v2-106B-A12B" target="_blank">zerofata's ICEBLINK-v2</a></b>.
- <br><br>
- Quantized with <b><a class="test-link" href="https://github.com/turboderp-org/exllamav3" target="_blank">exllamav3 0.0.12</a></b>.
 </div>
 </div>
 
- <div class="test-card">
- <h2>// QUANTS</h2>
- <div class="test-content">
- <table class="glitch-table">
- <thead>
- <tr>
- <th>[BRANCH]</th>
- <th>[GiB]</th>
- <th>[KL_DIV]</th>
- <th>[PPL]</th>
- </tr>
- </thead>
- <tbody>
- <tr>
- <td><a class="test-link" href="https://huggingface.co/UnstableLlama/zerofata_GLM-4.5-Iceblink-v2-106B-A12B-exl3/tree/2.05" target="_blank">2.05bpw</a></td>
- <td>26.8</td>
- <td>0.883</td>
- <td>5.676</td>
- </tr>
- <tr>
- <td><a class="test-link" href="https://huggingface.co/UnstableLlama/zerofata_GLM-4.5-Iceblink-v2-106B-A12B-exl3/tree/2.5bpw" target="_blank">2.5bpw</a></td>
- <td>32.3</td>
- <td>0.591</td>
- <td>5.261</td>
- </tr>
- <tr>
- <td><a class="test-link" href="https://huggingface.co/UnstableLlama/zerofata_GLM-4.5-Iceblink-v2-106B-A12B-exl3/tree/3.05bpw" target="_blank">3.05bpw</a></td>
- <td>39.1</td>
- <td>0.199</td>
- <td>4.513</td>
- </tr>
- <tr>
- <td><a class="test-link" href="https://huggingface.co/UnstableLlama/zerofata_GLM-4.5-Iceblink-v2-106B-A12B-exl3/tree/4.0bpw" target="_blank">4.0bpw</a></td>
- <td>51</td>
- <td>0.069</td>
- <td>4.289</td>
- </tr>
- <tr>
- <td><a class="test-link" href="https://huggingface.co/UnstableLlama/zerofata_GLM-4.5-Iceblink-v2-106B-A12B-exl3/tree/5.0bpw" target="_blank">5.0bpw</a></td>
- <td>63.3</td>
- <td>0.026</td>
- <td>4.183</td>
- </tr>
- <tr>
190
- <td>205.8</td>
191
- <td>0</td>
192
- <td>4.132</td>
193
- </tr>
194
- </tbody>
195
- </table>
196
- <div style="margin-top: 25px; text-align: center;">
197
- <img src="https://huggingface.co/UnstableLlama/zerofata_GLM-4.5-Iceblink-v2-106B-A12B-exl3/resolve/main/repoGraph1.svg"
198
- alt="EXL3 Quantization Results"
199
- style="max-width: 90%; border: 1px solid #333; box-shadow: 0 0 15px #ff00ff;">
200
  </div>
 
 
 
 
 
 
 
 
 
 
201
  </div>
202
  </div>
203
 
204
- <div class="test-card">
205
- <h2>// DOWNLOAD</h2>
206
- <div class="test-content">
207
- Use HF-CLI to pull specific branches to your local machine:
208
- <div class="cmd-block">huggingface-cli download UnstableLlama/zerofata_GLM-4.5-Iceblink-v2-106B-A12B-exl3 --revision "3.05bpw" --local-dir ./</div>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
209
  </div>
210
  </div>
211
 
212
- </div>
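The `huggingface-cli` command above has a Python equivalent; a minimal sketch using `huggingface_hub.snapshot_download` (branch names are the ones listed in the quant table):

```python
# Sketch: pull a single quant branch with huggingface_hub.
from huggingface_hub import snapshot_download

path = snapshot_download(
    repo_id="UnstableLlama/zerofata_GLM-4.5-Iceblink-v2-106B-A12B-exl3",
    revision="3.05bpw",  # any branch from the quant table
    local_dir="./GLM-4.5-Iceblink-v2-exl3",
)
print(path)
```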
 ---
 license: mit
 datasets:
 - zerofata/Instruct-Anime
 - zerofata/Roleplay-Anime-Characters
 - zerofata/Instruct-Anime-CreativeWriting
 - zerofata/Summaries-Anime-FandomPages
+ base_model:
+ - zai-org/GLM-4.5-Air
 ---
 <style>
+ .container {
+ --primary-accent: #87CEEB;
+ --secondary-accent: #B0E5F5;
+ --tertiary-accent: #5FA8D3;
+ --ice-accent: #E0F4FF;
+ --silver-accent: #C8D8E4;
+ --glow-primary: rgba(135, 206, 235, 0.6);
+ --glow-secondary: rgba(176, 229, 245, 0.7);
+
+ --bg-main: #0a1628;
+ --bg-container: #0f1e35;
+ --bg-card: rgba(15, 30, 53, 0.95);
+ --bg-elevated: #162840;
+
+ --text-main: #E8F4F8;
+ --text-muted: #9BC4E2;
+ --text-bright: #FFFFFF;
+ --white: #FFFFFF;
+ --border-color: #2B4F76;
+ --border-ice: #B0E5F5;
+
+ --font-title: 'Inter', sans-serif;
+ --font-body: 'Source Sans Pro', sans-serif;
+ --font-code: 'JetBrains Mono', monospace;
+
+ font-family: var(--font-body);
+ color: var(--text-main);
+ line-height: 1.6;
+ font-weight: 400;
+
+ max-width: 1100px;
+ margin: 20px auto;
+ padding: 60px;
+ background:
+ linear-gradient(135deg, #0a1628 0%, #0f1e35 50%, #0a1628 100%);
+ min-height: calc(100vh - 40px);
+ position: relative;
+
+ border: 1px solid var(--border-ice);
+ box-shadow:
+ 0 0 0 3px var(--border-color),
+ 0 0 0 5px var(--border-ice),
+ 0 0 0 8px var(--border-color),
+ 0 0 60px rgba(135, 206, 235, 0.4),
+ inset 0 0 100px rgba(135, 206, 235, 0.15);
+ }
+
+ .container .title-container {
+ background: linear-gradient(135deg, var(--bg-elevated), var(--bg-card));
+ margin-bottom: 50px;
+ border: 2px solid var(--border-ice);
+ padding: 50px;
+ text-align: center;
+ position: relative;
+ box-shadow:
+ 0 0 0 1px var(--border-color),
+ 0 0 0 4px var(--border-ice),
+ 0 0 0 6px var(--border-color),
+ 0 0 40px var(--glow-primary),
+ inset 0 0 60px rgba(135, 206, 235, 0.2);
+ overflow: visible;
+ }
+
+ .container .title-container::before {
+ content: '';
+ position: absolute;
+ top: 0;
+ left: 0;
+ right: 0;
+ height: 3px;
+ background: linear-gradient(90deg, transparent, var(--ice-accent), transparent);
+ box-shadow: 0 0 10px var(--ice-accent);
+ }
+
+ .container .title-container::after {
+ content: '';
+ position: absolute;
+ bottom: 0;
+ left: 0;
+ right: 0;
+ height: 3px;
+ background: linear-gradient(90deg, transparent, var(--ice-accent), transparent);
+ box-shadow: 0 0 10px var(--ice-accent);
+ }
+
+
+
+ .container .title-container .title-wrapper {
+ position: relative;
+ z-index: 2;
+ }
+
+ .container .title-main {
+ color: var(--text-bright);
+ font-size: 3rem;
+ font-weight: 900;
+ margin: 0;
+ letter-spacing: 4px;
+ display: block;
+ text-transform: uppercase;
+ background: linear-gradient(90deg, var(--ice-accent), var(--text-bright), var(--ice-accent));
+ background-clip: text;
+ -webkit-background-clip: text;
+ -webkit-text-fill-color: transparent;
+ font-family: var(--font-title);
+ text-shadow:
+ 0 0 30px var(--ice-accent),
+ 0 0 60px rgba(176, 229, 245, 0.5),
+ 0 4px 8px rgba(135, 206, 235, 0.6);
+ position: relative;
+ }
+
+ .container .lemonade-text {
+ background: linear-gradient(135deg, var(--silver-accent), var(--ice-accent));
+ background-clip: text;
+ -webkit-background-clip: text;
+ -webkit-text-fill-color: transparent;
+ }
+
+ .container .version-indicator {
+ color: var(--text-muted);
+ font-size: 0.85rem;
+ font-weight: 600;
+ letter-spacing: 3px;
+ margin-top: 15px;
+ text-transform: uppercase;
+ font-family: var(--font-title);
+ opacity: 0.7;
+ }
+
+ .container .title-subtitle {
+ padding: 20px;
+ margin-top: 25px;
+ border: 1px solid var(--border-ice);
+ box-shadow: 0 0 20px rgba(135, 206, 235, 0.3);
+ }
+
+ .container .subtitle-text {
+ color: var(--text-muted);
+ font-size: 1.3rem;
+ font-family: var(--font-body);
+ font-style: italic;
+ font-weight: 400;
+ letter-spacing: 3px;
+ text-transform: uppercase;
+ }
+
+ .container img {
+ max-width: 100%;
+ border: 3px solid var(--border-ice);
+ margin-bottom: 40px;
+ box-shadow:
+ 0 0 0 1px var(--border-color),
+ 0 0 0 5px var(--border-ice),
+ 0 12px 24px rgba(135, 206, 235, 0.3);
+ }
+
+ .container .section-container {
+ margin-bottom: 40px;
+ padding: 40px;
+ background: linear-gradient(135deg, var(--bg-card), var(--bg-elevated));
+ border: 2px solid var(--border-ice);
+ box-shadow:
+ 0 0 0 1px var(--border-color),
+ 0 0 0 5px var(--border-ice),
+ 0 8px 24px rgba(135, 206, 235, 0.3),
+ inset 0 0 40px rgba(135, 206, 235, 0.1);
+ }
+ .container .section-container:last-of-type {
+ margin-bottom: 0;
+ }
+
+ .container .section-header {
+ display: flex;
+ align-items: center;
+ padding: 20px;
+ border: 1px solid var(--border-ice);
+ margin-bottom: 30px;
+ background: rgba(43, 79, 118, 0.2);
+ box-shadow: 0 0 20px rgba(135, 206, 235, 0.2);
+ }
+
+ .container .section-title {
+ font-family: var(--font-title);
+ background: linear-gradient(90deg, var(--ice-accent), var(--text-bright), var(--ice-accent));
+ background-clip: text;
+ -webkit-background-clip: text;
+ -webkit-text-fill-color: transparent;
+ font-size: 1.8rem;
+ margin: 0 !important;
+ padding: 0 !important;
+ letter-spacing: 4px;
+ font-weight: 800;
+ text-transform: uppercase;
+ border: none !important;
+ display: inline-block;
+ text-shadow: 0 0 20px var(--ice-accent);
+ }
+
+ .container .section-content {
+ padding: 0;
+ }
+
+ .container .subheading {
+ color: var(--text-bright);
+ font-size: 1.4rem;
+ margin-top: 30px;
+ margin-bottom: 20px;
+ font-weight: 700;
+ display: block;
+ text-transform: uppercase;
+ letter-spacing: 3px;
+ font-family: var(--font-title);
+ border-bottom: 2px solid var(--border-ice);
+ padding-bottom: 12px;
+ text-shadow: 0 0 15px var(--ice-accent);
+ }
+
+ .container .data-box {
+ background: linear-gradient(135deg, var(--bg-card), rgba(22, 40, 64, 0.8));
+ padding: 25px;
+ border: 2px solid var(--border-ice);
+ border-left: 5px solid var(--primary-accent);
+ margin-bottom: 25px;
+ box-shadow:
+ 0 0 20px rgba(135, 206, 235, 0.3),
+ inset 0 0 20px rgba(135, 206, 235, 0.1);
+ font-size: 1rem;
+ }
+
+ .container .data-row {
+ display: flex;
+ align-items: center;
+ margin-bottom: 12px;
+ padding: 10px 0;
+ border-bottom: 1px solid rgba(176, 229, 245, 0.2);
+ }
+
+ .container .data-row:last-child {
+ margin-bottom: 0;
+ }
+
+ .container .data-arrow {
+ color: var(--ice-accent);
+ font-weight: bold;
+ margin-right: 15px;
+ font-family: var(--font-code);
+ font-size: 1.2rem;
+ text-shadow: 0 0 10px var(--ice-accent);
+ }
+
+ .container .data-label {
+ color: var(--text-muted);
+ font-weight: 700;
+ font-family: var(--font-body);
+ margin-right: 15px;
+ min-width: 120px;
+ text-transform: uppercase;
+ letter-spacing: 1px;
+ }
+
+ .container a {
+ color: var(--text-bright);
+ text-decoration: none;
+ font-weight: 600;
+ transition: all .2s;
+ }
+
+ .container .data-row a {
+ border-bottom: 1px dotted var(--ice-accent);
+ }
+
+ .container a:hover {
+ color: var(--ice-accent);
+ text-shadow: 0 0 10px var(--ice-accent);
+ }
+
+ .container .data-row a:hover {
+ border-bottom-style: solid;
+ }
+
+ .container .dropdown-container {
+ margin-top: 30px;
+ }
+
+ .container .dropdown-summary {
+ cursor: pointer;
+ padding: 15px 20px;
+ color: var(--text-muted);
+ font-size: 1.2rem;
+ font-weight: 700;
+ text-transform: uppercase;
+ font-family: var(--font-title);
+ letter-spacing: 2px;
+ list-style: none;
+ transition: all 0.2s ease;
+ border: 1px solid var(--border-ice);
+ background: rgba(43, 79, 118, 0.2);
+ box-shadow: 0 0 15px rgba(135, 206, 235, 0.2);
+ }
+ .container .dropdown-summary:hover {
+ color: var(--ice-accent);
+ background: rgba(43, 79, 118, 0.3);
+ box-shadow: 0 0 25px rgba(135, 206, 235, 0.3);
+ }
+
+ .container .dropdown-summary::-webkit-details-marker {
+ display: none;
+ }
+
+ .container .dropdown-arrow {
+ color: var(--ice-accent);
+ margin-right: 15px;
+ transition: transform 0.2s ease;
+ text-shadow: 0 0 10px var(--ice-accent);
+ }
+
+ .container details[open] .dropdown-arrow {
+ transform: rotate(90deg);
+ }
+
+ .container .dropdown-content {
+ margin-top: 20px;
+ padding: 20px 15px;
+ background: linear-gradient(135deg, var(--bg-card), rgba(22, 40, 64, 0.9));
+ border: 2px solid var(--border-ice);
+ box-shadow:
+ 0 0 20px rgba(135, 206, 235, 0.3),
+ inset 0 0 30px rgba(135, 206, 235, 0.1);
+ }
+
+ .container .config-title {
+ color: var(--text-bright);
+ font-size: 1.1rem;
+ margin-bottom: 10px;
+ font-family: var(--font-body);
+ text-transform: uppercase;
+ letter-spacing: 2px;
+ font-weight: 700;
+ text-shadow: 0 0 10px var(--ice-accent);
+ }
+
+ .container pre {
+ background: #050a14;
+ padding: 8px 16px;
+ margin: 0;
+ border: 2px solid var(--border-ice);
+ white-space: pre;
+ overflow-x: auto;
+ color: var(--text-main);
+ box-shadow:
+ 0 0 20px rgba(135, 206, 235, 0.2),
+ inset 0 0 20px rgba(135, 206, 235, 0.15);
+ }
+
+ .container pre code {
+ background: none;
+ color: inherit;
+ padding: 0;
+ margin: 0;
+ display: block;
+ border: none;
+ outline: none;
+ }
+
+ .container code {
+ font-family: var(--font-code);
+ color: var(--ice-accent);
+ background: rgba(176, 229, 245, 0.15);
+ padding: 4px 8px;
+ border: 1px solid rgba(135, 206, 235, 0.3);
+ }
 </style>
+ <html lang="en">
+ <head>
+ <meta charset="UTF-8">
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
+ <title>Iceblink</title>
+ <link rel="preconnect" href="https://fonts.googleapis.com">
+ <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
+ <link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;600;700;800&family=Source+Sans+Pro:ital,wght@0,400;0,600;1,400&family=JetBrains+Mono:wght@400;500&display=swap" rel="stylesheet">
+ </head>
+ <body>
 
+ <div class="container">
+ <div class="title-container">
+ <div class="glitchy-overlay"></div>
+ <div class="title-wrapper">
+ <h1 class="title-main">
+ <span class="title-prefix">ICEBLINK</span>
+ </h1>
+ <div class="version-indicator">VERSION 2</div>
+ </div>
+ </div>
 
+ ![image](https://cdn-uploads.huggingface.co/production/uploads/65b19c6c638328850e12d38c/zA2QsjdJNOC-62-bZiOun.png)
+
+ <div class="section-container">
+ <div class="section-header">
+ <div class="section-indicator"></div>
+ <h2 class="section-title">Overview</h2>
+ </div>
+ <div class="section-content">
+ <p>Another attempt at GLM 4.5 Air, this time using a different training framework, some updated data, and better hyperparameters.</p>
+ <p>This is a creative writing and RP model. It's pretty verbose. The intent is to keep the behavior of the original model while improving its writing, dialogue, and creativity.</p>
+ <p>Compared to the original Iceblink, the effect is more pronounced here, with hopefully minimal impact on intelligence.</p>
 </div>
 </div>
 
+ <div class="section-container">
+ <div class="section-header">
+ <div class="section-indicator"></div>
+ <h2 class="section-title">SillyTavern Settings</h2>
+ </div>
+ <div class="section-content">
+ <h3 class="subheading">Recommended Roleplay Format</h3>
+ <div class="data-box">
+ <div class="data-row">
+ <span class="data-arrow">></span>
+ <span class="data-label">Actions:</span>
+ <span>In plaintext</span>
+ </div>
+ <div class="data-row">
+ <span class="data-arrow">></span>
+ <span class="data-label">Dialogue:</span>
+ <span>"In quotes"</span>
+ </div>
+ <div class="data-row">
+ <span class="data-arrow">></span>
+ <span class="data-label">Thoughts:</span>
+ <span>*In asterisks*</span>
+ </div>
+ </div>
+ <h3 class="subheading">Recommended Samplers</h3>
+ <div class="data-box">
+ <div class="data-row">
+ <span class="data-arrow">></span>
+ <span class="data-label">Temp:</span>
+ <span>0.8 - 0.9</span>
+ </div>
+ <div class="data-row">
+ <span class="data-arrow">></span>
+ <span class="data-label">MinP:</span>
+ <span>0.05</span>
+ </div>
+ <div class="data-row">
+ <span class="data-arrow">></span>
+ <span class="data-label">TopP:</span>
+ <span>0.95 - 1.00</span>
+ </div>
+ </div>
+ <h3 class="subheading">Instruct</h3>
+ <div class="data-box">
+ <p style="margin: 0;">GLM4.5 (no thinking): <a href="https://huggingface.co/zerofata/GLM-4.5-Iceblink-106B-A12B/raw/main/GLM45-NoThink-SillyTavern-Preset.json">SillyTavern Preset</a></p>
+ </div>
+ </div>
 </div>
 </div>
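Outside SillyTavern, the same sampler values map directly onto an OpenAI-compatible local endpoint; a rough sketch (the URL is a placeholder, and `min_p` is a common llama.cpp/TabbyAPI extension rather than part of the OpenAI spec):

```python
# Sketch: send the recommended samplers to an OpenAI-compatible chat endpoint.
import requests

payload = {
    "model": "GLM-4.5-Iceblink-v2-106B-A12B",
    "messages": [{"role": "user", "content": "Describe the frozen harbor at dawn."}],
    "temperature": 0.85,  # recommended range 0.8 - 0.9
    "top_p": 0.95,        # recommended range 0.95 - 1.00
    "min_p": 0.05,        # server-side extension, not in the OpenAI spec
}
resp = requests.post("http://localhost:5000/v1/chat/completions", json=payload, timeout=300)
print(resp.json()["choices"][0]["message"]["content"])
```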
 
+ <div class="section-container">
+ <div class="section-header">
+ <div class="section-indicator"></div>
+ <h2 class="section-title">Quantizations</h2>
 </div>
+ <div class="section-content">
+ <div style="margin-bottom: 20px;">
+ <h3 class="subheading">GGUF</h3>
+ <div class="data-box">
+ <div class="data-row">
+ <span class="data-arrow">></span>
+ <a href="https://huggingface.co/ddh0/Iceblink-v3-SFT-3-GGUF">iMatrix (ddh0)</a>
+ </div>
+ </div>
+ </div>
 </div>
 </div>
 
+ <div class="section-container">
+ <div class="section-header">
+ <div class="section-indicator"></div>
+ <h2 class="section-title">Creation Process</h2>
+ </div>
+ <div class="section-content">
+ <p>Creation process: SFT.</p>
+ <p>SFT on approximately 13 million tokens of SFW / NSFW RP, stories, creative instruct & chat data. Some of the SFW datasets are public and can be found in the model's datasets list.</p>
+ <p>I've switched over from Axolotl to MS-Swift w/ Megatron for training MoE models. That gives a roughly 5-10x training speedup, thanks to escaping the naive MoE implementation in TRL; this run took only 40 minutes to train, excluding environment setup.</p>
+ <p>A low LR appears to be king for GLM Air. Going any higher, I've found it extremely easy to start overcooking the model.</p>
+ <div class="dropdown-container">
+ <details>
+ <summary class="dropdown-summary">
+ <span class="dropdown-arrow">></span>
+ MS-Swift config
+ </summary>
+ <div class="dropdown-content">
+ <p>Not optimized for cost / performance efficiency, YMMV.</p>
+ <div class="config-title">SFT (8*H200)</div>
+ <pre><code>PYTORCH_CUDA_ALLOC_CONF='expandable_segments:True' \
+ NPROC_PER_NODE=8 \
+ WANDB_API_KEY=wandb_key \
+ CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 \
+ megatron sft \
+ --load '/workspace/glm-4.5-air-mcore' \
+ --dataset '/workspace/joined_dataset_cleaned_modified.jsonl' \
+ --load_from_cache_file true \
+ --train_type lora \
+ --lora_rank 256 \
+ --lora_alpha 16 \
+ --use-rslora true \
+ --target_modules all-linear \
+ --split_dataset_ratio 0.01 \
+ --moe_permute_fusion true \
+ --tensor_model_parallel_size 8 \
+ --expert_tensor_parallel_size 1 \
+ --expert_model_parallel_size 8 \
+ --moe_grouped_gemm true \
+ --moe_shared_expert_overlap true \
+ --moe_aux_loss_coeff 6e-5 \
+ --micro_batch_size 4 \
+ --global_batch_size 32 \
+ --recompute_granularity full \
+ --recompute_method uniform \
+ --recompute_num_layers 1 \
+ --max_epochs 2 \
+ --cross_entropy_loss_fusion true \
+ --lr 6e-6 \
+ --lr_warmup_fraction 0.05 \
+ --min_lr 6e-7 \
+ --save megatron_output/Iceblink-v3-SFT-3 \
+ --eval_interval 20 \
+ --save_interval 25 \
+ --finetune true \
+ --packing true \
+ --max_length 10280 \
+ --num_workers 8 \
+ --dataset_num_proc 8 \
+ --no_save_optim true \
+ --no_save_rng true \
+ --sequence_parallel true \
+ --wandb_project Megatron-Air-SFT \
+ --wandb_exp_name Iceblink-v3-SFT-3 \
+ --attention_backend flash</code></pre>
+ </div>
+ </details>
+ </div>
 </div>
 </div>
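One detail worth noting in the command above: with rsLoRA (`--use-rslora true`), the adapter's effective scale is alpha / sqrt(rank) rather than the classic alpha / rank, so rank 256 with alpha 16 lands on a scale of exactly 1.0:

```python
# rsLoRA vs. classic LoRA scaling for the settings above.
import math

rank, alpha = 256, 16
print(alpha / rank)             # 0.0625  (classic LoRA scaling)
print(alpha / math.sqrt(rank))  # 1.0     (rsLoRA scaling, as enabled here)
```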
 
+ <div class="section-container">
+ <div class="section-header">
+ <div class="section-indicator"></div>
+ <h2 class="section-title">Special Thanks</h2>
+ </div>
+ <div class="section-content">
+ <p>A shoutout to the people in the BeaverAI Discord who helped me test this model and my intermediate versions.</p>
+ <p>ddh0 (Madison), Ambius, Dysfunctional & my dude.</p>
+ </div>
+ </div>
+ </div>
+ </body>
+ </html>
args.json ADDED
@@ -0,0 +1,467 @@
+ {
+ "use_ray": false,
+ "ray_exp_name": null,
+ "device_groups": null,
+ "model": "zai-org/GLM-4.5-Air",
+ "model_type": "glm4_5",
+ "model_revision": null,
+ "task_type": "causal_lm",
+ "torch_dtype": "bfloat16",
+ "attn_impl": null,
+ "new_special_tokens": [],
+ "num_labels": null,
+ "problem_type": null,
+ "rope_scaling": null,
+ "device_map": null,
+ "max_memory": {},
+ "max_model_len": null,
+ "local_repo_path": null,
+ "init_strategy": null,
+ "template": "glm4_5",
+ "system": null,
+ "max_length": 10280,
+ "truncation_strategy": "delete",
+ "max_pixels": null,
+ "agent_template": null,
+ "norm_bbox": null,
+ "use_chat_template": true,
+ "padding_free": true,
+ "padding_side": "right",
+ "loss_scale": "default",
+ "sequence_parallel_size": 1,
+ "response_prefix": null,
+ "template_backend": "swift",
+ "dataset": [
+ "/workspace/joined_dataset_cleaned_modified.jsonl"
+ ],
+ "val_dataset": [],
+ "split_dataset_ratio": 0.01,
+ "data_seed": 42,
+ "dataset_num_proc": 8,
+ "load_from_cache_file": true,
+ "dataset_shuffle": true,
+ "val_dataset_shuffle": false,
+ "streaming": false,
+ "interleave_prob": null,
+ "stopping_strategy": "first_exhausted",
+ "shuffle_buffer_size": 1000,
+ "download_mode": "reuse_dataset_if_exists",
+ "columns": {},
+ "strict": false,
+ "remove_unused_columns": true,
+ "model_name": null,
+ "model_author": null,
+ "custom_dataset_info": [],
+ "quant_method": null,
+ "quant_bits": null,
+ "hqq_axis": null,
+ "bnb_4bit_compute_dtype": "bfloat16",
+ "bnb_4bit_quant_type": "nf4",
+ "bnb_4bit_use_double_quant": true,
+ "bnb_4bit_quant_storage": null,
+ "max_new_tokens": null,
+ "temperature": null,
+ "top_k": null,
+ "top_p": null,
+ "repetition_penalty": null,
+ "num_beams": 1,
+ "stream": false,
+ "stop_words": [],
+ "logprobs": false,
+ "top_logprobs": null,
+ "ckpt_dir": "/workspace/glm-4.5-air-mcore",
+ "lora_modules": [],
+ "tuner_backend": "peft",
+ "train_type": "lora",
+ "adapters": [],
+ "external_plugins": [],
+ "seed": 42,
+ "model_kwargs": {},
+ "load_args": false,
+ "load_data_args": false,
+ "packing": true,
+ "packing_length": 10280,
+ "lazy_tokenize": false,
+ "cached_dataset": [],
+ "custom_register_path": [],
+ "use_hf": false,
+ "hub_token": null,
+ "ddp_timeout": 18000000,
+ "ddp_backend": null,
+ "ignore_args_error": false,
+ "use_swift_lora": false,
+ "freeze_llm": false,
+ "freeze_vit": true,
+ "freeze_aligner": true,
+ "freeze_parameters": [],
+ "freeze_parameters_regex": null,
+ "freeze_parameters_ratio": 0.0,
+ "trainable_parameters": [],
+ "trainable_parameters_regex": null,
+ "adapter_load": null,
+ "target_modules": [
+ "all-linear"
+ ],
+ "target_regex": null,
+ "modules_to_save": [],
+ "lora_rank": 256,
+ "lora_alpha": 16,
+ "lora_dropout": 0.05,
+ "lora_bias": "none",
+ "lora_dtype": null,
+ "use_rslora": true,
+ "rlhf_type": null,
+ "ref_load": null,
+ "ref_adapter_load": null,
+ "beta": 0.1,
+ "rpo_alpha": null,
+ "reference_free": false,
+ "label_smoothing": 0.0,
+ "f_divergence_type": "reverse_kl",
+ "loss_type": null,
+ "desirable_weight": 1.0,
+ "undesirable_weight": 1.0,
+ "calculate_KL": null,
+ "center_rewards_coefficient": null,
+ "padded_vocab_size": 151552,
+ "initialize_embedding": false,
+ "mlp_padding_free": false,
+ "dataloader_persistent_workers": true,
+ "dataloader_prefetch_factor": 10,
+ "architectures": "Glm4MoeForCausalLM",
+ "llm_architectures": null,
+ "max_epochs": 2,
+ "enable_dft_loss": false,
+ "enable_channel_loss": false,
+ "original_max_position_embeddings": null,
+ "partial_rotary_factor": 0.5,
+ "use_shared_expert_gate": false,
+ "vit_gradient_checkpointing": true,
+ "gradient_checkpointing_kwargs": null,
+ "linear_num_value_heads": null,
+ "linear_num_key_heads": null,
+ "linear_key_head_dim": null,
+ "linear_value_head_dim": null,
+ "linear_conv_kernel_dim": null,
+ "layer_types": null,
+ "mrope_interleaved": false,
+ "micro_batch_size": 4,
+ "global_batch_size": 32,
+ "recompute_granularity": "full",
+ "recompute_method": "uniform",
+ "recompute_num_layers": 1,
+ "recompute_modules": [
+ "core_attn"
+ ],
+ "use_cpu_initialization": false,
+ "deterministic_mode": false,
+ "train_iters": null,
+ "log_interval": 5,
+ "tensorboard_dir": "/workspace/megatron_output/Iceblink-v3-SFT-3/v0-20251101-214719/runs",
+ "no_masked_softmax_fusion": false,
+ "no_bias_dropout_fusion": false,
+ "no_bias_swiglu_fusion": false,
+ "no_rope_fusion": false,
+ "no_gradient_accumulation_fusion": false,
+ "cross_entropy_loss_fusion": true,
+ "cross_entropy_fusion_impl": "native",
+ "calculate_per_token_loss": true,
+ "use_flash_attn": false,
+ "attention_backend": "flash",
+ "optimizer": "adam",
+ "optimizer_cpu_offload": false,
+ "optimizer_offload_fraction": 1.0,
+ "use_precision_aware_optimizer": false,
+ "main_grads_dtype": "fp32",
+ "main_params_dtype": "fp32",
+ "exp_avg_dtype": "fp32",
+ "exp_avg_sq_dtype": "fp32",
+ "dataloader_type": "cyclic",
+ "manual_gc": false,
+ "manual_gc_interval": 0,
+ "lr": 6e-06,
+ "lr_decay_style": "cosine",
+ "lr_decay_iters": null,
+ "lr_warmup_iters": 0,
+ "lr_warmup_fraction": 0.05,
+ "min_lr": 6e-07,
+ "weight_decay": 0.1,
+ "clip_grad": 1.0,
+ "adam_beta1": 0.9,
+ "adam_beta2": 0.95,
+ "adam_eps": 1e-08,
+ "sgd_momentum": 0.9,
+ "save": "/workspace/megatron_output/Iceblink-v3-SFT-3/v0-20251101-214719",
+ "save_interval": 25,
+ "save_retain_interval": null,
+ "no_save_optim": true,
+ "no_save_rng": true,
+ "load": "/workspace/glm-4.5-air-mcore",
+ "no_load_optim": false,
+ "no_load_rng": false,
+ "finetune": true,
+ "ckpt_format": "torch_dist",
+ "no_initialization": true,
+ "auto_detect_ckpt_format": true,
+ "exit_on_missing_checkpoint": true,
+ "async_save": false,
+ "use_persistent_ckpt_worker": false,
+ "ckpt_fully_parallel_load": false,
+ "ckpt_assume_constant_structure": false,
+ "distributed_backend": "nccl",
+ "local_rank": 0,
+ "use_distributed_optimizer": true,
+ "tensor_model_parallel_size": 8,
+ "pipeline_model_parallel_size": 1,
+ "decoder_first_pipeline_num_layers": null,
+ "decoder_last_pipeline_num_layers": null,
+ "sequence_parallel": true,
+ "context_parallel_size": 1,
+ "tp_comm_overlap": false,
+ "overlap_grad_reduce": false,
+ "overlap_param_gather": false,
+ "distributed_timeout_minutes": 300000,
+ "num_layers_per_virtual_pipeline_stage": null,
+ "num_virtual_stages_per_pipeline_rank": null,
+ "microbatch_group_size_per_virtual_pipeline_stage": null,
+ "pipeline_model_parallel_layout": null,
+ "num_layers": 46,
+ "hidden_size": 4096,
+ "ffn_hidden_size": 10944,
+ "num_attention_heads": 96,
+ "group_query_attention": true,
+ "num_query_groups": 8,
+ "max_position_embeddings": 131072,
+ "position_embedding_type": "rope",
+ "mrope_section": null,
+ "rotary_base": 1000000,
+ "rotary_percent": 1.0,
+ "rotary_interleaved": false,
+ "normalization": "RMSNorm",
+ "norm_epsilon": 1e-05,
+ "swiglu": true,
+ "untie_embeddings_and_output_weights": true,
+ "disable_bias_linear": true,
+ "add_qkv_bias": true,
+ "attention_dropout": 0.0,
+ "hidden_dropout": 0.0,
+ "kv_channels": 128,
+ "qk_layernorm": false,
+ "transformer_impl": "transformer_engine",
+ "num_experts": 128,
+ "moe_layer_freq": "[0]*1+[1]*45",
+ "moe_ffn_hidden_size": 1408,
+ "moe_shared_expert_intermediate_size": 1408,
+ "moe_router_topk": 8,
+ "moe_router_pre_softmax": false,
+ "moe_router_dtype": "fp32",
+ "moe_router_score_function": "sigmoid",
+ "moe_router_bias_update_rate": 0.001,
+ "moe_router_enable_expert_bias": true,
+ "moe_router_topk_scaling_factor": 1.0,
+ "moe_router_load_balancing_type": "aux_loss",
+ "expert_model_parallel_size": 8,
+ "expert_tensor_parallel_size": 1,
+ "moe_token_dispatcher_type": null,
+ "moe_enable_deepep": false,
+ "moe_grouped_gemm": true,
+ "moe_permute_fusion": true,
+ "moe_aux_loss_coeff": 6e-05,
+ "moe_z_loss_coeff": null,
+ "moe_shared_expert_overlap": true,
+ "moe_layer_recompute": false,
+ "moe_expert_capacity_factor": null,
+ "moe_pad_expert_input_to_capacity": false,
+ "moe_token_drop_policy": null,
+ "multi_latent_attention": false,
+ "q_lora_rank": null,
+ "kv_lora_rank": 32,
+ "qk_head_dim": 128,
+ "qk_pos_emb_head_dim": 64,
+ "fp8_format": null,
+ "fp8_recipe": "delayed",
+ "fp8_amax_history_len": 1024,
+ "fp8_amax_compute_algo": "max",
+ "fp8_param_gather": false,
+ "fp16": false,
+ "bf16": true,
+ "apply_query_key_layer_scaling": false,
+ "attention_softmax_in_fp32": true,
+ "log_params_norm": false,
+ "log_throughput": false,
+ "tensorboard_log_interval": 1,
+ "tensorboard_queue_size": 50,
+ "log_timers_to_tensorboard": true,
+ "no_log_learning_rate_to_tensorboard": false,
+ "log_validation_ppl_to_tensorboard": true,
+ "log_memory_to_tensorboard": true,
+ "logging_level": null,
+ "wandb_project": "Megatron-Air-SFT",
+ "wandb_exp_name": "Iceblink-v3-SFT-3",
+ "wandb_save_dir": null,
+ "eval_iters": -1,
+ "eval_interval": 20,
+ "seq_length": 10280,
+ "num_workers": 8,
+ "megatron_extra_kwargs": {},
+ "add_version": true,
+ "rank": 0,
+ "global_world_size": 8,
+ "local_world_size": 8,
+ "model_suffix": "GLM-4.5-Air",
+ "model_info": "ModelInfo(model_type='glm4_5', model_dir='/root/.cache/modelscope/hub/models/ZhipuAI/GLM-4___5-Air', torch_dtype=torch.bfloat16, max_model_len=131072, quant_method=None, quant_bits=None, rope_scaling=None, is_moe_model=True, config=None, task_type='causal_lm', num_labels=None)",
+ "model_meta": "ModelMeta(model_type='glm4_5', model_groups=[ModelGroup(models=[Model(ms_model_id='ZhipuAI/GLM-4.5-Air-Base', hf_model_id='zai-org/GLM-4.5-Air-Base', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='ZhipuAI/GLM-4.5-Air', hf_model_id='zai-org/GLM-4.5-Air', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='ZhipuAI/GLM-4.5-Air-FP8', hf_model_id='zai-org/GLM-4.5-Air-FP8', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='ZhipuAI/GLM-4.5-Base', hf_model_id='zai-org/GLM-4.5-Base', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='ZhipuAI/GLM-4.5', hf_model_id='zai-org/GLM-4.5', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='ZhipuAI/GLM-4.5-FP8', hf_model_id='zai-org/GLM-4.5-FP8', model_path=None, ms_revision=None, hf_revision=None)], ignore_patterns=None, requires=None, tags=[]), ModelGroup(models=[Model(ms_model_id='ZhipuAI/GLM-4.6', hf_model_id='zai-org/GLM-4.6', model_path=None, ms_revision=None, hf_revision=None)], ignore_patterns=None, requires=None, tags=[])], template='glm4_5', get_function=<function get_model_tokenizer_with_flash_attn at 0x76c9ab052520>, model_arch=None, architectures=['Glm4MoeForCausalLM'], additional_saved_files=[], torch_dtype=None, is_multimodal=False, is_reward=False, is_reranker=False, task_type=None, ignore_patterns=None, requires=['transformers>=4.54'], tags=[])",
+ "model_dir": "/root/.cache/modelscope/hub/models/ZhipuAI/GLM-4___5-Air",
+ "hub": "<class 'swift.hub.hub.MSHub'>",
+ "megatron_model_meta": "MegatronModelMeta(megatron_model_type='gpt', model_types=['qwen2', 'qwen2_5', 'qwq', 'qwq_preview', 'qwen2_5_math', 'llama', 'llama3', 'llama3_1', 'llama3_2', 'longwriter_llama3_1', 'codefuse_codellama', 'marco_o1', 'deepseek', 'deepseek_r1_distill', 'yi', 'yi_coder', 'sus', 'skywork_o1', 'openbuddy_llama', 'openbuddy_llama3', 'megrez', 'reflection', 'numina', 'ziya', 'mengzi3', 'qwen3', 'qwen3_thinking', 'qwen3_nothinking', 'qwen2_moe', 'qwen3_moe', 'qwen3_moe_thinking', 'qwen3_coder', 'internlm3', 'mimo', 'mimo_rl', 'moonlight', 'deepseek_moe', 'deepseek_v2', 'deepseek_v2_5', 'deepseek_r1', 'dots1', 'ernie', 'glm4_5', 'deepseek_v3_1', 'ernie_thinking'], convert_mcore2hf=<function convert_mcore2hf at 0x76c91dcb0fe0>, convert_hf2mcore=<function convert_hf2mcore at 0x76c91dcb0b80>, model_cls=<class 'swift.megatron.model.gpt_model.GPTModel'>, convert_hf_config=<function convert_gpt_hf_config at 0x76c91de6c9a0>, get_transformer_layer_spec=None, model_provider=<function model_provider at 0x76c91dddb880>, visual_cls=None, extra_args_provider=None)",
+ "extra_args": {
+ "use_ray": false,
+ "ray_exp_name": null,
+ "device_groups": null,
+ "model": "ZhipuAI/GLM-4.5-Air",
+ "model_type": "glm4_5",
+ "model_revision": null,
+ "task_type": "causal_lm",
+ "torch_dtype": "bfloat16",
+ "attn_impl": null,
+ "new_special_tokens": [],
+ "num_labels": null,
+ "problem_type": null,
+ "rope_scaling": null,
+ "device_map": null,
+ "max_memory": {},
+ "max_model_len": null,
+ "local_repo_path": null,
+ "init_strategy": null,
+ "template": "glm4_5",
+ "system": null,
+ "max_length": 10280,
+ "truncation_strategy": "delete",
+ "max_pixels": null,
+ "agent_template": null,
+ "norm_bbox": null,
+ "use_chat_template": true,
+ "padding_free": true,
+ "padding_side": "right",
+ "sequence_parallel_size": 1,
+ "response_prefix": null,
+ "template_backend": "swift",
+ "dataset": [
+ "/workspace/joined_dataset_cleaned_modified.jsonl"
+ ],
+ "val_dataset": [],
+ "split_dataset_ratio": 0.01,
+ "data_seed": 42,
+ "dataset_num_proc": 8,
+ "load_from_cache_file": true,
+ "dataset_shuffle": true,
+ "val_dataset_shuffle": false,
+ "streaming": false,
+ "interleave_prob": null,
+ "stopping_strategy": "first_exhausted",
+ "shuffle_buffer_size": 1000,
+ "download_mode": "reuse_dataset_if_exists",
+ "columns": {},
+ "strict": false,
+ "remove_unused_columns": true,
+ "model_name": null,
+ "model_author": null,
+ "custom_dataset_info": [],
+ "quant_method": null,
+ "quant_bits": null,
+ "hqq_axis": null,
+ "bnb_4bit_compute_dtype": "bfloat16",
+ "bnb_4bit_quant_type": "nf4",
+ "bnb_4bit_use_double_quant": true,
+ "bnb_4bit_quant_storage": null,
+ "max_new_tokens": null,
+ "temperature": null,
+ "top_k": null,
+ "top_p": null,
+ "repetition_penalty": null,
+ "num_beams": 1,
+ "stream": false,
+ "stop_words": [],
+ "logprobs": false,
+ "top_logprobs": null,
+ "ckpt_dir": "/workspace/glm-4.5-air-mcore",
+ "lora_modules": [],
+ "tuner_backend": "peft",
+ "train_type": "lora",
+ "adapters": [],
+ "external_plugins": [],
+ "model_kwargs": {},
+ "load_args": false,
+ "load_data_args": false,
+ "packing": true,
+ "packing_length": 10280,
+ "lazy_tokenize": false,
+ "cached_dataset": [],
+ "custom_register_path": [],
+ "use_hf": false,
+ "hub_token": null,
+ "ddp_timeout": 18000000,
+ "ddp_backend": null,
+ "ignore_args_error": false,
+ "use_swift_lora": false,
+ "freeze_llm": false,
+ "freeze_vit": true,
+ "freeze_aligner": true,
+ "freeze_parameters": [],
+ "freeze_parameters_regex": null,
+ "freeze_parameters_ratio": 0.0,
+ "trainable_parameters": [],
+ "trainable_parameters_regex": null,
+ "adapter_load": null,
+ "target_modules": [
+ "all-linear"
+ ],
+ "target_regex": null,
+ "modules_to_save": [],
+ "lora_rank": 256,
+ "lora_alpha": 16,
+ "lora_dropout": 0.05,
+ "lora_bias": "none",
+ "lora_dtype": null,
+ "use_rslora": true,
+ "rlhf_type": null,
+ "ref_load": null,
+ "ref_adapter_load": null,
+ "beta": 0.1,
+ "rpo_alpha": null,
+ "reference_free": false,
+ "label_smoothing": 0.0,
+ "f_divergence_type": "reverse_kl",
+ "loss_type": null,
+ "desirable_weight": 1.0,
+ "undesirable_weight": 1.0,
+ "calculate_KL": null,
+ "center_rewards_coefficient": null,
+ "padded_vocab_size": 151552,
+ "initialize_embedding": false,
+ "mlp_padding_free": false,
+ "dataloader_persistent_workers": true,
+ "dataloader_prefetch_factor": 10,
+ "architectures": "Glm4MoeForCausalLM",
+ "llm_architectures": null,
+ "max_epochs": 2,
+ "enable_dft_loss": false,
+ "enable_channel_loss": false,
+ "original_max_position_embeddings": null,
+ "partial_rotary_factor": 0.5,
+ "use_shared_expert_gate": false,
+ "vit_gradient_checkpointing": true,
+ "gradient_checkpointing_kwargs": null,
+ "linear_num_value_heads": null,
+ "linear_num_key_heads": null,
+ "linear_key_head_dim": null,
+ "linear_value_head_dim": null,
+ "linear_conv_kernel_dim": null,
+ "layer_types": null,
+ "mrope_interleaved": false,
+ "add_version": true,
+ "model_info": "ModelInfo(model_type='glm4_5', model_dir='/root/.cache/modelscope/hub/models/ZhipuAI/GLM-4___5-Air', torch_dtype=torch.bfloat16, max_model_len=131072, quant_method=None, quant_bits=None, rope_scaling=None, is_moe_model=True, config=None, task_type='causal_lm', num_labels=None)",
+ "model_meta": "ModelMeta(model_type='glm4_5', model_groups=[ModelGroup(models=[Model(ms_model_id='ZhipuAI/GLM-4.5-Air-Base', hf_model_id='zai-org/GLM-4.5-Air-Base', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='ZhipuAI/GLM-4.5-Air', hf_model_id='zai-org/GLM-4.5-Air', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='ZhipuAI/GLM-4.5-Air-FP8', hf_model_id='zai-org/GLM-4.5-Air-FP8', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='ZhipuAI/GLM-4.5-Base', hf_model_id='zai-org/GLM-4.5-Base', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='ZhipuAI/GLM-4.5', hf_model_id='zai-org/GLM-4.5', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='ZhipuAI/GLM-4.5-FP8', hf_model_id='zai-org/GLM-4.5-FP8', model_path=None, ms_revision=None, hf_revision=None)], ignore_patterns=None, requires=None, tags=[]), ModelGroup(models=[Model(ms_model_id='ZhipuAI/GLM-4.6', hf_model_id='zai-org/GLM-4.6', model_path=None, ms_revision=None, hf_revision=None)], ignore_patterns=None, requires=None, tags=[])], template='glm4_5', get_function=<function get_model_tokenizer_with_flash_attn at 0x76c9ab052520>, model_arch=None, architectures=['Glm4MoeForCausalLM'], additional_saved_files=[], torch_dtype=None, is_multimodal=False, is_reward=False, is_reranker=False, task_type=None, ignore_patterns=None, requires=['transformers>=4.54'], tags=[])",
+ "megatron_model_meta": "MegatronModelMeta(megatron_model_type='gpt', model_types=['qwen2', 'qwen2_5', 'qwq', 'qwq_preview', 'qwen2_5_math', 'llama', 'llama3', 'llama3_1', 'llama3_2', 'longwriter_llama3_1', 'codefuse_codellama', 'marco_o1', 'deepseek', 'deepseek_r1_distill', 'yi', 'yi_coder', 'sus', 'skywork_o1', 'openbuddy_llama', 'openbuddy_llama3', 'megrez', 'reflection', 'numina', 'ziya', 'mengzi3', 'qwen3', 'qwen3_thinking', 'qwen3_nothinking', 'qwen2_moe', 'qwen3_moe', 'qwen3_moe_thinking', 'qwen3_coder', 'internlm3', 'mimo', 'mimo_rl', 'moonlight', 'deepseek_moe', 'deepseek_v2', 'deepseek_v2_5', 'deepseek_r1', 'dots1', 'ernie', 'glm4_5', 'deepseek_v3_1', 'ernie_thinking'], convert_mcore2hf=<function convert_mcore2hf at 0x76c91dcb0fe0>, convert_hf2mcore=<function convert_hf2mcore at 0x76c91dcb0b80>, model_cls=<class 'swift.megatron.model.gpt_model.GPTModel'>, convert_hf_config=<function convert_gpt_hf_config at 0x76c91de6c9a0>, get_transformer_layer_spec=None, model_provider=<function model_provider at 0x76c91dddb880>, visual_cls=None, extra_args_provider=None)"
+ }
+ }
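The batch settings above also let you sanity-check the short training time quoted in the README; a back-of-the-envelope sketch (packing efficiency and the 1% eval split are ignored, so treat the step count as approximate):

```python
# Rough optimizer-step count implied by args.json.
global_batch_size = 32       # sequences per optimizer step
max_length = 10280           # tokens per packed sequence
dataset_tokens = 13_000_000  # "approximately 13 million tokens" per the README
max_epochs = 2

steps = dataset_tokens * max_epochs / (global_batch_size * max_length)
print(round(steps))  # ~79 steps for the full run
```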
chat_template.jinja ADDED
@@ -0,0 +1,103 @@
+ [gMASK]<sop>
+ {%- if tools -%}
+ <|system|>
+ # Tools
+
+ You may call one or more functions to assist with the user query.
+
+ You are provided with function signatures within <tools></tools> XML tags:
+ <tools>
+ {% for tool in tools %}
+ {{ tool | tojson(ensure_ascii=False) }}
+ {% endfor %}
+ </tools>
+
+ For each function call, output the function name and arguments within the following XML format:
+ <tool_call>{function-name}
+ <arg_key>{arg-key-1}</arg_key>
+ <arg_value>{arg-value-1}</arg_value>
+ <arg_key>{arg-key-2}</arg_key>
+ <arg_value>{arg-value-2}</arg_value>
+ ...
+ </tool_call>{%- endif -%}
+ {%- macro visible_text(content) -%}
+ {%- if content is string -%}
+ {{- content }}
+ {%- elif content is iterable and content is not mapping -%}
+ {%- for item in content -%}
+ {%- if item is mapping and item.type == 'text' -%}
+ {{- item.text }}
+ {%- elif item is string -%}
+ {{- item }}
+ {%- endif -%}
+ {%- endfor -%}
+ {%- else -%}
+ {{- content }}
+ {%- endif -%}
+ {%- endmacro -%}
+ {%- set ns = namespace(last_user_index=-1) %}
+ {%- for m in messages %}
+ {%- if m.role == 'user' %}
+ {% set ns.last_user_index = loop.index0 -%}
+ {%- endif %}
+ {%- endfor %}
+ {% for m in messages %}
+ {%- if m.role == 'user' -%}<|user|>
+ {{ visible_text(m.content) }}
+ {{- '/nothink' if (enable_thinking is defined and not enable_thinking and not visible_text(m.content).endswith("/nothink")) else '' -}}
+ {%- elif m.role == 'assistant' -%}
+ <|assistant|>
+ {%- set reasoning_content = '' %}
+ {%- set content = visible_text(m.content) %}
+ {%- if m.reasoning_content is string %}
+ {%- set reasoning_content = m.reasoning_content %}
+ {%- else %}
+ {%- if '</think>' in content %}
+ {%- set reasoning_content = content.split('</think>')[0].rstrip('\n').split('<think>')[-1].lstrip('\n') %}
+ {%- set content = content.split('</think>')[-1].lstrip('\n') %}
+ {%- endif %}
+ {%- endif %}
+ {%- if loop.index0 > ns.last_user_index and reasoning_content -%}
+ {{ '\n<think>' + reasoning_content.strip() + '</think>'}}
+ {%- else -%}
+ {{ '\n<think></think>' }}
+ {%- endif -%}
+ {%- if content.strip() -%}
+ {{ '\n' + content.strip() }}
+ {%- endif -%}
+ {% if m.tool_calls %}
+ {% for tc in m.tool_calls %}
+ {%- if tc.function %}
+ {%- set tc = tc.function %}
+ {%- endif %}
+ {{ '\n<tool_call>' + tc.name }}
+ {% set _args = tc.arguments %}
+ {% for k, v in _args.items() %}
+ <arg_key>{{ k }}</arg_key>
+ <arg_value>{{ v | tojson(ensure_ascii=False) if v is not string else v }}</arg_value>
+ {% endfor %}
+ </tool_call>{% endfor %}
+ {% endif %}
+ {%- elif m.role == 'tool' -%}
+ {%- if m.content is string -%}
+ {%- if loop.first or (messages[loop.index0 - 1].role != "tool") %}
+ {{- '<|observation|>' }}
+ {%- endif %}
+ {{- '\n<tool_response>\n' }}
+ {{- m.content }}
+ {{- '\n</tool_response>' }}
+ {%- else -%}
+ <|observation|>{% for tr in m.content %}
+
+ <tool_response>
+ {{ tr.output if tr.output is defined else tr }}
+ </tool_response>{% endfor -%}
+ {% endif -%}
+ {%- elif m.role == 'system' -%}
+ <|system|>
+ {{ visible_text(m.content) }}
+ {%- endif -%}
+ {%- endfor -%}
+ {%- if add_generation_prompt -%}
+ <|assistant|>{{- '\n<think></think>' if (enable_thinking is defined and not enable_thinking) else '' -}}
+ {%- endif -%}
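A quick way to exercise this template is through transformers, which forwards extra keyword arguments such as `enable_thinking` into the Jinja context; a minimal sketch:

```python
# Sketch: render the chat template with transformers.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("UnstableLlama/zerofata_GLM-4.5-Iceblink-v2-106B-A12B-exl3")
messages = [{"role": "user", "content": "Hello!"}]

# With enable_thinking=False the template appends /nothink to the user turn
# and pre-fills <think></think> after <|assistant|>, per the branches above.
prompt = tok.apply_chat_template(
    messages, add_generation_prompt=True, tokenize=False, enable_thinking=False
)
print(prompt)
```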
config.json ADDED
@@ -0,0 +1,55 @@
+ {
+ "architectures": [
+ "Glm4MoeForCausalLM"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "dtype": "bfloat16",
+ "eos_token_id": [
+ 151329,
+ 151336,
+ 151338
+ ],
+ "first_k_dense_replace": 1,
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "initializer_range": 0.02,
+ "intermediate_size": 10944,
+ "max_position_embeddings": 131072,
+ "model_type": "glm4_moe",
+ "moe_intermediate_size": 1408,
+ "n_group": 1,
+ "n_routed_experts": 128,
+ "n_shared_experts": 1,
+ "norm_topk_prob": true,
+ "num_attention_heads": 96,
+ "num_experts_per_tok": 8,
+ "num_hidden_layers": 46,
+ "num_key_value_heads": 8,
+ "num_nextn_predict_layers": 1,
+ "pad_token_id": 151329,
+ "partial_rotary_factor": 0.5,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 1000000,
+ "routed_scaling_factor": 1.0,
+ "tie_word_embeddings": false,
+ "topk_group": 1,
+ "transformers_version": "4.57.1",
+ "use_cache": true,
+ "use_qk_norm": false,
+ "vocab_size": 151552,
+ "quantization_config": {
+ "quant_method": "exl3",
+ "version": "0.0.12",
+ "bits": 4.0,
+ "head_bits": 6,
+ "calibration": {
+ "rows": 250,
+ "cols": 2048
+ },
+ "out_scales": "auto",
+ "codebook": "mcg"
+ }
+ }
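The embedded `quantization_config` marks this as the 4.0 bpw EXL3 build; a small sketch that reads it and roughly estimates the weight footprint (linear layers at ~4 bits per weight; real shards also carry the 6-bit head and metadata, which is why the quant table lists 51 GiB):

```python
# Sketch: inspect the EXL3 quantization metadata in config.json.
import json

with open("config.json") as f:
    cfg = json.load(f)

q = cfg["quantization_config"]
print(q["quant_method"], q["bits"], q["head_bits"])  # exl3 4.0 6

# ~106B params at q["bits"] bits per weight:
print(f"~{106e9 * q['bits'] / 8 / 2**30:.1f} GiB")   # ~49.4 GiB
```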
generation_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "_from_model_config": true,
+ "eos_token_id": [
+ 151329,
+ 151336,
+ 151338
+ ],
+ "pad_token_id": 151329,
+ "transformers_version": "4.57.1"
+ }
model-00001-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5e30a37727a5dc40d464809aab2e6ecdb082965dcd86a45c128a1428c1b5797a
+ size 8420456484
model-00002-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:94e39e5f7caa47819de520dd01988e38484a94e6c3d3e103703f0afaec81b307
+ size 8232631620
model-00003-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:41e8f12c5f6f1b65b3a58d6fb420b573ee8dc19768e6e6ab35fe7f7e0d50f0ec
+ size 8232636332
model-00004-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f5ae04114158edac4649ac873fc39981db9602084f13af6c7986d1f4fbb1d6c2
+ size 8232636332
model-00005-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e106425960171f5a5d1da1fa179514415d2004369a4b160286bc05d567268685
+ size 8232636332
model-00006-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fec42ca6517b8787c5a599893bdc699346a93e2e4e61fdaf88b61b01009976d0
+ size 8232636332
model-00007-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a71cf142e438e1e8284ba4a6790eaaff7397e9840cdbceb3c3a5c6a7bf2acbca
+ size 5170249436
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
 
quantization_config.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e456e629ce44b1c4ec718089ad334f641e9c6b50c068d11974e95747bc1c371b
+ size 21625412
special_tokens_map.json ADDED
@@ -0,0 +1,40 @@
+ {
+ "additional_special_tokens": [
+ "<|endoftext|>",
+ "[MASK]",
+ "[gMASK]",
+ "[sMASK]",
+ "<sop>",
+ "<eop>",
+ "<|system|>",
+ "<|user|>",
+ "<|assistant|>",
+ "<|observation|>",
+ "<|begin_of_image|>",
+ "<|end_of_image|>",
+ "<|begin_of_video|>",
+ "<|end_of_video|>",
+ "<|begin_of_audio|>",
+ "<|end_of_audio|>",
+ "<|begin_of_transcription|>",
+ "<|end_of_transcription|>",
+ "<|code_prefix|>",
+ "<|code_middle|>",
+ "<|code_suffix|>",
+ "/nothink"
+ ],
+ "eos_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bda8e2146c3bb7b7e0fc96dcc4f0aeff041c6c27952e3ace0665663ebff346ba
+ size 19970700
tokenizer_config.json ADDED
@@ -0,0 +1,325 @@
+ {
+ "added_tokens_decoder": {
+ "151329": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151330": {
+ "content": "[MASK]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151331": {
+ "content": "[gMASK]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151332": {
+ "content": "[sMASK]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151333": {
+ "content": "<sop>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151334": {
+ "content": "<eop>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151335": {
+ "content": "<|system|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151336": {
+ "content": "<|user|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151337": {
+ "content": "<|assistant|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151338": {
+ "content": "<|observation|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151339": {
+ "content": "<|begin_of_image|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151340": {
+ "content": "<|end_of_image|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151341": {
+ "content": "<|begin_of_video|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151342": {
+ "content": "<|end_of_video|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151343": {
+ "content": "<|begin_of_audio|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151344": {
+ "content": "<|end_of_audio|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151345": {
+ "content": "<|begin_of_transcription|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151346": {
+ "content": "<|end_of_transcription|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151347": {
+ "content": "<|code_prefix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151348": {
+ "content": "<|code_middle|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151349": {
+ "content": "<|code_suffix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151350": {
+ "content": "<think>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151351": {
+ "content": "</think>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151352": {
+ "content": "<tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151353": {
+ "content": "</tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151354": {
+ "content": "<tool_response>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151355": {
+ "content": "</tool_response>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151356": {
+ "content": "<arg_key>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151357": {
+ "content": "</arg_key>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151358": {
+ "content": "<arg_value>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151359": {
+ "content": "</arg_value>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151360": {
+ "content": "/nothink",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151361": {
+ "content": "<|begin_of_box|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151362": {
+ "content": "<|end_of_box|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151363": {
+ "content": "<|image|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151364": {
+ "content": "<|video|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ }
+ },
+ "additional_special_tokens": [
+ "<|endoftext|>",
+ "[MASK]",
+ "[gMASK]",
+ "[sMASK]",
+ "<sop>",
+ "<eop>",
+ "<|system|>",
+ "<|user|>",
+ "<|assistant|>",
+ "<|observation|>",
+ "<|begin_of_image|>",
+ "<|end_of_image|>",
+ "<|begin_of_video|>",
+ "<|end_of_video|>",
+ "<|begin_of_audio|>",
+ "<|end_of_audio|>",
+ "<|begin_of_transcription|>",
+ "<|end_of_transcription|>",
+ "<|code_prefix|>",
+ "<|code_middle|>",
+ "<|code_suffix|>",
+ "/nothink"
+ ],
+ "clean_up_tokenization_spaces": false,
+ "do_lower_case": false,
+ "eos_token": "<|endoftext|>",
+ "extra_special_tokens": {},
+ "model_max_length": 128000,
+ "pad_token": "<|endoftext|>",
+ "padding_side": "left",
+ "remove_space": false,
+ "tokenizer_class": "PreTrainedTokenizerFast"
+ }