ArtusDev committed
Commit 48ba80d · verified · 1 Parent(s): c8a185e

Upload folder using huggingface_hub

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md CHANGED
@@ -1,244 +1,131 @@
1
  ---
2
- base_model: TheDrummer/Precog-24B-v1
3
- base_model_relation: quantized
4
- quantized_by: ArtusDev
5
  ---
6
- <style>
7
- .container-dark {
8
- font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif;
9
- line-height: 1.6;
10
- color: #d4d4d4;
11
- }
12
- a {
13
- color: #569cd6;
14
- text-decoration: none;
15
- font-weight: 600;
16
- }
17
- a:hover {
18
- text-decoration: underline;
19
- }
20
- .card-dark {
21
- background-color: #252526;
22
- border-radius: 12px;
23
- padding: 24px;
24
- margin-bottom: 20px;
25
- box-shadow: 0 4px 12px rgba(0,0,0,0.3);
26
- border: 1px solid #3c3c3c;
27
- }
28
- .card-dark h1 {
29
- font-size: 2.2em;
30
- color: #ffffff;
31
- text-align: center;
32
- margin-bottom: 10px;
33
- }
34
- .card-dark.card-dark-title h1 {
35
- font-size: 1.5em;
36
- }
37
- .card-dark .subtitle {
38
- text-align: center;
39
- font-size: 1.1em;
40
- color: #a0a0a0;
41
- }
42
- .card-dark h2 {
43
- font-size: 1.5em;
44
- margin-top: 0;
45
- padding-bottom: 10px;
46
- border-bottom: 1px solid #3c3c3c;
47
- color: #c586c0;
48
- }
49
- .card-dark h3 {
50
- font-size: 1.2em;
51
- color: #d4d4d4;
52
- }
53
- .styled-table {
54
- display: table;
55
- border: none;
56
- width: 100%;
57
- font-size: 0.95em;
58
- margin-bottom: 0px;
59
- }
60
- .styled-table thead th {
61
- background-color: #333333;
62
- color: #c586c0;
63
- text-align: left;
64
- }
65
- .styled-table th {
66
- padding: 12px 15px;
67
- }
68
- .styled-table td {
69
- padding: 0;
70
- }
71
- .styled-table table, .styled-table th, .styled-table td {
72
- border-left: none;
73
- border-right: none;
74
- border-bottom: none;
75
- }
76
- .styled-table td {
77
- border-bottom: 1px solid #3c3c3c;
78
- }
79
- .styled-table tbody tr {
80
- transition: background-color 0.1s ease;
81
- }
82
- .styled-table tbody tr:hover {
83
- background-color: #3a3a3a;
84
- }
85
- .styled-table tr:last-child td {
86
- border-bottom: none;
87
- }
88
- .styled-table td a {
89
- display: block;
90
- padding: 12px 15px;
91
- }
92
- .styled-table td a.fake-link {
93
- text-decoration:none;
94
- color:inherit;
95
- }
96
- details {
97
- margin-top: 20px;
98
- border: 1px solid #3c3c3c;
99
- border-radius: 8px;
100
- overflow: hidden;
101
- }
102
- summary {
103
- cursor: pointer;
104
- padding: 12px 18px;
105
- background-color: #6A5ACD;
106
- font-weight: 600;
107
- display: flex;
108
- align-items: center;
109
- gap: 10px;
110
- justify-content: space-between;
111
- list-style: none;
112
- }
113
- summary::-webkit-details-marker {
114
- display: none;
115
- }
116
- summary:hover {
117
- filter: brightness(1.1);
118
- }
119
- summary::after {
120
- content: '';
121
- display: inline-block;
122
- width: 8px;
123
- height: 8px;
124
- border-bottom: 2px solid white;
125
- border-right: 2px solid white;
126
- transform: rotate(45deg);
127
- transition: transform 0.3s ease;
128
- }
129
- details[open] > summary::after {
130
- transform: rotate(225deg);
131
- }
132
- .details-content {
133
- padding: 18px;
134
- }
135
- .btn-purple {
136
- display: inline-block;
137
- background-color: #6A5ACD;
138
- color: white !important;
139
- padding: 12px 24px;
140
- border-radius: 8px;
141
- text-decoration: none;
142
- font-weight: 600;
143
- transition: background-color 0.3s ease, transform 0.2s ease;
144
- text-align: center;
145
- }
146
- .btn-purple:hover {
147
- background-color: #7B68EE;
148
- transform: translateY(-2px);
149
- }
150
- </style>
151
-
152
- <div class="container-dark">
153
-
154
- <div class="card-dark card-dark-title">
155
- <h1>ArtusDev/TheDrummer_Precog-24B-v1-EXL3</h1>
156
- <p class="subtitle">
157
- EXL3 quants of <a href="https://huggingface.co/TheDrummer/Precog-24B-v1" target="_blank">TheDrummer/Precog-24B-v1</a> using <a href="https://github.com/turboderp-org/exllamav3/" target="_blank">exllamav3</a> for quantization.
158
- </p>
159
- </div>
160
-
161
- <div class="card-dark">
162
- <h2>Quants</h2>
163
- <table class="styled-table">
164
- <thead>
165
- <tr>
166
- <th>Quant</th>
167
- <th>BPW</th>
168
- <th>Head Bits</th>
169
- <th>Size (GB)</th>
170
- </tr>
171
- </thead>
172
- <tbody>
173
- <tr>
174
- <td><a href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/2.5bpw_H6" target="_blank">2.5_H6</a></td>
175
- <td><a class="fake-link" href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/2.5bpw_H6" target="_blank">2.5</a></td>
176
- <td><a class="fake-link" href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/2.5bpw_H6" target="_blank">6</a></td>
177
- <td><a class="fake-link" href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/2.5bpw_H6" target="_blank">8.82</a></td>
178
- </tr>
179
- <tr>
180
- <td><a href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/3.0bpw_H6" target="_blank">3.0_H6</a></td>
181
- <td><a class="fake-link" href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/3.0bpw_H6" target="_blank">3.0</a></td>
182
- <td><a class="fake-link" href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/3.0bpw_H6" target="_blank">6</a></td>
183
- <td><a class="fake-link" href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/3.0bpw_H6" target="_blank">10.21</a></td>
184
- </tr>
185
- <tr>
186
- <td><a href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/3.5bpw_H6" target="_blank">3.5_H6</a></td>
187
- <td><a class="fake-link" href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/3.5bpw_H6" target="_blank">3.5</a></td>
188
- <td><a class="fake-link" href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/3.5bpw_H6" target="_blank">6</a></td>
189
- <td><a class="fake-link" href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/3.5bpw_H6" target="_blank">11.60</a></td>
190
- </tr>
191
- <tr>
192
- <td><a href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/4.5bpw_H6" target="_blank">4.5_H6</a></td>
193
- <td><a class="fake-link" href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/4.5bpw_H6" target="_blank">4.5</a></td>
194
- <td><a class="fake-link" href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/4.5bpw_H6" target="_blank">6</a></td>
195
- <td><a class="fake-link" href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/4.5bpw_H6" target="_blank">14.37</a></td>
196
- </tr>
197
- <tr>
198
- <td><a href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/5.0bpw_H6" target="_blank">5.0_H6</a></td>
199
- <td><a class="fake-link" href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/5.0bpw_H6" target="_blank">5.0</a></td>
200
- <td><a class="fake-link" href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/5.0bpw_H6" target="_blank">6</a></td>
201
- <td><a class="fake-link" href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/5.0bpw_H6" target="_blank">15.77</a></td>
202
- </tr>
203
- <tr>
204
- <td><a href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/6.0bpw_H6" target="_blank">6.0_H6</a></td>
205
- <td><a class="fake-link" href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/6.0bpw_H6" target="_blank">6.0</a></td>
206
- <td><a class="fake-link" href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/6.0bpw_H6" target="_blank">6</a></td>
207
- <td><a class="fake-link" href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/6.0bpw_H6" target="_blank">18.55</a></td>
208
- </tr>
209
- <tr>
210
- <td><a href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/8.0bpw_H8" target="_blank">8.0_H8</a></td>
211
- <td><a class="fake-link" href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/8.0bpw_H8" target="_blank">8.0</a></td>
212
- <td><a class="fake-link" href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/8.0bpw_H8" target="_blank">8</a></td>
213
- <td><a class="fake-link" href="https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3/tree/8.0bpw_H8" target="_blank">24.27</a></td>
214
- </tr>
215
- </tbody>
216
- </table>
217
- </div>
218
-
219
- <div class="card-dark">
220
- <h2>How to Download and Use Quants</h2>
221
- <p>You can download quants by targeting specific size using the Hugging Face CLI.</p>
222
- <details>
223
- <summary>Click for download commands</summary>
224
- <div class="details-content">
225
- <b>1. Install huggingface-cli:</b>
226
- <pre><code>pip install -U "huggingface_hub[cli]"</code></pre>
227
- <b>2. Download a specific quant:</b>
228
- <pre><code>huggingface-cli download ArtusDev/TheDrummer_Precog-24B-v1-EXL3 --revision "5.0bpw_H6" --local-dir ./</code></pre>
229
- </div>
230
- </details>
231
- <p>EXL3 quants can be run with any inference client that supports EXL3, such as <a href="https://github.com/theroyallab/tabbyapi" target="_blank"><b>TabbyAPI</b></a>. Refer to <a href="https://github.com/theroyallab/tabbyAPI/wiki/01.-Getting-Started" target="_blank">documentation</a> for set up instructions.</p>
232
- </div>
233
-
234
- <div class="card-dark">
235
- <h2>Quant Requests</h2>
236
- <div style="text-align: center; margin-top: 25px;">
237
- <a href="https://huggingface.co/ArtusDev/requests-exl/discussions/new?title=[MODEL_NAME_HERE]&description=[MODEL_HF_LINK_HERE]" class="btn-purple" target="_blank">Request EXL3 Quants</a>
238
- </div>
239
- <p class="subtitle">
240
- See <a href="https://huggingface.co/ArtusDev/requests-exl" target="_blank">EXL community hub</a> for request guidelines.
241
- </p>
242
- </div>
243
-
244
- </div>
 
1
  ---
2
+ base_model:
3
+ - mistralai/Magistral-Small-2509
 
4
  ---
5
+
6
+ # Join our Discord! https://discord.gg/BeaverAI
7
+ ## More than 8000 members strong 💪 A hub for users and makers alike!
8
+ ---
9
+ ## Drummer is open for new opportunities: https://linktr.ee/thelocaldrummer
10
+ ### Thank you to everyone who subscribed through [Patreon](https://www.patreon.com/TheDrummer). Your support helps me chug along in this brave new world.
11
+
12
+ ### FAQ for those out-of-the-loop
13
+
14
+ <details>
15
+ <summary>🐶 Who is Drummer?</summary>
16
+
17
+ Hi! I'm Drummer. I'm a Software Engineer with experience in JavaScript, Golang, Python, and generally engineering the crap out of things.
18
+
19
+ Why I'm in the AI space:
20
+
21
+ - **Exploration:** Everyone is trying to figure out how AI works and what it's capable of. I am too, just not by creating the smartest, safest model at all costs.
22
+ - **Upskill:** The world is headed towards AI. It is here to stay. This has been my way of brushing up on this new kind of computing challenge.
23
+ - **Value:** I yearn to create value. I feel satisfaction and fulfillment in providing something meaningful for others.
24
+ - **Fun:** It's just fun using and making models. It's also fun coming up with theories and realizing them in practice (training AI).
25
+
26
+ I started my tuning venture back in mid-2024 when I wanted to improve a model's literary capabilities.
27
+ I've come a long way since then and I have branched out and specialized.
28
+ Foundational models today are optimized for non-creative uses, and I believe there is a place for AI in creativity and entertainment.
29
+
30
+ I am here to take *the road less traveled by*.
31
+
32
+ </details>
33
+
34
+ <details>
35
+ <summary>❓ What are my models like?</summary>
36
+
37
+ **Bottom line:** My models are usually geared towards creativity, usability, and entertainment!
38
+
39
+ While intelligence, correctness, and problem solving are not my priority, they are still among the many qualities I want in my models.
40
+
41
+ The primary goal is to enhance the experience for users who want models for creative purposes and other use cases that require no alignment.
42
+
43
+ In an effort to make it clear to myself and to others what I'm aiming for, I've identified certain qualities that my users often want:
44
+
45
+ Creativity
46
+ - **Writing:** Does it string together words and sentences in a pleasant & effective way? Does it feel like a writer?
47
+ - **Dynamism:** How good is the AI at being compelling and intriguing in its storytelling?
48
+ - **Imagination:** Can the AI navigate through a plethora of possibilities? Can it skirt incoherence and rise up to absolute coherence at the end of it?
49
+
50
+ (Dis)alignment
51
+ - **Attitude:** Does it refuse in soft or hard ways? Does it lean towards certain corporate/religious/political ethics & beliefs? How does it see the user and itself?
52
+ - **Morality:** Does it know ethics? Is its language infected with forced positivity? If not, can it still moralize over difficult & dubious themes?
53
+ - **Formatting:** How stubborn is it with its established formatting? Can it create effective and novel formats to answer the prompt?
54
+
55
+ Intelligence
56
+ - **Adherence:** Can it follow instructions? Is it sticking to the prompt? Can it understand you?
57
+ - **Knowledge:** Does it know about the world in both fictional and non-fictional ways?
58
+ - **Perception:** Can it handle nuance, complexity, and logic?
59
+
60
+ If it doesn't excel in one of these qualities, or if it's overall mediocre for its size, then I would most likely iterate until I get something right.
61
+
62
+ </details>
63
+
64
+ <details>
65
+ <summary>💡 Philosophy</summary>
66
+
67
+ A person is defined by the language they use. Not whether they speak in English or German, but in how they perceive reality.
68
+
69
+ Just as we think of a serial killer as a mind that can't map 'murder' to 'evil', an innocent person is a mind that simply can't imagine 'murder'. They get confused when forced to deal with such subjects.
70
+
71
+ An AI's use of language speaks volumes about its 'perception' of reality. If a language model has been skewed and limited to a positive perception, then its ability to imagine is also limited.
72
+
73
+ Finetuning is an opportunity to adjust and broaden the language. Corporations use it to achieve safety and compliance. I'm here to ACK-
74
+
75
+ </details>
76
+
77
+ ---
78
+
79
+ [TheDrummer](https://huggingface.co/TheDrummer) proudly presents...
80
+
81
+ # Precog 24B v1
82
+
83
+ ![Screenshot 2025-11-13 at 12.04.40 PM](https://cdn-uploads.huggingface.co/production/uploads/65f2fd1c25b848bd061b5c2e/OmK3mlXFbvWRLfMpiAwSr.png)
84
+
85
+ ## Description
86
+ Precog is a 'reasoning' model that thinks in a different way. Instead of breaking down the question and working out a solution, it provides an overview of the upcoming response, like so:
87
+
88
+ ![image](https://cdn-uploads.huggingface.co/production/uploads/65f2fd1c25b848bd061b5c2e/UGoPACsKrMpxjiq0R-Dy_.png)
89
+
90
+ ![image](https://cdn-uploads.huggingface.co/production/uploads/65f2fd1c25b848bd061b5c2e/XZkYHx1Tj5WUmwy0wihpc.png)
91
+
92
+ ![image](https://cdn-uploads.huggingface.co/production/uploads/65f2fd1c25b848bd061b5c2e/zfhXJrWl3qR5QfRFsoagi.png)
93
+
94
+ The intention is to have the model write a simple, digestible draft and then use that draft to write the actual, more elaborate response. The draft can be prefilled and edited to influence the actual response. I've always wondered what would happen if the model had a solid basis for what it's about to write (as an actual response).
95
+
96
+ ### Strengths
97
+ - Improved narrative flow and storytelling.
98
+ - Provides a short draft you can inspect / modify before the AI writes the actual response, saving time and effort.
99
+ - Allows the model to plan out the response without spending too many tokens on it.
100
+ - Possibly better prompt adherence / instruction following.
101
+ - Uses the standard `<think>` format, meaning it's plug & play for most frontends once configured for reasoning.
102
+
103
+ ### Weaknesses
104
+ - The model's 'reasoning' / draft might not always align with the actual response.
105
+ - Conventional reasoning models like Behemoth R1 / Cydonia R1 might do better at detecting and portraying **nuances**.
106
+
107
+ ## Usage
108
+ - Mistral v7 Tekken
109
+ - Prefill `<think>` in case it doesn't think (see the sketch below this list).
110
+ - Make sure any additional prefills inside `<think>` work well with the new reasoning pattern.
111
+ - Like my other reasoning tunes, you can invent new think tags like `<evil_think>` or `<slowburn_think>` to influence how it should write.
112
+
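Below is a minimal, hypothetical sketch (not part of the original model card) of how the `<think>` prefill could be wired up with Hugging Face `transformers`, assuming the unquantized `TheDrummer/Precog-24B-v1` weights; the EXL3 branches in this repo need an EXL3-capable backend such as TabbyAPI instead. The model id, prompt, and sampling values are illustrative assumptions.

```python
# Hedged sketch: assumes the original (unquantized) TheDrummer/Precog-24B-v1 repo,
# enough VRAM, and a recent transformers install. Adjust ids and sampling to taste.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "TheDrummer/Precog-24B-v1"  # the EXL3 branches here require an EXL3 loader instead
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype="auto", device_map="auto")

messages = [
    {"role": "system", "content": "You are a co-writer for a slow-burn mystery."},
    {"role": "user", "content": "Continue the scene at the lighthouse."},
]

# Build the prompt from the chat template, then prefill the think tag so the
# draft is always produced; swap in <slowburn_think> etc. to steer the draft.
prompt = tokenizer.apply_chat_template(messages, tokenize=False)
prompt += "<think>"

inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
out = model.generate(**inputs, max_new_tokens=600, do_sample=True, temperature=0.8)
print(tokenizer.decode(out[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=False))
```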
113
+ ## Links
114
+ - Original: https://huggingface.co/TheDrummer/Precog-24B-v1
115
+ - GGUF: https://huggingface.co/TheDrummer/Precog-24B-v1-GGUF
116
+ - iMatrix (recommended): https://huggingface.co/bartowski/TheDrummer_Precog-24B-v1-GGUF
117
+ - EXL3: https://huggingface.co/ArtusDev/TheDrummer_Precog-24B-v1-EXL3
118
+
119
+ ## Feedback
120
+
121
+ > Cydonia R1 4.1 does nuance on a moment to moment basis good and has given me some really "human" responses that way. Whereas Precog reasoning is lazer sharp, super quick, and more RP context aware. Precog seems to remember what its doing better and execute in a logical sequence. E.g move a little closer, sit down, cross legs.
122
+
123
+ > This is quite good. The reasoning is great, short and more of a brief outline of the output which is embellished from the outline. The prose isn't wildly creative but the model seems pretty good at keeping a narrative going while not forgetting important char and plot details from earlier outputs. Seems pretty insensitive to temperature setting, didn't really feel that 1 temp was much more creative than .75, feels like the reasoning really locks it into adhering to the prompt regardless of the temp. Which is fine by me, I'll take prompt adherence with mildly creative prose over wild all over the place prose.
124
+
125
+ > First reasoning model that really felt like its reasoning was finetuned for rp/stories. Earlier Drummer reasoning tunes seemed like the reasoning was treating rp prompts/cards like an equation or science problem to solve for, whereas the reasoning here is a skeletal synopsis of the final output which makes much more sense for reasoning in rp/storywriting.
126
+
127
+ > This model, however, is more interesting. It first creates a short plot development plan and then expands it in the appropriate response. In my opinion, it works great. This is the first 24B model where I can easily reach a 28k context window and it's quite stable. The quality of the prose is... sufficient, at least for me.
128
+
129
+ > Liked this one so much I used it all evening. Thinking is really good, and the thinking is really adaptable. The prose is great, responds well to OOC hints and author's notes, does a good job at details (although it occasionally messes up situation/place/pose)
130
+
131
+ `config-v1b`
chat_template.jinja ADDED
@@ -0,0 +1,112 @@
1
+ {%- set default_system_message = 'First draft your thinking process (inner monologue) until you arrive at a response. Format your response using Markdown, and use LaTeX for any mathematical equations. Write both your thoughts and the response in the same language as the input.\n\nYour thinking process must follow the template below:[THINK]Your thoughts or/and draft, like working through an exercise on scratch paper. Be as casual and as long as you want until you are confident to generate the response. Use the same language as the input.[/THINK]Here, provide a self-contained response.' %}
2
+
3
+ {{- bos_token }}
4
+
5
+ {#- Extract system message if present -#}
6
+ {%- if messages[0]['role'] == 'system' %}
7
+ {%- if messages[0]['content'] is string %}
8
+ {%- set raw_system_message = messages[0]['content'] %}
9
+ {%- else %}
10
+ {%- set raw_system_message = messages[0]['content'][0]['text'] %}
11
+ {%- endif %}
12
+ {%- set loop_messages = messages[1:] %}
13
+ {%- else %}
14
+ {%- set raw_system_message = "" %}
15
+ {%- set loop_messages = messages %}
16
+ {%- endif %}
17
+
18
+ {#- Detect THINK flag by searching for exact phrase "/think" -#}
19
+ {%- if "/think" in raw_system_message %}
20
+ {%- set THINK = True %}
21
+ {%- else %}
22
+ {%- set THINK = False %}
23
+ {%- endif %}
24
+
25
+ {#- Apply logic depending on THINK flag -#}
26
+ {%- if THINK %}
27
+ {%- if raw_system_message|length > 0 %}
28
+ {%- set system_message = default_system_message + "\n\n" + raw_system_message %}
29
+ {%- else %}
30
+ {%- set system_message = default_system_message %}
31
+ {%- endif %}
32
+ {{- '[SYSTEM_PROMPT]' + system_message + '[/SYSTEM_PROMPT]' }}
33
+ {%- else %}
34
+ {%- if raw_system_message|length > 0 %}
35
+ {{- '[SYSTEM_PROMPT]' + raw_system_message + '[/SYSTEM_PROMPT]' }}
36
+ {%- endif %}
37
+ {%- endif %}
38
+
39
+
40
+ {#- Tool description appended ONLY to last user message. Edits made by Unsloth #}
41
+ {%- set tools_description = "" %}
42
+ {%- set has_tools = false %}
43
+
44
+ {%- if tools is defined and tools is not none and tools|length > 0 %}
45
+ {%- set has_tools = true %}
46
+ {%- set tools_description = "[AVAILABLE_TOOLS]" + (tools | tojson) + "[/AVAILABLE_TOOLS]" %}
47
+ {{- tools_description }}
48
+ {%- endif %}
49
+
50
+ {%- for message in loop_messages %}
51
+ {%- if message['role'] == 'user' %}
52
+
53
+ {%- if message['content'] is string %}
54
+ {{- '[INST]' + message['content'] + '[/INST]' }}
55
+ {%- else %}
56
+ {{- '[INST]' }}
57
+ {%- for block in message['content'] %}
58
+ {%- if block['type'] == 'text' %}
59
+ {%- if block['text'] is defined %}
60
+ {{- block['text'] }}
61
+ {%- else %}
62
+ {{- block['content'] }}
63
+ {%- endif %}
64
+ {%- elif block['type'] in ['image', 'image_url'] %}
65
+ {{- '[IMG]' }}
66
+ {%- else %}
67
+ {{- raise_exception('Only text and image blocks are supported in message content!') }}
68
+ {%- endif %}
69
+ {%- endfor %}
70
+ {{- '[/INST]' }}
71
+ {%- endif %}
72
+
73
+ {%- elif message['role'] == 'system' %}
74
+ {%- if message['content'] is string %}
75
+ {{- '[SYSTEM_PROMPT]' + message['content'] + '[/SYSTEM_PROMPT]' }}
76
+ {%- else %}
77
+ {{- '[SYSTEM_PROMPT]' + message['content'][0]['text'] + '[/SYSTEM_PROMPT]' }}
78
+ {%- endif %}
79
+
80
+ {%- elif message['role'] == 'assistant' %}
81
+ {%- if message['content'] is string %}
82
+ {{- message['content'] }}
83
+ {%- elif message['content'] is iterable %}
84
+ {{- message['content'][0]['text'] }}
85
+ {%- endif %}
86
+
87
+ {%- if message['tool_calls'] is defined and message['tool_calls'] is not none %}
88
+ {%- for tool in message['tool_calls'] %}
89
+ {%- set arguments = tool['function']['arguments'] %}
90
+ {%- if arguments is not string %}
91
+ {%- set arguments = arguments|tojson %}
92
+ {%- endif %}
93
+ {{- "[TOOL_CALLS]" + tool['function']['name'] + "[ARGS]" + arguments }}
94
+ {%- endfor %}
95
+ {%- endif %}
96
+
97
+ {{- eos_token }}
98
+
99
+ {%- elif message["role"] == "tool_results" or message["role"] == "tool" %}
100
+ {%- if message.content is defined and message.content.content is defined %}
101
+ {%- set content = message.content.content %}
102
+ {%- else %}
103
+ {%- set content = message.content %}
104
+ {%- endif %}
105
+ {{- "[TOOL_RESULTS]" + content|string + "[/TOOL_RESULTS]" }}
106
+
107
+ {%- else %}
108
+ {{- raise_exception('Only user, system, assistant and tool roles are supported!') }}
109
+ {%- endif %}
110
+ {%- endfor %}
111
+
112
+ {#- Licensed under the Apache License, Version 2.0 (the "License") #}
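As a rough illustration (an assumption, not something shipped with this repo), the template above can be exercised with `transformers`' `apply_chat_template`; putting the literal `/think` marker in the system message is what flips the `THINK` branch and prepends the default drafting instructions:

```python
# Minimal sketch, assuming the tokenizer files in this repo load as-is.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("ArtusDev/TheDrummer_Precog-24B-v1-EXL3")

messages = [
    {"role": "system", "content": "/think Keep answers concise."},
    {"role": "user", "content": "Summarize the plot so far."},
]

# With "/think" present, the rendered prompt should start with the default
# [SYSTEM_PROMPT] ... [THINK] ... [/THINK] drafting instructions defined above.
print(tok.apply_chat_template(messages, tokenize=False))
```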
config.json ADDED
@@ -0,0 +1,39 @@
1
+ {
2
+ "architectures": [
3
+ "MistralForCausalLM"
4
+ ],
5
+ "attention_dropout": 0.0,
6
+ "bos_token_id": 1,
7
+ "dtype": "bfloat16",
8
+ "eos_token_id": 2,
9
+ "head_dim": 128,
10
+ "hidden_act": "silu",
11
+ "hidden_size": 5120,
12
+ "initializer_range": 0.02,
13
+ "intermediate_size": 32768,
14
+ "max_position_embeddings": 131072,
15
+ "model_type": "mistral",
16
+ "num_attention_heads": 32,
17
+ "num_hidden_layers": 40,
18
+ "num_key_value_heads": 8,
19
+ "pad_token_id": 11,
20
+ "rms_norm_eps": 1e-05,
21
+ "rope_theta": 1000000000.0,
22
+ "sliding_window": null,
23
+ "tie_word_embeddings": false,
24
+ "transformers_version": "4.57.1",
25
+ "use_cache": false,
26
+ "vocab_size": 131072,
27
+ "quantization_config": {
28
+ "quant_method": "exl3",
29
+ "version": "0.0.12",
30
+ "bits": 4.0,
31
+ "head_bits": 6,
32
+ "calibration": {
33
+ "rows": 250,
34
+ "cols": 2048
35
+ },
36
+ "out_scales": "auto",
37
+ "codebook": "mcg"
38
+ }
39
+ }
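The `quantization_config` above describes the 4.0 bpw / H6 quant stored on the main branch; the other bit-widths live on separate branches named `<bpw>bpw_H<head_bits>`. A hedged sketch of fetching a single branch with `huggingface_hub` (the revision name and local path are assumptions):

```python
# Sketch: download one quant branch; pick the revision that fits your VRAM budget.
from huggingface_hub import snapshot_download

path = snapshot_download(
    repo_id="ArtusDev/TheDrummer_Precog-24B-v1-EXL3",
    revision="5.0bpw_H6",                      # branch naming follows <bpw>bpw_H<head_bits>
    local_dir="./Precog-24B-v1-EXL3-5.0bpw",   # illustrative path
)
print(path)
```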
generation_config.json ADDED
@@ -0,0 +1,10 @@
1
+ {
2
+ "_from_model_config": true,
3
+ "bos_token_id": 1,
4
+ "do_sample": true,
5
+ "eos_token_id": [
6
+ 2
7
+ ],
8
+ "pad_token_id": 11,
9
+ "transformers_version": "4.57.1"
10
+ }
model-00001-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e08fe0af4e75b69b9d5bb169692c22b3af8321718ac7b575c036467e5a608cda
3
+ size 8574989712
model-00002-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:54c66394903a92e6ccfbd03bfb2b2043a04c419998817db6bcd694e2bbe260bc
3
+ size 4398189460
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
 
quantization_config.json ADDED
The diff for this file is too large to render. See raw diff
 
special_tokens_map.json ADDED
@@ -0,0 +1,1032 @@
1
+ {
2
+ "additional_special_tokens": [
3
+ "<unk>",
4
+ "<s>",
5
+ "</s>",
6
+ "[INST]",
7
+ "[/INST]",
8
+ "[AVAILABLE_TOOLS]",
9
+ "[/AVAILABLE_TOOLS]",
10
+ "[TOOL_RESULTS]",
11
+ "[/TOOL_RESULTS]",
12
+ "[TOOL_CALLS]",
13
+ "[IMG]",
14
+ "<pad>",
15
+ "[IMG_BREAK]",
16
+ "[IMG_END]",
17
+ "[PREFIX]",
18
+ "[MIDDLE]",
19
+ "[SUFFIX]",
20
+ "[SYSTEM_PROMPT]",
21
+ "[/SYSTEM_PROMPT]",
22
+ "[TOOL_CONTENT]",
23
+ "<SPECIAL_20>",
24
+ "<SPECIAL_21>",
25
+ "<SPECIAL_22>",
26
+ "<SPECIAL_23>",
27
+ "<SPECIAL_24>",
28
+ "<SPECIAL_25>",
29
+ "<SPECIAL_26>",
30
+ "<SPECIAL_27>",
31
+ "<SPECIAL_28>",
32
+ "<SPECIAL_29>",
33
+ "<SPECIAL_30>",
34
+ "<SPECIAL_31>",
35
+ "[ARGS]",
36
+ "[CALL_ID]",
37
+ "[THINK]",
38
+ "[/THINK]",
39
+ "<SPECIAL_36>",
40
+ "<SPECIAL_37>",
41
+ "<SPECIAL_38>",
42
+ "<SPECIAL_39>",
43
+ "<SPECIAL_40>",
44
+ "<SPECIAL_41>",
45
+ "<SPECIAL_42>",
46
+ "<SPECIAL_43>",
47
+ "<SPECIAL_44>",
48
+ "<SPECIAL_45>",
49
+ "<SPECIAL_46>",
50
+ "<SPECIAL_47>",
51
+ "<SPECIAL_48>",
52
+ "<SPECIAL_49>",
53
+ "<SPECIAL_50>",
54
+ "<SPECIAL_51>",
55
+ "<SPECIAL_52>",
56
+ "<SPECIAL_53>",
57
+ "<SPECIAL_54>",
58
+ "<SPECIAL_55>",
59
+ "<SPECIAL_56>",
60
+ "<SPECIAL_57>",
61
+ "<SPECIAL_58>",
62
+ "<SPECIAL_59>",
63
+ "<SPECIAL_60>",
64
+ "<SPECIAL_61>",
65
+ "<SPECIAL_62>",
66
+ "<SPECIAL_63>",
67
+ "<SPECIAL_64>",
68
+ "<SPECIAL_65>",
69
+ "<SPECIAL_66>",
70
+ "<SPECIAL_67>",
71
+ "<SPECIAL_68>",
72
+ "<SPECIAL_69>",
73
+ "<SPECIAL_70>",
74
+ "<SPECIAL_71>",
75
+ "<SPECIAL_72>",
76
+ "<SPECIAL_73>",
77
+ "<SPECIAL_74>",
78
+ "<SPECIAL_75>",
79
+ "<SPECIAL_76>",
80
+ "<SPECIAL_77>",
81
+ "<SPECIAL_78>",
82
+ "<SPECIAL_79>",
83
+ "<SPECIAL_80>",
84
+ "<SPECIAL_81>",
85
+ "<SPECIAL_82>",
86
+ "<SPECIAL_83>",
87
+ "<SPECIAL_84>",
88
+ "<SPECIAL_85>",
89
+ "<SPECIAL_86>",
90
+ "<SPECIAL_87>",
91
+ "<SPECIAL_88>",
92
+ "<SPECIAL_89>",
93
+ "<SPECIAL_90>",
94
+ "<SPECIAL_91>",
95
+ "<SPECIAL_92>",
96
+ "<SPECIAL_93>",
97
+ "<SPECIAL_94>",
98
+ "<SPECIAL_95>",
99
+ "<SPECIAL_96>",
100
+ "<SPECIAL_97>",
101
+ "<SPECIAL_98>",
102
+ "<SPECIAL_99>",
103
+ "<SPECIAL_100>",
104
+ "<SPECIAL_101>",
105
+ "<SPECIAL_102>",
106
+ "<SPECIAL_103>",
107
+ "<SPECIAL_104>",
108
+ "<SPECIAL_105>",
109
+ "<SPECIAL_106>",
110
+ "<SPECIAL_107>",
111
+ "<SPECIAL_108>",
112
+ "<SPECIAL_109>",
113
+ "<SPECIAL_110>",
114
+ "<SPECIAL_111>",
115
+ "<SPECIAL_112>",
116
+ "<SPECIAL_113>",
117
+ "<SPECIAL_114>",
118
+ "<SPECIAL_115>",
119
+ "<SPECIAL_116>",
120
+ "<SPECIAL_117>",
121
+ "<SPECIAL_118>",
122
+ "<SPECIAL_119>",
123
+ "<SPECIAL_120>",
124
+ "<SPECIAL_121>",
125
+ "<SPECIAL_122>",
126
+ "<SPECIAL_123>",
127
+ "<SPECIAL_124>",
128
+ "<SPECIAL_125>",
129
+ "<SPECIAL_126>",
130
+ "<SPECIAL_127>",
131
+ "<SPECIAL_128>",
132
+ "<SPECIAL_129>",
133
+ "<SPECIAL_130>",
134
+ "<SPECIAL_131>",
135
+ "<SPECIAL_132>",
136
+ "<SPECIAL_133>",
137
+ "<SPECIAL_134>",
138
+ "<SPECIAL_135>",
139
+ "<SPECIAL_136>",
140
+ "<SPECIAL_137>",
141
+ "<SPECIAL_138>",
142
+ "<SPECIAL_139>",
143
+ "<SPECIAL_140>",
144
+ "<SPECIAL_141>",
145
+ "<SPECIAL_142>",
146
+ "<SPECIAL_143>",
147
+ "<SPECIAL_144>",
148
+ "<SPECIAL_145>",
149
+ "<SPECIAL_146>",
150
+ "<SPECIAL_147>",
151
+ "<SPECIAL_148>",
152
+ "<SPECIAL_149>",
153
+ "<SPECIAL_150>",
154
+ "<SPECIAL_151>",
155
+ "<SPECIAL_152>",
156
+ "<SPECIAL_153>",
157
+ "<SPECIAL_154>",
158
+ "<SPECIAL_155>",
159
+ "<SPECIAL_156>",
160
+ "<SPECIAL_157>",
161
+ "<SPECIAL_158>",
162
+ "<SPECIAL_159>",
163
+ "<SPECIAL_160>",
164
+ "<SPECIAL_161>",
165
+ "<SPECIAL_162>",
166
+ "<SPECIAL_163>",
167
+ "<SPECIAL_164>",
168
+ "<SPECIAL_165>",
169
+ "<SPECIAL_166>",
170
+ "<SPECIAL_167>",
171
+ "<SPECIAL_168>",
172
+ "<SPECIAL_169>",
173
+ "<SPECIAL_170>",
174
+ "<SPECIAL_171>",
175
+ "<SPECIAL_172>",
176
+ "<SPECIAL_173>",
177
+ "<SPECIAL_174>",
178
+ "<SPECIAL_175>",
179
+ "<SPECIAL_176>",
180
+ "<SPECIAL_177>",
181
+ "<SPECIAL_178>",
182
+ "<SPECIAL_179>",
183
+ "<SPECIAL_180>",
184
+ "<SPECIAL_181>",
185
+ "<SPECIAL_182>",
186
+ "<SPECIAL_183>",
187
+ "<SPECIAL_184>",
188
+ "<SPECIAL_185>",
189
+ "<SPECIAL_186>",
190
+ "<SPECIAL_187>",
191
+ "<SPECIAL_188>",
192
+ "<SPECIAL_189>",
193
+ "<SPECIAL_190>",
194
+ "<SPECIAL_191>",
195
+ "<SPECIAL_192>",
196
+ "<SPECIAL_193>",
197
+ "<SPECIAL_194>",
198
+ "<SPECIAL_195>",
199
+ "<SPECIAL_196>",
200
+ "<SPECIAL_197>",
201
+ "<SPECIAL_198>",
202
+ "<SPECIAL_199>",
203
+ "<SPECIAL_200>",
204
+ "<SPECIAL_201>",
205
+ "<SPECIAL_202>",
206
+ "<SPECIAL_203>",
207
+ "<SPECIAL_204>",
208
+ "<SPECIAL_205>",
209
+ "<SPECIAL_206>",
210
+ "<SPECIAL_207>",
211
+ "<SPECIAL_208>",
212
+ "<SPECIAL_209>",
213
+ "<SPECIAL_210>",
214
+ "<SPECIAL_211>",
215
+ "<SPECIAL_212>",
216
+ "<SPECIAL_213>",
217
+ "<SPECIAL_214>",
218
+ "<SPECIAL_215>",
219
+ "<SPECIAL_216>",
220
+ "<SPECIAL_217>",
221
+ "<SPECIAL_218>",
222
+ "<SPECIAL_219>",
223
+ "<SPECIAL_220>",
224
+ "<SPECIAL_221>",
225
+ "<SPECIAL_222>",
226
+ "<SPECIAL_223>",
227
+ "<SPECIAL_224>",
228
+ "<SPECIAL_225>",
229
+ "<SPECIAL_226>",
230
+ "<SPECIAL_227>",
231
+ "<SPECIAL_228>",
232
+ "<SPECIAL_229>",
233
+ "<SPECIAL_230>",
234
+ "<SPECIAL_231>",
235
+ "<SPECIAL_232>",
236
+ "<SPECIAL_233>",
237
+ "<SPECIAL_234>",
238
+ "<SPECIAL_235>",
239
+ "<SPECIAL_236>",
240
+ "<SPECIAL_237>",
241
+ "<SPECIAL_238>",
242
+ "<SPECIAL_239>",
243
+ "<SPECIAL_240>",
244
+ "<SPECIAL_241>",
245
+ "<SPECIAL_242>",
246
+ "<SPECIAL_243>",
247
+ "<SPECIAL_244>",
248
+ "<SPECIAL_245>",
249
+ "<SPECIAL_246>",
250
+ "<SPECIAL_247>",
251
+ "<SPECIAL_248>",
252
+ "<SPECIAL_249>",
253
+ "<SPECIAL_250>",
254
+ "<SPECIAL_251>",
255
+ "<SPECIAL_252>",
256
+ "<SPECIAL_253>",
257
+ "<SPECIAL_254>",
258
+ "<SPECIAL_255>",
259
+ "<SPECIAL_256>",
260
+ "<SPECIAL_257>",
261
+ "<SPECIAL_258>",
262
+ "<SPECIAL_259>",
263
+ "<SPECIAL_260>",
264
+ "<SPECIAL_261>",
265
+ "<SPECIAL_262>",
266
+ "<SPECIAL_263>",
267
+ "<SPECIAL_264>",
268
+ "<SPECIAL_265>",
269
+ "<SPECIAL_266>",
270
+ "<SPECIAL_267>",
271
+ "<SPECIAL_268>",
272
+ "<SPECIAL_269>",
273
+ "<SPECIAL_270>",
274
+ "<SPECIAL_271>",
275
+ "<SPECIAL_272>",
276
+ "<SPECIAL_273>",
277
+ "<SPECIAL_274>",
278
+ "<SPECIAL_275>",
279
+ "<SPECIAL_276>",
280
+ "<SPECIAL_277>",
281
+ "<SPECIAL_278>",
282
+ "<SPECIAL_279>",
283
+ "<SPECIAL_280>",
284
+ "<SPECIAL_281>",
285
+ "<SPECIAL_282>",
286
+ "<SPECIAL_283>",
287
+ "<SPECIAL_284>",
288
+ "<SPECIAL_285>",
289
+ "<SPECIAL_286>",
290
+ "<SPECIAL_287>",
291
+ "<SPECIAL_288>",
292
+ "<SPECIAL_289>",
293
+ "<SPECIAL_290>",
294
+ "<SPECIAL_291>",
295
+ "<SPECIAL_292>",
296
+ "<SPECIAL_293>",
297
+ "<SPECIAL_294>",
298
+ "<SPECIAL_295>",
299
+ "<SPECIAL_296>",
300
+ "<SPECIAL_297>",
301
+ "<SPECIAL_298>",
302
+ "<SPECIAL_299>",
303
+ "<SPECIAL_300>",
304
+ "<SPECIAL_301>",
305
+ "<SPECIAL_302>",
306
+ "<SPECIAL_303>",
307
+ "<SPECIAL_304>",
308
+ "<SPECIAL_305>",
309
+ "<SPECIAL_306>",
310
+ "<SPECIAL_307>",
311
+ "<SPECIAL_308>",
312
+ "<SPECIAL_309>",
313
+ "<SPECIAL_310>",
314
+ "<SPECIAL_311>",
315
+ "<SPECIAL_312>",
316
+ "<SPECIAL_313>",
317
+ "<SPECIAL_314>",
318
+ "<SPECIAL_315>",
319
+ "<SPECIAL_316>",
320
+ "<SPECIAL_317>",
321
+ "<SPECIAL_318>",
322
+ "<SPECIAL_319>",
323
+ "<SPECIAL_320>",
324
+ "<SPECIAL_321>",
325
+ "<SPECIAL_322>",
326
+ "<SPECIAL_323>",
327
+ "<SPECIAL_324>",
328
+ "<SPECIAL_325>",
329
+ "<SPECIAL_326>",
330
+ "<SPECIAL_327>",
331
+ "<SPECIAL_328>",
332
+ "<SPECIAL_329>",
333
+ "<SPECIAL_330>",
334
+ "<SPECIAL_331>",
335
+ "<SPECIAL_332>",
336
+ "<SPECIAL_333>",
337
+ "<SPECIAL_334>",
338
+ "<SPECIAL_335>",
339
+ "<SPECIAL_336>",
340
+ "<SPECIAL_337>",
341
+ "<SPECIAL_338>",
342
+ "<SPECIAL_339>",
343
+ "<SPECIAL_340>",
344
+ "<SPECIAL_341>",
345
+ "<SPECIAL_342>",
346
+ "<SPECIAL_343>",
347
+ "<SPECIAL_344>",
348
+ "<SPECIAL_345>",
349
+ "<SPECIAL_346>",
350
+ "<SPECIAL_347>",
351
+ "<SPECIAL_348>",
352
+ "<SPECIAL_349>",
353
+ "<SPECIAL_350>",
354
+ "<SPECIAL_351>",
355
+ "<SPECIAL_352>",
356
+ "<SPECIAL_353>",
357
+ "<SPECIAL_354>",
358
+ "<SPECIAL_355>",
359
+ "<SPECIAL_356>",
360
+ "<SPECIAL_357>",
361
+ "<SPECIAL_358>",
362
+ "<SPECIAL_359>",
363
+ "<SPECIAL_360>",
364
+ "<SPECIAL_361>",
365
+ "<SPECIAL_362>",
366
+ "<SPECIAL_363>",
367
+ "<SPECIAL_364>",
368
+ "<SPECIAL_365>",
369
+ "<SPECIAL_366>",
370
+ "<SPECIAL_367>",
371
+ "<SPECIAL_368>",
372
+ "<SPECIAL_369>",
373
+ "<SPECIAL_370>",
374
+ "<SPECIAL_371>",
375
+ "<SPECIAL_372>",
376
+ "<SPECIAL_373>",
377
+ "<SPECIAL_374>",
378
+ "<SPECIAL_375>",
379
+ "<SPECIAL_376>",
380
+ "<SPECIAL_377>",
381
+ "<SPECIAL_378>",
382
+ "<SPECIAL_379>",
383
+ "<SPECIAL_380>",
384
+ "<SPECIAL_381>",
385
+ "<SPECIAL_382>",
386
+ "<SPECIAL_383>",
387
+ "<SPECIAL_384>",
388
+ "<SPECIAL_385>",
389
+ "<SPECIAL_386>",
390
+ "<SPECIAL_387>",
391
+ "<SPECIAL_388>",
392
+ "<SPECIAL_389>",
393
+ "<SPECIAL_390>",
394
+ "<SPECIAL_391>",
395
+ "<SPECIAL_392>",
396
+ "<SPECIAL_393>",
397
+ "<SPECIAL_394>",
398
+ "<SPECIAL_395>",
399
+ "<SPECIAL_396>",
400
+ "<SPECIAL_397>",
401
+ "<SPECIAL_398>",
402
+ "<SPECIAL_399>",
403
+ "<SPECIAL_400>",
404
+ "<SPECIAL_401>",
405
+ "<SPECIAL_402>",
406
+ "<SPECIAL_403>",
407
+ "<SPECIAL_404>",
408
+ "<SPECIAL_405>",
409
+ "<SPECIAL_406>",
410
+ "<SPECIAL_407>",
411
+ "<SPECIAL_408>",
412
+ "<SPECIAL_409>",
413
+ "<SPECIAL_410>",
414
+ "<SPECIAL_411>",
415
+ "<SPECIAL_412>",
416
+ "<SPECIAL_413>",
417
+ "<SPECIAL_414>",
418
+ "<SPECIAL_415>",
419
+ "<SPECIAL_416>",
420
+ "<SPECIAL_417>",
421
+ "<SPECIAL_418>",
422
+ "<SPECIAL_419>",
423
+ "<SPECIAL_420>",
424
+ "<SPECIAL_421>",
425
+ "<SPECIAL_422>",
426
+ "<SPECIAL_423>",
427
+ "<SPECIAL_424>",
428
+ "<SPECIAL_425>",
429
+ "<SPECIAL_426>",
430
+ "<SPECIAL_427>",
431
+ "<SPECIAL_428>",
432
+ "<SPECIAL_429>",
433
+ "<SPECIAL_430>",
434
+ "<SPECIAL_431>",
435
+ "<SPECIAL_432>",
436
+ "<SPECIAL_433>",
437
+ "<SPECIAL_434>",
438
+ "<SPECIAL_435>",
439
+ "<SPECIAL_436>",
440
+ "<SPECIAL_437>",
441
+ "<SPECIAL_438>",
442
+ "<SPECIAL_439>",
443
+ "<SPECIAL_440>",
444
+ "<SPECIAL_441>",
445
+ "<SPECIAL_442>",
446
+ "<SPECIAL_443>",
447
+ "<SPECIAL_444>",
448
+ "<SPECIAL_445>",
449
+ "<SPECIAL_446>",
450
+ "<SPECIAL_447>",
451
+ "<SPECIAL_448>",
452
+ "<SPECIAL_449>",
453
+ "<SPECIAL_450>",
454
+ "<SPECIAL_451>",
455
+ "<SPECIAL_452>",
456
+ "<SPECIAL_453>",
457
+ "<SPECIAL_454>",
458
+ "<SPECIAL_455>",
459
+ "<SPECIAL_456>",
460
+ "<SPECIAL_457>",
461
+ "<SPECIAL_458>",
462
+ "<SPECIAL_459>",
463
+ "<SPECIAL_460>",
464
+ "<SPECIAL_461>",
465
+ "<SPECIAL_462>",
466
+ "<SPECIAL_463>",
467
+ "<SPECIAL_464>",
468
+ "<SPECIAL_465>",
469
+ "<SPECIAL_466>",
470
+ "<SPECIAL_467>",
471
+ "<SPECIAL_468>",
472
+ "<SPECIAL_469>",
473
+ "<SPECIAL_470>",
474
+ "<SPECIAL_471>",
475
+ "<SPECIAL_472>",
476
+ "<SPECIAL_473>",
477
+ "<SPECIAL_474>",
478
+ "<SPECIAL_475>",
479
+ "<SPECIAL_476>",
480
+ "<SPECIAL_477>",
481
+ "<SPECIAL_478>",
482
+ "<SPECIAL_479>",
483
+ "<SPECIAL_480>",
484
+ "<SPECIAL_481>",
485
+ "<SPECIAL_482>",
486
+ "<SPECIAL_483>",
487
+ "<SPECIAL_484>",
488
+ "<SPECIAL_485>",
489
+ "<SPECIAL_486>",
490
+ "<SPECIAL_487>",
491
+ "<SPECIAL_488>",
492
+ "<SPECIAL_489>",
493
+ "<SPECIAL_490>",
494
+ "<SPECIAL_491>",
495
+ "<SPECIAL_492>",
496
+ "<SPECIAL_493>",
497
+ "<SPECIAL_494>",
498
+ "<SPECIAL_495>",
499
+ "<SPECIAL_496>",
500
+ "<SPECIAL_497>",
501
+ "<SPECIAL_498>",
502
+ "<SPECIAL_499>",
503
+ "<SPECIAL_500>",
504
+ "<SPECIAL_501>",
505
+ "<SPECIAL_502>",
506
+ "<SPECIAL_503>",
507
+ "<SPECIAL_504>",
508
+ "<SPECIAL_505>",
509
+ "<SPECIAL_506>",
510
+ "<SPECIAL_507>",
511
+ "<SPECIAL_508>",
512
+ "<SPECIAL_509>",
513
+ "<SPECIAL_510>",
514
+ "<SPECIAL_511>",
515
+ "<SPECIAL_512>",
516
+ "<SPECIAL_513>",
517
+ "<SPECIAL_514>",
518
+ "<SPECIAL_515>",
519
+ "<SPECIAL_516>",
520
+ "<SPECIAL_517>",
521
+ "<SPECIAL_518>",
522
+ "<SPECIAL_519>",
523
+ "<SPECIAL_520>",
524
+ "<SPECIAL_521>",
525
+ "<SPECIAL_522>",
526
+ "<SPECIAL_523>",
527
+ "<SPECIAL_524>",
528
+ "<SPECIAL_525>",
529
+ "<SPECIAL_526>",
530
+ "<SPECIAL_527>",
531
+ "<SPECIAL_528>",
532
+ "<SPECIAL_529>",
533
+ "<SPECIAL_530>",
534
+ "<SPECIAL_531>",
535
+ "<SPECIAL_532>",
536
+ "<SPECIAL_533>",
537
+ "<SPECIAL_534>",
538
+ "<SPECIAL_535>",
539
+ "<SPECIAL_536>",
540
+ "<SPECIAL_537>",
541
+ "<SPECIAL_538>",
542
+ "<SPECIAL_539>",
543
+ "<SPECIAL_540>",
544
+ "<SPECIAL_541>",
545
+ "<SPECIAL_542>",
546
+ "<SPECIAL_543>",
547
+ "<SPECIAL_544>",
548
+ "<SPECIAL_545>",
549
+ "<SPECIAL_546>",
550
+ "<SPECIAL_547>",
551
+ "<SPECIAL_548>",
552
+ "<SPECIAL_549>",
553
+ "<SPECIAL_550>",
554
+ "<SPECIAL_551>",
555
+ "<SPECIAL_552>",
556
+ "<SPECIAL_553>",
557
+ "<SPECIAL_554>",
558
+ "<SPECIAL_555>",
559
+ "<SPECIAL_556>",
560
+ "<SPECIAL_557>",
561
+ "<SPECIAL_558>",
562
+ "<SPECIAL_559>",
563
+ "<SPECIAL_560>",
564
+ "<SPECIAL_561>",
565
+ "<SPECIAL_562>",
566
+ "<SPECIAL_563>",
567
+ "<SPECIAL_564>",
568
+ "<SPECIAL_565>",
569
+ "<SPECIAL_566>",
570
+ "<SPECIAL_567>",
571
+ "<SPECIAL_568>",
572
+ "<SPECIAL_569>",
573
+ "<SPECIAL_570>",
574
+ "<SPECIAL_571>",
575
+ "<SPECIAL_572>",
576
+ "<SPECIAL_573>",
577
+ "<SPECIAL_574>",
578
+ "<SPECIAL_575>",
579
+ "<SPECIAL_576>",
580
+ "<SPECIAL_577>",
581
+ "<SPECIAL_578>",
582
+ "<SPECIAL_579>",
583
+ "<SPECIAL_580>",
584
+ "<SPECIAL_581>",
585
+ "<SPECIAL_582>",
586
+ "<SPECIAL_583>",
587
+ "<SPECIAL_584>",
588
+ "<SPECIAL_585>",
589
+ "<SPECIAL_586>",
590
+ "<SPECIAL_587>",
591
+ "<SPECIAL_588>",
592
+ "<SPECIAL_589>",
593
+ "<SPECIAL_590>",
594
+ "<SPECIAL_591>",
595
+ "<SPECIAL_592>",
596
+ "<SPECIAL_593>",
597
+ "<SPECIAL_594>",
598
+ "<SPECIAL_595>",
599
+ "<SPECIAL_596>",
600
+ "<SPECIAL_597>",
601
+ "<SPECIAL_598>",
602
+ "<SPECIAL_599>",
603
+ "<SPECIAL_600>",
604
+ "<SPECIAL_601>",
605
+ "<SPECIAL_602>",
606
+ "<SPECIAL_603>",
607
+ "<SPECIAL_604>",
608
+ "<SPECIAL_605>",
609
+ "<SPECIAL_606>",
610
+ "<SPECIAL_607>",
611
+ "<SPECIAL_608>",
612
+ "<SPECIAL_609>",
613
+ "<SPECIAL_610>",
614
+ "<SPECIAL_611>",
615
+ "<SPECIAL_612>",
616
+ "<SPECIAL_613>",
617
+ "<SPECIAL_614>",
618
+ "<SPECIAL_615>",
619
+ "<SPECIAL_616>",
620
+ "<SPECIAL_617>",
621
+ "<SPECIAL_618>",
622
+ "<SPECIAL_619>",
623
+ "<SPECIAL_620>",
624
+ "<SPECIAL_621>",
625
+ "<SPECIAL_622>",
626
+ "<SPECIAL_623>",
627
+ "<SPECIAL_624>",
628
+ "<SPECIAL_625>",
629
+ "<SPECIAL_626>",
630
+ "<SPECIAL_627>",
631
+ "<SPECIAL_628>",
632
+ "<SPECIAL_629>",
633
+ "<SPECIAL_630>",
634
+ "<SPECIAL_631>",
635
+ "<SPECIAL_632>",
636
+ "<SPECIAL_633>",
637
+ "<SPECIAL_634>",
638
+ "<SPECIAL_635>",
639
+ "<SPECIAL_636>",
640
+ "<SPECIAL_637>",
641
+ "<SPECIAL_638>",
642
+ "<SPECIAL_639>",
643
+ "<SPECIAL_640>",
644
+ "<SPECIAL_641>",
645
+ "<SPECIAL_642>",
646
+ "<SPECIAL_643>",
647
+ "<SPECIAL_644>",
648
+ "<SPECIAL_645>",
649
+ "<SPECIAL_646>",
650
+ "<SPECIAL_647>",
651
+ "<SPECIAL_648>",
652
+ "<SPECIAL_649>",
653
+ "<SPECIAL_650>",
654
+ "<SPECIAL_651>",
655
+ "<SPECIAL_652>",
656
+ "<SPECIAL_653>",
657
+ "<SPECIAL_654>",
658
+ "<SPECIAL_655>",
659
+ "<SPECIAL_656>",
660
+ "<SPECIAL_657>",
661
+ "<SPECIAL_658>",
662
+ "<SPECIAL_659>",
663
+ "<SPECIAL_660>",
664
+ "<SPECIAL_661>",
665
+ "<SPECIAL_662>",
666
+ "<SPECIAL_663>",
667
+ "<SPECIAL_664>",
668
+ "<SPECIAL_665>",
669
+ "<SPECIAL_666>",
670
+ "<SPECIAL_667>",
671
+ "<SPECIAL_668>",
672
+ "<SPECIAL_669>",
673
+ "<SPECIAL_670>",
674
+ "<SPECIAL_671>",
675
+ "<SPECIAL_672>",
676
+ "<SPECIAL_673>",
677
+ "<SPECIAL_674>",
678
+ "<SPECIAL_675>",
679
+ "<SPECIAL_676>",
680
+ "<SPECIAL_677>",
681
+ "<SPECIAL_678>",
682
+ "<SPECIAL_679>",
683
+ "<SPECIAL_680>",
684
+ "<SPECIAL_681>",
685
+ "<SPECIAL_682>",
686
+ "<SPECIAL_683>",
687
+ "<SPECIAL_684>",
688
+ "<SPECIAL_685>",
689
+ "<SPECIAL_686>",
690
+ "<SPECIAL_687>",
691
+ "<SPECIAL_688>",
692
+ "<SPECIAL_689>",
693
+ "<SPECIAL_690>",
694
+ "<SPECIAL_691>",
695
+ "<SPECIAL_692>",
696
+ "<SPECIAL_693>",
697
+ "<SPECIAL_694>",
698
+ "<SPECIAL_695>",
699
+ "<SPECIAL_696>",
700
+ "<SPECIAL_697>",
701
+ "<SPECIAL_698>",
702
+ "<SPECIAL_699>",
703
+ "<SPECIAL_700>",
704
+ "<SPECIAL_701>",
705
+ "<SPECIAL_702>",
706
+ "<SPECIAL_703>",
707
+ "<SPECIAL_704>",
708
+ "<SPECIAL_705>",
709
+ "<SPECIAL_706>",
710
+ "<SPECIAL_707>",
711
+ "<SPECIAL_708>",
712
+ "<SPECIAL_709>",
713
+ "<SPECIAL_710>",
714
+ "<SPECIAL_711>",
715
+ "<SPECIAL_712>",
716
+ "<SPECIAL_713>",
717
+ "<SPECIAL_714>",
718
+ "<SPECIAL_715>",
719
+ "<SPECIAL_716>",
720
+ "<SPECIAL_717>",
721
+ "<SPECIAL_718>",
722
+ "<SPECIAL_719>",
723
+ "<SPECIAL_720>",
724
+ "<SPECIAL_721>",
725
+ "<SPECIAL_722>",
726
+ "<SPECIAL_723>",
727
+ "<SPECIAL_724>",
728
+ "<SPECIAL_725>",
729
+ "<SPECIAL_726>",
730
+ "<SPECIAL_727>",
731
+ "<SPECIAL_728>",
732
+ "<SPECIAL_729>",
733
+ "<SPECIAL_730>",
734
+ "<SPECIAL_731>",
735
+ "<SPECIAL_732>",
736
+ "<SPECIAL_733>",
737
+ "<SPECIAL_734>",
738
+ "<SPECIAL_735>",
739
+ "<SPECIAL_736>",
740
+ "<SPECIAL_737>",
741
+ "<SPECIAL_738>",
742
+ "<SPECIAL_739>",
743
+ "<SPECIAL_740>",
744
+ "<SPECIAL_741>",
745
+ "<SPECIAL_742>",
746
+ "<SPECIAL_743>",
747
+ "<SPECIAL_744>",
748
+ "<SPECIAL_745>",
749
+ "<SPECIAL_746>",
750
+ "<SPECIAL_747>",
751
+ "<SPECIAL_748>",
752
+ "<SPECIAL_749>",
753
+ "<SPECIAL_750>",
754
+ "<SPECIAL_751>",
755
+ "<SPECIAL_752>",
756
+ "<SPECIAL_753>",
757
+ "<SPECIAL_754>",
758
+ "<SPECIAL_755>",
759
+ "<SPECIAL_756>",
760
+ "<SPECIAL_757>",
761
+ "<SPECIAL_758>",
762
+ "<SPECIAL_759>",
763
+ "<SPECIAL_760>",
764
+ "<SPECIAL_761>",
765
+ "<SPECIAL_762>",
766
+ "<SPECIAL_763>",
767
+ "<SPECIAL_764>",
768
+ "<SPECIAL_765>",
769
+ "<SPECIAL_766>",
770
+ "<SPECIAL_767>",
771
+ "<SPECIAL_768>",
772
+ "<SPECIAL_769>",
773
+ "<SPECIAL_770>",
774
+ "<SPECIAL_771>",
775
+ "<SPECIAL_772>",
776
+ "<SPECIAL_773>",
777
+ "<SPECIAL_774>",
778
+ "<SPECIAL_775>",
779
+ "<SPECIAL_776>",
780
+ "<SPECIAL_777>",
781
+ "<SPECIAL_778>",
782
+ "<SPECIAL_779>",
783
+ "<SPECIAL_780>",
784
+ "<SPECIAL_781>",
785
+ "<SPECIAL_782>",
786
+ "<SPECIAL_783>",
787
+ "<SPECIAL_784>",
788
+ "<SPECIAL_785>",
789
+ "<SPECIAL_786>",
790
+ "<SPECIAL_787>",
791
+ "<SPECIAL_788>",
792
+ "<SPECIAL_789>",
793
+ "<SPECIAL_790>",
794
+ "<SPECIAL_791>",
795
+ "<SPECIAL_792>",
796
+ "<SPECIAL_793>",
797
+ "<SPECIAL_794>",
798
+ "<SPECIAL_795>",
799
+ "<SPECIAL_796>",
800
+ "<SPECIAL_797>",
801
+ "<SPECIAL_798>",
802
+ "<SPECIAL_799>",
803
+ "<SPECIAL_800>",
804
+ "<SPECIAL_801>",
805
+ "<SPECIAL_802>",
806
+ "<SPECIAL_803>",
807
+ "<SPECIAL_804>",
808
+ "<SPECIAL_805>",
809
+ "<SPECIAL_806>",
810
+ "<SPECIAL_807>",
811
+ "<SPECIAL_808>",
812
+ "<SPECIAL_809>",
813
+ "<SPECIAL_810>",
814
+ "<SPECIAL_811>",
815
+ "<SPECIAL_812>",
816
+ "<SPECIAL_813>",
817
+ "<SPECIAL_814>",
818
+ "<SPECIAL_815>",
819
+ "<SPECIAL_816>",
820
+ "<SPECIAL_817>",
821
+ "<SPECIAL_818>",
822
+ "<SPECIAL_819>",
823
+ "<SPECIAL_820>",
824
+ "<SPECIAL_821>",
825
+ "<SPECIAL_822>",
826
+ "<SPECIAL_823>",
827
+ "<SPECIAL_824>",
828
+ "<SPECIAL_825>",
829
+ "<SPECIAL_826>",
830
+ "<SPECIAL_827>",
831
+ "<SPECIAL_828>",
832
+ "<SPECIAL_829>",
833
+ "<SPECIAL_830>",
834
+ "<SPECIAL_831>",
835
+ "<SPECIAL_832>",
836
+ "<SPECIAL_833>",
837
+ "<SPECIAL_834>",
838
+ "<SPECIAL_835>",
839
+ "<SPECIAL_836>",
840
+ "<SPECIAL_837>",
841
+ "<SPECIAL_838>",
842
+ "<SPECIAL_839>",
843
+ "<SPECIAL_840>",
844
+ "<SPECIAL_841>",
845
+ "<SPECIAL_842>",
846
+ "<SPECIAL_843>",
847
+ "<SPECIAL_844>",
848
+ "<SPECIAL_845>",
849
+ "<SPECIAL_846>",
850
+ "<SPECIAL_847>",
851
+ "<SPECIAL_848>",
852
+ "<SPECIAL_849>",
853
+ "<SPECIAL_850>",
854
+ "<SPECIAL_851>",
855
+ "<SPECIAL_852>",
856
+ "<SPECIAL_853>",
857
+ "<SPECIAL_854>",
858
+ "<SPECIAL_855>",
859
+ "<SPECIAL_856>",
860
+ "<SPECIAL_857>",
861
+ "<SPECIAL_858>",
862
+ "<SPECIAL_859>",
863
+ "<SPECIAL_860>",
864
+ "<SPECIAL_861>",
865
+ "<SPECIAL_862>",
866
+ "<SPECIAL_863>",
867
+ "<SPECIAL_864>",
868
+ "<SPECIAL_865>",
869
+ "<SPECIAL_866>",
870
+ "<SPECIAL_867>",
871
+ "<SPECIAL_868>",
872
+ "<SPECIAL_869>",
873
+ "<SPECIAL_870>",
874
+ "<SPECIAL_871>",
875
+ "<SPECIAL_872>",
876
+ "<SPECIAL_873>",
877
+ "<SPECIAL_874>",
878
+ "<SPECIAL_875>",
879
+ "<SPECIAL_876>",
880
+ "<SPECIAL_877>",
881
+ "<SPECIAL_878>",
882
+ "<SPECIAL_879>",
883
+ "<SPECIAL_880>",
884
+ "<SPECIAL_881>",
885
+ "<SPECIAL_882>",
886
+ "<SPECIAL_883>",
887
+ "<SPECIAL_884>",
888
+ "<SPECIAL_885>",
889
+ "<SPECIAL_886>",
890
+ "<SPECIAL_887>",
891
+ "<SPECIAL_888>",
892
+ "<SPECIAL_889>",
893
+ "<SPECIAL_890>",
894
+ "<SPECIAL_891>",
895
+ "<SPECIAL_892>",
896
+ "<SPECIAL_893>",
897
+ "<SPECIAL_894>",
898
+ "<SPECIAL_895>",
899
+ "<SPECIAL_896>",
900
+ "<SPECIAL_897>",
901
+ "<SPECIAL_898>",
902
+ "<SPECIAL_899>",
903
+ "<SPECIAL_900>",
904
+ "<SPECIAL_901>",
905
+ "<SPECIAL_902>",
906
+ "<SPECIAL_903>",
907
+ "<SPECIAL_904>",
908
+ "<SPECIAL_905>",
909
+ "<SPECIAL_906>",
910
+ "<SPECIAL_907>",
911
+ "<SPECIAL_908>",
912
+ "<SPECIAL_909>",
913
+ "<SPECIAL_910>",
914
+ "<SPECIAL_911>",
915
+ "<SPECIAL_912>",
916
+ "<SPECIAL_913>",
917
+ "<SPECIAL_914>",
918
+ "<SPECIAL_915>",
919
+ "<SPECIAL_916>",
920
+ "<SPECIAL_917>",
921
+ "<SPECIAL_918>",
922
+ "<SPECIAL_919>",
923
+ "<SPECIAL_920>",
924
+ "<SPECIAL_921>",
925
+ "<SPECIAL_922>",
926
+ "<SPECIAL_923>",
927
+ "<SPECIAL_924>",
928
+ "<SPECIAL_925>",
929
+ "<SPECIAL_926>",
930
+ "<SPECIAL_927>",
931
+ "<SPECIAL_928>",
932
+ "<SPECIAL_929>",
933
+ "<SPECIAL_930>",
934
+ "<SPECIAL_931>",
935
+ "<SPECIAL_932>",
936
+ "<SPECIAL_933>",
937
+ "<SPECIAL_934>",
938
+ "<SPECIAL_935>",
939
+ "<SPECIAL_936>",
940
+ "<SPECIAL_937>",
941
+ "<SPECIAL_938>",
942
+ "<SPECIAL_939>",
943
+ "<SPECIAL_940>",
944
+ "<SPECIAL_941>",
945
+ "<SPECIAL_942>",
946
+ "<SPECIAL_943>",
947
+ "<SPECIAL_944>",
948
+ "<SPECIAL_945>",
949
+ "<SPECIAL_946>",
950
+ "<SPECIAL_947>",
951
+ "<SPECIAL_948>",
952
+ "<SPECIAL_949>",
953
+ "<SPECIAL_950>",
954
+ "<SPECIAL_951>",
955
+ "<SPECIAL_952>",
956
+ "<SPECIAL_953>",
957
+ "<SPECIAL_954>",
958
+ "<SPECIAL_955>",
959
+ "<SPECIAL_956>",
960
+ "<SPECIAL_957>",
961
+ "<SPECIAL_958>",
962
+ "<SPECIAL_959>",
963
+ "<SPECIAL_960>",
964
+ "<SPECIAL_961>",
965
+ "<SPECIAL_962>",
966
+ "<SPECIAL_963>",
967
+ "<SPECIAL_964>",
968
+ "<SPECIAL_965>",
969
+ "<SPECIAL_966>",
970
+ "<SPECIAL_967>",
971
+ "<SPECIAL_968>",
972
+ "<SPECIAL_969>",
973
+ "<SPECIAL_970>",
974
+ "<SPECIAL_971>",
975
+ "<SPECIAL_972>",
976
+ "<SPECIAL_973>",
977
+ "<SPECIAL_974>",
978
+ "<SPECIAL_975>",
979
+ "<SPECIAL_976>",
980
+ "<SPECIAL_977>",
981
+ "<SPECIAL_978>",
982
+ "<SPECIAL_979>",
983
+ "<SPECIAL_980>",
984
+ "<SPECIAL_981>",
985
+ "<SPECIAL_982>",
986
+ "<SPECIAL_983>",
987
+ "<SPECIAL_984>",
988
+ "<SPECIAL_985>",
989
+ "<SPECIAL_986>",
990
+ "<SPECIAL_987>",
991
+ "<SPECIAL_988>",
992
+ "<SPECIAL_989>",
993
+ "<SPECIAL_990>",
994
+ "<SPECIAL_991>",
995
+ "<SPECIAL_992>",
996
+ "<SPECIAL_993>",
997
+ "<SPECIAL_994>",
998
+ "<SPECIAL_995>",
999
+ "<SPECIAL_996>",
1000
+ "<SPECIAL_997>",
1001
+ "<SPECIAL_998>",
1002
+ "<SPECIAL_999>"
1003
+ ],
1004
+ "bos_token": {
1005
+ "content": "<s>",
1006
+ "lstrip": false,
1007
+ "normalized": false,
1008
+ "rstrip": false,
1009
+ "single_word": false
1010
+ },
1011
+ "eos_token": {
1012
+ "content": "</s>",
1013
+ "lstrip": false,
1014
+ "normalized": false,
1015
+ "rstrip": false,
1016
+ "single_word": false
1017
+ },
1018
+ "pad_token": {
1019
+ "content": "<pad>",
1020
+ "lstrip": false,
1021
+ "normalized": false,
1022
+ "rstrip": false,
1023
+ "single_word": false
1024
+ },
1025
+ "unk_token": {
1026
+ "content": "<unk>",
1027
+ "lstrip": false,
1028
+ "normalized": false,
1029
+ "rstrip": false,
1030
+ "single_word": false
1031
+ }
1032
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:158915c7c928301e0d32f1da363e45c294d907bd4c64a8e855f0bd1a47b9e870
3
+ size 17078001
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff