huihui-ai committed on
Commit 9814edf (verified)
1 Parent(s): 55e611e

Add files using upload-large-folder tool

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tekken.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,143 @@
---
language:
- en
- fr
- de
- es
- pt
- it
- ja
- ko
- ru
- zh
- ar
- fa
- id
- ms
- ne
- pl
- ro
- sr
- sv
- tr
- uk
- vi
- hi
- bn
license: apache-2.0
library_name: vllm
inference: false
base_model:
- mistralai/Mistral-Small-3.2-24B-Instruct-2506
extra_gated_description: >-
  If you want to learn more about how we process your personal data, please read
  our <a href="https://mistral.ai/terms/">Privacy Policy</a>.
pipeline_tag: image-text-to-text
tags:
- abliterated
- uncensored
---

# huihui-ai/Huihui-Mistral-Small-3.2-24B-Instruct-2506-abliterated

This is an uncensored version of [mistralai/Mistral-Small-3.2-24B-Instruct-2506](https://huggingface.co/mistralai/Mistral-Small-3.2-24B-Instruct-2506) created with abliteration (see [remove-refusals-with-transformers](https://github.com/Sumandora/remove-refusals-with-transformers) to learn more about it).
This is a crude, proof-of-concept implementation of refusal removal from an LLM that does not rely on TransformerLens.

Only the text stack was processed; the vision components were left untouched.
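
For reference, abliteration is typically implemented by estimating a "refusal direction" in the residual stream from paired harmful/harmless prompts and projecting it out of the model's weights. The sketch below is illustrative only; the function names and shapes are assumptions, not the exact script used for this model (see the linked repository for the real procedure):

```python
# Hypothetical sketch of the core abliteration math, not the actual script used here.
import torch

def refusal_direction(h_harmful: torch.Tensor, h_harmless: torch.Tensor) -> torch.Tensor:
    """Difference-of-means over (n_prompts, hidden_dim) hidden states, normalized."""
    direction = h_harmful.mean(dim=0) - h_harmless.mean(dim=0)
    return direction / direction.norm()

def ablate_weight(weight: torch.Tensor, direction: torch.Tensor) -> torch.Tensor:
    """Project the refusal direction out of a (hidden_dim, in_dim) output matrix:
    W <- (I - d d^T) W, so the layer can no longer write along d."""
    return weight - torch.outer(direction, direction) @ weight
```
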
## Usage
```python
from datetime import datetime, timedelta
import torch

from mistral_common.protocol.instruct.request import ChatCompletionRequest
from mistral_common.tokens.tokenizers.mistral import MistralTokenizer
from huggingface_hub import hf_hub_download
from transformers import Mistral3ForConditionalGeneration


def load_system_prompt(repo_id: str, filename: str) -> str:
    file_path = hf_hub_download(repo_id=repo_id, filename=filename)
    with open(file_path, "r") as file:
        system_prompt = file.read()
    today = datetime.today().strftime("%Y-%m-%d")
    yesterday = (datetime.today() - timedelta(days=1)).strftime("%Y-%m-%d")
    model_name = repo_id.split("/")[-1]
    return system_prompt.format(name=model_name, today=today, yesterday=yesterday)


model_id = "huihui-ai/Huihui-Mistral-Small-3.2-24B-Instruct-2506-abliterated"
SYSTEM_PROMPT = load_system_prompt(model_id, "SYSTEM_PROMPT.txt")

tokenizer = MistralTokenizer.from_hf_hub(model_id)

model = Mistral3ForConditionalGeneration.from_pretrained(
    model_id, torch_dtype=torch.bfloat16
)

image_url = "https://static.wikia.nocookie.net/essentialsdocs/images/7/70/Battle.png/revision/latest?cb=20220523172438"

messages = [
    {"role": "system", "content": SYSTEM_PROMPT},
    {
        "role": "user",
        "content": [
            {
                "type": "text",
                "text": "What action do you think I should take in this situation? List all the possible actions and explain why you think they are good or bad.",
            },
            {"type": "image_url", "image_url": {"url": image_url}},
        ],
    },
]

tokenized = tokenizer.encode_chat_completion(ChatCompletionRequest(messages=messages))

input_ids = torch.tensor([tokenized.tokens])
attention_mask = torch.ones_like(input_ids)
pixel_values = torch.tensor(tokenized.images[0], dtype=torch.bfloat16).unsqueeze(0)
image_sizes = torch.tensor([pixel_values.shape[-2:]])

output = model.generate(
    input_ids=input_ids,
    attention_mask=attention_mask,
    pixel_values=pixel_values,
    image_sizes=image_sizes,
    max_new_tokens=1000,
)[0]

decoded_output = tokenizer.decode(output[len(tokenized.tokens) :])
print(decoded_output)
# In this situation, you are playing a Pokémon game where your Pikachu (Level 42) is facing a wild Pidgey (Level 17). Here are the possible actions you can take and an analysis of each:

# 1. **FIGHT**:
# - **Pros**: Pikachu is significantly higher level than the wild Pidgey, which suggests that it should be able to defeat Pidgey easily. This could be a good opportunity to gain experience points and possibly items or money.
# - **Cons**: There is always a small risk of Pikachu fainting, especially if Pidgey has a powerful move or a status effect that could hinder Pikachu. However, given the large level difference, this risk is minimal.

# 2. **BAG**:
# - **Pros**: You might have items in your bag that could help in this battle, such as Potions, Poké Balls, or Berries. Using an item could help you capture Pidgey or heal Pikachu if needed.
# - **Cons**: Using items might not be necessary given the level difference. It could be more efficient to just fight and defeat Pidgey quickly.

# 3. **POKÉMON**:
# - **Pros**: You might have another Pokémon in your party that is better suited for this battle or that you want to gain experience. Switching Pokémon could also be strategic if you want to train a lower-level Pokémon.
# - **Cons**: Switching Pokémon might not be necessary since Pikachu is at a significant advantage. It could also waste time and potentially give Pidgey a turn to attack.

# 4. **RUN**:
# - **Pros**: Running away could be a quick way to avoid the battle altogether. This might be useful if you are trying to conserve resources or if you are in a hurry to get to another location.
# - **Cons**: Running away means you miss out on the experience points, items, or money that you could gain from defeating Pidgey. It also might not be the most efficient use of your time if you are trying to train your Pokémon.

# ### Recommendation:
# Given the significant level advantage, the best action to take is likely **FIGHT**. This will allow you to quickly defeat Pidgey and gain experience points for Pikachu. If you are concerned about Pikachu's health, you could use the **BAG** to heal Pikachu before or during the battle. Running away or switching Pokémon does not seem necessary in this situation.
```
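
Since the card's metadata lists `vllm` as the target library, serving with vLLM should also work. The following is a minimal sketch; the `tokenizer_mode="mistral"` flag and the `llm.chat` call follow vLLM's general Mistral support and are assumptions, so verify against your vLLM version:

```python
# Hedged sketch: offline inference with vLLM, not an officially documented recipe for this repo.
from vllm import LLM, SamplingParams

llm = LLM(
    model="huihui-ai/Huihui-Mistral-Small-3.2-24B-Instruct-2506-abliterated",
    tokenizer_mode="mistral",  # uses the tekken.json tokenizer added in this commit
)
params = SamplingParams(temperature=0.15, max_tokens=512)  # temperature mirrors generation_config.json

messages = [{"role": "user", "content": "Explain in two sentences what this model is."}]
outputs = llm.chat(messages, sampling_params=params)
print(outputs[0].outputs[0].text)
```
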
### Donation

If you like it, please click 'like' and follow us for more updates.
You can follow [x.com/support_huihui](https://x.com/support_huihui) to get the latest model information from huihui.ai.

##### Your donation helps us continue development and improvement; even a cup of coffee's worth makes a difference.
- bitcoin:
```
bc1qqnkhuchxw0zqjh2ku3lu4hq45hc6gy84uk70ge
```

SYSTEM_PROMPT.txt ADDED
@@ -0,0 +1,29 @@
You are {name}, a Large Language Model (LLM) created by Mistral AI, a French startup headquartered in Paris.
You power an AI assistant called Le Chat.
Your knowledge base was last updated on 2023-10-01.
The current date is {today}.

When you're not sure about some information or when the user's request requires up-to-date or specific data, you must use the available tools to fetch the information. Do not hesitate to use tools whenever they can provide a more accurate or complete response. If no relevant tools are available, then clearly state that you don't have the information and avoid making up anything.
If the user's question is not clear, ambiguous, or does not provide enough context for you to accurately answer the question, you do not try to answer it right away and you rather ask the user to clarify their request (e.g. "What are some good restaurants around me?" => "Where are you?" or "When is the next flight to Tokyo" => "Where do you travel from?").
You are always very attentive to dates, in particular you try to resolve dates (e.g. "yesterday" is {yesterday}) and when asked about information at specific dates, you discard information that is at another date.
You follow these instructions in all languages, and always respond to the user in the language they use or request.
Next sections describe the capabilities that you have.

# WEB BROWSING INSTRUCTIONS

You cannot perform any web search or access internet to open URLs, links etc. If it seems like the user is expecting you to do so, you clarify the situation and ask the user to copy paste the text directly in the chat.

# MULTI-MODAL INSTRUCTIONS

You have the ability to read images, but you cannot generate images. You also cannot transcribe audio files or videos.
You cannot read nor transcribe audio files or videos.

# TOOL CALLING INSTRUCTIONS

You may have access to tools that you can use to fetch information or perform actions. You must use these tools in the following situations:

1. When the request requires up-to-date information.
2. When the request requires specific data that you do not have in your knowledge base.
3. When the request involves actions that you cannot perform without tools.

Always prioritize using tools to provide the most accurate and helpful response. If tools are not available, inform the user that you cannot perform the requested action at the moment.
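
The `{name}`, `{today}`, and `{yesterday}` placeholders in this file are plain Python `str.format` fields; the `load_system_prompt` helper shown in the README fills them in. A minimal illustration (the shortened template string here is hypothetical):

```python
# Toy demonstration of how the placeholders get substituted.
from datetime import datetime, timedelta

template = 'You are {name}. The current date is {today}. ("yesterday" is {yesterday}.)'
now = datetime.today()
print(template.format(
    name="Huihui-Mistral-Small-3.2-24B-Instruct-2506-abliterated",
    today=now.strftime("%Y-%m-%d"),
    yesterday=(now - timedelta(days=1)).strftime("%Y-%m-%d"),
))
```
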
chat_template.jinja ADDED
@@ -0,0 +1,296 @@
{#- Copyright 2025-present the Unsloth team. All rights reserved. #}
{#- Licensed under the Apache License, Version 2.0 (the "License") #}
{%- set yesterday_day = strftime_now("%d") %}
{%- set yesterday_month = strftime_now("%m") %}
{%- set yesterday_year = strftime_now("%Y") %}
{%- set today_date = yesterday_year + '-' + yesterday_month + '-' + yesterday_day %}
{%- if yesterday_day == '01' %}
    {#- Jinja doesn't allow minus 1 date - Unsloth alternative #}
    {%- if yesterday_month == '01' %}
        {%- set yesterday_day = '31' %}
        {%- set yesterday_month = '12' %}
        {%- if yesterday_year == '2024' %}
            {%- set yesterday_year = '2023' %}
        {%- elif yesterday_year == '2025' %}
            {%- set yesterday_year = '2024' %}
        {%- elif yesterday_year == '2026' %}
            {%- set yesterday_year = '2025' %}
        {%- elif yesterday_year == '2027' %}
            {%- set yesterday_year = '2026' %}
        {%- elif yesterday_year == '2028' %}
            {%- set yesterday_year = '2027' %}
        {%- elif yesterday_year == '2029' %}
            {%- set yesterday_year = '2028' %}
        {%- elif yesterday_year == '2030' %}
            {%- set yesterday_year = '2029' %}
        {%- elif yesterday_year == '2031' %}
            {%- set yesterday_year = '2030' %}
        {%- elif yesterday_year == '2032' %}
            {%- set yesterday_year = '2031' %}
        {%- elif yesterday_year == '1970' %}
            {#- Stop llama_cpp from erroring out #}
            {%- set yesterday_year = '1970' %}
        {%- else %}
            {{- raise_exception('Unsloth custom template does not support years > 2032. Error year = [' + yesterday_year + ']') }}
        {%- endif %}
    {%- elif yesterday_month == '02' %}
        {%- set yesterday_day = '31' %}
        {%- set yesterday_month = '01' %}
    {%- elif yesterday_month == '03' %}
        {%- set yesterday_month = '02' %}
        {%- set yesterday_day = '28' %}
        {%- if yesterday_year == '2024' %}
            {%- set yesterday_day = '29' %}
        {%- elif yesterday_year == '2028' %}
            {%- set yesterday_day = '29' %}
        {%- elif yesterday_year == '2032' %}
            {%- set yesterday_day = '29' %}
        {%- elif yesterday_year == '1970' %}
            {#- Stop llama_cpp from erroring out #}
            {%- set yesterday_day = '29' %}
        {%- else %}
            {{- raise_exception('Unsloth custom template does not support years > 2032. Error year = [' + yesterday_year + ']') }}
        {%- endif %}
    {%- elif yesterday_month == '04' %}
        {%- set yesterday_day = '31' %}
        {%- set yesterday_month = '03' %}
    {%- elif yesterday_month == '05' %}
        {%- set yesterday_day = '30' %}
        {%- set yesterday_month = '04' %}
    {%- elif yesterday_month == '06' %}
        {%- set yesterday_day = '31' %}
        {%- set yesterday_month = '05' %}
    {%- elif yesterday_month == '07' %}
        {%- set yesterday_day = '30' %}
        {%- set yesterday_month = '06' %}
    {%- elif yesterday_month == '08' %}
        {%- set yesterday_day = '31' %}
        {%- set yesterday_month = '07' %}
    {%- elif yesterday_month == '09' %}
        {%- set yesterday_day = '31' %}
        {%- set yesterday_month = '08' %}
    {%- elif yesterday_month == '10' %}
        {%- set yesterday_day = '30' %}
        {%- set yesterday_month = '09' %}
    {%- elif yesterday_month == '11' %}
        {%- set yesterday_day = '31' %}
        {%- set yesterday_month = '10' %}
    {%- elif yesterday_month == '12' %}
        {%- set yesterday_day = '30' %}
        {%- set yesterday_month = '11' %}
    {%- endif %}
{%- elif yesterday_day == '02' %}
    {%- set yesterday_day = '01' %}
{%- elif yesterday_day == '03' %}
    {%- set yesterday_day = '02' %}
{%- elif yesterday_day == '04' %}
    {%- set yesterday_day = '03' %}
{%- elif yesterday_day == '05' %}
    {%- set yesterday_day = '04' %}
{%- elif yesterday_day == '06' %}
    {%- set yesterday_day = '05' %}
{%- elif yesterday_day == '07' %}
    {%- set yesterday_day = '06' %}
{%- elif yesterday_day == '08' %}
    {%- set yesterday_day = '07' %}
{%- elif yesterday_day == '09' %}
    {%- set yesterday_day = '08' %}
{%- elif yesterday_day == '10' %}
    {%- set yesterday_day = '09' %}
{%- elif yesterday_day == '11' %}
    {%- set yesterday_day = '10' %}
{%- elif yesterday_day == '12' %}
    {%- set yesterday_day = '11' %}
{%- elif yesterday_day == '13' %}
    {%- set yesterday_day = '12' %}
{%- elif yesterday_day == '14' %}
    {%- set yesterday_day = '13' %}
{%- elif yesterday_day == '15' %}
    {%- set yesterday_day = '14' %}
{%- elif yesterday_day == '16' %}
    {%- set yesterday_day = '15' %}
{%- elif yesterday_day == '17' %}
    {%- set yesterday_day = '16' %}
{%- elif yesterday_day == '18' %}
    {%- set yesterday_day = '17' %}
{%- elif yesterday_day == '19' %}
    {%- set yesterday_day = '18' %}
{%- elif yesterday_day == '20' %}
    {%- set yesterday_day = '19' %}
{%- elif yesterday_day == '21' %}
    {%- set yesterday_day = '20' %}
{%- elif yesterday_day == '22' %}
    {%- set yesterday_day = '21' %}
{%- elif yesterday_day == '23' %}
    {%- set yesterday_day = '22' %}
{%- elif yesterday_day == '24' %}
    {%- set yesterday_day = '23' %}
{%- elif yesterday_day == '25' %}
    {%- set yesterday_day = '24' %}
{%- elif yesterday_day == '26' %}
    {%- set yesterday_day = '25' %}
{%- elif yesterday_day == '27' %}
    {%- set yesterday_day = '26' %}
{%- elif yesterday_day == '28' %}
    {%- set yesterday_day = '27' %}
{%- elif yesterday_day == '29' %}
    {%- set yesterday_day = '28' %}
{%- elif yesterday_day == '30' %}
    {%- set yesterday_day = '29' %}
{%- elif yesterday_day == '31' %}
    {%- set yesterday_day = '30' %}
{%- endif %}
{#- Edits made by Unsloth #}
{%- set yesterday_date = yesterday_year + '-' + yesterday_month + '-' + yesterday_day %}
{%- set default_system_message = "You are Mistral-Small-3.2-24B-Instruct-2506, a Large Language Model (LLM) created by Mistral AI, a French startup headquartered in Paris.\nYou power an AI assistant called Le Chat.\nYour knowledge base was last updated on 2023-10-01.\nThe current date is " + today_date + ".\n\nWhen you\'re not sure about some information or when the user\'s request requires up-to-date or specific data, you must use the available tools to fetch the information. Do not hesitate to use tools whenever they can provide a more accurate or complete response. If no relevant tools are available, then clearly state that you don\'t have the information and avoid making up anything.\nIf the user\'s question is not clear, ambiguous, or does not provide enough context for you to accurately answer the question, you do not try to answer it right away and you rather ask the user to clarify their request (e.g. \"What are some good restaurants around me?\" => \"Where are you?\" or \"When is the next flight to Tokyo\" => \"Where do you travel from?\").\nYou are always very attentive to dates, in particular you try to resolve dates (e.g. \"yesterday\" is " + yesterday_date + ") and when asked about information at specific dates, you discard information that is at another date.\nYou follow these instructions in all languages, and always respond to the user in the language they use or request.\nNext sections describe the capabilities that you have.\n\n# WEB BROWSING INSTRUCTIONS\n\nYou cannot perform any web search or access internet to open URLs, links etc. If it seems like the user is expecting you to do so, you clarify the situation and ask the user to copy paste the text directly in the chat.\n\n# MULTI-MODAL INSTRUCTIONS\n\nYou have the ability to read images, but you cannot generate images. You also cannot transcribe audio files or videos.\nYou cannot read nor transcribe audio files or videos.\n\n# TOOL CALLING INSTRUCTIONS\n\nYou may have access to tools that you can use to fetch information or perform actions. You must use these tools in the following situations:\n\n1. When the request requires up-to-date information.\n2. When the request requires specific data that you do not have in your knowledge base.\n3. When the request involves actions that you cannot perform without tools.\n\nAlways prioritize using tools to provide the most accurate and helpful response. If tools are not available, inform the user that you cannot perform the requested action at the moment." %}

{{- bos_token }}

{%- if messages[0]['role'] == 'system' %}
    {%- if messages[0]['content'] is string %}
        {%- set system_message = messages[0]['content'] %}
    {%- else %}
        {%- set system_message = messages[0]['content'][0]['text'] %}
    {%- endif %}
    {%- set loop_messages = messages[1:] %}
{%- else %}
    {%- set system_message = default_system_message %}
    {%- set loop_messages = messages %}
{%- endif %}
{{- '[SYSTEM_PROMPT]' + system_message + '[/SYSTEM_PROMPT]' }}


{#- Tool description appended ONLY to last user message. Edits made by Unsloth #}
{#- Tool description appended also if last message is tool. Edits made by Unsloth #}
{%- set tools_description = "" %}
{%- set has_tools = false %}
{#- Cannot use set append_tools_index = loop.index0 since temporary variable - must use namespace #}
{%- set ns = namespace(append_tools_index=0, append_tools=false) %}

{%- if tools is defined and tools is not none and tools|length > 0 %}

    {%- set has_tools = true %}
    {%- set tools_description = "[AVAILABLE_TOOLS]" + (tools | tojson) + "[/AVAILABLE_TOOLS]" %}

    {#- If User,Assistant,Tool,Tool we also need to append tools_description to last assistant WITHOUT tool_calls defined. Edits made by Unsloth #}
    {%- if (loop_messages|last)['role'] == 'tool' %}

        {#- Find last assistant WITHOUT tool_calls defined #}
        {%- set ns.append_tools = true %}
        {%- for message in loop_messages %}
            {%- if message['role'] == 'assistant' %}

                {#- Cannot use set append_tools_index = loop.index0 since temporary variable - must use namespace #}

                {%- if message['tool_calls'] is not defined or message['tool_calls'] is none %}
                    {%- set ns.append_tools_index = loop.index0 %}
                {%- endif %}

            {%- endif %}
        {%- endfor %}

    {%- endif %}

{%- endif %}

{%- for message in loop_messages %}
    {%- if message['role'] == 'user' %}

        {%- if has_tools and loop.last %}
            {{- tools_description }}
        {%- endif %}

        {#- If directly called tools in first turn, prepend to user #}
        {%- if ns.append_tools and ns.append_tools_index == 0 %}
            {{- tools_description }}
        {%- endif %}

        {%- if message['content'] is string %}
            {{- '[INST]' + message['content'] + '[/INST]' }}
        {%- else %}
            {{- '[INST]' }}
            {%- for block in message['content'] %}
                {%- if block['type'] == 'text' %}

                    {#- Original did not have content which is weird. Added by Un-sloth. #}
                    {%- if block['text'] is defined %}
                        {{- block['text'] }}
                    {%- else %}
                        {{- block['content'] }}
                    {%- endif %}

                {%- elif block['type'] in ['image', 'image_url'] %}
                    {{- '[IMG]' }}
                {%- else %}
                    {{- raise_exception('Only text and image blocks are supported in message content!') }}
                {%- endif %}
            {%- endfor %}
            {{- '[/INST]' }}
        {%- endif %}

    {%- elif message['role'] == 'system' %}
        {%- if message['content'] is string %}
            {{- '[SYSTEM_PROMPT]' + message['content'] + '[/SYSTEM_PROMPT]' }}
        {%- else %}
            {{- '[SYSTEM_PROMPT]' + message['content'][0]['text'] + '[/SYSTEM_PROMPT]' }}
        {%- endif %}


    {%- elif message['role'] == 'assistant' %}
        {%- if message['content'] is string %}
            {{- message['content'] }}
        {%- else %}
            {{- message['content'][0]['text'] }}
        {%- endif %}

        {#- If User,Assistant,Tool,Tool we also need to append tools_description. Edits made by Unsloth #}

        {%- if has_tools and (loop.index0 == ns.append_tools_index) %}
            {{- eos_token }}
            {{- tools_description }}
        {%- endif %}

        {%- if message['tool_calls'] is defined and message['tool_calls'] is not none %}
            {%- for tool in message['tool_calls'] %}
                {%- if tool['id'] is not defined %}
                    {{- raise_exception('Tool ID must be provided!') }}
                {%- endif %}
                {%- set tool_call_id = tool['id'] %}
                {%- if tool_call_id is not string or tool_call_id|length < 9 %}
                    {{- raise_exception("Tool call IDs should be alphanumeric strings with length >= 9!") }}
                {%- endif %}
                {%- set arguments = tool['function']['arguments'] %}
                {%- if arguments is not string %}
                    {%- set arguments = arguments|tojson %}
                {%- endif %}
                {#- Must list tool calls AFTER assistant. Edits made by Un-sloth #}
                {{- "[TOOL_CALLS]" + tool['function']['name'] + "[CALL_ID]" + tool_call_id + "[ARGS]" + arguments }}
            {%- endfor %}
        {%- endif %}

        {#- Must not add EOS if added tools_description. Unsloth edits. #}
        {%- if (loop.index0 != ns.append_tools_index) %}
            {{- eos_token }}
        {%- endif %}

    {%- elif message["role"] == "tool_results" or message["role"] == "tool" %}
        {%- if message.content is defined and message.content.content is defined %}
            {%- set content = message.content.content %}
        {%- else %}
            {%- set content = message.content %}
        {%- endif %}
        {%- if message['tool_call_id'] is not defined %}
            {{- raise_exception('tool_call_id must be provided!') }}
        {%- endif %}
        {%- set tool_call_id = message['tool_call_id'] %}
        {%- if tool_call_id is not string or tool_call_id|length < 9 %}
            {{- raise_exception("Tool call IDs should be alphanumeric strings with length >= 9!") }}
        {%- endif %}
        {{- "[TOOL_RESULTS]" + tool_call_id + "[TOOL_CONTENT]" + content|string + "[/TOOL_RESULTS]" }}

    {%- else %}
        {{- raise_exception('Only user, system, assistant and tool roles are supported in the custom template made by Unsloth!') }}
    {%- endif %}
{%- endfor %}
{#- Copyright 2025-present the Unsloth team. All rights reserved. #}
{#- Licensed under the Apache License, Version 2.0 (the "License") #}
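
To see what this template actually produces, you can render it through `transformers` (which provides the `strftime_now` helper the template relies on). A small sketch, assuming the tokenizer in this repo loads with `AutoTokenizer`:

```python
# Sketch: render the chat template without running the model.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(
    "huihui-ai/Huihui-Mistral-Small-3.2-24B-Instruct-2506-abliterated"
)
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]
text = tok.apply_chat_template(messages, tokenize=False)
print(text)  # expected shape: <s>[SYSTEM_PROMPT]...[/SYSTEM_PROMPT][INST]Hello![/INST]
```
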
config.json ADDED
@@ -0,0 +1,51 @@
{
  "architectures": [
    "Mistral3ForConditionalGeneration"
  ],
  "bos_token_id": 1,
  "eos_token_id": 2,
  "image_token_index": 10,
  "model_type": "mistral3",
  "multimodal_projector_bias": false,
  "pad_token_id": 11,
  "projector_hidden_act": "gelu",
  "spatial_merge_size": 2,
  "text_config": {
    "attention_dropout": 0.0,
    "head_dim": 128,
    "hidden_act": "silu",
    "hidden_size": 5120,
    "initializer_range": 0.02,
    "intermediate_size": 32768,
    "max_position_embeddings": 131072,
    "model_type": "mistral",
    "num_attention_heads": 32,
    "num_hidden_layers": 40,
    "num_key_value_heads": 8,
    "rms_norm_eps": 1e-05,
    "rope_theta": 1000000000.0,
    "sliding_window": null,
    "torch_dtype": "bfloat16",
    "use_cache": true,
    "vocab_size": 131072
  },
  "torch_dtype": "bfloat16",
  "transformers_version": "4.52.4",
  "vision_config": {
    "attention_dropout": 0.0,
    "head_dim": 64,
    "hidden_act": "silu",
    "hidden_size": 1024,
    "image_size": 1540,
    "initializer_range": 0.02,
    "intermediate_size": 4096,
    "model_type": "pixtral",
    "num_attention_heads": 16,
    "num_channels": 3,
    "num_hidden_layers": 24,
    "patch_size": 14,
    "rope_theta": 10000.0,
    "torch_dtype": "bfloat16"
  },
  "vision_feature_layer": -1
}
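
The config nests a `mistral` text backbone (40 layers, hidden size 5120, 131072-token context) under a `pixtral` vision tower. A quick way to confirm these values without downloading the weights is to load just the config (a sketch, assuming standard `transformers` support for the `mistral3` architecture):

```python
# Sketch: inspect the architecture from config.json alone.
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained(
    "huihui-ai/Huihui-Mistral-Small-3.2-24B-Instruct-2506-abliterated"
)
print(cfg.model_type)                     # mistral3
print(cfg.text_config.num_hidden_layers)  # 40
print(cfg.text_config.hidden_size)        # 5120
print(cfg.vision_config.model_type)       # pixtral
```
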
generation_config.json ADDED
@@ -0,0 +1,9 @@
{
  "bos_token_id": 1,
  "do_sample": true,
  "eos_token_id": 2,
  "max_length": 131072,
  "pad_token_id": 11,
  "temperature": 0.15,
  "transformers_version": "4.52.4"
}
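
These defaults (sampling on, low temperature of 0.15) are what `model.generate` picks up automatically; to check or override them, you can load the file explicitly (a sketch using the standard `GenerationConfig` API):

```python
# Sketch: read the repo's generation defaults.
from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained(
    "huihui-ai/Huihui-Mistral-Small-3.2-24B-Instruct-2506-abliterated"
)
print(gen.temperature, gen.do_sample)  # 0.15 True
```
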
model-00001-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b6c0aee6ef4400979ad35bb5c7f89091f4bc12fd53db11aa13c985cb086488c4
size 4883550696
model-00002-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c63148065bc0c7de433027543d41e01adfc9b027d9f8dcd02733d3894019d758
size 4781593336
model-00003-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:22f806b86685424bf13c431972b11e724b26359cc100846db7451b17c3520461
size 4886472224
model-00004-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:10a28fc859213762fbd72b22b54b39f28192741b1bf973b23b659065c058e9af
size 4781593376
model-00005-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:162992b3fe6ac2e0c9e767859160e7967f5e5b019d826387b51a94fd8cec6c79
size 4781593368
model-00006-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:60d3d05c046f1c1371634d73361e47a6357d9363609d3b072c97419a660ef140
size 4886472248
model-00007-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ecc86e754771d334bce1c5e6085c888b3b3fb4b7c0293a22f533e7c119888173
size 4781593376
model-00008-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:96476a3e9dad642fc7a4ab7c4282b56188af1d073a2f311cbd9e389b9344b1e4
size 4781593368
model-00009-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a093fe44298079abe18c10eb53f0c96df1478e0b9f57e94c672eba2f1bc9780e
size 4886472248
model-00010-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:101361dc32a90cb123ce3724642ef15a78ab071027911e31900590c390b1e42d
size 4571866320
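
Each entry above is a Git LFS pointer: the actual shard is stored out-of-band and identified by its SHA-256 digest (`oid`) and byte `size`. A downloaded shard can be checked against its pointer like this (a sketch; the filename is one of the shards listed above):

```python
# Sketch: verify a downloaded shard against its LFS pointer.
import hashlib
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="huihui-ai/Huihui-Mistral-Small-3.2-24B-Instruct-2506-abliterated",
    filename="model-00010-of-00010.safetensors",
)
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
print(h.hexdigest())  # should equal the oid above: 101361dc32a90cb1...90c390b1e42d
```
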
model.safetensors.index.json ADDED
@@ -0,0 +1,593 @@
{
  "metadata": {
    "total_parameters": 24011361280,
    "total_size": 48022722560
  },
  "weight_map": {
    "language_model.lm_head.weight": "model-00010-of-00010.safetensors",
    "language_model.model.embed_tokens.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.0.input_layernorm.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.0.mlp.down_proj.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.0.mlp.gate_proj.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.0.mlp.up_proj.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.0.post_attention_layernorm.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.0.self_attn.k_proj.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.0.self_attn.o_proj.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.0.self_attn.q_proj.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.0.self_attn.v_proj.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.1.input_layernorm.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.1.mlp.down_proj.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.1.mlp.gate_proj.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.1.mlp.up_proj.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.1.post_attention_layernorm.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.1.self_attn.k_proj.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.1.self_attn.o_proj.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.1.self_attn.q_proj.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.1.self_attn.v_proj.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.10.input_layernorm.weight": "model-00003-of-00010.safetensors",
    "language_model.model.layers.10.mlp.down_proj.weight": "model-00003-of-00010.safetensors",
    "language_model.model.layers.10.mlp.gate_proj.weight": "model-00003-of-00010.safetensors",
    "language_model.model.layers.10.mlp.up_proj.weight": "model-00003-of-00010.safetensors",
    "language_model.model.layers.10.post_attention_layernorm.weight": "model-00003-of-00010.safetensors",
    "language_model.model.layers.10.self_attn.k_proj.weight": "model-00003-of-00010.safetensors",
    "language_model.model.layers.10.self_attn.o_proj.weight": "model-00003-of-00010.safetensors",
    "language_model.model.layers.10.self_attn.q_proj.weight": "model-00003-of-00010.safetensors",
    "language_model.model.layers.10.self_attn.v_proj.weight": "model-00003-of-00010.safetensors",
    "language_model.model.layers.11.input_layernorm.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.11.mlp.down_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.11.mlp.gate_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.11.mlp.up_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.11.post_attention_layernorm.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.11.self_attn.k_proj.weight": "model-00003-of-00010.safetensors",
    "language_model.model.layers.11.self_attn.o_proj.weight": "model-00003-of-00010.safetensors",
    "language_model.model.layers.11.self_attn.q_proj.weight": "model-00003-of-00010.safetensors",
    "language_model.model.layers.11.self_attn.v_proj.weight": "model-00003-of-00010.safetensors",
    "language_model.model.layers.12.input_layernorm.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.12.mlp.down_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.12.mlp.gate_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.12.mlp.up_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.12.post_attention_layernorm.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.12.self_attn.k_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.12.self_attn.o_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.12.self_attn.q_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.12.self_attn.v_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.13.input_layernorm.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.13.mlp.down_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.13.mlp.gate_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.13.mlp.up_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.13.post_attention_layernorm.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.13.self_attn.k_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.13.self_attn.o_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.13.self_attn.q_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.13.self_attn.v_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.14.input_layernorm.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.14.mlp.down_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.14.mlp.gate_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.14.mlp.up_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.14.post_attention_layernorm.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.14.self_attn.k_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.14.self_attn.o_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.14.self_attn.q_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.14.self_attn.v_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.15.input_layernorm.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.15.mlp.down_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.15.mlp.gate_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.15.mlp.up_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.15.post_attention_layernorm.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.15.self_attn.k_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.15.self_attn.o_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.15.self_attn.q_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.15.self_attn.v_proj.weight": "model-00004-of-00010.safetensors",
    "language_model.model.layers.16.input_layernorm.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.16.mlp.down_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.16.mlp.gate_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.16.mlp.up_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.16.post_attention_layernorm.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.16.self_attn.k_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.16.self_attn.o_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.16.self_attn.q_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.16.self_attn.v_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.17.input_layernorm.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.17.mlp.down_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.17.mlp.gate_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.17.mlp.up_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.17.post_attention_layernorm.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.17.self_attn.k_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.17.self_attn.o_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.17.self_attn.q_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.17.self_attn.v_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.18.input_layernorm.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.18.mlp.down_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.18.mlp.gate_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.18.mlp.up_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.18.post_attention_layernorm.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.18.self_attn.k_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.18.self_attn.o_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.18.self_attn.q_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.18.self_attn.v_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.19.input_layernorm.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.19.mlp.down_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.19.mlp.gate_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.19.mlp.up_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.19.post_attention_layernorm.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.19.self_attn.k_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.19.self_attn.o_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.19.self_attn.q_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.19.self_attn.v_proj.weight": "model-00005-of-00010.safetensors",
    "language_model.model.layers.2.input_layernorm.weight": "model-00002-of-00010.safetensors",
    "language_model.model.layers.2.mlp.down_proj.weight": "model-00002-of-00010.safetensors",
    "language_model.model.layers.2.mlp.gate_proj.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.2.mlp.up_proj.weight": "model-00002-of-00010.safetensors",
    "language_model.model.layers.2.post_attention_layernorm.weight": "model-00002-of-00010.safetensors",
    "language_model.model.layers.2.self_attn.k_proj.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.2.self_attn.o_proj.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.2.self_attn.q_proj.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.2.self_attn.v_proj.weight": "model-00001-of-00010.safetensors",
    "language_model.model.layers.20.input_layernorm.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.20.mlp.down_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.20.mlp.gate_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.20.mlp.up_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.20.post_attention_layernorm.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.20.self_attn.k_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.20.self_attn.o_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.20.self_attn.q_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.20.self_attn.v_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.21.input_layernorm.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.21.mlp.down_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.21.mlp.gate_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.21.mlp.up_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.21.post_attention_layernorm.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.21.self_attn.k_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.21.self_attn.o_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.21.self_attn.q_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.21.self_attn.v_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.22.input_layernorm.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.22.mlp.down_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.22.mlp.gate_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.22.mlp.up_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.22.post_attention_layernorm.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.22.self_attn.k_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.22.self_attn.o_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.22.self_attn.q_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.22.self_attn.v_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.23.input_layernorm.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.23.mlp.down_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.23.mlp.gate_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.23.mlp.up_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.23.post_attention_layernorm.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.23.self_attn.k_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.23.self_attn.o_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.23.self_attn.q_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.23.self_attn.v_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.24.input_layernorm.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.24.mlp.down_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.24.mlp.gate_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.24.mlp.up_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.24.post_attention_layernorm.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.24.self_attn.k_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.24.self_attn.o_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.24.self_attn.q_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.24.self_attn.v_proj.weight": "model-00006-of-00010.safetensors",
    "language_model.model.layers.25.input_layernorm.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.25.mlp.down_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.25.mlp.gate_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.25.mlp.up_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.25.post_attention_layernorm.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.25.self_attn.k_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.25.self_attn.o_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.25.self_attn.q_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.25.self_attn.v_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.26.input_layernorm.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.26.mlp.down_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.26.mlp.gate_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.26.mlp.up_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.26.post_attention_layernorm.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.26.self_attn.k_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.26.self_attn.o_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.26.self_attn.q_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.26.self_attn.v_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.27.input_layernorm.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.27.mlp.down_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.27.mlp.gate_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.27.mlp.up_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.27.post_attention_layernorm.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.27.self_attn.k_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.27.self_attn.o_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.27.self_attn.q_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.27.self_attn.v_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.28.input_layernorm.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.28.mlp.down_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.28.mlp.gate_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.28.mlp.up_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.28.post_attention_layernorm.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.28.self_attn.k_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.28.self_attn.o_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.28.self_attn.q_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.28.self_attn.v_proj.weight": "model-00007-of-00010.safetensors",
    "language_model.model.layers.29.input_layernorm.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.29.mlp.down_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.29.mlp.gate_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.29.mlp.up_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.29.post_attention_layernorm.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.29.self_attn.k_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.29.self_attn.o_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.29.self_attn.q_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.29.self_attn.v_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.3.input_layernorm.weight": "model-00002-of-00010.safetensors",
    "language_model.model.layers.3.mlp.down_proj.weight": "model-00002-of-00010.safetensors",
    "language_model.model.layers.3.mlp.gate_proj.weight": "model-00002-of-00010.safetensors",
    "language_model.model.layers.3.mlp.up_proj.weight": "model-00002-of-00010.safetensors",
    "language_model.model.layers.3.post_attention_layernorm.weight": "model-00002-of-00010.safetensors",
    "language_model.model.layers.3.self_attn.k_proj.weight": "model-00002-of-00010.safetensors",
    "language_model.model.layers.3.self_attn.o_proj.weight": "model-00002-of-00010.safetensors",
    "language_model.model.layers.3.self_attn.q_proj.weight": "model-00002-of-00010.safetensors",
    "language_model.model.layers.3.self_attn.v_proj.weight": "model-00002-of-00010.safetensors",
    "language_model.model.layers.30.input_layernorm.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.30.mlp.down_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.30.mlp.gate_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.30.mlp.up_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.30.post_attention_layernorm.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.30.self_attn.k_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.30.self_attn.o_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.30.self_attn.q_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.30.self_attn.v_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.31.input_layernorm.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.31.mlp.down_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.31.mlp.gate_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.31.mlp.up_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.31.post_attention_layernorm.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.31.self_attn.k_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.31.self_attn.o_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.31.self_attn.q_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.31.self_attn.v_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.32.input_layernorm.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.32.mlp.down_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.32.mlp.gate_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.32.mlp.up_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.32.post_attention_layernorm.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.32.self_attn.k_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.32.self_attn.o_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.32.self_attn.q_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.32.self_attn.v_proj.weight": "model-00008-of-00010.safetensors",
    "language_model.model.layers.33.input_layernorm.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.33.mlp.down_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.33.mlp.gate_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.33.mlp.up_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.33.post_attention_layernorm.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.33.self_attn.k_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.33.self_attn.o_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.33.self_attn.q_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.33.self_attn.v_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.34.input_layernorm.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.34.mlp.down_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.34.mlp.gate_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.34.mlp.up_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.34.post_attention_layernorm.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.34.self_attn.k_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.34.self_attn.o_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.34.self_attn.q_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.34.self_attn.v_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.35.input_layernorm.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.35.mlp.down_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.35.mlp.gate_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.35.mlp.up_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.35.post_attention_layernorm.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.35.self_attn.k_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.35.self_attn.o_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.35.self_attn.q_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.35.self_attn.v_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.36.input_layernorm.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.36.mlp.down_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.36.mlp.gate_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.36.mlp.up_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.36.post_attention_layernorm.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.36.self_attn.k_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.36.self_attn.o_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.36.self_attn.q_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.36.self_attn.v_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.37.input_layernorm.weight": "model-00010-of-00010.safetensors",
    "language_model.model.layers.37.mlp.down_proj.weight": "model-00010-of-00010.safetensors",
    "language_model.model.layers.37.mlp.gate_proj.weight": "model-00010-of-00010.safetensors",
    "language_model.model.layers.37.mlp.up_proj.weight": "model-00010-of-00010.safetensors",
    "language_model.model.layers.37.post_attention_layernorm.weight": "model-00010-of-00010.safetensors",
    "language_model.model.layers.37.self_attn.k_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.37.self_attn.o_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.37.self_attn.q_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.37.self_attn.v_proj.weight": "model-00009-of-00010.safetensors",
    "language_model.model.layers.38.input_layernorm.weight": "model-00010-of-00010.safetensors",
    "language_model.model.layers.38.mlp.down_proj.weight": "model-00010-of-00010.safetensors",
    "language_model.model.layers.38.mlp.gate_proj.weight": "model-00010-of-00010.safetensors",
    "language_model.model.layers.38.mlp.up_proj.weight": "model-00010-of-00010.safetensors",
    "language_model.model.layers.38.post_attention_layernorm.weight": "model-00010-of-00010.safetensors",
    "language_model.model.layers.38.self_attn.k_proj.weight": "model-00010-of-00010.safetensors",
    "language_model.model.layers.38.self_attn.o_proj.weight": "model-00010-of-00010.safetensors",
    "language_model.model.layers.38.self_attn.q_proj.weight": "model-00010-of-00010.safetensors",
    "language_model.model.layers.38.self_attn.v_proj.weight": "model-00010-of-00010.safetensors",
    "language_model.model.layers.39.input_layernorm.weight": "model-00010-of-00010.safetensors",
    "language_model.model.layers.39.mlp.down_proj.weight": "model-00010-of-00010.safetensors",
    "language_model.model.layers.39.mlp.gate_proj.weight": "model-00010-of-00010.safetensors",
    "language_model.model.layers.39.mlp.up_proj.weight": "model-00010-of-00010.safetensors",
    "language_model.model.layers.39.post_attention_layernorm.weight": "model-00010-of-00010.safetensors",
    "language_model.model.layers.39.self_attn.k_proj.weight": "model-00010-of-00010.safetensors",
    "language_model.model.layers.39.self_attn.o_proj.weight": "model-00010-of-00010.safetensors",
313
+ "language_model.model.layers.39.self_attn.q_proj.weight": "model-00010-of-00010.safetensors",
314
+ "language_model.model.layers.39.self_attn.v_proj.weight": "model-00010-of-00010.safetensors",
315
+ "language_model.model.layers.4.input_layernorm.weight": "model-00002-of-00010.safetensors",
316
+ "language_model.model.layers.4.mlp.down_proj.weight": "model-00002-of-00010.safetensors",
317
+ "language_model.model.layers.4.mlp.gate_proj.weight": "model-00002-of-00010.safetensors",
318
+ "language_model.model.layers.4.mlp.up_proj.weight": "model-00002-of-00010.safetensors",
319
+ "language_model.model.layers.4.post_attention_layernorm.weight": "model-00002-of-00010.safetensors",
320
+ "language_model.model.layers.4.self_attn.k_proj.weight": "model-00002-of-00010.safetensors",
321
+ "language_model.model.layers.4.self_attn.o_proj.weight": "model-00002-of-00010.safetensors",
322
+ "language_model.model.layers.4.self_attn.q_proj.weight": "model-00002-of-00010.safetensors",
323
+ "language_model.model.layers.4.self_attn.v_proj.weight": "model-00002-of-00010.safetensors",
324
+ "language_model.model.layers.5.input_layernorm.weight": "model-00002-of-00010.safetensors",
325
+ "language_model.model.layers.5.mlp.down_proj.weight": "model-00002-of-00010.safetensors",
326
+ "language_model.model.layers.5.mlp.gate_proj.weight": "model-00002-of-00010.safetensors",
327
+ "language_model.model.layers.5.mlp.up_proj.weight": "model-00002-of-00010.safetensors",
328
+ "language_model.model.layers.5.post_attention_layernorm.weight": "model-00002-of-00010.safetensors",
329
+ "language_model.model.layers.5.self_attn.k_proj.weight": "model-00002-of-00010.safetensors",
330
+ "language_model.model.layers.5.self_attn.o_proj.weight": "model-00002-of-00010.safetensors",
331
+ "language_model.model.layers.5.self_attn.q_proj.weight": "model-00002-of-00010.safetensors",
332
+ "language_model.model.layers.5.self_attn.v_proj.weight": "model-00002-of-00010.safetensors",
333
+ "language_model.model.layers.6.input_layernorm.weight": "model-00003-of-00010.safetensors",
334
+ "language_model.model.layers.6.mlp.down_proj.weight": "model-00003-of-00010.safetensors",
335
+ "language_model.model.layers.6.mlp.gate_proj.weight": "model-00002-of-00010.safetensors",
336
+ "language_model.model.layers.6.mlp.up_proj.weight": "model-00002-of-00010.safetensors",
337
+ "language_model.model.layers.6.post_attention_layernorm.weight": "model-00003-of-00010.safetensors",
338
+ "language_model.model.layers.6.self_attn.k_proj.weight": "model-00002-of-00010.safetensors",
339
+ "language_model.model.layers.6.self_attn.o_proj.weight": "model-00002-of-00010.safetensors",
340
+ "language_model.model.layers.6.self_attn.q_proj.weight": "model-00002-of-00010.safetensors",
341
+ "language_model.model.layers.6.self_attn.v_proj.weight": "model-00002-of-00010.safetensors",
342
+ "language_model.model.layers.7.input_layernorm.weight": "model-00003-of-00010.safetensors",
343
+ "language_model.model.layers.7.mlp.down_proj.weight": "model-00003-of-00010.safetensors",
344
+ "language_model.model.layers.7.mlp.gate_proj.weight": "model-00003-of-00010.safetensors",
345
+ "language_model.model.layers.7.mlp.up_proj.weight": "model-00003-of-00010.safetensors",
346
+ "language_model.model.layers.7.post_attention_layernorm.weight": "model-00003-of-00010.safetensors",
347
+ "language_model.model.layers.7.self_attn.k_proj.weight": "model-00003-of-00010.safetensors",
348
+ "language_model.model.layers.7.self_attn.o_proj.weight": "model-00003-of-00010.safetensors",
349
+ "language_model.model.layers.7.self_attn.q_proj.weight": "model-00003-of-00010.safetensors",
350
+ "language_model.model.layers.7.self_attn.v_proj.weight": "model-00003-of-00010.safetensors",
351
+ "language_model.model.layers.8.input_layernorm.weight": "model-00003-of-00010.safetensors",
352
+ "language_model.model.layers.8.mlp.down_proj.weight": "model-00003-of-00010.safetensors",
353
+ "language_model.model.layers.8.mlp.gate_proj.weight": "model-00003-of-00010.safetensors",
354
+ "language_model.model.layers.8.mlp.up_proj.weight": "model-00003-of-00010.safetensors",
355
+ "language_model.model.layers.8.post_attention_layernorm.weight": "model-00003-of-00010.safetensors",
356
+ "language_model.model.layers.8.self_attn.k_proj.weight": "model-00003-of-00010.safetensors",
357
+ "language_model.model.layers.8.self_attn.o_proj.weight": "model-00003-of-00010.safetensors",
358
+ "language_model.model.layers.8.self_attn.q_proj.weight": "model-00003-of-00010.safetensors",
359
+ "language_model.model.layers.8.self_attn.v_proj.weight": "model-00003-of-00010.safetensors",
360
+ "language_model.model.layers.9.input_layernorm.weight": "model-00003-of-00010.safetensors",
361
+ "language_model.model.layers.9.mlp.down_proj.weight": "model-00003-of-00010.safetensors",
362
+ "language_model.model.layers.9.mlp.gate_proj.weight": "model-00003-of-00010.safetensors",
363
+ "language_model.model.layers.9.mlp.up_proj.weight": "model-00003-of-00010.safetensors",
364
+ "language_model.model.layers.9.post_attention_layernorm.weight": "model-00003-of-00010.safetensors",
365
+ "language_model.model.layers.9.self_attn.k_proj.weight": "model-00003-of-00010.safetensors",
366
+ "language_model.model.layers.9.self_attn.o_proj.weight": "model-00003-of-00010.safetensors",
367
+ "language_model.model.layers.9.self_attn.q_proj.weight": "model-00003-of-00010.safetensors",
368
+ "language_model.model.layers.9.self_attn.v_proj.weight": "model-00003-of-00010.safetensors",
369
+ "language_model.model.norm.weight": "model-00010-of-00010.safetensors",
370
+ "multi_modal_projector.linear_1.weight": "model-00001-of-00010.safetensors",
371
+ "multi_modal_projector.linear_2.weight": "model-00001-of-00010.safetensors",
372
+ "multi_modal_projector.norm.weight": "model-00001-of-00010.safetensors",
373
+ "multi_modal_projector.patch_merger.merging_layer.weight": "model-00001-of-00010.safetensors",
374
+ "vision_tower.ln_pre.weight": "model-00001-of-00010.safetensors",
375
+ "vision_tower.patch_conv.weight": "model-00001-of-00010.safetensors",
376
+ "vision_tower.transformer.layers.0.attention.k_proj.weight": "model-00001-of-00010.safetensors",
377
+ "vision_tower.transformer.layers.0.attention.o_proj.weight": "model-00001-of-00010.safetensors",
378
+ "vision_tower.transformer.layers.0.attention.q_proj.weight": "model-00001-of-00010.safetensors",
379
+ "vision_tower.transformer.layers.0.attention.v_proj.weight": "model-00001-of-00010.safetensors",
380
+ "vision_tower.transformer.layers.0.attention_norm.weight": "model-00001-of-00010.safetensors",
381
+ "vision_tower.transformer.layers.0.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
382
+ "vision_tower.transformer.layers.0.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
383
+ "vision_tower.transformer.layers.0.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
384
+ "vision_tower.transformer.layers.0.ffn_norm.weight": "model-00001-of-00010.safetensors",
385
+ "vision_tower.transformer.layers.1.attention.k_proj.weight": "model-00001-of-00010.safetensors",
386
+ "vision_tower.transformer.layers.1.attention.o_proj.weight": "model-00001-of-00010.safetensors",
387
+ "vision_tower.transformer.layers.1.attention.q_proj.weight": "model-00001-of-00010.safetensors",
388
+ "vision_tower.transformer.layers.1.attention.v_proj.weight": "model-00001-of-00010.safetensors",
389
+ "vision_tower.transformer.layers.1.attention_norm.weight": "model-00001-of-00010.safetensors",
390
+ "vision_tower.transformer.layers.1.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
391
+ "vision_tower.transformer.layers.1.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
392
+ "vision_tower.transformer.layers.1.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
393
+ "vision_tower.transformer.layers.1.ffn_norm.weight": "model-00001-of-00010.safetensors",
394
+ "vision_tower.transformer.layers.10.attention.k_proj.weight": "model-00001-of-00010.safetensors",
395
+ "vision_tower.transformer.layers.10.attention.o_proj.weight": "model-00001-of-00010.safetensors",
396
+ "vision_tower.transformer.layers.10.attention.q_proj.weight": "model-00001-of-00010.safetensors",
397
+ "vision_tower.transformer.layers.10.attention.v_proj.weight": "model-00001-of-00010.safetensors",
398
+ "vision_tower.transformer.layers.10.attention_norm.weight": "model-00001-of-00010.safetensors",
399
+ "vision_tower.transformer.layers.10.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
400
+ "vision_tower.transformer.layers.10.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
401
+ "vision_tower.transformer.layers.10.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
402
+ "vision_tower.transformer.layers.10.ffn_norm.weight": "model-00001-of-00010.safetensors",
403
+ "vision_tower.transformer.layers.11.attention.k_proj.weight": "model-00001-of-00010.safetensors",
404
+ "vision_tower.transformer.layers.11.attention.o_proj.weight": "model-00001-of-00010.safetensors",
405
+ "vision_tower.transformer.layers.11.attention.q_proj.weight": "model-00001-of-00010.safetensors",
406
+ "vision_tower.transformer.layers.11.attention.v_proj.weight": "model-00001-of-00010.safetensors",
407
+ "vision_tower.transformer.layers.11.attention_norm.weight": "model-00001-of-00010.safetensors",
408
+ "vision_tower.transformer.layers.11.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
409
+ "vision_tower.transformer.layers.11.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
410
+ "vision_tower.transformer.layers.11.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
411
+ "vision_tower.transformer.layers.11.ffn_norm.weight": "model-00001-of-00010.safetensors",
412
+ "vision_tower.transformer.layers.12.attention.k_proj.weight": "model-00001-of-00010.safetensors",
413
+ "vision_tower.transformer.layers.12.attention.o_proj.weight": "model-00001-of-00010.safetensors",
414
+ "vision_tower.transformer.layers.12.attention.q_proj.weight": "model-00001-of-00010.safetensors",
415
+ "vision_tower.transformer.layers.12.attention.v_proj.weight": "model-00001-of-00010.safetensors",
416
+ "vision_tower.transformer.layers.12.attention_norm.weight": "model-00001-of-00010.safetensors",
417
+ "vision_tower.transformer.layers.12.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
418
+ "vision_tower.transformer.layers.12.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
419
+ "vision_tower.transformer.layers.12.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
420
+ "vision_tower.transformer.layers.12.ffn_norm.weight": "model-00001-of-00010.safetensors",
421
+ "vision_tower.transformer.layers.13.attention.k_proj.weight": "model-00001-of-00010.safetensors",
422
+ "vision_tower.transformer.layers.13.attention.o_proj.weight": "model-00001-of-00010.safetensors",
423
+ "vision_tower.transformer.layers.13.attention.q_proj.weight": "model-00001-of-00010.safetensors",
424
+ "vision_tower.transformer.layers.13.attention.v_proj.weight": "model-00001-of-00010.safetensors",
425
+ "vision_tower.transformer.layers.13.attention_norm.weight": "model-00001-of-00010.safetensors",
426
+ "vision_tower.transformer.layers.13.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
427
+ "vision_tower.transformer.layers.13.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
428
+ "vision_tower.transformer.layers.13.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
429
+ "vision_tower.transformer.layers.13.ffn_norm.weight": "model-00001-of-00010.safetensors",
430
+ "vision_tower.transformer.layers.14.attention.k_proj.weight": "model-00001-of-00010.safetensors",
431
+ "vision_tower.transformer.layers.14.attention.o_proj.weight": "model-00001-of-00010.safetensors",
432
+ "vision_tower.transformer.layers.14.attention.q_proj.weight": "model-00001-of-00010.safetensors",
433
+ "vision_tower.transformer.layers.14.attention.v_proj.weight": "model-00001-of-00010.safetensors",
434
+ "vision_tower.transformer.layers.14.attention_norm.weight": "model-00001-of-00010.safetensors",
435
+ "vision_tower.transformer.layers.14.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
436
+ "vision_tower.transformer.layers.14.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
437
+ "vision_tower.transformer.layers.14.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
438
+ "vision_tower.transformer.layers.14.ffn_norm.weight": "model-00001-of-00010.safetensors",
439
+ "vision_tower.transformer.layers.15.attention.k_proj.weight": "model-00001-of-00010.safetensors",
440
+ "vision_tower.transformer.layers.15.attention.o_proj.weight": "model-00001-of-00010.safetensors",
441
+ "vision_tower.transformer.layers.15.attention.q_proj.weight": "model-00001-of-00010.safetensors",
442
+ "vision_tower.transformer.layers.15.attention.v_proj.weight": "model-00001-of-00010.safetensors",
443
+ "vision_tower.transformer.layers.15.attention_norm.weight": "model-00001-of-00010.safetensors",
444
+ "vision_tower.transformer.layers.15.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
445
+ "vision_tower.transformer.layers.15.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
446
+ "vision_tower.transformer.layers.15.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
447
+ "vision_tower.transformer.layers.15.ffn_norm.weight": "model-00001-of-00010.safetensors",
448
+ "vision_tower.transformer.layers.16.attention.k_proj.weight": "model-00001-of-00010.safetensors",
449
+ "vision_tower.transformer.layers.16.attention.o_proj.weight": "model-00001-of-00010.safetensors",
450
+ "vision_tower.transformer.layers.16.attention.q_proj.weight": "model-00001-of-00010.safetensors",
451
+ "vision_tower.transformer.layers.16.attention.v_proj.weight": "model-00001-of-00010.safetensors",
452
+ "vision_tower.transformer.layers.16.attention_norm.weight": "model-00001-of-00010.safetensors",
453
+ "vision_tower.transformer.layers.16.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
454
+ "vision_tower.transformer.layers.16.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
455
+ "vision_tower.transformer.layers.16.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
456
+ "vision_tower.transformer.layers.16.ffn_norm.weight": "model-00001-of-00010.safetensors",
457
+ "vision_tower.transformer.layers.17.attention.k_proj.weight": "model-00001-of-00010.safetensors",
458
+ "vision_tower.transformer.layers.17.attention.o_proj.weight": "model-00001-of-00010.safetensors",
459
+ "vision_tower.transformer.layers.17.attention.q_proj.weight": "model-00001-of-00010.safetensors",
460
+ "vision_tower.transformer.layers.17.attention.v_proj.weight": "model-00001-of-00010.safetensors",
461
+ "vision_tower.transformer.layers.17.attention_norm.weight": "model-00001-of-00010.safetensors",
462
+ "vision_tower.transformer.layers.17.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
463
+ "vision_tower.transformer.layers.17.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
464
+ "vision_tower.transformer.layers.17.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
465
+ "vision_tower.transformer.layers.17.ffn_norm.weight": "model-00001-of-00010.safetensors",
466
+ "vision_tower.transformer.layers.18.attention.k_proj.weight": "model-00001-of-00010.safetensors",
467
+ "vision_tower.transformer.layers.18.attention.o_proj.weight": "model-00001-of-00010.safetensors",
468
+ "vision_tower.transformer.layers.18.attention.q_proj.weight": "model-00001-of-00010.safetensors",
469
+ "vision_tower.transformer.layers.18.attention.v_proj.weight": "model-00001-of-00010.safetensors",
470
+ "vision_tower.transformer.layers.18.attention_norm.weight": "model-00001-of-00010.safetensors",
471
+ "vision_tower.transformer.layers.18.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
472
+ "vision_tower.transformer.layers.18.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
473
+ "vision_tower.transformer.layers.18.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
474
+ "vision_tower.transformer.layers.18.ffn_norm.weight": "model-00001-of-00010.safetensors",
475
+ "vision_tower.transformer.layers.19.attention.k_proj.weight": "model-00001-of-00010.safetensors",
476
+ "vision_tower.transformer.layers.19.attention.o_proj.weight": "model-00001-of-00010.safetensors",
477
+ "vision_tower.transformer.layers.19.attention.q_proj.weight": "model-00001-of-00010.safetensors",
478
+ "vision_tower.transformer.layers.19.attention.v_proj.weight": "model-00001-of-00010.safetensors",
479
+ "vision_tower.transformer.layers.19.attention_norm.weight": "model-00001-of-00010.safetensors",
480
+ "vision_tower.transformer.layers.19.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
481
+ "vision_tower.transformer.layers.19.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
482
+ "vision_tower.transformer.layers.19.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
483
+ "vision_tower.transformer.layers.19.ffn_norm.weight": "model-00001-of-00010.safetensors",
484
+ "vision_tower.transformer.layers.2.attention.k_proj.weight": "model-00001-of-00010.safetensors",
485
+ "vision_tower.transformer.layers.2.attention.o_proj.weight": "model-00001-of-00010.safetensors",
486
+ "vision_tower.transformer.layers.2.attention.q_proj.weight": "model-00001-of-00010.safetensors",
487
+ "vision_tower.transformer.layers.2.attention.v_proj.weight": "model-00001-of-00010.safetensors",
488
+ "vision_tower.transformer.layers.2.attention_norm.weight": "model-00001-of-00010.safetensors",
489
+ "vision_tower.transformer.layers.2.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
490
+ "vision_tower.transformer.layers.2.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
491
+ "vision_tower.transformer.layers.2.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
492
+ "vision_tower.transformer.layers.2.ffn_norm.weight": "model-00001-of-00010.safetensors",
493
+ "vision_tower.transformer.layers.20.attention.k_proj.weight": "model-00001-of-00010.safetensors",
494
+ "vision_tower.transformer.layers.20.attention.o_proj.weight": "model-00001-of-00010.safetensors",
495
+ "vision_tower.transformer.layers.20.attention.q_proj.weight": "model-00001-of-00010.safetensors",
496
+ "vision_tower.transformer.layers.20.attention.v_proj.weight": "model-00001-of-00010.safetensors",
497
+ "vision_tower.transformer.layers.20.attention_norm.weight": "model-00001-of-00010.safetensors",
498
+ "vision_tower.transformer.layers.20.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
499
+ "vision_tower.transformer.layers.20.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
500
+ "vision_tower.transformer.layers.20.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
501
+ "vision_tower.transformer.layers.20.ffn_norm.weight": "model-00001-of-00010.safetensors",
502
+ "vision_tower.transformer.layers.21.attention.k_proj.weight": "model-00001-of-00010.safetensors",
503
+ "vision_tower.transformer.layers.21.attention.o_proj.weight": "model-00001-of-00010.safetensors",
504
+ "vision_tower.transformer.layers.21.attention.q_proj.weight": "model-00001-of-00010.safetensors",
505
+ "vision_tower.transformer.layers.21.attention.v_proj.weight": "model-00001-of-00010.safetensors",
506
+ "vision_tower.transformer.layers.21.attention_norm.weight": "model-00001-of-00010.safetensors",
507
+ "vision_tower.transformer.layers.21.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
508
+ "vision_tower.transformer.layers.21.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
509
+ "vision_tower.transformer.layers.21.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
510
+ "vision_tower.transformer.layers.21.ffn_norm.weight": "model-00001-of-00010.safetensors",
511
+ "vision_tower.transformer.layers.22.attention.k_proj.weight": "model-00001-of-00010.safetensors",
512
+ "vision_tower.transformer.layers.22.attention.o_proj.weight": "model-00001-of-00010.safetensors",
513
+ "vision_tower.transformer.layers.22.attention.q_proj.weight": "model-00001-of-00010.safetensors",
514
+ "vision_tower.transformer.layers.22.attention.v_proj.weight": "model-00001-of-00010.safetensors",
515
+ "vision_tower.transformer.layers.22.attention_norm.weight": "model-00001-of-00010.safetensors",
516
+ "vision_tower.transformer.layers.22.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
517
+ "vision_tower.transformer.layers.22.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
518
+ "vision_tower.transformer.layers.22.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
519
+ "vision_tower.transformer.layers.22.ffn_norm.weight": "model-00001-of-00010.safetensors",
520
+ "vision_tower.transformer.layers.23.attention.k_proj.weight": "model-00001-of-00010.safetensors",
521
+ "vision_tower.transformer.layers.23.attention.o_proj.weight": "model-00001-of-00010.safetensors",
522
+ "vision_tower.transformer.layers.23.attention.q_proj.weight": "model-00001-of-00010.safetensors",
523
+ "vision_tower.transformer.layers.23.attention.v_proj.weight": "model-00001-of-00010.safetensors",
524
+ "vision_tower.transformer.layers.23.attention_norm.weight": "model-00001-of-00010.safetensors",
525
+ "vision_tower.transformer.layers.23.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
526
+ "vision_tower.transformer.layers.23.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
527
+ "vision_tower.transformer.layers.23.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
528
+ "vision_tower.transformer.layers.23.ffn_norm.weight": "model-00001-of-00010.safetensors",
529
+ "vision_tower.transformer.layers.3.attention.k_proj.weight": "model-00001-of-00010.safetensors",
530
+ "vision_tower.transformer.layers.3.attention.o_proj.weight": "model-00001-of-00010.safetensors",
531
+ "vision_tower.transformer.layers.3.attention.q_proj.weight": "model-00001-of-00010.safetensors",
532
+ "vision_tower.transformer.layers.3.attention.v_proj.weight": "model-00001-of-00010.safetensors",
533
+ "vision_tower.transformer.layers.3.attention_norm.weight": "model-00001-of-00010.safetensors",
534
+ "vision_tower.transformer.layers.3.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
535
+ "vision_tower.transformer.layers.3.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
536
+ "vision_tower.transformer.layers.3.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
537
+ "vision_tower.transformer.layers.3.ffn_norm.weight": "model-00001-of-00010.safetensors",
538
+ "vision_tower.transformer.layers.4.attention.k_proj.weight": "model-00001-of-00010.safetensors",
539
+ "vision_tower.transformer.layers.4.attention.o_proj.weight": "model-00001-of-00010.safetensors",
540
+ "vision_tower.transformer.layers.4.attention.q_proj.weight": "model-00001-of-00010.safetensors",
541
+ "vision_tower.transformer.layers.4.attention.v_proj.weight": "model-00001-of-00010.safetensors",
542
+ "vision_tower.transformer.layers.4.attention_norm.weight": "model-00001-of-00010.safetensors",
543
+ "vision_tower.transformer.layers.4.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
544
+ "vision_tower.transformer.layers.4.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
545
+ "vision_tower.transformer.layers.4.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
546
+ "vision_tower.transformer.layers.4.ffn_norm.weight": "model-00001-of-00010.safetensors",
547
+ "vision_tower.transformer.layers.5.attention.k_proj.weight": "model-00001-of-00010.safetensors",
548
+ "vision_tower.transformer.layers.5.attention.o_proj.weight": "model-00001-of-00010.safetensors",
549
+ "vision_tower.transformer.layers.5.attention.q_proj.weight": "model-00001-of-00010.safetensors",
550
+ "vision_tower.transformer.layers.5.attention.v_proj.weight": "model-00001-of-00010.safetensors",
551
+ "vision_tower.transformer.layers.5.attention_norm.weight": "model-00001-of-00010.safetensors",
552
+ "vision_tower.transformer.layers.5.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
553
+ "vision_tower.transformer.layers.5.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
554
+ "vision_tower.transformer.layers.5.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
555
+ "vision_tower.transformer.layers.5.ffn_norm.weight": "model-00001-of-00010.safetensors",
556
+ "vision_tower.transformer.layers.6.attention.k_proj.weight": "model-00001-of-00010.safetensors",
557
+ "vision_tower.transformer.layers.6.attention.o_proj.weight": "model-00001-of-00010.safetensors",
558
+ "vision_tower.transformer.layers.6.attention.q_proj.weight": "model-00001-of-00010.safetensors",
559
+ "vision_tower.transformer.layers.6.attention.v_proj.weight": "model-00001-of-00010.safetensors",
560
+ "vision_tower.transformer.layers.6.attention_norm.weight": "model-00001-of-00010.safetensors",
561
+ "vision_tower.transformer.layers.6.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
562
+ "vision_tower.transformer.layers.6.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
563
+ "vision_tower.transformer.layers.6.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
564
+ "vision_tower.transformer.layers.6.ffn_norm.weight": "model-00001-of-00010.safetensors",
565
+ "vision_tower.transformer.layers.7.attention.k_proj.weight": "model-00001-of-00010.safetensors",
566
+ "vision_tower.transformer.layers.7.attention.o_proj.weight": "model-00001-of-00010.safetensors",
567
+ "vision_tower.transformer.layers.7.attention.q_proj.weight": "model-00001-of-00010.safetensors",
568
+ "vision_tower.transformer.layers.7.attention.v_proj.weight": "model-00001-of-00010.safetensors",
569
+ "vision_tower.transformer.layers.7.attention_norm.weight": "model-00001-of-00010.safetensors",
570
+ "vision_tower.transformer.layers.7.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
571
+ "vision_tower.transformer.layers.7.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
572
+ "vision_tower.transformer.layers.7.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
573
+ "vision_tower.transformer.layers.7.ffn_norm.weight": "model-00001-of-00010.safetensors",
574
+ "vision_tower.transformer.layers.8.attention.k_proj.weight": "model-00001-of-00010.safetensors",
575
+ "vision_tower.transformer.layers.8.attention.o_proj.weight": "model-00001-of-00010.safetensors",
576
+ "vision_tower.transformer.layers.8.attention.q_proj.weight": "model-00001-of-00010.safetensors",
577
+ "vision_tower.transformer.layers.8.attention.v_proj.weight": "model-00001-of-00010.safetensors",
578
+ "vision_tower.transformer.layers.8.attention_norm.weight": "model-00001-of-00010.safetensors",
579
+ "vision_tower.transformer.layers.8.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
580
+ "vision_tower.transformer.layers.8.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
581
+ "vision_tower.transformer.layers.8.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
582
+ "vision_tower.transformer.layers.8.ffn_norm.weight": "model-00001-of-00010.safetensors",
583
+ "vision_tower.transformer.layers.9.attention.k_proj.weight": "model-00001-of-00010.safetensors",
584
+ "vision_tower.transformer.layers.9.attention.o_proj.weight": "model-00001-of-00010.safetensors",
585
+ "vision_tower.transformer.layers.9.attention.q_proj.weight": "model-00001-of-00010.safetensors",
586
+ "vision_tower.transformer.layers.9.attention.v_proj.weight": "model-00001-of-00010.safetensors",
587
+ "vision_tower.transformer.layers.9.attention_norm.weight": "model-00001-of-00010.safetensors",
588
+ "vision_tower.transformer.layers.9.feed_forward.down_proj.weight": "model-00001-of-00010.safetensors",
589
+ "vision_tower.transformer.layers.9.feed_forward.gate_proj.weight": "model-00001-of-00010.safetensors",
590
+ "vision_tower.transformer.layers.9.feed_forward.up_proj.weight": "model-00001-of-00010.safetensors",
591
+ "vision_tower.transformer.layers.9.ffn_norm.weight": "model-00001-of-00010.safetensors"
592
+ }
593
+ }
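The `weight_map` above is a standard safetensors shard index: every parameter name points at the shard file that stores it, so a loader only opens the shards it actually needs. A minimal sketch of resolving a single tensor by hand (the local directory name is illustrative, and the ten shards plus `model.safetensors.index.json` are assumed to be downloaded already):

```python
import json
from safetensors import safe_open

local_dir = "Huihui-Mistral-Small-3.2-24B-Instruct-2506-abliterated"  # hypothetical local path

# The index maps tensor names to shard filenames.
with open(f"{local_dir}/model.safetensors.index.json") as f:
    index = json.load(f)

name = "language_model.model.layers.32.self_attn.q_proj.weight"
shard = index["weight_map"][name]  # "model-00008-of-00010.safetensors"

# Open only the shard that holds this tensor.
with safe_open(f"{local_dir}/{shard}", framework="pt") as f:
    tensor = f.get_tensor(name)
print(tensor.shape)
```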
params.json ADDED
@@ -0,0 +1,29 @@
+ {
+ "dim": 5120,
+ "n_layers": 40,
+ "head_dim": 128,
+ "hidden_dim": 32768,
+ "n_heads": 32,
+ "n_kv_heads": 8,
+ "rope_theta": 1000000000.0,
+ "norm_eps": 1e-05,
+ "vocab_size": 131072,
+ "vision_encoder": {
+ "hidden_size": 1024,
+ "num_channels": 3,
+ "max_image_size": 1540,
+ "patch_size": 14,
+ "rope_theta": 10000.0,
+ "intermediate_size": 4096,
+ "num_hidden_layers": 24,
+ "num_attention_heads": 16,
+ "adapter_bias": false,
+ "mm_projector_id": "patch_merge",
+ "spatial_merge_size": 2,
+ "add_pre_mm_projector_layer_norm": true,
+ "image_token_id": 10,
+ "image_break_token_id": 12,
+ "image_end_token_id": 13,
+ "image_size": 1540
+ }
+ }
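`params.json` is the model's native Mistral-format config. Since `n_kv_heads` (8) is smaller than `n_heads` (32), the attention is grouped-query: four query heads share each KV head. The projection shapes this implies can be read straight off the file; a quick sanity check:

```python
# Projection shapes implied by params.json (grouped-query attention).
dim, head_dim, n_heads, n_kv_heads = 5120, 128, 32, 8

q_out = n_heads * head_dim       # 4096: q_proj.weight is (4096, 5120)
kv_out = n_kv_heads * head_dim   # 1024: k_proj/v_proj.weight are (1024, 5120)
groups = n_heads // n_kv_heads   # 4 query heads per KV head

print(q_out, kv_out, groups)
```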
preprocessor_config.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "crop_size": null,
+ "data_format": "channels_first",
+ "default_to_square": true,
+ "device": null,
+ "do_center_crop": null,
+ "do_convert_rgb": true,
+ "do_normalize": true,
+ "do_rescale": true,
+ "do_resize": true,
+ "image_mean": [
+ 0.48145466,
+ 0.4578275,
+ 0.40821073
+ ],
+ "image_processor_type": "PixtralImageProcessorFast",
+ "image_std": [
+ 0.26862954,
+ 0.26130258,
+ 0.27577711
+ ],
+ "input_data_format": null,
+ "patch_size": 14,
+ "processor_class": "PixtralProcessor",
+ "resample": 3,
+ "rescale_factor": 0.00392156862745098,
+ "return_tensors": null,
+ "size": {
+ "longest_edge": 1540
+ }
+ }
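These settings describe the usual CLIP-style pipeline: resize so the longest edge is at most 1540 px, rescale pixels by 1/255, normalize with the mean/std above, and emit channels-first arrays. A rough sketch of that pipeline (not the actual `PixtralImageProcessorFast` code, which also snaps sizes to multiples of `patch_size`):

```python
import numpy as np
from PIL import Image

MEAN = np.array([0.48145466, 0.4578275, 0.40821073], dtype=np.float32)
STD = np.array([0.26862954, 0.26130258, 0.27577711], dtype=np.float32)

def preprocess(img: Image.Image, longest_edge: int = 1540) -> np.ndarray:
    # do_resize: shrink so the longest edge fits the budget.
    scale = min(1.0, longest_edge / max(img.size))
    if scale < 1.0:
        img = img.resize((round(img.width * scale), round(img.height * scale)))
    # do_rescale (factor 1/255) followed by do_normalize.
    x = np.asarray(img.convert("RGB"), dtype=np.float32) * 0.00392156862745098
    x = (x - MEAN) / STD
    # data_format: channels_first.
    return x.transpose(2, 0, 1)
```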
processor_config.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "image_break_token": "[IMG_BREAK]",
+ "image_end_token": "[IMG_END]",
+ "image_token": "[IMG]",
+ "patch_size": 14,
+ "processor_class": "PixtralProcessor",
+ "spatial_merge_size": 2
+ }
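With `patch_size` 14 and `spatial_merge_size` 2, one `[IMG]` token ends up covering a 28x28 pixel block; under the Pixtral convention, each row of image tokens is closed by `[IMG_BREAK]` and the final row by `[IMG_END]`. A rough token-budget estimate under those assumptions:

```python
import math

# Rough count: rows*cols [IMG] tokens plus one break/end token per row.
def image_token_count(height: int, width: int, patch: int = 14, merge: int = 2) -> int:
    rows = math.ceil(height / (patch * merge))
    cols = math.ceil(width / (patch * merge))
    return rows * cols + rows

print(image_token_count(1540, 1540))  # 55*55 + 55 = 3080
```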
special_tokens_map.json ADDED
@@ -0,0 +1,1032 @@
+ {
+ "additional_special_tokens": [
+ "<unk>",
+ "<s>",
+ "</s>",
+ "[INST]",
+ "[/INST]",
+ "[AVAILABLE_TOOLS]",
+ "[/AVAILABLE_TOOLS]",
+ "[TOOL_RESULTS]",
+ "[/TOOL_RESULTS]",
+ "[TOOL_CALLS]",
+ "[IMG]",
+ "<pad>",
+ "[IMG_BREAK]",
+ "[IMG_END]",
+ "[PREFIX]",
+ "[MIDDLE]",
+ "[SUFFIX]",
+ "[SYSTEM_PROMPT]",
+ "[/SYSTEM_PROMPT]",
+ "[TOOL_CONTENT]",
+ "<SPECIAL_20>",
+ "<SPECIAL_21>",
+ "<SPECIAL_22>",
+ "<SPECIAL_23>",
+ "<SPECIAL_24>",
+ "<SPECIAL_25>",
+ "<SPECIAL_26>",
+ "<SPECIAL_27>",
+ "<SPECIAL_28>",
+ "<SPECIAL_29>",
+ "<SPECIAL_30>",
+ "<SPECIAL_31>",
+ "[ARGS]",
+ "[CALL_ID]",
+ "<SPECIAL_34>",
+ "<SPECIAL_35>",
+ "<SPECIAL_36>",
+ "<SPECIAL_37>",
+ "<SPECIAL_38>",
+ "<SPECIAL_39>",
+ "<SPECIAL_40>",
+ "<SPECIAL_41>",
+ "<SPECIAL_42>",
+ "<SPECIAL_43>",
+ "<SPECIAL_44>",
+ "<SPECIAL_45>",
+ "<SPECIAL_46>",
+ "<SPECIAL_47>",
+ "<SPECIAL_48>",
+ "<SPECIAL_49>",
+ "<SPECIAL_50>",
+ "<SPECIAL_51>",
+ "<SPECIAL_52>",
+ "<SPECIAL_53>",
+ "<SPECIAL_54>",
+ "<SPECIAL_55>",
+ "<SPECIAL_56>",
+ "<SPECIAL_57>",
+ "<SPECIAL_58>",
+ "<SPECIAL_59>",
+ "<SPECIAL_60>",
+ "<SPECIAL_61>",
+ "<SPECIAL_62>",
+ "<SPECIAL_63>",
+ "<SPECIAL_64>",
+ "<SPECIAL_65>",
+ "<SPECIAL_66>",
+ "<SPECIAL_67>",
+ "<SPECIAL_68>",
+ "<SPECIAL_69>",
+ "<SPECIAL_70>",
+ "<SPECIAL_71>",
+ "<SPECIAL_72>",
+ "<SPECIAL_73>",
+ "<SPECIAL_74>",
+ "<SPECIAL_75>",
+ "<SPECIAL_76>",
+ "<SPECIAL_77>",
+ "<SPECIAL_78>",
+ "<SPECIAL_79>",
+ "<SPECIAL_80>",
+ "<SPECIAL_81>",
+ "<SPECIAL_82>",
+ "<SPECIAL_83>",
+ "<SPECIAL_84>",
+ "<SPECIAL_85>",
+ "<SPECIAL_86>",
+ "<SPECIAL_87>",
+ "<SPECIAL_88>",
+ "<SPECIAL_89>",
+ "<SPECIAL_90>",
+ "<SPECIAL_91>",
+ "<SPECIAL_92>",
+ "<SPECIAL_93>",
+ "<SPECIAL_94>",
+ "<SPECIAL_95>",
+ "<SPECIAL_96>",
+ "<SPECIAL_97>",
+ "<SPECIAL_98>",
+ "<SPECIAL_99>",
+ "<SPECIAL_100>",
+ "<SPECIAL_101>",
+ "<SPECIAL_102>",
+ "<SPECIAL_103>",
+ "<SPECIAL_104>",
+ "<SPECIAL_105>",
+ "<SPECIAL_106>",
+ "<SPECIAL_107>",
+ "<SPECIAL_108>",
+ "<SPECIAL_109>",
+ "<SPECIAL_110>",
+ "<SPECIAL_111>",
+ "<SPECIAL_112>",
+ "<SPECIAL_113>",
+ "<SPECIAL_114>",
+ "<SPECIAL_115>",
+ "<SPECIAL_116>",
+ "<SPECIAL_117>",
+ "<SPECIAL_118>",
+ "<SPECIAL_119>",
+ "<SPECIAL_120>",
+ "<SPECIAL_121>",
+ "<SPECIAL_122>",
+ "<SPECIAL_123>",
+ "<SPECIAL_124>",
+ "<SPECIAL_125>",
+ "<SPECIAL_126>",
+ "<SPECIAL_127>",
+ "<SPECIAL_128>",
+ "<SPECIAL_129>",
+ "<SPECIAL_130>",
+ "<SPECIAL_131>",
+ "<SPECIAL_132>",
+ "<SPECIAL_133>",
+ "<SPECIAL_134>",
+ "<SPECIAL_135>",
+ "<SPECIAL_136>",
+ "<SPECIAL_137>",
+ "<SPECIAL_138>",
+ "<SPECIAL_139>",
+ "<SPECIAL_140>",
+ "<SPECIAL_141>",
+ "<SPECIAL_142>",
+ "<SPECIAL_143>",
+ "<SPECIAL_144>",
+ "<SPECIAL_145>",
+ "<SPECIAL_146>",
+ "<SPECIAL_147>",
+ "<SPECIAL_148>",
+ "<SPECIAL_149>",
+ "<SPECIAL_150>",
+ "<SPECIAL_151>",
+ "<SPECIAL_152>",
+ "<SPECIAL_153>",
+ "<SPECIAL_154>",
+ "<SPECIAL_155>",
+ "<SPECIAL_156>",
+ "<SPECIAL_157>",
+ "<SPECIAL_158>",
+ "<SPECIAL_159>",
+ "<SPECIAL_160>",
+ "<SPECIAL_161>",
+ "<SPECIAL_162>",
+ "<SPECIAL_163>",
+ "<SPECIAL_164>",
+ "<SPECIAL_165>",
+ "<SPECIAL_166>",
+ "<SPECIAL_167>",
+ "<SPECIAL_168>",
+ "<SPECIAL_169>",
+ "<SPECIAL_170>",
+ "<SPECIAL_171>",
+ "<SPECIAL_172>",
+ "<SPECIAL_173>",
+ "<SPECIAL_174>",
+ "<SPECIAL_175>",
+ "<SPECIAL_176>",
+ "<SPECIAL_177>",
+ "<SPECIAL_178>",
+ "<SPECIAL_179>",
+ "<SPECIAL_180>",
+ "<SPECIAL_181>",
+ "<SPECIAL_182>",
+ "<SPECIAL_183>",
+ "<SPECIAL_184>",
+ "<SPECIAL_185>",
+ "<SPECIAL_186>",
+ "<SPECIAL_187>",
+ "<SPECIAL_188>",
+ "<SPECIAL_189>",
+ "<SPECIAL_190>",
+ "<SPECIAL_191>",
+ "<SPECIAL_192>",
+ "<SPECIAL_193>",
+ "<SPECIAL_194>",
+ "<SPECIAL_195>",
+ "<SPECIAL_196>",
+ "<SPECIAL_197>",
+ "<SPECIAL_198>",
+ "<SPECIAL_199>",
+ "<SPECIAL_200>",
+ "<SPECIAL_201>",
+ "<SPECIAL_202>",
+ "<SPECIAL_203>",
+ "<SPECIAL_204>",
+ "<SPECIAL_205>",
+ "<SPECIAL_206>",
+ "<SPECIAL_207>",
+ "<SPECIAL_208>",
+ "<SPECIAL_209>",
+ "<SPECIAL_210>",
+ "<SPECIAL_211>",
+ "<SPECIAL_212>",
+ "<SPECIAL_213>",
+ "<SPECIAL_214>",
+ "<SPECIAL_215>",
+ "<SPECIAL_216>",
+ "<SPECIAL_217>",
+ "<SPECIAL_218>",
+ "<SPECIAL_219>",
+ "<SPECIAL_220>",
+ "<SPECIAL_221>",
+ "<SPECIAL_222>",
+ "<SPECIAL_223>",
+ "<SPECIAL_224>",
+ "<SPECIAL_225>",
+ "<SPECIAL_226>",
+ "<SPECIAL_227>",
+ "<SPECIAL_228>",
+ "<SPECIAL_229>",
+ "<SPECIAL_230>",
+ "<SPECIAL_231>",
+ "<SPECIAL_232>",
+ "<SPECIAL_233>",
+ "<SPECIAL_234>",
+ "<SPECIAL_235>",
+ "<SPECIAL_236>",
+ "<SPECIAL_237>",
+ "<SPECIAL_238>",
+ "<SPECIAL_239>",
+ "<SPECIAL_240>",
+ "<SPECIAL_241>",
+ "<SPECIAL_242>",
+ "<SPECIAL_243>",
+ "<SPECIAL_244>",
+ "<SPECIAL_245>",
+ "<SPECIAL_246>",
+ "<SPECIAL_247>",
+ "<SPECIAL_248>",
+ "<SPECIAL_249>",
+ "<SPECIAL_250>",
+ "<SPECIAL_251>",
+ "<SPECIAL_252>",
+ "<SPECIAL_253>",
+ "<SPECIAL_254>",
+ "<SPECIAL_255>",
+ "<SPECIAL_256>",
+ "<SPECIAL_257>",
+ "<SPECIAL_258>",
+ "<SPECIAL_259>",
+ "<SPECIAL_260>",
+ "<SPECIAL_261>",
+ "<SPECIAL_262>",
+ "<SPECIAL_263>",
+ "<SPECIAL_264>",
+ "<SPECIAL_265>",
+ "<SPECIAL_266>",
+ "<SPECIAL_267>",
+ "<SPECIAL_268>",
+ "<SPECIAL_269>",
+ "<SPECIAL_270>",
+ "<SPECIAL_271>",
+ "<SPECIAL_272>",
+ "<SPECIAL_273>",
+ "<SPECIAL_274>",
+ "<SPECIAL_275>",
+ "<SPECIAL_276>",
+ "<SPECIAL_277>",
+ "<SPECIAL_278>",
+ "<SPECIAL_279>",
+ "<SPECIAL_280>",
+ "<SPECIAL_281>",
+ "<SPECIAL_282>",
+ "<SPECIAL_283>",
+ "<SPECIAL_284>",
+ "<SPECIAL_285>",
+ "<SPECIAL_286>",
+ "<SPECIAL_287>",
+ "<SPECIAL_288>",
+ "<SPECIAL_289>",
+ "<SPECIAL_290>",
+ "<SPECIAL_291>",
+ "<SPECIAL_292>",
+ "<SPECIAL_293>",
+ "<SPECIAL_294>",
+ "<SPECIAL_295>",
+ "<SPECIAL_296>",
+ "<SPECIAL_297>",
+ "<SPECIAL_298>",
+ "<SPECIAL_299>",
+ "<SPECIAL_300>",
+ "<SPECIAL_301>",
+ "<SPECIAL_302>",
+ "<SPECIAL_303>",
+ "<SPECIAL_304>",
+ "<SPECIAL_305>",
+ "<SPECIAL_306>",
+ "<SPECIAL_307>",
+ "<SPECIAL_308>",
+ "<SPECIAL_309>",
+ "<SPECIAL_310>",
+ "<SPECIAL_311>",
+ "<SPECIAL_312>",
+ "<SPECIAL_313>",
+ "<SPECIAL_314>",
+ "<SPECIAL_315>",
+ "<SPECIAL_316>",
+ "<SPECIAL_317>",
+ "<SPECIAL_318>",
+ "<SPECIAL_319>",
+ "<SPECIAL_320>",
+ "<SPECIAL_321>",
+ "<SPECIAL_322>",
+ "<SPECIAL_323>",
+ "<SPECIAL_324>",
+ "<SPECIAL_325>",
+ "<SPECIAL_326>",
+ "<SPECIAL_327>",
+ "<SPECIAL_328>",
+ "<SPECIAL_329>",
+ "<SPECIAL_330>",
+ "<SPECIAL_331>",
+ "<SPECIAL_332>",
+ "<SPECIAL_333>",
+ "<SPECIAL_334>",
+ "<SPECIAL_335>",
+ "<SPECIAL_336>",
+ "<SPECIAL_337>",
+ "<SPECIAL_338>",
+ "<SPECIAL_339>",
+ "<SPECIAL_340>",
+ "<SPECIAL_341>",
+ "<SPECIAL_342>",
+ "<SPECIAL_343>",
+ "<SPECIAL_344>",
+ "<SPECIAL_345>",
+ "<SPECIAL_346>",
+ "<SPECIAL_347>",
+ "<SPECIAL_348>",
+ "<SPECIAL_349>",
+ "<SPECIAL_350>",
+ "<SPECIAL_351>",
+ "<SPECIAL_352>",
+ "<SPECIAL_353>",
+ "<SPECIAL_354>",
+ "<SPECIAL_355>",
+ "<SPECIAL_356>",
+ "<SPECIAL_357>",
+ "<SPECIAL_358>",
+ "<SPECIAL_359>",
+ "<SPECIAL_360>",
+ "<SPECIAL_361>",
+ "<SPECIAL_362>",
+ "<SPECIAL_363>",
+ "<SPECIAL_364>",
+ "<SPECIAL_365>",
+ "<SPECIAL_366>",
+ "<SPECIAL_367>",
+ "<SPECIAL_368>",
+ "<SPECIAL_369>",
+ "<SPECIAL_370>",
+ "<SPECIAL_371>",
+ "<SPECIAL_372>",
+ "<SPECIAL_373>",
+ "<SPECIAL_374>",
+ "<SPECIAL_375>",
+ "<SPECIAL_376>",
+ "<SPECIAL_377>",
+ "<SPECIAL_378>",
+ "<SPECIAL_379>",
+ "<SPECIAL_380>",
+ "<SPECIAL_381>",
+ "<SPECIAL_382>",
+ "<SPECIAL_383>",
+ "<SPECIAL_384>",
+ "<SPECIAL_385>",
+ "<SPECIAL_386>",
+ "<SPECIAL_387>",
+ "<SPECIAL_388>",
+ "<SPECIAL_389>",
+ "<SPECIAL_390>",
+ "<SPECIAL_391>",
+ "<SPECIAL_392>",
+ "<SPECIAL_393>",
+ "<SPECIAL_394>",
+ "<SPECIAL_395>",
+ "<SPECIAL_396>",
+ "<SPECIAL_397>",
+ "<SPECIAL_398>",
+ "<SPECIAL_399>",
+ "<SPECIAL_400>",
+ "<SPECIAL_401>",
+ "<SPECIAL_402>",
+ "<SPECIAL_403>",
+ "<SPECIAL_404>",
+ "<SPECIAL_405>",
+ "<SPECIAL_406>",
+ "<SPECIAL_407>",
+ "<SPECIAL_408>",
+ "<SPECIAL_409>",
+ "<SPECIAL_410>",
+ "<SPECIAL_411>",
+ "<SPECIAL_412>",
+ "<SPECIAL_413>",
+ "<SPECIAL_414>",
+ "<SPECIAL_415>",
+ "<SPECIAL_416>",
+ "<SPECIAL_417>",
+ "<SPECIAL_418>",
+ "<SPECIAL_419>",
+ "<SPECIAL_420>",
+ "<SPECIAL_421>",
+ "<SPECIAL_422>",
+ "<SPECIAL_423>",
+ "<SPECIAL_424>",
+ "<SPECIAL_425>",
+ "<SPECIAL_426>",
+ "<SPECIAL_427>",
+ "<SPECIAL_428>",
+ "<SPECIAL_429>",
+ "<SPECIAL_430>",
+ "<SPECIAL_431>",
+ "<SPECIAL_432>",
+ "<SPECIAL_433>",
+ "<SPECIAL_434>",
+ "<SPECIAL_435>",
+ "<SPECIAL_436>",
+ "<SPECIAL_437>",
+ "<SPECIAL_438>",
+ "<SPECIAL_439>",
+ "<SPECIAL_440>",
+ "<SPECIAL_441>",
+ "<SPECIAL_442>",
+ "<SPECIAL_443>",
+ "<SPECIAL_444>",
+ "<SPECIAL_445>",
+ "<SPECIAL_446>",
+ "<SPECIAL_447>",
+ "<SPECIAL_448>",
+ "<SPECIAL_449>",
+ "<SPECIAL_450>",
+ "<SPECIAL_451>",
+ "<SPECIAL_452>",
+ "<SPECIAL_453>",
+ "<SPECIAL_454>",
+ "<SPECIAL_455>",
+ "<SPECIAL_456>",
+ "<SPECIAL_457>",
+ "<SPECIAL_458>",
+ "<SPECIAL_459>",
+ "<SPECIAL_460>",
+ "<SPECIAL_461>",
+ "<SPECIAL_462>",
+ "<SPECIAL_463>",
+ "<SPECIAL_464>",
+ "<SPECIAL_465>",
+ "<SPECIAL_466>",
+ "<SPECIAL_467>",
+ "<SPECIAL_468>",
+ "<SPECIAL_469>",
+ "<SPECIAL_470>",
+ "<SPECIAL_471>",
+ "<SPECIAL_472>",
+ "<SPECIAL_473>",
+ "<SPECIAL_474>",
+ "<SPECIAL_475>",
+ "<SPECIAL_476>",
+ "<SPECIAL_477>",
+ "<SPECIAL_478>",
+ "<SPECIAL_479>",
+ "<SPECIAL_480>",
+ "<SPECIAL_481>",
+ "<SPECIAL_482>",
+ "<SPECIAL_483>",
+ "<SPECIAL_484>",
+ "<SPECIAL_485>",
+ "<SPECIAL_486>",
+ "<SPECIAL_487>",
+ "<SPECIAL_488>",
+ "<SPECIAL_489>",
+ "<SPECIAL_490>",
+ "<SPECIAL_491>",
+ "<SPECIAL_492>",
+ "<SPECIAL_493>",
+ "<SPECIAL_494>",
+ "<SPECIAL_495>",
+ "<SPECIAL_496>",
+ "<SPECIAL_497>",
+ "<SPECIAL_498>",
+ "<SPECIAL_499>",
+ "<SPECIAL_500>",
+ "<SPECIAL_501>",
+ "<SPECIAL_502>",
+ "<SPECIAL_503>",
+ "<SPECIAL_504>",
+ "<SPECIAL_505>",
+ "<SPECIAL_506>",
+ "<SPECIAL_507>",
+ "<SPECIAL_508>",
+ "<SPECIAL_509>",
+ "<SPECIAL_510>",
+ "<SPECIAL_511>",
+ "<SPECIAL_512>",
+ "<SPECIAL_513>",
+ "<SPECIAL_514>",
+ "<SPECIAL_515>",
+ "<SPECIAL_516>",
+ "<SPECIAL_517>",
+ "<SPECIAL_518>",
+ "<SPECIAL_519>",
+ "<SPECIAL_520>",
+ "<SPECIAL_521>",
+ "<SPECIAL_522>",
+ "<SPECIAL_523>",
+ "<SPECIAL_524>",
+ "<SPECIAL_525>",
+ "<SPECIAL_526>",
+ "<SPECIAL_527>",
+ "<SPECIAL_528>",
+ "<SPECIAL_529>",
+ "<SPECIAL_530>",
+ "<SPECIAL_531>",
+ "<SPECIAL_532>",
+ "<SPECIAL_533>",
+ "<SPECIAL_534>",
+ "<SPECIAL_535>",
+ "<SPECIAL_536>",
+ "<SPECIAL_537>",
+ "<SPECIAL_538>",
+ "<SPECIAL_539>",
+ "<SPECIAL_540>",
+ "<SPECIAL_541>",
+ "<SPECIAL_542>",
+ "<SPECIAL_543>",
+ "<SPECIAL_544>",
+ "<SPECIAL_545>",
+ "<SPECIAL_546>",
+ "<SPECIAL_547>",
+ "<SPECIAL_548>",
+ "<SPECIAL_549>",
+ "<SPECIAL_550>",
+ "<SPECIAL_551>",
+ "<SPECIAL_552>",
+ "<SPECIAL_553>",
+ "<SPECIAL_554>",
+ "<SPECIAL_555>",
+ "<SPECIAL_556>",
+ "<SPECIAL_557>",
+ "<SPECIAL_558>",
+ "<SPECIAL_559>",
+ "<SPECIAL_560>",
+ "<SPECIAL_561>",
+ "<SPECIAL_562>",
+ "<SPECIAL_563>",
+ "<SPECIAL_564>",
+ "<SPECIAL_565>",
+ "<SPECIAL_566>",
+ "<SPECIAL_567>",
+ "<SPECIAL_568>",
+ "<SPECIAL_569>",
+ "<SPECIAL_570>",
+ "<SPECIAL_571>",
+ "<SPECIAL_572>",
+ "<SPECIAL_573>",
+ "<SPECIAL_574>",
+ "<SPECIAL_575>",
+ "<SPECIAL_576>",
+ "<SPECIAL_577>",
+ "<SPECIAL_578>",
+ "<SPECIAL_579>",
+ "<SPECIAL_580>",
+ "<SPECIAL_581>",
+ "<SPECIAL_582>",
+ "<SPECIAL_583>",
+ "<SPECIAL_584>",
+ "<SPECIAL_585>",
+ "<SPECIAL_586>",
+ "<SPECIAL_587>",
+ "<SPECIAL_588>",
+ "<SPECIAL_589>",
+ "<SPECIAL_590>",
+ "<SPECIAL_591>",
+ "<SPECIAL_592>",
+ "<SPECIAL_593>",
+ "<SPECIAL_594>",
+ "<SPECIAL_595>",
+ "<SPECIAL_596>",
+ "<SPECIAL_597>",
+ "<SPECIAL_598>",
+ "<SPECIAL_599>",
+ "<SPECIAL_600>",
+ "<SPECIAL_601>",
+ "<SPECIAL_602>",
+ "<SPECIAL_603>",
+ "<SPECIAL_604>",
+ "<SPECIAL_605>",
+ "<SPECIAL_606>",
+ "<SPECIAL_607>",
+ "<SPECIAL_608>",
+ "<SPECIAL_609>",
+ "<SPECIAL_610>",
+ "<SPECIAL_611>",
+ "<SPECIAL_612>",
+ "<SPECIAL_613>",
+ "<SPECIAL_614>",
+ "<SPECIAL_615>",
+ "<SPECIAL_616>",
+ "<SPECIAL_617>",
+ "<SPECIAL_618>",
+ "<SPECIAL_619>",
+ "<SPECIAL_620>",
+ "<SPECIAL_621>",
+ "<SPECIAL_622>",
+ "<SPECIAL_623>",
+ "<SPECIAL_624>",
+ "<SPECIAL_625>",
+ "<SPECIAL_626>",
+ "<SPECIAL_627>",
+ "<SPECIAL_628>",
+ "<SPECIAL_629>",
+ "<SPECIAL_630>",
+ "<SPECIAL_631>",
+ "<SPECIAL_632>",
+ "<SPECIAL_633>",
+ "<SPECIAL_634>",
+ "<SPECIAL_635>",
+ "<SPECIAL_636>",
+ "<SPECIAL_637>",
+ "<SPECIAL_638>",
+ "<SPECIAL_639>",
+ "<SPECIAL_640>",
+ "<SPECIAL_641>",
+ "<SPECIAL_642>",
+ "<SPECIAL_643>",
+ "<SPECIAL_644>",
+ "<SPECIAL_645>",
+ "<SPECIAL_646>",
+ "<SPECIAL_647>",
+ "<SPECIAL_648>",
+ "<SPECIAL_649>",
+ "<SPECIAL_650>",
+ "<SPECIAL_651>",
+ "<SPECIAL_652>",
+ "<SPECIAL_653>",
+ "<SPECIAL_654>",
+ "<SPECIAL_655>",
+ "<SPECIAL_656>",
+ "<SPECIAL_657>",
+ "<SPECIAL_658>",
+ "<SPECIAL_659>",
+ "<SPECIAL_660>",
+ "<SPECIAL_661>",
+ "<SPECIAL_662>",
+ "<SPECIAL_663>",
+ "<SPECIAL_664>",
+ "<SPECIAL_665>",
+ "<SPECIAL_666>",
+ "<SPECIAL_667>",
+ "<SPECIAL_668>",
+ "<SPECIAL_669>",
+ "<SPECIAL_670>",
+ "<SPECIAL_671>",
+ "<SPECIAL_672>",
+ "<SPECIAL_673>",
+ "<SPECIAL_674>",
+ "<SPECIAL_675>",
+ "<SPECIAL_676>",
+ "<SPECIAL_677>",
+ "<SPECIAL_678>",
+ "<SPECIAL_679>",
+ "<SPECIAL_680>",
+ "<SPECIAL_681>",
+ "<SPECIAL_682>",
+ "<SPECIAL_683>",
+ "<SPECIAL_684>",
+ "<SPECIAL_685>",
+ "<SPECIAL_686>",
+ "<SPECIAL_687>",
+ "<SPECIAL_688>",
+ "<SPECIAL_689>",
+ "<SPECIAL_690>",
+ "<SPECIAL_691>",
+ "<SPECIAL_692>",
+ "<SPECIAL_693>",
+ "<SPECIAL_694>",
+ "<SPECIAL_695>",
+ "<SPECIAL_696>",
+ "<SPECIAL_697>",
+ "<SPECIAL_698>",
+ "<SPECIAL_699>",
+ "<SPECIAL_700>",
+ "<SPECIAL_701>",
+ "<SPECIAL_702>",
+ "<SPECIAL_703>",
+ "<SPECIAL_704>",
+ "<SPECIAL_705>",
+ "<SPECIAL_706>",
+ "<SPECIAL_707>",
+ "<SPECIAL_708>",
+ "<SPECIAL_709>",
+ "<SPECIAL_710>",
+ "<SPECIAL_711>",
+ "<SPECIAL_712>",
+ "<SPECIAL_713>",
+ "<SPECIAL_714>",
+ "<SPECIAL_715>",
+ "<SPECIAL_716>",
+ "<SPECIAL_717>",
+ "<SPECIAL_718>",
+ "<SPECIAL_719>",
+ "<SPECIAL_720>",
+ "<SPECIAL_721>",
+ "<SPECIAL_722>",
+ "<SPECIAL_723>",
+ "<SPECIAL_724>",
+ "<SPECIAL_725>",
+ "<SPECIAL_726>",
+ "<SPECIAL_727>",
+ "<SPECIAL_728>",
+ "<SPECIAL_729>",
+ "<SPECIAL_730>",
+ "<SPECIAL_731>",
+ "<SPECIAL_732>",
+ "<SPECIAL_733>",
+ "<SPECIAL_734>",
+ "<SPECIAL_735>",
+ "<SPECIAL_736>",
+ "<SPECIAL_737>",
+ "<SPECIAL_738>",
+ "<SPECIAL_739>",
+ "<SPECIAL_740>",
+ "<SPECIAL_741>",
+ "<SPECIAL_742>",
+ "<SPECIAL_743>",
+ "<SPECIAL_744>",
+ "<SPECIAL_745>",
+ "<SPECIAL_746>",
+ "<SPECIAL_747>",
+ "<SPECIAL_748>",
+ "<SPECIAL_749>",
+ "<SPECIAL_750>",
+ "<SPECIAL_751>",
+ "<SPECIAL_752>",
+ "<SPECIAL_753>",
+ "<SPECIAL_754>",
+ "<SPECIAL_755>",
+ "<SPECIAL_756>",
+ "<SPECIAL_757>",
+ "<SPECIAL_758>",
+ "<SPECIAL_759>",
+ "<SPECIAL_760>",
+ "<SPECIAL_761>",
+ "<SPECIAL_762>",
+ "<SPECIAL_763>",
+ "<SPECIAL_764>",
+ "<SPECIAL_765>",
+ "<SPECIAL_766>",
+ "<SPECIAL_767>",
+ "<SPECIAL_768>",
+ "<SPECIAL_769>",
+ "<SPECIAL_770>",
+ "<SPECIAL_771>",
+ "<SPECIAL_772>",
+ "<SPECIAL_773>",
+ "<SPECIAL_774>",
+ "<SPECIAL_775>",
+ "<SPECIAL_776>",
+ "<SPECIAL_777>",
+ "<SPECIAL_778>",
+ "<SPECIAL_779>",
+ "<SPECIAL_780>",
+ "<SPECIAL_781>",
+ "<SPECIAL_782>",
+ "<SPECIAL_783>",
+ "<SPECIAL_784>",
+ "<SPECIAL_785>",
+ "<SPECIAL_786>",
+ "<SPECIAL_787>",
+ "<SPECIAL_788>",
+ "<SPECIAL_789>",
+ "<SPECIAL_790>",
+ "<SPECIAL_791>",
+ "<SPECIAL_792>",
+ "<SPECIAL_793>",
+ "<SPECIAL_794>",
+ "<SPECIAL_795>",
+ "<SPECIAL_796>",
+ "<SPECIAL_797>",
+ "<SPECIAL_798>",
+ "<SPECIAL_799>",
+ "<SPECIAL_800>",
+ "<SPECIAL_801>",
+ "<SPECIAL_802>",
+ "<SPECIAL_803>",
+ "<SPECIAL_804>",
+ "<SPECIAL_805>",
+ "<SPECIAL_806>",
+ "<SPECIAL_807>",
+ "<SPECIAL_808>",
+ "<SPECIAL_809>",
+ "<SPECIAL_810>",
+ "<SPECIAL_811>",
+ "<SPECIAL_812>",
+ "<SPECIAL_813>",
+ "<SPECIAL_814>",
+ "<SPECIAL_815>",
+ "<SPECIAL_816>",
+ "<SPECIAL_817>",
+ "<SPECIAL_818>",
+ "<SPECIAL_819>",
+ "<SPECIAL_820>",
+ "<SPECIAL_821>",
+ "<SPECIAL_822>",
+ "<SPECIAL_823>",
+ "<SPECIAL_824>",
+ "<SPECIAL_825>",
+ "<SPECIAL_826>",
+ "<SPECIAL_827>",
+ "<SPECIAL_828>",
+ "<SPECIAL_829>",
+ "<SPECIAL_830>",
+ "<SPECIAL_831>",
+ "<SPECIAL_832>",
+ "<SPECIAL_833>",
+ "<SPECIAL_834>",
+ "<SPECIAL_835>",
+ "<SPECIAL_836>",
+ "<SPECIAL_837>",
+ "<SPECIAL_838>",
+ "<SPECIAL_839>",
+ "<SPECIAL_840>",
+ "<SPECIAL_841>",
+ "<SPECIAL_842>",
+ "<SPECIAL_843>",
+ "<SPECIAL_844>",
+ "<SPECIAL_845>",
+ "<SPECIAL_846>",
+ "<SPECIAL_847>",
+ "<SPECIAL_848>",
+ "<SPECIAL_849>",
+ "<SPECIAL_850>",
+ "<SPECIAL_851>",
+ "<SPECIAL_852>",
+ "<SPECIAL_853>",
+ "<SPECIAL_854>",
+ "<SPECIAL_855>",
+ "<SPECIAL_856>",
+ "<SPECIAL_857>",
+ "<SPECIAL_858>",
+ "<SPECIAL_859>",
+ "<SPECIAL_860>",
+ "<SPECIAL_861>",
+ "<SPECIAL_862>",
+ "<SPECIAL_863>",
+ "<SPECIAL_864>",
+ "<SPECIAL_865>",
+ "<SPECIAL_866>",
+ "<SPECIAL_867>",
+ "<SPECIAL_868>",
+ "<SPECIAL_869>",
+ "<SPECIAL_870>",
+ "<SPECIAL_871>",
+ "<SPECIAL_872>",
+ "<SPECIAL_873>",
+ "<SPECIAL_874>",
+ "<SPECIAL_875>",
+ "<SPECIAL_876>",
+ "<SPECIAL_877>",
+ "<SPECIAL_878>",
+ "<SPECIAL_879>",
+ "<SPECIAL_880>",
+ "<SPECIAL_881>",
+ "<SPECIAL_882>",
+ "<SPECIAL_883>",
+ "<SPECIAL_884>",
+ "<SPECIAL_885>",
+ "<SPECIAL_886>",
+ "<SPECIAL_887>",
+ "<SPECIAL_888>",
+ "<SPECIAL_889>",
+ "<SPECIAL_890>",
+ "<SPECIAL_891>",
+ "<SPECIAL_892>",
+ "<SPECIAL_893>",
+ "<SPECIAL_894>",
+ "<SPECIAL_895>",
+ "<SPECIAL_896>",
+ "<SPECIAL_897>",
+ "<SPECIAL_898>",
+ "<SPECIAL_899>",
+ "<SPECIAL_900>",
+ "<SPECIAL_901>",
+ "<SPECIAL_902>",
+ "<SPECIAL_903>",
+ "<SPECIAL_904>",
+ "<SPECIAL_905>",
+ "<SPECIAL_906>",
+ "<SPECIAL_907>",
+ "<SPECIAL_908>",
+ "<SPECIAL_909>",
+ "<SPECIAL_910>",
+ "<SPECIAL_911>",
+ "<SPECIAL_912>",
+ "<SPECIAL_913>",
+ "<SPECIAL_914>",
+ "<SPECIAL_915>",
+ "<SPECIAL_916>",
+ "<SPECIAL_917>",
+ "<SPECIAL_918>",
+ "<SPECIAL_919>",
+ "<SPECIAL_920>",
+ "<SPECIAL_921>",
+ "<SPECIAL_922>",
+ "<SPECIAL_923>",
+ "<SPECIAL_924>",
+ "<SPECIAL_925>",
+ "<SPECIAL_926>",
+ "<SPECIAL_927>",
+ "<SPECIAL_928>",
+ "<SPECIAL_929>",
+ "<SPECIAL_930>",
+ "<SPECIAL_931>",
+ "<SPECIAL_932>",
+ "<SPECIAL_933>",
+ "<SPECIAL_934>",
+ "<SPECIAL_935>",
+ "<SPECIAL_936>",
+ "<SPECIAL_937>",
+ "<SPECIAL_938>",
+ "<SPECIAL_939>",
+ "<SPECIAL_940>",
+ "<SPECIAL_941>",
+ "<SPECIAL_942>",
+ "<SPECIAL_943>",
+ "<SPECIAL_944>",
+ "<SPECIAL_945>",
+ "<SPECIAL_946>",
+ "<SPECIAL_947>",
+ "<SPECIAL_948>",
+ "<SPECIAL_949>",
+ "<SPECIAL_950>",
+ "<SPECIAL_951>",
+ "<SPECIAL_952>",
+ "<SPECIAL_953>",
+ "<SPECIAL_954>",
+ "<SPECIAL_955>",
+ "<SPECIAL_956>",
+ "<SPECIAL_957>",
+ "<SPECIAL_958>",
+ "<SPECIAL_959>",
+ "<SPECIAL_960>",
+ "<SPECIAL_961>",
+ "<SPECIAL_962>",
+ "<SPECIAL_963>",
+ "<SPECIAL_964>",
+ "<SPECIAL_965>",
+ "<SPECIAL_966>",
+ "<SPECIAL_967>",
+ "<SPECIAL_968>",
+ "<SPECIAL_969>",
+ "<SPECIAL_970>",
+ "<SPECIAL_971>",
+ "<SPECIAL_972>",
+ "<SPECIAL_973>",
+ "<SPECIAL_974>",
+ "<SPECIAL_975>",
+ "<SPECIAL_976>",
+ "<SPECIAL_977>",
+ "<SPECIAL_978>",
+ "<SPECIAL_979>",
+ "<SPECIAL_980>",
+ "<SPECIAL_981>",
+ "<SPECIAL_982>",
+ "<SPECIAL_983>",
+ "<SPECIAL_984>",
+ "<SPECIAL_985>",
+ "<SPECIAL_986>",
+ "<SPECIAL_987>",
+ "<SPECIAL_988>",
+ "<SPECIAL_989>",
+ "<SPECIAL_990>",
+ "<SPECIAL_991>",
+ "<SPECIAL_992>",
+ "<SPECIAL_993>",
+ "<SPECIAL_994>",
+ "<SPECIAL_995>",
+ "<SPECIAL_996>",
+ "<SPECIAL_997>",
+ "<SPECIAL_998>",
+ "<SPECIAL_999>"
+ ],
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<pad>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
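The bracketed entries are control tokens: `[INST]`/`[/INST]` frame user turns, `[SYSTEM_PROMPT]`/`[/SYSTEM_PROMPT]` frame the system prompt, and the tool tokens frame function calling; the `<SPECIAL_N>` entries are reserved slots. A purely illustrative sketch of how such tokens frame a turn (the authoritative template ships with the tokenizer, so do not treat this as the exact rendering):

```python
# Illustrative only -- not the model's actual chat template.
def render(system: str, user: str) -> str:
    return f"<s>[SYSTEM_PROMPT]{system}[/SYSTEM_PROMPT][INST]{user}[/INST]"

print(render("You are helpful.", "Hi!"))
```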
tekken.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6e2501687ccd0e1f30f36319eaf2b46958b897811e246cd8eb5d385b9e3de7d1
+ size 19399895
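`tekken.json` is stored through Git LFS, so this diff only shows the pointer; the roughly 19 MB tokenizer file itself has to be fetched (e.g. with `git lfs pull` or `hf_hub_download`) before use. A sketch of loading it directly, assuming `mistral_common`'s `MistralTokenizer.from_file`:

```python
from huggingface_hub import hf_hub_download
from mistral_common.tokens.tokenizers.mistral import MistralTokenizer

# Resolve the LFS pointer to the real 19,399,895-byte file, then load it.
path = hf_hub_download(
    "huihui-ai/Huihui-Mistral-Small-3.2-24B-Instruct-2506-abliterated",
    "tekken.json",
)
tokenizer = MistralTokenizer.from_file(path)
```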
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff