AIDXteam committed · verified
Commit 13be179 · 1 parent: ea904a6

Upload folder using huggingface_hub

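The commit message indicates the files were pushed with huggingface_hub's folder-upload API. A minimal sketch of how such a commit is typically produced; the local path and repo id below are placeholders, not values recorded in this commit:

```python
# Hedged sketch: creating a commit titled "Upload folder using huggingface_hub".
# The folder path and repo id are placeholders.
from huggingface_hub import HfApi

api = HfApi()  # uses the token from `huggingface-cli login` or HF_TOKEN
api.upload_folder(
    folder_path="./quantized-model",        # placeholder: directory holding the files listed below
    repo_id="AIDXteam/example-model-repo",  # placeholder: target model repo
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```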
.gitattributes CHANGED
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ model.safetensors.index.json filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,28 @@
+ {
+ "</think>": 151668,
+ "</tool_call>": 151658,
+ "</tool_response>": 151666,
+ "<think>": 151667,
+ "<tool_call>": 151657,
+ "<tool_response>": 151665,
+ "<|box_end|>": 151649,
+ "<|box_start|>": 151648,
+ "<|endoftext|>": 151643,
+ "<|file_sep|>": 151664,
+ "<|fim_middle|>": 151660,
+ "<|fim_pad|>": 151662,
+ "<|fim_prefix|>": 151659,
+ "<|fim_suffix|>": 151661,
+ "<|im_end|>": 151645,
+ "<|im_start|>": 151644,
+ "<|image_pad|>": 151655,
+ "<|object_ref_end|>": 151647,
+ "<|object_ref_start|>": 151646,
+ "<|quad_end|>": 151651,
+ "<|quad_start|>": 151650,
+ "<|repo_name|>": 151663,
+ "<|video_pad|>": 151656,
+ "<|vision_end|>": 151653,
+ "<|vision_pad|>": 151654,
+ "<|vision_start|>": 151652
+ }
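The file above pairs each added token string with its id. A quick check that the uploaded tokenizer resolves them to the same ids; the repo id is a placeholder:

```python
# Sketch: confirm the added tokens map to the ids listed in added_tokens.json.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("AIDXteam/example-model-repo")  # placeholder repo id
assert tok.convert_tokens_to_ids("<think>") == 151667
assert tok.convert_tokens_to_ids("</think>") == 151668
assert tok.convert_tokens_to_ids("<tool_call>") == 151657
assert tok.convert_tokens_to_ids("<|im_end|>") == 151645
```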
chat_template.jinja ADDED
@@ -0,0 +1,61 @@
+ {%- if tools %}
+ {{- '<|im_start|>system\n' }}
+ {%- if messages[0].role == 'system' %}
+ {{- messages[0].content + '\n\n' }}
+ {%- endif %}
+ {{- "# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
+ {%- for tool in tools %}
+ {{- "\n" }}
+ {{- tool | tojson }}
+ {%- endfor %}
+ {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
+ {%- else %}
+ {%- if messages[0].role == 'system' %}
+ {{- '<|im_start|>system\n' + messages[0].content + '<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- for message in messages %}
+ {%- if message.content is string %}
+ {%- set content = message.content %}
+ {%- else %}
+ {%- set content = '' %}
+ {%- endif %}
+ {%- if (message.role == "user") or (message.role == "system" and not loop.first) %}
+ {{- '<|im_start|>' + message.role + '\n' + content + '<|im_end|>' + '\n' }}
+ {%- elif message.role == "assistant" %}
+ {{- '<|im_start|>' + message.role + '\n' + content }}
+ {%- if message.tool_calls %}
+ {%- for tool_call in message.tool_calls %}
+ {%- if (loop.first and content) or (not loop.first) %}
+ {{- '\n' }}
+ {%- endif %}
+ {%- if tool_call.function %}
+ {%- set tool_call = tool_call.function %}
+ {%- endif %}
+ {{- '<tool_call>\n{"name": "' }}
+ {{- tool_call.name }}
+ {{- '", "arguments": ' }}
+ {%- if tool_call.arguments is string %}
+ {{- tool_call.arguments }}
+ {%- else %}
+ {{- tool_call.arguments | tojson }}
+ {%- endif %}
+ {{- '}\n</tool_call>' }}
+ {%- endfor %}
+ {%- endif %}
+ {{- '<|im_end|>\n' }}
+ {%- elif message.role == "tool" %}
+ {%- if loop.first or (messages[loop.index0 - 1].role != "tool") %}
+ {{- '<|im_start|>user' }}
+ {%- endif %}
+ {{- '\n<tool_response>\n' }}
+ {{- content }}
+ {{- '\n</tool_response>' }}
+ {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
+ {{- '<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- endfor %}
+ {%- if add_generation_prompt %}
+ {{- '<|im_start|>assistant\n' }}
+ {%- endif %}
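A sketch of rendering this template through transformers' `apply_chat_template`; the repo id, messages, and tool definition are illustrative placeholders, not part of this commit:

```python
# Sketch: render the chat template above with tools and a generation prompt.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("AIDXteam/example-model-repo")  # placeholder repo id

tools = [{
    "type": "function",
    "function": {
        "name": "get_weather",  # example tool definition
        "description": "Look up the current weather for a city.",
        "parameters": {
            "type": "object",
            "properties": {"city": {"type": "string"}},
            "required": ["city"],
        },
    },
}]
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What's the weather in Paris?"},
]

# Produces <|im_start|>/<|im_end|> turns, the <tools> block from the template's
# tools branch, and a trailing '<|im_start|>assistant\n' generation prompt.
prompt = tok.apply_chat_template(
    messages, tools=tools, tokenize=False, add_generation_prompt=True
)
print(prompt)
```

Note that in this template, tool results (`role: "tool"` messages) are wrapped in `<tool_response>` tags inside a user turn, and assistant tool calls are serialized into `<tool_call>` blocks.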
config.json ADDED
@@ -0,0 +1,167 @@
+ {
+ "architectures": [
+ "Qwen3MoeForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "decoder_sparse_step": 1,
+ "dtype": "bfloat16",
+ "eos_token_id": 151645,
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "initializer_range": 0.02,
+ "intermediate_size": 12288,
+ "max_position_embeddings": 262144,
+ "max_window_layers": 94,
+ "mlp_only_layers": [],
+ "model_type": "qwen3_moe",
+ "moe_intermediate_size": 1536,
+ "norm_topk_prob": true,
+ "num_attention_heads": 64,
+ "num_experts": 128,
+ "num_experts_per_tok": 8,
+ "num_hidden_layers": 94,
+ "num_key_value_heads": 4,
+ "output_router_logits": false,
+ "quantization_config": {
+ "config_groups": {
+ "group_0": {
+ "format": "nvfp4-pack-quantized",
+ "input_activations": null,
+ "output_activations": null,
+ "targets": [
+ "Linear"
+ ],
+ "weights": {
+ "actorder": null,
+ "block_structure": null,
+ "dynamic": false,
+ "group_size": 16,
+ "num_bits": 4,
+ "observer": "minmax",
+ "observer_kwargs": {},
+ "strategy": "tensor_group",
+ "symmetric": true,
+ "type": "float"
+ }
+ }
+ },
+ "format": "nvfp4-pack-quantized",
+ "global_compression_ratio": null,
+ "ignore": [
+ "model.layers.0.mlp.gate",
+ "model.layers.1.mlp.gate",
+ "model.layers.2.mlp.gate",
+ "model.layers.3.mlp.gate",
+ "model.layers.4.mlp.gate",
+ "model.layers.5.mlp.gate",
+ "model.layers.6.mlp.gate",
+ "model.layers.7.mlp.gate",
+ "model.layers.8.mlp.gate",
+ "model.layers.9.mlp.gate",
+ "model.layers.10.mlp.gate",
+ "model.layers.11.mlp.gate",
+ "model.layers.12.mlp.gate",
+ "model.layers.13.mlp.gate",
+ "model.layers.14.mlp.gate",
+ "model.layers.15.mlp.gate",
+ "model.layers.16.mlp.gate",
+ "model.layers.17.mlp.gate",
+ "model.layers.18.mlp.gate",
+ "model.layers.19.mlp.gate",
+ "model.layers.20.mlp.gate",
+ "model.layers.21.mlp.gate",
+ "model.layers.22.mlp.gate",
+ "model.layers.23.mlp.gate",
+ "model.layers.24.mlp.gate",
+ "model.layers.25.mlp.gate",
+ "model.layers.26.mlp.gate",
+ "model.layers.27.mlp.gate",
+ "model.layers.28.mlp.gate",
+ "model.layers.29.mlp.gate",
+ "model.layers.30.mlp.gate",
+ "model.layers.31.mlp.gate",
+ "model.layers.32.mlp.gate",
+ "model.layers.33.mlp.gate",
+ "model.layers.34.mlp.gate",
+ "model.layers.35.mlp.gate",
+ "model.layers.36.mlp.gate",
+ "model.layers.37.mlp.gate",
+ "model.layers.38.mlp.gate",
+ "model.layers.39.mlp.gate",
+ "model.layers.40.mlp.gate",
+ "model.layers.41.mlp.gate",
+ "model.layers.42.mlp.gate",
+ "model.layers.43.mlp.gate",
+ "model.layers.44.mlp.gate",
+ "model.layers.45.mlp.gate",
+ "model.layers.46.mlp.gate",
+ "model.layers.47.mlp.gate",
+ "model.layers.48.mlp.gate",
+ "model.layers.49.mlp.gate",
+ "model.layers.50.mlp.gate",
+ "model.layers.51.mlp.gate",
+ "model.layers.52.mlp.gate",
+ "model.layers.53.mlp.gate",
+ "model.layers.54.mlp.gate",
+ "model.layers.55.mlp.gate",
+ "model.layers.56.mlp.gate",
+ "model.layers.57.mlp.gate",
+ "model.layers.58.mlp.gate",
+ "model.layers.59.mlp.gate",
+ "model.layers.60.mlp.gate",
+ "model.layers.61.mlp.gate",
+ "model.layers.62.mlp.gate",
+ "model.layers.63.mlp.gate",
+ "model.layers.64.mlp.gate",
+ "model.layers.65.mlp.gate",
+ "model.layers.66.mlp.gate",
+ "model.layers.67.mlp.gate",
+ "model.layers.68.mlp.gate",
+ "model.layers.69.mlp.gate",
+ "model.layers.70.mlp.gate",
+ "model.layers.71.mlp.gate",
+ "model.layers.72.mlp.gate",
+ "model.layers.73.mlp.gate",
+ "model.layers.74.mlp.gate",
+ "model.layers.75.mlp.gate",
+ "model.layers.76.mlp.gate",
+ "model.layers.77.mlp.gate",
+ "model.layers.78.mlp.gate",
+ "model.layers.79.mlp.gate",
+ "model.layers.80.mlp.gate",
+ "model.layers.81.mlp.gate",
+ "model.layers.82.mlp.gate",
+ "model.layers.83.mlp.gate",
+ "model.layers.84.mlp.gate",
+ "model.layers.85.mlp.gate",
+ "model.layers.86.mlp.gate",
+ "model.layers.87.mlp.gate",
+ "model.layers.88.mlp.gate",
+ "model.layers.89.mlp.gate",
+ "model.layers.90.mlp.gate",
+ "model.layers.91.mlp.gate",
+ "model.layers.92.mlp.gate",
+ "model.layers.93.mlp.gate",
+ "lm_head"
+ ],
+ "kv_cache_scheme": null,
+ "quant_method": "compressed-tensors",
+ "quantization_status": "compressed",
+ "sparsity_config": {},
+ "transform_config": {},
+ "version": "0.12.2"
+ },
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 5000000,
+ "router_aux_loss_coef": 0.001,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "transformers_version": "4.56.2",
+ "use_cache": true,
+ "use_sliding_window": false,
+ "vocab_size": 151936
+ }
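The config declares a Qwen3-MoE causal LM whose Linear layers are quantized via compressed-tensors in the NVFP4 pack-quantized format, with the MoE router gates and `lm_head` left unquantized. A loading sketch, assuming a recent transformers with the `compressed-tensors` package installed; the repo id is a placeholder:

```python
# Sketch: load the quantized checkpoint; quant_method "compressed-tensors"
# requires the compressed-tensors package alongside transformers.
from transformers import AutoConfig, AutoModelForCausalLM

repo = "AIDXteam/example-model-repo"  # placeholder repo id
cfg = AutoConfig.from_pretrained(repo)
print(cfg.model_type, cfg.num_hidden_layers, cfg.num_experts, cfg.num_experts_per_tok)
# expected from the config above: qwen3_moe 94 128 8

model = AutoModelForCausalLM.from_pretrained(repo, torch_dtype="auto", device_map="auto")
```

Depending on version and hardware, inference engines such as vLLM can also read the same `quantization_config` directly; actual NVFP4 kernel support is runtime-dependent and not guaranteed by this file alone.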
generation_config.json ADDED
@@ -0,0 +1,13 @@
+ {
+ "bos_token_id": 151643,
+ "do_sample": true,
+ "eos_token_id": [
+ 151645,
+ 151643
+ ],
+ "pad_token_id": 151643,
+ "temperature": 0.7,
+ "top_k": 20,
+ "top_p": 0.8,
+ "transformers_version": "4.56.2"
+ }
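These sampling defaults are picked up automatically by `generate()` when the model is loaded from the repo; the sketch below repeats them explicitly only to make the setup visible. The repo id and prompt are placeholders:

```python
# Sketch: generation with the sampling defaults from generation_config.json.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "AIDXteam/example-model-repo"  # placeholder repo id
tok = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(repo, torch_dtype="auto", device_map="auto")

inputs = tok("Briefly explain mixture-of-experts routing.", return_tensors="pt").to(model.device)
with torch.no_grad():
    out = model.generate(
        **inputs,
        max_new_tokens=128,
        do_sample=True,    # "do_sample": true
        temperature=0.7,   # matches the file above
        top_k=20,
        top_p=0.8,
    )
print(tok.decode(out[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True))
```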
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4c02c3362553d88fbcf0c45b59a24e5d55a10b6a7db6f48ea5d16b04a6873373
+ size 4999517216
model-00002-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a6fb97f6707ff3be8ae8501ffe468ffeeaedfb092531c7536e1b3bf6bc577557
+ size 4999416856
model-00003-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ea369edb0cc01a88c8e4f4e196f042b18bb2e2e6e6e6eec9bdfcebc2cd2fe1f8
+ size 5000319784
model-00004-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0230d199341e14403cc80967ceb9e8fa58af6ee0b068c0d90ba6604f60881027
+ size 4999814120
model-00005-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4453ba829f52f2942eecd938d0fd09573f059812efbd67548ca1f4afe895b700
+ size 5000324144
model-00006-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:89e6ef20ecedb7dcfdac83ee2b633a3be6a58a4cec48a922d766a57200d89a3a
+ size 4999814152
model-00007-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e4112b55a33ea2f7f8f478dded48b875405534b2671d340684af089e7d64d982
+ size 4999421016
model-00008-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d31f2c59ca861c5f2349a5d2f827654d0cd2966dd01f283ea4506db976865cc5
+ size 5000323936
model-00009-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:92c6759e76ab99753a263609d127e5cbe6f8c3eb6ff4103a35f7d05aa0e2d9de
+ size 4999814368
model-00010-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e24c69c76edecbac38259b3c31593526400037a296a66c35d57df0d01ba1ac80
+ size 5000323968
model-00011-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:db9ae9162dfa6481f3b5e20620e3ced3e83e88bf06a344205d7ded9f7fd29994
+ size 4999814336
model-00012-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9b2ae57588bf3bfbd05d2f088ef9f85e515a6de9a2f307b3f50dc7d8dab61c42
+ size 5000324136
model-00013-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9442884e20c2e81e9d0e4af15c8d80b88db8495d09fd3761a6566dc72a4e7b21
+ size 4999814160
model-00014-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1997d195b692851f7a6073cdcf7c8c6288ea7f4251611d5e54d6506fdb93cafe
+ size 4999421024
model-00015-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c62a0e379d2374d398b049196c901aa887e2da92c6a0a4245c08a3aec54376a5
+ size 5000323928
model-00016-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:edd61620c827662aca349086b1509e7129c03b9da727c8fb1eb6152340fa3f0d
+ size 4999814368
model-00017-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:92e42c2102c09518def253ee643b81782520c1795e67a59e90729a5f387b42d3
+ size 5000323960
model-00018-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:823aacdc6cf145037d401cdebc972ce1460019dd7c93daa9565b8ad8e2556384
+ size 4999814336
model-00019-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:98a1916cf4078a27e200c07ddeeac8a08035a10c5b90a928c5865b849ad8dffe
+ size 5000324128
model-00020-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ddb6036eadb7e320d4faec8629031ee4f0cea5fcd6377196eee8201646859edc
+ size 4999814176
model-00021-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7185c12657ed06129739353e568f7892535a72307eeea313e91c5512a23eba54
+ size 4999421024
model-00022-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6f3d89aaf83869ea5c557eee8f384e92c23eb984367755f68d030c752b8928d6
+ size 5000323920
model-00023-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:87b3854e148738f0e892ef61113e0b4ddf628fa6c531b3dfb678a2a09736018e
+ size 4999814368
model-00024-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:af083aae0af25db55187822838d14e32b32c38971dd9883791e8317948fffb48
+ size 5000323952
model-00025-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c13acfdd011c36e6f6429c860b1d30416ca5370462583307cdd567ce18e239d7
+ size 4999814344
model-00026-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5585ec5a39b7d841f6b486a8b47fe3b9c156ad9f7eb126fb3778884c2d3465c8
+ size 5000324128
model-00027-of-00027.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:36275f2817101c6a4ac696a698bb3972523a7735beaf61c30d1da602152a90dc
+ size 4116419688
model.safetensors.index.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:907207aebb4fd92dc2a1895923165e1af9d957d0b278708c0e9fa43294a40b0c
+ size 10866669
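The index file is stored here as an LFS pointer; once resolved, it should follow the standard sharded-safetensors layout (a `metadata` block plus a `weight_map` from tensor name to shard file). A reading sketch; the tensor name below is an example, not taken from this commit:

```python
# Sketch: inspect which of the 27 shards holds a given tensor,
# assuming the standard safetensors index format.
import json

with open("model.safetensors.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])                      # total bytes across all shards
print(index["weight_map"].get("model.embed_tokens.weight"))  # shard file for this (example) tensor
```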
recipe.yaml ADDED
@@ -0,0 +1,6 @@
+ default_stage:
+   default_modifiers:
+     QuantizationModifier:
+       targets: [Linear]
+       ignore: [lm_head, 're:.*mlp.gate$']
+       scheme: NVFP4A16
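The recipe records the llm-compressor configuration used for quantization: every Linear layer gets the NVFP4A16 scheme, while `lm_head` and the MoE router gates are skipped. A sketch of applying an equivalent recipe; the base-model id is a placeholder and the `oneshot` import path has moved between llm-compressor releases, so treat this as illustrative rather than the exact command used:

```python
# Sketch: produce a compressed checkpoint from a recipe like recipe.yaml.
from transformers import AutoModelForCausalLM
from llmcompressor import oneshot  # older releases: from llmcompressor.transformers import oneshot

base = AutoModelForCausalLM.from_pretrained(
    "org/base-qwen3-moe-model",  # placeholder: unquantized base checkpoint
    torch_dtype="auto",
)
# NVFP4A16: packed 4-bit FP weights (group size 16 per config.json), 16-bit activations;
# lm_head and the expert router gates are excluded, matching the recipe above.
oneshot(model=base, recipe="recipe.yaml")
base.save_pretrained("model-nvfp4a16", save_compressed=True)
```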
special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aeb13307a71acd8fe81861d94ad54ab689df773318809eed3cbe794b4492dae4
+ size 11422654
tokenizer_config.json ADDED
@@ -0,0 +1,239 @@
+ {
+ "add_bos_token": false,
+ "add_prefix_space": false,
+ "added_tokens_decoder": {
+ "151643": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151644": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151645": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151646": {
+ "content": "<|object_ref_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151647": {
+ "content": "<|object_ref_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151648": {
+ "content": "<|box_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151649": {
+ "content": "<|box_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151650": {
+ "content": "<|quad_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151651": {
+ "content": "<|quad_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151652": {
+ "content": "<|vision_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151653": {
+ "content": "<|vision_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151654": {
+ "content": "<|vision_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151655": {
+ "content": "<|image_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151656": {
+ "content": "<|video_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151657": {
+ "content": "<tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151658": {
+ "content": "</tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151659": {
+ "content": "<|fim_prefix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151660": {
+ "content": "<|fim_middle|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151661": {
+ "content": "<|fim_suffix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151662": {
+ "content": "<|fim_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151663": {
+ "content": "<|repo_name|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151664": {
+ "content": "<|file_sep|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151665": {
+ "content": "<tool_response>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151666": {
+ "content": "</tool_response>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151667": {
+ "content": "<think>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151668": {
+ "content": "</think>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ }
+ },
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "bos_token": null,
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "errors": "replace",
+ "extra_special_tokens": {},
+ "model_max_length": 1010000,
+ "pad_token": "<|endoftext|>",
+ "split_special_tokens": false,
+ "tokenizer_class": "Qwen2Tokenizer",
+ "unk_token": null
+ }
vocab.json ADDED
The diff for this file is too large to render. See raw diff