smpanaro committed
Commit 1a5aa2e · verified · 1 Parent(s): 40ebb58

Upload folder using huggingface_hub

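The commit title names the huggingface_hub folder-upload path; for reference, a minimal sketch of the call that produces a commit like this one (the local folder path and repo id are assumptions):

```python
from huggingface_hub import HfApi

api = HfApi()
# Uploads every file in the local export folder as a single commit.
# Files matched by .gitattributes LFS rules (e.g. tokenizer.json below)
# are stored via Git LFS and show up in the diff as pointer files.
api.upload_folder(
    folder_path="./qwen2.5-0.5b-ganq-4bit",        # assumption: local export dir
    repo_id="smpanaro/Qwen2.5-0.5B-4bit-GANQ",     # assumption: target repo id
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```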
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,24 @@
+{
+  "</tool_call>": 151658,
+  "<tool_call>": 151657,
+  "<|box_end|>": 151649,
+  "<|box_start|>": 151648,
+  "<|endoftext|>": 151643,
+  "<|file_sep|>": 151664,
+  "<|fim_middle|>": 151660,
+  "<|fim_pad|>": 151662,
+  "<|fim_prefix|>": 151659,
+  "<|fim_suffix|>": 151661,
+  "<|im_end|>": 151645,
+  "<|im_start|>": 151644,
+  "<|image_pad|>": 151655,
+  "<|object_ref_end|>": 151647,
+  "<|object_ref_start|>": 151646,
+  "<|quad_end|>": 151651,
+  "<|quad_start|>": 151650,
+  "<|repo_name|>": 151663,
+  "<|video_pad|>": 151656,
+  "<|vision_end|>": 151653,
+  "<|vision_pad|>": 151654,
+  "<|vision_start|>": 151652
+}
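added_tokens.json pins each added token to a fixed id. A quick way to sanity-check the mapping against the upstream tokenizer these ids come from (per the `_name_or_path` in config.json below):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-0.5B")

# Ids taken verbatim from the mapping above.
assert tok.convert_tokens_to_ids("<|endoftext|>") == 151643
assert tok.convert_tokens_to_ids("<|im_end|>") == 151645
```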
config.json ADDED
@@ -0,0 +1,53 @@
+{
+  "_name_or_path": "/Users/stephen/.cache/huggingface/hub/models--Qwen--Qwen2.5-0.5B/snapshots/060db6499f32faf8b98477b0a26969ef7d8b9987",
+  "architectures": [
+    "Qwen2ForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "eos_token_id": 151643,
+  "hidden_act": "silu",
+  "hidden_size": 896,
+  "initializer_range": 0.02,
+  "intermediate_size": 4864,
+  "max_position_embeddings": 32768,
+  "max_window_layers": 24,
+  "model_type": "qwen2",
+  "num_attention_heads": 14,
+  "num_hidden_layers": 24,
+  "num_key_value_heads": 2,
+  "quantization_config": {
+    "bits": 4,
+    "checkpoint_format": "fake",
+    "desc_act": true,
+    "group_size": 128,
+    "lm_head": false,
+    "meta": {
+      "damp_auto_increment": 0.0025,
+      "damp_percent": 0.01,
+      "mse": 0.0,
+      "quantizer": [
+        "gptqmodel:3.1.0-dev"
+      ],
+      "static_groups": false,
+      "true_sequential": true,
+      "uri": "https://github.com/modelcloud/gptqmodel",
+      "v2": false,
+      "v2_alpha": 0.25
+    },
+    "pack_dtype": "int32",
+    "quant_method": "ganq",
+    "sym": true
+  },
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
+  "rope_theta": 1000000.0,
+  "sliding_window": 32768,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float16",
+  "transformers_version": "4.49.0",
+  "use_cache": true,
+  "use_mrope": false,
+  "use_sliding_window": false,
+  "vocab_size": 151936
+}
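The quantization_config records GPTQ-style settings from gptqmodel, but `checkpoint_format: "fake"` suggests the shipped weights are fake-quantized (quantized then dequantized) float16 rather than packed int4, which the ~1.26 GB safetensors file below is consistent with. Under that reading, a minimal loading sketch (the repo id is a placeholder, and dropping the unrecognized quantization_config is an assumption about what transformers will accept):

```python
import torch
from transformers import AutoConfig, AutoModelForCausalLM

repo = "smpanaro/Qwen2.5-0.5B-4bit-GANQ"  # hypothetical repo id

cfg = AutoConfig.from_pretrained(repo)
# Assumption: transformers does not recognize quant_method "ganq", and since
# the "fake" checkpoint stores plain fp16 weights, the section can be dropped
# and the model loaded as an ordinary Qwen2ForCausalLM.
if hasattr(cfg, "quantization_config"):
    del cfg.quantization_config

model = AutoModelForCausalLM.from_pretrained(repo, config=cfg, torch_dtype=torch.float16)
print(sum(p.numel() for p in model.parameters()))  # ~0.63B with the untied lm_head
```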
generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "bos_token_id": 151643,
+  "eos_token_id": 151643,
+  "max_new_tokens": 2048,
+  "transformers_version": "4.49.0"
+}
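These values become the defaults that `model.generate(...)` falls back to when not overridden. A small check (repo id again hypothetical):

```python
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("smpanaro/Qwen2.5-0.5B-4bit-GANQ")  # hypothetical repo id
print(gen_cfg.eos_token_id, gen_cfg.max_new_tokens)  # 151643 2048
```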
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e3ec5a99e4e105d5d5df717330ce4c89b1f9a8c437ef48c7bc9387812d60b901
+size 1263195360
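What is committed here is only the Git LFS pointer; the 1.26 GB payload lives in LFS storage. A small sketch to verify a downloaded copy against the pointer's oid and size (local filename is an assumption):

```python
import hashlib
from pathlib import Path

def verify_lfs_pointer(path: str, oid: str, size: int) -> bool:
    """Check a downloaded file against the sha256 oid and size in its LFS pointer."""
    p = Path(path)
    if p.stat().st_size != size:
        return False
    h = hashlib.sha256()
    with p.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            h.update(chunk)
    return h.hexdigest() == oid

# Values copied from the pointer above.
print(verify_lfs_pointer(
    "model.safetensors",
    "e3ec5a99e4e105d5d5df717330ce4c89b1f9a8c437ef48c7bc9387812d60b901",
    1263195360,
))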
quant_log.csv ADDED
@@ -0,0 +1,169 @@
+layer,module,loss,samples,damp,time
+0,self_attn.k_proj,13.6294784546,605.30756,5.540
+0,self_attn.v_proj,0.0593941013,605.30756,5.142
+0,self_attn.q_proj,157.1803283691,605.30756,7.996
+0,self_attn.o_proj,0.0397497887,0.47635,9.437
+0,mlp.gate_proj,890.1560058594,1462.71399,25.367
+0,mlp.up_proj,447.1669514974,1462.71399,30.830
+0,mlp.down_proj,9.7131913503,7.69736,75.129
+1,self_attn.k_proj,55.9226175944,442.29816,5.188
+1,self_attn.v_proj,2.5177942912,442.29816,5.111
+1,self_attn.q_proj,203.8958536784,442.29816,9.935
+1,self_attn.o_proj,8.2487831116,150.97090,9.100
+1,mlp.gate_proj,1590.2045898438,1133.41455,27.094
+1,mlp.up_proj,801.0877278646,1133.41455,35.035
+1,mlp.down_proj,7.3508618673,5.83138,84.990
+2,self_attn.k_proj,87.0443725586,1056.47473,4.303
+2,self_attn.v_proj,6.6566747030,1056.47473,4.548
+2,self_attn.q_proj,395.9968668620,1056.47473,10.522
+2,self_attn.o_proj,2.5991245906,10.64848,7.522
+2,mlp.gate_proj,2042.9488932292,1195.52600,26.076
+2,mlp.up_proj,1048.3621419271,1195.52600,33.747
+2,mlp.down_proj,1312.2178548177,4760.32471,156.162
+3,self_attn.k_proj,77.2405090332,990.21216,5.008
+3,self_attn.v_proj,18.3168106079,990.21216,4.276
+3,self_attn.q_proj,378.6944173177,990.21216,7.728
+3,self_attn.o_proj,5.8858553569,49.03431,9.194
+3,mlp.gate_proj,2371.3829752604,1158.04968,26.045
+3,mlp.up_proj,1394.1834309896,1158.04968,34.357
+3,mlp.down_proj,871.7003580729,2827.41553,117.447
+4,self_attn.k_proj,67.2893168132,952.60614,4.925
+4,self_attn.v_proj,30.1960449219,952.60614,5.645
+4,self_attn.q_proj,363.8845214844,952.60614,8.904
+4,self_attn.o_proj,6.2175553640,15.03408,10.165
+4,mlp.gate_proj,2374.8307291667,1006.35736,25.839
+4,mlp.up_proj,1383.4635416667,1006.35736,33.502
+4,mlp.down_proj,19.3202476501,12.27280,98.435
+5,self_attn.k_proj,93.2561645508,816.20465,5.153
+5,self_attn.v_proj,34.1669718424,816.20465,6.024
+5,self_attn.q_proj,454.1962890625,816.20465,9.394
+5,self_attn.o_proj,6.5742734273,26.67174,9.096
+5,mlp.gate_proj,3746.5504557292,930.19141,25.428
+5,mlp.up_proj,1632.7145182292,930.19141,33.412
+5,mlp.down_proj,24.5360412598,29.75147,97.224
+6,self_attn.k_proj,61.9318389893,923.27863,4.630
+6,self_attn.v_proj,20.0416208903,923.27863,3.967
+6,self_attn.q_proj,358.1446940104,923.27863,8.877
+6,self_attn.o_proj,5.9309406281,33.73212,9.581
+6,mlp.gate_proj,2318.1250000000,1190.69312,25.888
+6,mlp.up_proj,1741.2809244792,1190.69312,35.493
+6,mlp.down_proj,21.2185401917,19.22627,94.275
+7,self_attn.k_proj,72.7023824056,1116.11365,4.691
+7,self_attn.v_proj,30.6320037842,1116.11365,5.059
+7,self_attn.q_proj,389.1239420573,1116.11365,8.003
+7,self_attn.o_proj,8.3826980591,28.69539,11.004
+7,mlp.gate_proj,2247.6565755208,1140.49548,26.098
+7,mlp.up_proj,1951.6266276042,1140.49548,37.386
+7,mlp.down_proj,22.1270675659,14.99066,86.959
+8,self_attn.k_proj,85.8061218262,1018.94623,5.397
+8,self_attn.v_proj,28.8810068766,1018.94623,5.698
+8,self_attn.q_proj,1564.2822265625,1018.94623,7.483
+8,self_attn.o_proj,12.9232457479,29.53937,8.102
+8,mlp.gate_proj,2300.9586588542,1060.96912,27.350
+8,mlp.up_proj,1910.5530598958,1060.96912,38.651
+8,mlp.down_proj,16.0145187378,12.30956,85.434
+9,self_attn.k_proj,237.0528971354,969.32983,4.468
+9,self_attn.v_proj,58.2208048503,969.32983,5.599
+9,self_attn.q_proj,734.7415364583,969.32983,9.142
+9,self_attn.o_proj,8.3034648895,45.79228,11.177
+9,mlp.gate_proj,2231.6967773438,1003.12390,27.630
+9,mlp.up_proj,2487.6049804688,1003.12390,39.319
+9,mlp.down_proj,15.8297729492,8.70068,96.616
+10,self_attn.k_proj,104.2002156576,926.95062,5.397
+10,self_attn.v_proj,107.8991699219,926.95062,4.747
+10,self_attn.q_proj,682.7618001302,926.95062,8.931
+10,self_attn.o_proj,12.0142873128,50.35853,8.762
+10,mlp.gate_proj,2639.0266927083,1051.56311,27.400
+10,mlp.up_proj,2503.0823567708,1051.56311,39.298
+10,mlp.down_proj,14.0178667704,14.42505,89.843
+11,self_attn.k_proj,231.1263020833,934.36938,5.108
+11,self_attn.v_proj,67.8643188477,934.36938,5.310
+11,self_attn.q_proj,909.9117838542,934.36938,9.004
+11,self_attn.o_proj,9.9704589844,61.24993,9.079
+11,mlp.gate_proj,2271.5257161458,1017.91833,31.507
+11,mlp.up_proj,3008.4290364583,1017.91833,41.184
+11,mlp.down_proj,17.7088826497,8.19535,103.007
+12,self_attn.k_proj,115.5236816406,995.07947,4.272
+12,self_attn.v_proj,132.0898742676,995.07947,6.886
+12,self_attn.q_proj,769.4633789062,995.07947,9.626
+12,self_attn.o_proj,10.4315388997,31.66696,10.267
+12,mlp.gate_proj,2505.3906250000,1110.15466,30.753
+12,mlp.up_proj,2742.0341796875,1110.15466,39.504
+12,mlp.down_proj,15.2585194906,14.90878,92.752
+13,self_attn.k_proj,176.4259440104,1088.85889,4.977
+13,self_attn.v_proj,93.5887044271,1088.85889,5.134
+13,self_attn.q_proj,1085.7306315104,1088.85889,11.256
+13,self_attn.o_proj,9.4796975454,41.00184,11.165
+13,mlp.gate_proj,2282.0263671875,1187.87073,28.772
+13,mlp.up_proj,2481.6005859375,1187.87073,41.142
+13,mlp.down_proj,20.0558713277,16.08819,94.825
+14,self_attn.k_proj,116.5531616211,1114.20764,4.826
+14,self_attn.v_proj,86.3167928060,1114.20764,5.255
+14,self_attn.q_proj,941.1445312500,1114.20764,9.584
+14,self_attn.o_proj,24.7816441854,55.38456,10.449
+14,mlp.gate_proj,2092.9990234375,1262.28064,26.551
+14,mlp.up_proj,2056.2060546875,1262.28064,37.744
+14,mlp.down_proj,25.7261505127,26.69924,90.999
+15,self_attn.k_proj,139.1018981934,1153.68311,5.429
+15,self_attn.v_proj,79.7861022949,1153.68311,5.691
+15,self_attn.q_proj,836.4055989583,1153.68311,9.833
+15,self_attn.o_proj,12.8856709798,50.51323,10.526
+15,mlp.gate_proj,2218.1608072917,1279.81482,27.225
+15,mlp.up_proj,2204.1611328125,1279.81482,31.898
+15,mlp.down_proj,42.3582916260,44.55193,84.342
+16,self_attn.k_proj,245.3200276693,1384.79602,5.580
+16,self_attn.v_proj,47.8493194580,1384.79602,3.957
+16,self_attn.q_proj,1299.0154622396,1384.79602,10.560
+16,self_attn.o_proj,9.6014226278,20.25651,9.861
+16,mlp.gate_proj,2797.5843098958,1337.23083,27.117
+16,mlp.up_proj,2085.0297851562,1337.23083,31.113
+16,mlp.down_proj,62.3565826416,48.95044,87.688
+17,self_attn.k_proj,97.5990091960,1351.28711,5.412
+17,self_attn.v_proj,67.4315694173,1351.28711,5.643
+17,self_attn.q_proj,684.9749348958,1351.28711,8.743
+17,self_attn.o_proj,37.4314753215,367.50916,9.770
+17,mlp.gate_proj,4554.3723958333,1617.45947,26.211
+17,mlp.up_proj,2929.2919921875,1617.45947,32.329
+17,mlp.down_proj,65.9468078613,76.05813,103.378
+18,self_attn.k_proj,112.8488566081,1662.32361,4.357
+18,self_attn.v_proj,116.6432393392,1662.32361,5.624
+18,self_attn.q_proj,701.7857259115,1662.32361,8.790
+18,self_attn.o_proj,17.4409243266,75.69903,9.602
+18,mlp.gate_proj,4205.5540364583,1721.79260,26.148
+18,mlp.up_proj,3236.3281250000,1721.79260,30.677
+18,mlp.down_proj,65.6810506185,56.74680,94.329
+19,self_attn.k_proj,96.5452880859,1723.78723,4.000
+19,self_attn.v_proj,88.7289326986,1723.78723,4.382
+19,self_attn.q_proj,667.5008138021,1723.78723,7.848
+19,self_attn.o_proj,20.7163060506,102.27396,9.993
+19,mlp.gate_proj,4838.9674479167,1710.58948,27.312
+19,mlp.up_proj,3917.8339843750,1710.58948,31.722
+19,mlp.down_proj,152.3315022786,165.63925,93.959
+20,self_attn.k_proj,126.2777099609,1647.77136,4.257
+20,self_attn.v_proj,179.2138264974,1647.77136,4.101
+20,self_attn.q_proj,782.6456705729,1647.77136,9.434
+20,self_attn.o_proj,118.5550740560,1024.22034,9.781
+20,mlp.gate_proj,5164.3636067708,1835.57153,26.448
+20,mlp.up_proj,4517.5175781250,1835.57153,27.997
+20,mlp.down_proj,284.3208007812,369.92227,88.879
+21,self_attn.k_proj,110.6768595378,1752.93469,4.578
+21,self_attn.v_proj,772.3488769531,1752.93469,5.362
+21,self_attn.q_proj,740.8355305990,1752.93469,8.532
+21,self_attn.o_proj,240.7297363281,1923.49170,11.185
+21,mlp.gate_proj,10588.8710937500,2117.36060,23.510
+21,mlp.up_proj,6085.0781250000,2117.36060,27.459
+21,mlp.down_proj,751.7847493490,1350.53333,86.471
+22,self_attn.k_proj,170.1837158203,2146.93823,4.568
+22,self_attn.v_proj,763.1953125000,2146.93823,4.651
+22,self_attn.q_proj,936.7376302083,2146.93823,7.255
+22,self_attn.o_proj,34.7368698120,169.29544,9.543
+22,mlp.gate_proj,5529.0826822917,2239.34619,27.010
+22,mlp.up_proj,5456.7252604167,2239.34619,31.153
+22,mlp.down_proj,877.9711914062,1597.97424,94.481
+23,self_attn.k_proj,206.4596354167,2032.07556,5.227
+23,self_attn.v_proj,649.5153808594,2032.07556,4.418
+23,self_attn.q_proj,1010.6800944010,2032.07556,8.517
+23,self_attn.o_proj,123.7481180827,609.02002,8.939
+23,mlp.gate_proj,8399.9153645833,2461.04858,25.634
+23,mlp.up_proj,7469.1373697917,2461.04858,26.189
+23,mlp.down_proj,6214.3098958333,17589.48047,94.444
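One quirk worth noting: the header names six columns (layer,module,loss,samples,damp,time) while each data row carries only five fields, so any parser should index positionally rather than by header name. A sketch that pulls out the worst per-layer quantization loss under that assumption (field 0 = layer, 1 = module, 2 = loss):

```python
import csv
from collections import defaultdict

# Maps layer index -> (highest loss seen, module it occurred in).
worst = defaultdict(lambda: (0.0, ""))

with open("quant_log.csv") as f:
    reader = csv.reader(f)
    next(reader)  # skip the (mismatched) header row
    for row in reader:
        layer, module, loss = int(row[0]), row[1], float(row[2])
        if loss > worst[layer][0]:
            worst[layer] = (loss, module)

for layer in sorted(worst):
    loss, module = worst[layer]
    print(f"layer {layer:2d}: worst loss {loss:12.2f} in {module}")
```

Run against the log above, this would surface the pattern visible in the raw rows: mlp.gate_proj dominates most layers, and losses grow sharply in the last few layers.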
quantize_config.json ADDED
@@ -0,0 +1,23 @@
+{
+  "bits": 4,
+  "group_size": 128,
+  "desc_act": true,
+  "sym": true,
+  "lm_head": false,
+  "quant_method": "ganq",
+  "checkpoint_format": "fake",
+  "pack_dtype": "int32",
+  "meta": {
+    "quantizer": [
+      "gptqmodel:3.1.0-dev"
+    ],
+    "uri": "https://github.com/modelcloud/gptqmodel",
+    "damp_percent": 0.01,
+    "damp_auto_increment": 0.0025,
+    "static_groups": false,
+    "true_sequential": true,
+    "mse": 0.0,
+    "v2": false,
+    "v2_alpha": 0.25
+  }
+}
special_tokens_map.json ADDED
@@ -0,0 +1,25 @@
+{
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "eos_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<|fim_pad|>"
+}
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
+size 11421896
tokenizer_config.json ADDED
@@ -0,0 +1,209 @@
+{
+  "add_bos_token": false,
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "151643": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151644": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151645": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151646": {
+      "content": "<|object_ref_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151647": {
+      "content": "<|object_ref_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151648": {
+      "content": "<|box_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151649": {
+      "content": "<|box_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151650": {
+      "content": "<|quad_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151651": {
+      "content": "<|quad_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151652": {
+      "content": "<|vision_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151653": {
+      "content": "<|vision_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151654": {
+      "content": "<|vision_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151655": {
+      "content": "<|image_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151656": {
+      "content": "<|video_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151657": {
+      "content": "<tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151658": {
+      "content": "</tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151659": {
+      "content": "<|fim_prefix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151660": {
+      "content": "<|fim_middle|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151661": {
+      "content": "<|fim_suffix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151662": {
+      "content": "<|fim_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151663": {
+      "content": "<|repo_name|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151664": {
+      "content": "<|file_sep|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    }
+  },
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "bos_token": null,
+  "chat_template": "{%- if tools %}\n    {{- '<|im_start|>system\\n' }}\n    {%- if messages[0]['role'] == 'system' %}\n        {{- messages[0]['content'] }}\n    {%- else %}\n        {{- 'You are a helpful assistant.' }}\n    {%- endif %}\n    {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n    {%- for tool in tools %}\n        {{- \"\\n\" }}\n        {{- tool | tojson }}\n    {%- endfor %}\n    {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n    {%- if messages[0]['role'] == 'system' %}\n        {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n    {%- else %}\n        {{- '<|im_start|>system\\nYou are a helpful assistant.<|im_end|>\\n' }}\n    {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n    {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n        {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n    {%- elif message.role == \"assistant\" %}\n        {{- '<|im_start|>' + message.role }}\n        {%- if message.content %}\n            {{- '\\n' + message.content }}\n        {%- endif %}\n        {%- for tool_call in message.tool_calls %}\n            {%- if tool_call.function is defined %}\n                {%- set tool_call = tool_call.function %}\n            {%- endif %}\n            {{- '\\n<tool_call>\\n{\"name\": \"' }}\n            {{- tool_call.name }}\n            {{- '\", \"arguments\": ' }}\n            {{- tool_call.arguments | tojson }}\n            {{- '}\\n</tool_call>' }}\n        {%- endfor %}\n        {{- '<|im_end|>\\n' }}\n    {%- elif message.role == \"tool\" %}\n        {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n            {{- '<|im_start|>user' }}\n        {%- endif %}\n        {{- '\\n<tool_response>\\n' }}\n        {{- message.content }}\n        {{- '\\n</tool_response>' }}\n        {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n            {{- '<|im_end|>\\n' }}\n        {%- endif %}\n    {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n    {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "<|endoftext|>",
+  "errors": "replace",
+  "extra_special_tokens": {},
+  "model_max_length": 131072,
+  "pad_token": "<|fim_pad|>",
+  "split_special_tokens": false,
+  "tokenizer_class": "Qwen2TokenizerFast",
+  "unk_token": null,
+  "_commit_hash": null
+}
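The chat_template string above is the Jinja template that `tokenizer.apply_chat_template` renders: it injects a default system prompt and wraps every turn in <|im_start|>/<|im_end|> markers. A minimal sketch (repo id hypothetical):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("smpanaro/Qwen2.5-0.5B-4bit-GANQ")  # hypothetical repo id

messages = [{"role": "user", "content": "What is Git LFS?"}]
# Renders the template above; add_generation_prompt appends the
# '<|im_start|>assistant\n' header so the model continues as the assistant.
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
```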
vocab.json ADDED
The diff for this file is too large to render. See raw diff